├── 1024
│   └── down.py
├── ethCrawler
│   ├── log.txt
│   ├── readme.txt
│   ├── EthTradeList.py
│   ├── TradeList.py
│   ├── etherscantradelist.sql
│   ├── eth.sql
│   ├── crawler.py
│   └── etherEumCrawler.py
├── appdata
│   ├── __init__.py
│   ├── common
│   │   ├── __init__.py
│   │   └── User.py
│   ├── userinfo (2).sql
│   └── crawlinfo.py
├── dingding
│   ├── __init__.py
│   ├── common
│   │   ├── __init__.py
│   │   ├── token.txt
│   │   ├── config.py
│   │   └── functions.py
│   ├── mappers
│   │   ├── __init__.py
│   │   ├── CarCostHistory.py
│   │   ├── ComplainRecord.py
│   │   ├── DailyWorkReport.py
│   │   ├── ServerCheck.py
│   │   ├── Returnvisit.py
│   │   ├── FaultHistory.py
│   │   ├── ImportantEvent.py
│   │   ├── InspectionRecord.py
│   │   └── CostApplication.py
│   ├── error.log
│   ├── test.json
│   └── main.py
├── jingdong
│   ├── __init__.py
│   ├── jingdong
│   │   ├── __init__.py
│   │   ├── spiders
│   │   │   ├── __init__.py
│   │   │   └── price.py
│   │   ├── pipelines.py
│   │   ├── items.py
│   │   ├── middlewares.py
│   │   └── settings.py
│   └── scrapy.cfg
├── miaosha
│   ├── __init__.py
│   ├── checkcode.jpg
│   ├── miaosha
│   │   ├── __init__.py
│   │   ├── spiders
│   │   │   └── __init__.py
│   │   ├── pipelines.py
│   │   ├── items.py
│   │   ├── middlewares.py
│   │   └── settings.py
│   └── scrapy.cfg
├── crawl_fund
│   ├── Spiders
│   │   ├── __init__.py
│   │   └── __pycache__
│   │       ├── Fund.cpython-36.pyc
│   │       └── __init__.cpython-36.pyc
│   ├── common
│   │   ├── __init__.py
│   │   ├── __pycache__
│   │   │   ├── config.cpython-36.pyc
│   │   │   ├── __init__.cpython-36.pyc
│   │   │   └── function.cpython-36.pyc
│   │   ├── function.py
│   │   └── config.py
│   ├── mappers
│   │   ├── __init__.py
│   │   ├── __pycache__
│   │   │   ├── Fund.cpython-36.pyc
│   │   │   └── __init__.cpython-36.pyc
│   │   ├── Detail.py
│   │   └── Fund.py
│   ├── csvfiles
│   │   └── fund.csv
│   ├── main.py
│   ├── sql
│   │   └── funddetail.sql
│   └── htmls
│       └── details
│           ├── 580005
│           │   ├── 101.txt
│           │   ├── 100.txt
│           │   ├── 16.txt
│           │   ├── 19.txt
│           │   ├── 25.txt
│           │   ├── 30.txt
│           │   ├── 32.txt
│           │   ├── 4.txt
│           │   ├── 58.txt
│           │   ├── 88.txt
│           │   ├── 92.txt
│           │   ├── 1.txt
│           │   ├── 10.txt
│           │   ├── 11.txt
│           │   ├── 13.txt
│           │   ├── 14.txt
│           │   ├── 17.txt
│           │   ├── 18.txt
│           │   ├── 2.txt
│           │   ├── 21.txt
│           │   ├── 22.txt
│           │   ├── 23.txt
│           │   └── 24.txt
│           └── 001112
│               ├── 30.txt
│               ├── 25.txt
│               ├── 27.txt
│               ├── 8.txt
│               ├── 9.txt
│               ├── 10.txt
│               ├── 11.txt
│               ├── 12.txt
│               ├── 13.txt
│               ├── 15.txt
│               ├── 23.txt
│               ├── 24.txt
│               ├── 28.txt
│               └── 29.txt
├── .gitignore
├── alishiyong
│   ├── report.csv
│   ├── 冬虫夏草试用报告.xlsx
│   └── itemLinks.txt
├── doubandingtie
│   └── code.jpg
├── crawlDajiawen
│   ├── dajiawen.csv
│   ├── 大家问冬虫夏草Top50问答数据.xlsx
│   ├── goodid.txt
│   └── spider.py
├── easy_distributed_crawler
│   ├── 执行流程.png
│   ├── 爬虫结构.png
│   ├── 爬虫队列.png
│   ├── SlaveNode
│   │   ├── HtmlDownloader.py
│   │   ├── HtmlParser.py
│   │   └── SlaveWork.py
│   ├── readme.md
│   └── MasterNode
│       ├── DataOuput.py
│       └── URlManager.py
├── souhuVideoUpload
│   └── upload.py
├── README.md
└── pdfdownload
    ├── pdfdown.py
    └── pdfdown_mutiprocess.py
/ethCrawler/log.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/appdata/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dingding/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/jingdong/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/miaosha/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/miaosha/checkcode.jpg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/appdata/common/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dingding/common/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dingding/mappers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/miaosha/miaosha/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/crawl_fund/Spiders/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/crawl_fund/common/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/crawl_fund/mappers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/jingdong/jingdong/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dingding/common/token.txt:
--------------------------------------------------------------------------------
1 | ae2cb26810763b2c946b767dea54e932
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | ghostdriver.log
3 | __pycache__/
4 | test.py
--------------------------------------------------------------------------------
/dingding/error.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/dingding/error.log
--------------------------------------------------------------------------------
/alishiyong/report.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/alishiyong/report.csv
--------------------------------------------------------------------------------
/doubandingtie/code.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/doubandingtie/code.jpg
--------------------------------------------------------------------------------
/alishiyong/冬虫夏草试用报告.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/alishiyong/冬虫夏草试用报告.xlsx
--------------------------------------------------------------------------------
/ethCrawler/readme.txt:
--------------------------------------------------------------------------------
1 | - Create a new database named eth
2 | - Run the eth.sql file from this directory against that database
3 | - Update dburl in crawler.py with your MySQL password and server address
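4 | - Example dburl (placeholder credentials, following the same format used elsewhere in this repo): mysql+pymysql://root:<password>@<host>/eth?charset=utf8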
--------------------------------------------------------------------------------
/crawlDajiawen/dajiawen.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawlDajiawen/dajiawen.csv
--------------------------------------------------------------------------------
/easy_distributed_crawler/执行流程.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/easy_distributed_crawler/执行流程.png
--------------------------------------------------------------------------------
/easy_distributed_crawler/爬虫结构.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/easy_distributed_crawler/爬虫结构.png
--------------------------------------------------------------------------------
/easy_distributed_crawler/爬虫队列.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/easy_distributed_crawler/爬虫队列.png
--------------------------------------------------------------------------------
/crawlDajiawen/大家问冬虫夏草Top50问答数据.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawlDajiawen/大家问冬虫夏草Top50问答数据.xlsx
--------------------------------------------------------------------------------
/crawl_fund/Spiders/__pycache__/Fund.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawl_fund/Spiders/__pycache__/Fund.cpython-36.pyc
--------------------------------------------------------------------------------
/crawl_fund/common/__pycache__/config.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawl_fund/common/__pycache__/config.cpython-36.pyc
--------------------------------------------------------------------------------
/crawl_fund/mappers/__pycache__/Fund.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawl_fund/mappers/__pycache__/Fund.cpython-36.pyc
--------------------------------------------------------------------------------
/crawl_fund/Spiders/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawl_fund/Spiders/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/crawl_fund/common/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawl_fund/common/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/crawl_fund/common/__pycache__/function.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawl_fund/common/__pycache__/function.cpython-36.pyc
--------------------------------------------------------------------------------
/crawl_fund/mappers/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shisiying/crawer_python/HEAD/crawl_fund/mappers/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/miaosha/miaosha/spiders/__init__.py:
--------------------------------------------------------------------------------
1 | # This package will contain the spiders of your Scrapy project
2 | #
3 | # Please refer to the documentation for information on how to create and manage
4 | # your spiders.
5 |
--------------------------------------------------------------------------------
/crawl_fund/common/function.py:
--------------------------------------------------------------------------------
1 | def getText(element):
2 |     # Return the element's text, mapping the "---" placeholder to "0".
3 |     if element is not None:
4 |         txt = element.get_text()
5 |         if str(txt).strip() == "---":
6 |             txt = "0"
7 |         return txt
8 |     return ""
--------------------------------------------------------------------------------
/jingdong/jingdong/spiders/__init__.py:
--------------------------------------------------------------------------------
1 | # This package will contain the spiders of your Scrapy project
2 | #
3 | # Please refer to the documentation for information on how to create and manage
4 | # your spiders.
5 |
--------------------------------------------------------------------------------
/crawl_fund/common/config.py:
--------------------------------------------------------------------------------
1 | dbconfig={"host":'localhost',"user":'root',"password":'hello2016',"db":'jijin',"charset":'utf8'}
2 | dburl="mysql+pymysql://root:hello2016@localhost/jijin?charset=utf8"
3 | detailurl="http://fund.eastmoney.com/f10/jjjz_580005.html"
4 |
5 |
--------------------------------------------------------------------------------
/miaosha/scrapy.cfg:
--------------------------------------------------------------------------------
1 | # Automatically created by: scrapy startproject
2 | #
3 | # For more information about the [deploy] section see:
4 | # https://scrapyd.readthedocs.org/en/latest/deploy.html
5 |
6 | [settings]
7 | default = miaosha.settings
8 |
9 | [deploy]
10 | #url = http://localhost:6800/
11 | project = miaosha
12 |
--------------------------------------------------------------------------------
/jingdong/scrapy.cfg:
--------------------------------------------------------------------------------
1 | # Automatically created by: scrapy startproject
2 | #
3 | # For more information about the [deploy] section see:
4 | # https://scrapyd.readthedocs.org/en/latest/deploy.html
5 |
6 | [settings]
7 | default = jingdong.settings
8 |
9 | [deploy]
10 | #url = http://localhost:6800/
11 | project = jingdong
12 |
--------------------------------------------------------------------------------
/dingding/common/config.py:
--------------------------------------------------------------------------------
1 | # sqlacodegen --tables returnvisit --outfile Returnvisit.py mysql+pymysql://root:mysql123456@1.85.18.26/jtdz?charset=utf8
2 | dburl="mysql+pymysql://root:mysql123456@1.185.118.26/jtdz?charset=utf8"
3 | corpid = 'ding95d0e17f21c9bFDFD99a' ## fake id
4 | corpsecret = 'sn8LZ2Vg-ryUtk9YcyyGoIcRBfJ7NoevxwUlh4eXXaySDkwBpKkDFDSDSa3P2QMjitc1fElk' ## fake secret
5 |
--------------------------------------------------------------------------------
/miaosha/miaosha/pipelines.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define your item pipelines here
4 | #
5 | # Don't forget to add your pipeline to the ITEM_PIPELINES setting
6 | # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
7 |
8 |
9 | class MiaoshaPipeline(object):
10 | def process_item(self, item, spider):
11 | return item
12 |
--------------------------------------------------------------------------------
/jingdong/jingdong/pipelines.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define your item pipelines here
4 | #
5 | # Don't forget to add your pipeline to the ITEM_PIPELINES setting
6 | # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
7 |
8 |
9 | class JingdongPipeline(object):
10 | def process_item(self, item, spider):
11 | return item
12 |
--------------------------------------------------------------------------------
/miaosha/miaosha/items.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define here the models for your scraped items
4 | #
5 | # See documentation in:
6 | # http://doc.scrapy.org/en/latest/topics/items.html
7 |
8 | import scrapy
9 |
10 |
11 | class MiaoshaItem(scrapy.Item):
12 | # define the fields for your item here like:
13 | # name = scrapy.Field()
14 | pass
15 |
--------------------------------------------------------------------------------
/jingdong/jingdong/items.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define here the models for your scraped items
4 | #
5 | # See documentation in:
6 | # http://doc.scrapy.org/en/latest/topics/items.html
7 |
8 | import scrapy
9 |
10 |
11 | class JingdongItem(scrapy.Item):
12 | # define the fields for your item here like:
13 | # name = scrapy.Field()
14 | pass
15 |
--------------------------------------------------------------------------------
/easy_distributed_crawler/SlaveNode/HtmlDownloader.py:
--------------------------------------------------------------------------------
1 | # coding:utf-8
2 | import requests
3 |
4 | class HtmlDownloader(object):
5 |
6 | def download(self,url):
7 | if url is None:
8 | return None
9 | user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
10 | headers = {'User-Agent':user_agent}
11 |
12 | r = requests.get(url,headers=headers)
13 | if r.status_code ==200:
14 | r.encoding='utf-8'
15 | return r.text
16 | return None
--------------------------------------------------------------------------------
/easy_distributed_crawler/readme.md:
--------------------------------------------------------------------------------
1 | ## Project Introduction
2 | A simple distributed crawler project. It uses a basic master-slave architecture built on distributed processes and inter-process communication, and covers the modules an ordinary crawler needs: URL management, HTML parsing, HTML downloading, data storage, and crawl scheduling.
3 |
4 | ### Project Layout
5 | MasterNode -- master node
6 | SlaveNode -- slave node
7 |
8 | ### Crawler Architecture
9 |
10 | ![爬虫结构](爬虫结构.png)
11 |
12 | ### Crawler Execution Flow
13 |
14 | ![执行流程](执行流程.png)
15 |
16 | ### Queues for Distributed Inter-Process Communication
17 |
18 | ![爬虫队列](爬虫队列.png)
19 |
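20 | ### Master-Side Queue Setup (sketch)
21 |
22 | A minimal sketch, assuming the master exposes its queues the way SlaveNode/SlaveWork.py expects (same 127.0.0.1:8081 address and b'seven' authkey); the actual master scheduler is not included in this directory, so everything beyond the registration names is an assumption.
23 |
24 | ```python
25 | from multiprocessing.managers import BaseManager
26 | import queue
27 |
28 | task_q = queue.Queue()    # URLs handed out to slave nodes
29 | result_q = queue.Queue()  # parsed results sent back by slaves
30 |
31 | def get_task_queue():
32 |     return task_q
33 |
34 | def get_result_queue():
35 |     return result_q
36 |
37 | BaseManager.register('get_task_queue', callable=get_task_queue)
38 | BaseManager.register('get_result_queue', callable=get_result_queue)
39 |
40 | if __name__ == '__main__':
41 |     # Address, port, and authkey must match what SlaveWork.py connects with.
42 |     manager = BaseManager(address=('127.0.0.1', 8081), authkey=b'seven')
43 |     manager.start()
44 |     # Seed the crawl; a real master would also drain the result queue and
45 |     # feed newly discovered URLs back into the task queue.
46 |     manager.get_task_queue().put('https://baike.baidu.com/item/网络爬虫')
47 | ```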
--------------------------------------------------------------------------------
/crawl_fund/mappers/Detail.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, Integer, Numeric, String
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Funddetail(Base):
11 | __tablename__ = 'funddetail'
12 |
13 | id = Column(Integer, primary_key=True)
14 | fcode = Column(String(10), nullable=False)
15 | fdate = Column(DateTime)
16 | NAV = Column(Numeric(10, 4))
17 | ACCNAV = Column(Numeric(10, 4))
18 | DGR = Column(String(20))
19 | pstate = Column(String(20))
20 | rstate = Column(String(20))
21 |
--------------------------------------------------------------------------------
/appdata/common/User.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, Integer, String
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Userinfo(Base):
11 | __tablename__ = 'userinfo'
12 |
13 | id = Column(Integer, primary_key=True)
14 | phone = Column(String(20), nullable=False)
15 | datetime = Column(String(20), nullable=False)
16 | amount = Column(Integer, nullable=False)
17 | num = Column(String(50), nullable=False)
18 | userid = Column(Integer, nullable=False)
19 | name = Column(String(10), nullable=False)
20 |
--------------------------------------------------------------------------------
/crawl_fund/mappers/Fund.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, Numeric, String
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Myfund(Base):
11 | __tablename__ = 'myfund'
12 |
13 | fcode = Column(String(20), primary_key=True, nullable=False)
14 | fname = Column(String(20))
15 | NAV = Column(Numeric(10, 4))
16 | ACCNAV = Column(Numeric(10, 4))
17 | updatetime = Column(DateTime)
18 | fdate = Column(DateTime, primary_key=True, nullable=False)
19 | DGR = Column(String(20))
20 | DGV = Column(String(20))
21 | fee = Column(String(20))
22 |
--------------------------------------------------------------------------------
/ethCrawler/EthTradeList.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, Integer, String
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Etherscantradelist(Base):
11 | __tablename__ = 'etherscantradelist'
12 |
13 | id = Column(Integer, primary_key=True)
14 | txHash = Column(String(70, 'utf8_unicode_ci'))
15 | age = Column(String(30, 'utf8_unicode_ci'))
16 | fromadress = Column(String(42, 'utf8_unicode_ci'))
17 | to = Column(String(42, 'utf8_unicode_ci'))
18 | value = Column(String(20, 'utf8_unicode_ci'))
19 | token = Column(String(42, 'utf8_unicode_ci'))
20 | name = Column(String(50, 'utf8_unicode_ci'))
21 |
--------------------------------------------------------------------------------
/ethCrawler/TradeList.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, Integer, String
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class tradelist(Base):
11 | __tablename__ = 'tradelist'
12 |
13 | id = Column(Integer, primary_key=True)
14 | txHash = Column(String(70, 'utf8_unicode_ci'))
15 | blockHeight = Column(String(10, 'utf8_unicode_ci'))
16 | amount = Column(String(30, 'utf8_unicode_ci'))
17 | originatorAdress = Column(String(50, 'utf8_unicode_ci'))
18 | recevierAdress = Column(String(50, 'utf8_unicode_ci'))
19 | confirmTime = Column(DateTime)
20 | brokerage = Column(String(15, 'utf8_unicode_ci'))
21 |
--------------------------------------------------------------------------------
/crawlDajiawen/goodid.txt:
--------------------------------------------------------------------------------
1 | 560734559975
2 | 549724367159
3 | 551078442907
4 | 548380875678
5 | 547316356177
6 | 563146870502
7 | 557266664952
8 | 546603177326
9 | 556584196136
10 | 546202817226
11 | 529058554339
12 | 541786219450
13 | 20201622423
14 | 558884189926
15 | 43448003366
16 | 528989787954
17 | 526945890628
18 | 556196525667
19 | 561737055846
20 | 560884655029
21 | 548519225616
22 | 531125275296
23 | 556588556661
24 | 558366035242
25 | 528754947994
26 | 555944007766
27 | 36150091939
28 | 560801413654
29 | 36589584338
30 | 564333118607
31 | 560508968944
32 | 547416061912
33 | 525751414595
34 | 562277053264
35 | 560282892865
36 | 525596083676
37 | 549823127904
38 | 533060108962
39 | 530738234731
40 | 560283404399
41 | 14365652310
42 | 536454208173
43 | 554671065906
44 | 560601761588
45 |
--------------------------------------------------------------------------------
/souhuVideoUpload/upload.py:
--------------------------------------------------------------------------------
1 | # _*_ coding: utf-8 _*_
2 | import requests
3 |
4 | def login(session, login_url, user_name, passwd):
5 |     # The original script prepared these headers and form fields but never
6 |     # sent the request; the Sohu passport endpoint is not recorded in this
7 |     # repo, so the caller has to supply login_url.
8 |     headers = {
9 |         'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
10 |         'cookie': 'beans_dmp_done = 1;IPLOC = CN;SUV = 1710021626598866;reqtype = pc;gidinf = x099980109ee0ce6660204c290009a05135098259632;beans_freq = 1;lastpassport = 15626832124;jv = 4de511653f75dab9336e058a95ad09ef - qgxCfp3p1510458408529'
11 |     }
12 |     form_data = {
13 |         'userid': user_name,
14 |         'password': passwd,
15 |         'persistentCookie': 1,
16 |         'appid': 107405,
17 |         'callback': 'passport401_cb1510458090735'
18 |     }
19 |     return session.post(login_url, headers=headers, data=form_data)
20 |
21 |
22 | if __name__ == '__main__':
23 |     s = requests.Session()
24 |     user_name = '15626832124'
25 |     passwd = 'hello2016'
26 |     # login(s, '<passport login url>', user_name, passwd)
--------------------------------------------------------------------------------
/jingdong/jingdong/spiders/price.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import scrapy
3 | import json
4 | import re
5 |
6 |
7 | class PriceSpider(scrapy.Spider):
8 |     name = 'price'
9 |     allowed_domains = ['jd.com']
10 |
11 |     def start_requests(self):
12 |         commodity_url = 'https://item.jd.com/15890328841.html'
13 |         commodity_id = re.findall(r'(\d+)', commodity_url)[0]
14 |         url_end = '&area=1_2901_4135_0&cat=737,794,878&extraParam={"originid":"1"}'
15 |         price_url = 'https://c0.3.cn/stock?skuId={}{}'.format(
16 |             commodity_id, url_end)
17 |         yield scrapy.Request(price_url)
18 |
19 |     def parse(self, response):
20 |         # The stock endpoint returns JSON; pass the parsed payload on as
21 |         # the scraped item.
22 |         data = json.loads(response.text)
23 |         yield data
24 |
--------------------------------------------------------------------------------
/crawl_fund/csvfiles/fund.csv:
--------------------------------------------------------------------------------
1 | fcode,fname,NAV,ACCNAV,updatetime,fdate,DGR,DGV,fee
2 | 000001,华夏成长,1.1900,3.5210,2017-10-04 23:12:27,2017-09-28 00:00:00,0.25%,0.0030,0.15%
3 | 000003,中海可转债A,0.8120,1.0220,2017-10-04 23:12:20,2017-09-28 00:00:00,-0.12%,-0.0010,0.08%
4 | 000004,中海可转债C,0.8150,1.0250,2017-10-04 23:12:20,2017-09-28 00:00:00,-0.12%,-0.0010,0.00%
5 | 000005,嘉实增强信用定期债券,1.0040,1.2050,2017-10-04 23:12:17,2017-09-28 00:00:00,0.00%,0.0000,0.08%
6 | 000007,鹏华国企债债券,1.1381,1.1469,2017-10-04 23:12:18,2017-09-28 00:00:00,-0.05%,-0.0006,0.08%
7 | 000008,嘉实中证500ETF联接,1.8007,1.8007,2017-10-04 23:12:21,2017-09-28 00:00:00,-0.21%,-0.0038,0.12%
8 | 000011,华夏大盘精选,12.2000,16.2800,2017-10-04 23:12:27,2017-09-28 00:00:00,0.24%,0.0290,0.15%
9 | 000014,华夏聚利债券,1.1670,1.1670,2017-10-04 23:12:20,2017-09-28 00:00:00,-0.09%,-0.0010,0.06%
10 | 000015,华夏纯债债券A,1.1700,1.2000,2017-10-04 23:12:30,2017-09-28 00:00:00,0.09%,0.0010,0.08%
11 | 000016,华夏纯债债券C,1.1480,1.1780,2017-10-04 23:12:15,2017-09-28 00:00:00,0.00%,0.0000,0.00%
12 |
--------------------------------------------------------------------------------
/crawl_fund/main.py:
--------------------------------------------------------------------------------
1 | from crawl_fund.Spiders.Fund import SaveDb, getFundhtml
2 | from sqlalchemy import create_engine
3 | from crawl_fund.common.config import dburl
4 | from sqlalchemy.orm import sessionmaker
5 | from crawl_fund.mappers.Fund import Myfund
6 | import csv
7 | import pandas
8 | if __name__=='__main__':
9 |     # Load the crawled data into the database
10 |     # SaveDb()
11 |     # Dump the table into a csv file
12 |     # engine = create_engine(dburl, echo=True)
13 |     # mysession = sessionmaker(bind=engine)()
14 |     # result = mysession.query(Myfund).limit(10).all()
15 |     # with open('./csvfiles/fund.csv','w',encoding='UTF-8') as file:
16 |     #     writer = csv.writer(file)
17 |     #     writer.writerow(['fcode','fname','NAV','ACCNAV','updatetime','fdate','DGR','DGV','fee'])
18 |     #     for re in result:
19 |     #         writer.writerow([re.fcode, re.fname, re.NAV, re.ACCNAV, re.updatetime, re.fdate, re.DGR, re.DGV, re.fee])
20 |     # Read the csv back and sort funds by NAV, keeping fund codes as strings
21 |     df = pandas.read_csv('./csvfiles/fund.csv', dtype={'fcode': str})
22 |     result = df.sort_values(by='NAV', ascending=False)
23 |     print(result)
--------------------------------------------------------------------------------
/crawl_fund/sql/funddetail.sql:
--------------------------------------------------------------------------------
1 | /*
2 | Navicat MySQL Data Transfer
3 |
4 | Source Server : 5kcrm
5 | Source Server Version : 50505
6 | Source Host : localhost:3306
7 | Source Database : jijin
8 |
9 | Target Server Type : MYSQL
10 | Target Server Version : 50505
11 | File Encoding : 65001
12 |
13 | Date: 2017-10-07 22:00:34
14 | */
15 |
16 | SET FOREIGN_KEY_CHECKS=0;
17 |
18 | -- ----------------------------
19 | -- Table structure for `funddetail`
20 | -- ----------------------------
21 | DROP TABLE IF EXISTS `funddetail`;
22 | CREATE TABLE `funddetail` (
23 | `id` int(11) NOT NULL COMMENT '自增字段',
24 | `fcode` varchar(10) NOT NULL COMMENT '基金编码',
25 | `fdate` datetime DEFAULT NULL COMMENT '基金日期',
26 | `NAV` decimal(10,4) DEFAULT NULL COMMENT '单位净值',
27 | `ACCNAV` decimal(10,4) DEFAULT NULL COMMENT '累计净值',
28 | `DGR` varchar(20) DEFAULT NULL COMMENT '日增长率',
29 | `pstate` varchar(20) DEFAULT NULL COMMENT '申购状态',
30 | `rstate` varchar(20) DEFAULT NULL COMMENT '赎回状态',
31 | PRIMARY KEY (`id`)
32 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
33 |
34 | -- ----------------------------
35 | -- Records of funddetail
36 | -- ----------------------------
37 |
--------------------------------------------------------------------------------
/dingding/mappers/CarCostHistory.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, String, Text
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Carcosthistory(Base):
11 | __tablename__ = 'carcosthistory'
12 |
13 | approvalNumber = Column(String(255), primary_key=True)
14 | headlin = Column(String(255))
15 | approvalStatus = Column(String(255))
16 | approvalResult = Column(String(255))
17 | approvalTime = Column(DateTime)
18 | approvalFinshTime = Column(DateTime)
19 | initiatorsNumber = Column(String(255))
20 | initiatorsUserID = Column(String(255))
21 | initiatorsName = Column(String(255))
22 | initiatorsDepartment = Column(String(255))
23 | historicalApproverName = Column(String(255))
24 | approvalHistory = Column(String(255))
25 | currentProcessingName = Column(String(255))
26 | reviewsTake = Column(String(255))
27 | carNumber = Column(String(255))
28 | highwaySection = Column(String(255))
29 | mileage = Column(String(255))
30 | oilPrice = Column(String(255))
31 | cost = Column(String(255))
32 | instrumenBoardPhoto = Column(Text)
33 | receiptPhoto = Column(Text)
34 |
--------------------------------------------------------------------------------
/dingding/mappers/ComplainRecord.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, String, Text
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Complainrecord(Base):
11 | __tablename__ = 'complainrecord'
12 |
13 | approvalNumber = Column(String(255), primary_key=True, nullable=False)
14 | headline = Column(String(255))
15 | approvalStatus = Column(String(255))
16 | approvalResult = Column(String(255))
17 | approvalTime = Column(DateTime)
18 | approvalFinishTime = Column(DateTime)
19 | initiatorsNumber = Column(String(255))
20 | initiatorsUserID = Column(String(255))
21 | initiatorsName = Column(String(255))
22 | initiatorsDepartment = Column(String(255))
23 | historicalApproverName = Column(String(255))
24 | approverHistory = Column(Text)
25 | currentProcessingName = Column(String(255))
26 | reviewTake = Column(String(255))
27 | customerName = Column(String(255))
28 | highwaySection = Column(String(255))
29 | list = Column(String(255), primary_key=True, nullable=False)
30 | complain = Column(String(255))
31 | photo = Column(String(255))
32 | accessory = Column(Text)
33 |
--------------------------------------------------------------------------------
/ethCrawler/etherscantradelist.sql:
--------------------------------------------------------------------------------
1 | /*
2 | Navicat MySQL Data Transfer
3 |
4 | Source Server : 5kcrm
5 | Source Server Version : 50505
6 | Source Host : localhost:3306
7 | Source Database : eth
8 |
9 | Target Server Type : MYSQL
10 | Target Server Version : 50505
11 | File Encoding : 65001
12 |
13 | Date: 2018-06-03 20:57:19
14 | */
15 |
16 | SET FOREIGN_KEY_CHECKS=0;
17 |
18 | -- ----------------------------
19 | -- Table structure for `etherscantradelist`
20 | -- ----------------------------
21 | DROP TABLE IF EXISTS `etherscantradelist`;
22 | CREATE TABLE `etherscantradelist` (
23 | `id` int(11) NOT NULL AUTO_INCREMENT,
24 | `txHash` varchar(70) COLLATE utf8_unicode_ci DEFAULT NULL,
25 | `age` varchar(30) COLLATE utf8_unicode_ci DEFAULT NULL,
26 | `fromadress` varchar(42) COLLATE utf8_unicode_ci DEFAULT NULL,
27 | `to` varchar(42) COLLATE utf8_unicode_ci DEFAULT NULL,
28 | `value` varchar(20) COLLATE utf8_unicode_ci DEFAULT NULL,
29 | `token` varchar(42) COLLATE utf8_unicode_ci DEFAULT NULL,
30 | `name` varchar(50) COLLATE utf8_unicode_ci DEFAULT NULL,
31 | PRIMARY KEY (`id`)
32 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
33 |
34 | -- ----------------------------
35 | -- Records of etherscantradelist
36 | -- ----------------------------
37 |
--------------------------------------------------------------------------------
/dingding/test.json:
--------------------------------------------------------------------------------
1 | # 'approvalNumber': data_list['process_instance_id'],
2 | # 'headline': data_list['title'],
3 | # 'approvalStatus': 'COMPLETED',
4 | # 'approvalResult': data_list['process_instance_result'],
5 | # 'approvalTime': data_list['create_time'],
6 | # 'approvalFinshTime': data_list['finish_time'],
7 | # 'initiatorsNumber': None,
8 | # 'initiatorsUserID': data_list['originator_userid'],
9 | # 'initiatorsName': getName(data_list['title']),
10 | # 'initiatorsDepartment': data_list['originator_dept_id'],
11 | # 'historicalApproverName': data_list['approver_userid_list']['string'],
12 | # 'approvalHistory': data_list['approver_userid_list']['string'],
13 | # 'currentProcessingName':
14 | #     data_list['approver_userid_list']['string'][-1],
15 | # 'reviewTake': duration(data_list['create_time'], data_list['finish_time']), ## days
16 | #
--------------------------------------------------------------------------------
/ethCrawler/eth.sql:
--------------------------------------------------------------------------------
1 | /*
2 | Navicat MySQL Data Transfer
3 |
4 | Source Server : 5kcrm
5 | Source Server Version : 50505
6 | Source Host : localhost:3306
7 | Source Database : eth
8 |
9 | Target Server Type : MYSQL
10 | Target Server Version : 50505
11 | File Encoding : 65001
12 |
13 | Date: 2018-06-02 13:45:15
14 | */
15 |
16 | SET FOREIGN_KEY_CHECKS=0;
17 |
18 | -- ----------------------------
19 | -- Table structure for `tradelist`
20 | -- ----------------------------
21 | DROP TABLE IF EXISTS `tradelist`;
22 | CREATE TABLE `tradelist` (
23 | `id` int(8) NOT NULL AUTO_INCREMENT COMMENT 'id',
24 | `txHash` varchar(70) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '交易哈希',
25 | `blockHeight` varchar(10) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '高度',
26 | `amount` varchar(30) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '金额变化数量',
27 | `originatorAdress` varchar(50) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '发送方地址',
28 | `recevierAdress` varchar(50) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '接受者地址',
29 | `confirmTime` datetime DEFAULT NULL COMMENT '确认时间',
30 | `brokerage` varchar(15) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '矿工费',
31 | PRIMARY KEY (`id`)
32 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
33 |
34 | -- ----------------------------
35 | -- Records of tradelist
36 | -- ----------------------------
37 |
--------------------------------------------------------------------------------
/dingding/mappers/DailyWorkReport.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, Date, DateTime, String, Text
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Dailyworkreport(Base):
11 | __tablename__ = 'dailyworkreport'
12 |
13 | approvalNumber = Column(String(255), primary_key=True)
14 | headline = Column(String(255))
15 | approvalStatus = Column(String(255))
16 | approvalResult = Column(String(255))
17 | approvalTime = Column(DateTime)
18 | approvalFinishTime = Column(DateTime)
19 | initiatorsNumber = Column(String(255))
20 | initiatorsUserID = Column(String(255))
21 | initiatorsName = Column(String(255))
22 | initiatorsDepartment = Column(String(255))
23 | historicalApproverName = Column(String(255))
24 | approverHistory = Column(Text)
25 | currentProcessingName = Column(String(255))
26 | reviewTake = Column(String(255))
27 | highwaySection = Column(String(255))
28 | date = Column(Date)
29 | weather = Column(String(255))
30 | temperature = Column(String(255))
31 | rate = Column(String(255))
32 | ratePhoto = Column(Text)
33 | workGoing = Column(String(255))
34 | unfinshedWork = Column(String(255))
35 | importantEvent = Column(String(255))
36 | photo = Column(Text)
37 | accessory = Column(Text)
38 |
--------------------------------------------------------------------------------
/easy_distributed_crawler/SlaveNode/HtmlParser.py:
--------------------------------------------------------------------------------
1 | #coding:utf-8
2 |
3 | import re
4 | from urllib.parse import urljoin
5 |
6 | from bs4 import BeautifulSoup
7 |
8 | class HtmlParse(object):
9 |
10 |     ## Parse the page content and extract new urls and data
11 |     def parser(self,page_url,html_cont):
12 |         if page_url is None or html_cont is None:
13 |             return
14 |         # html_cont is already decoded text, so no from_encoding is needed
15 |         soup = BeautifulSoup(html_cont,'html.parser')
16 |         new_urls = self.get_new_urls(page_url,soup)
17 |         new_data = self.get_new_data(page_url,soup)
18 |
19 |         return new_urls,new_data
20 |
21 |     ## Extract the set of new urls
22 |     def get_new_urls(self,page_url,soup):
23 |
24 |         new_urls = set()
25 |
26 |         links = soup.find_all('a',href=re.compile(r'/item/.*'))
27 |         for link in links:
28 |             ## take the href attribute
29 |             new_url = link['href']
30 |             # join it into an absolute url
31 |             new_full_url = urljoin(page_url,new_url)
32 |             new_urls.add(new_full_url)
33 |         return new_urls
34 |
35 |     ## Extract the useful data
36 |     def get_new_data(self,page_url,soup):
37 |         data = {}
38 |         data['url'] = page_url
39 |         title = soup.find('dd', class_='lemmaWgt-lemmaTitle-title').find('h1')
40 |         data['title'] = title.get_text()
41 |         summary = soup.find('div', class_='lemma-summary')
42 |         # get_text() returns all text in the tag and its descendants as a
43 |         # single unicode string
44 |         data['summary'] = summary.get_text()
45 |         return data
46 |
--------------------------------------------------------------------------------
/dingding/mappers/ServerCheck.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, String, Text
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Servercheck(Base):
11 | __tablename__ = 'servercheck'
12 |
13 | approvalNumber = Column(String(255), primary_key=True)
14 | headline = Column(String(255))
15 | approvalStatus = Column(String(255))
16 | approvalResult = Column(String(255))
17 | approvalTime = Column(DateTime)
18 | approvalFinshTime = Column(DateTime)
19 | initiatorsNumber = Column(String(255))
20 | initiatorsUserID = Column(String(255))
21 | initiatorsName = Column(String(255))
22 | initiatorsDepartment = Column(String(255))
23 | historicalApproverName = Column(String(255))
24 | approvalHistory = Column(Text)
25 | currentProcessingName = Column(String(255))
26 | reviewTake = Column(String(255))
27 | highwaySection = Column(String(255))
28 | serverName = Column(String(255))
29 | CPU = Column(String(255))
30 | RAM = Column(String(255))
31 | virusDB = Column(String(255))
32 | virusDBphoto = Column(Text)
33 | CPUphoto = Column(Text)
34 | presentTime = Column(String(255))
35 | presentSite = Column(String(255))
36 | serverBrand = Column(String(255))
37 | serverStatus = Column(String(255))
38 | statusSign = Column(String(255))
39 | hddSign = Column(String(255))
40 |
--------------------------------------------------------------------------------
/easy_distributed_crawler/MasterNode/DataOuput.py:
--------------------------------------------------------------------------------
1 | # coding:utf-8
2 |
3 | import codecs
4 | import time
5 | class DataOutput(object):
6 |
7 |     def __init__(self):
8 |         self.filepath = 'baike_%s.html' % (time.strftime("%Y_%m_%d_%H_%M_%S",time.localtime()))
9 |         self.output_head(self.filepath)
10 |         self.datas = []
11 |
12 |     def store_data(self,data):
13 |         if data is None:
14 |             return
15 |         self.datas.append(data)
16 |         # flush to disk in batches rather than on every item
17 |         if len(self.datas)>10:
18 |             self.output_html(self.filepath)
19 |
20 |     ## write the html header
21 |     def output_head(self,path):
22 |         fout = codecs.open(path,'w',encoding='utf-8')
23 |         fout.write("<html>")
24 |         fout.write("<body>")
25 |         fout.write("<table>")
26 |         fout.close()
27 |
28 |     ## append the buffered data to the html file
29 |     def output_html(self,path):
30 |         fout = codecs.open(path,'a',encoding='utf-8')
31 |         for data in self.datas:
32 |             fout.write("<tr>")
33 |             fout.write("<td>%s</td>" % data['url'])
34 |             fout.write("<td>%s</td>" % data['title'])
35 |             fout.write("<td>%s</td>" % data['summary'])
36 |             fout.write("</tr>")
37 |         # clear the buffer after writing; removing items while iterating
38 |         # would skip every other entry
39 |         self.datas = []
40 |         fout.close()
41 |
42 |     ## close off the html document
43 |     def output_end(self,path):
44 |         fout = codecs.open(path,'a',encoding='utf-8')
45 |         fout.write("</table>")
46 |         fout.write("</body>")
47 |         fout.write("</html>")
48 |         fout.close()
49 |
--------------------------------------------------------------------------------
/dingding/mappers/Returnvisit.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, String, Text
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Returnvisit(Base):
11 | __tablename__ = 'returnvisit'
12 |
13 | approvalNumber = Column(String(255), primary_key=True)
14 | headline = Column(String(255))
15 | approvalStatus = Column(String(255))
16 | approvalResult = Column(String(255))
17 | approvalTime = Column(DateTime)
18 | approvalFinishTime = Column(DateTime)
19 | initiatorsNumber = Column(String(255))
20 | initiatorsUserID = Column(String(255))
21 | initiatorsName = Column(String(255))
22 | initiatorsDepartment = Column(String(255))
23 | historicalApproverName = Column(String(255))
24 | approverHistory = Column(Text)
25 | currentProcessingName = Column(String(255))
26 | reviewTake = Column(String(255))
27 |
28 | highwaySection = Column(String(255))
29 | teamName = Column(String(255))
30 | chargePersonName = Column(String(255))
31 | customerName = Column(String(255))
32 | complain = Column(String(255))
33 | feedBack = Column(String(255))
34 | faultComplain = Column(String(255))
35 | dress = Column(String(255))
36 | speed = Column(String(255))
37 | ability = Column(String(255))
38 | attitude = Column(String(255))
39 | communication = Column(String(255))
40 | accessory = Column(Text)
41 |
--------------------------------------------------------------------------------
/dingding/mappers/FaultHistory.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, String, Text
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Faulthistory(Base):
11 | __tablename__ = 'faulthistory'
12 |
13 | approvalNumber = Column(String(255), primary_key=True)
14 | headline = Column(String(255))
15 | approvalStatus = Column(String(255))
16 | approvalResult = Column(String(255))
17 | approvalTime = Column(DateTime)
18 | approvalFinshTime = Column(DateTime)
19 | initiatorsNumber = Column(String(255))
20 | initiatorsUserID = Column(String(255))
21 | initiatorsName = Column(String(255))
22 | initiatorsDepartment = Column(String(255))
23 | historicalApproverName = Column(Text)
24 | approvalHistory = Column(Text)
25 | currentProcessingName = Column(String(255))
26 | reviewTake = Column(String(255))
27 | highwaySection = Column(String(255))
28 | controalStation = Column(String(255))
29 | Station = Column(String(255))
30 | lane = Column(String(255))
31 | faultType = Column(String(255))
32 | faultPhenomenon = Column(String(255))
33 | otherPhenomenon = Column(String(255))
34 | result = Column(String(255))
35 | presentTime = Column(String(255))
36 | presentSite = Column(String(255))
37 | photo = Column(Text)
38 | photo2 = Column(Text)
39 | photo3 = Column(Text)
40 | photo4 = Column(Text)
41 |
--------------------------------------------------------------------------------
/dingding/mappers/ImportantEvent.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, String, Text
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Importantevent(Base):
11 | __tablename__ = 'importantevent'
12 |
13 | approvalNumber = Column(String(255), primary_key=True)
14 | headline = Column(String(255))
15 | approvalStatus = Column(String(255))
16 | approvalResult = Column(String(255))
17 | approvalTime = Column(DateTime)
18 | approvalFinishTime = Column(DateTime)
19 | initiatorsNumber = Column(String(255))
20 | initiatorsUserID = Column(String(255))
21 | initiatorsName = Column(String(255))
22 | InitiatorsDepartment = Column(String(255), nullable=False)
23 | historicalApproverName = Column(Text)
24 | approvalHistory = Column(Text)
25 | currentProcessingName = Column(String(255))
26 | reviewTake = Column(String(255))
27 | department = Column(String(255))
28 | highwaySection = Column(String(255))
29 | eventTime = Column(String(255))
30 | FinshTime = Column(String(255))
31 | influenceTime = Column(String(255))
32 | eventSite = Column(String(255))
33 | eventType = Column(String(255))
34 | eventDescription = Column(String(255))
35 | influence = Column(String(255))
36 | method = Column(String(255))
37 | loss = Column(String(255))
38 | lossCapital = Column(String(255))
39 | photo = Column(Text)
40 | accessory = Column(String(255))
41 |
--------------------------------------------------------------------------------
/dingding/mappers/InspectionRecord.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, String
3 | from sqlalchemy.ext.declarative import declarative_base
4 |
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Inspectionrecord(Base):
11 | __tablename__ = 'inspectionrecord'
12 |
13 | type = Column(String(255))
14 | approvalNumber = Column(String(255), primary_key=True)
15 | headline = Column(String(255))
16 | approvalStatus = Column(String(255))
17 | approvalResult = Column(String(255))
18 | approvalTime = Column(DateTime)
19 | approvalFinshTime = Column(DateTime)
20 | initiatorsNumber = Column(String(255))
21 | initiatorsUserID = Column(String(255))
22 | initiatorsName = Column(String(255))
23 | initiatorsDepartment = Column(String(255))
24 | historicalApproverName = Column(String(255))
25 | approvalHistory = Column(String(255))
26 | currentProcessingName = Column(String(255))
27 | reviewTake = Column(String(255))
28 | highwaySection = Column(String(255))
29 | recordType = Column(String(255))
30 | site = Column(String(255))
31 | otherSite = Column(String(255))
32 | temperature = Column(String(255))
33 | humidness = Column(String(255))
34 | jobContent = Column(String(255))
35 | foundFault = Column(String(255))
36 | presentTime = Column(String(255))
37 | presentSite = Column(String(255))
38 | photo = Column(String(255))
39 | photo2 = Column(String(255))
40 | photo3 = Column(String(255))
41 | photo4 = Column(String(255))
42 |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/30.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-05-22 | 1.0770 | 1.0770 | | 封闭期 | 封闭期 | |
| 2015-05-15 | 1.0210 | 1.0210 | | 封闭期 | 封闭期 | |
| 2015-05-08 | 1.0220 | 1.0220 | | 封闭期 | 封闭期 | |
| 2015-04-30 | 1.0210 | 1.0210 | | 封闭期 | 封闭期 | |
| 2015-04-24 | 1.0110 | 1.0110 | | 封闭期 | 封闭期 | |
| 2015-04-17 | 1.0150 | 1.0150 | | 封闭期 | 封闭期 | |
| 2015-04-10 | 1.0090 | 1.0090 | | 封闭期 | 封闭期 | |
| 2015-04-07 | 1.0000 | 1.0000 | | 封闭期 | 封闭期 | |
--------------------------------------------------------------------------------
/appdata/userinfo (2).sql:
--------------------------------------------------------------------------------
1 | -- phpMyAdmin SQL Dump
2 | -- version 4.7.4
3 | -- https://www.phpmyadmin.net/
4 | --
5 | -- Host: 127.0.0.1
6 | -- Generation Time: 2017-11-18 04:46:36
7 | -- Server version: 10.1.26-MariaDB
8 | -- PHP Version: 7.0.23
9 |
10 | SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
11 | SET AUTOCOMMIT = 0;
12 | START TRANSACTION;
13 | SET time_zone = "+00:00";
14 |
15 |
16 | /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
17 | /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
18 | /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
19 | /*!40101 SET NAMES utf8mb4 */;
20 |
21 | --
22 | -- Database: `appdata`
23 | --
24 |
25 | -- --------------------------------------------------------
26 |
27 | --
28 | -- Table structure for table `userinfo`
29 | --
30 |
31 | CREATE TABLE `userinfo` (
32 | `id` int(11) NOT NULL,
33 | `phone` varchar(20) NOT NULL,
34 | `datetime` varchar(20) NOT NULL,
35 | `amount` int(11) NOT NULL,
36 | `num` varchar(50) NOT NULL,
37 | `userid` int(11) NOT NULL,
38 | `name` varchar(10) NOT NULL
39 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
40 |
41 | --
42 | -- Indexes for dumped tables
43 | --
44 |
45 | --
46 | -- Indexes for table `userinfo`
47 | --
48 | ALTER TABLE `userinfo`
49 | ADD PRIMARY KEY (`id`);
50 |
51 | --
52 | -- AUTO_INCREMENT for dumped tables
53 | --
54 |
55 | --
56 | -- AUTO_INCREMENT for table `userinfo`
57 | --
58 | ALTER TABLE `userinfo`
59 | MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
60 | COMMIT;
61 |
62 | /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
63 | /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
64 | /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
65 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | ## [Simple distributed multi-process crawler](https://github.com/shisiying/crawer_python/tree/master/easy_distributed_crawler)
3 |
4 | A simple distributed crawler project. It uses a basic master-slave architecture built on distributed processes and inter-process communication, and covers the modules an ordinary crawler needs: URL management, HTML parsing, HTML downloading, data storage, and crawl scheduling.
5 |
6 | ## [Fund crawler](https://github.com/shisiying/crawer_python/blob/master/crawl_fund/Spiders/FundDetail.py)
7 |
8 | This is a demo for crawling the website 'http://fund.eastmoney.com/fund.html'.
9 | In this demo you can learn how to use the selenium, beautifulsoup, sqlalchemy, process, and manager modules.
10 |
11 | ## [Douban scripted login with manual captcha entry and automatic thread bumping](https://github.com/shisiying/crawer_python/blob/master/doubandingtie/login_douban.py)
12 |
13 | A robot for posting Douban comments.
14 |
15 | ## [Multi-threaded site-wide download of pdf and dwg files](https://github.com/shisiying/crawer_python/blob/master/pdfdownload/pdfdown_mutiprocess.py)
16 |
17 | A crawler for the website http://www.jameshardie.co.nz/specifiers/cad-library
18 |
19 | ## [Fetching app api data and batch-inserting it into the database](https://github.com/shisiying/crawer_python/blob/master/appdata/crawlinfo.py)
20 | A crawler for the app api.
21 |
22 | ## [Multi-threaded DingTalk data sync and database updates](https://github.com/shisiying/crawer_python/blob/master/dingding/main.py)
23 | An automated crawler for DingTalk data.
24 |
25 | ## [Site-wide asynchronous Toutiao scraping with selenium + chrome + asyncio + aiohttp across multiple processes](https://github.com/shisiying/crawer_python/blob/master/aiohttptoutiao/toutiao.py)
26 | Site-wide Toutiao data.
27 |
28 | ## [Scraping Taobao "Ask Everyone" comment data with selenium + chrome](https://github.com/shisiying/crawer_python/tree/master/crawlDajiawen)
29 | Comment data from Taobao's "Ask Everyone" product Q&A.
30 |
31 | ## [Scraping Ali trial-report data with selenium + chrome](https://github.com/shisiying/crawer_python/tree/master/alishiyong)
32 | User ratings and other data from Ali trial reports.
33 |
34 | ## [Paginated crawling of blockchain trade records by POSTing json parameters](https://github.com/shisiying/crawer_python/blob/master/ethCrawler/crawler.py)
35 | With minor changes this can crawl every trade record a site exposes; a sketch of the pagination pattern is below.
36 |
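37 | A sketch of that pattern (the endpoint and field names below are placeholders, not the actual API that crawler.py talks to):
38 |
39 | ```python
40 | import requests
41 |
42 | def fetch_trade_pages(api_url, page_size=50):
43 |     """Yield records page by page, POSTing json pagination parameters."""
44 |     page = 1
45 |     while True:
46 |         payload = {'page': page, 'pageSize': page_size}
47 |         resp = requests.post(api_url, json=payload, timeout=10)
48 |         resp.raise_for_status()
49 |         records = resp.json().get('data', [])
50 |         if not records:
51 |             break
52 |         yield from records
53 |         page += 1
54 | ```
55 |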
--------------------------------------------------------------------------------
/dingding/mappers/CostApplication.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from sqlalchemy import Column, DateTime, String, Text,Integer
3 | from sqlalchemy.ext.declarative import declarative_base
4 | '''费用申请表'''
5 |
6 | Base = declarative_base()
7 | metadata = Base.metadata
8 |
9 |
10 | class Costapplication(Base):
11 | __tablename__ = 'costapplication'
12 |
13 | costType = Column(String(255))
14 | approvalNumber = Column(String(255), primary_key=True, nullable=False)
15 | headlin = Column(String(255))
16 | approvalStatus = Column(String(255))
17 | approvalResult = Column(String(255))
18 | approvalTime = Column(DateTime)
19 | approvalFinshTime = Column(DateTime)
20 | initiatorsNumber = Column(String(255))
21 | initiatorsUserID = Column(String(255))
22 | initiatorsName = Column(String(255))
23 | InitiatorsDepartment = Column(String(255))
24 | historicalApproverName = Column(Text)
25 | approvalHistory = Column(Text)
26 | currentProcessingName = Column(String(255))
27 | reviewsTake = Column(String(255))
28 | companyName = Column(String(255))
29 | highwaySection = Column(String(255))
30 | type = Column(String(255))
31 | expensesStatement = Column(Integer, primary_key=True, nullable=False)
32 | projectName = Column(String(255))
33 | tradeMark = Column(String(255))
34 | specificationModels = Column(String(255))
35 | units = Column(String(255))
36 | amount = Column(String(255))
37 | unitPrice = Column(String(255))
38 | totalPrice = Column(String(255))
39 | stationName = Column(String(255))
40 | laneNumber = Column(String(255))
41 | useLocation = Column(String(255))
42 | remark = Column(String(255))
43 | photo = Column(Text)
44 | otherAccessory = Column(Text)
45 | applicaionReason = Column(String(255))
46 |
--------------------------------------------------------------------------------
/easy_distributed_crawler/MasterNode/URlManager.py:
--------------------------------------------------------------------------------
1 | # coding:utf-8
2 | import pickle
3 | import hashlib
4 |
5 | class UrlManager(object):
6 |     def __init__(self):
7 |         ## set of urls not yet crawled
8 |         self.new_urls = self.load_progress('new_urls.txt')
9 |         ## set of (hashed) urls already crawled
10 |         self.old_urls = self.load_progress('old_urls.txt')
11 |
12 |     ## is there any url left to crawl?
13 |     def has_new_url(self):
14 |         return self.new_url_size() != 0
15 |
16 |     ## size of the uncrawled url set
17 |     def new_url_size(self):
18 |         return len(self.new_urls)
19 |
20 |     ## pop an uncrawled url and remember its digest
21 |     def get_new_url(self):
22 |         new_url = self.new_urls.pop()
23 |         m = hashlib.md5()
24 |         m.update(new_url.encode('utf-8'))  # md5 needs bytes, not str
25 |         self.old_urls.add(m.hexdigest()[8:-8])
26 |         return new_url
27 |
28 |     ## add a new url to the uncrawled set
29 |     def add_new_url(self,url):
30 |         if url is None:
31 |             return
32 |         m = hashlib.md5()
33 |         m.update(url.encode('utf-8'))
34 |         url_md5 = m.hexdigest()[8:-8]
35 |         if url not in self.new_urls and url_md5 not in self.old_urls:
36 |             self.new_urls.add(url)
37 |
38 |     ## add a batch of new urls to the uncrawled set
39 |     def add_new_urls(self,urls):
40 |
41 |         if urls is None or len(urls)==0:
42 |             return
43 |
44 |         for url in urls:
45 |             self.add_new_url(url)
46 |
47 |     ## size of the crawled url set
48 |     def old_url_size(self):
49 |         return len(self.old_urls)
50 |
51 |     ## save progress to disk
52 |     def save_progress(self,path,data):
53 |         with open(path,'wb') as f:
54 |             pickle.dump(data,f)
55 |
56 |     ### load progress from a local file
57 |     def load_progress(self,path):
58 |         print('[+] Loading progress from file: %s' % path)
59 |         try:
60 |             with open(path,'rb') as f:
61 |                 tmp = pickle.load(f)
62 |             return tmp
63 |         except (IOError, EOFError):
64 |             print("[!] No progress file found, creating: %s" % path)
65 |             return set()
--------------------------------------------------------------------------------
/easy_distributed_crawler/SlaveNode/SlaveWork.py:
--------------------------------------------------------------------------------
1 | from multiprocessing.managers import BaseManager
2 |
3 | from easy_distributed_crawler.SlaveNode.HtmlDownloader import HtmlDownloader
4 | from easy_distributed_crawler.SlaveNode.HtmlParser import HtmlParse
5 |
6 | class SlaveWork(object):
7 |
8 |     def __init__(self):
9 |
10 |         # Set up this worker's connection in the distributed processes.
11 |         # Step 1: register with BaseManager the names of the queue getters
12 |         BaseManager.register('get_task_queue')
13 |         BaseManager.register('get_result_queue')
14 |
15 |         ## Step 2: connect to the server
16 |         server_addr = '127.0.0.1'
17 |         # the port and authkey must exactly match the server process settings
18 |         self.m = BaseManager(address=(server_addr, 8081), authkey=b'seven')
19 |         # connect over the network
20 |         self.m.connect()
21 |
22 |         ## Step 3: obtain the queue proxies
23 |         self.task = self.m.get_task_queue()
24 |         self.result = self.m.get_result_queue()
25 |
26 |         ## initialise the page downloader and the parser
27 |         self.downloader = HtmlDownloader()
28 |         self.parser = HtmlParse()
29 |
30 |     def crawl(self):
31 |         while True:
32 |             try:
33 |                 if not self.task.empty():
34 |                     url = self.task.get()
35 |                     if url == 'end':
36 |                         print("Master node told this slave node to stop working")
37 |                         self.result.put({'new_urls':'end','data':'end'})
38 |                         return
39 |                     print('Slave node is parsing: %s' % url)
40 |                     content = self.downloader.download(url)
41 |                     new_urls, data = self.parser.parser(url, content)
42 |                     self.result.put({"new_urls": new_urls, "data": data})
43 |             except EOFError:
44 |                 print("Failed to connect to the worker node")
45 |                 return
46 |             except Exception:
47 |                 print('Crawl failed')
48 |
49 | if __name__=="__main__":
50 |     spider = SlaveWork()
51 |     spider.crawl()
--------------------------------------------------------------------------------
/jingdong/jingdong/middlewares.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define here the models for your spider middleware
4 | #
5 | # See documentation in:
6 | # http://doc.scrapy.org/en/latest/topics/spider-middleware.html
7 |
8 | from scrapy import signals
9 |
10 |
11 | class JingdongSpiderMiddleware(object):
12 | # Not all methods need to be defined. If a method is not defined,
13 | # scrapy acts as if the spider middleware does not modify the
14 | # passed objects.
15 |
16 | @classmethod
17 | def from_crawler(cls, crawler):
18 | # This method is used by Scrapy to create your spiders.
19 | s = cls()
20 | crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
21 | return s
22 |
23 | def process_spider_input(self, response, spider):
24 | # Called for each response that goes through the spider
25 | # middleware and into the spider.
26 |
27 | # Should return None or raise an exception.
28 | return None
29 |
30 | def process_spider_output(self, response, result, spider):
31 | # Called with the results returned from the Spider, after
32 | # it has processed the response.
33 |
34 | # Must return an iterable of Request, dict or Item objects.
35 | for i in result:
36 | yield i
37 |
38 | def process_spider_exception(self, response, exception, spider):
39 | # Called when a spider or process_spider_input() method
40 | # (from other spider middleware) raises an exception.
41 |
42 | # Should return either None or an iterable of Response, dict
43 | # or Item objects.
44 | pass
45 |
46 | def process_start_requests(self, start_requests, spider):
47 | # Called with the start requests of the spider, and works
48 | # similarly to the process_spider_output() method, except
49 | # that it doesn’t have a response associated.
50 |
51 | # Must return only requests (not items).
52 | for r in start_requests:
53 | yield r
54 |
55 | def spider_opened(self, spider):
56 | spider.logger.info('Spider opened: %s' % spider.name)
57 |
--------------------------------------------------------------------------------
/miaosha/miaosha/middlewares.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define here the models for your spider middleware
4 | #
5 | # See documentation in:
6 | # http://doc.scrapy.org/en/latest/topics/spider-middleware.html
7 |
8 | from scrapy import signals
9 |
10 |
11 | class MiaoshaSpiderMiddleware(object):
12 | # Not all methods need to be defined. If a method is not defined,
13 | # scrapy acts as if the spider middleware does not modify the
14 | # passed objects.
15 |
16 | @classmethod
17 | def from_crawler(cls, crawler):
18 | # This method is used by Scrapy to create your spiders.
19 | s = cls()
20 | crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
21 | return s
22 |
23 | def process_spider_input(self, response, spider):
24 | # Called for each response that goes through the spider
25 | # middleware and into the spider.
26 |
27 | # Should return None or raise an exception.
28 | return None
29 |
30 | def process_spider_output(self, response, result, spider):
31 | # Called with the results returned from the Spider, after
32 | # it has processed the response.
33 |
34 | # Must return an iterable of Request, dict or Item objects.
35 | for i in result:
36 | yield i
37 |
38 | def process_spider_exception(self, response, exception, spider):
39 | # Called when a spider or process_spider_input() method
40 | # (from other spider middleware) raises an exception.
41 |
42 | # Should return either None or an iterable of Response, dict
43 | # or Item objects.
44 | pass
45 |
46 | def process_start_requests(self, start_requests, spider):
47 | # Called with the start requests of the spider, and works
48 | # similarly to the process_spider_output() method, except
49 | # that it doesn’t have a response associated.
50 |
51 | # Must return only requests (not items).
52 | for r in start_requests:
53 | yield r
54 |
55 | def spider_opened(self, spider):
56 | spider.logger.info('Spider opened: %s' % spider.name)
57 |
--------------------------------------------------------------------------------
/1024/down.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import os
3 | from bs4 import BeautifulSoup
4 | from multiprocessing import Process
5 | import sys
6 | import time
7 | sys.setrecursionlimit(1000000)  # e.g. raise the recursion limit to one million
8 |
9 | url = [
10 | 'https://ns.postcc.us/htm_data/8/1711/2813398.html'
11 | ]
12 |
13 | def getImageUrl(url):
14 | header = {
15 | 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36',
16 | }
17 | time.sleep(2)
18 | html = requests.get(url,headers=header)
19 | html.encoding = 'gbk'
20 | Soup = BeautifulSoup(html.text,'lxml')
21 | title = Soup.title.get_text().split('-')[0].split(' ')[0]
22 | imgsrc = Soup.select('input[type="image"]')
23 | return {'title':title,'imgsrcs':imgsrc}
24 |
25 | def downloadImg(imageLists,title,range_list):
26 |
27 |     header = {
28 |         'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36',
29 |     }
30 |     if not os.path.exists('img/'+title):
31 |         os.makedirs('img/'+title)
32 |         print("created folder img/{}".format(title))
33 |     for imglist in range_list:
34 |         try:
35 |             img = imageLists[imglist]
36 |         except IndexError:
37 |             continue  # skip out-of-range indexes instead of reusing a stale img
38 |         print("downloading image " + str(imglist))
39 |         img_response = requests.get(img.get('src'), stream=True, headers=header)
40 |         with open('img/'+title + '/' + str(imglist) + '.jpg', 'wb') as img_file:
41 |             img_file.write(img_response.content)
42 |         print("image " + str(imglist) + " downloaded")
43 |
44 | def run(imageLists):
45 |     blocks = range(len(imageLists['imgsrcs']))  # valid indexes run 0..len-1
46 |     step = 10
47 |     ## split the work into chunks and download each chunk in its own process
48 |     range_lists = [blocks[x:x + step] for x in range(0, len(blocks), step)]
49 |     processlist = []
50 |     for range_list in range_lists:
51 |         p = Process(target=downloadImg, args=(imageLists['imgsrcs'],imageLists['title'], range_list))
52 |         processlist.append(p)
53 |     for ps in processlist:
54 |         ps.start()
55 |
56 | if __name__ == '__main__':
57 | for ll in url:
58 | imglist = getImageUrl(ll)
59 | run(imglist)
60 |
--------------------------------------------------------------------------------
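run() starts its workers but never joins them; a generic version of the same split-then-spawn pattern with an explicit join could look like this (the worker signature here is an assumption, not the downloadImg above):

from multiprocessing import Process

def run_in_chunks(items, worker, step=10):
    # carve the index space into fixed-size chunks, one process per chunk
    chunks = [range(i, min(i + step, len(items))) for i in range(0, len(items), step)]
    procs = [Process(target=worker, args=(items, chunk)) for chunk in chunks]
    for p in procs:
        p.start()
    for p in procs:
        p.join()  # wait until every chunk has finished downloading
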
/dingding/main.py:
--------------------------------------------------------------------------------
1 | from dingapi import writeAccessToken,getAccessToken,insertIntoCostapplication,insertCarCostHistory,insertComplainRocord,insertDailyWorkReport,insertFaultHistory,insertImportantEvent,insertServerCheck,inspectionRecord,insertReturnvisit
2 | from common.config import corpid,corpsecret
3 | from dingapi import sendMessage
4 |
5 | import time
6 | import threading
7 | import datetime
8 |
9 | if __name__ == '__main__':
10 |
11 |
12 |     end_time = datetime.datetime.now()
13 |     d1 = end_time
14 |     start_time = d1 - datetime.timedelta(days=60)
15 |
16 |     # millisecond epoch timestamps, passed as strings
17 |     end_time = str(int(time.mktime(end_time.timetuple()) * 1000))
18 |     start_time = str(int(time.mktime(start_time.timetuple()) * 1000))
19 |
20 |     # sendMessage('crawl started', 'fetching the last two months of data')
21 |
22 |     AccessToken = getAccessToken()
23 |
24 |     # run each database-import job on its own thread
25 | threads = []
26 | t1 = threading.Thread(target=insertIntoCostapplication, args=(start_time, end_time, AccessToken))
27 | threads.append(t1)
28 | # t2 = threading.Thread(target=insertCarCostHistory, args=(start_time, end_time, AccessToken))
29 | # threads.append(t2)
30 | # t3 = threading.Thread(target=insertComplainRocord, args=(start_time, end_time, AccessToken))
31 | # threads.append(t3)
32 | # t4 = threading.Thread(target=insertDailyWorkReport, args=(start_time, end_time, AccessToken))
33 | # threads.append(t4)
34 | # t5 = threading.Thread(target=insertFaultHistory, args=(start_time, end_time, AccessToken))
35 | # threads.append(t5)
36 | # t6 = threading.Thread(target=insertImportantEvent, args=(start_time, end_time, AccessToken))
37 | # threads.append(t6)
38 | # t7 = threading.Thread(target=insertServerCheck, args=(start_time, end_time, AccessToken))
39 | # threads.append(t7)
40 | # t8 = threading.Thread(target=inspectionRecord, args=(start_time, end_time, AccessToken))
41 | # threads.append(t8)
42 | # t9 = threading.Thread(target=insertReturnvisit, args=(start_time, end_time, AccessToken))
43 | # threads.append(t9)
44 | for t in threads:
45 | t.start()
46 | for t in threads:
47 | t.join()
48 |
49 |     # sendMessage('crawl finished', 'the last two months of data have been updated')
50 |
51 | print('Done!')
--------------------------------------------------------------------------------
/dingding/common/functions.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | import re
3 |
4 | def duration(start_time,end_time):
5 | a = datetime.strptime(str(start_time), "%Y-%m-%d %H:%M:%S")
6 | b = datetime.strptime(str(end_time), "%Y-%m-%d %H:%M:%S")
7 | return (b-a).days
8 |
9 | def getName(title):
10 |     try:
11 |         name = re.search(r'(\w+)的', title).group(1)
12 |     except AttributeError:
13 |         name = None
14 |     return name
15 |
16 | def getAccsory(data):
17 | if 'value' in data:
18 | res = data['value']
19 | else:
20 | res = None
21 | return res
22 |
23 | def getProjectName(data):
24 | if data['label'] == '项目名称':
25 | return data['value']
26 | elif data['label']=='设备名称':
27 | return data['value']
28 | else:
29 | return None
30 |
31 | def getTradeMark(data):
32 | if data['label'] == '品牌':
33 | return data['value']
34 | else:
35 | return None
36 |
37 | def getSpecificationModels(data):
38 | if data['label'] == '规格/型号':
39 | return data['value']
40 | else:
41 | return None
42 |
43 | def getUnits(data):
44 | if data['label'] == '单位':
45 | return data['value']
46 | else:
47 | return None
48 |
49 | def getAmount(data):
50 | if data['label'] == '数量':
51 | return data['value']
52 | else:
53 | return None
54 |
55 | def getUnitPrice(data):
56 | if data['label'] == '单价(元)':
57 | return data['value']
58 | else:
59 | return None
60 |
61 | def getTotalPrice(data):
62 | if data['label'] == '合计金额':
63 | return data['value']
64 | else:
65 | return None
66 |
67 | def getStationName(data):
68 | if data['label'] == '站名':
69 | return data['value']
70 | else:
71 | return None
72 |
73 | def getLaneNumber(data):
74 | if data['label'] == '车道号':
75 | return data['value']
76 | else:
77 | return None
78 |
79 | def getUseLocation(data):
80 | if data['label'] == '使用位置':
81 | return data['value']
82 | else:
83 | return None
84 |
85 | def getRemark(data):
86 | if data['label'] == '备注':
87 | return data['value']
88 | else:
89 | return None
90 |
91 | def getPhoto(data):
92 | if data['label'] == '报送照片':
93 | return data['value']
94 | else:
95 | return None
96 |
--------------------------------------------------------------------------------
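Apart from getProjectName (which accepts two labels), every getter above repeats the same label lookup; a factory could generate them while keeping the behaviour identical (a sketch, not part of the repo):

def make_label_getter(label):
    # returns data['value'] when data['label'] matches, else None
    def getter(data):
        return data['value'] if data['label'] == label else None
    return getter

getTradeMark = make_label_getter('品牌')
getUnits = make_label_getter('单位')
getAmount = make_label_getter('数量')
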
/appdata/crawlinfo.py:
--------------------------------------------------------------------------------
1 | # _*_ coding: utf-8 _*_
2 | __author__ = 'seven'
3 | __date__ = '2017/11/17 20:51'
4 |
5 | import json
6 | import time
7 | import requests
8 | from sqlalchemy import create_engine
9 | from sqlalchemy.orm import sessionmaker
10 | from appdata.common.User import Userinfo
11 |
12 |
13 | ##target url
14 | userinfourl = 'http://api.renrengyw.com/Api/Userv9/recomLog'
15 |
16 | ## your request headers
17 | heads = {
18 | 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
19 | }
20 | ## database connection
21 | dburl = "mysql+pymysql://root:[password]@localhost/[database name]?charset=utf8"
22 | engine = create_engine(dburl,echo=True)
23 | mysession = sessionmaker(bind=engine)()
24 |
25 | ## fetch one page of data for a user
26 | def getdatafromuser(heads,userid,page):
27 |
28 |     target_url = userinfourl+"?p={}&logintype=1&userid={}".format(page,userid)
29 |     header = {
30 |         'User-Agent':heads['User-Agent'],
31 |     }
32 |     response = requests.get(url=target_url,headers=header)
33 |     return response.json()
34 |
35 | ## bulk-insert into the database
36 | def insertdata(user_data,userid):
37 |     datalist = []
38 |     for row in user_data:  # don't shadow the user_data argument
39 |         user = Userinfo(phone=row['phone'],datetime=row['datetime'],amount=row['amount'],num=row['num'],userid=userid,name=row['name'])
40 |         ## build a list of ORM objects so they can be inserted in one batch
41 |         datalist.append(user)
42 |     # bulk insert
43 |     mysession.add_all(datalist)  # batch add
44 |     mysession.commit()
45 |     mysession.close()
46 |
47 | if __name__ == '__main__':
48 |     ## iterate over candidate user ids
49 |     for userid in range(1,200000):
50 |         print('userid:')
51 |         print(userid)
52 |         ## paging flag
53 |         flag = True
54 |         ## start from page 1
55 |         page = 1
56 |         ## keep paging until an empty page comes back
57 |         while flag:
58 |             print('page:')
59 |             print(page)
60 |             return_data = getdatafromuser(heads=heads,userid=userid,page=page)
61 |             ## an empty result list means the last page was reached
62 |             if len(return_data['result']['list']) == 0:
63 |                 flag = False
64 |             else:
65 |                 ## next page
66 |                 page = page + 1
67 |                 ## bulk-insert this page into the database
68 |                 insertdata(return_data['result']['list'],userid)
69 |                 ## wait 3 seconds between requests
70 |                 time.sleep(3)
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/ethCrawler/crawler.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.orm import sessionmaker
5 | from TradeList import tradelist
6 |
7 | import re
8 |
9 | dburl="mysql+pymysql://root:123@localhost/eth?charset=utf8"
10 |
11 |
12 | ## initialise the database engine
13 | engine = create_engine(dburl, echo=True)
14 |
15 | def sendPost(page):
16 |
17 | payload ={'address':'0xc38e2669cc249748eab2c86e9e371481a1919293','currency':'ETH','page':page,'pageSize':20}
18 | headers = {
19 | 'Host': 'scan-api.spancer.cn',
20 | 'Accept': 'application/json, text/plain, */*',
21 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36C',
22 | 'Content-Type': 'application/json;charset=UTF-8',
23 | 'Accept-Language':'zh-CN,zh;q=0.9',
24 | 'Accept-Encoding':'gzip, deflate',
25 | 'Origin':'http://www.qukuai.com',
26 | }
27 | r = requests.post('http://scan-api.spancer.cn//v1/address/getInfo',data=json.dumps(payload),headers=headers)
28 | result = json.loads(r.text)
29 |
30 | if result['code']==200 and len(result['data']['tradeList']):
31 | return result['data']['tradeList']
32 | else:
33 | return None
34 | def saveData(data):
35 |     ## bulk insert
36 |     ## initialise a database session
37 |     mysession = sessionmaker(bind=engine)()
38 |     dataList = []
39 |     for item in data:
40 |         tradeRow = {}  # fresh dict per row, so no keys leak between rows
41 |
42 |         ### skip rows whose transaction hash is already stored
43 |         res = mysession.query(tradelist).filter_by(
44 |             txHash=item['txHash']).all()
45 |         if len(res) != 0:
46 |             continue
47 |         if int(item['confirmCount'])>5 and float(re.split('[+-]',item['amount'])[1])>0:
48 |             tradeRow['txHash'] = item['txHash']
49 |             tradeRow['blockHeight'] = item['blockHeight']
50 |             tradeRow['amount'] = item['amount']
51 |             tradeRow['confirmTime'] = item['confirmTime']
52 |             tradeRow['originatorAdress'] = item['inList'][0]['address']
53 |             tradeRow['recevierAdress'] = item['outList'][0]['address']
54 |             tradeRow['brokerage'] = item['brokerage']
55 |             treadList = tradelist(**tradeRow)
56 |             dataList.append(treadList)
57 |
58 |     mysession.add_all(dataList)  # bulk insert
59 |     mysession.commit()
60 |     mysession.close()
61 |
62 |
63 | def main():
64 |     for page in range(1,9999):
65 |         data = sendPost(page)
66 |         if data is not None:
67 |             saveData(data)
68 |         else:
69 |             break  # no more pages
70 |
71 | if __name__ == '__main__':
72 |     main()
--------------------------------------------------------------------------------
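TradeList.py itself is not reproduced in this section; a hypothetical declarative model matching the columns saveData fills in could look like this (all column types, lengths and the table name are assumptions):

from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class tradelist(Base):
    __tablename__ = 'tradelist'
    id = Column(Integer, primary_key=True)
    txHash = Column(String(128), unique=True)   # deduplication key used in saveData
    blockHeight = Column(String(32))
    amount = Column(String(64))
    confirmTime = Column(String(32))
    originatorAdress = Column(String(128))
    recevierAdress = Column(String(128))
    brokerage = Column(String(64))
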
/pdfdownload/pdfdown.py:
--------------------------------------------------------------------------------
1 | __author__ = 'seven'
2 | import requests
3 | import codecs
4 | import json
5 | import os
6 | import re
7 | '''
8 | Single-process downloader
9 | '''
10 | header = {
11 | 'Referer': 'http://www.jameshardie.co.nz/specifiers/cad-library',
12 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36'
13 | }
14 |
15 | ## fetch the data source and write it to data.json
16 | def writedatatojson():
17 |     data_response = requests.get('http://cdnmaster.smartb.im/staging/td/jh/scripts/databoom.js',headers = header)
18 |     datatable = re.search(r'my.dt=(.)*;my',data_response.text).group(0).split(';my')[0]
19 |     datatable = json.loads(datatable[7:])
20 |     with codecs.open('data.json', 'w') as file:
21 |         file.write(json.dumps(datatable))  # the with block closes the file
22 |
23 | ## load the json data back from the file
24 | def getdatafromjson():
25 |     with open('data.json') as json_file:
26 |         data = json.load(json_file)
27 |     return data
28 | ## download one set of files (pdf, dwg, gif)
29 | def download(category,file_name,pdf_url,dwg_url,gif_url):
30 |     # create the category folder if needed
31 |     if not os.path.exists(category):
32 |         os.makedirs(category)
33 |         print("created folder")
34 |     # download the pdf
35 |     print("downloading pdf")
36 |     pdf_response = requests.get(pdf_url,stream=True,headers = header)
37 |     with open(category+'/'+file_name+'.pdf','wb') as pdf_file:
38 |         pdf_file.write(pdf_response.content)
39 |     print("pdf done")
40 |
41 |     print("downloading dwg")
42 |     dwg_response = requests.get(dwg_url,stream=True,headers = header)
43 |     with open(category+'/'+file_name + '.dwg', 'wb') as dwg_file:
44 |         dwg_file.write(dwg_response.content)
45 |     print("dwg done")
46 |
47 |     print("downloading gif")
48 |     gif_response = requests.get(gif_url,stream=True,headers = header)
49 |     with open(category+'/'+file_name + '.gif', 'wb') as gif_file:
50 |         gif_file.write(gif_response.content)
51 |     print("gif done")
52 |
53 | if __name__ == '__main__':
54 |     baseurl = 'http://cdnmaster.smartb.im/staging/td/jh/cadbim/'
55 |     current_dir = os.getcwd()
56 |     writedatatojson()
57 |     datas = getdatafromjson()
58 |     for data in datas[1:]:
59 |         os.chdir(os.path.join(current_dir))
60 |         category = data[-4]
61 |         file_name = str(data[-1]).replace(category+'/','')
62 |         down_url = str(data[-1])
63 |         # pdf download link
64 |         pdf_url = '%s%s.pdf'%(baseurl,'pdf/'+down_url)
65 |         # dwg download link
66 |         dwg_url = '%s%s.dwg'%(baseurl,'dwg/'+down_url)
67 |         # gif download link
68 |         gif_url = '%s%s.gif'%(baseurl,'thumbs/'+down_url)
69 |         download(category,file_name,pdf_url,dwg_url,gif_url)
70 |
71 |
72 |
--------------------------------------------------------------------------------
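stream=True is passed to requests.get above, but response.content still buffers each file fully in memory; for large PDFs the body can be written in chunks instead, a sketch using requests' iter_content:

import requests

def save_stream(url, path, headers):
    response = requests.get(url, stream=True, headers=headers)
    with open(path, 'wb') as f:
        for chunk in response.iter_content(chunk_size=8192):
            if chunk:          # skip keep-alive chunks
                f.write(chunk)
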
/ethCrawler/etherEumCrawler.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from bs4 import BeautifulSoup
3 | import re
4 | import logging
5 | import datetime
6 |
7 | from sqlalchemy import create_engine
8 | from sqlalchemy.orm import sessionmaker
9 | from EthTradeList import Etherscantradelist
10 |
11 | dburl="mysql+pymysql://root:hello2016@localhost/eth?charset=utf8"
12 |
13 |
14 | ## initialise the database
15 | engine = create_engine(dburl, echo=True)
16 | mysession = sessionmaker(bind=engine)()
17 |
18 | logger = logging.getLogger(__name__)
19 | logger.setLevel(level = logging.INFO)
20 | handler = logging.FileHandler("log.txt")
21 | handler.setLevel(logging.INFO)
22 | formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
23 | handler.setFormatter(formatter)
24 | logger.addHandler(handler)
25 |
26 | token ='0x0d0707963952f2fba59dd06f2b425ace40b492fe'
27 | page =1
28 |
29 | def sendRequest(token,page):
30 |     targetUrl = 'https://etherscan.io/tokentxns?a={token}&ps=100&p={page}'.format(token=token,page=page)
31 |     headers ={
32 |         'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36',
33 |         'Accept':'*/*'
34 |     }
35 |     r = requests.get(targetUrl,headers=headers)
36 |     if r.status_code==200:
37 |         soup = BeautifulSoup(r.content,'lxml')
38 |         return soup.find(name='table',class_='table table-hover')
39 |     else:
40 |         logger.info(targetUrl+' response is not 200')
41 |         return None  # parseHtml checks for None, so don't return False here
42 |
43 | def parseHtml(htmlData):
44 |
45 |     dataList = []
46 |     if htmlData is not None:
47 |
48 |         for datatr in htmlData.find_all(name='tr')[1:]:
49 |             tradeRow = {}  # fresh dict per row, so no keys leak between rows
50 |             tdRow = datatr.find_all(name='td')
51 |
52 |             ### skip rows whose transaction hash is already stored
53 |             res = mysession.query(Etherscantradelist).filter_by(
54 |                 txHash=tdRow[0].get_text().strip()).all()
55 |             if len(res) != 0:
56 |                 continue
57 |
58 |             tradeRow['txHash'] = tdRow[0].get_text().strip()
59 |             tradeRow['age'] = datetime.datetime.strptime(tdRow[1].span['title'].strip(),'%b-%d-%Y %I:%M:%S %p')
60 |             tradeRow['fromadress'] = tdRow[2].get_text().strip()
61 |             tradeRow['to'] = tdRow[4].get_text().strip()
62 |             tradeRow['value'] = tdRow[5].get_text().replace(',','').strip()
63 |             tradeRow['token'] = re.match(r'/token/(.+)\?',tdRow[6].a['href']).group(1).strip()
64 |             tradeRow['name'] = str(tdRow[6].get_text()).lower().strip()
65 |             treadList = Etherscantradelist(**tradeRow)
66 |             dataList.append(treadList)
67 |     ## return an empty list (not None) when there was nothing to parse
68 |     return dataList
69 |
70 |
71 | def saveToDataBase(dataModel):
72 |     mysession.add_all(dataModel)  # bulk insert
73 | mysession.commit()
74 | mysession.close()
75 |
76 | def main():
77 | pages = 1000
78 | for page in range(1,pages+1):
79 | saveToDataBase(parseHtml(sendRequest(token,page)))
80 |
81 | if __name__ == '__main__':
82 | main()
83 |
--------------------------------------------------------------------------------
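parseHtml issues one SELECT per table row to test for duplicates; loading the known hashes into a set once per run would cut that to a single query. A sketch reusing the module's mysession:

# build an in-memory set of already-stored transaction hashes
known_hashes = {row.txHash for row in mysession.query(Etherscantradelist.txHash)}

def is_new(tx_hash):
    return tx_hash not in known_hashes
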
/pdfdownload/pdfdown_mutiprocess.py:
--------------------------------------------------------------------------------
1 | __author__ = 'seven'
2 | import requests
3 | import codecs
4 | import json
5 | import os
6 | import re
7 | from multiprocessing import Process
8 |
9 | '''
10 | Split the task into chunks for multi-process download
11 | '''
12 | header = {
13 | 'Referer': 'http://www.jameshardie.co.nz/specifiers/cad-library',
14 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36'
15 | }
16 | baseurl = 'http://cdnmaster.smartb.im/staging/td/jh/cadbim/'
17 |
18 | ## fetch the data source and write it to data.json
19 | def writedatatojson():
20 |     data_response = requests.get('http://cdnmaster.smartb.im/staging/td/jh/scripts/databoom.js',headers = header)
21 |     datatable = re.search(r'my.dt=(.)*;my',data_response.text).group(0).split(';my')[0]
22 |     datatable = json.loads(datatable[7:])
23 |     with codecs.open('data.json', 'w') as file:
24 |         file.write(json.dumps(datatable))  # the with block closes the file
25 |
26 | ## load the json data back from the file
27 | def getdatafromjson():
28 |     with open('data.json') as json_file:
29 |         data = json.load(json_file)
30 |     return data
31 | ## download the files for every entry index in myrange
32 | def download(datas,myrange):
33 |     for order in myrange:
34 |         data = datas[order]
35 |         category = data[-4]
36 |         file_name = str(data[-1]).replace(category + '/', '')
37 |         down_url = str(data[-1])
38 |         # pdf download link
39 |         pdf_url = '%s%s.pdf' % (baseurl, 'pdf/' + down_url)
40 |         # dwg download link
41 |         dwg_url = '%s%s.dwg' % (baseurl, 'dwg/' + down_url)
42 |         # gif download link
43 |         gif_url = '%s%s.gif' % (baseurl, 'thumbs/' + down_url)
44 |         # create the category folder if needed
45 |         if not os.path.exists(category):
46 |             os.makedirs(category)
47 |             print("created folder")
48 |         # download the pdf
49 |         print("downloading pdf")
50 |         pdf_response = requests.get(pdf_url,stream=True,headers = header)
51 |         with open(category+'/'+file_name+'.pdf','wb') as pdf_file:
52 |             pdf_file.write(pdf_response.content)
53 |         print("pdf done")
54 |
55 |         print("downloading dwg")
56 |         dwg_response = requests.get(dwg_url,stream=True,headers = header)
57 |         with open(category+'/'+file_name + '.dwg', 'wb') as dwg_file:
58 |             dwg_file.write(dwg_response.content)
59 |         print("dwg done")
60 |
61 |         print("downloading gif")
62 |         gif_response = requests.get(gif_url,stream=True,headers = header)
63 |         with open(category+'/'+file_name + '.gif', 'wb') as gif_file:
64 |             gif_file.write(gif_response.content)
65 |         print("gif done")
66 |
67 | if __name__ == '__main__':
68 |     writedatatojson()
69 |     step = 100
70 |     datas = getdatafromjson()
71 |     blocks = range(1, len(datas))  # entry 0 is skipped (as in pdfdown.py); last valid index is len-1
72 |     ## split the data into chunks and download each chunk in its own process
73 |     range_lists = [blocks[x:x + step] for x in range(0, len(blocks), step)]
74 |     processlist = []
75 |
76 |     for range_list in range_lists:
77 |         p = Process(target=download,args=(datas,range_list))
78 |         processlist.append(p)
79 |     for p in processlist:
80 |         p.start()
81 |
82 |
83 |
84 |
85 |
--------------------------------------------------------------------------------
/miaosha/miaosha/settings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Scrapy settings for miaosha project
4 | #
5 | # For simplicity, this file contains only settings considered important or
6 | # commonly used. You can find more settings consulting the documentation:
7 | #
8 | # http://doc.scrapy.org/en/latest/topics/settings.html
9 | # http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
10 | # http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
11 |
12 | BOT_NAME = 'miaosha'
13 |
14 | SPIDER_MODULES = ['miaosha.spiders']
15 | NEWSPIDER_MODULE = 'miaosha.spiders'
16 |
17 |
18 | # Crawl responsibly by identifying yourself (and your website) on the user-agent
19 | #USER_AGENT = 'miaosha (+http://www.yourdomain.com)'
20 |
21 | # Obey robots.txt rules
22 | ROBOTSTXT_OBEY = True
23 |
24 | # Configure maximum concurrent requests performed by Scrapy (default: 16)
25 | #CONCURRENT_REQUESTS = 32
26 |
27 | # Configure a delay for requests for the same website (default: 0)
28 | # See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
29 | # See also autothrottle settings and docs
30 | #DOWNLOAD_DELAY = 3
31 | # The download delay setting will honor only one of:
32 | #CONCURRENT_REQUESTS_PER_DOMAIN = 16
33 | #CONCURRENT_REQUESTS_PER_IP = 16
34 |
35 | # Disable cookies (enabled by default)
36 | #COOKIES_ENABLED = False
37 |
38 | # Disable Telnet Console (enabled by default)
39 | #TELNETCONSOLE_ENABLED = False
40 |
41 | # Override the default request headers:
42 | #DEFAULT_REQUEST_HEADERS = {
43 | # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
44 | # 'Accept-Language': 'en',
45 | #}
46 |
47 | # Enable or disable spider middlewares
48 | # See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
49 | #SPIDER_MIDDLEWARES = {
50 | # 'miaosha.middlewares.MiaoshaSpiderMiddleware': 543,
51 | #}
52 |
53 | # Enable or disable downloader middlewares
54 | # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
55 | #DOWNLOADER_MIDDLEWARES = {
56 | # 'miaosha.middlewares.MyCustomDownloaderMiddleware': 543,
57 | #}
58 |
59 | # Enable or disable extensions
60 | # See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
61 | #EXTENSIONS = {
62 | # 'scrapy.extensions.telnet.TelnetConsole': None,
63 | #}
64 |
65 | # Configure item pipelines
66 | # See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
67 | #ITEM_PIPELINES = {
68 | # 'miaosha.pipelines.MiaoshaPipeline': 300,
69 | #}
70 |
71 | # Enable and configure the AutoThrottle extension (disabled by default)
72 | # See http://doc.scrapy.org/en/latest/topics/autothrottle.html
73 | #AUTOTHROTTLE_ENABLED = True
74 | # The initial download delay
75 | #AUTOTHROTTLE_START_DELAY = 5
76 | # The maximum download delay to be set in case of high latencies
77 | #AUTOTHROTTLE_MAX_DELAY = 60
78 | # The average number of requests Scrapy should be sending in parallel to
79 | # each remote server
80 | #AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
81 | # Enable showing throttling stats for every response received:
82 | #AUTOTHROTTLE_DEBUG = False
83 |
84 | # Enable and configure HTTP caching (disabled by default)
85 | # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
86 | #HTTPCACHE_ENABLED = True
87 | #HTTPCACHE_EXPIRATION_SECS = 0
88 | #HTTPCACHE_DIR = 'httpcache'
89 | #HTTPCACHE_IGNORE_HTTP_CODES = []
90 | #HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
91 |
--------------------------------------------------------------------------------
/jingdong/jingdong/settings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Scrapy settings for jingdong project
4 | #
5 | # For simplicity, this file contains only settings considered important or
6 | # commonly used. You can find more settings consulting the documentation:
7 | #
8 | # http://doc.scrapy.org/en/latest/topics/settings.html
9 | # http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
10 | # http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
11 |
12 | BOT_NAME = 'jingdong'
13 |
14 | SPIDER_MODULES = ['jingdong.spiders']
15 | NEWSPIDER_MODULE = 'jingdong.spiders'
16 |
17 |
18 | # Crawl responsibly by identifying yourself (and your website) on the user-agent
19 | #USER_AGENT = 'jingdong (+http://www.yourdomain.com)'
20 |
21 | # Obey robots.txt rules
22 | ROBOTSTXT_OBEY = True
23 |
24 | # Configure maximum concurrent requests performed by Scrapy (default: 16)
25 | #CONCURRENT_REQUESTS = 32
26 |
27 | # Configure a delay for requests for the same website (default: 0)
28 | # See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
29 | # See also autothrottle settings and docs
30 | #DOWNLOAD_DELAY = 3
31 | # The download delay setting will honor only one of:
32 | #CONCURRENT_REQUESTS_PER_DOMAIN = 16
33 | #CONCURRENT_REQUESTS_PER_IP = 16
34 |
35 | # Disable cookies (enabled by default)
36 | #COOKIES_ENABLED = False
37 |
38 | # Disable Telnet Console (enabled by default)
39 | #TELNETCONSOLE_ENABLED = False
40 |
41 | # Override the default request headers:
42 | #DEFAULT_REQUEST_HEADERS = {
43 | # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
44 | # 'Accept-Language': 'en',
45 | #}
46 |
47 | # Enable or disable spider middlewares
48 | # See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
49 | #SPIDER_MIDDLEWARES = {
50 | # 'jingdong.middlewares.JingdongSpiderMiddleware': 543,
51 | #}
52 |
53 | # Enable or disable downloader middlewares
54 | # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
55 | #DOWNLOADER_MIDDLEWARES = {
56 | # 'jingdong.middlewares.MyCustomDownloaderMiddleware': 543,
57 | #}
58 |
59 | # Enable or disable extensions
60 | # See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
61 | #EXTENSIONS = {
62 | # 'scrapy.extensions.telnet.TelnetConsole': None,
63 | #}
64 |
65 | # Configure item pipelines
66 | # See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
67 | #ITEM_PIPELINES = {
68 | # 'jingdong.pipelines.JingdongPipeline': 300,
69 | #}
70 |
71 | # Enable and configure the AutoThrottle extension (disabled by default)
72 | # See http://doc.scrapy.org/en/latest/topics/autothrottle.html
73 | #AUTOTHROTTLE_ENABLED = True
74 | # The initial download delay
75 | #AUTOTHROTTLE_START_DELAY = 5
76 | # The maximum download delay to be set in case of high latencies
77 | #AUTOTHROTTLE_MAX_DELAY = 60
78 | # The average number of requests Scrapy should be sending in parallel to
79 | # each remote server
80 | #AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
81 | # Enable showing throttling stats for every response received:
82 | #AUTOTHROTTLE_DEBUG = False
83 |
84 | # Enable and configure HTTP caching (disabled by default)
85 | # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
86 | #HTTPCACHE_ENABLED = True
87 | #HTTPCACHE_EXPIRATION_SECS = 0
88 | #HTTPCACHE_DIR = 'httpcache'
89 | #HTTPCACHE_IGNORE_HTTP_CODES = []
90 | #HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
91 |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/101.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2009-07-22 | 1.0795 | 1.0795 | 0.77% | 开放申购 | 封闭期 | |
| 2009-07-21 | 1.0713 | 1.0713 | -0.89% | 开放申购 | 封闭期 | |
| 2009-07-20 | 1.0809 | 1.0809 | 1.02% | 开放申购 | 封闭期 | |
| 2009-07-17 | 1.0700 | 1.0700 | 0.40% | 开放申购 | 封闭期 | |
| 2009-07-16 | 1.0657 | 1.0657 | 0.13% | 开放申购 | 封闭期 | |
| 2009-07-15 | 1.0643 | 1.0643 | 0.16% | 开放申购 | 封闭期 | |
| 2009-07-14 | 1.0626 | 1.0626 | 0.31% | 封闭期 | 封闭期 | |
| 2009-07-10 | 1.0593 | 1.0593 | 0.16% | 封闭期 | 封闭期 | |
| 2009-07-03 | 1.0576 | 1.0576 | 4.26% | 封闭期 | 封闭期 | |
| 2009-06-30 | 1.0144 | 1.0144 | 0.90% | 封闭期 | 封闭期 | |
| 2009-06-26 | 1.0054 | 1.0054 | 0.54% | 封闭期 | 封闭期 | |
| 2009-06-19 | 1.0000 | 1.0000 | 1.72% | 封闭期 | 封闭期 | |
| 2009-06-12 | 0.9831 | 0.9831 | -1.00% | 封闭期 | 封闭期 | |
| 2009-06-05 | 0.9930 | 0.9930 | 0.00% | 封闭期 | 封闭期 | |
| 2009-05-27 | 0.9930 | 0.9930 | -0.18% | 封闭期 | 封闭期 | |
| 2009-05-22 | 0.9948 | 0.9948 | -0.50% | 封闭期 | 封闭期 | |
| 2009-05-15 | 0.9998 | 0.9998 | -0.02% | 封闭期 | 封闭期 | |
| 2009-05-08 | 1.0000 | 1.0000 | 0.00% | 封闭期 | 封闭期 | |
| 2009-05-06 | 1.0000 | 1.0000 | | 封闭期 | 封闭期 | |
--------------------------------------------------------------------------------
/crawlDajiawen/spider.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from selenium import webdriver
3 | from selenium.webdriver.chrome.options import Options
4 | from bs4 import BeautifulSoup
5 | import time
6 | import csv
7 | import sys
8 | import io
9 | sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding='gb18030')  # change the default encoding of standard output
10 |
11 |
12 | def getTopGoodsId(url):
13 |     driver = configure_driver()
14 |     driver.get(url)
15 |     goodInfo = BeautifulSoup(driver.page_source, 'lxml').find_all('a',class_='pic-link J_ClickStat J_ItemPicA')
16 |     for goodRow in goodInfo:
17 |         with open('goodid.txt', 'a') as infile:
18 |             infile.write(goodRow['data-nid']+'\n')
19 |
20 |     ## turn to the next results page
21 |     nextpage = driver.find_element_by_css_selector('a[trace="srp_bottom_page2"]')
22 |     nextpage.click()
23 |     time.sleep(2)
24 |
25 |     goodInfoPage = BeautifulSoup(driver.page_source, 'lxml').find_all('a',class_='pic-link J_ClickStat J_ItemPicA')
26 |     for goodRowpage in goodInfoPage[:6]:
27 |         with open('goodid.txt', 'a') as infile:
28 |             infile.write(goodRowpage['data-nid']+'\n')
29 |
30 |     ## release the browser once both pages have been scraped
31 |     driver.quit()
32 |
33 | def makeDjiawenUrl(goodId):
34 | dajiwenUrl = 'https://h5.m.taobao.com/wendajia/question2017.html?refId={}'.format(goodId)
35 | return dajiwenUrl
36 |
37 | def configure_driver():
38 |     opts = Options()
39 |     opts.add_argument('--headless')
40 |     prefs = {"profile.managed_default_content_settings.images": 2}  # skip loading images
41 |     opts.add_experimental_option("prefs", prefs)
42 |     opts.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.91 Safari/537.36')
43 |     driver = webdriver.Chrome(chrome_options=opts, executable_path=r'D:\soft\chromedriver\chromedriver.exe')
44 |     return driver
45 |
46 | def get_page_source(url):
47 |
48 |     driver = configure_driver()
49 |     driver.get(url)
50 |     driver.execute_script("document.getElementById('wdj').scrollTop=100000")  # scroll so lazily-loaded answers render
51 |     time.sleep(2)
52 |     parse(driver.page_source)
53 |     driver.quit()  # a fresh browser is created per url; release it
54 | def parse(response):
55 | questionDiv = BeautifulSoup(response, 'lxml').find_all('div',class_="question mgb16")
56 | answerDiv = BeautifulSoup(response, 'lxml').find_all('div',class_="answer mgb22")
57 | goodsName = BeautifulSoup(response, 'lxml').find('div',class_="it-name").get_text()
58 | dataList=[]
59 |
60 | for row in zip(questionDiv,answerDiv):
61 | if len(row)!=0:
62 | dataList.append([goodsName,row[0].find_next('div',class_="title text").get_text(),row[1].find_next('p',class_="title text").get_text()])
63 | insertIntoCsv(dataList)
64 |
65 | def insertIntoCsv(data):
66 |     with open("dajiawen.csv", "a+",encoding='gb18030') as csvfile:
67 |         writer = csv.writer(csvfile)
68 |         writer.writerows(data)
69 |
70 |
71 | def writeHeader():
72 |     with open("dajiawen.csv", "a+", encoding='gb18030') as csvfile:
73 |         writer = csv.writer(csvfile)
74 |         # write the column names first
75 |         writer.writerow(["商品名字", "问题", "答案"])
76 |
77 |
78 | def run():
79 |     writeHeader()
80 |     with open('goodid.txt', 'r') as infile:
81 |         for id in infile.readlines():
82 |             dajiawenUrl = makeDjiawenUrl(id.strip())
83 |             get_page_source(dajiawenUrl)
84 |
85 |
86 |
##Taobao search link for the query, sorted by sales volume
88 | top50url = 'https://s.taobao.com/search?q=%E5%86%AC%E8%99%AB%E5%A4%8F%E8%8D%89&imgfile=&commend=all&ssid=s5-e&search_type=item&sourceId=tb.index&spm=a21bo.2017.201856-taobao-item.1&ie=utf8&initiative_id=tbindexz_20170306&sort=sale-desc'
89 |
90 | run()
--------------------------------------------------------------------------------
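The fixed time.sleep(2) calls in the spider above are a guess at page-load time; Selenium's explicit waits poll until an element actually appears, up to a timeout. A sketch (the selector passed in is an assumption):

from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def wait_for_css(driver, css_selector, timeout=10):
    # blocks until the element exists in the DOM, or raises TimeoutException
    return WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, css_selector)))
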
/alishiyong/itemLinks.txt:
--------------------------------------------------------------------------------
1 | https://try.taobao.com/item.htm?id=32554209#tab-report
2 | https://try.taobao.com/item.htm?id=32216005#tab-report
3 | https://try.taobao.com/item.htm?id=31696246#tab-report
4 | https://try.taobao.com/item.htm?id=31606332#tab-report
5 | https://try.taobao.com/item.htm?id=31398149#tab-report
6 | https://try.taobao.com/item.htm?id=30674254#tab-report
7 | https://try.taobao.com/item.htm?id=30246169#tab-report
8 | https://try.taobao.com/item.htm?id=29972091#tab-report
9 | https://try.taobao.com/item.htm?id=29352092#tab-report
10 | https://try.taobao.com/item.htm?id=28458152#tab-report
11 | https://try.taobao.com/item.htm?id=28410077#tab-report
12 | https://try.taobao.com/item.htm?id=28076155#tab-report
13 | https://try.taobao.com/item.htm?id=27656070#tab-report
14 | https://try.taobao.com/item.htm?id=24984068#tab-report
15 | https://try.taobao.com/item.htm?id=23584624#tab-report
16 | https://try.taobao.com/item.htm?id=23462544#tab-report
17 | https://try.taobao.com/item.htm?id=23032085#tab-report
18 | https://try.taobao.com/item.htm?id=22488896#tab-report
19 | https://try.taobao.com/item.htm?id=21706193#tab-report
20 | https://try.taobao.com/item.htm?id=21606463#tab-report
21 | https://try.taobao.com/item.htm?id=21220476#tab-report
22 | https://try.taobao.com/item.htm?id=20944194#tab-report
23 | https://try.taobao.com/item.htm?id=20676023#tab-report
24 | https://try.taobao.com/item.htm?id=20504295#tab-report
25 | https://try.taobao.com/item.htm?id=20312223#tab-report
26 | https://try.taobao.com/item.htm?id=19932583#tab-report
27 | https://try.taobao.com/item.htm?id=19882508#tab-report
28 | https://try.taobao.com/item.htm?id=19030422#tab-report
29 | https://try.taobao.com/item.htm?id=18724006#tab-report
30 | https://try.taobao.com/item.htm?id=17566967#tab-report
31 | https://try.taobao.com/item.htm?id=17560397#tab-report
32 | https://try.taobao.com/item.htm?id=17554283#tab-report
33 | https://try.taobao.com/item.htm?id=17480258#tab-report
34 | https://try.taobao.com/item.htm?id=16782277#tab-report
35 | https://try.taobao.com/item.htm?id=16660700#tab-report
36 | https://try.taobao.com/item.htm?id=16386485#tab-report
37 | https://try.taobao.com/item.htm?id=16084244#tab-report
38 | https://try.taobao.com/item.htm?id=16072306#tab-report
39 | https://try.taobao.com/item.htm?id=15346237#tab-report
40 | https://try.taobao.com/item.htm?id=15724056#tab-report
41 | https://try.taobao.com/item.htm?id=14788178#tab-report
42 | https://try.taobao.com/item.htm?id=14408069#tab-report
43 | https://try.taobao.com/item.htm?id=14288393#tab-report
44 | https://try.taobao.com/item.htm?id=13264128#tab-report
45 | https://try.taobao.com/item.htm?id=13180864#tab-report
46 | https://try.taobao.com/item.htm?id=13196401#tab-report
47 | https://try.taobao.com/item.htm?id=12388252#tab-report
48 | https://try.taobao.com/item.htm?id=12274907#tab-report
49 | https://try.taobao.com/item.htm?id=11954078#tab-report
50 | https://try.taobao.com/item.htm?id=10718243#tab-report
51 | https://try.taobao.com/item.htm?id=9426001
52 | https://try.taobao.com/item.htm?id=9166010#tab-report
53 | https://try.taobao.com/item.htm?id=8578149#tab-report
54 | https://try.taobao.com/item.htm?id=8620060#tab-report
55 | https://try.taobao.com/item.htm?id=7928034
56 | https://try.taobao.com/item.htm?id=8252098#tab-report
57 | https://try.taobao.com/item.htm?id=7882535#tab-report
58 | https://try.taobao.com/item.htm?id=7834391#tab-report
59 | https://try.taobao.com/item.htm?id=6223769#tab-report
60 | https://try.taobao.com/item.htm?id=5847744#tab-report
61 | https://try.taobao.com/item.htm?id=5738666#tab-report
62 | https://try.taobao.com/item.htm?id=5562814#tab-report
63 | https://try.taobao.com/item.htm?id=4817780#tab-report
64 | https://try.taobao.com/item.htm?id=3532761#tab-report
65 |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/25.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-10-21 | 0.9410 | 0.9410 | -3.09% | 开放申购 | 开放赎回 | |
| 2015-10-20 | 0.9710 | 0.9710 | 0.21% | 开放申购 | 开放赎回 | |
| 2015-10-19 | 0.9690 | 0.9690 | 0.10% | 开放申购 | 开放赎回 | |
| 2015-10-16 | 0.9680 | 0.9680 | 0.62% | 开放申购 | 开放赎回 | |
| 2015-10-15 | 0.9620 | 0.9620 | 1.69% | 开放申购 | 开放赎回 | |
| 2015-10-14 | 0.9460 | 0.9460 | -1.25% | 开放申购 | 开放赎回 | |
| 2015-10-13 | 0.9580 | 0.9580 | 0.21% | 开放申购 | 开放赎回 | |
| 2015-10-12 | 0.9560 | 0.9560 | 2.47% | 开放申购 | 开放赎回 | |
| 2015-10-09 | 0.9330 | 0.9330 | 0.54% | 开放申购 | 开放赎回 | |
| 2015-10-08 | 0.9280 | 0.9280 | 2.32% | 开放申购 | 开放赎回 | |
| 2015-09-30 | 0.9070 | 0.9070 | 1.00% | 开放申购 | 开放赎回 | |
| 2015-09-29 | 0.8980 | 0.8980 | -0.88% | 开放申购 | 开放赎回 | |
| 2015-09-28 | 0.9060 | 0.9060 | 0.78% | 开放申购 | 开放赎回 | |
| 2015-09-25 | 0.8990 | 0.8990 | -1.53% | 开放申购 | 开放赎回 | |
| 2015-09-24 | 0.9130 | 0.9130 | 0.33% | 开放申购 | 开放赎回 | |
| 2015-09-23 | 0.9100 | 0.9100 | -1.19% | 开放申购 | 开放赎回 | |
| 2015-09-22 | 0.9210 | 0.9210 | 0.22% | 开放申购 | 开放赎回 | |
| 2015-09-21 | 0.9190 | 0.9190 | 1.66% | 开放申购 | 开放赎回 | |
| 2015-09-18 | 0.9040 | 0.9040 | 0.67% | 开放申购 | 开放赎回 | |
| 2015-09-17 | 0.8980 | 0.8980 | -1.43% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/27.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-08-17 | 1.0050 | 1.0050 | 0.40% | 开放申购 | 开放赎回 | |
| 2015-08-14 | 1.0010 | 1.0010 | 0.10% | 开放申购 | 开放赎回 | |
| 2015-08-13 | 1.0000 | 1.0000 | 0.60% | 开放申购 | 开放赎回 | |
| 2015-08-12 | 0.9940 | 0.9940 | -1.58% | 开放申购 | 开放赎回 | |
| 2015-08-11 | 1.0100 | 1.0100 | -0.39% | 开放申购 | 开放赎回 | |
| 2015-08-10 | 1.0140 | 1.0140 | 2.53% | 开放申购 | 开放赎回 | |
| 2015-08-07 | 0.9890 | 0.9890 | 1.44% | 开放申购 | 开放赎回 | |
| 2015-08-06 | 0.9750 | 0.9750 | 0.00% | 开放申购 | 开放赎回 | |
| 2015-08-05 | 0.9750 | 0.9750 | -1.42% | 开放申购 | 开放赎回 | |
| 2015-08-04 | 0.9890 | 0.9890 | 2.49% | 开放申购 | 开放赎回 | |
| 2015-08-03 | 0.9650 | 0.9650 | 0.63% | 开放申购 | 开放赎回 | |
| 2015-07-31 | 0.9590 | 0.9590 | 0.21% | 开放申购 | 开放赎回 | |
| 2015-07-30 | 0.9570 | 0.9570 | -2.35% | 开放申购 | 开放赎回 | |
| 2015-07-29 | 0.9800 | 0.9800 | 1.87% | 开放申购 | 开放赎回 | |
| 2015-07-28 | 0.9620 | 0.9620 | 0.00% | 开放申购 | 开放赎回 | |
| 2015-07-27 | 0.9620 | 0.9620 | -5.13% | 开放申购 | 开放赎回 | |
| 2015-07-24 | 1.0140 | 1.0140 | -0.98% | 开放申购 | 开放赎回 | |
| 2015-07-23 | 1.0240 | 1.0240 | 1.49% | 开放申购 | 开放赎回 | |
| 2015-07-22 | 1.0090 | 1.0090 | 0.70% | 开放申购 | 开放赎回 | |
| 2015-07-21 | 1.0020 | 1.0020 | 0.70% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/8.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2017-03-10 | 1.1170 | 1.1170 | 0.72% | 开放申购 | 开放赎回 | |
| 2017-03-09 | 1.1090 | 1.1090 | -0.54% | 开放申购 | 开放赎回 | |
| 2017-03-08 | 1.1150 | 1.1150 | -0.18% | 开放申购 | 开放赎回 | |
| 2017-03-07 | 1.1170 | 1.1170 | 0.36% | 开放申购 | 开放赎回 | |
| 2017-03-06 | 1.1130 | 1.1130 | 0.91% | 开放申购 | 开放赎回 | |
| 2017-03-03 | 1.1030 | 1.1030 | 0.64% | 开放申购 | 开放赎回 | |
| 2017-03-02 | 1.0960 | 1.0960 | -0.18% | 开放申购 | 开放赎回 | |
| 2017-03-01 | 1.0980 | 1.0980 | 0.00% | 开放申购 | 开放赎回 | |
| 2017-02-28 | 1.0980 | 1.0980 | 0.00% | 开放申购 | 开放赎回 | |
| 2017-02-27 | 1.0980 | 1.0980 | -0.72% | 开放申购 | 开放赎回 | |
| 2017-02-24 | 1.1060 | 1.1060 | -0.36% | 开放申购 | 开放赎回 | |
| 2017-02-23 | 1.1100 | 1.1100 | 0.27% | 开放申购 | 开放赎回 | |
| 2017-02-22 | 1.1070 | 1.1070 | 0.36% | 开放申购 | 开放赎回 | |
| 2017-02-21 | 1.1030 | 1.1030 | 0.18% | 开放申购 | 开放赎回 | |
| 2017-02-20 | 1.1010 | 1.1010 | 1.66% | 开放申购 | 开放赎回 | |
| 2017-02-17 | 1.0830 | 1.0830 | 0.00% | 开放申购 | 开放赎回 | |
| 2017-02-16 | 1.0830 | 1.0830 | 0.74% | 开放申购 | 开放赎回 | |
| 2017-02-15 | 1.0750 | 1.0750 | -0.56% | 开放申购 | 开放赎回 | |
| 2017-02-14 | 1.0810 | 1.0810 | -0.37% | 开放申购 | 开放赎回 | |
| 2017-02-13 | 1.0850 | 1.0850 | 1.31% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/9.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2017-02-10 | 1.0710 | 1.0710 | 0.09% | 开放申购 | 开放赎回 | |
| 2017-02-09 | 1.0700 | 1.0700 | -0.19% | 开放申购 | 开放赎回 | |
| 2017-02-08 | 1.0720 | 1.0720 | 0.47% | 开放申购 | 开放赎回 | |
| 2017-02-07 | 1.0670 | 1.0670 | -0.28% | 开放申购 | 开放赎回 | |
| 2017-02-06 | 1.0700 | 1.0700 | 0.00% | 开放申购 | 开放赎回 | |
| 2017-02-03 | 1.0700 | 1.0700 | 0.38% | 开放申购 | 开放赎回 | |
| 2017-01-26 | 1.0660 | 1.0660 | 0.28% | 开放申购 | 开放赎回 | |
| 2017-01-25 | 1.0630 | 1.0630 | 0.47% | 开放申购 | 开放赎回 | |
| 2017-01-24 | 1.0580 | 1.0580 | 0.28% | 开放申购 | 开放赎回 | |
| 2017-01-23 | 1.0550 | 1.0550 | 0.29% | 开放申购 | 开放赎回 | |
| 2017-01-20 | 1.0520 | 1.0520 | 0.86% | 开放申购 | 开放赎回 | |
| 2017-01-19 | 1.0430 | 1.0430 | -0.38% | 开放申购 | 开放赎回 | |
| 2017-01-18 | 1.0470 | 1.0470 | 0.48% | 开放申购 | 开放赎回 | |
| 2017-01-17 | 1.0420 | 1.0420 | 0.97% | 开放申购 | 开放赎回 | |
| 2017-01-16 | 1.0320 | 1.0320 | -0.19% | 开放申购 | 开放赎回 | |
| 2017-01-13 | 1.0340 | 1.0340 | -0.10% | 开放申购 | 开放赎回 | |
| 2017-01-12 | 1.0350 | 1.0350 | -0.58% | 开放申购 | 开放赎回 | |
| 2017-01-11 | 1.0410 | 1.0410 | -0.57% | 开放申购 | 开放赎回 | |
| 2017-01-10 | 1.0470 | 1.0470 | 0.10% | 开放申购 | 开放赎回 | |
| 2017-01-09 | 1.0460 | 1.0460 | 0.19% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/100.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2009-08-19 | 0.9709 | 0.9709 | -1.04% | 开放申购 | 开放赎回 | |
| 2009-08-18 | 0.9811 | 0.9811 | -0.41% | 开放申购 | 开放赎回 | |
| 2009-08-17 | 0.9851 | 0.9851 | -2.97% | 开放申购 | 开放赎回 | |
| 2009-08-14 | 1.0153 | 1.0153 | -1.04% | 开放申购 | 开放赎回 | |
| 2009-08-13 | 1.0260 | 1.0260 | 1.43% | 开放申购 | 开放赎回 | |
| 2009-08-12 | 1.0115 | 1.0115 | -0.89% | 开放申购 | 开放赎回 | |
| 2009-08-11 | 1.0206 | 1.0206 | 0.60% | 开放申购 | 开放赎回 | |
| 2009-08-10 | 1.0145 | 1.0145 | -0.80% | 开放申购 | 开放赎回 | |
| 2009-08-07 | 1.0227 | 1.0227 | -2.79% | 开放申购 | 开放赎回 | |
| 2009-08-06 | 1.0521 | 1.0521 | -1.02% | 开放申购 | 开放赎回 | |
| 2009-08-05 | 1.0629 | 1.0629 | -0.80% | 开放申购 | 开放赎回 | |
| 2009-08-04 | 1.0715 | 1.0715 | 0.36% | 开放申购 | 开放赎回 | |
| 2009-08-03 | 1.0677 | 1.0677 | 0.30% | 开放申购 | 开放赎回 | |
| 2009-07-31 | 1.0645 | 1.0645 | 2.26% | 开放申购 | 封闭期 | |
| 2009-07-30 | 1.0410 | 1.0410 | 0.17% | 开放申购 | 封闭期 | |
| 2009-07-29 | 1.0392 | 1.0392 | -4.26% | 开放申购 | 封闭期 | |
| 2009-07-28 | 1.0854 | 1.0854 | -0.86% | 开放申购 | 封闭期 | |
| 2009-07-27 | 1.0948 | 1.0948 | 0.89% | 开放申购 | 封闭期 | |
| 2009-07-24 | 1.0851 | 1.0851 | -0.28% | 开放申购 | 封闭期 | |
| 2009-07-23 | 1.0882 | 1.0882 | 0.81% | 开放申购 | 封闭期 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/16.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-07-13 | 1.5521 | 1.6221 | 0.72% | 开放申购 | 开放赎回 | |
| 2016-07-12 | 1.5410 | 1.6110 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-07-11 | 1.5395 | 1.6095 | -0.74% | 开放申购 | 开放赎回 | |
| 2016-07-08 | 1.5509 | 1.6209 | -0.39% | 开放申购 | 开放赎回 | |
| 2016-07-07 | 1.5569 | 1.6269 | 0.30% | 开放申购 | 开放赎回 | |
| 2016-07-06 | 1.5523 | 1.6223 | 0.60% | 开放申购 | 开放赎回 | |
| 2016-07-05 | 1.5430 | 1.6130 | 0.32% | 开放申购 | 开放赎回 | |
| 2016-07-04 | 1.5381 | 1.6081 | 1.98% | 开放申购 | 开放赎回 | |
| 2016-07-01 | 1.5083 | 1.5783 | -0.15% | 开放申购 | 开放赎回 | |
| 2016-06-30 | 1.5106 | 1.5806 | 0.11% | 开放申购 | 开放赎回 | |
| 2016-06-29 | 1.5089 | 1.5789 | 0.53% | 开放申购 | 开放赎回 | |
| 2016-06-28 | 1.5010 | 1.5710 | 0.28% | 开放申购 | 开放赎回 | |
| 2016-06-27 | 1.4968 | 1.5668 | 1.97% | 开放申购 | 开放赎回 | |
| 2016-06-24 | 1.4679 | 1.5379 | 0.14% | 开放申购 | 开放赎回 | |
| 2016-06-23 | 1.4658 | 1.5358 | -1.02% | 开放申购 | 开放赎回 | |
| 2016-06-22 | 1.4809 | 1.5509 | 1.84% | 开放申购 | 开放赎回 | |
| 2016-06-21 | 1.4542 | 1.5242 | -1.18% | 开放申购 | 开放赎回 | |
| 2016-06-20 | 1.4715 | 1.5415 | 1.69% | 开放申购 | 开放赎回 | |
| 2016-06-17 | 1.4471 | 1.5171 | 0.05% | 开放申购 | 开放赎回 | |
| 2016-06-16 | 1.4464 | 1.5164 | 0.39% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/19.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-04-15 | 1.4294 | 1.4994 | 0.06% | 开放申购 | 开放赎回 | |
| 2016-04-14 | 1.4285 | 1.4985 | 0.95% | 开放申购 | 开放赎回 | |
| 2016-04-13 | 1.4151 | 1.4851 | 1.85% | 开放申购 | 开放赎回 | |
| 2016-04-12 | 1.3894 | 1.4594 | -1.21% | 开放申购 | 开放赎回 | |
| 2016-04-11 | 1.4064 | 1.4764 | 1.49% | 开放申购 | 开放赎回 | |
| 2016-04-08 | 1.3857 | 1.4557 | -0.76% | 开放申购 | 开放赎回 | |
| 2016-04-07 | 1.3963 | 1.4663 | 0.48% | 开放申购 | 开放赎回 | |
| 2016-04-06 | 1.3896 | 1.4596 | 0.67% | 开放申购 | 开放赎回 | |
| 2016-04-05 | 1.3804 | 1.4504 | 2.57% | 开放申购 | 开放赎回 | |
| 2016-04-01 | 1.3458 | 1.4158 | -0.76% | 开放申购 | 开放赎回 | |
| 2016-03-31 | 1.3561 | 1.4261 | 1.57% | 开放申购 | 开放赎回 | |
| 2016-03-30 | 1.3351 | 1.4051 | 3.85% | 开放申购 | 开放赎回 | |
| 2016-03-29 | 1.2856 | 1.3556 | -1.80% | 开放申购 | 开放赎回 | |
| 2016-03-28 | 1.3092 | 1.3792 | -1.04% | 开放申购 | 开放赎回 | |
| 2016-03-25 | 1.3229 | 1.3929 | 0.76% | 开放申购 | 开放赎回 | |
| 2016-03-24 | 1.3129 | 1.3829 | -1.11% | 开放申购 | 开放赎回 | |
| 2016-03-23 | 1.3276 | 1.3976 | 1.14% | 开放申购 | 开放赎回 | |
| 2016-03-22 | 1.3127 | 1.3827 | 0.74% | 开放申购 | 开放赎回 | |
| 2016-03-21 | 1.3030 | 1.3730 | 3.04% | 开放申购 | 开放赎回 | |
| 2016-03-18 | 1.2646 | 1.3346 | 4.47% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/25.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-10-21 | 1.2452 | 1.3152 | -6.69% | 开放申购 | 开放赎回 | |
| 2015-10-20 | 1.3345 | 1.4045 | 1.39% | 开放申购 | 开放赎回 | |
| 2015-10-19 | 1.3162 | 1.3862 | 0.40% | 开放申购 | 开放赎回 | |
| 2015-10-16 | 1.3110 | 1.3810 | 1.42% | 开放申购 | 开放赎回 | |
| 2015-10-15 | 1.2926 | 1.3626 | 3.67% | 开放申购 | 开放赎回 | |
| 2015-10-14 | 1.2468 | 1.3168 | -2.11% | 开放申购 | 开放赎回 | |
| 2015-10-13 | 1.2737 | 1.3437 | 2.05% | 开放申购 | 开放赎回 | |
| 2015-10-12 | 1.2481 | 1.3181 | 4.01% | 开放申购 | 开放赎回 | |
| 2015-10-09 | 1.2000 | 1.2700 | 1.60% | 开放申购 | 开放赎回 | |
| 2015-10-08 | 1.1811 | 1.2511 | 4.47% | 开放申购 | 开放赎回 | |
| 2015-09-30 | 1.1306 | 1.2006 | 0.15% | 开放申购 | 开放赎回 | |
| 2015-09-29 | 1.1289 | 1.1989 | -1.21% | 开放申购 | 开放赎回 | |
| 2015-09-28 | 1.1427 | 1.2127 | 3.85% | 开放申购 | 开放赎回 | |
| 2015-09-25 | 1.1003 | 1.1703 | -2.89% | 开放申购 | 开放赎回 | |
| 2015-09-24 | 1.1330 | 1.2030 | 1.02% | 开放申购 | 开放赎回 | |
| 2015-09-23 | 1.1216 | 1.1916 | 0.57% | 开放申购 | 开放赎回 | |
| 2015-09-22 | 1.1152 | 1.1852 | 0.31% | 开放申购 | 开放赎回 | |
| 2015-09-21 | 1.1118 | 1.1818 | 5.22% | 开放申购 | 开放赎回 | |
| 2015-09-18 | 1.0566 | 1.1266 | 2.12% | 开放申购 | 开放赎回 | |
| 2015-09-17 | 1.0347 | 1.1047 | 0.43% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/30.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-05-22 | 2.0893 | 2.1593 | -0.88% | 开放申购 | 开放赎回 | |
| 2015-05-21 | 2.1078 | 2.1778 | 3.18% | 开放申购 | 开放赎回 | |
| 2015-05-20 | 2.0428 | 2.1128 | 2.32% | 开放申购 | 开放赎回 | |
| 2015-05-19 | 1.9965 | 2.0665 | 0.17% | 开放申购 | 开放赎回 | |
| 2015-05-18 | 1.9932 | 2.0632 | 4.15% | 开放申购 | 开放赎回 | |
| 2015-05-15 | 1.9138 | 1.9838 | 0.35% | 开放申购 | 开放赎回 | |
| 2015-05-14 | 1.9071 | 1.9771 | -1.50% | 开放申购 | 开放赎回 | |
| 2015-05-13 | 1.9361 | 2.0061 | 1.14% | 开放申购 | 开放赎回 | |
| 2015-05-12 | 1.9143 | 1.9843 | 4.33% | 开放申购 | 开放赎回 | |
| 2015-05-11 | 1.8349 | 1.9049 | 5.98% | 开放申购 | 开放赎回 | |
| 2015-05-08 | 1.7314 | 1.8014 | 4.98% | 开放申购 | 开放赎回 | |
| 2015-05-07 | 1.6492 | 1.7192 | 0.22% | 开放申购 | 开放赎回 | |
| 2015-05-06 | 1.6456 | 1.7156 | 1.03% | 开放申购 | 开放赎回 | |
| 2015-05-05 | 1.6288 | 1.6988 | -2.60% | 开放申购 | 开放赎回 | |
| 2015-05-04 | 1.6722 | 1.7422 | 0.28% | 开放申购 | 开放赎回 | |
| 2015-04-30 | 1.6676 | 1.7376 | 1.11% | 开放申购 | 开放赎回 | |
| 2015-04-29 | 1.6493 | 1.7193 | 1.94% | 开放申购 | 开放赎回 | |
| 2015-04-28 | 1.6179 | 1.6879 | -3.60% | 开放申购 | 开放赎回 | |
| 2015-04-27 | 1.6783 | 1.7483 | -0.27% | 开放申购 | 开放赎回 | |
| 2015-04-24 | 1.6828 | 1.7528 | -0.07% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/32.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-03-25 | 1.5679 | 1.6379 | 1.54% | 开放申购 | 开放赎回 | |
| 2015-03-24 | 1.5441 | 1.6141 | 0.29% | 开放申购 | 开放赎回 | |
| 2015-03-23 | 1.5397 | 1.6097 | 2.05% | 开放申购 | 开放赎回 | |
| 2015-03-20 | 1.5088 | 1.5788 | 0.20% | 开放申购 | 开放赎回 | |
| 2015-03-19 | 1.5058 | 1.5758 | 0.01% | 开放申购 | 开放赎回 | |
| 2015-03-18 | 1.5056 | 1.5756 | 1.41% | 开放申购 | 开放赎回 | |
| 2015-03-17 | 1.4846 | 1.5546 | 0.76% | 开放申购 | 开放赎回 | |
| 2015-03-16 | 1.4734 | 1.5434 | 3.16% | 开放申购 | 开放赎回 | |
| 2015-03-13 | 1.4282 | 1.4982 | 0.80% | 开放申购 | 开放赎回 | |
| 2015-03-12 | 1.4169 | 1.4869 | -0.87% | 开放申购 | 开放赎回 | |
| 2015-03-11 | 1.4294 | 1.4994 | -0.09% | 开放申购 | 开放赎回 | |
| 2015-03-10 | 1.4307 | 1.5007 | 0.94% | 开放申购 | 开放赎回 | |
| 2015-03-09 | 1.4174 | 1.4874 | 0.96% | 开放申购 | 开放赎回 | |
| 2015-03-06 | 1.4039 | 1.4739 | -2.57% | 开放申购 | 开放赎回 | |
| 2015-03-05 | 1.4409 | 1.5109 | 0.89% | 开放申购 | 开放赎回 | |
| 2015-03-04 | 1.4282 | 1.4982 | 0.98% | 开放申购 | 开放赎回 | |
| 2015-03-03 | 1.4143 | 1.4843 | 0.12% | 开放申购 | 开放赎回 | |
| 2015-03-02 | 1.4126 | 1.4826 | 2.96% | 开放申购 | 开放赎回 | |
| 2015-02-27 | 1.3720 | 1.4420 | 1.22% | 开放申购 | 开放赎回 | |
| 2015-02-26 | 1.3554 | 1.4254 | -0.25% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/4.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2017-07-07 | 0.9991 | 1.5191 | 0.31% | 开放申购 | 开放赎回 | |
| 2017-07-06 | 0.9960 | 1.5160 | 1.08% | 开放申购 | 开放赎回 | |
| 2017-07-05 | 0.9854 | 1.5054 | 1.09% | 开放申购 | 开放赎回 | |
| 2017-07-04 | 0.9748 | 1.4948 | -0.60% | 开放申购 | 开放赎回 | |
| 2017-07-03 | 0.9807 | 1.5007 | 1.07% | 开放申购 | 开放赎回 | |
| 2017-06-30 | 0.9703 | 1.4903 | 0.80% | 开放申购 | 开放赎回 | |
| 2017-06-29 | 0.9626 | 1.4826 | 0.50% | 开放申购 | 开放赎回 | |
| 2017-06-28 | 0.9578 | 1.4778 | -0.90% | 开放申购 | 开放赎回 | |
| 2017-06-27 | 0.9665 | 1.4865 | 0.23% | 开放申购 | 开放赎回 | |
| 2017-06-26 | 0.9643 | 1.4843 | 1.71% | 开放申购 | 开放赎回 | |
| 2017-06-23 | 0.9481 | 1.4681 | 0.81% | 开放申购 | 开放赎回 | |
| 2017-06-22 | 0.9405 | 1.4605 | -1.58% | 开放申购 | 开放赎回 | |
| 2017-06-21 | 0.9556 | 1.4756 | -0.29% | 开放申购 | 开放赎回 | |
| 2017-06-20 | 0.9584 | 1.4784 | 0.58% | 开放申购 | 开放赎回 | |
| 2017-06-19 | 0.9529 | 1.4729 | 0.42% | 开放申购 | 开放赎回 | |
| 2017-06-16 | 0.9489 | 1.4689 | 0.81% | 开放申购 | 开放赎回 | |
| 2017-06-15 | 0.9413 | 1.4613 | 1.42% | 开放申购 | 开放赎回 | |
| 2017-06-14 | 0.9281 | 1.4481 | -0.27% | 开放申购 | 开放赎回 | |
| 2017-06-13 | 0.9306 | 1.4506 | 1.63% | 开放申购 | 开放赎回 | |
| 2017-06-12 | 0.9157 | 1.4357 | -0.65% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/58.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2013-01-28 | 0.9243 | 0.9943 | 2.60% | 开放申购 | 开放赎回 | |
| 2013-01-25 | 0.9009 | 0.9709 | 0.69% | 开放申购 | 开放赎回 | |
| 2013-01-24 | 0.8947 | 0.9647 | -0.52% | 开放申购 | 开放赎回 | |
| 2013-01-23 | 0.8994 | 0.9694 | 1.08% | 开放申购 | 开放赎回 | |
| 2013-01-22 | 0.8898 | 0.9598 | -1.14% | 开放申购 | 开放赎回 | |
| 2013-01-21 | 0.9001 | 0.9701 | 0.66% | 开放申购 | 开放赎回 | |
| 2013-01-18 | 0.8942 | 0.9642 | 1.71% | 开放申购 | 开放赎回 | |
| 2013-01-17 | 0.8792 | 0.9492 | -0.64% | 开放申购 | 开放赎回 | |
| 2013-01-16 | 0.8849 | 0.9549 | 0.41% | 开放申购 | 开放赎回 | |
| 2013-01-15 | 0.8813 | 0.9513 | 0.86% | 开放申购 | 开放赎回 | |
| 2013-01-14 | 0.8738 | 0.9438 | 3.68% | 开放申购 | 开放赎回 | |
| 2013-01-11 | 0.8428 | 0.9128 | -0.70% | 开放申购 | 开放赎回 | |
| 2013-01-10 | 0.8487 | 0.9187 | 0.77% | 开放申购 | 开放赎回 | |
| 2013-01-09 | 0.8422 | 0.9122 | 0.37% | 开放申购 | 开放赎回 | |
| 2013-01-08 | 0.8391 | 0.9091 | 0.82% | 开放申购 | 开放赎回 | |
| 2013-01-07 | 0.8323 | 0.9023 | 0.75% | 开放申购 | 开放赎回 | |
| 2013-01-04 | 0.8261 | 0.8961 | -1.09% | 开放申购 | 开放赎回 | |
| 2012-12-31 | 0.8352 | 0.9052 | 1.11% | 开放申购 | 开放赎回 | |
| 2012-12-28 | 0.8260 | 0.8960 | 1.08% | 开放申购 | 开放赎回 | |
| 2012-12-27 | 0.8172 | 0.8872 | -0.49% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/88.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2010-08-13 | 1.1633 | 1.1633 | 0.89% | 开放申购 | 开放赎回 | |
| 2010-08-12 | 1.1530 | 1.1530 | -0.07% | 开放申购 | 开放赎回 | |
| 2010-08-11 | 1.1538 | 1.1538 | 0.46% | 开放申购 | 开放赎回 | |
| 2010-08-10 | 1.1485 | 1.1485 | -1.99% | 开放申购 | 开放赎回 | |
| 2010-08-09 | 1.1718 | 1.1718 | 0.77% | 开放申购 | 开放赎回 | |
| 2010-08-06 | 1.1629 | 1.1629 | 0.86% | 开放申购 | 开放赎回 | |
| 2010-08-05 | 1.1530 | 1.1530 | 1.10% | 开放申购 | 开放赎回 | |
| 2010-08-04 | 1.1404 | 1.1404 | 1.13% | 开放申购 | 开放赎回 | |
| 2010-08-03 | 1.1277 | 1.1277 | -1.11% | 开放申购 | 开放赎回 | |
| 2010-08-02 | 1.1403 | 1.1403 | 1.28% | 开放申购 | 开放赎回 | |
| 2010-07-30 | 1.1259 | 1.1259 | -0.26% | 开放申购 | 开放赎回 | |
| 2010-07-29 | 1.1288 | 1.1288 | 0.29% | 开放申购 | 开放赎回 | |
| 2010-07-28 | 1.1255 | 1.1255 | 1.91% | 开放申购 | 开放赎回 | |
| 2010-07-27 | 1.1044 | 1.1044 | 0.98% | 开放申购 | 开放赎回 | |
| 2010-07-26 | 1.0937 | 1.0937 | 1.47% | 开放申购 | 开放赎回 | |
| 2010-07-23 | 1.0779 | 1.0779 | 0.14% | 开放申购 | 开放赎回 | |
| 2010-07-22 | 1.0764 | 1.0764 | 1.35% | 开放申购 | 开放赎回 | |
| 2010-07-21 | 1.0621 | 1.0621 | -0.11% | 开放申购 | 开放赎回 | |
| 2010-07-20 | 1.0633 | 1.0633 | 1.41% | 开放申购 | 开放赎回 | |
| 2010-07-19 | 1.0485 | 1.0485 | 0.98% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/92.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2010-04-19 | 1.0973 | 1.0973 | -1.93% | 开放申购 | 开放赎回 | |
| 2010-04-16 | 1.1189 | 1.1189 | 0.02% | 开放申购 | 开放赎回 | |
| 2010-04-15 | 1.1187 | 1.1187 | -0.70% | 开放申购 | 开放赎回 | |
| 2010-04-14 | 1.1266 | 1.1266 | 0.98% | 开放申购 | 开放赎回 | |
| 2010-04-13 | 1.1157 | 1.1157 | -1.80% | 开放申购 | 开放赎回 | |
| 2010-04-12 | 1.1361 | 1.1361 | 1.76% | 开放申购 | 开放赎回 | |
| 2010-04-09 | 1.1165 | 1.1165 | 1.98% | 开放申购 | 开放赎回 | |
| 2010-04-08 | 1.0948 | 1.0948 | 0.40% | 开放申购 | 开放赎回 | |
| 2010-04-07 | 1.0904 | 1.0904 | 0.38% | 开放申购 | 开放赎回 | |
| 2010-04-06 | 1.0863 | 1.0863 | -0.51% | 开放申购 | 开放赎回 | |
| 2010-04-02 | 1.0919 | 1.0919 | 0.23% | 开放申购 | 开放赎回 | |
| 2010-04-01 | 1.0894 | 1.0894 | 2.46% | 开放申购 | 开放赎回 | |
| 2010-03-31 | 1.0632 | 1.0632 | 0.48% | 开放申购 | 开放赎回 | |
| 2010-03-30 | 1.0581 | 1.0581 | 0.61% | 开放申购 | 开放赎回 | |
| 2010-03-29 | 1.0517 | 1.0517 | 0.59% | 开放申购 | 开放赎回 | |
| 2010-03-26 | 1.0455 | 1.0455 | 1.34% | 开放申购 | 开放赎回 | |
| 2010-03-25 | 1.0317 | 1.0317 | -0.60% | 开放申购 | 开放赎回 | |
| 2010-03-24 | 1.0379 | 1.0379 | 0.13% | 开放申购 | 开放赎回 | |
| 2010-03-23 | 1.0366 | 1.0366 | 0.34% | 开放申购 | 开放赎回 | |
| 2010-03-22 | 1.0331 | 1.0331 | 0.44% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/10.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2017-01-06 | 1.0440 | 1.0440 | -0.48% | 开放申购 | 开放赎回 | |
| 2017-01-05 | 1.0490 | 1.0490 | -0.38% | 开放申购 | 开放赎回 | |
| 2017-01-04 | 1.0530 | 1.0530 | 1.15% | 开放申购 | 开放赎回 | |
| 2017-01-03 | 1.0410 | 1.0410 | 0.19% | 开放申购 | 开放赎回 | |
| 2016-12-31 | 1.0390 | 1.0390 | 0.00% | 开放申购 | 开放赎回 | |
| 2016-12-30 | 1.0390 | 1.0390 | 1.17% | 开放申购 | 开放赎回 | |
| 2016-12-29 | 1.0270 | 1.0270 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-12-28 | 1.0260 | 1.0260 | -0.19% | 开放申购 | 开放赎回 | |
| 2016-12-27 | 1.0280 | 1.0280 | 0.19% | 开放申购 | 开放赎回 | |
| 2016-12-26 | 1.0260 | 1.0260 | 0.49% | 开放申购 | 开放赎回 | |
| 2016-12-23 | 1.0210 | 1.0210 | -0.68% | 开放申购 | 开放赎回 | |
| 2016-12-22 | 1.0280 | 1.0280 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-12-21 | 1.0270 | 1.0270 | 0.79% | 开放申购 | 开放赎回 | |
| 2016-12-20 | 1.0190 | 1.0190 | -0.49% | 开放申购 | 开放赎回 | |
| 2016-12-19 | 1.0240 | 1.0240 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-12-16 | 1.0270 | 1.0270 | 0.49% | 开放申购 | 开放赎回 | |
| 2016-12-15 | 1.0220 | 1.0220 | -0.58% | 开放申购 | 开放赎回 | |
| 2016-12-14 | 1.0280 | 1.0280 | -0.68% | 开放申购 | 开放赎回 | |
| 2016-12-13 | 1.0350 | 1.0350 | 0.39% | 开放申购 | 开放赎回 | |
| 2016-12-12 | 1.0310 | 1.0310 | -2.09% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/11.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-12-09 | 1.0530 | 1.0530 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-12-08 | 1.0520 | 1.0520 | -0.10% | 开放申购 | 开放赎回 | |
| 2016-12-07 | 1.0530 | 1.0530 | 0.57% | 开放申购 | 开放赎回 | |
| 2016-12-06 | 1.0470 | 1.0470 | 0.87% | 开放申购 | 开放赎回 | |
| 2016-12-05 | 1.0380 | 1.0380 | -1.24% | 开放申购 | 开放赎回 | |
| 2016-12-02 | 1.0510 | 1.0510 | -1.41% | 开放申购 | 开放赎回 | |
| 2016-12-01 | 1.0660 | 1.0660 | 0.85% | 开放申购 | 开放赎回 | |
| 2016-11-30 | 1.0570 | 1.0570 | -0.94% | 开放申购 | 开放赎回 | |
| 2016-11-29 | 1.0670 | 1.0670 | 1.43% | 开放申购 | 开放赎回 | |
| 2016-11-28 | 1.0520 | 1.0520 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-11-25 | 1.0510 | 1.0510 | 0.29% | 开放申购 | 开放赎回 | |
| 2016-11-24 | 1.0480 | 1.0480 | 0.96% | 开放申购 | 开放赎回 | |
| 2016-11-23 | 1.0380 | 1.0380 | 0.29% | 开放申购 | 开放赎回 | |
| 2016-11-22 | 1.0350 | 1.0350 | 0.19% | 开放申购 | 开放赎回 | |
| 2016-11-21 | 1.0330 | 1.0330 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-11-18 | 1.0360 | 1.0360 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-11-17 | 1.0390 | 1.0390 | 0.19% | 开放申购 | 开放赎回 | |
| 2016-11-16 | 1.0370 | 1.0370 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-11-15 | 1.0360 | 1.0360 | 0.39% | 开放申购 | 开放赎回 | |
| 2016-11-14 | 1.0320 | 1.0320 | -0.10% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/12.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-11-11 | 1.0330 | 1.0330 | -0.39% | 开放申购 | 开放赎回 | |
| 2016-11-10 | 1.0370 | 1.0370 | 0.78% | 开放申购 | 开放赎回 | |
| 2016-11-09 | 1.0290 | 1.0290 | -1.06% | 开放申购 | 开放赎回 | |
| 2016-11-08 | 1.0400 | 1.0400 | 0.39% | 开放申购 | 开放赎回 | |
| 2016-11-07 | 1.0360 | 1.0360 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-11-04 | 1.0390 | 1.0390 | -0.38% | 开放申购 | 开放赎回 | |
| 2016-11-03 | 1.0430 | 1.0430 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-11-02 | 1.0420 | 1.0420 | -0.57% | 开放申购 | 开放赎回 | |
| 2016-11-01 | 1.0480 | 1.0480 | 0.58% | 开放申购 | 开放赎回 | |
| 2016-10-31 | 1.0420 | 1.0420 | 0.00% | 开放申购 | 开放赎回 | |
| 2016-10-28 | 1.0420 | 1.0420 | -0.48% | 开放申购 | 开放赎回 | |
| 2016-10-27 | 1.0470 | 1.0470 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-10-26 | 1.0500 | 1.0500 | 0.38% | 开放申购 | 开放赎回 | |
| 2016-10-25 | 1.0460 | 1.0460 | -0.10% | 开放申购 | 开放赎回 | |
| 2016-10-24 | 1.0470 | 1.0470 | 0.48% | 开放申购 | 开放赎回 | |
| 2016-10-21 | 1.0420 | 1.0420 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-10-20 | 1.0450 | 1.0450 | 0.29% | 开放申购 | 开放赎回 | |
| 2016-10-19 | 1.0420 | 1.0420 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-10-18 | 1.0450 | 1.0450 | 1.06% | 开放申购 | 开放赎回 | |
| 2016-10-17 | 1.0340 | 1.0340 | -0.86% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/13.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-10-14 | 1.0430 | 1.0430 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-10-13 | 1.0420 | 1.0420 | 0.58% | 开放申购 | 开放赎回 | |
| 2016-10-12 | 1.0360 | 1.0360 | 0.19% | 开放申购 | 开放赎回 | |
| 2016-10-11 | 1.0340 | 1.0340 | 0.29% | 开放申购 | 开放赎回 | |
| 2016-10-10 | 1.0310 | 1.0310 | 1.48% | 开放申购 | 开放赎回 | |
| 2016-09-30 | 1.0160 | 1.0160 | 0.49% | 开放申购 | 开放赎回 | |
| 2016-09-29 | 1.0110 | 1.0110 | 0.40% | 开放申购 | 开放赎回 | |
| 2016-09-28 | 1.0070 | 1.0070 | -0.10% | 开放申购 | 开放赎回 | |
| 2016-09-27 | 1.0080 | 1.0080 | 0.50% | 开放申购 | 开放赎回 | |
| 2016-09-26 | 1.0030 | 1.0030 | -1.47% | 开放申购 | 开放赎回 | |
| 2016-09-23 | 1.0180 | 1.0180 | 0.00% | 开放申购 | 开放赎回 | |
| 2016-09-22 | 1.0180 | 1.0180 | 0.49% | 开放申购 | 开放赎回 | |
| 2016-09-21 | 1.0130 | 1.0130 | -0.20% | 开放申购 | 开放赎回 | |
| 2016-09-20 | 1.0150 | 1.0150 | -0.49% | 开放申购 | 开放赎回 | |
| 2016-09-19 | 1.0200 | 1.0200 | 0.69% | 开放申购 | 开放赎回 | |
| 2016-09-14 | 1.0130 | 1.0130 | 0.00% | 开放申购 | 开放赎回 | |
| 2016-09-13 | 1.0130 | 1.0130 | 0.20% | 开放申购 | 开放赎回 | |
| 2016-09-12 | 1.0110 | 1.0110 | -1.56% | 开放申购 | 开放赎回 | |
| 2016-09-09 | 1.0270 | 1.0270 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-09-08 | 1.0300 | 1.0300 | -0.29% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/15.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-08-10 | 1.0250 | 1.0250 | -0.77% | 开放申购 | 开放赎回 | |
| 2016-08-09 | 1.0330 | 1.0330 | 0.88% | 开放申购 | 开放赎回 | |
| 2016-08-08 | 1.0240 | 1.0240 | 0.79% | 开放申购 | 开放赎回 | |
| 2016-08-05 | 1.0160 | 1.0160 | -0.59% | 开放申购 | 开放赎回 | |
| 2016-08-04 | 1.0220 | 1.0220 | 0.29% | 开放申购 | 开放赎回 | |
| 2016-08-03 | 1.0190 | 1.0190 | 0.39% | 开放申购 | 开放赎回 | |
| 2016-08-02 | 1.0150 | 1.0150 | 0.30% | 开放申购 | 开放赎回 | |
| 2016-08-01 | 1.0120 | 1.0120 | -1.36% | 开放申购 | 开放赎回 | |
| 2016-07-29 | 1.0260 | 1.0260 | 0.00% | 开放申购 | 开放赎回 | |
| 2016-07-28 | 1.0260 | 1.0260 | 1.38% | 开放申购 | 开放赎回 | |
| 2016-07-27 | 1.0120 | 1.0120 | -1.46% | 开放申购 | 开放赎回 | |
| 2016-07-26 | 1.0270 | 1.0270 | 1.28% | 开放申购 | 开放赎回 | |
| 2016-07-25 | 1.0140 | 1.0140 | 0.50% | 开放申购 | 开放赎回 | |
| 2016-07-22 | 1.0090 | 1.0090 | -0.88% | 开放申购 | 开放赎回 | |
| 2016-07-21 | 1.0180 | 1.0180 | 0.89% | 开放申购 | 开放赎回 | |
| 2016-07-20 | 1.0090 | 1.0090 | 0.10% | 开放申购 | 开放赎回 | |
| 2016-07-19 | 1.0080 | 1.0080 | -0.79% | 开放申购 | 开放赎回 | |
| 2016-07-18 | 1.0160 | 1.0160 | -0.78% | 开放申购 | 开放赎回 | |
| 2016-07-15 | 1.0240 | 1.0240 | 0.49% | 开放申购 | 开放赎回 | |
| 2016-07-14 | 1.0190 | 1.0190 | 0.00% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/23.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-12-16 | 1.0240 | 1.0240 | 0.10% | 开放申购 | 开放赎回 | |
| 2015-12-15 | 1.0230 | 1.0230 | 0.59% | 开放申购 | 开放赎回 | |
| 2015-12-14 | 1.0170 | 1.0170 | 1.40% | 开放申购 | 开放赎回 | |
| 2015-12-11 | 1.0030 | 1.0030 | -0.40% | 开放申购 | 开放赎回 | |
| 2015-12-10 | 1.0070 | 1.0070 | 0.00% | 开放申购 | 开放赎回 | |
| 2015-12-09 | 1.0070 | 1.0070 | 0.30% | 开放申购 | 开放赎回 | |
| 2015-12-08 | 1.0040 | 1.0040 | -1.67% | 开放申购 | 开放赎回 | |
| 2015-12-07 | 1.0210 | 1.0210 | 0.10% | 开放申购 | 开放赎回 | |
| 2015-12-04 | 1.0200 | 1.0200 | -0.97% | 开放申购 | 开放赎回 | |
| 2015-12-03 | 1.0300 | 1.0300 | 0.68% | 开放申购 | 开放赎回 | |
| 2015-12-02 | 1.0230 | 1.0230 | 2.20% | 开放申购 | 开放赎回 | |
| 2015-12-01 | 1.0010 | 1.0010 | 1.01% | 开放申购 | 开放赎回 | |
| 2015-11-30 | 0.9910 | 0.9910 | 1.02% | 开放申购 | 开放赎回 | |
| 2015-11-27 | 0.9810 | 0.9810 | -3.92% | 开放申购 | 开放赎回 | |
| 2015-11-26 | 1.0210 | 1.0210 | -0.58% | 开放申购 | 开放赎回 | |
| 2015-11-25 | 1.0270 | 1.0270 | 0.88% | 开放申购 | 开放赎回 | |
| 2015-11-24 | 1.0180 | 1.0180 | 0.30% | 开放申购 | 开放赎回 | |
| 2015-11-23 | 1.0150 | 1.0150 | -0.20% | 开放申购 | 开放赎回 | |
| 2015-11-20 | 1.0170 | 1.0170 | -0.20% | 开放申购 | 开放赎回 | |
| 2015-11-19 | 1.0190 | 1.0190 | 1.29% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/24.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-11-18 | 1.0060 | 1.0060 | -0.40% | 开放申购 | 开放赎回 | |
| 2015-11-17 | 1.0100 | 1.0100 | -0.20% | 开放申购 | 开放赎回 | |
| 2015-11-16 | 1.0120 | 1.0120 | 0.90% | 开放申购 | 开放赎回 | |
| 2015-11-13 | 1.0030 | 1.0030 | -1.28% | 开放申购 | 开放赎回 | |
| 2015-11-12 | 1.0160 | 1.0160 | -0.29% | 开放申购 | 开放赎回 | |
| 2015-11-11 | 1.0190 | 1.0190 | -0.10% | 开放申购 | 开放赎回 | |
| 2015-11-10 | 1.0200 | 1.0200 | -0.58% | 开放申购 | 开放赎回 | |
| 2015-11-09 | 1.0260 | 1.0260 | 0.89% | 开放申购 | 开放赎回 | |
| 2015-11-06 | 1.0170 | 1.0170 | 1.40% | 开放申购 | 开放赎回 | |
| 2015-11-05 | 1.0030 | 1.0030 | 0.40% | 开放申购 | 开放赎回 | |
| 2015-11-04 | 0.9990 | 0.9990 | 3.31% | 开放申购 | 开放赎回 | |
| 2015-11-03 | 0.9670 | 0.9670 | -0.10% | 开放申购 | 开放赎回 | |
| 2015-11-02 | 0.9680 | 0.9680 | -1.12% | 开放申购 | 开放赎回 | |
| 2015-10-30 | 0.9790 | 0.9790 | 0.51% | 开放申购 | 开放赎回 | |
| 2015-10-29 | 0.9740 | 0.9740 | 0.52% | 开放申购 | 开放赎回 | |
| 2015-10-28 | 0.9690 | 0.9690 | -0.92% | 开放申购 | 开放赎回 | |
| 2015-10-27 | 0.9780 | 0.9780 | 0.00% | 开放申购 | 开放赎回 | |
| 2015-10-26 | 0.9780 | 0.9780 | 0.72% | 开放申购 | 开放赎回 | |
| 2015-10-23 | 0.9710 | 0.9710 | 1.68% | 开放申购 | 开放赎回 | |
| 2015-10-22 | 0.9550 | 0.9550 | 1.49% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/28.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-07-20 | 0.9950 | 0.9950 | 0.71% | 开放申购 | 开放赎回 | |
| 2015-07-17 | 0.9880 | 0.9880 | 2.49% | 开放申购 | 开放赎回 | |
| 2015-07-16 | 0.9640 | 0.9640 | 1.05% | 开放申购 | 开放赎回 | |
| 2015-07-15 | 0.9540 | 0.9540 | -1.95% | 开放申购 | 开放赎回 | |
| 2015-07-14 | 0.9730 | 0.9730 | -0.51% | 开放申购 | 开放赎回 | |
| 2015-07-13 | 0.9780 | 0.9780 | 1.88% | 开放申购 | 开放赎回 | |
| 2015-07-10 | 0.9600 | 0.9600 | 2.67% | 开放申购 | 开放赎回 | |
| 2015-07-09 | 0.9350 | 0.9350 | 4.59% | 开放申购 | 开放赎回 | |
| 2015-07-08 | 0.8940 | 0.8940 | -6.19% | 开放申购 | 开放赎回 | |
| 2015-07-07 | 0.9530 | 0.9530 | -1.55% | 开放申购 | 开放赎回 | |
| 2015-07-06 | 0.9680 | 0.9680 | 1.89% | 暂停申购 | 开放赎回 | |
| 2015-07-03 | 0.9500 | 0.9500 | -3.36% | 暂停申购 | 开放赎回 | |
| 2015-07-02 | 0.9830 | 0.9830 | -2.38% | 暂停申购 | 开放赎回 | |
| 2015-07-01 | 1.0070 | 1.0070 | -2.99% | 暂停申购 | 开放赎回 | |
| 2015-06-30 | 1.0380 | 1.0380 | 5.70% | 暂停申购 | 开放赎回 | |
| 2015-06-29 | 0.9820 | 0.9820 | -1.31% | 暂停申购 | 开放赎回 | |
| 2015-06-26 | 0.9950 | 0.9950 | -5.60% | 暂停申购 | 开放赎回 | |
| 2015-06-25 | 1.0540 | 1.0540 | -2.77% | 暂停申购 | 开放赎回 | |
| 2015-06-24 | 1.0840 | 1.0840 | 1.59% | 暂停申购 | 开放赎回 | |
| 2015-06-23 | 1.0670 | 1.0670 | 3.59% | 暂停申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/001112/29.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-06-19 | 1.0300 | 1.0300 | -4.10% | 暂停申购 | 开放赎回 | |
| 2015-06-18 | 1.0740 | 1.0740 | -2.27% | 暂停申购 | 开放赎回 | |
| 2015-06-17 | 1.0990 | 1.0990 | 0.92% | 暂停申购 | 开放赎回 | |
| 2015-06-16 | 1.0890 | 1.0890 | -1.80% | 暂停申购 | 开放赎回 | |
| 2015-06-15 | 1.1090 | 1.1090 | -1.95% | 暂停申购 | 开放赎回 | |
| 2015-06-12 | 1.1310 | 1.1310 | 0.53% | 暂停申购 | 开放赎回 | |
| 2015-06-11 | 1.1250 | 1.1250 | -0.27% | 暂停申购 | 开放赎回 | |
| 2015-06-10 | 1.1280 | 1.1280 | 0.53% | 暂停申购 | 开放赎回 | |
| 2015-06-09 | 1.1220 | 1.1220 | -0.36% | 暂停申购 | 开放赎回 | |
| 2015-06-08 | 1.1260 | 1.1260 | 1.90% | 暂停申购 | 开放赎回 | |
| 2015-06-05 | 1.1050 | 1.1050 | 0.18% | 暂停申购 | 开放赎回 | |
| 2015-06-04 | 1.1030 | 1.1030 | 0.00% | 暂停申购 | 开放赎回 | |
| 2015-06-03 | 1.1030 | 1.1030 | 0.27% | 暂停申购 | 开放赎回 | |
| 2015-06-02 | 1.1000 | 1.1000 | 0.46% | 暂停申购 | 开放赎回 | |
| 2015-06-01 | 1.0950 | 1.0950 | 2.82% | 暂停申购 | 开放赎回 | |
| 2015-05-29 | 1.0650 | 1.0650 | 0.00% | 开放申购 | 开放赎回 | |
| 2015-05-28 | 1.0650 | 1.0650 | -3.79% | 开放申购 | 开放赎回 | |
| 2015-05-27 | 1.1070 | 1.1070 | -0.81% | 开放申购 | 开放赎回 | |
| 2015-05-26 | 1.1160 | 1.1160 | 1.55% | 开放申购 | 开放赎回 | |
| 2015-05-25 | 1.0990 | 1.0990 | 2.04% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/1.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2017-09-29 | 1.0542 | 1.5742 | 2.24% | 开放申购 | 开放赎回 | |
| 2017-09-28 | 1.0311 | 1.5511 | -0.55% | 开放申购 | 开放赎回 | |
| 2017-09-27 | 1.0368 | 1.5568 | 0.55% | 开放申购 | 开放赎回 | |
| 2017-09-26 | 1.0311 | 1.5511 | -0.44% | 开放申购 | 开放赎回 | |
| 2017-09-25 | 1.0357 | 1.5557 | -1.10% | 开放申购 | 开放赎回 | |
| 2017-09-22 | 1.0472 | 1.5672 | 0.24% | 开放申购 | 开放赎回 | |
| 2017-09-21 | 1.0447 | 1.5647 | -1.21% | 开放申购 | 开放赎回 | |
| 2017-09-20 | 1.0575 | 1.5775 | 1.12% | 开放申购 | 开放赎回 | |
| 2017-09-19 | 1.0458 | 1.5658 | -0.66% | 开放申购 | 开放赎回 | |
| 2017-09-18 | 1.0528 | 1.5728 | 1.00% | 开放申购 | 开放赎回 | |
| 2017-09-15 | 1.0424 | 1.5624 | -0.78% | 开放申购 | 开放赎回 | |
| 2017-09-14 | 1.0506 | 1.5706 | 0.14% | 开放申购 | 开放赎回 | |
| 2017-09-13 | 1.0491 | 1.5691 | 1.64% | 开放申购 | 开放赎回 | |
| 2017-09-12 | 1.0322 | 1.5522 | -0.16% | 开放申购 | 开放赎回 | |
| 2017-09-11 | 1.0339 | 1.5539 | 1.23% | 开放申购 | 开放赎回 | |
| 2017-09-08 | 1.0213 | 1.5413 | 0.03% | 开放申购 | 开放赎回 | |
| 2017-09-07 | 1.0210 | 1.5410 | -0.64% | 开放申购 | 开放赎回 | |
| 2017-09-06 | 1.0276 | 1.5476 | 0.38% | 开放申购 | 开放赎回 | |
| 2017-09-05 | 1.0237 | 1.5437 | -0.44% | 开放申购 | 开放赎回 | |
| 2017-09-04 | 1.0282 | 1.5482 | 0.08% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/10.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2017-01-06 | 1.4431 | 1.5131 | -0.70% | 开放申购 | 开放赎回 | |
| 2017-01-05 | 1.4533 | 1.5233 | -0.76% | 开放申购 | 开放赎回 | |
| 2017-01-04 | 1.4645 | 1.5345 | 0.43% | 开放申购 | 开放赎回 | |
| 2017-01-03 | 1.4582 | 1.5282 | 0.19% | 开放申购 | 开放赎回 | |
| 2016-12-31 | 1.4554 | 1.5254 | -0.01% | 开放申购 | 开放赎回 | |
| 2016-12-30 | 1.4555 | 1.5255 | -0.08% | 开放申购 | 开放赎回 | |
| 2016-12-29 | 1.4566 | 1.5266 | -0.84% | 开放申购 | 开放赎回 | |
| 2016-12-28 | 1.4690 | 1.5390 | -0.18% | 开放申购 | 开放赎回 | |
| 2016-12-27 | 1.4716 | 1.5416 | 1.79% | 开放申购 | 开放赎回 | |
| 2016-12-26 | 1.4457 | 1.5157 | 0.59% | 开放申购 | 开放赎回 | |
| 2016-12-23 | 1.4372 | 1.5072 | -1.36% | 开放申购 | 开放赎回 | |
| 2016-12-22 | 1.4570 | 1.5270 | 0.17% | 开放申购 | 开放赎回 | |
| 2016-12-21 | 1.4545 | 1.5245 | 1.01% | 开放申购 | 开放赎回 | |
| 2016-12-20 | 1.4399 | 1.5099 | 0.15% | 开放申购 | 开放赎回 | |
| 2016-12-19 | 1.4377 | 1.5077 | 0.17% | 开放申购 | 开放赎回 | |
| 2016-12-16 | 1.4353 | 1.5053 | 0.57% | 开放申购 | 开放赎回 | |
| 2016-12-15 | 1.4272 | 1.4972 | 0.63% | 开放申购 | 开放赎回 | |
| 2016-12-14 | 1.4183 | 1.4883 | -0.83% | 开放申购 | 开放赎回 | |
| 2016-12-13 | 1.4301 | 1.5001 | -0.42% | 开放申购 | 开放赎回 | |
| 2016-12-12 | 1.4361 | 1.5061 | -3.60% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/11.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-12-09 | 1.4898 | 1.5598 | -0.60% | 开放申购 | 开放赎回 | |
| 2016-12-08 | 1.4988 | 1.5688 | -0.33% | 开放申购 | 开放赎回 | |
| 2016-12-07 | 1.5038 | 1.5738 | 1.88% | 开放申购 | 开放赎回 | |
| 2016-12-06 | 1.4761 | 1.5461 | 0.16% | 开放申购 | 开放赎回 | |
| 2016-12-05 | 1.4737 | 1.5437 | -1.31% | 开放申购 | 开放赎回 | |
| 2016-12-02 | 1.4932 | 1.5632 | -1.36% | 开放申购 | 开放赎回 | |
| 2016-12-01 | 1.5138 | 1.5838 | 0.73% | 开放申购 | 开放赎回 | |
| 2016-11-30 | 1.5028 | 1.5728 | -1.01% | 开放申购 | 开放赎回 | |
| 2016-11-29 | 1.5181 | 1.5881 | -0.59% | 开放申购 | 开放赎回 | |
| 2016-11-28 | 1.5271 | 1.5971 | 0.48% | 开放申购 | 开放赎回 | |
| 2016-11-25 | 1.5198 | 1.5898 | 0.50% | 开放申购 | 开放赎回 | |
| 2016-11-24 | 1.5122 | 1.5822 | -0.01% | 开放申购 | 开放赎回 | |
| 2016-11-23 | 1.5123 | 1.5823 | -0.42% | 开放申购 | 开放赎回 | |
| 2016-11-22 | 1.5187 | 1.5887 | 0.21% | 开放申购 | 开放赎回 | |
| 2016-11-21 | 1.5155 | 1.5855 | 1.35% | 开放申购 | 开放赎回 | |
| 2016-11-18 | 1.4953 | 1.5653 | -1.18% | 开放申购 | 开放赎回 | |
| 2016-11-17 | 1.5132 | 1.5832 | 0.33% | 开放申购 | 开放赎回 | |
| 2016-11-16 | 1.5082 | 1.5782 | 0.76% | 开放申购 | 开放赎回 | |
| 2016-11-15 | 1.4968 | 1.5668 | -0.17% | 开放申购 | 开放赎回 | |
| 2016-11-14 | 1.4994 | 1.5694 | 0.29% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/13.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-10-14 | 1.5061 | 1.5761 | -0.44% | 开放申购 | 开放赎回 | |
| 2016-10-13 | 1.5127 | 1.5827 | 0.61% | 开放申购 | 开放赎回 | |
| 2016-10-12 | 1.5036 | 1.5736 | 0.47% | 开放申购 | 开放赎回 | |
| 2016-10-11 | 1.4965 | 1.5665 | 0.65% | 开放申购 | 开放赎回 | |
| 2016-10-10 | 1.4868 | 1.5568 | 1.93% | 开放申购 | 开放赎回 | |
| 2016-09-30 | 1.4586 | 1.5286 | 0.22% | 开放申购 | 开放赎回 | |
| 2016-09-29 | 1.4554 | 1.5254 | 0.27% | 开放申购 | 开放赎回 | |
| 2016-09-28 | 1.4515 | 1.5215 | -0.69% | 开放申购 | 开放赎回 | |
| 2016-09-27 | 1.4616 | 1.5316 | -0.14% | 开放申购 | 开放赎回 | |
| 2016-09-26 | 1.4637 | 1.5337 | -1.91% | 开放申购 | 开放赎回 | |
| 2016-09-23 | 1.4922 | 1.5622 | -0.20% | 开放申购 | 开放赎回 | |
| 2016-09-22 | 1.4952 | 1.5652 | 0.61% | 开放申购 | 开放赎回 | |
| 2016-09-21 | 1.4862 | 1.5562 | -0.31% | 开放申购 | 开放赎回 | |
| 2016-09-20 | 1.4908 | 1.5608 | 0.65% | 开放申购 | 开放赎回 | |
| 2016-09-19 | 1.4812 | 1.5512 | 1.14% | 开放申购 | 开放赎回 | |
| 2016-09-14 | 1.4645 | 1.5345 | -1.06% | 开放申购 | 开放赎回 | |
| 2016-09-13 | 1.4802 | 1.5502 | 0.35% | 开放申购 | 开放赎回 | |
| 2016-09-12 | 1.4750 | 1.5450 | -1.89% | 开放申购 | 开放赎回 | |
| 2016-09-09 | 1.5034 | 1.5734 | -0.70% | 开放申购 | 开放赎回 | |
| 2016-09-08 | 1.5140 | 1.5840 | 0.29% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/14.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-09-07 | 1.5096 | 1.5796 | -0.28% | 开放申购 | 开放赎回 | |
| 2016-09-06 | 1.5138 | 1.5838 | 1.69% | 开放申购 | 开放赎回 | |
| 2016-09-05 | 1.4887 | 1.5587 | 1.26% | 开放申购 | 开放赎回 | |
| 2016-09-02 | 1.4702 | 1.5402 | -0.55% | 开放申购 | 开放赎回 | |
| 2016-09-01 | 1.4783 | 1.5483 | -0.81% | 开放申购 | 开放赎回 | |
| 2016-08-31 | 1.4904 | 1.5604 | 0.48% | 开放申购 | 开放赎回 | |
| 2016-08-30 | 1.4833 | 1.5533 | -0.15% | 开放申购 | 开放赎回 | |
| 2016-08-29 | 1.4855 | 1.5555 | 0.32% | 开放申购 | 开放赎回 | |
| 2016-08-26 | 1.4808 | 1.5508 | 0.86% | 开放申购 | 开放赎回 | |
| 2016-08-25 | 1.4682 | 1.5382 | -0.08% | 开放申购 | 开放赎回 | |
| 2016-08-24 | 1.4694 | 1.5394 | -0.06% | 开放申购 | 开放赎回 | |
| 2016-08-23 | 1.4703 | 1.5403 | -0.24% | 开放申购 | 开放赎回 | |
| 2016-08-22 | 1.4739 | 1.5439 | -0.35% | 开放申购 | 开放赎回 | |
| 2016-08-19 | 1.4791 | 1.5491 | 0.07% | 开放申购 | 开放赎回 | |
| 2016-08-18 | 1.4781 | 1.5481 | -0.07% | 开放申购 | 开放赎回 | |
| 2016-08-17 | 1.4791 | 1.5491 | 0.33% | 开放申购 | 开放赎回 | |
| 2016-08-16 | 1.4742 | 1.5442 | 0.53% | 开放申购 | 开放赎回 | |
| 2016-08-15 | 1.4664 | 1.5364 | 1.88% | 开放申购 | 开放赎回 | |
| 2016-08-12 | 1.4393 | 1.5093 | 0.54% | 开放申购 | 开放赎回 | |
| 2016-08-11 | 1.4316 | 1.5016 | -1.51% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/17.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-06-15 | 1.4408 | 1.5108 | 3.69% | 开放申购 | 开放赎回 | |
| 2016-06-14 | 1.3895 | 1.4595 | 1.23% | 开放申购 | 开放赎回 | |
| 2016-06-13 | 1.3726 | 1.4426 | -5.44% | 开放申购 | 开放赎回 | |
| 2016-06-08 | 1.4516 | 1.5216 | -0.18% | 开放申购 | 开放赎回 | |
| 2016-06-07 | 1.4542 | 1.5242 | 0.77% | 开放申购 | 开放赎回 | |
| 2016-06-06 | 1.4431 | 1.5131 | 1.19% | 开放申购 | 开放赎回 | |
| 2016-06-03 | 1.4261 | 1.4961 | -0.29% | 开放申购 | 开放赎回 | |
| 2016-06-02 | 1.4302 | 1.5002 | 1.64% | 开放申购 | 开放赎回 | |
| 2016-06-01 | 1.4071 | 1.4771 | 1.35% | 开放申购 | 开放赎回 | |
| 2016-05-31 | 1.3884 | 1.4584 | 3.49% | 开放申购 | 开放赎回 | |
| 2016-05-30 | 1.3416 | 1.4116 | -0.22% | 开放申购 | 开放赎回 | |
| 2016-05-27 | 1.3445 | 1.4145 | 0.22% | 开放申购 | 开放赎回 | |
| 2016-05-26 | 1.3415 | 1.4115 | 0.37% | 开放申购 | 开放赎回 | |
| 2016-05-25 | 1.3365 | 1.4065 | -0.51% | 开放申购 | 开放赎回 | |
| 2016-05-24 | 1.3433 | 1.4133 | -1.12% | 开放申购 | 开放赎回 | |
| 2016-05-23 | 1.3585 | 1.4285 | 1.92% | 开放申购 | 开放赎回 | |
| 2016-05-20 | 1.3329 | 1.4029 | 1.18% | 开放申购 | 开放赎回 | |
| 2016-05-19 | 1.3173 | 1.3873 | 0.15% | 开放申购 | 开放赎回 | |
| 2016-05-18 | 1.3153 | 1.3853 | -2.82% | 开放申购 | 开放赎回 | |
| 2016-05-17 | 1.3535 | 1.4235 | -0.43% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/18.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-05-16 | 1.3594 | 1.4294 | 1.80% | 开放申购 | 开放赎回 | |
| 2016-05-13 | 1.3354 | 1.4054 | 0.16% | 开放申购 | 开放赎回 | |
| 2016-05-12 | 1.3333 | 1.4033 | 0.59% | 开放申购 | 开放赎回 | |
| 2016-05-11 | 1.3255 | 1.3955 | -1.85% | 开放申购 | 开放赎回 | |
| 2016-05-10 | 1.3505 | 1.4205 | -1.30% | 开放申购 | 开放赎回 | |
| 2016-05-09 | 1.3683 | 1.4383 | -2.65% | 开放申购 | 开放赎回 | |
| 2016-05-06 | 1.4056 | 1.4756 | -2.87% | 开放申购 | 开放赎回 | |
| 2016-05-05 | 1.4471 | 1.5171 | 0.65% | 开放申购 | 开放赎回 | |
| 2016-05-04 | 1.4377 | 1.5077 | 0.76% | 开放申购 | 开放赎回 | |
| 2016-05-03 | 1.4268 | 1.4968 | 2.22% | 开放申购 | 开放赎回 | |
| 2016-04-29 | 1.3958 | 1.4658 | 0.27% | 开放申购 | 开放赎回 | |
| 2016-04-28 | 1.3920 | 1.4620 | -0.06% | 开放申购 | 开放赎回 | |
| 2016-04-27 | 1.3928 | 1.4628 | 1.21% | 开放申购 | 开放赎回 | |
| 2016-04-26 | 1.3762 | 1.4462 | 1.20% | 开放申购 | 开放赎回 | |
| 2016-04-25 | 1.3599 | 1.4299 | -0.21% | 开放申购 | 开放赎回 | |
| 2016-04-22 | 1.3627 | 1.4327 | 1.17% | 开放申购 | 开放赎回 | |
| 2016-04-21 | 1.3470 | 1.4170 | -0.85% | 开放申购 | 开放赎回 | |
| 2016-04-20 | 1.3586 | 1.4286 | -4.18% | 开放申购 | 开放赎回 | |
| 2016-04-19 | 1.4179 | 1.4879 | -0.11% | 开放申购 | 开放赎回 | |
| 2016-04-18 | 1.4195 | 1.4895 | -0.69% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/2.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2017-09-01 | 1.0274 | 1.5474 | 0.71% | 开放申购 | 开放赎回 | |
| 2017-08-31 | 1.0202 | 1.5402 | 0.40% | 开放申购 | 开放赎回 | |
| 2017-08-30 | 1.0161 | 1.5361 | 0.33% | 开放申购 | 开放赎回 | |
| 2017-08-29 | 1.0128 | 1.5328 | -0.27% | 开放申购 | 开放赎回 | |
| 2017-08-28 | 1.0155 | 1.5355 | 1.85% | 开放申购 | 开放赎回 | |
| 2017-08-25 | 0.9971 | 1.5171 | 0.96% | 开放申购 | 开放赎回 | |
| 2017-08-24 | 0.9876 | 1.5076 | -0.39% | 开放申购 | 开放赎回 | |
| 2017-08-23 | 0.9915 | 1.5115 | -0.41% | 开放申购 | 开放赎回 | |
| 2017-08-22 | 0.9956 | 1.5156 | -0.96% | 开放申购 | 开放赎回 | |
| 2017-08-21 | 1.0053 | 1.5253 | 0.69% | 开放申购 | 开放赎回 | |
| 2017-08-18 | 0.9984 | 1.5184 | -0.79% | 开放申购 | 开放赎回 | |
| 2017-08-17 | 1.0063 | 1.5263 | 0.48% | 开放申购 | 开放赎回 | |
| 2017-08-16 | 1.0015 | 1.5215 | 0.71% | 开放申购 | 开放赎回 | |
| 2017-08-15 | 0.9944 | 1.5144 | -0.13% | 开放申购 | 开放赎回 | |
| 2017-08-14 | 0.9957 | 1.5157 | 2.33% | 开放申购 | 开放赎回 | |
| 2017-08-11 | 0.9730 | 1.4930 | -0.93% | 开放申购 | 开放赎回 | |
| 2017-08-10 | 0.9821 | 1.5021 | -0.40% | 开放申购 | 开放赎回 | |
| 2017-08-09 | 0.9860 | 1.5060 | 0.27% | 开放申购 | 开放赎回 | |
| 2017-08-08 | 0.9833 | 1.5033 | 0.11% | 开放申购 | 开放赎回 | |
| 2017-08-07 | 0.9822 | 1.5022 | 0.47% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/21.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-02-18 | 1.3519 | 1.4219 | 0.13% | 开放申购 | 开放赎回 | |
| 2016-02-17 | 1.3502 | 1.4202 | 1.66% | 开放申购 | 开放赎回 | |
| 2016-02-16 | 1.3282 | 1.3982 | 4.43% | 开放申购 | 开放赎回 | |
| 2016-02-15 | 1.2718 | 1.3418 | 0.54% | 开放申购 | 开放赎回 | |
| 2016-02-05 | 1.2650 | 1.3350 | -0.99% | 开放申购 | 开放赎回 | |
| 2016-02-04 | 1.2777 | 1.3477 | 2.13% | 开放申购 | 开放赎回 | |
| 2016-02-03 | 1.2510 | 1.3210 | 1.77% | 开放申购 | 开放赎回 | |
| 2016-02-02 | 1.2293 | 1.2993 | 4.08% | 开放申购 | 开放赎回 | |
| 2016-02-01 | 1.1811 | 1.2511 | -1.53% | 开放申购 | 开放赎回 | |
| 2016-01-29 | 1.1995 | 1.2695 | 3.47% | 开放申购 | 开放赎回 | |
| 2016-01-28 | 1.1593 | 1.2293 | -4.32% | 开放申购 | 开放赎回 | |
| 2016-01-27 | 1.2117 | 1.2817 | -2.91% | 开放申购 | 开放赎回 | |
| 2016-01-26 | 1.2480 | 1.3180 | -6.28% | 开放申购 | 开放赎回 | |
| 2016-01-25 | 1.3316 | 1.4016 | 0.86% | 开放申购 | 开放赎回 | |
| 2016-01-22 | 1.3202 | 1.3902 | 2.13% | 开放申购 | 开放赎回 | |
| 2016-01-21 | 1.2927 | 1.3627 | -4.83% | 开放申购 | 开放赎回 | |
| 2016-01-20 | 1.3583 | 1.4283 | -0.56% | 开放申购 | 开放赎回 | |
| 2016-01-19 | 1.3660 | 1.4360 | 4.05% | 开放申购 | 开放赎回 | |
| 2016-01-18 | 1.3128 | 1.3828 | 2.00% | 开放申购 | 开放赎回 | |
| 2016-01-15 | 1.2870 | 1.3570 | -3.22% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/22.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2016-01-14 | 1.3298 | 1.3998 | 3.68% | 开放申购 | 开放赎回 | |
| 2016-01-13 | 1.2826 | 1.3526 | -3.32% | 开放申购 | 开放赎回 | |
| 2016-01-12 | 1.3267 | 1.3967 | 2.88% | 开放申购 | 开放赎回 | |
| 2016-01-11 | 1.2896 | 1.3596 | -5.15% | 开放申购 | 开放赎回 | |
| 2016-01-08 | 1.3596 | 1.4296 | -0.41% | 开放申购 | 开放赎回 | |
| 2016-01-07 | 1.3652 | 1.4352 | -5.40% | 开放申购 | 开放赎回 | |
| 2016-01-06 | 1.4432 | 1.5132 | 1.63% | 开放申购 | 开放赎回 | |
| 2016-01-05 | 1.4201 | 1.4901 | -1.80% | 开放申购 | 开放赎回 | |
| 2016-01-04 | 1.4461 | 1.5161 | -6.93% | 开放申购 | 开放赎回 | |
| 2015-12-31 | 1.5538 | 1.6238 | -1.82% | 开放申购 | 开放赎回 | |
| 2015-12-30 | 1.5826 | 1.6526 | 0.97% | 开放申购 | 开放赎回 | |
| 2015-12-29 | 1.5674 | 1.6374 | 1.36% | 开放申购 | 开放赎回 | |
| 2015-12-28 | 1.5464 | 1.6164 | -1.35% | 开放申购 | 开放赎回 | |
| 2015-12-25 | 1.5676 | 1.6376 | 1.10% | 开放申购 | 开放赎回 | |
| 2015-12-24 | 1.5505 | 1.6205 | -0.88% | 开放申购 | 开放赎回 | |
| 2015-12-23 | 1.5643 | 1.6343 | -2.21% | 开放申购 | 开放赎回 | |
| 2015-12-22 | 1.5996 | 1.6696 | 0.38% | 开放申购 | 开放赎回 | |
| 2015-12-21 | 1.5935 | 1.6635 | 0.23% | 开放申购 | 开放赎回 | |
| 2015-12-18 | 1.5899 | 1.6599 | -0.74% | 开放申购 | 开放赎回 | |
| 2015-12-17 | 1.6017 | 1.6717 | 3.00% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/23.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-12-16 | 1.5550 | 1.6250 | 0.27% | 开放申购 | 开放赎回 | |
| 2015-12-15 | 1.5508 | 1.6208 | 1.48% | 开放申购 | 开放赎回 | |
| 2015-12-14 | 1.5282 | 1.5982 | 1.17% | 开放申购 | 开放赎回 | |
| 2015-12-11 | 1.5105 | 1.5805 | -0.20% | 开放申购 | 开放赎回 | |
| 2015-12-10 | 1.5135 | 1.5835 | -0.57% | 开放申购 | 开放赎回 | |
| 2015-12-09 | 1.5221 | 1.5921 | 0.77% | 开放申购 | 开放赎回 | |
| 2015-12-08 | 1.5105 | 1.5805 | -1.33% | 开放申购 | 开放赎回 | |
| 2015-12-07 | 1.5308 | 1.6008 | 1.86% | 开放申购 | 开放赎回 | |
| 2015-12-04 | 1.5028 | 1.5728 | -0.23% | 开放申购 | 开放赎回 | |
| 2015-12-03 | 1.5062 | 1.5762 | 2.01% | 开放申购 | 开放赎回 | |
| 2015-12-02 | 1.4765 | 1.5465 | -1.70% | 开放申购 | 开放赎回 | |
| 2015-12-01 | 1.5020 | 1.5720 | -1.86% | 开放申购 | 开放赎回 | |
| 2015-11-30 | 1.5305 | 1.6005 | 0.82% | 开放申购 | 开放赎回 | |
| 2015-11-27 | 1.5180 | 1.5880 | -5.07% | 开放申购 | 开放赎回 | |
| 2015-11-26 | 1.5990 | 1.6690 | -0.16% | 开放申购 | 开放赎回 | |
| 2015-11-25 | 1.6016 | 1.6716 | 2.57% | 开放申购 | 开放赎回 | |
| 2015-11-24 | 1.5615 | 1.6315 | 1.51% | 开放申购 | 开放赎回 | |
| 2015-11-23 | 1.5383 | 1.6083 | -1.32% | 开放申购 | 开放赎回 | |
| 2015-11-20 | 1.5589 | 1.6289 | 1.66% | 开放申购 | 开放赎回 | |
| 2015-11-19 | 1.5335 | 1.6035 | 3.80% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------
/crawl_fund/htmls/details/580005/24.txt:
--------------------------------------------------------------------------------
1 | | 净值日期 | 单位净值 | 累计净值 | 日增长率 | 申购状态 | 赎回状态 | 分红送配 |
|---|---|---|---|---|---|---|
| 2015-11-18 | 1.4774 | 1.5474 | -1.85% | 开放申购 | 开放赎回 | |
| 2015-11-17 | 1.5053 | 1.5753 | -1.14% | 开放申购 | 开放赎回 | |
| 2015-11-16 | 1.5226 | 1.5926 | 2.19% | 开放申购 | 开放赎回 | |
| 2015-11-13 | 1.4900 | 1.5600 | -1.41% | 开放申购 | 开放赎回 | |
| 2015-11-12 | 1.5113 | 1.5813 | -0.20% | 开放申购 | 开放赎回 | |
| 2015-11-11 | 1.5143 | 1.5843 | 1.33% | 开放申购 | 开放赎回 | |
| 2015-11-10 | 1.4944 | 1.5644 | 0.40% | 开放申购 | 开放赎回 | |
| 2015-11-09 | 1.4884 | 1.5584 | 2.25% | 开放申购 | 开放赎回 | |
| 2015-11-06 | 1.4557 | 1.5257 | 3.65% | 开放申购 | 开放赎回 | |
| 2015-11-05 | 1.4045 | 1.4745 | -0.99% | 开放申购 | 开放赎回 | |
| 2015-11-04 | 1.4186 | 1.4886 | 4.55% | 开放申购 | 开放赎回 | |
| 2015-11-03 | 1.3569 | 1.4269 | 0.13% | 开放申购 | 开放赎回 | |
| 2015-11-02 | 1.3551 | 1.4251 | -2.07% | 开放申购 | 开放赎回 | |
| 2015-10-30 | 1.3837 | 1.4537 | 1.82% | 开放申购 | 开放赎回 | |
| 2015-10-29 | 1.3590 | 1.4290 | 2.40% | 开放申购 | 开放赎回 | |
| 2015-10-28 | 1.3271 | 1.3971 | -2.37% | 开放申购 | 开放赎回 | |
| 2015-10-27 | 1.3593 | 1.4293 | 0.72% | 开放申购 | 开放赎回 | |
| 2015-10-26 | 1.3496 | 1.4196 | 1.70% | 开放申购 | 开放赎回 | |
| 2015-10-23 | 1.3270 | 1.3970 | 2.84% | 开放申购 | 开放赎回 | |
| 2015-10-22 | 1.2903 | 1.3603 | 3.62% | 开放申购 | 开放赎回 | |
--------------------------------------------------------------------------------