├── fanPie
│   ├── fanPie
│   │   ├── __init__.py
│   │   ├── items.py
│   │   ├── spiders
│   │   │   ├── episode.py
│   │   │   ├── __init__.py
│   │   │   └── episodes.py
│   │   ├── pipelines.py
│   │   ├── settings.py
│   │   └── middlewares.py
│   ├── assets
│   │   └── 939x0w.jpg
│   ├── scrapy.cfg
│   ├── all_episodes.py
│   ├── update.py
│   ├── tests
│   │   ├── test_pipelines.test.py
│   │   └── test_feedparser.py
│   └── feed_parser.py
├── .gitignore
├── .vscode
│   └── settings.json
├── Pipfile
├── readme.md
├── setup.py
├── LICENSE
└── Pipfile.lock
/fanPie/fanPie/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/fanPie/assets/939x0w.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Reyshawn/FanpieFilmFeed/HEAD/fanPie/assets/939x0w.jpg
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /bin
2 | /include
3 | /lib
4 | /__pycache__
5 | .Python
6 |
7 | .DS_Store
8 | *.pyc
9 |
10 |
11 | helper.rss
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.pythonPath": "${workspaceFolder}/bin/python3",
3 | "editor.wordWrap": "off",
4 | "editor.tabSize": 4
5 | }
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | scrapy = "2.5.1"
8 |
9 | [dev-packages]
10 |
11 | [requires]
12 | python_version = "3.10"
13 |
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # FanpieFilmFeed
2 |
3 | Please support the official FanpieFilm website: https://fanpaiyingping.com
4 | 
5 | Subscribe to this RSS feed: [fanPieFilm](https://raw.githubusercontent.com/Reyshawn/FanpieFilmFeed/master/fanPieFilm.rss)
6 | I will update this file whenever the podcast updates on WeChat.
--------------------------------------------------------------------------------
/fanPie/scrapy.cfg:
--------------------------------------------------------------------------------
1 | # Automatically created by: scrapy startproject
2 | #
3 | # For more information about the [deploy] section see:
4 | # https://scrapyd.readthedocs.io/en/latest/deploy.html
5 |
6 | [settings]
7 | default = fanPie.settings
8 |
9 | [deploy]
10 | #url = http://localhost:6800/
11 | project = fanPie
12 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | with open("README.md", "r", "utf-8") as f:
4 | readme = f.read()
5 |
6 | setup(
7 | name="FanpieFilmFeed",
8 | version="1.0.0",
9 | description="🎬A complete FanpieFilm podcast rss feed with detailed shownotes.",
10 | long_description=readme,
11 | author="Reyshawn",
12 | author_email="reshawnchang@gmail.com",
13 | url="https://github.com/Reyshawn/FanpieFilmFeed",
14 | packages=find_packages(exclude=['tests']),
15 | install_requires=[
16 | "scrapy==2.4.0"
17 | ]
18 | )
19 |
--------------------------------------------------------------------------------
/fanPie/all_episodes.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | from scrapy.crawler import CrawlerProcess
4 | from scrapy.utils.project import get_project_settings
5 |
6 | def cmp(i):
7 | return int(i['episode'][:3]), len(i['episode']), int(i['episode'][-1:])
8 |
9 | if __name__ == "__main__":
10 |
11 | if os.path.isfile('output.json'):
12 | os.remove('output.json')
13 |
14 | settings_file_path = 'fanPie.settings'
15 | os.environ.setdefault('SCRAPY_SETTINGS_MODULE', settings_file_path)
16 |
17 | process = CrawlerProcess(get_project_settings())
18 | process.crawl('episodes')
19 |
20 | process.start()
21 |
22 | with open('output.json', 'r') as f:
23 | data = json.load(f)
24 |
25 | data.sort(key=cmp, reverse=True)
26 |
27 | with open('output.json', 'w+') as f:
28 | json.dump(data, f, ensure_ascii=False)
29 |
30 |
--------------------------------------------------------------------------------
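
The sort key in all_episodes.py deserves a note: multi-part episodes are stored as strings like '048 § 1', and the key orders by episode number first, then keeps each '§'-suffixed part next to its main entry. A minimal sketch with invented sample data:

    episodes = [{'episode': '048 § 1'}, {'episode': '157'}, {'episode': '048'}]

    def cmp(i):
        # (number, string length, trailing digit) -- parts sort above their plain entry in reverse order
        return int(i['episode'][:3]), len(i['episode']), int(i['episode'][-1:])

    episodes.sort(key=cmp, reverse=True)
    print([e['episode'] for e in episodes])  # ['157', '048 § 1', '048']
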
/fanPie/fanPie/items.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define here the models for your scraped items
4 | #
5 | # See documentation in:
6 | # https://docs.scrapy.org/en/latest/topics/items.html
7 |
8 | import scrapy
9 | from itemloaders.processors import TakeFirst
10 |
11 |
12 | class FanpieItem(scrapy.Item):
13 | # define the fields for your item here like:
14 | # name = scrapy.Field()
15 | episode = scrapy.Field(output_processor=TakeFirst())
16 | film = scrapy.Field(output_processor=TakeFirst())
17 | title = scrapy.Field()
18 | hosts = scrapy.Field()
19 | shownotes = scrapy.Field(output_processor=TakeFirst())
20 | url = scrapy.Field(output_processor=TakeFirst())
21 | duration = scrapy.Field(output_processor=TakeFirst())
22 | pub_date = scrapy.Field(output_processor=TakeFirst())
23 | link = scrapy.Field(output_processor=TakeFirst())
24 |
25 |
26 | if __name__ == "__main__":
27 | film = FanpieItem(episode=1, title="今年的金熊金狮金棕榈,这部是最好的。", hosts=['波米', '雷普利'])
28 | print(film['title'])
29 |
--------------------------------------------------------------------------------
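
Most fields in items.py declare output_processor=TakeFirst(), so loading them through an ItemLoader yields scalars rather than one-element lists. A small sketch (assumes the fanPie package is importable, e.g. when run from the project root):

    from scrapy.loader import ItemLoader
    from fanPie.items import FanpieItem

    l = ItemLoader(item=FanpieItem())
    l.add_value('episode', ['157', '158'])
    l.add_value('title', '157《复仇者联盟4》')
    item = l.load_item()
    print(item['episode'])  # '157' -- TakeFirst() keeps only the first value
    print(item['title'])    # ['157《复仇者联盟4》'] -- no output processor, so it stays a list
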
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Reyshawn
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/fanPie/fanPie/spiders/episode.py:
--------------------------------------------------------------------------------
1 | import scrapy
2 | from scrapy.loader import ItemLoader
3 |
4 | from ..items import FanpieItem
5 | from . import parse_response, validate_item
6 | import re
7 |
8 |
9 | class updateSpider(scrapy.Spider):
10 | name = 'episode'
11 | custom_settings = {
12 | 'FEED_FORMAT': 'json',
13 | 'FEED_URI': 'latest.json'
14 | }
15 | # start_urls = ['https://mp.weixin.qq.com/s/uF0GiJbi4A5WBxq5iUPuqQ']
16 |
17 | def __init__(self, url='', episode='', film='', hosts=''):  # hosts arrives as a comma-separated string, not a list
18 | self.start_urls = [url]
19 | self._episode = episode
20 | self._film = film
21 | self._hosts = hosts.split(',')
22 |
23 | def parse(self, response):
24 | l = ItemLoader(item=FanpieItem(), response=response)
25 | l.add_css('title', '.rich_media_title::text')
26 |
27 | l.add_value('episode', self._episode)
28 | l.add_value('film', self._film)
29 | l.add_value('hosts', self._hosts)
30 | l.add_value('link', self.start_urls[0])
31 |
32 | parse_response(response, l)
33 |
34 | validate_item(l)
35 |
36 | yield l.load_item()
37 |
38 |
39 | if __name__ == "__main__":
40 | pass
41 |
--------------------------------------------------------------------------------
/fanPie/update.py:
--------------------------------------------------------------------------------
1 | # Update the feed data with the newest episode
2 |
3 | import os
4 | import json
5 | from scrapy.crawler import CrawlerProcess
6 | from scrapy.utils.project import get_project_settings
7 |
8 | def is_latest(**kwargs):
9 | pass
10 |
11 | if __name__ == "__main__":
12 |
13 | settings_file_path = 'fanPie.settings'
14 | os.environ.setdefault('SCRAPY_SETTINGS_MODULE', settings_file_path)
15 |
16 | process = CrawlerProcess(get_project_settings())
17 |
18 | with open('output.json', 'r') as f:
19 | output = json.load(f)
20 |
21 | episode = int(output[0]['episode'][:3]) + 1  # [:3]: a '§'-suffixed entry on top would otherwise break int()
22 | episode = f"{episode:03d}" # '001' '012' '174' format
23 |
24 | if os.path.isfile('latest.json'):
25 | os.remove('latest.json')
26 |
27 | kwargs = {
28 | 'url':'https://mp.weixin.qq.com/s/yqi_tfA7T89_vemI7ry4Yw',
29 | 'episode': episode,
30 | 'film': '钛',
31 | 'hosts': '弦子,胤祥,波米'
32 | }
33 |
34 | process.crawl('episode', **kwargs)
35 | process.start()
36 |
37 | with open('latest.json', 'r') as f:
38 | latest = json.load(f)
39 |
40 | output = latest + output
41 |
42 | with open('output.json', 'w+') as f:
43 | json.dump(output, f, ensure_ascii=False)
44 |
45 | # remove the temporary latest.json
46 | os.remove('latest.json')
--------------------------------------------------------------------------------
/fanPie/fanPie/spiders/__init__.py:
--------------------------------------------------------------------------------
1 | # This package will contain the spiders of your Scrapy project
2 | #
3 | # Please refer to the documentation for information on how to create and manage
4 | # your spiders.
5 |
6 | import re
7 |
8 | def parse_response(response, l):
9 | st = response.text.find(r'
[... the remainder of line 9 through most of line 21 was lost in extraction: the rest of parse_response, the validate_item helper, and the head of the 'episodes' spider, whose parse() iterates the archive entries as `i` ...]
21 | title = re.sub(r'<[^>]*>', '', i.get()).strip()
22 | if title:
23 | num = title[:3]
24 | hosts_list = re.findall(r'([^:]*?(?:嘉宾:?)([^)]*))', title)
25 | film = re.sub(r'([^:]*?(?:嘉宾:?)([^)]*))', '', title[3:]).strip()
26 |
27 | hosts = []
28 | for host in hosts_list:
29 | if '、' in host:
30 | hosts += host.split('、')
31 | else:
32 | hosts.append(host)
33 |
34 | print('title:', title)
35 | if num == '111':
36 | url = 'https://mp.weixin.qq.com/s/_phqvDLLqsI_4uJ7gSDoQw'
37 | request = scrapy.Request(url, self.parse_article)
38 | request.meta['episode'] = num
39 | request.meta['film'] = film
40 | request.meta['hosts'] = hosts
41 | request.meta['link'] = url
42 | yield request
43 | elif i.css('a'):
44 | for j in range(len(i.css('a'))):
45 | url = i.css('a')[j].attrib['href']
46 | request = scrapy.Request(url, self.parse_article)
47 | request.meta['episode'] = num if j == 0 else num + ' § ' + str(j)
48 | request.meta['film'] = film
49 | request.meta['hosts'] = hosts
50 | request.meta['link'] = url
51 | yield request
52 |
53 |
54 | def parse_article(self, response):
55 | l = ItemLoader(item=FanpieItem(), response=response)
56 | l.add_css('title', '.rich_media_title::text')
57 | l.add_value('episode', response.meta['episode'])
58 | l.add_value('film', response.meta['film'])
59 | l.add_value('hosts', response.meta['hosts'])
60 | l.add_value('link', response.meta['link'])
61 |
62 | parse_response(response, l)
63 |
64 | yield l.load_item()
65 |
66 | if __name__ == "__main__":
67 | pass
--------------------------------------------------------------------------------
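
The 'episodes' spider hands episode metadata from parse() to parse_article() through Request.meta. For reference, a hedged sketch of the same hand-off using cb_kwargs (available since Scrapy 1.7), which keeps scraped fields out of meta; the class name, URL, and values here are placeholders:

    import scrapy

    class EpisodesSketch(scrapy.Spider):  # hypothetical spider, for illustration only
        name = 'episodes_sketch'

        def parse(self, response):
            url = 'https://mp.weixin.qq.com/s/...'  # placeholder
            yield scrapy.Request(url, self.parse_article,
                                 cb_kwargs={'episode': '157', 'film': '钛', 'hosts': ['波米'], 'link': url})

        def parse_article(self, response, episode, film, hosts, link):
            # each cb_kwargs key arrives as a keyword argument
            self.logger.info('parsed %s (%s)', episode, film)
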
/fanPie/fanPie/pipelines.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define your item pipelines here
4 | #
5 | # Don't forget to add your pipeline to the ITEM_PIPELINES setting
6 | # See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
7 | import re
8 |
9 | class FanpiePipeline(object):
10 |
11 | def open_spider(self, spider):
12 | print('start the pipeline!!!!!!')
13 |
14 | def close_spider(self, spider):
15 | print('close the pipeline⚠️')
16 |
17 | def process_item(self, item, spider):
18 |
19 | item['title'] = item['title'][0].strip()
20 | NotesParser(item)
21 | return item
22 |
23 |
24 | class NotesParser:
25 |
26 | def __init__(self, episode):
27 | episode['shownotes']['shownotes_original'] = episode['shownotes']['shownotes_original'].replace('&nbsp;', ' ')  # assumed '&nbsp;': the first argument was mangled in this dump
28 | self._shownotes = episode['shownotes']['shownotes_original']
29 | list_pos_1 = ['本期节目重点提及', '本期片目', '本期涉及', '节目提及', '本期节目重点提及的电影片单', '重点提及的电影片单', '相关泛音乐传记类型影片', '话题:按时间顺序', '节目中谈及的广义上的徐克作品']
30 | list_pos_2 = ['往期节目', '若想下载', '安卓用户下载', '抽奖', '获奖', '本周热映新片', '本周新推送节目', '片子基本信息', '耳旁风', '立刻收听', '若批评不自由,则赞美无意义', '2018年其它']
31 | self._pos_1 = self.find_pos(list_pos_1, self._shownotes)
32 | next_pos = self.find_pos(list_pos_2, self._shownotes[self._pos_1:])
33 | self._pos_2 = self._pos_1 + next_pos if next_pos > -1 else next_pos
34 |
35 | episode['shownotes']['film_list_range'] = (self._pos_1, self._pos_2)
36 | episode['shownotes']['film_list_original'] = self._shownotes[self._pos_1:self._pos_2]
37 | episode['shownotes']['film_list'] = self.pars_film_list(episode['shownotes']['film_list_original'])
38 |
39 | episode['shownotes']['film_outline'] = self.pars_outline(self._shownotes[:self._pos_1])
40 | episode['shownotes']['film_scoring'] = self.pars_scoring(self._shownotes)
41 |
42 |
43 | # find the beginning position of some sections, film list, outline, scoring
44 | def find_pos(self, key_words, section):
45 | pos = len(section)
46 | for word in key_words:
47 | k = section.find(word)
48 | pos = k if k > -1 and k < pos else pos
49 | return pos if pos < len(section) else -1
50 |
51 |
52 | def pars_outline(self, section):
53 | out_key = ['本期节目流程', '节目流程']
54 | self._out_pos = self.find_pos(out_key, section)
55 | return section[self._out_pos:] if self._out_pos > -1 else '❌'
56 |
57 |
58 | def pars_scoring(self, section):
59 | sc_key = ['综合分数', '主播打分', '评分', '总分', '本期主创团队', ':约', '点击收听节目']
60 | if self._out_pos > -1:
61 | section = section[:self._out_pos]
62 | else:
63 | section = section[self._pos_1:self._pos_2]
64 | sc_pos = self.find_pos(sc_key, section)
65 | return section[sc_pos:] if sc_pos > -1 else '❌'
66 |
67 | def pars_film_list(self, section):
68 | pattern = r'《([^》]*)》[^《》]*?\(?[^)]*?([0-9]{4}|[0-9]{4}-[0-9]{4})[^)]*?\)?'
69 |
70 | matches = re.findall(pattern, section)
71 | res = []
72 | for name, time in matches:
73 | tmp = {}
74 | tmp['name'] = name
75 | tmp['time'] = time
76 | res.append(tmp)
77 | return res
78 |
--------------------------------------------------------------------------------
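
The find_pos helper above returns the earliest occurrence of any keyword, or -1 when none matches; NotesParser uses two such positions to slice the film list out of the shownotes. A minimal sketch with an invented shownote:

    def find_pos(key_words, section):
        pos = len(section)
        for word in key_words:
            k = section.find(word)
            pos = k if k > -1 and k < pos else pos
        return pos if pos < len(section) else -1

    notes = '节目流程……本期片目《钛》(2021)……往期节目……'
    start = find_pos(['本期片目', '节目提及'], notes)
    end = find_pos(['往期节目', '抽奖'], notes[start:])
    print(notes[start:start + end])  # the film-list slice between the two markers
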
/fanPie/fanPie/settings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Scrapy settings for fanPie project
4 | #
5 | # For simplicity, this file contains only settings considered important or
6 | # commonly used. You can find more settings consulting the documentation:
7 | #
8 | # https://docs.scrapy.org/en/latest/topics/settings.html
9 | # https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
10 | # https://docs.scrapy.org/en/latest/topics/spider-middleware.html
11 |
12 | BOT_NAME = 'fanPie'
13 |
14 | SPIDER_MODULES = ['fanPie.spiders']
15 | NEWSPIDER_MODULE = 'fanPie.spiders'
16 |
17 |
18 | # Crawl responsibly by identifying yourself (and your website) on the user-agent
19 | #USER_AGENT = 'fanPie (+http://www.yourdomain.com)'
20 |
21 | # Obey robots.txt rules
22 | ROBOTSTXT_OBEY = False
23 |
24 | # Configure maximum concurrent requests performed by Scrapy (default: 16)
25 | #CONCURRENT_REQUESTS = 32
26 |
27 | # Configure a delay for requests for the same website (default: 0)
28 | # See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
29 | # See also autothrottle settings and docs
30 | #DOWNLOAD_DELAY = 3
31 | # The download delay setting will honor only one of:
32 | #CONCURRENT_REQUESTS_PER_DOMAIN = 16
33 | #CONCURRENT_REQUESTS_PER_IP = 16
34 |
35 | # Disable cookies (enabled by default)
36 | #COOKIES_ENABLED = False
37 |
38 | # Disable Telnet Console (enabled by default)
39 | #TELNETCONSOLE_ENABLED = False
40 |
41 | # Override the default request headers:
42 | #DEFAULT_REQUEST_HEADERS = {
43 | # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
44 | # 'Accept-Language': 'en',
45 | #}
46 |
47 | # Enable or disable spider middlewares
48 | # See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
49 | #SPIDER_MIDDLEWARES = {
50 | # 'fanPie.middlewares.FanpieSpiderMiddleware': 543,
51 | #}
52 |
53 | # Enable or disable downloader middlewares
54 | # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
55 | #DOWNLOADER_MIDDLEWARES = {
56 | # 'fanPie.middlewares.FanpieDownloaderMiddleware': 543,
57 | #}
58 |
59 | # Enable or disable extensions
60 | # See https://docs.scrapy.org/en/latest/topics/extensions.html
61 | #EXTENSIONS = {
62 | # 'scrapy.extensions.telnet.TelnetConsole': None,
63 | #}
64 |
65 | # Configure item pipelines
66 | # See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
67 | ITEM_PIPELINES = {
68 | 'fanPie.pipelines.FanpiePipeline': 300,
69 | }
70 |
71 | # Enable and configure the AutoThrottle extension (disabled by default)
72 | # See https://docs.scrapy.org/en/latest/topics/autothrottle.html
73 | #AUTOTHROTTLE_ENABLED = True
74 | # The initial download delay
75 | #AUTOTHROTTLE_START_DELAY = 5
76 | # The maximum download delay to be set in case of high latencies
77 | #AUTOTHROTTLE_MAX_DELAY = 60
78 | # The average number of requests Scrapy should be sending in parallel to
79 | # each remote server
80 | #AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
81 | # Enable showing throttling stats for every response received:
82 | #AUTOTHROTTLE_DEBUG = False
83 |
84 | # Enable and configure HTTP caching (disabled by default)
85 | # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
86 | #HTTPCACHE_ENABLED = True
87 | #HTTPCACHE_EXPIRATION_SECS = 0
88 | #HTTPCACHE_DIR = 'httpcache'
89 | #HTTPCACHE_IGNORE_HTTP_CODES = []
90 | #HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
91 |
92 | FEED_EXPORT_ENCODING = 'utf-8'
--------------------------------------------------------------------------------
/fanPie/tests/test_pipelines.test.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 |
4 | # re = r'《([^》]*)》[ ]*?[(|(]?[^()()]*?([0-9]{4})[)|)]?'
5 | ORIGINAL_PATH = '/Users/reyshawn/Desktop/FanpieFilm/fanPie/output.json'
6 | OUTPUT_PATH = '/Users/reyshawn/Desktop/output.json'
7 |
8 | class NotesParser:
9 |
10 | def __init__(self, episode):
11 | episode['shownotes']['shownotes_original'] = episode['shownotes']['shownotes_original'].replace('&nbsp;', ' ')  # assumed '&nbsp;', as in pipelines.py
12 | self._shownotes = episode['shownotes']['shownotes_original']
13 | list_pos_1 = ['本期节目重点提及', '本期片目', '节目提及', '本期节目重点提及的电影片单']
14 | list_pos_2 = ['往期节目', '若想下载', '安卓用户下载', '抽奖', '获奖', '本周热映新片', '本周新推送节目', '片子基本信息', '耳旁风', '立刻收听', '若批评不自由,则赞美无意义', '2018年其它']
15 | self._pos_1 = self.find_pos(list_pos_1, self._shownotes)
16 | self._pos_2 = self._pos_1 + self.find_pos(list_pos_2, self._shownotes[self._pos_1:])
17 |
18 | episode['shownotes']['film_list_range'] = (self._pos_1, self._pos_2)
19 | episode['shownotes']['film_list_original'] = self._shownotes[self._pos_1:self._pos_2]
20 | episode['shownotes']['film_list'] = self.pars_film_list(episode['shownotes']['film_list_original'])
21 |
22 | episode['shownotes']['film_outline'] = self.pars_outline(self._shownotes[:self._pos_1])
23 | episode['shownotes']['film_scoring'] = self.pars_scoring(self._shownotes)
24 |
25 |
26 | # find the beginning position of some sections, film list, outline, scoring
27 | def find_pos(self, key_words, section):
28 | pos = len(section)
29 | for word in key_words:
30 | k = section.find(word)
31 | pos = k if k > -1 and k < pos else pos
32 | return pos if pos < len(section) else -1
33 |
34 |
35 | def pars_outline(self, section):
36 | out_key = ['本期节目流程', '节目流程']
37 | self._out_pos = self.find_pos(out_key, section)
38 | return section[self._out_pos:] if self._out_pos > -1 else '❌'
39 |
40 |
41 | def pars_scoring(self, section):
42 | sc_key = ['综合分数', '主播打分', '评分', '总分']
43 | if self._out_pos > -1:
44 | section = section[:self._out_pos]
45 | else:
46 | section = section[self._pos_1:self._pos_2]
47 | sc_pos = self.find_pos(sc_key, section)
48 | return section[sc_pos:] if sc_pos > -1 else '❌'
49 |
50 | def pars_film_list(self, section):
51 | pattern = r'《([^》]*)》[^《》]*?\(?[^)]*?([0-9]{4}|[0-9]{4}-[0-9]{4})[^)]*?\)?'
52 |
53 | matches = re.findall(pattern, section)
54 | res = []
55 | for name, time in matches:
56 | tmp = {}
57 | tmp['name'] = name
58 | tmp['time'] = time
59 | res.append(tmp)
60 | return res
61 |
62 |
63 |
64 | def load_data(path, output):
65 | with open(path, 'r') as f:
66 | data = json.load(f)
67 |
68 | with open(output, 'w+') as f:
69 | json.dump(data, f, ensure_ascii=False)
70 |
71 |
72 | def test_data(path):
73 | with open(path, 'r') as f:
74 | data = json.load(f)
75 |
76 | for i in range(len(data)):
77 | try:
78 | if data[i]['url'] == '❌':
79 | print(i)
80 | print(data[i]['episode'])
81 | except:
82 | print(i)
83 | print('episode', data[i]['episode'])
84 |
85 | # with open('/Users/reyshawn/Desktop/test.json', 'w+') as f:
86 | # json.dump(res, f, ensure_ascii=False)
87 |
88 | if __name__ == "__main__":
89 | load_data(ORIGINAL_PATH, OUTPUT_PATH)
90 | # test_data(OUTPUT_PATH)
--------------------------------------------------------------------------------
/fanPie/fanPie/middlewares.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define here the models for your spider middleware
4 | #
5 | # See documentation in:
6 | # https://docs.scrapy.org/en/latest/topics/spider-middleware.html
7 |
8 | from scrapy import signals
9 |
10 |
11 | class FanpieSpiderMiddleware(object):
12 | # Not all methods need to be defined. If a method is not defined,
13 | # scrapy acts as if the spider middleware does not modify the
14 | # passed objects.
15 |
16 | @classmethod
17 | def from_crawler(cls, crawler):
18 | # This method is used by Scrapy to create your spiders.
19 | s = cls()
20 | crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
21 | return s
22 |
23 | def process_spider_input(self, response, spider):
24 | # Called for each response that goes through the spider
25 | # middleware and into the spider.
26 |
27 | # Should return None or raise an exception.
28 | return None
29 |
30 | def process_spider_output(self, response, result, spider):
31 | # Called with the results returned from the Spider, after
32 | # it has processed the response.
33 |
34 | # Must return an iterable of Request, dict or Item objects.
35 | for i in result:
36 | yield i
37 |
38 | def process_spider_exception(self, response, exception, spider):
39 | # Called when a spider or process_spider_input() method
40 | # (from other spider middleware) raises an exception.
41 |
42 | # Should return either None or an iterable of Request, dict
43 | # or Item objects.
44 | pass
45 |
46 | def process_start_requests(self, start_requests, spider):
47 | # Called with the start requests of the spider, and works
48 | # similarly to the process_spider_output() method, except
49 | # that it doesn’t have a response associated.
50 |
51 | # Must return only requests (not items).
52 | for r in start_requests:
53 | yield r
54 |
55 | def spider_opened(self, spider):
56 | spider.logger.info('Spider opened: %s' % spider.name)
57 |
58 |
59 | class FanpieDownloaderMiddleware(object):
60 | # Not all methods need to be defined. If a method is not defined,
61 | # scrapy acts as if the downloader middleware does not modify the
62 | # passed objects.
63 |
64 | @classmethod
65 | def from_crawler(cls, crawler):
66 | # This method is used by Scrapy to create your spiders.
67 | s = cls()
68 | crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
69 | return s
70 |
71 | def process_request(self, request, spider):
72 | # Called for each request that goes through the downloader
73 | # middleware.
74 |
75 | # Must either:
76 | # - return None: continue processing this request
77 | # - or return a Response object
78 | # - or return a Request object
79 | # - or raise IgnoreRequest: process_exception() methods of
80 | # installed downloader middleware will be called
81 | return None
82 |
83 | def process_response(self, request, response, spider):
84 | # Called with the response returned from the downloader.
85 |
86 | # Must either;
87 | # - return a Response object
88 | # - return a Request object
89 | # - or raise IgnoreRequest
90 | return response
91 |
92 | def process_exception(self, request, exception, spider):
93 | # Called when a download handler or a process_request()
94 | # (from other downloader middleware) raises an exception.
95 |
96 | # Must either:
97 | # - return None: continue processing this exception
98 | # - return a Response object: stops process_exception() chain
99 | # - return a Request object: stops process_exception() chain
100 | pass
101 |
102 | def spider_opened(self, spider):
103 | spider.logger.info('Spider opened: %s' % spider.name)
104 |
--------------------------------------------------------------------------------
/fanPie/tests/test_feedparser.py:
--------------------------------------------------------------------------------
1 | import json
2 | from lxml import etree, builder
3 | import re
4 |
5 | def test_write_xml(output):
6 | with open('/Users/reyshawn/Desktop/rss.xml', 'wb+') as f:
7 | f.write(etree.tostring(output, xml_declaration=True, encoding='UTF-8'))
8 |
9 |
10 | def test_build_xml():
11 | E = builder.ElementMaker(nsmap={
12 | 'atom':'http://www.w3.org/2005/Atom',
13 | 'itunes': "http://www.itunes.com/dtds/podcast-1.0.dtd"
14 | })
15 |
16 | itunes = builder.ElementMaker(namespace='http://www.itunes.com/dtds/podcast-1.0.dtd')
17 |
18 |
19 | def get_item(title, link):
20 | return E.item(
21 | E.title(title),
22 | E.link(link)
23 | )
24 |
25 | rss = E.rss(
26 | E.channel(
27 | E.title('反派影评'),
28 | E.link('https://www.ximalaya.com/album/4127591/'),
29 | E.pubDate('Mon, 27 May 2019 17:10:26 GMT'),
30 | E.language('zh-cn'),
31 | E.description('若批评不自由,则赞美无意义。党同伐异,猛于炮火。'),
32 | itunes.author('波米和他的朋友们'),
33 | itunes.image(href="https://fdfs.xmcdn.com/group48/M01/8F/6E/wKgKlVuJRsmCGvIUAACqlRzgjdM964.jpg"),
34 | get_item(
35 | '157《复仇者联盟4:终局之战》:你和银幕对暗号',
36 | 'https://www.ximalaya.com/47283140/sound/180370476/'
37 | ),
38 | get_item(
39 | '128《影》(2小时版,5.7分):标新立异还是徒有其表?',
40 | 'https://www.ximalaya.com/47283140/sound/127187181/'
41 | )
42 | )
43 | )
44 |
45 | channel = rss.xpath('//channel')[0]
46 | channel.append(get_item(
47 | '126《曼蒂》(6分):跟凯奇左手右手一个慢动作!',
48 | 'https://www.ximalaya.com/47283140/sound/124142172/'
49 | ))
50 |
51 |
52 | rss.set('version', '2.0')
53 | rss.set('encoding', 'UTF-8')
54 | test_write_xml(rss)
55 |
56 |
57 | def test_sort_items(data):
58 | def cmp(i):
59 | return int(i['episode'][:3]), len(i['episode']), int(i['episode'][-1:])
60 |
61 | data.sort(key=cmp, reverse=True)
62 |
63 |
64 | # complete url and duration field
65 | def test_complete_items(data, path):
66 | incomp = {}
67 | for i, item in enumerate(data):
68 | if item['url'] == '❌':
69 | incomp[item['episode']] = i
70 | print(item['episode'])
71 |
72 | root = etree.parse(path)
73 | items = root.xpath('//item')
74 |
75 | for i in items:
76 | title = i.find('title').text
77 | if title[:3] in incomp.keys():
78 | url = i.find('enclosure').attrib['url']
79 | num = incomp[title[:3]]
80 | data[num]['url'] = url
81 |
82 | with open('/Users/reyshawn/Desktop/allurl.json', 'w+') as f:
83 | json.dump(data, f, ensure_ascii=False)
84 |
85 | def test_complete_dur(data, path):
86 | incomp = {}
87 | for i, item in enumerate(data):
88 | if item['duration'] == '❌':
89 | incomp[item['episode']] = i
90 | print(item['episode'])
91 |
92 | root = etree.parse(path)
93 | items = root.xpath('//item')
94 |
95 | for i in items:
96 | title = i.find('title').text
97 | if title[:3] in incomp.keys():
98 | dur = i.find('itunes:duration', namespaces=i.nsmap).text
99 | num = incomp[title[:3]]
100 | data[num]['duration'] = dur
101 |
102 | data[incomp['131 sep: 1']]['duration'] = '00:55:30'
103 | data[incomp['085']]['duration'] = '2:00:00'
104 | data[incomp['065 sep: 1']]['duration'] = '00:58:00'
105 | data[incomp['048 sep: 1']]['duration'] = '01:09:36'
106 |
107 |
108 |
109 | print('------------------------------')
110 | for i, item in enumerate(data):
111 | if item['duration'] == '❌':
112 | incomp[item['episode']] = i
113 | print(item['episode'])
114 |
115 |
116 |
117 | def test_get_duration(data, path):
118 | sect = data[:]
119 | for i, item in enumerate(sect):
120 | t = re.search(r'(?:(节目)?总?时长:)(?P<t>[0-9]{2}:[0-9]{2}:[0-9]{2})', item['shownotes']['shownotes_original'])
121 | l = re.search(r'(?:(节目)?总?时长:)约?(?P<h>[0-9]{1,2})小?时(?P<m>[0-9]{1,2})?分?', item['shownotes']['shownotes_original'])
122 | if t:
123 | print(': ', t['t'])
124 | print('episode: ', item['episode'])
125 | elif l:
126 | m = '0'+ l['m'] if l['m'] and len(l['m']) == 1 else l['m']
127 | x = l['h'] + ':' + m + ':00' if l['m'] else l['h'] + ':00:00'
128 | print(': ', x)
129 | print('episode: ', item['episode'])
130 |
131 |
132 | def test_parse_shownotes(data):
133 | def _format_scoring(s, hosts, item):
134 | patterns = [
135 | r'(《[^》]*?》([\(|(][^\)]*?[\)|)])?)(综合)?(平均)?总?分数?[:|:]约?(?P<score>[0-9\.]*)分?',
136 | r'[(综合)|(平均)|总]分数?[:|:]约?(?P<score>[0-9\.]*)分?'
137 | ]
138 |
139 | if s == '❌':
140 | return s
141 | end_s = re.search(r'[(|(]?音频后期制作', s)
142 | if end_s:
143 | end = s[end_s.span()[0]:]
144 | s = s[:end_s.span()[0]]
145 | else:
146 | end = ''
147 |
148 | if '波米' not in hosts:
149 | hosts.append('波米')
150 |
151 | for pattern in patterns:
152 | ave = re.search(pattern, s)
153 | if ave and ave['score']:
154 | s = re.sub(pattern, '', s)
155 | break
156 |
157 | s = s.replace('&amp;', '&')
158 | s = s.replace('(以下广告,由微信平台自动插入,我们编辑文章时看不到内容,每位读者看到的也并不相同)', '')
159 |
160 | if not ave:
161 | print('exception:', s)
162 |
163 | pos = []
164 | for host in hosts:
165 | if s.find(host) > -1:
166 | pos.append(s.find(host))
167 |
168 | pos.sort()
169 | try:
170 | st = pos[0]
171 | except:
172 | print(item['episode'])
173 | return s
174 | res = []
175 | for i, p in enumerate(pos[1:]):
176 | res.append(s[st:p])
177 | st = p
178 | res.append(s[st:])
179 |
180 | if not ave:
181 | print('score', item['episode'])
182 |
183 | scoring = '\n'.join(res) + '\n' + end + '\n\n'
184 |
185 | return scoring + '平均分: ' + ave['score'] if ave else scoring
186 |
187 | def _format_outline(s):
188 | s = s.split(';')
189 | s = '; \n'.join(s)
190 | s = s.replace('&lt;', '<')
191 | s = s.replace('&gt;', '>')
192 | s = s.replace('&amp;', '&')
193 | s = re.sub(r'(下载完整节目)?(收听节目)?请点击(文末)?\"阅读原文\"按钮。', '', s)
194 | s = s.replace('(以下广告,由微信平台自动插入,我们编辑文章时看不到内容,每位读者看到的也并不相同)', '')
195 |
196 | return s
197 |
198 | def _format_list(l):
199 | res = '本期片目\n\n'
200 | for i, item in enumerate(l):
201 | res += '『' + item['name'] + '』( ' + item['time'] + ' )\n'
202 | return res
203 |
204 | res = []
205 | for i, item in enumerate(data):
206 | hosts = item['hosts']
207 | scoring = _format_scoring(item['shownotes']['film_scoring'], hosts, item)
208 | outline = _format_outline(item['shownotes']['film_outline'])
209 | f_list = _format_list(item['shownotes']['film_list'])
210 | summary = scoring + '\n\n' + outline + '\n\n' + f_list
211 | tmp = {
212 | 'episode': item['episode'],
213 | 'title': item['title'],
214 | 'summary': summary
215 | }
216 | res.append(tmp)
217 |
218 | with open('/Users/reyshawn/Desktop/summary.json', 'w+') as f:
219 | json.dump(res, f, ensure_ascii=False)
220 |
221 |
222 |
223 | if __name__ == "__main__":
224 | #with open('/Users/reyshawn/Desktop/c.json', 'r') as f:
225 | # data = json.load(f)
226 |
227 | # test_sort_items(data)
228 | # test_complete_dur(data, '/Users/reyshawn/Desktop/fanPie.rss')
229 | # test_parse_shownotes(data)
230 |
231 |
--------------------------------------------------------------------------------
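
The duration patterns in test_get_duration rely on named groups (?P<t>…), (?P<h>…) and (?P<m>…) that are read back as t['t'], l['h'] and l['m']. A quick check with an invented string:

    import re

    pattern = r'(?:(节目)?总?时长:)(?P<t>[0-9]{2}:[0-9]{2}:[0-9]{2})'
    m = re.search(pattern, '本期节目总时长:02:35:47')
    print(m['t'])  # 02:35:47
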
/fanPie/feed_parser.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 | import os
4 |
5 | from lxml import etree, builder
6 | from datetime import datetime
7 |
8 | censored = {
9 | '“韩国老司机”': '逆權司機',
10 | '一九八七': '1987:黎明到來的那一天'
11 | }
12 |
13 | class feedParser:
14 | def __init__(self, json):
15 | self.E = builder.ElementMaker(nsmap={
16 | 'atom':'http://www.w3.org/2005/Atom',
17 | 'itunes': 'http://www.itunes.com/dtds/podcast-1.0.dtd',
18 | 'content': 'http://purl.org/rss/1.0/modules/content/'
19 | })
20 | self.itunes = builder.ElementMaker(namespace='http://www.itunes.com/dtds/podcast-1.0.dtd')
21 | self.content = builder.ElementMaker(namespace='http://purl.org/rss/1.0/modules/content/')
22 | self.json = json
23 |
24 | self.build_feed()
25 | self.parse_items()
26 |
27 | def build_feed(self):
28 | self.rss = self.E.rss(
29 | self.E.channel(
30 | self._ele('title'),
31 | self._ele('link'),
32 | self._ele('pubDate'),
33 | self._ele('generator'),
34 | self._ele('language'),
35 | self._ele('description'),
36 | self._itunes_ele('author'),
37 | self.itunes.image(href=self.json['image']),
38 | self.E.webMaster(self.json['email']),
39 | self.E.ttl('60'),
40 | self.E.image(
41 | self.E.url(self.json['image'])
42 | ),
43 | self._itunes_ele('type')
44 | )
45 | )
46 |
47 |
48 | def _ele(self, tag):
49 | return self.E(tag, self.json[tag])
50 |
51 | def _itunes_ele(self, tag):
52 | return self.itunes(tag, self.json[tag])
53 |
54 | def parse_items(self):
55 | channel = self.rss.xpath('//channel')[0]
56 | for i, item in enumerate(self.json['items']):
57 | episode = self.E.item(
58 | self.E.title(item['title']),
59 | self.E.link(item['link']),
60 | self.E.pubDate(item['pubDate']),
61 | self.E.guid(item['guid']),
62 |
63 | self.itunes.episodeType('full'),
64 | self.itunes.image(href=item['image']),
65 | self.E.enclosure(url=item['enclosure'], type="audio/mpeg"),
66 | self.itunes.duration(item['duration']),
67 | self.E.description(item['description']),
68 | self.content.encoded(etree.CDATA(item['description']))
69 | )
70 | channel.append(episode)
71 |
72 | def save(self, path):
73 | with open(path, 'wb+') as f:
74 | f.write(etree.tostring(self.rss, xml_declaration=True, encoding='UTF-8'))
75 |
76 | class jsonParser:
77 | def __init__(self, path, other):
78 | with open(path) as f:
79 | self._items = json.load(f)
80 |
81 | self._sort_items()
82 | self._complete_items(other)
83 | self._parse_shownotes()
84 | new_items = self._build_items()
85 |
86 | self._feed = {
87 | 'title': '反派影评',
88 | 'link': 'https://fanpaiyingping.com',
89 | 'pubDate': format_time(self._items[-1]['pub_date']),
90 | 'generator': 'python',
91 | 'language': 'zh-cn',
92 | 'description': '若批评不自由,则赞美无意义。党同伐异,猛于炮火。',
93 | 'author': '波米和他的朋友们',
94 | 'image': 'https://raw.githubusercontent.com/Reyshawn/FanpieFilmFeed/master/fanPie/assets/939x0w.jpg',
95 | 'name': 'reyshawn',
96 | 'email': 'reshawnchang@gmail.com',
97 | 'type': 'TV & Film',
98 | 'items': new_items
99 | }
100 |
101 | # sort items by episode number
102 | def _sort_items(self):
103 | def cmp(i):
104 | return int(i['episode'][:3]), len(i['episode']), int(i['episode'][-1:])
105 |
106 | self._items.sort(key=cmp, reverse=True)
107 |
108 | # complete items by other rss file, mainly url, duration, image
109 | def _complete_items(self, path):
110 | incomp_url = {}
111 | incomp_dur = {}
112 | for i, item in enumerate(self._items):
113 | if item['url'] == '❌':
114 | incomp_url[item['episode']] = i
115 |
116 | if item['duration'] == '❌':
117 | incomp_dur[item['episode']] = i
118 |
119 | root = etree.parse(path)
120 | items = root.xpath('//item')
121 |
122 | for i in items:
123 | title = i.find('title').text
124 | if title[:3] in incomp_url.keys():
125 | url = i.find('enclosure').attrib['url']
126 | num = incomp_url[title[:3]]
127 | self._items[num]['url'] = url
128 |
129 | if title[:3] in incomp_dur.keys():
130 | dur = i.find('itunes:duration', namespaces=i.nsmap).text
131 | num = incomp_dur[title[:3]]
132 | self._items[num]['duration'] = dur
133 |
134 | self._items[incomp_dur['131 § 1']]['duration'] = '00:55:30'
135 | self._items[incomp_dur['085']]['duration'] = '02:00:00'
136 | self._items[incomp_dur['065 § 1']]['duration'] = '00:58:00'
137 | self._items[incomp_dur['048 § 1']]['duration'] = '01:09:36'
138 |
139 | def _parse_shownotes(self):
140 | def _format_subtitle(s, episode):
141 | if '&' in s or '及' in s:
142 | ss = s.split('&') if '&' in s else s.split('及')
143 | if len(episode) == 3:
144 | s = ss[0]
145 | else:
146 | s = ss[int(episode[-1])] if episode != '139 § 2' else ss[1]
147 |
148 | if not re.search(r'((外延)|(前作)|(回顾)|(盘点)|(电影节))', s):
149 | s = '『' + s + '』'
150 |
151 | return '<p>' + s + '</p>\n\n'  # tags reconstructed: the original markup was stripped when this dump was made
152 |
153 |
154 | def _format_scoring(s, hosts):
155 | patterns = [
156 | r'(《[^》]*?》([\(|(][^\)]*?[\)|)])?)(综合)?(平均)?总?分数?[:|:]约?(?P<score>[0-9\.]*)分?',
157 | r'[(综合)|(平均)|总]分数?[:|:]约?(?P<score>[0-9\.]*)分?'
158 | ]
159 | if s == '❌':
160 | return s
161 | end_s = re.search(r'[(|(]?音频后期制作', s)
162 | if end_s:
163 | end = '<p>' + s[end_s.span()[0]:] + '</p>'
164 | s = s[:end_s.span()[0]]
165 | else:
166 | end = ''
167 |
168 | if '波米' not in hosts:
169 | hosts.append('波米')
170 |
171 | for pattern in patterns:
172 | ave = re.search(pattern, s)
173 | if ave and ave['score']:
174 | s = re.sub(pattern, '', s)
175 | break
176 |
177 | s = s.replace('&amp;', '&')
178 | s = s.replace('(以下广告,由微信平台自动插入,我们编辑文章时看不到内容,每位读者看到的也并不相同)', '')
179 |
180 | if not ave:
181 | print('exception:', s)
182 |
183 | pos = []
184 | for host in hosts:
185 | if s.find(host) > -1:
186 | pos.append(s.find(host))
187 |
188 | pos.sort()
189 | try:
190 | st = pos[0]
191 | except:
192 | # print(item['episode'])
193 | return s
194 | res = []
195 | for i, p in enumerate(pos[1:]):
196 | res.append(s[st:p])
197 | st = p
198 | res.append(s[st:])
199 | res = ['<p>' + i + '</p>' for i in res]
200 |
201 | scoring = '\n'.join(res) + '\n' + end + '\n\n'
202 | return scoring + '<p>平均分: ' + ave['score'] + '</p>' if ave else scoring
203 |
204 | def _format_outline(s):
205 | patterns = [
206 | r'((第?[0-9]小时)?第?[0-9]{1,2}[秒|分]钟?半?([0-9]{1,2}秒)?-((第?[0-9]小时)?第?[0-9]{1,2}分钟?半?)?([0-9]{1,2}秒)?(尾声)?)',
207 | r'([0-9]{2}:[0-9]{2}:[0-9]{2}-([0-9]{2}:[0-9]{2}:[0-9]{2})?(尾声)?)',
208 | r'(((开场)|(结束)|(尾声))歌?曲)',
209 | r'(影片(《[^》]*?》)?([(|\(][^\)]*?[)|\)])?(重要)?信息[^简要介绍])'
210 | ]
211 | for pattern in patterns:
212 | s = re.sub(pattern, r'\n\1', s)
213 |
214 | s = re.sub(r'([^0-9])([1-9]{1}[、)])', r'\1\n\2', s)
215 |
216 | s = s.split('\n')
217 | s = ['<p>' + i + '</p>' for i in s]
218 | s = '\n'.join(s)
219 | s = s.replace('&amp;', '&')
220 | s = re.sub(r'(下载完整节目)?(收听节目)?请点击(文末)?\"阅读原文\"按钮。', '', s)
221 | s = s.replace('(以下广告,由微信平台自动插入,我们编辑文章时看不到内容,每位读者看到的也并不相同)', '')
222 |
223 | return s
224 |
225 | def _format_list(l):
226 | res = '<p>本期片目</p>\n\n'
227 | for i, item in enumerate(l):
228 | res += '<p>- 『' + item['name'] + '』( ' + item['time'] + ' )</p>\n'
229 | return res + '\n'
230 |
231 | for i, item in enumerate(self._items):
232 | hosts = item['hosts']
233 | film = _format_subtitle(item['film'], item['episode'])
234 | scoring = _format_scoring(item['shownotes']['film_scoring'], hosts)
235 | outline = _format_outline(item['shownotes']['film_outline'])
236 | f_list = _format_list(item['shownotes']['film_list'])
237 | summary = film + '\n\n' + scoring + '\n\n\n\n' + outline + '\n\n\n\n' + f_list
238 | summary = summary.replace('❌', '🎬')
239 | item['summary'] = summary
240 |
241 | def _build_items(self):
242 | res = []
243 | def _format_title(s):
244 | filters = [
245 | '【反派影评】',
246 | '【反派影评文字版】',
247 | '【节目】',
r'(\|||) ?长?节目(重发)?'
249 |
250 | ]
251 |
252 | for f in filters:
253 | if re.search(f, s):
254 | s = re.sub(f, '', s)
255 | s = s.strip()
256 | return s
257 |
258 | for i, item in enumerate(self._items):
259 | tmp = {}
260 | tmp['title'] = 'Episode ' + item['episode'] + ' | ' + _format_title(item['title'])
261 | tmp['link'] = item['link']
262 | tmp['guid'] = 'fanpie_' + re.search(r'\/([\_\-a-zA-Z0-9]*)\.mp3', item['url'])[1]
263 | tmp['pubDate'] = format_time(item['pub_date'])
264 | tmp['author'] = ', '.join(item['hosts'])
265 | tmp['enclosure'] = item['url']
266 | tmp['duration'] = item['duration']
267 | tmp['image'] = 'https://raw.githubusercontent.com/Reyshawn/FanpieFilmFeed/master/fanPie/assets/939x0w.jpg'
268 | tmp['description']= item['summary']
269 | res.append(tmp)
270 | return res
271 |
272 | def save(self, path):
273 | with open(path, 'w+') as f:
274 | json.dump(self._feed, f, ensure_ascii=False)
275 |
276 | def feed(self):
277 | return self._feed
278 |
279 |
280 | def format_time(s):
281 | t = datetime.strptime(s, '%Y-%m-%d')
282 | t = t.replace(hour=17)
283 | return t.strftime("%a, %d %b %Y %H:%M:%S +0800")  # %b: RFC 822 pubDate uses the abbreviated month name
284 |
285 |
286 | if __name__ == "__main__":
287 | a = jsonParser('output.json', 'helper.rss')
288 | feed = a.feed()
289 | xml = feedParser(feed)
290 | xml.save('fanPieFilm.rss')
--------------------------------------------------------------------------------
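
A quick check of format_time, which turns the scraped '%Y-%m-%d' date into an RSS pubDate (the input date is a sample):

    print(format_time('2021-07-30'))  # Fri, 30 Jul 2021 17:00:00 +0800
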
/Pipfile.lock:
--------------------------------------------------------------------------------
1 | {
2 | "_meta": {
3 | "hash": {
4 | "sha256": "d97b5386e9b13854069c12203a7d51034c87838fdc39e4b99b422c22a32b56bd"
5 | },
6 | "pipfile-spec": 6,
7 | "requires": {
8 | "python_version": "3.10"
9 | },
10 | "sources": [
11 | {
12 | "name": "pypi",
13 | "url": "https://pypi.org/simple",
14 | "verify_ssl": true
15 | }
16 | ]
17 | },
18 | "default": {
19 | "attrs": {
20 | "hashes": [
21 | "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1",
22 | "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"
23 | ],
24 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
25 | "version": "==21.2.0"
26 | },
27 | "automat": {
28 | "hashes": [
29 | "sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33",
30 | "sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111"
31 | ],
32 | "version": "==20.2.0"
33 | },
34 | "cffi": {
35 | "hashes": [
36 | "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3",
37 | "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2",
38 | "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636",
39 | "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20",
40 | "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728",
41 | "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27",
42 | "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66",
43 | "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443",
44 | "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0",
45 | "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7",
46 | "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39",
47 | "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605",
48 | "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a",
49 | "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37",
50 | "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029",
51 | "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139",
52 | "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc",
53 | "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df",
54 | "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14",
55 | "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880",
56 | "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2",
57 | "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a",
58 | "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e",
59 | "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474",
60 | "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024",
61 | "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8",
62 | "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0",
63 | "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e",
64 | "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a",
65 | "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e",
66 | "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032",
67 | "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6",
68 | "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e",
69 | "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b",
70 | "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e",
71 | "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954",
72 | "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962",
73 | "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c",
74 | "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4",
75 | "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55",
76 | "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962",
77 | "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023",
78 | "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c",
79 | "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6",
80 | "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8",
81 | "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382",
82 | "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7",
83 | "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc",
84 | "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997",
85 | "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"
86 | ],
87 | "version": "==1.15.0"
88 | },
89 | "constantly": {
90 | "hashes": [
91 | "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35",
92 | "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"
93 | ],
94 | "version": "==15.1.0"
95 | },
96 | "cryptography": {
97 | "hashes": [
98 | "sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3",
99 | "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31",
100 | "sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac",
101 | "sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf",
102 | "sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316",
103 | "sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca",
104 | "sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638",
105 | "sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94",
106 | "sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12",
107 | "sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173",
108 | "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b",
109 | "sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a",
110 | "sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f",
111 | "sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2",
112 | "sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9",
113 | "sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46",
114 | "sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903",
115 | "sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3",
116 | "sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1",
117 | "sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee"
118 | ],
119 | "markers": "python_version >= '3.6'",
120 | "version": "==36.0.1"
121 | },
122 | "cssselect": {
123 | "hashes": [
124 | "sha256:f612ee47b749c877ebae5bb77035d8f4202c6ad0f0fc1271b3c18ad6c4468ecf",
125 | "sha256:f95f8dedd925fd8f54edb3d2dfb44c190d9d18512377d3c1e2388d16126879bc"
126 | ],
127 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
128 | "version": "==1.1.0"
129 | },
130 | "h2": {
131 | "hashes": [
132 | "sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5",
133 | "sha256:875f41ebd6f2c44781259005b157faed1a5031df3ae5aa7bcb4628a6c0782f14"
134 | ],
135 | "version": "==3.2.0"
136 | },
137 | "hpack": {
138 | "hashes": [
139 | "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89",
140 | "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"
141 | ],
142 | "version": "==3.0.0"
143 | },
144 | "hyperframe": {
145 | "hashes": [
146 | "sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40",
147 | "sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"
148 | ],
149 | "version": "==5.2.0"
150 | },
151 | "hyperlink": {
152 | "hashes": [
153 | "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b",
154 | "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"
155 | ],
156 | "version": "==21.0.0"
157 | },
158 | "idna": {
159 | "hashes": [
160 | "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff",
161 | "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
162 | ],
163 | "markers": "python_version >= '3.5'",
164 | "version": "==3.3"
165 | },
166 | "incremental": {
167 | "hashes": [
168 | "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57",
169 | "sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321"
170 | ],
171 | "version": "==21.3.0"
172 | },
173 | "itemadapter": {
174 | "hashes": [
175 | "sha256:695809a4e2f42174f0392dd66c2ceb2b2454d3ebbf65a930e5c85910d8d88d8f",
176 | "sha256:f05df8da52619da4b8c7f155d8a15af19083c0c7ad941d8c1de799560ad994ca"
177 | ],
178 | "markers": "python_version >= '3.6'",
179 | "version": "==0.4.0"
180 | },
181 | "itemloaders": {
182 | "hashes": [
183 | "sha256:1277cd8ca3e4c02dcdfbc1bcae9134ad89acfa6041bd15b4561c6290203a0c96",
184 | "sha256:4cb46a0f8915e910c770242ae3b60b1149913ed37162804f1e40e8535d6ec497"
185 | ],
186 | "markers": "python_version >= '3.6'",
187 | "version": "==1.0.4"
188 | },
189 | "jmespath": {
190 | "hashes": [
191 | "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9",
192 | "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"
193 | ],
194 | "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
195 | "version": "==0.10.0"
196 | },
197 | "lxml": {
198 | "hashes": [
199 | "sha256:0607ff0988ad7e173e5ddf7bf55ee65534bd18a5461183c33e8e41a59e89edf4",
200 | "sha256:09b738360af8cb2da275998a8bf79517a71225b0de41ab47339c2beebfff025f",
201 | "sha256:0a5f0e4747f31cff87d1eb32a6000bde1e603107f632ef4666be0dc065889c7a",
202 | "sha256:0b5e96e25e70917b28a5391c2ed3ffc6156513d3db0e1476c5253fcd50f7a944",
203 | "sha256:1104a8d47967a414a436007c52f533e933e5d52574cab407b1e49a4e9b5ddbd1",
204 | "sha256:13dbb5c7e8f3b6a2cf6e10b0948cacb2f4c9eb05029fe31c60592d08ac63180d",
205 | "sha256:2a906c3890da6a63224d551c2967413b8790a6357a80bf6b257c9a7978c2c42d",
206 | "sha256:317bd63870b4d875af3c1be1b19202de34c32623609ec803b81c99193a788c1e",
207 | "sha256:34c22eb8c819d59cec4444d9eebe2e38b95d3dcdafe08965853f8799fd71161d",
208 | "sha256:36b16fecb10246e599f178dd74f313cbdc9f41c56e77d52100d1361eed24f51a",
209 | "sha256:38d9759733aa04fb1697d717bfabbedb21398046bd07734be7cccc3d19ea8675",
210 | "sha256:3e26ad9bc48d610bf6cc76c506b9e5ad9360ed7a945d9be3b5b2c8535a0145e3",
211 | "sha256:41358bfd24425c1673f184d7c26c6ae91943fe51dfecc3603b5e08187b4bcc55",
212 | "sha256:447d5009d6b5447b2f237395d0018901dcc673f7d9f82ba26c1b9f9c3b444b60",
213 | "sha256:44f552e0da3c8ee3c28e2eb82b0b784200631687fc6a71277ea8ab0828780e7d",
214 | "sha256:490712b91c65988012e866c411a40cc65b595929ececf75eeb4c79fcc3bc80a6",
215 | "sha256:4c093c571bc3da9ebcd484e001ba18b8452903cd428c0bc926d9b0141bcb710e",
216 | "sha256:50d3dba341f1e583265c1a808e897b4159208d814ab07530202b6036a4d86da5",
217 | "sha256:534e946bce61fd162af02bad7bfd2daec1521b71d27238869c23a672146c34a5",
218 | "sha256:585ea241ee4961dc18a95e2f5581dbc26285fcf330e007459688096f76be8c42",
219 | "sha256:59e7da839a1238807226f7143c68a479dee09244d1b3cf8c134f2fce777d12d0",
220 | "sha256:5b0f782f0e03555c55e37d93d7a57454efe7495dab33ba0ccd2dbe25fc50f05d",
221 | "sha256:5bee1b0cbfdb87686a7fb0e46f1d8bd34d52d6932c0723a86de1cc532b1aa489",
222 | "sha256:610807cea990fd545b1559466971649e69302c8a9472cefe1d6d48a1dee97440",
223 | "sha256:6308062534323f0d3edb4e702a0e26a76ca9e0e23ff99be5d82750772df32a9e",
224 | "sha256:67fa5f028e8a01e1d7944a9fb616d1d0510d5d38b0c41708310bd1bc45ae89f6",
225 | "sha256:6a2ab9d089324d77bb81745b01f4aeffe4094306d939e92ba5e71e9a6b99b71e",
226 | "sha256:6c198bfc169419c09b85ab10cb0f572744e686f40d1e7f4ed09061284fc1303f",
227 | "sha256:6e56521538f19c4a6690f439fefed551f0b296bd785adc67c1777c348beb943d",
228 | "sha256:6ec829058785d028f467be70cd195cd0aaf1a763e4d09822584ede8c9eaa4b03",
229 | "sha256:718d7208b9c2d86aaf0294d9381a6acb0158b5ff0f3515902751404e318e02c9",
230 | "sha256:735e3b4ce9c0616e85f302f109bdc6e425ba1670a73f962c9f6b98a6d51b77c9",
231 | "sha256:772057fba283c095db8c8ecde4634717a35c47061d24f889468dc67190327bcd",
232 | "sha256:7b5e2acefd33c259c4a2e157119c4373c8773cf6793e225006a1649672ab47a6",
233 | "sha256:82d16a64236970cb93c8d63ad18c5b9f138a704331e4b916b2737ddfad14e0c4",
234 | "sha256:87c1b0496e8c87ec9db5383e30042357b4839b46c2d556abd49ec770ce2ad868",
235 | "sha256:8e54945dd2eeb50925500957c7c579df3cd07c29db7810b83cf30495d79af267",
236 | "sha256:9393a05b126a7e187f3e38758255e0edf948a65b22c377414002d488221fdaa2",
237 | "sha256:9fbc0dee7ff5f15c4428775e6fa3ed20003140560ffa22b88326669d53b3c0f4",
238 | "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24",
239 | "sha256:a1bbc4efa99ed1310b5009ce7f3a1784698082ed2c1ef3895332f5df9b3b92c2",
240 | "sha256:a555e06566c6dc167fbcd0ad507ff05fd9328502aefc963cb0a0547cfe7f00db",
241 | "sha256:a58d78653ae422df6837dd4ca0036610b8cb4962b5cfdbd337b7b24de9e5f98a",
242 | "sha256:a5edc58d631170de90e50adc2cc0248083541affef82f8cd93bea458e4d96db8",
243 | "sha256:a5f623aeaa24f71fce3177d7fee875371345eb9102b355b882243e33e04b7175",
244 | "sha256:adaab25be351fff0d8a691c4f09153647804d09a87a4e4ea2c3f9fe9e8651851",
245 | "sha256:ade74f5e3a0fd17df5782896ddca7ddb998845a5f7cd4b0be771e1ffc3b9aa5b",
246 | "sha256:b1d381f58fcc3e63fcc0ea4f0a38335163883267f77e4c6e22d7a30877218a0e",
247 | "sha256:bf6005708fc2e2c89a083f258b97709559a95f9a7a03e59f805dd23c93bc3986",
248 | "sha256:d546431636edb1d6a608b348dd58cc9841b81f4116745857b6cb9f8dadb2725f",
249 | "sha256:d5618d49de6ba63fe4510bdada62d06a8acfca0b4b5c904956c777d28382b419",
250 | "sha256:dfd0d464f3d86a1460683cd742306d1138b4e99b79094f4e07e1ca85ee267fe7",
251 | "sha256:e18281a7d80d76b66a9f9e68a98cf7e1d153182772400d9a9ce855264d7d0ce7",
252 | "sha256:e410cf3a2272d0a85526d700782a2fa92c1e304fdcc519ba74ac80b8297adf36",
253 | "sha256:e662c6266e3a275bdcb6bb049edc7cd77d0b0f7e119a53101d367c841afc66dc",
254 | "sha256:ec9027d0beb785a35aa9951d14e06d48cfbf876d8ff67519403a2522b181943b",
255 | "sha256:eed394099a7792834f0cb4a8f615319152b9d801444c1c9e1b1a2c36d2239f9e",
256 | "sha256:f76dbe44e31abf516114f6347a46fa4e7c2e8bceaa4b6f7ee3a0a03c8eba3c17",
257 | "sha256:fc15874816b9320581133ddc2096b644582ab870cf6a6ed63684433e7af4b0d3",
258 | "sha256:fc9fb11b65e7bc49f7f75aaba1b700f7181d95d4e151cf2f24d51bfd14410b77"
259 | ],
260 | "markers": "platform_python_implementation == 'CPython'",
261 | "version": "==4.7.1"
262 | },
263 | "parsel": {
264 | "hashes": [
265 | "sha256:70efef0b651a996cceebc69e55a85eb2233be0890959203ba7c3a03c72725c79",
266 | "sha256:9e1fa8db1c0b4a878bf34b35c043d89c9d1cbebc23b4d34dbc3c0ec33f2e087d"
267 | ],
268 | "version": "==1.6.0"
269 | },
270 | "priority": {
271 | "hashes": [
272 | "sha256:6bc1961a6d7fcacbfc337769f1a382c8e746566aaa365e78047abe9f66b2ffbe",
273 | "sha256:be4fcb94b5e37cdeb40af5533afe6dd603bd665fe9c8b3052610fc1001d5d1eb"
274 | ],
275 | "version": "==1.3.0"
276 | },
277 | "protego": {
278 | "hashes": [
279 | "sha256:a682771bc7b51b2ff41466460896c1a5a653f9a1e71639ef365a72e66d8734b4"
280 | ],
281 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
282 | "version": "==0.1.16"
283 | },
284 | "pyasn1": {
285 | "hashes": [
286 | "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359",
287 | "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576",
288 | "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf",
289 | "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7",
290 | "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d",
291 | "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00",
292 | "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8",
293 | "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86",
294 | "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12",
295 | "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776",
296 | "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba",
297 | "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2",
298 | "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"
299 | ],
300 | "version": "==0.4.8"
301 | },
302 | "pyasn1-modules": {
303 | "hashes": [
304 | "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8",
305 | "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199",
306 | "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811",
307 | "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed",
308 | "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4",
309 | "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e",
310 | "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74",
311 | "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb",
312 | "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45",
313 | "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd",
314 | "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0",
315 | "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d",
316 | "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"
317 | ],
318 | "version": "==0.2.8"
319 | },
320 | "pycparser": {
321 | "hashes": [
322 | "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
323 | "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
324 | ],
325 | "version": "==2.21"
326 | },
327 | "pydispatcher": {
328 | "hashes": [
329 | "sha256:5570069e1b1769af1fe481de6dd1d3a388492acddd2cdad7a3bde145615d5caf",
330 | "sha256:5be4a8be12805ef7d712dd9a93284fb8bc53f309867e573f653a72e5fd10e433"
331 | ],
332 | "markers": "platform_python_implementation == 'CPython'",
333 | "version": "==2.0.5"
334 | },
335 | "pyopenssl": {
336 | "hashes": [
337 | "sha256:5e2d8c5e46d0d865ae933bef5230090bdaf5506281e9eec60fa250ee80600cb3",
338 | "sha256:8935bd4920ab9abfebb07c41a4f58296407ed77f04bd1a92914044b848ba1ed6"
339 | ],
340 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
341 | "version": "==21.0.0"
342 | },
343 | "queuelib": {
344 | "hashes": [
345 | "sha256:4b207267f2642a8699a1f806045c56eb7ad1a85a10c0e249884580d139c2fcd2",
346 | "sha256:4b96d48f650a814c6fb2fd11b968f9c46178b683aad96d68f930fe13a8574d19"
347 | ],
348 | "markers": "python_version >= '3.5'",
349 | "version": "==1.6.2"
350 | },
351 | "scrapy": {
352 | "hashes": [
353 | "sha256:13af6032476ab4256158220e530411290b3b934dd602bb6dacacbf6d16141f49",
354 | "sha256:1a9a36970004950ee3c519a14c4db945f9d9a63fecb3d593dddcda477331dde9"
355 | ],
356 | "index": "pypi",
357 | "version": "==2.5.1"
358 | },
359 | "service-identity": {
360 | "hashes": [
361 | "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34",
362 | "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"
363 | ],
364 | "version": "==21.1.0"
365 | },
366 | "setuptools": {
367 | "hashes": [
368 | "sha256:8244876a37456ccbcbe1247b4cba1b015f4e42dfebfadd6a5488bfc12060f21b",
369 | "sha256:dae4d7933ca671d51fa44c10b4ef8165319d9be3aa41807dd4c73f1c1ba81dc3"
370 | ],
371 | "markers": "python_version >= '3.7'",
372 | "version": "==60.0.4"
373 | },
374 | "six": {
375 | "hashes": [
376 | "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
377 | "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
378 | ],
379 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
380 | "version": "==1.16.0"
381 | },
382 | "twisted": {
383 | "extras": [
384 | "http2"
385 | ],
386 | "hashes": [
387 | "sha256:13c1d1d2421ae556d91e81e66cf0d4f4e4e1e4a36a0486933bee4305c6a4fb9b",
388 | "sha256:2cd652542463277378b0d349f47c62f20d9306e57d1247baabd6d1d38a109006"
389 | ],
390 | "markers": "python_full_version >= '3.6.7'",
391 | "version": "==21.7.0"
392 | },
393 | "typing-extensions": {
394 | "hashes": [
395 | "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e",
396 | "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"
397 | ],
398 | "markers": "python_version >= '3.6'",
399 | "version": "==4.0.1"
400 | },
401 | "w3lib": {
402 | "hashes": [
403 | "sha256:0161d55537063e00d95a241663ede3395c4c6d7b777972ba2fd58bbab2001e53",
404 | "sha256:0ad6d0203157d61149fd45aaed2e24f53902989c32fc1dccc2e2bfba371560df"
405 | ],
406 | "version": "==1.22.0"
407 | },
408 | "zope.interface": {
409 | "hashes": [
410 | "sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192",
411 | "sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702",
412 | "sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09",
413 | "sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4",
414 | "sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a",
415 | "sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3",
416 | "sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf",
417 | "sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c",
418 | "sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d",
419 | "sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78",
420 | "sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83",
421 | "sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531",
422 | "sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46",
423 | "sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021",
424 | "sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94",
425 | "sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc",
426 | "sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63",
427 | "sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54",
428 | "sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117",
429 | "sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25",
430 | "sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05",
431 | "sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e",
432 | "sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1",
433 | "sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004",
434 | "sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2",
435 | "sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e",
436 | "sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f",
437 | "sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f",
438 | "sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120",
439 | "sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f",
440 | "sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1",
441 | "sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9",
442 | "sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e",
443 | "sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7",
444 | "sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8",
445 | "sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b",
446 | "sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155",
447 | "sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7",
448 | "sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c",
449 | "sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325",
450 | "sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d",
451 | "sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb",
452 | "sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e",
453 | "sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959",
454 | "sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7",
455 | "sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920",
456 | "sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e",
457 | "sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48",
458 | "sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8",
459 | "sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4",
460 | "sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263"
461 | ],
462 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
463 | "version": "==5.4.0"
464 | }
465 | },
466 | "develop": {}
467 | }
468 |
--------------------------------------------------------------------------------