├── .gitignore
├── CryptoTracker2-0.alfredworkflow
├── LICENSE
├── README.md
├── screenshot_no_arg.png
├── screenshot_with_arg.png
└── src
├── 4FD48167-2143-421F-AAFE-395A6C9E511F.png
├── crypto-tracker.py
├── icon.png
├── icon
├── AlertStopIcon.icns
├── BookmarkIcon.icns
├── bch.png
├── btc.png
├── eth.png
└── ltc.png
├── info.plist
├── lib
├── bin
│ └── normalizer
├── certifi-2021.10.8.dist-info
│ ├── INSTALLER
│ ├── LICENSE
│ ├── METADATA
│ ├── RECORD
│ ├── WHEEL
│ └── top_level.txt
├── certifi
│ ├── __init__.py
│ ├── __main__.py
│ ├── cacert.pem
│ └── core.py
├── charset_normalizer-2.0.12.dist-info
│ ├── INSTALLER
│ ├── LICENSE
│ ├── METADATA
│ ├── RECORD
│ ├── WHEEL
│ ├── entry_points.txt
│ └── top_level.txt
├── charset_normalizer
│ ├── __init__.py
│ ├── api.py
│ ├── assets
│ │ └── __init__.py
│ ├── cd.py
│ ├── cli
│ │ ├── __init__.py
│ │ └── normalizer.py
│ ├── constant.py
│ ├── legacy.py
│ ├── md.py
│ ├── models.py
│ ├── py.typed
│ ├── utils.py
│ └── version.py
├── idna-3.3.dist-info
│ ├── INSTALLER
│ ├── LICENSE.md
│ ├── METADATA
│ ├── RECORD
│ ├── WHEEL
│ └── top_level.txt
├── idna
│ ├── __init__.py
│ ├── codec.py
│ ├── compat.py
│ ├── core.py
│ ├── idnadata.py
│ ├── intranges.py
│ ├── package_data.py
│ ├── py.typed
│ └── uts46data.py
├── requests-2.27.1.dist-info
│ ├── INSTALLER
│ ├── LICENSE
│ ├── METADATA
│ ├── RECORD
│ ├── REQUESTED
│ ├── WHEEL
│ └── top_level.txt
├── requests
│ ├── __init__.py
│ ├── __version__.py
│ ├── _internal_utils.py
│ ├── adapters.py
│ ├── api.py
│ ├── auth.py
│ ├── certs.py
│ ├── compat.py
│ ├── cookies.py
│ ├── exceptions.py
│ ├── help.py
│ ├── hooks.py
│ ├── models.py
│ ├── packages.py
│ ├── sessions.py
│ ├── status_codes.py
│ ├── structures.py
│ └── utils.py
├── urllib3-1.26.9.dist-info
│ ├── INSTALLER
│ ├── LICENSE.txt
│ ├── METADATA
│ ├── RECORD
│ ├── WHEEL
│ └── top_level.txt
└── urllib3
│ ├── __init__.py
│ ├── _collections.py
│ ├── _version.py
│ ├── connection.py
│ ├── connectionpool.py
│ ├── contrib
│ ├── __init__.py
│ ├── _appengine_environ.py
│ ├── _securetransport
│ │ ├── __init__.py
│ │ ├── bindings.py
│ │ └── low_level.py
│ ├── appengine.py
│ ├── ntlmpool.py
│ ├── pyopenssl.py
│ ├── securetransport.py
│ └── socks.py
│ ├── exceptions.py
│ ├── fields.py
│ ├── filepost.py
│ ├── packages
│ ├── __init__.py
│ ├── backports
│ │ ├── __init__.py
│ │ └── makefile.py
│ └── six.py
│ ├── poolmanager.py
│ ├── request.py
│ ├── response.py
│ └── util
│ ├── __init__.py
│ ├── connection.py
│ ├── proxy.py
│ ├── queue.py
│ ├── request.py
│ ├── response.py
│ ├── retry.py
│ ├── ssl_.py
│ ├── ssl_match_hostname.py
│ ├── ssltransport.py
│ ├── timeout.py
│ ├── url.py
│ └── wait.py
└── requirements.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | *.pyc
3 | __pycache__
4 |
--------------------------------------------------------------------------------
/CryptoTracker2-0.alfredworkflow:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/CryptoTracker2-0.alfredworkflow
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 Fabio Niephaus
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # CryptoCurrency Tracker for Alfred
2 | Quickly check cryptocurrency prices with [Alfred](https://www.alfredapp.com/).
3 |
4 | ## March 2022
5 | Updated to python 3
6 |
7 |
8 |
9 | 
11 |
12 |
13 | 
14 | Use arguments to check many other cryptocurrency prices.
15 | 
16 |
17 |
18 |
19 |
20 |
21 | # Features
22 | * Quickly check the main cryptocurrency ticker price using CryptoCompare (BTC, ETH, LTC, BCH).
23 | * Optionally provide an argument to get a quote for another currency symbol.
24 | * Quotes are all USD currency pair.
25 |
26 | # Usage
27 | ## Packal
28 | The package is hosted on [Packal](http://www.packal.org/workflow/cryptocurrency-price-tracker).
29 |
30 | ## Compiled Package
31 | The compiled package is committed to the repo. This file can be opened to import the workflow into Alfred.
32 |
33 | ## Build from Source
34 | Use the makefile to build the workflow from source. The resulting workflow file can be imported into Alfred.
35 |
36 | # Contributing
37 | Pull requests welcome!
38 |
39 | # Credits
40 | This workflow uses [alfred-workflow](https://github.com/deanishe/alfred-workflow)
--------------------------------------------------------------------------------
/screenshot_no_arg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/screenshot_no_arg.png
--------------------------------------------------------------------------------
/screenshot_with_arg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/screenshot_with_arg.png
--------------------------------------------------------------------------------
/src/4FD48167-2143-421F-AAFE-395A6C9E511F.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/4FD48167-2143-421F-AAFE-395A6C9E511F.png
--------------------------------------------------------------------------------
/src/crypto-tracker.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | # giovanni from Rahul Sethuram
4 | # Clear ☀️ 🌡️+32°F (feels +24°F, 37%) 🌬️↓12mph 🌑 Wed Mar 30 05:07:37 2022
5 |
6 |
7 | import sys
8 | import requests
9 | import json
10 |
11 |
def log(s, *args):
    """Write a diagnostic message to stderr.

    When *args are supplied, ``s`` is treated as a %-format string and
    interpolated first; otherwise it is printed verbatim.
    """
    message = s % args if args else s
    print(message, file=sys.stderr)
16 |
17 |
def format_strings_from_quote(ticker, quote_data):
    """Build Alfred title/subtitle strings from a CryptoCompare response.

    ``quote_data`` is the parsed JSON of a ``pricemultifull`` call; the
    USD leg of the requested symbol is read from its ``RAW`` section.
    Raises KeyError when the symbol is absent from the response.
    """
    symbol = ticker.upper()
    usd = quote_data['RAW'][symbol]['USD']
    money = '{:,.2f}'.format  # thousands separator, two decimals
    return {
        'title': '{}: ${} ({}%)'.format(
            symbol, money(usd['PRICE']), money(usd['CHANGEPCT24HOUR'])),
        'subtitle': '24hr high: ${} | 24hr low: ${}'.format(
            money(usd['HIGH24HOUR']), money(usd['LOW24HOUR'])),
    }
28 |
29 |
def main():
    """Alfred script-filter entry point.

    With a command-line argument, fetches a quote for that symbol and
    emits one Alfred item (or an error item when the symbol is unknown).
    Without an argument, emits items for the default tickers
    BTC/ETH/LTC/BCH. Output is the Alfred script-filter JSON on stdout.
    """
    query = sys.argv[1] if len(sys.argv) > 1 else None

    result = {"items": []}

    if query:
        url = ('https://min-api.cryptocompare.com/data/pricemultifull?fsyms='
               + query.upper() + '&tsyms=USD')
        resp = requests.get(url)
        myData = resp.json()

        try:
            formatted = format_strings_from_quote(query, myData)
            result["items"].append({
                "title": formatted['title'],
                'subtitle': formatted['subtitle'],
                'valid': True,
                "icon": {
                    "path": 'icon/BookmarkIcon.icns'
                },
                'arg': 'https://www.cryptocompare.com/coins/' + query + '/overview/USD'
            })
        except (KeyError, TypeError):
            # An unknown symbol makes CryptoCompare return an error payload
            # with no 'RAW' section, so the lookup above raises. Narrowed
            # from a bare `except:` so genuine bugs are no longer hidden.
            result["items"].append({
                "title": 'Couldn\'t find a quote for that symbol 😞',
                'subtitle': 'Please try again.',
                "icon": {
                    "path": 'icon/AlertStopIcon.icns'
                },
            })
    else:
        url = 'https://min-api.cryptocompare.com/data/pricemultifull?fsyms=BTC,ETH,LTC,BCH&tsyms=USD'
        resp = requests.get(url)
        myData = resp.json()

        # One item per default ticker; icon files are named after the
        # lower-cased symbol (icon/btc.png, icon/eth.png, ...).
        for ticker in ('BTC', 'ETH', 'LTC', 'BCH'):
            formatted = format_strings_from_quote(ticker, myData)
            result["items"].append({
                "title": formatted['title'],
                'subtitle': formatted['subtitle'],
                'valid': True,
                "icon": {
                    "path": 'icon/' + ticker.lower() + '.png'
                },
                'arg': 'https://www.cryptocompare.com/'
            })

    print(json.dumps(result))


if __name__ == "__main__":
    main()
138 |
--------------------------------------------------------------------------------
/src/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/icon.png
--------------------------------------------------------------------------------
/src/icon/AlertStopIcon.icns:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/icon/AlertStopIcon.icns
--------------------------------------------------------------------------------
/src/icon/BookmarkIcon.icns:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/icon/BookmarkIcon.icns
--------------------------------------------------------------------------------
/src/icon/bch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/icon/bch.png
--------------------------------------------------------------------------------
/src/icon/btc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/icon/btc.png
--------------------------------------------------------------------------------
/src/icon/eth.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/icon/eth.png
--------------------------------------------------------------------------------
/src/icon/ltc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/icon/ltc.png
--------------------------------------------------------------------------------
/src/info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | bundleid
6 | com.cryprotracker
7 | connections
8 |
9 | CF35A24A-ED70-4124-9E31-D356993403FC
10 |
11 |
12 | destinationuid
13 | 203CE1F8-438D-407D-BB86-E46160867531
14 | modifiers
15 | 0
16 | modifiersubtext
17 |
18 | vitoclose
19 |
20 |
21 |
22 |
23 | createdby
24 | giovanni from Rahul Sethuram
25 | description
26 | Quickly get price information for cryptocurrencies.
27 | disabled
28 |
29 | name
30 | Crypto Tracker
31 | objects
32 |
33 |
34 | config
35 |
36 | browser
37 |
38 | spaces
39 |
40 | url
41 | {query}
42 | utf8
43 |
44 |
45 | type
46 | alfred.workflow.action.openurl
47 | uid
48 | 203CE1F8-438D-407D-BB86-E46160867531
49 | version
50 | 1
51 |
52 |
53 | config
54 |
55 | alfredfiltersresults
56 |
57 | alfredfiltersresultsmatchmode
58 | 0
59 | argumenttreatemptyqueryasnil
60 |
61 | argumenttrimmode
62 | 0
63 | argumenttype
64 | 1
65 | escaping
66 | 102
67 | keyword
68 | crypto
69 | queuedelaycustom
70 | 3
71 | queuedelayimmediatelyinitially
72 |
73 | queuedelaymode
74 | 1
75 | queuemode
76 | 1
77 | runningsubtext
78 | Retrieving quotes...
79 | script
80 | export PATH=/opt/homebrew/bin:/usr/local/bin:$PATH
81 | export PYTHONPATH="$PWD/lib"
82 | python3 crypto-tracker.py "$1"
83 | scriptargtype
84 | 1
85 | scriptfile
86 |
87 | subtext
88 | Enter a ticker or view main tickers
89 | title
90 | Track CryptoCurrency Prices
91 | type
92 | 0
93 | withspace
94 |
95 |
96 | type
97 | alfred.workflow.input.scriptfilter
98 | uid
99 | CF35A24A-ED70-4124-9E31-D356993403FC
100 | version
101 | 3
102 |
103 |
104 | readme
105 | Migrated to Python 3 from https://github.com/rhlsthrm/alfred-crypto-tracker
106 | uidata
107 |
108 | 203CE1F8-438D-407D-BB86-E46160867531
109 |
110 | xpos
111 | 400
112 | ypos
113 | 360
114 |
115 | CF35A24A-ED70-4124-9E31-D356993403FC
116 |
117 | xpos
118 | 165
119 | ypos
120 | 360
121 |
122 |
123 | variablesdontexport
124 |
125 | version
126 | 2.0
127 | webaddress
128 | github.com/rhlsthrm/alfred-crypto-tracker
129 |
130 |
131 |
--------------------------------------------------------------------------------
/src/lib/bin/normalizer:
--------------------------------------------------------------------------------
#!/usr/local/opt/python@3.9/bin/python3.9
# -*- coding: utf-8 -*-
# Vendored pip-generated console-script shim for the `normalizer` command
# of charset_normalizer. Do not edit by hand; regenerated on install.
import re
import sys
from charset_normalizer.cli.normalizer import cli_detect
if __name__ == '__main__':
    # Strip a trailing "-script.pyw" / ".exe" from argv[0] so argparse
    # help text shows the bare command name on every platform.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cli_detect())
9 |
--------------------------------------------------------------------------------
/src/lib/certifi-2021.10.8.dist-info/INSTALLER:
--------------------------------------------------------------------------------
1 | pip
2 |
--------------------------------------------------------------------------------
/src/lib/certifi-2021.10.8.dist-info/LICENSE:
--------------------------------------------------------------------------------
1 | This package contains a modified version of ca-bundle.crt:
2 |
3 | ca-bundle.crt -- Bundle of CA Root Certificates
4 |
5 | Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011#
6 | This is a bundle of X.509 certificates of public Certificate Authorities
7 | (CA). These were automatically extracted from Mozilla's root certificates
8 | file (certdata.txt). This file can be found in the mozilla source tree:
9 | http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1#
10 | It contains the certificates in PEM format and therefore
11 | can be directly used with curl / libcurl / php_curl, or with
12 | an Apache+mod_ssl webserver for SSL client authentication.
13 | Just configure this file as the SSLCACertificateFile.#
14 |
15 | ***** BEGIN LICENSE BLOCK *****
16 | This Source Code Form is subject to the terms of the Mozilla Public License,
17 | v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
18 | one at http://mozilla.org/MPL/2.0/.
19 |
20 | ***** END LICENSE BLOCK *****
21 | @(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
22 |
--------------------------------------------------------------------------------
/src/lib/certifi-2021.10.8.dist-info/METADATA:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: certifi
3 | Version: 2021.10.8
4 | Summary: Python package for providing Mozilla's CA Bundle.
5 | Home-page: https://certifiio.readthedocs.io/en/latest/
6 | Author: Kenneth Reitz
7 | Author-email: me@kennethreitz.com
8 | License: MPL-2.0
9 | Project-URL: Documentation, https://certifiio.readthedocs.io/en/latest/
10 | Project-URL: Source, https://github.com/certifi/python-certifi
11 | Platform: UNKNOWN
12 | Classifier: Development Status :: 5 - Production/Stable
13 | Classifier: Intended Audience :: Developers
14 | Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
15 | Classifier: Natural Language :: English
16 | Classifier: Programming Language :: Python
17 | Classifier: Programming Language :: Python :: 3
18 | Classifier: Programming Language :: Python :: 3.3
19 | Classifier: Programming Language :: Python :: 3.4
20 | Classifier: Programming Language :: Python :: 3.5
21 | Classifier: Programming Language :: Python :: 3.6
22 | Classifier: Programming Language :: Python :: 3.7
23 | Classifier: Programming Language :: Python :: 3.8
24 | Classifier: Programming Language :: Python :: 3.9
25 |
26 | Certifi: Python SSL Certificates
27 | ================================
28 |
29 | `Certifi`_ provides Mozilla's carefully curated collection of Root Certificates for
30 | validating the trustworthiness of SSL certificates while verifying the identity
31 | of TLS hosts. It has been extracted from the `Requests`_ project.
32 |
33 | Installation
34 | ------------
35 |
36 | ``certifi`` is available on PyPI. Simply install it with ``pip``::
37 |
38 | $ pip install certifi
39 |
40 | Usage
41 | -----
42 |
43 | To reference the installed certificate authority (CA) bundle, you can use the
44 | built-in function::
45 |
46 | >>> import certifi
47 |
48 | >>> certifi.where()
49 | '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
50 |
51 | Or from the command line::
52 |
53 | $ python -m certifi
54 | /usr/local/lib/python3.7/site-packages/certifi/cacert.pem
55 |
56 | Enjoy!
57 |
58 | 1024-bit Root Certificates
59 | ~~~~~~~~~~~~~~~~~~~~~~~~~~
60 |
61 | Browsers and certificate authorities have concluded that 1024-bit keys are
62 | unacceptably weak for certificates, particularly root certificates. For this
63 | reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
64 | bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
65 | certificate from the same CA. Because Mozilla removed these certificates from
66 | its bundle, ``certifi`` removed them as well.
67 |
68 | In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
69 | to intentionally re-add the 1024-bit roots back into your bundle. This was not
70 | recommended in production and therefore was removed at the end of 2018.
71 |
72 | .. _`Certifi`: https://certifiio.readthedocs.io/en/latest/
73 | .. _`Requests`: https://requests.readthedocs.io/en/master/
74 |
75 | Addition/Removal of Certificates
76 | --------------------------------
77 |
78 | Certifi does not support any addition/removal or other modification of the
79 | CA trust store content. This project is intended to provide a reliable and
80 | highly portable root of trust to python deployments. Look to upstream projects
81 | for methods to use alternate trust.
82 |
83 |
84 |
--------------------------------------------------------------------------------
/src/lib/certifi-2021.10.8.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | certifi-2021.10.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2 | certifi-2021.10.8.dist-info/LICENSE,sha256=vp2C82ES-Hp_HXTs1Ih-FGe7roh4qEAEoAEXseR1o-I,1049
3 | certifi-2021.10.8.dist-info/METADATA,sha256=iB_zbT1uX_8_NC7iGv0YEB-9b3idhQwHrFTSq8R1kD8,2994
4 | certifi-2021.10.8.dist-info/RECORD,,
5 | certifi-2021.10.8.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
6 | certifi-2021.10.8.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
7 | certifi/__init__.py,sha256=xWdRgntT3j1V95zkRipGOg_A1UfEju2FcpujhysZLRI,62
8 | certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
9 | certifi/__pycache__/__init__.cpython-39.pyc,,
10 | certifi/__pycache__/__main__.cpython-39.pyc,,
11 | certifi/__pycache__/core.cpython-39.pyc,,
12 | certifi/cacert.pem,sha256=-og4Keu4zSpgL5shwfhd4kz0eUnVILzrGCi0zRy2kGw,265969
13 | certifi/core.py,sha256=V0uyxKOYdz6ulDSusclrLmjbPgOXsD0BnEf0SQ7OnoE,2303
14 |
--------------------------------------------------------------------------------
/src/lib/certifi-2021.10.8.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.35.1)
3 | Root-Is-Purelib: true
4 | Tag: py2-none-any
5 | Tag: py3-none-any
6 |
7 |
--------------------------------------------------------------------------------
/src/lib/certifi-2021.10.8.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | certifi
2 |
--------------------------------------------------------------------------------
/src/lib/certifi/__init__.py:
--------------------------------------------------------------------------------
# Vendored certifi package: re-export the public API (where() returns the
# path to the bundled cacert.pem; contents() returns its text).
from .core import contents, where

__version__ = "2021.10.08"
4 |
--------------------------------------------------------------------------------
/src/lib/certifi/__main__.py:
--------------------------------------------------------------------------------
# Vendored certifi CLI: `python -m certifi` prints the path of the bundled
# CA certificate file; with -c/--contents it prints the PEM data instead.
import argparse

from certifi import contents, where

parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()

if args.contents:
    print(contents())
else:
    print(where())
13 |
--------------------------------------------------------------------------------
/src/lib/certifi/core.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

"""
certifi.py
~~~~~~~~~~

This module returns the installation location of cacert.pem or its contents.
"""
import os

try:
    from importlib.resources import path as get_path, read_text

    # Module-level cache so the (possibly zip-extracted) bundle path is
    # resolved at most once per process.
    _CACERT_CTX = None
    _CACERT_PATH = None

    def where():
        """Return the filesystem path of the bundled cacert.pem."""
        # This is slightly terrible, but we want to delay extracting the file
        # in cases where we're inside of a zipimport situation until someone
        # actually calls where(), but we don't want to re-extract the file
        # on every call of where(), so we'll do it once then store it in a
        # global variable.
        global _CACERT_CTX
        global _CACERT_PATH
        if _CACERT_PATH is None:
            # This is slightly janky, the importlib.resources API wants you to
            # manage the cleanup of this file, so it doesn't actually return a
            # path, it returns a context manager that will give you the path
            # when you enter it and will do any cleanup when you leave it. In
            # the common case of not needing a temporary file, it will just
            # return the file system location and the __exit__() is a no-op.
            #
            # We also have to hold onto the actual context manager, because
            # it will do the cleanup whenever it gets garbage collected, so
            # we will also store that at the global level as well.
            _CACERT_CTX = get_path("certifi", "cacert.pem")
            _CACERT_PATH = str(_CACERT_CTX.__enter__())

        return _CACERT_PATH


except ImportError:
    # This fallback will work for Python versions prior to 3.7 that lack the
    # importlib.resources module but relies on the existing `where` function
    # so won't address issues with environments like PyOxidizer that don't set
    # __file__ on modules.
    def read_text(_module, _path, encoding="ascii"):
        """Fallback read_text: read the bundle located by where()."""
        with open(where(), "r", encoding=encoding) as data:
            return data.read()

    # If we don't have importlib.resources, then we will just do the old logic
    # of assuming we're on the filesystem and munge the path directly.
    def where():
        """Return the path of cacert.pem next to this module."""
        f = os.path.dirname(__file__)

        return os.path.join(f, "cacert.pem")


def contents():
    """Return the text of the bundled cacert.pem (ASCII PEM data)."""
    return read_text("certifi", "cacert.pem", encoding="ascii")
--------------------------------------------------------------------------------
/src/lib/charset_normalizer-2.0.12.dist-info/INSTALLER:
--------------------------------------------------------------------------------
1 | pip
2 |
--------------------------------------------------------------------------------
/src/lib/charset_normalizer-2.0.12.dist-info/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 TAHRI Ahmed R.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/src/lib/charset_normalizer-2.0.12.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | ../../bin/normalizer,sha256=qMFuG1HUuJwppVf1MG5al1cGmYpw5_PXeOBZyfp2rIA,267
2 | charset_normalizer-2.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
3 | charset_normalizer-2.0.12.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
4 | charset_normalizer-2.0.12.dist-info/METADATA,sha256=eX-U3s7nb6wcvXZFyM1mdBf1yz4I0msVBgNvLEscAbo,11713
5 | charset_normalizer-2.0.12.dist-info/RECORD,,
6 | charset_normalizer-2.0.12.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
7 | charset_normalizer-2.0.12.dist-info/entry_points.txt,sha256=5AJq_EPtGGUwJPgQLnBZfbVr-FYCIwT0xP7dIEZO3NI,77
8 | charset_normalizer-2.0.12.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
9 | charset_normalizer/__init__.py,sha256=x2A2OW29MBcqdxsvy6t1wzkUlH3ma0guxL6ZCfS8J94,1790
10 | charset_normalizer/__pycache__/__init__.cpython-39.pyc,,
11 | charset_normalizer/__pycache__/api.cpython-39.pyc,,
12 | charset_normalizer/__pycache__/cd.cpython-39.pyc,,
13 | charset_normalizer/__pycache__/constant.cpython-39.pyc,,
14 | charset_normalizer/__pycache__/legacy.cpython-39.pyc,,
15 | charset_normalizer/__pycache__/md.cpython-39.pyc,,
16 | charset_normalizer/__pycache__/models.cpython-39.pyc,,
17 | charset_normalizer/__pycache__/utils.cpython-39.pyc,,
18 | charset_normalizer/__pycache__/version.cpython-39.pyc,,
19 | charset_normalizer/api.py,sha256=r__Wz85F5pYOkRwEY5imXY_pCZ2Nil1DkdaAJY7T5o0,20303
20 | charset_normalizer/assets/__init__.py,sha256=FPnfk8limZRb8ZIUQcTvPEcbuM1eqOdWGw0vbWGycDs,25485
21 | charset_normalizer/assets/__pycache__/__init__.cpython-39.pyc,,
22 | charset_normalizer/cd.py,sha256=a9Kzzd9tHl_W08ExbCFMmRJqdo2k7EBQ8Z_3y9DmYsg,11076
23 | charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24 | charset_normalizer/cli/__pycache__/__init__.cpython-39.pyc,,
25 | charset_normalizer/cli/__pycache__/normalizer.cpython-39.pyc,,
26 | charset_normalizer/cli/normalizer.py,sha256=LkeFIRc1l28rOgXpEby695x0bcKQv4D8z9FmA3Z2c3A,9364
27 | charset_normalizer/constant.py,sha256=51u_RS10I1vYVpBao__xHqf--HHNrR6me1A1se5r5Y0,19449
28 | charset_normalizer/legacy.py,sha256=XKeZOts_HdYQU_Jb3C9ZfOjY2CiUL132k9_nXer8gig,3384
29 | charset_normalizer/md.py,sha256=WEwnu2MyIiMeEaorRduqcTxGjIBclWIG3i-9_UL6LLs,18191
30 | charset_normalizer/models.py,sha256=XrGpVxfonhcilIWC1WeiP3-ZORGEe_RG3sgrfPLl9qM,13303
31 | charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
32 | charset_normalizer/utils.py,sha256=AWSL0z1B42IwdLfjX4ZMASA9cTUsTp0PweCdW98SI-4,9308
33 | charset_normalizer/version.py,sha256=uxO2cT0YIavQv4dQlNGmHPIOOwOa-exspxXi3IR7dck,80
34 |
--------------------------------------------------------------------------------
/src/lib/charset_normalizer-2.0.12.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.37.1)
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
6 |
--------------------------------------------------------------------------------
/src/lib/charset_normalizer-2.0.12.dist-info/entry_points.txt:
--------------------------------------------------------------------------------
1 | [console_scripts]
2 | normalizer = charset_normalizer.cli.normalizer:cli_detect
3 |
4 |
--------------------------------------------------------------------------------
/src/lib/charset_normalizer-2.0.12.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | charset_normalizer
2 |
--------------------------------------------------------------------------------
/src/lib/charset_normalizer/__init__.py:
--------------------------------------------------------------------------------
# -*- coding: utf_8 -*-
"""
Charset-Normalizer
~~~~~~~~~~~~~~
The Real First Universal Charset Detector.
A library that helps you read text from an unknown charset encoding.
Motivated by chardet, This package is trying to resolve the issue by taking a new approach.
All IANA character set names for which the Python core library provides codecs are supported.

Basic usage:
   >>> from charset_normalizer import from_bytes
   >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
   >>> best_guess = results.best()
   >>> str(best_guess)
   'Bсеки човек има право на образование. Oбразованието!'

Others methods and usages are available - see the full documentation
at .
:copyright: (c) 2021 by Ahmed TAHRI
:license: MIT, see LICENSE for more details.
"""
import logging

from .api import from_bytes, from_fp, from_path, normalize
from .legacy import (
    CharsetDetector,
    CharsetDoctor,
    CharsetNormalizerMatch,
    CharsetNormalizerMatches,
    detect,
)
from .models import CharsetMatch, CharsetMatches
from .utils import set_logging_handler
from .version import VERSION, __version__

# Public API of this vendored package (detection entry points, result
# models, chardet-compatible aliases, and logging helper).
__all__ = (
    "from_fp",
    "from_path",
    "from_bytes",
    "normalize",
    "detect",
    "CharsetMatch",
    "CharsetMatches",
    "CharsetNormalizerMatch",
    "CharsetNormalizerMatches",
    "CharsetDetector",
    "CharsetDoctor",
    "__version__",
    "VERSION",
    "set_logging_handler",
)

# Attach a NullHandler to the top level logger by default
# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library

logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
57 |
--------------------------------------------------------------------------------
/src/lib/charset_normalizer/cli/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/lib/charset_normalizer/cli/__init__.py
--------------------------------------------------------------------------------
/src/lib/charset_normalizer/cli/normalizer.py:
--------------------------------------------------------------------------------
import argparse
import sys
from json import dumps
from os.path import abspath
from platform import python_version
from typing import List, Optional

from charset_normalizer import from_fp
from charset_normalizer.models import CliDetectionResult
from charset_normalizer.version import __version__
11 |
12 |
def query_yes_no(question: str, default: Optional[str] = "yes") -> bool:
    """Ask a yes/no question via input() and return their answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
    It must be "yes" (the default), "no" or None (meaning
    an answer is required of the user).

    The "answer" return value is True for "yes" or False for "no".

    :raises ValueError: if "default" is not "yes", "no" or None.

    Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
    """
    # Accepted spellings and their boolean meaning.
    valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)

    # Re-prompt until the user gives a recognised answer (or hits Enter
    # when a default exists).
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == "":
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n")
44 |
45 |
def cli_detect(argv: List[str] = None) -> int:
    """
    CLI assistant using ARGV and ArgumentParser
    :param argv: Optional argument list; argparse falls back to sys.argv[1:] when None.
    :return: 0 if everything is fine, anything else equal trouble
    """
    parser = argparse.ArgumentParser(
        description="The Real First Universal Charset Detector. "
        "Discover originating encoding used on text file. "
        "Normalize text to unicode."
    )

    parser.add_argument(
        "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed"
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        dest="verbose",
        help="Display complementary information about file if any. "
        "Stdout will contain logs about the detection process.",
    )
    parser.add_argument(
        "-a",
        "--with-alternative",
        action="store_true",
        default=False,
        dest="alternatives",
        help="Output complementary possibilities if any. Top-level JSON WILL be a list.",
    )
    parser.add_argument(
        "-n",
        "--normalize",
        action="store_true",
        default=False,
        dest="normalize",
        help="Permit to normalize input file. If not set, program does not write anything.",
    )
    parser.add_argument(
        "-m",
        "--minimal",
        action="store_true",
        default=False,
        dest="minimal",
        help="Only output the charset detected to STDOUT. Disabling JSON output.",
    )
    parser.add_argument(
        "-r",
        "--replace",
        action="store_true",
        default=False,
        dest="replace",
        help="Replace file when trying to normalize it instead of creating a new one.",
    )
    parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        dest="force",
        help="Replace file without asking if you are sure, use this flag with caution.",
    )
    parser.add_argument(
        "-t",
        "--threshold",
        action="store",
        default=0.1,
        type=float,
        dest="threshold",
        help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.",
    )
    parser.add_argument(
        "--version",
        action="version",
        version="Charset-Normalizer {} - Python {}".format(
            __version__, python_version()
        ),
        help="Show version information and exit.",
    )

    args = parser.parse_args(argv)

    # Reject nonsensical flag combinations before any file is analysed.
    if args.replace is True and args.normalize is False:
        print("Use --replace in addition of --normalize only.", file=sys.stderr)
        return 1

    if args.force is True and args.replace is False:
        print("Use --force in addition of --replace only.", file=sys.stderr)
        return 1

    if args.threshold < 0.0 or args.threshold > 1.0:
        print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
        return 1

    # One CliDetectionResult per analysed file, plus optional alternatives.
    x_ = []

    for my_file in args.files:

        matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose)

        best_guess = matches.best()

        if best_guess is None:
            # Detection failed: warn on stderr and record an "Unknown" placeholder.
            print(
                'Unable to identify originating encoding for "{}". {}'.format(
                    my_file.name,
                    "Maybe try increasing maximum amount of chaos."
                    if args.threshold < 1.0
                    else "",
                ),
                file=sys.stderr,
            )
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    None,
                    [],
                    [],
                    "Unknown",
                    [],
                    False,
                    1.0,
                    0.0,
                    None,
                    True,
                )
            )
        else:
            # Record the best match; last field True marks it as the preferred result.
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    best_guess.encoding,
                    best_guess.encoding_aliases,
                    [
                        cp
                        for cp in best_guess.could_be_from_charset
                        if cp != best_guess.encoding
                    ],
                    best_guess.language,
                    best_guess.alphabets,
                    best_guess.bom,
                    best_guess.percent_chaos,
                    best_guess.percent_coherence,
                    None,
                    True,
                )
            )

            if len(matches) > 1 and args.alternatives:
                # With -a, also emit every non-best candidate (marked False).
                for el in matches:
                    if el != best_guess:
                        x_.append(
                            CliDetectionResult(
                                abspath(my_file.name),
                                el.encoding,
                                el.encoding_aliases,
                                [
                                    cp
                                    for cp in el.could_be_from_charset
                                    if cp != el.encoding
                                ],
                                el.language,
                                el.alphabets,
                                el.bom,
                                el.percent_chaos,
                                el.percent_coherence,
                                None,
                                False,
                            )
                        )

            if args.normalize is True:

                # Already unicode: nothing to rewrite for this file.
                if best_guess.encoding.startswith("utf") is True:
                    print(
                        '"{}" file does not need to be normalized, as it already came from unicode.'.format(
                            my_file.name
                        ),
                        file=sys.stderr,
                    )
                    if my_file.closed is False:
                        my_file.close()
                    continue

                # Filename split on dots; the encoding is inserted before the
                # extension when not replacing in place.
                o_ = my_file.name.split(".")  # type: List[str]

                if args.replace is False:
                    o_.insert(-1, best_guess.encoding)
                    if my_file.closed is False:
                        my_file.close()
                elif (
                    args.force is False
                    and query_yes_no(
                        'Are you sure to normalize "{}" by replacing it ?'.format(
                            my_file.name
                        ),
                        "no",
                    )
                    is False
                ):
                    # User declined the in-place replacement: skip this file.
                    if my_file.closed is False:
                        my_file.close()
                    continue

                try:
                    # NOTE(review): always writes unicode_path onto x_[0]; with
                    # several input files this looks like it should target the
                    # current file's entry — confirm against upstream.
                    x_[0].unicode_path = abspath("./{}".format(".".join(o_)))

                    with open(x_[0].unicode_path, "w", encoding="utf-8") as fp:
                        fp.write(str(best_guess))
                except IOError as e:
                    print(str(e), file=sys.stderr)
                    if my_file.closed is False:
                        my_file.close()
                    return 2

        if my_file.closed is False:
            my_file.close()

    if args.minimal is False:
        # Full JSON report: a single object, or a list when several results exist.
        print(
            dumps(
                [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__,
                ensure_ascii=True,
                indent=4,
            )
        )
    else:
        # Minimal mode: only the detected encoding name(s), one line per input file.
        for my_file in args.files:
            print(
                ", ".join(
                    [
                        el.encoding or "undefined"
                        for el in x_
                        if el.path == abspath(my_file.name)
                    ]
                )
            )

    return 0
287 |
288 |
289 | if __name__ == "__main__":
290 | cli_detect()
291 |
--------------------------------------------------------------------------------
/src/lib/charset_normalizer/legacy.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from typing import Dict, Optional, Union
3 |
4 | from .api import from_bytes, from_fp, from_path, normalize
5 | from .constant import CHARDET_CORRESPONDENCE
6 | from .models import CharsetMatch, CharsetMatches
7 |
8 |
def detect(byte_str: bytes) -> Dict[str, Optional[Union[str, float]]]:
    """
    chardet legacy method
    Detect the encoding of the given byte string. It should be mostly backward-compatible.
    Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it)
    This function is deprecated and should be used to migrate your project easily, consult the documentation for
    further information. Not planned for removal.

    :param byte_str: The byte sequence to examine.
    """
    if not isinstance(byte_str, (bytearray, bytes)):
        raise TypeError(  # pragma: nocover
            "Expected object of type bytes or bytearray, got: "
            "{0}".format(type(byte_str))
        )

    if isinstance(byte_str, bytearray):
        byte_str = bytes(byte_str)

    best_match = from_bytes(byte_str).best()

    if best_match is None:
        encoding = None
        language = ""
        confidence = None
    else:
        encoding = best_match.encoding
        language = best_match.language if best_match.language != "Unknown" else ""
        confidence = 1.0 - best_match.chaos

        # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process
        # but chardet does return 'utf-8-sig' and it is a valid codec name.
        if encoding == "utf_8" and best_match.bom:
            encoding += "_sig"

    # Translate to chardet's spelling when a correspondence exists.
    return {
        "encoding": CHARDET_CORRESPONDENCE.get(encoding, encoding),
        "language": language,
        "confidence": confidence,
    }
46 |
47 |
class CharsetNormalizerMatch(CharsetMatch):
    # Pre-2.0 name kept as a backward-compatible alias of CharsetMatch.
    pass
50 |
51 |
class CharsetNormalizerMatches(CharsetMatches):
    """Deprecated pre-2.0 entry point kept so old imports keep working.

    Each staticmethod emits a DeprecationWarning and forwards to the
    module-level function of the same name.
    """

    # Shared deprecation text; value is identical to the historical message.
    _DEPRECATION_MESSAGE = (
        "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
        "and scheduled to be removed in 3.0"
    )

    @staticmethod
    def from_fp(*args, **kwargs):  # type: ignore
        warnings.warn(  # pragma: nocover
            CharsetNormalizerMatches._DEPRECATION_MESSAGE,
            DeprecationWarning,
        )
        return from_fp(*args, **kwargs)  # pragma: nocover

    @staticmethod
    def from_bytes(*args, **kwargs):  # type: ignore
        warnings.warn(  # pragma: nocover
            CharsetNormalizerMatches._DEPRECATION_MESSAGE,
            DeprecationWarning,
        )
        return from_bytes(*args, **kwargs)  # pragma: nocover

    @staticmethod
    def from_path(*args, **kwargs):  # type: ignore
        warnings.warn(  # pragma: nocover
            CharsetNormalizerMatches._DEPRECATION_MESSAGE,
            DeprecationWarning,
        )
        return from_path(*args, **kwargs)  # pragma: nocover

    @staticmethod
    def normalize(*args, **kwargs):  # type: ignore
        warnings.warn(  # pragma: nocover
            CharsetNormalizerMatches._DEPRECATION_MESSAGE,
            DeprecationWarning,
        )
        return normalize(*args, **kwargs)  # pragma: nocover
88 |
89 |
class CharsetDetector(CharsetNormalizerMatches):
    # Legacy alias of CharsetNormalizerMatches kept for backward compatibility.
    pass
92 |
93 |
class CharsetDoctor(CharsetNormalizerMatches):
    # Legacy alias of CharsetNormalizerMatches kept for backward compatibility.
    pass
96 |
--------------------------------------------------------------------------------
/src/lib/charset_normalizer/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/lib/charset_normalizer/py.typed
--------------------------------------------------------------------------------
/src/lib/charset_normalizer/version.py:
--------------------------------------------------------------------------------
"""
Expose version
"""

__version__ = "2.0.12"
# Dotted version split into string components, e.g. ["2", "0", "12"].
VERSION = __version__.split(".")
7 |
--------------------------------------------------------------------------------
/src/lib/idna-3.3.dist-info/INSTALLER:
--------------------------------------------------------------------------------
1 | pip
2 |
--------------------------------------------------------------------------------
/src/lib/idna-3.3.dist-info/LICENSE.md:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2013-2021, Kim Davies
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/src/lib/idna-3.3.dist-info/METADATA:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: idna
3 | Version: 3.3
4 | Summary: Internationalized Domain Names in Applications (IDNA)
5 | Home-page: https://github.com/kjd/idna
6 | Author: Kim Davies
7 | Author-email: kim@cynosure.com.au
8 | License: BSD-3-Clause
9 | Platform: UNKNOWN
10 | Classifier: Development Status :: 5 - Production/Stable
11 | Classifier: Intended Audience :: Developers
12 | Classifier: Intended Audience :: System Administrators
13 | Classifier: License :: OSI Approved :: BSD License
14 | Classifier: Operating System :: OS Independent
15 | Classifier: Programming Language :: Python
16 | Classifier: Programming Language :: Python :: 3
17 | Classifier: Programming Language :: Python :: 3 :: Only
18 | Classifier: Programming Language :: Python :: 3.5
19 | Classifier: Programming Language :: Python :: 3.6
20 | Classifier: Programming Language :: Python :: 3.7
21 | Classifier: Programming Language :: Python :: 3.8
22 | Classifier: Programming Language :: Python :: 3.9
23 | Classifier: Programming Language :: Python :: 3.10
24 | Classifier: Programming Language :: Python :: Implementation :: CPython
25 | Classifier: Programming Language :: Python :: Implementation :: PyPy
26 | Classifier: Topic :: Internet :: Name Service (DNS)
27 | Classifier: Topic :: Software Development :: Libraries :: Python Modules
28 | Classifier: Topic :: Utilities
29 | Requires-Python: >=3.5
30 | License-File: LICENSE.md
31 |
32 | Internationalized Domain Names in Applications (IDNA)
33 | =====================================================
34 |
35 | Support for the Internationalised Domain Names in Applications
36 | (IDNA) protocol as specified in `RFC 5891 `_.
37 | This is the latest version of the protocol and is sometimes referred to as
38 | “IDNA 2008”.
39 |
40 | This library also provides support for Unicode Technical Standard 46,
41 | `Unicode IDNA Compatibility Processing `_.
42 |
43 | This acts as a suitable replacement for the “encodings.idna” module that
44 | comes with the Python standard library, but which only supports the
45 | older superseded IDNA specification (`RFC 3490 `_).
46 |
47 | Basic functions are simply executed:
48 |
49 | .. code-block:: pycon
50 |
51 | >>> import idna
52 | >>> idna.encode('ドメイン.テスト')
53 | b'xn--eckwd4c7c.xn--zckzah'
54 | >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
55 | ドメイン.テスト
56 |
57 |
58 | Installation
59 | ------------
60 |
61 | To install this library, you can use pip:
62 |
63 | .. code-block:: bash
64 |
65 | $ pip install idna
66 |
67 | Alternatively, you can install the package using the bundled setup script:
68 |
69 | .. code-block:: bash
70 |
71 | $ python setup.py install
72 |
73 |
74 | Usage
75 | -----
76 |
77 | For typical usage, the ``encode`` and ``decode`` functions will take a domain
78 | name argument and perform a conversion to A-labels or U-labels respectively.
79 |
80 | .. code-block:: pycon
81 |
82 | >>> import idna
83 | >>> idna.encode('ドメイン.テスト')
84 | b'xn--eckwd4c7c.xn--zckzah'
85 | >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
86 | ドメイン.テスト
87 |
88 | You may use the codec encoding and decoding methods using the
89 | ``idna.codec`` module:
90 |
91 | .. code-block:: pycon
92 |
93 | >>> import idna.codec
94 | >>> print('домен.испытание'.encode('idna'))
95 | b'xn--d1acufc.xn--80akhbyknj4f'
96 | >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna'))
97 | домен.испытание
98 |
99 | Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel``
100 | functions if necessary:
101 |
102 | .. code-block:: pycon
103 |
104 | >>> idna.alabel('测试')
105 | b'xn--0zwm56d'
106 |
107 | Compatibility Mapping (UTS #46)
108 | +++++++++++++++++++++++++++++++
109 |
110 | As described in `RFC 5895 `_, the IDNA
111 | specification does not normalize input from different potential ways a user
112 | may input a domain name. This functionality, known as a “mapping”, is
113 | considered by the specification to be a local user-interface issue distinct
114 | from IDNA conversion functionality.
115 |
116 | This library provides one such mapping, that was developed by the Unicode
117 | Consortium. Known as `Unicode IDNA Compatibility Processing `_,
118 | it provides for both a regular mapping for typical applications, as well as
119 | a transitional mapping to help migrate from older IDNA 2003 applications.
120 |
121 | For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL
122 | LETTER K* is not allowed (nor are capital letters in general). UTS 46 will
123 | convert this into lower case prior to applying the IDNA conversion.
124 |
125 | .. code-block:: pycon
126 |
127 | >>> import idna
128 | >>> idna.encode('Königsgäßchen')
129 | ...
130 | idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
131 | >>> idna.encode('Königsgäßchen', uts46=True)
132 | b'xn--knigsgchen-b4a3dun'
133 | >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
134 | königsgäßchen
135 |
136 | Transitional processing provides conversions to help transition from the older
137 | 2003 standard to the current standard. For example, in the original IDNA
138 | specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two
139 | *LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this
140 | conversion is not performed.
141 |
142 | .. code-block:: pycon
143 |
144 | >>> idna.encode('Königsgäßchen', uts46=True, transitional=True)
145 | 'xn--knigsgsschen-lcb0w'
146 |
147 | Implementors should use transitional processing with caution, only in rare
148 | cases where conversion from legacy labels to current labels must be performed
149 | (i.e. IDNA implementations that pre-date 2008). For typical applications
150 | that just need to convert labels, transitional processing is unlikely to be
151 | beneficial and could produce unexpected incompatible results.
152 |
153 | ``encodings.idna`` Compatibility
154 | ++++++++++++++++++++++++++++++++
155 |
156 | Function calls from the Python built-in ``encodings.idna`` module are
157 | mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
158 | Simply substitute the ``import`` clause in your code to refer to the
159 | new module name.
160 |
161 | Exceptions
162 | ----------
163 |
164 | All errors raised during the conversion following the specification should
165 | raise an exception derived from the ``idna.IDNAError`` base class.
166 |
167 | More specific exceptions that may be generated as ``idna.IDNABidiError``
168 | when the error reflects an illegal combination of left-to-right and
169 | right-to-left characters in a label; ``idna.InvalidCodepoint`` when
170 | a specific codepoint is an illegal character in an IDN label (i.e.
171 | INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
172 | illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ
173 | but the contextual requirements are not satisfied.)
174 |
175 | Building and Diagnostics
176 | ------------------------
177 |
178 | The IDNA and UTS 46 functionality relies upon pre-calculated lookup
179 | tables for performance. These tables are derived from computing against
180 | eligibility criteria in the respective standards. These tables are
181 | computed using the command-line script ``tools/idna-data``.
182 |
183 | This tool will fetch relevant codepoint data from the Unicode repository
184 | and perform the required calculations to identify eligibility. There are
185 | three main modes:
186 |
187 | * ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``,
188 | the pre-calculated lookup tables using for IDNA and UTS 46 conversions. Implementors
189 | who wish to track this library against a different Unicode version may use this tool
190 | to manually generate a different version of the ``idnadata.py`` and ``uts46data.py``
191 | files.
192 |
193 | * ``idna-data make-table``. Generate a table of the IDNA disposition
194 | (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC
195 | 5892 and the pre-computed tables published by `IANA `_.
196 |
197 | * ``idna-data U+0061``. Prints debugging output on the various properties
198 | associated with an individual Unicode codepoint (in this case, U+0061), that are
199 | used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging
200 | or analysis.
201 |
202 | The tool accepts a number of arguments, described using ``idna-data -h``. Most notably,
203 | the ``--version`` argument allows the specification of the version of Unicode to use
204 | in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata``
205 | will generate library data against Unicode 9.0.0.
206 |
207 |
208 | Additional Notes
209 | ----------------
210 |
211 | * **Packages**. The latest tagged release version is published in the
212 | `Python Package Index `_.
213 |
214 | * **Version support**. This library supports Python 3.5 and higher. As this library
215 | serves as a low-level toolkit for a variety of applications, many of which strive
216 | for broad compatibility with older Python versions, there is no rush to remove
217 | older intepreter support. Removing support for older versions should be well
218 | justified in that the maintenance burden has become too high.
219 |
220 | * **Python 2**. Python 2 is supported by version 2.x of this library. While active
221 | development of the version 2.x series has ended, notable issues being corrected
222 | may be backported to 2.x. Use "idna<3" in your requirements file if you need this
223 | library for a Python 2 application.
224 |
225 | * **Testing**. The library has a test suite based on each rule of the IDNA specification, as
226 | well as tests that are provided as part of the Unicode Technical Standard 46,
227 | `Unicode IDNA Compatibility Processing `_.
228 |
229 | * **Emoji**. It is an occasional request to support emoji domains in this library. Encoding
230 | of symbols like emoji is expressly prohibited by the technical standard IDNA 2008 and
231 | emoji domains are broadly phased out across the domain industry due to associated security
232 | risks. For now, applications that wish need to support these non-compliant labels may
233 | wish to consider trying the encode/decode operation in this library first, and then falling
234 | back to using `encodings.idna`. See `the Github project `_
235 | for more discussion.
236 |
237 |
--------------------------------------------------------------------------------
/src/lib/idna-3.3.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | idna-3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2 | idna-3.3.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523
3 | idna-3.3.dist-info/METADATA,sha256=BdqiAf8ou4x1nzIHp2_sDfXWjl7BrSUGpOeVzbYHQuQ,9765
4 | idna-3.3.dist-info/RECORD,,
5 | idna-3.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
6 | idna-3.3.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5
7 | idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
8 | idna/__pycache__/__init__.cpython-39.pyc,,
9 | idna/__pycache__/codec.cpython-39.pyc,,
10 | idna/__pycache__/compat.cpython-39.pyc,,
11 | idna/__pycache__/core.cpython-39.pyc,,
12 | idna/__pycache__/idnadata.cpython-39.pyc,,
13 | idna/__pycache__/intranges.cpython-39.pyc,,
14 | idna/__pycache__/package_data.cpython-39.pyc,,
15 | idna/__pycache__/uts46data.cpython-39.pyc,,
16 | idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374
17 | idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
18 | idna/core.py,sha256=RFIkY-HhFZaDoBEFjGwyGd_vWI04uOAQjnzueMWqwOU,12795
19 | idna/idnadata.py,sha256=fzMzkCea2xieVxcrjngJ-2pLsKQNejPCZFlBajIuQdw,44025
20 | idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
21 | idna/package_data.py,sha256=szxQhV0ZD0nKJ84Kuobw3l8q4_KeCyXjFRdpwIpKZmw,21
22 | idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23 | idna/uts46data.py,sha256=o-D7V-a0fOLZNd7tvxof6MYfUd0TBZzE2bLR5XO67xU,204400
24 |
--------------------------------------------------------------------------------
/src/lib/idna-3.3.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.37.0)
3 | Root-Is-Purelib: true
4 | Tag: py3-none-any
5 |
6 |
--------------------------------------------------------------------------------
/src/lib/idna-3.3.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | idna
2 |
--------------------------------------------------------------------------------
/src/lib/idna/__init__.py:
--------------------------------------------------------------------------------
1 | from .package_data import __version__
2 | from .core import (
3 | IDNABidiError,
4 | IDNAError,
5 | InvalidCodepoint,
6 | InvalidCodepointContext,
7 | alabel,
8 | check_bidi,
9 | check_hyphen_ok,
10 | check_initial_combiner,
11 | check_label,
12 | check_nfc,
13 | decode,
14 | encode,
15 | ulabel,
16 | uts46_remap,
17 | valid_contextj,
18 | valid_contexto,
19 | valid_label_length,
20 | valid_string_length,
21 | )
22 | from .intranges import intranges_contain
23 |
24 | __all__ = [
25 | "IDNABidiError",
26 | "IDNAError",
27 | "InvalidCodepoint",
28 | "InvalidCodepointContext",
29 | "alabel",
30 | "check_bidi",
31 | "check_hyphen_ok",
32 | "check_initial_combiner",
33 | "check_label",
34 | "check_nfc",
35 | "decode",
36 | "encode",
37 | "intranges_contain",
38 | "ulabel",
39 | "uts46_remap",
40 | "valid_contextj",
41 | "valid_contexto",
42 | "valid_label_length",
43 | "valid_string_length",
44 | ]
45 |
--------------------------------------------------------------------------------
/src/lib/idna/codec.py:
--------------------------------------------------------------------------------
1 | from .core import encode, decode, alabel, ulabel, IDNAError
2 | import codecs
3 | import re
4 | from typing import Tuple, Optional
5 |
6 | _unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
7 |
class Codec(codecs.Codec):
    """Stateless codec mapping domain names to/from their IDNA A-label form."""

    def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]:
        """Encode *data*; only 'strict' error handling is supported."""
        if errors != 'strict':
            raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
        if not data:
            return b"", 0
        return encode(data), len(data)

    def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]:
        """Decode *data*; only 'strict' error handling is supported."""
        if errors != 'strict':
            raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
        if not data:
            return '', 0
        return decode(data), len(data)
27 |
class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
    """Incremental IDNA encoder that holds back a possibly unfinished label."""

    def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]:  # type: ignore
        if errors != 'strict':
            raise IDNAError('Unsupported error handling \"{}\"'.format(errors))

        if not data:
            return "", 0

        labels = _unicode_dots_re.split(data)
        trailing_dot = ''
        if labels:
            if not labels[-1]:
                # Input ended on a separator: the final (empty) label is complete.
                trailing_dot = '.'
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = '.'

        encoded = []
        consumed = 0
        for segment in labels:
            encoded.append(alabel(segment))
            if consumed:
                # Account for the separator between consecutive labels.
                consumed += 1
            consumed += len(segment)

        # Join with U+002E
        joined = '.'.join(encoded) + trailing_dot  # type: ignore
        consumed += len(trailing_dot)
        return joined, consumed
60 |
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    """Incremental IDNA decoder that holds back a possibly unfinished label."""

    def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]:  # type: ignore
        if errors != 'strict':
            raise IDNAError('Unsupported error handling \"{}\"'.format(errors))

        if not data:
            return '', 0

        labels = _unicode_dots_re.split(data)
        trailing_dot = ''
        if labels:
            if not labels[-1]:
                # Input ended on a separator: the final (empty) label is complete.
                trailing_dot = '.'
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = '.'

        decoded = []
        consumed = 0
        for segment in labels:
            decoded.append(ulabel(segment))
            if consumed:
                # Account for the separator between consecutive labels.
                consumed += 1
            consumed += len(segment)

        joined = '.'.join(decoded) + trailing_dot
        consumed += len(trailing_dot)
        return joined, consumed
92 |
93 |
class StreamWriter(Codec, codecs.StreamWriter):
    # Stream-writing variant of the stateless Codec; no extra state required.
    pass
96 |
97 |
class StreamReader(Codec, codecs.StreamReader):
    # Stream-reading variant of the stateless Codec; no extra state required.
    pass
100 |
101 |
def getregentry() -> codecs.CodecInfo:
    """Return the CodecInfo record wiring this module's codec classes together."""
    # Compatibility as a search_function for codecs.register()
    return codecs.CodecInfo(
        name='idna',
        encode=Codec().encode,  # type: ignore
        decode=Codec().decode,  # type: ignore
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
113 |
--------------------------------------------------------------------------------
/src/lib/idna/compat.py:
--------------------------------------------------------------------------------
1 | from .core import *
2 | from .codec import *
3 | from typing import Any, Union
4 |
def ToASCII(label: str) -> bytes:
    """Compatibility wrapper (RFC 3490-style name) around encode()."""
    ascii_form = encode(label)
    return ascii_form
7 |
def ToUnicode(label: Union[bytes, bytearray]) -> str:
    """Compatibility wrapper (RFC 3490-style name) around decode()."""
    unicode_form = decode(label)
    return unicode_form
10 |
def nameprep(s: Any) -> None:
    """Always raise: nameprep is an IDNA 2003 step with no IDNA 2008 analogue."""
    raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol')
13 |
14 |
--------------------------------------------------------------------------------
/src/lib/idna/intranges.py:
--------------------------------------------------------------------------------
1 | """
2 | Given a list of integers, made up of (hopefully) a small number of long runs
3 | of consecutive integers, compute a representation of the form
4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present
5 | in the original list?" in time O(log(# runs)).
6 | """
7 |
8 | import bisect
9 | from typing import List, Tuple
10 |
def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
    """Represent a list of integers as a sequence of ranges:
    ((start_0, end_0), (start_1, end_1), ...), such that the original
    integers are exactly those x such that start_i <= x < end_i for some i.

    Ranges are encoded as single integers (start << 32 | end), not as tuples.
    """
    values = sorted(list_)
    encoded = []
    run_start = 0  # index of the first element of the current run
    for idx, value in enumerate(values):
        # Keep extending the run while the next element is consecutive.
        if idx + 1 < len(values) and value == values[idx + 1] - 1:
            continue
        # Run ends here: encode the half-open range [values[run_start], value + 1).
        encoded.append((values[run_start] << 32) | (value + 1))
        run_start = idx + 1
    return tuple(encoded)
31 |
32 | def _encode_range(start: int, end: int) -> int:
33 | return (start << 32) | end
34 |
35 | def _decode_range(r: int) -> Tuple[int, int]:
36 | return (r >> 32), (r & ((1 << 32) - 1))
37 |
38 |
def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
    """Determine if `int_` falls into one of the ranges in `ranges`."""
    # Probe with int_ << 32 (start=int_, end=0): the smallest encoded
    # value whose start component equals int_.
    probe = int_ << 32
    pos = bisect.bisect_left(ranges, probe)
    # The range just before the insertion point may cover int_:
    # its start is < int_, so check start <= int_ < end.
    if pos > 0:
        prev = ranges[pos - 1]
        start, end = prev >> 32, prev & ((1 << 32) - 1)
        if start <= int_ < end:
            return True
    # Or the range at the insertion point may begin exactly at int_.
    if pos < len(ranges):
        if (ranges[pos] >> 32) == int_:
            return True
    return False
55 |
--------------------------------------------------------------------------------
/src/lib/idna/package_data.py:
--------------------------------------------------------------------------------
__version__ = '3.3'  # Version of the vendored idna distribution (matches idna-3.3.dist-info).
2 |
3 |
--------------------------------------------------------------------------------
/src/lib/idna/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/lib/idna/py.typed
--------------------------------------------------------------------------------
/src/lib/requests-2.27.1.dist-info/INSTALLER:
--------------------------------------------------------------------------------
1 | pip
2 |
--------------------------------------------------------------------------------
/src/lib/requests-2.27.1.dist-info/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
--------------------------------------------------------------------------------
/src/lib/requests-2.27.1.dist-info/METADATA:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: requests
3 | Version: 2.27.1
4 | Summary: Python HTTP for Humans.
5 | Home-page: https://requests.readthedocs.io
6 | Author: Kenneth Reitz
7 | Author-email: me@kennethreitz.org
8 | License: Apache 2.0
9 | Project-URL: Documentation, https://requests.readthedocs.io
10 | Project-URL: Source, https://github.com/psf/requests
11 | Platform: UNKNOWN
12 | Classifier: Development Status :: 5 - Production/Stable
13 | Classifier: Environment :: Web Environment
14 | Classifier: Intended Audience :: Developers
15 | Classifier: License :: OSI Approved :: Apache Software License
16 | Classifier: Natural Language :: English
17 | Classifier: Operating System :: OS Independent
18 | Classifier: Programming Language :: Python
19 | Classifier: Programming Language :: Python :: 2
20 | Classifier: Programming Language :: Python :: 2.7
21 | Classifier: Programming Language :: Python :: 3
22 | Classifier: Programming Language :: Python :: 3.6
23 | Classifier: Programming Language :: Python :: 3.7
24 | Classifier: Programming Language :: Python :: 3.8
25 | Classifier: Programming Language :: Python :: 3.9
26 | Classifier: Programming Language :: Python :: 3.10
27 | Classifier: Programming Language :: Python :: Implementation :: CPython
28 | Classifier: Programming Language :: Python :: Implementation :: PyPy
29 | Classifier: Topic :: Internet :: WWW/HTTP
30 | Classifier: Topic :: Software Development :: Libraries
31 | Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
32 | Description-Content-Type: text/markdown
33 | Requires-Dist: urllib3 (<1.27,>=1.21.1)
34 | Requires-Dist: certifi (>=2017.4.17)
35 | Requires-Dist: chardet (<5,>=3.0.2) ; python_version < "3"
36 | Requires-Dist: idna (<3,>=2.5) ; python_version < "3"
37 | Requires-Dist: charset-normalizer (~=2.0.0) ; python_version >= "3"
38 | Requires-Dist: idna (<4,>=2.5) ; python_version >= "3"
39 | Provides-Extra: security
40 | Provides-Extra: socks
41 | Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks'
42 | Requires-Dist: win-inet-pton ; (sys_platform == "win32" and python_version == "2.7") and extra == 'socks'
43 | Provides-Extra: use_chardet_on_py3
44 | Requires-Dist: chardet (<5,>=3.0.2) ; extra == 'use_chardet_on_py3'
45 |
46 | # Requests
47 |
48 | **Requests** is a simple, yet elegant, HTTP library.
49 |
50 | ```python
51 | >>> import requests
52 | >>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
53 | >>> r.status_code
54 | 200
55 | >>> r.headers['content-type']
56 | 'application/json; charset=utf8'
57 | >>> r.encoding
58 | 'utf-8'
59 | >>> r.text
60 | '{"authenticated": true, ...'
61 | >>> r.json()
62 | {'authenticated': True, ...}
63 | ```
64 |
65 | Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
66 |
67 | Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
68 |
69 | [](https://pepy.tech/project/requests)
70 | [](https://pypi.org/project/requests)
71 | [](https://github.com/psf/requests/graphs/contributors)
72 |
73 | ## Installing Requests and Supported Versions
74 |
75 | Requests is available on PyPI:
76 |
77 | ```console
78 | $ python -m pip install requests
79 | ```
80 |
81 | Requests officially supports Python 2.7 & 3.6+.
82 |
83 | ## Supported Features & Best–Practices
84 |
85 | Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
86 |
87 | - Keep-Alive & Connection Pooling
88 | - International Domains and URLs
89 | - Sessions with Cookie Persistence
90 | - Browser-style TLS/SSL Verification
91 | - Basic & Digest Authentication
92 | - Familiar `dict`–like Cookies
93 | - Automatic Content Decompression and Decoding
94 | - Multi-part File Uploads
95 | - SOCKS Proxy Support
96 | - Connection Timeouts
97 | - Streaming Downloads
98 | - Automatic honoring of `.netrc`
99 | - Chunked HTTP Requests
100 |
101 | ## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
102 |
103 | [](https://requests.readthedocs.io)
104 |
105 | ## Cloning the repository
106 |
107 | When cloning the Requests repository, you may need to add the `-c
108 | fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see
109 | [this issue](https://github.com/psf/requests/issues/2690) for more background):
110 |
111 | ```shell
112 | git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
113 | ```
114 |
115 | You can also apply this setting to your global Git config:
116 |
117 | ```shell
118 | git config --global fetch.fsck.badTimezone ignore
119 | ```
120 |
121 | ---
122 |
123 | [](https://kennethreitz.org) [](https://www.python.org/psf)
124 |
125 |
126 |
--------------------------------------------------------------------------------
/src/lib/requests-2.27.1.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | requests-2.27.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2 | requests-2.27.1.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
3 | requests-2.27.1.dist-info/METADATA,sha256=fxIjGa_S2RlpD4oFJGdSuXQWFUGxjcgAhLYF9HwRp8Q,4984
4 | requests-2.27.1.dist-info/RECORD,,
5 | requests-2.27.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6 | requests-2.27.1.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110
7 | requests-2.27.1.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
8 | requests/__init__.py,sha256=BgNBrAYr3DdRWDtkc1IwoSraVBBz1712c_eX4G8h-ak,4924
9 | requests/__pycache__/__init__.cpython-39.pyc,,
10 | requests/__pycache__/__version__.cpython-39.pyc,,
11 | requests/__pycache__/_internal_utils.cpython-39.pyc,,
12 | requests/__pycache__/adapters.cpython-39.pyc,,
13 | requests/__pycache__/api.cpython-39.pyc,,
14 | requests/__pycache__/auth.cpython-39.pyc,,
15 | requests/__pycache__/certs.cpython-39.pyc,,
16 | requests/__pycache__/compat.cpython-39.pyc,,
17 | requests/__pycache__/cookies.cpython-39.pyc,,
18 | requests/__pycache__/exceptions.cpython-39.pyc,,
19 | requests/__pycache__/help.cpython-39.pyc,,
20 | requests/__pycache__/hooks.cpython-39.pyc,,
21 | requests/__pycache__/models.cpython-39.pyc,,
22 | requests/__pycache__/packages.cpython-39.pyc,,
23 | requests/__pycache__/sessions.cpython-39.pyc,,
24 | requests/__pycache__/status_codes.cpython-39.pyc,,
25 | requests/__pycache__/structures.cpython-39.pyc,,
26 | requests/__pycache__/utils.cpython-39.pyc,,
27 | requests/__version__.py,sha256=q8miOQaomOv3S74lK4eQs1zZ5jwcnOusyEU-M2idhts,441
28 | requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096
29 | requests/adapters.py,sha256=YJf_0S2JL5fEs6yPKC3iQ-Iu0UZxDb9H6W5ipVBrIXE,21645
30 | requests/api.py,sha256=hjuoP79IAEmX6Dysrw8t032cLfwLHxbI_wM4gC5G9t0,6402
31 | requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207
32 | requests/certs.py,sha256=dOB5rV2DZ13dEhq9BUa_4hd5kAqg59e_zUZB00faYz8,453
33 | requests/compat.py,sha256=wy3bUOq8aKOE7mMZiLJJMXFCaXbhoS-baON60EDvAIg,2054
34 | requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430
35 | requests/exceptions.py,sha256=VUKyfNZmXjsjkPgipZupHfkcE3OVdYKx8GqfvWckfwA,3434
36 | requests/help.py,sha256=ywPNssPohrch_Q_q4_JLJM1z2bP0TirHkA9QnoOF0sY,3968
37 | requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757
38 | requests/models.py,sha256=RfXfgGUZ5X6CWUhUODku7-MOQNWVs7lDZPV6bDC53cY,35051
39 | requests/packages.py,sha256=kr9J9dYZr9Ef4JmwHbCEUgwViwcCyOUPgfXZvIL84Os,932
40 | requests/sessions.py,sha256=Zu-Y9YPlwTIsyFx1hvIrc3ziyeFpuFPqcOuSuz8BNWs,29835
41 | requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188
42 | requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005
43 | requests/utils.py,sha256=MKTK3du_WmmO2nv_SkeV880VwfIYhJbvd1Lz7uDioP8,33277
44 |
--------------------------------------------------------------------------------
/src/lib/requests-2.27.1.dist-info/REQUESTED:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/lib/requests-2.27.1.dist-info/REQUESTED
--------------------------------------------------------------------------------
/src/lib/requests-2.27.1.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.37.1)
3 | Root-Is-Purelib: true
4 | Tag: py2-none-any
5 | Tag: py3-none-any
6 |
7 |
--------------------------------------------------------------------------------
/src/lib/requests-2.27.1.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/src/lib/requests/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # __
4 | # /__) _ _ _ _ _/ _
5 | # / ( (- (/ (/ (- _) / _)
6 | # /
7 |
8 | """
9 | Requests HTTP Library
10 | ~~~~~~~~~~~~~~~~~~~~~
11 |
12 | Requests is an HTTP library, written in Python, for human beings.
13 | Basic GET usage:
14 |
15 | >>> import requests
16 | >>> r = requests.get('https://www.python.org')
17 | >>> r.status_code
18 | 200
19 | >>> b'Python is a programming language' in r.content
20 | True
21 |
22 | ... or POST:
23 |
24 | >>> payload = dict(key1='value1', key2='value2')
25 | >>> r = requests.post('https://httpbin.org/post', data=payload)
26 | >>> print(r.text)
27 | {
28 | ...
29 | "form": {
30 | "key1": "value1",
31 | "key2": "value2"
32 | },
33 | ...
34 | }
35 |
36 | The other HTTP methods are supported - see `requests.api`. Full documentation
37 | is at .
38 |
39 | :copyright: (c) 2017 by Kenneth Reitz.
40 | :license: Apache 2.0, see LICENSE for more details.
41 | """
42 |
43 | import urllib3
44 | import warnings
45 | from .exceptions import RequestsDependencyWarning
46 |
47 | try:
48 | from charset_normalizer import __version__ as charset_normalizer_version
49 | except ImportError:
50 | charset_normalizer_version = None
51 |
52 | try:
53 | from chardet import __version__ as chardet_version
54 | except ImportError:
55 | chardet_version = None
56 |
def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
    """Assert that dependency versions fall inside requests' supported ranges.

    Raises AssertionError for an out-of-range version, ValueError for an
    unparsable one, and Exception when neither chardet nor
    charset_normalizer is available.
    """
    parts = urllib3_version.split('.')
    assert parts != ['dev']  # Verify urllib3 isn't installed from git.

    # Sometimes, urllib3 only reports its version as 16.1; pad to x.y.z.
    if len(parts) == 2:
        parts.append('0')

    # Check urllib3 for compatibility: >= 1.21.1, <= 1.26.
    major, minor, patch = (int(p) for p in parts)  # noqa: F811
    assert major == 1
    assert minor >= 21
    assert minor <= 26

    # Exactly one charset detector must be present and in range.
    if chardet_version:
        # chardet >= 3.0.2, < 5.0.0
        major, minor, patch = (int(p) for p in chardet_version.split('.')[:3])
        assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
    elif charset_normalizer_version:
        # charset_normalizer >= 2.0.0, < 3.0.0
        major, minor, patch = (int(p) for p in charset_normalizer_version.split('.')[:3])
        assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
    else:
        raise Exception("You need either charset_normalizer or chardet installed")
86 |
87 | def _check_cryptography(cryptography_version):
88 | # cryptography < 1.3.4
89 | try:
90 | cryptography_version = list(map(int, cryptography_version.split('.')))
91 | except ValueError:
92 | return
93 |
94 | if cryptography_version < [1, 3, 4]:
95 | warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
96 | warnings.warn(warning, RequestsDependencyWarning)
97 |
# Check imported dependencies for compatibility.
# A mismatch is deliberately non-fatal: requests may still work, so we only warn.
try:
    check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
except (AssertionError, ValueError):
    warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
                  "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
                  RequestsDependencyWarning)

# Attempt to enable urllib3's fallback for SNI support
# if the standard library doesn't support SNI or the
# 'ssl' library isn't available.
try:
    try:
        import ssl
    except ImportError:
        ssl = None

    # HAS_SNI is missing/False when the stdlib ssl cannot do SNI itself
    # (or ssl is None entirely); in that case inject pyopenssl into urllib3.
    if not getattr(ssl, "HAS_SNI", False):
        from urllib3.contrib import pyopenssl
        pyopenssl.inject_into_urllib3()

        # Check cryptography version
        from cryptography import __version__ as cryptography_version
        _check_cryptography(cryptography_version)
except ImportError:
    # pyopenssl/cryptography not installed: the fallback is best-effort only.
    pass

# urllib3's DependencyWarnings should be silenced.
from urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)
128 |
129 | from .__version__ import __title__, __description__, __url__, __version__
130 | from .__version__ import __build__, __author__, __author_email__, __license__
131 | from .__version__ import __copyright__, __cake__
132 |
133 | from . import utils
134 | from . import packages
135 | from .models import Request, Response, PreparedRequest
136 | from .api import request, get, head, post, patch, put, delete, options
137 | from .sessions import session, Session
138 | from .status_codes import codes
139 | from .exceptions import (
140 | RequestException, Timeout, URLRequired,
141 | TooManyRedirects, HTTPError, ConnectionError,
142 | FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
143 | )
144 |
145 | # Set default logging handler to avoid "No handler found" warnings.
146 | import logging
147 | from logging import NullHandler
148 |
149 | logging.getLogger(__name__).addHandler(NullHandler())
150 |
151 | # FileModeWarnings go off per the default.
152 | warnings.simplefilter('default', FileModeWarning, append=True)
153 |
--------------------------------------------------------------------------------
/src/lib/requests/__version__.py:
--------------------------------------------------------------------------------
1 | # .-. .-. .-. . . .-. .-. .-. .-.
2 | # |( |- |.| | | |- `-. | `-.
3 | # ' ' `-' `-`.`-' `-' `-' ' `-'
4 |
5 | __title__ = 'requests'
6 | __description__ = 'Python HTTP for Humans.'
7 | __url__ = 'https://requests.readthedocs.io'
8 | __version__ = '2.27.1'
9 | __build__ = 0x022701
10 | __author__ = 'Kenneth Reitz'
11 | __author_email__ = 'me@kennethreitz.org'
12 | __license__ = 'Apache 2.0'
13 | __copyright__ = 'Copyright 2022 Kenneth Reitz'
14 | __cake__ = u'\u2728 \U0001f370 \u2728'
15 |
--------------------------------------------------------------------------------
/src/lib/requests/_internal_utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests._internal_utils
5 | ~~~~~~~~~~~~~~
6 |
7 | Provides utility functions that are consumed internally by Requests
8 | which depend on extremely few external helpers (such as compat)
9 | """
10 |
11 | from .compat import is_py2, builtin_str, str
12 |
13 |
def to_native_string(string, encoding='ascii'):
    """Given a string object, regardless of type, returns a representation of
    that string in the native string type, encoding and decoding where
    necessary. This assumes ASCII unless told otherwise.
    """
    if isinstance(string, builtin_str):
        return string
    # Py2's native string is bytes (so encode text); Py3's is text (so decode bytes).
    if is_py2:
        return string.encode(encoding)
    return string.decode(encoding)
28 |
29 |
def unicode_is_ascii(u_string):
    """Determine if unicode string only contains ASCII characters.

    :param str u_string: unicode string to check. Must be unicode
        and not Python 2 `str`.
    :rtype: bool
    """
    assert isinstance(u_string, str)
    try:
        u_string.encode('ascii')
    except UnicodeEncodeError:
        return False
    return True
43 |
--------------------------------------------------------------------------------
/src/lib/requests/api.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.api
5 | ~~~~~~~~~~~~
6 |
7 | This module implements the Requests API.
8 |
9 | :copyright: (c) 2012 by Kenneth Reitz.
10 | :license: Apache2, see LICENSE for more details.
11 | """
12 |
13 | from . import sessions
14 |
15 |
def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object: ``GET``,
        ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the
        body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the
        :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the
        :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects``
        (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple
        ``('filename', fileobj, 'content_type')`` or a 4-tuple
        ``('filename', fileobj, 'content_type', custom_headers)``.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send
        data before giving up, as a float, or a ``(connect timeout, read
        timeout)`` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable redirection.
        Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether
        we verify the server's TLS certificate, or a string, in which case it
        must be a path to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be
        immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem).
        If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> requests.request('GET', 'https://httpbin.org/get')
      <Response [200]>
    """
    # A throwaway Session scoped to this call guarantees the underlying
    # sockets are closed on exit, avoiding ResourceWarnings and what can
    # otherwise look like a memory leak.
    session = sessions.Session()
    with session:
        return session.request(method=method, url=url, **kwargs)
62 |
63 |
def get(url, params=None, **kwargs):
    r"""Sends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('get', url, params=params, **kwargs)
76 |
77 |
def options(url, **kwargs):
    r"""Sends an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('options', url, **kwargs)
88 |
89 |
def head(url, **kwargs):
    r"""Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes. If
        `allow_redirects` is not provided, it will be set to `False` (as
        opposed to the default :meth:`request` behavior).
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # Unlike the other verbs, HEAD does not follow redirects by default.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return request('head', url, **kwargs)
103 |
104 |
def post(url, data=None, json=None, **kwargs):
    r"""Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('post', url, data=data, json=json, **kwargs)
118 |
119 |
def put(url, data=None, **kwargs):
    r"""Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the
        :class:`Request` (forwarded via ``kwargs``).
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('put', url, data=data, **kwargs)
133 |
134 |
def patch(url, data=None, **kwargs):
    r"""Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the
        :class:`Request` (forwarded via ``kwargs``).
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('patch', url, data=data, **kwargs)
148 |
149 |
def delete(url, **kwargs):
    r"""Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('delete', url, **kwargs)
160 |
--------------------------------------------------------------------------------
/src/lib/requests/certs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | """
5 | requests.certs
6 | ~~~~~~~~~~~~~~
7 |
8 | This module returns the preferred default CA certificate bundle. There is
9 | only one — the one from the certifi package.
10 |
11 | If you are packaging Requests, e.g., for a Linux distribution or a managed
12 | environment, you can change the definition of where() to return a separately
13 | packaged CA bundle.
14 | """
15 | from certifi import where
16 |
if __name__ == '__main__':
    # Run as a script: print the path of the CA bundle certifi provides.
    print(where())
19 |
--------------------------------------------------------------------------------
/src/lib/requests/compat.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.compat
5 | ~~~~~~~~~~~~~~~
6 |
7 | This module handles import compatibility issues between Python 2 and
8 | Python 3.
9 | """
10 |
11 | try:
12 | import chardet
13 | except ImportError:
14 | import charset_normalizer as chardet
15 |
16 | import sys
17 |
18 | # -------
19 | # Pythons
20 | # -------
21 |
22 | # Syntax sugar.
23 | _ver = sys.version_info
24 |
25 | #: Python 2.x?
26 | is_py2 = (_ver[0] == 2)
27 |
28 | #: Python 3.x?
29 | is_py3 = (_ver[0] == 3)
30 |
31 | has_simplejson = False
32 | try:
33 | import simplejson as json
34 | has_simplejson = True
35 | except ImportError:
36 | import json
37 |
38 | # ---------
39 | # Specifics
40 | # ---------
41 |
if is_py2:
    # Python 2: pull the pre-3.x module layout together under the names
    # the rest of requests uses.
    from urllib import (
        quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
        proxy_bypass, proxy_bypass_environment, getproxies_environment)
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
    from urllib2 import parse_http_list
    import cookielib
    from Cookie import Morsel
    from StringIO import StringIO
    # Keep OrderedDict for backwards compatibility.
    from collections import Callable, Mapping, MutableMapping, OrderedDict

    # Rebind the text/bytes names: on Python 2 the native `str` is a byte
    # string, so `str` is pointed at `unicode` for text handling.
    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)
    integer_types = (int, long)
    # Python 2's json raises ValueError directly; alias it so callers can
    # catch the same name on both interpreters.
    JSONDecodeError = ValueError

elif is_py3:
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
    from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
    from http import cookiejar as cookielib
    from http.cookies import Morsel
    from io import StringIO
    # Keep OrderedDict for backwards compatibility.
    from collections import OrderedDict
    from collections.abc import Callable, Mapping, MutableMapping
    # simplejson, when available, was imported above; use the matching
    # JSONDecodeError type so except clauses catch the right exception.
    if has_simplejson:
        from simplejson import JSONDecodeError
    else:
        from json import JSONDecodeError

    # On Python 3 the native `str` is already text; these are identity
    # bindings kept so downstream code can import them uniformly.
    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
    integer_types = (int,)
82 |
--------------------------------------------------------------------------------
/src/lib/requests/exceptions.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.exceptions
5 | ~~~~~~~~~~~~~~~~~~~
6 |
7 | This module contains the set of Requests' exceptions.
8 | """
9 | from urllib3.exceptions import HTTPError as BaseHTTPError
10 |
11 | from .compat import JSONDecodeError as CompatJSONDecodeError
12 |
13 |
class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        response = kwargs.pop('response', None)
        request = kwargs.pop('request', None)
        self.response = response
        self.request = request
        if response is not None and not request and hasattr(response, 'request'):
            # No request was given explicitly; fall back to the one
            # recorded on the response object.
            self.request = response.request
        super(RequestException, self).__init__(*args, **kwargs)
28 |
29 |
class InvalidJSONError(RequestException):
    """A JSON error occurred."""


class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
    """Couldn't decode the text into json"""
    # Also inherits the compat JSONDecodeError, so callers may catch either
    # requests' exception or the underlying json module's decode error.


class HTTPError(RequestException):
    """An HTTP error occurred."""


class ConnectionError(RequestException):
    """A Connection error occurred."""


class ProxyError(ConnectionError):
    """A proxy error occurred."""


class SSLError(ConnectionError):
    """An SSL error occurred."""


class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """


class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    """


class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time."""


class URLRequired(RequestException):
    """A valid URL is required to make a request."""


class TooManyRedirects(RequestException):
    """Too many redirects."""


class MissingSchema(RequestException, ValueError):
    """The URL scheme (e.g. http or https) is missing."""


class InvalidSchema(RequestException, ValueError):
    """The URL scheme provided is either invalid or unsupported."""


class InvalidURL(RequestException, ValueError):
    """The URL provided was somehow invalid."""


class InvalidHeader(RequestException, ValueError):
    """The header value provided was somehow invalid."""


class InvalidProxyURL(InvalidURL):
    """The proxy URL provided is invalid."""


class ChunkedEncodingError(RequestException):
    """The server declared chunked encoding but sent an invalid chunk."""


class ContentDecodingError(RequestException, BaseHTTPError):
    """Failed to decode response content."""


class StreamConsumedError(RequestException, TypeError):
    """The content for this response was already consumed."""


class RetryError(RequestException):
    """Custom retries logic failed"""


class UnrewindableBodyError(RequestException):
    """Requests encountered an error when trying to rewind a body."""

# Warnings


class RequestsWarning(Warning):
    """Base warning for Requests."""


class FileModeWarning(RequestsWarning, DeprecationWarning):
    """A file was opened in text mode, but Requests determined its binary length."""


class RequestsDependencyWarning(RequestsWarning):
    """An imported dependency doesn't match the expected version range."""
134 |
--------------------------------------------------------------------------------
/src/lib/requests/help.py:
--------------------------------------------------------------------------------
1 | """Module containing bug report helper(s)."""
2 | from __future__ import print_function
3 |
4 | import json
5 | import platform
6 | import sys
7 | import ssl
8 |
9 | import idna
10 | import urllib3
11 |
12 | from . import __version__ as requests_version
13 |
14 | try:
15 | import charset_normalizer
16 | except ImportError:
17 | charset_normalizer = None
18 |
19 | try:
20 | import chardet
21 | except ImportError:
22 | chardet = None
23 |
24 | try:
25 | from urllib3.contrib import pyopenssl
26 | except ImportError:
27 | pyopenssl = None
28 | OpenSSL = None
29 | cryptography = None
30 | else:
31 | import OpenSSL
32 | import cryptography
33 |
34 |
35 | def _implementation():
36 | """Return a dict with the Python implementation and version.
37 |
38 | Provide both the name and the version of the Python implementation
39 | currently running. For example, on CPython 2.7.5 it will return
40 | {'name': 'CPython', 'version': '2.7.5'}.
41 |
42 | This function works best on CPython and PyPy: in particular, it probably
43 | doesn't work for Jython or IronPython. Future investigation should be done
44 | to work out the correct shape of the code for those platforms.
45 | """
46 | implementation = platform.python_implementation()
47 |
48 | if implementation == 'CPython':
49 | implementation_version = platform.python_version()
50 | elif implementation == 'PyPy':
51 | implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
52 | sys.pypy_version_info.minor,
53 | sys.pypy_version_info.micro)
54 | if sys.pypy_version_info.releaselevel != 'final':
55 | implementation_version = ''.join([
56 | implementation_version, sys.pypy_version_info.releaselevel
57 | ])
58 | elif implementation == 'Jython':
59 | implementation_version = platform.python_version() # Complete Guess
60 | elif implementation == 'IronPython':
61 | implementation_version = platform.python_version() # Complete Guess
62 | else:
63 | implementation_version = 'Unknown'
64 |
65 | return {'name': implementation, 'version': implementation_version}
66 |
67 |
def info():
    """Generate information for a bug report."""
    try:
        platform_info = {
            'system': platform.system(),
            'release': platform.release(),
        }
    except IOError:
        # Some restricted environments raise while probing the platform.
        platform_info = {'system': 'Unknown', 'release': 'Unknown'}

    chardet_info = {'version': chardet.__version__ if chardet else None}
    charset_normalizer_info = {
        'version': charset_normalizer.__version__ if charset_normalizer else None,
    }

    if OpenSSL:
        pyopenssl_info = {
            'version': OpenSSL.__version__,
            'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
        }
    else:
        pyopenssl_info = {'version': None, 'openssl_version': ''}

    system_ssl = ssl.OPENSSL_VERSION_NUMBER
    system_ssl_info = {
        'version': '%x' % system_ssl if system_ssl is not None else ''
    }

    return {
        'platform': platform_info,
        'implementation': _implementation(),
        'system_ssl': system_ssl_info,
        'using_pyopenssl': pyopenssl is not None,
        # Charset-normalizer is in use exactly when chardet is absent (see
        # the import fallbacks at module top).
        'using_charset_normalizer': chardet is None,
        'pyOpenSSL': pyopenssl_info,
        'urllib3': {'version': urllib3.__version__},
        'chardet': chardet_info,
        'charset_normalizer': charset_normalizer_info,
        'cryptography': {'version': getattr(cryptography, '__version__', '')},
        'idna': {'version': getattr(idna, '__version__', '')},
        'requests': {
            'version': requests_version,
        },
    }
127 |
128 |
def main():
    """Pretty-print the bug information as JSON."""
    report = info()
    print(json.dumps(report, indent=2, sort_keys=True))
132 |
133 |
134 | if __name__ == '__main__':
135 | main()
136 |
--------------------------------------------------------------------------------
/src/lib/requests/hooks.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.hooks
5 | ~~~~~~~~~~~~~~
6 |
7 | This module provides the capabilities for the Requests hooks system.
8 |
9 | Available hooks:
10 |
11 | ``response``:
12 | The response generated from a Request.
13 | """
HOOKS = ['response']


def default_hooks():
    """Return a fresh hook registry: an empty list per known hook event."""
    return dict((event, []) for event in HOOKS)
19 |
20 | # TODO: response is the only one
21 |
22 |
def dispatch_hook(key, hooks, hook_data, **kwargs):
    """Dispatches a hook dictionary on a given piece of data.

    :param key: hook event name to dispatch (e.g. ``'response'``).
    :param hooks: mapping of event name to a callable or list of callables;
        may be None or empty.
    :param hook_data: data passed through each hook in turn.
    :param kwargs: extra keyword arguments forwarded to every hook.
    :return: the (possibly transformed) ``hook_data``.
    """
    hooks = (hooks or {}).get(key)
    if hooks:
        # A bare callable is accepted as a convenience; normalize to a list.
        # (`callable()` is the idiomatic form of `hasattr(x, '__call__')`.)
        if callable(hooks):
            hooks = [hooks]
        for hook in hooks:
            _hook_data = hook(hook_data, **kwargs)
            # Only a non-None return value replaces the running data.
            if _hook_data is not None:
                hook_data = _hook_data
    return hook_data
35 |
--------------------------------------------------------------------------------
/src/lib/requests/packages.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | try:
4 | import chardet
5 | except ImportError:
6 | import charset_normalizer as chardet
7 | import warnings
8 |
9 | warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')
10 |
# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)

for package in ('urllib3', 'idna'):
    locals()[package] = __import__(package)
    # This traversal is apparently necessary such that the identities are
    # preserved (requests.packages.urllib3.* is urllib3.*)
    for mod in list(sys.modules):
        if mod == package or mod.startswith(package + '.'):
            sys.modules['requests.packages.' + mod] = sys.modules[mod]

# Alias chardet (or its charset_normalizer stand-in) under
# requests.packages.chardet.*. BUG FIX: the original used
# `target.replace(target, 'chardet')`, which always evaluates to the bare
# string 'chardet', so only the top-level package was ever aliased and
# submodules were silently dropped. Replacing within `mod` aliases every
# submodule as well (this matches the upstream requests fix).
target = chardet.__name__
for mod in list(sys.modules):
    if mod == target or mod.startswith(target + '.'):
        sys.modules['requests.packages.' + mod.replace(target, 'chardet')] = sys.modules[mod]
# Kinda cool, though, right?
26 | # Kinda cool, though, right?
27 |
--------------------------------------------------------------------------------
/src/lib/requests/status_codes.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | r"""
4 | The ``codes`` object defines a mapping from common names for HTTP statuses
5 | to their numerical codes, accessible either as attributes or as dictionary
6 | items.
7 |
8 | Example::
9 |
10 | >>> import requests
11 | >>> requests.codes['temporary_redirect']
12 | 307
13 | >>> requests.codes.teapot
14 | 418
15 | >>> requests.codes['\o/']
16 | 200
17 |
18 | Some codes have multiple names, and both upper- and lower-case versions of
19 | the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
20 | ``codes.okay`` all correspond to the HTTP status code 200.
21 | """
22 |
23 | from .structures import LookupDict
24 |
# Canonical map of HTTP status code -> tuple of attribute-friendly names.
# The first name in each tuple is the preferred one; the rest are aliases.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('already_reported',),
    226: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('permanent_redirect',
          'resume_incomplete', 'resume',),  # These 2 to be removed in 3.0

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    421: ('misdirected_request',),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    # NOTE(review): 'precondition' is also an alias for 412 above; _init()
    # assigns in dict order, so codes.precondition ends up pointing at 428.
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
}

# Attribute-style lookup object, populated by _init() below.
codes = LookupDict(name='status_codes')
106 |
def _init():
    # Expose every name as an attribute on `codes`, e.g. codes.ok == 200.
    for code, titles in _codes.items():
        for title in titles:
            setattr(codes, title, code)
            # Also provide the upper-case alias (codes.OK == codes.ok),
            # except for the ASCII-art names, which start with '\' or '/'.
            if not title.startswith(('\\', '/')):
                setattr(codes, title.upper(), code)

    def doc(code):
        # One reST bullet line listing all names for a given code.
        names = ', '.join('``%s``' % n for n in _codes[code])
        return '* %d: %s' % (code, names)

    # Append the generated code listing to the module docstring so the
    # rendered docs enumerate every supported name.
    global __doc__
    __doc__ = (__doc__ + '\n' +
               '\n'.join(doc(code) for code in sorted(_codes))
               if __doc__ is not None else None)

_init()
124 |
--------------------------------------------------------------------------------
/src/lib/requests/structures.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.structures
5 | ~~~~~~~~~~~~~~~~~~~
6 |
7 | Data structures that power Requests.
8 | """
9 |
10 | from collections import OrderedDict
11 |
12 | from .compat import Mapping, MutableMapping
13 |
14 |
class CaseInsensitiveDict(MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of ``MutableMapping`` as well as
    dict's ``copy``, and additionally provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the case
    of the last key to be set; iteration, ``keys()``, and ``items()`` yield
    that original casing, while lookup and membership tests ignore case::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    So ``headers['content-encoding']`` returns the value of a
    ``'Content-Encoding'`` response header regardless of how the header
    name was originally stored. Behavior is undefined if the constructor,
    ``.update``, or equality comparison receive keys whose ``.lower()``s
    collide.
    """

    def __init__(self, data=None, **kwargs):
        # Maps lowercased key -> (original-cased key, value).
        self._store = OrderedDict()
        self.update({} if data is None else data, **kwargs)

    def __setitem__(self, key, value):
        # Index by the lowercased key but remember the caller's casing.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        for cased_key, _ in self._store.values():
            yield cased_key

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return ((lower, pair[1]) for lower, pair in self._store.items())

    def __eq__(self, other):
        if not isinstance(other, Mapping):
            return NotImplemented
        # Normalize the other mapping, then compare case-insensitively.
        other = CaseInsensitiveDict(other)
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        # The stored (original key, value) pairs rebuild an equal mapping.
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))
87 |
88 |
class LookupDict(dict):
    """Dictionary lookup object.

    Lookups read from the instance ``__dict__`` (i.e. attributes set on the
    object), not from the underlying dict storage, and a missing key yields
    None instead of raising KeyError.
    """

    def __init__(self, name=None):
        self.name = name
        super(LookupDict, self).__init__()

    def __repr__(self):
        # BUG FIX: the format string had been reduced to '' (the
        # "<lookup '%s'>" literal was lost), so '' % (self.name) raised
        # TypeError on every repr() call. Restore the intended repr.
        return '<lookup \'%s\'>' % (self.name)

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
106 |
--------------------------------------------------------------------------------
/src/lib/urllib3-1.26.9.dist-info/INSTALLER:
--------------------------------------------------------------------------------
1 | pip
2 |
--------------------------------------------------------------------------------
/src/lib/urllib3-1.26.9.dist-info/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/src/lib/urllib3-1.26.9.dist-info/RECORD:
--------------------------------------------------------------------------------
1 | urllib3-1.26.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2 | urllib3-1.26.9.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
3 | urllib3-1.26.9.dist-info/METADATA,sha256=UicmDwTLIrYL2O4fOBvq8wxMMAwM2L8JYP75jEXt8DQ,46325
4 | urllib3-1.26.9.dist-info/RECORD,,
5 | urllib3-1.26.9.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110
6 | urllib3-1.26.9.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
7 | urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763
8 | urllib3/__pycache__/__init__.cpython-39.pyc,,
9 | urllib3/__pycache__/_collections.cpython-39.pyc,,
10 | urllib3/__pycache__/_version.cpython-39.pyc,,
11 | urllib3/__pycache__/connection.cpython-39.pyc,,
12 | urllib3/__pycache__/connectionpool.cpython-39.pyc,,
13 | urllib3/__pycache__/exceptions.cpython-39.pyc,,
14 | urllib3/__pycache__/fields.cpython-39.pyc,,
15 | urllib3/__pycache__/filepost.cpython-39.pyc,,
16 | urllib3/__pycache__/poolmanager.cpython-39.pyc,,
17 | urllib3/__pycache__/request.cpython-39.pyc,,
18 | urllib3/__pycache__/response.cpython-39.pyc,,
19 | urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
20 | urllib3/_version.py,sha256=WE7GLYd0IVwgk-1gQZ-7jw00bCUYjYTIlcWIk7NOhEM,63
21 | urllib3/connection.py,sha256=mMuCIjdG01kRpFUENwJRoDKmYer7CZO56pfTbBCS7cw,20070
22 | urllib3/connectionpool.py,sha256=qz-ICrW6g4TZVCbDQ8fRe68BMpXkskkR9vAVY9zUWtA,39013
23 | urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24 | urllib3/contrib/__pycache__/__init__.cpython-39.pyc,,
25 | urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc,,
26 | urllib3/contrib/__pycache__/appengine.cpython-39.pyc,,
27 | urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc,,
28 | urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc,,
29 | urllib3/contrib/__pycache__/securetransport.cpython-39.pyc,,
30 | urllib3/contrib/__pycache__/socks.cpython-39.pyc,,
31 | urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
32 | urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
33 | urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc,,
34 | urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc,,
35 | urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc,,
36 | urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632
37 | urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922
38 | urllib3/contrib/appengine.py,sha256=jz515jZYBDFTnhR4zqfeaCo6JdDgAQqYbqzHK9sDkfw,11010
39 | urllib3/contrib/ntlmpool.py,sha256=ej9gGvfAb2Gt00lafFp45SIoRz-QwrQ4WChm6gQmAlM,4538
40 | urllib3/contrib/pyopenssl.py,sha256=YIMyTiXiLPV_QfFw3PjZ31mGqJmM5EzxIjhSLxZ7VUM,16874
41 | urllib3/contrib/securetransport.py,sha256=izdx43gFoUGFSgxasZlOCL42FaM4vSsAVTmhO0EH1vM,34417
42 | urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097
43 | urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
44 | urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
45 | urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
46 | urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
47 | urllib3/packages/__pycache__/__init__.cpython-39.pyc,,
48 | urllib3/packages/__pycache__/six.cpython-39.pyc,,
49 | urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
50 | urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc,,
51 | urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc,,
52 | urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
53 | urllib3/packages/six.py,sha256=1LVW7ljqRirFlfExjwl-v1B7vSAUNTmzGMs-qays2zg,34666
54 | urllib3/poolmanager.py,sha256=0KOOJECoeLYVjUHvv-0h4Oq3FFQQ2yb-Fnjkbj8gJO0,19786
55 | urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
56 | urllib3/response.py,sha256=9b5NrbzHDnC2l_QC9uuNQPv75is1qgLa7M3Ax4Zr9z8,28276
57 | urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
58 | urllib3/util/__pycache__/__init__.cpython-39.pyc,,
59 | urllib3/util/__pycache__/connection.cpython-39.pyc,,
60 | urllib3/util/__pycache__/proxy.cpython-39.pyc,,
61 | urllib3/util/__pycache__/queue.cpython-39.pyc,,
62 | urllib3/util/__pycache__/request.cpython-39.pyc,,
63 | urllib3/util/__pycache__/response.cpython-39.pyc,,
64 | urllib3/util/__pycache__/retry.cpython-39.pyc,,
65 | urllib3/util/__pycache__/ssl_.cpython-39.pyc,,
66 | urllib3/util/__pycache__/ssl_match_hostname.cpython-39.pyc,,
67 | urllib3/util/__pycache__/ssltransport.cpython-39.pyc,,
68 | urllib3/util/__pycache__/timeout.cpython-39.pyc,,
69 | urllib3/util/__pycache__/url.cpython-39.pyc,,
70 | urllib3/util/__pycache__/wait.cpython-39.pyc,,
71 | urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901
72 | urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605
73 | urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
74 | urllib3/util/request.py,sha256=fWiAaa8pwdLLIqoTLBxCC2e4ed80muzKU3e3HWWTzFQ,4225
75 | urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
76 | urllib3/util/retry.py,sha256=iESg2PvViNdXBRY4MpL4h0kqwOOkHkxmLn1kkhFHPU8,22001
77 | urllib3/util/ssl_.py,sha256=c0sYiSC6272r6uPkxQpo5rYPP9QC1eR6oI7004gYqZo,17165
78 | urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758
79 | urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895
80 | urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
81 | urllib3/util/url.py,sha256=au9jkUMnVr9Qp_9kg4HfZx9q9ur6yXQ4u5M17In-UKY,14030
82 | urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
83 |
--------------------------------------------------------------------------------
/src/lib/urllib3-1.26.9.dist-info/WHEEL:
--------------------------------------------------------------------------------
1 | Wheel-Version: 1.0
2 | Generator: bdist_wheel (0.37.1)
3 | Root-Is-Purelib: true
4 | Tag: py2-none-any
5 | Tag: py3-none-any
6 |
7 |
--------------------------------------------------------------------------------
/src/lib/urllib3-1.26.9.dist-info/top_level.txt:
--------------------------------------------------------------------------------
1 | urllib3
2 |
--------------------------------------------------------------------------------
/src/lib/urllib3/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
3 | """
4 | from __future__ import absolute_import
5 |
6 | # Set default logging handler to avoid "No handler found" warnings.
7 | import logging
8 | import warnings
9 | from logging import NullHandler
10 |
11 | from . import exceptions
12 | from ._version import __version__
13 | from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
14 | from .filepost import encode_multipart_formdata
15 | from .poolmanager import PoolManager, ProxyManager, proxy_from_url
16 | from .response import HTTPResponse
17 | from .util.request import make_headers
18 | from .util.retry import Retry
19 | from .util.timeout import Timeout
20 | from .util.url import get_host
21 |
22 | __author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
23 | __license__ = "MIT"
24 | __version__ = __version__
25 |
26 | __all__ = (
27 | "HTTPConnectionPool",
28 | "HTTPSConnectionPool",
29 | "PoolManager",
30 | "ProxyManager",
31 | "HTTPResponse",
32 | "Retry",
33 | "Timeout",
34 | "add_stderr_logger",
35 | "connection_from_url",
36 | "disable_warnings",
37 | "encode_multipart_formdata",
38 | "get_host",
39 | "make_headers",
40 | "proxy_from_url",
41 | )
42 |
43 | logging.getLogger(__name__).addHandler(NullHandler())
44 |
45 |
def add_stderr_logger(level=logging.DEBUG):
    """
    Attach a ``StreamHandler`` (stderr) to this package's logger.

    Useful for quickly turning on urllib3 debug output.

    :param level: logging level to set on the package logger.
    :returns: the newly created and attached handler.
    """
    # Defined in this __init__.py (not a helper module) so that ``__name__``
    # resolves to the package name even when urllib3 is vendored inside
    # another project.
    pkg_logger = logging.getLogger(__name__)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(
        logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    )
    pkg_logger.addHandler(stream_handler)
    pkg_logger.setLevel(level)
    pkg_logger.debug("Added a stderr logging handler to logger: %s", __name__)
    return stream_handler
62 |
63 |
# ... Clean up.
# NullHandler was only needed to initialize the package logger above; drop it
# from the module namespace.
del NullHandler


# All warning filters *must* be appended unless you're really certain that they
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
# SNIMissingWarnings should go off only once.
warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
79 |
80 |
def disable_warnings(category=exceptions.HTTPWarning):
    """
    Helper for quickly disabling all urllib3 warnings.
    """
    # Installs an "ignore" filter for *category* and its subclasses; the
    # default HTTPWarning base silences every urllib3-issued warning at once.
    warnings.simplefilter("ignore", category)
86 |
--------------------------------------------------------------------------------
/src/lib/urllib3/_version.py:
--------------------------------------------------------------------------------
1 | # This file is protected via CODEOWNERS
2 | __version__ = "1.26.9"
3 |
--------------------------------------------------------------------------------
/src/lib/urllib3/contrib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/lib/urllib3/contrib/__init__.py
--------------------------------------------------------------------------------
/src/lib/urllib3/contrib/_appengine_environ.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides means to detect the App Engine environment.
3 | """
4 |
5 | import os
6 |
7 |
def is_appengine():
    """Return True when running on App Engine (dev server or production)."""
    if is_local_appengine():
        return True
    return is_prod_appengine()
10 |
11 |
def is_appengine_sandbox():
    """Reports if the app is running in the first generation sandbox.

    The second generation runtimes are technically still in a sandbox, but it
    is much less restrictive, so generally you shouldn't need to check for it.
    see https://cloud.google.com/appengine/docs/standard/runtimes
    """
    # Only the python27 runtime is the first-generation sandbox.
    if not is_appengine():
        return False
    return os.environ["APPENGINE_RUNTIME"] == "python27"
20 |
21 |
def is_local_appengine():
    """Return True when running under the App Engine development server."""
    runtime_present = "APPENGINE_RUNTIME" in os.environ
    server_software = os.environ.get("SERVER_SOFTWARE", "")
    return runtime_present and server_software.startswith("Development/")
26 |
27 |
def is_prod_appengine():
    """Return True when running on the production App Engine service."""
    if "APPENGINE_RUNTIME" not in os.environ:
        return False
    server_software = os.environ.get("SERVER_SOFTWARE", "")
    return server_software.startswith("Google App Engine/")
32 |
33 |
def is_prod_appengine_mvms():
    """Deprecated."""
    # Managed VMs detection is retired; kept only for backwards compatibility
    # and always reports False.
    return False
37 |
--------------------------------------------------------------------------------
/src/lib/urllib3/contrib/_securetransport/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/lib/urllib3/contrib/_securetransport/__init__.py
--------------------------------------------------------------------------------
/src/lib/urllib3/contrib/ntlmpool.py:
--------------------------------------------------------------------------------
1 | """
2 | NTLM authenticating pool, contributed by erikcederstran
3 |
4 | Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
5 | """
6 | from __future__ import absolute_import
7 |
8 | import warnings
9 | from logging import getLogger
10 |
11 | from ntlm import ntlm
12 |
13 | from .. import HTTPSConnectionPool
14 | from ..packages.six.moves.http_client import HTTPSConnection
15 |
16 | warnings.warn(
17 | "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
18 | "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
19 | "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
20 | "If you are a user of this module please comment in the mentioned issue.",
21 | DeprecationWarning,
22 | )
23 |
24 | log = getLogger(__name__)
25 |
26 |
class NTLMConnectionPool(HTTPSConnectionPool):
    """
    Implements an NTLM authentication version of an urllib3 connection pool
    """

    scheme = "https"

    def __init__(self, user, pw, authurl, *args, **kwargs):
        """
        authurl is a random URL on the server that is protected by NTLM.
        user is the Windows user, probably in the DOMAIN\\username format.
        pw is the password for the user.
        """
        super(NTLMConnectionPool, self).__init__(*args, **kwargs)
        self.authurl = authurl
        self.rawuser = user
        # Split "DOMAIN\username" into its two halves; the domain part is
        # upper-cased before being sent in the NTLM messages.
        user_parts = user.split("\\", 1)
        self.domain = user_parts[0].upper()
        self.user = user_parts[1]
        self.pw = pw

    def _new_conn(self):
        # Performs the NTLM handshake that secures the connection. The socket
        # must be kept open while requests are performed.
        self.num_connections += 1
        log.debug(
            "Starting NTLM HTTPS connection no. %d: https://%s%s",
            self.num_connections,
            self.host,
            self.authurl,
        )

        headers = {"Connection": "Keep-Alive"}
        req_header = "Authorization"
        # NOTE(review): the response-header lookup below is case-sensitive;
        # this assumes the server emits "www-authenticate" in lower case —
        # confirm against the servers this pool targets.
        resp_header = "www-authenticate"

        conn = HTTPSConnection(host=self.host, port=self.port)

        # Send negotiation message
        headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
            self.rawuser
        )
        log.debug("Request headers: %s", headers)
        conn.request("GET", self.authurl, None, headers)
        res = conn.getresponse()
        reshdr = dict(res.getheaders())
        log.debug("Response status: %s %s", res.status, res.reason)
        log.debug("Response headers: %s", reshdr)
        log.debug("Response data: %s [...]", res.read(100))

        # Remove the reference to the socket, so that it can not be closed by
        # the response object (we want to keep the socket open)
        res.fp = None

        # Server should respond with a challenge message
        auth_header_values = reshdr[resp_header].split(", ")
        auth_header_value = None
        for s in auth_header_values:
            if s[:5] == "NTLM ":
                auth_header_value = s[5:]
        if auth_header_value is None:
            raise Exception(
                "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
            )

        # Send authentication message
        ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
            auth_header_value
        )
        auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
            ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
        )
        headers[req_header] = "NTLM %s" % auth_msg
        log.debug("Request headers: %s", headers)
        conn.request("GET", self.authurl, None, headers)
        res = conn.getresponse()
        log.debug("Response status: %s %s", res.status, res.reason)
        log.debug("Response headers: %s", dict(res.getheaders()))
        log.debug("Response data: %s [...]", res.read()[:100])
        if res.status != 200:
            if res.status == 401:
                raise Exception("Server rejected request: wrong username or password")
            raise Exception("Wrong server response: %s %s" % (res.status, res.reason))

        # Keep the authenticated socket alive for reuse by the pool.
        res.fp = None
        log.debug("Connection established")
        return conn

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=3,
        redirect=True,
        assert_same_host=True,
    ):
        # Force Keep-Alive so the NTLM-authenticated socket is reused;
        # closing it would discard the handshake performed in _new_conn.
        if headers is None:
            headers = {}
        headers["Connection"] = "Keep-Alive"
        return super(NTLMConnectionPool, self).urlopen(
            method, url, body, headers, retries, redirect, assert_same_host
        )
131 |
--------------------------------------------------------------------------------
/src/lib/urllib3/contrib/socks.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | This module contains provisional support for SOCKS proxies from within
4 | urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
5 | SOCKS5. To enable its functionality, either install PySocks or install this
6 | module with the ``socks`` extra.
7 |
8 | The SOCKS implementation supports the full range of urllib3 features. It also
9 | supports the following SOCKS features:
10 |
11 | - SOCKS4A (``proxy_url='socks4a://...``)
12 | - SOCKS4 (``proxy_url='socks4://...``)
13 | - SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
14 | - SOCKS5 with local DNS (``proxy_url='socks5://...``)
15 | - Usernames and passwords for the SOCKS proxy
16 |
17 | .. note::
18 | It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
19 | your ``proxy_url`` to ensure that DNS resolution is done from the remote
20 | server instead of client-side when connecting to a domain name.
21 |
22 | SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
23 | supports IPv4, IPv6, and domain names.
24 |
25 | When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
26 | will be sent as the ``userid`` section of the SOCKS request:
27 |
28 | .. code-block:: python
29 |
30 | proxy_url="socks4a://@proxy-host"
31 |
32 | When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
33 | of the ``proxy_url`` will be sent as the username/password to authenticate
34 | with the proxy:
35 |
36 | .. code-block:: python
37 |
38 | proxy_url="socks5h://:@proxy-host"
39 |
40 | """
41 | from __future__ import absolute_import
42 |
43 | try:
44 | import socks
45 | except ImportError:
46 | import warnings
47 |
48 | from ..exceptions import DependencyWarning
49 |
50 | warnings.warn(
51 | (
52 | "SOCKS support in urllib3 requires the installation of optional "
53 | "dependencies: specifically, PySocks. For more information, see "
54 | "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
55 | ),
56 | DependencyWarning,
57 | )
58 | raise
59 |
60 | from socket import error as SocketError
61 | from socket import timeout as SocketTimeout
62 |
63 | from ..connection import HTTPConnection, HTTPSConnection
64 | from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
65 | from ..exceptions import ConnectTimeoutError, NewConnectionError
66 | from ..poolmanager import PoolManager
67 | from ..util.url import parse_url
68 |
69 | try:
70 | import ssl
71 | except ImportError:
72 | ssl = None
73 |
74 |
class SOCKSConnection(HTTPConnection):
    """
    A plain-text HTTP connection that connects via a SOCKS proxy.
    """

    def __init__(self, *args, **kwargs):
        # Pop our private options before HTTPConnection sees the kwargs; the
        # parent constructor does not expect a "_socks_options" key.
        self._socks_options = kwargs.pop("_socks_options")
        super(SOCKSConnection, self).__init__(*args, **kwargs)

    def _new_conn(self):
        """
        Establish a new connection via the SOCKS proxy.
        """
        extra_kw = {}
        if self.source_address:
            extra_kw["source_address"] = self.source_address

        if self.socket_options:
            extra_kw["socket_options"] = self.socket_options

        try:
            conn = socks.create_connection(
                (self.host, self.port),
                proxy_type=self._socks_options["socks_version"],
                proxy_addr=self._socks_options["proxy_host"],
                proxy_port=self._socks_options["proxy_port"],
                proxy_username=self._socks_options["username"],
                proxy_password=self._socks_options["password"],
                proxy_rdns=self._socks_options["rdns"],
                timeout=self.timeout,
                **extra_kw
            )

        except SocketTimeout:
            raise ConnectTimeoutError(
                self,
                "Connection to %s timed out. (connect timeout=%s)"
                % (self.host, self.timeout),
            )

        except socks.ProxyError as e:
            # This is fragile as hell, but it seems to be the only way to raise
            # useful errors here.
            if e.socket_err:
                error = e.socket_err
                if isinstance(error, SocketTimeout):
                    # A timeout wrapped inside the ProxyError is still a
                    # connect timeout from urllib3's point of view.
                    raise ConnectTimeoutError(
                        self,
                        "Connection to %s timed out. (connect timeout=%s)"
                        % (self.host, self.timeout),
                    )
                else:
                    raise NewConnectionError(
                        self, "Failed to establish a new connection: %s" % error
                    )
            else:
                raise NewConnectionError(
                    self, "Failed to establish a new connection: %s" % e
                )

        except SocketError as e:  # Defensive: PySocks should catch all these.
            raise NewConnectionError(
                self, "Failed to establish a new connection: %s" % e
            )

        return conn
141 |
142 |
143 | # We don't need to duplicate the Verified/Unverified distinction from
144 | # urllib3/connection.py here because the HTTPSConnection will already have been
145 | # correctly set to either the Verified or Unverified form by that module. This
146 | # means the SOCKSHTTPSConnection will automatically be the correct type.
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
    # Verified/unverified TLS handling is inherited from whichever class
    # urllib3/connection.py bound to HTTPSConnection; nothing extra needed.
    pass
149 |
150 |
class SOCKSHTTPConnectionPool(HTTPConnectionPool):
    # Plain-HTTP pool whose connections are tunneled through the SOCKS proxy.
    ConnectionCls = SOCKSConnection
153 |
154 |
class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
    # HTTPS pool whose connections are tunneled through the SOCKS proxy.
    ConnectionCls = SOCKSHTTPSConnection
157 |
158 |
class SOCKSProxyManager(PoolManager):
    """
    A version of the urllib3 ProxyManager that routes connections via the
    defined SOCKS proxy.
    """

    pool_classes_by_scheme = {
        "http": SOCKSHTTPConnectionPool,
        "https": SOCKSHTTPSConnectionPool,
    }

    def __init__(
        self,
        proxy_url,
        username=None,
        password=None,
        num_pools=10,
        headers=None,
        **connection_pool_kw
    ):
        parsed = parse_url(proxy_url)

        # Credentials embedded in the proxy URL are used only when none were
        # passed explicitly.
        if username is None and password is None and parsed.auth is not None:
            split = parsed.auth.split(":")
            if len(split) == 2:
                username, password = split
        # Map the URL scheme onto a PySocks protocol constant; the "h"/"a"
        # variants request remote (proxy-side) DNS resolution.
        if parsed.scheme == "socks5":
            socks_version = socks.PROXY_TYPE_SOCKS5
            rdns = False
        elif parsed.scheme == "socks5h":
            socks_version = socks.PROXY_TYPE_SOCKS5
            rdns = True
        elif parsed.scheme == "socks4":
            socks_version = socks.PROXY_TYPE_SOCKS4
            rdns = False
        elif parsed.scheme == "socks4a":
            socks_version = socks.PROXY_TYPE_SOCKS4
            rdns = True
        else:
            raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)

        self.proxy_url = proxy_url

        socks_options = {
            "socks_version": socks_version,
            "proxy_host": parsed.host,
            "proxy_port": parsed.port,
            "username": username,
            "password": password,
            "rdns": rdns,
        }
        # Forwarded to every SOCKSConnection the pools create (see
        # SOCKSConnection.__init__, which pops "_socks_options").
        connection_pool_kw["_socks_options"] = socks_options

        super(SOCKSProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw
        )

        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
217 |
--------------------------------------------------------------------------------
/src/lib/urllib3/exceptions.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
4 |
5 | # Base Exceptions
6 |
7 |
class HTTPError(Exception):
    """Base exception used by this module."""

    pass


class HTTPWarning(Warning):
    """Base warning used by this module."""

    pass


class PoolError(HTTPError):
    """Base exception for errors caused within a pool."""

    def __init__(self, pool, message):
        self.pool = pool
        HTTPError.__init__(self, "%s: %s" % (pool, message))

    def __reduce__(self):
        # For pickling purposes: the pool reference is dropped (replaced
        # with None) when the exception is pickled.
        return self.__class__, (None, None)
31 |
class RequestError(PoolError):
    """Base exception for PoolErrors that have associated URLs."""

    def __init__(self, pool, url, message):
        self.url = url
        PoolError.__init__(self, pool, message)

    def __reduce__(self):
        # For pickling purposes: keep the URL but drop the pool reference.
        return self.__class__, (None, self.url, None)


class SSLError(HTTPError):
    """Raised when SSL certificate fails in an HTTPS connection."""

    pass


class ProxyError(HTTPError):
    """Raised when the connection to a proxy fails."""

    def __init__(self, message, error, *args):
        super(ProxyError, self).__init__(message, error, *args)
        # Expose the underlying error so callers can inspect the root cause.
        self.original_error = error


class DecodeError(HTTPError):
    """Raised when automatic decoding based on Content-Type fails."""

    pass


class ProtocolError(HTTPError):
    """Raised when something unexpected happens mid-request/response."""

    pass


#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
74 | # Leaf Exceptions
75 |
76 |
class MaxRetryError(RequestError):
    """Raised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested Url
    :param exceptions.Exception reason: The underlying error

    """

    def __init__(self, pool, url, reason=None):
        # Kept as an attribute so callers can inspect the root cause.
        self.reason = reason

        message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)

        RequestError.__init__(self, pool, url, message)


class HostChangedError(RequestError):
    """Raised when an existing pool gets a request for a foreign host."""

    def __init__(self, pool, url, retries=3):
        message = "Tried to open a foreign host with url: %s" % url
        RequestError.__init__(self, pool, url, message)
        self.retries = retries
102 |
103 |
class TimeoutStateError(HTTPError):
    """Raised when passing an invalid state to a timeout"""

    pass


class TimeoutError(HTTPError):
    """Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    """

    pass


class ReadTimeoutError(TimeoutError, RequestError):
    """Raised when a socket timeout occurs while receiving data from a server"""

    pass


# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
    """Raised when a socket timeout occurs while connecting to a server"""

    pass


class NewConnectionError(ConnectTimeoutError, PoolError):
    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""

    pass


class EmptyPoolError(PoolError):
    """Raised when a pool runs out of connections and no more are allowed."""

    pass


class ClosedPoolError(PoolError):
    """Raised when a request enters a pool after the pool has been closed."""

    pass
150 |
151 |
class LocationValueError(ValueError, HTTPError):
    """Raised when there is something wrong with a given URL input."""

    pass


class LocationParseError(LocationValueError):
    """Raised when get_host or similar fails to parse the URL input."""

    def __init__(self, location):
        message = "Failed to parse: %s" % location
        # Calls HTTPError.__init__ directly so that only the formatted
        # message becomes the exception argument.
        HTTPError.__init__(self, message)

        self.location = location


class URLSchemeUnknown(LocationValueError):
    """Raised when a URL input has an unsupported scheme."""

    def __init__(self, scheme):
        message = "Not supported URL scheme %s" % scheme
        super(URLSchemeUnknown, self).__init__(message)

        self.scheme = scheme


class ResponseError(HTTPError):
    """Used as a container for an error reason supplied in a MaxRetryError."""

    # Message templates for the error reason.
    GENERIC_ERROR = "too many error responses"
    SPECIFIC_ERROR = "too many {status_code} error responses"
183 |
184 |
# All warning categories below derive from HTTPWarning (directly or via
# SecurityWarning), so a single filter on HTTPWarning covers them all.
class SecurityWarning(HTTPWarning):
    """Warned when performing security reducing actions"""

    pass


class SubjectAltNameWarning(SecurityWarning):
    """Warned when connecting to a host with a certificate missing a SAN."""

    pass


class InsecureRequestWarning(SecurityWarning):
    """Warned when making an unverified HTTPS request."""

    pass


class SystemTimeWarning(SecurityWarning):
    """Warned when system time is suspected to be wrong"""

    pass


class InsecurePlatformWarning(SecurityWarning):
    """Warned when certain TLS/SSL configuration is not available on a platform."""

    pass


class SNIMissingWarning(HTTPWarning):
    """Warned when making a HTTPS request without SNI available."""

    pass


class DependencyWarning(HTTPWarning):
    """
    Warned when an attempt is made to import a module with missing optional
    dependencies.
    """

    pass


class ResponseNotChunked(ProtocolError, ValueError):
    """Response needs to be chunked in order to read it as chunks."""

    pass


class BodyNotHttplibCompatible(HTTPError):
    """
    Body should be :class:`http.client.HTTPResponse` like
    (have an fp attribute which returns raw chunks) for read_chunked().
    """

    pass
243 |
244 |
class IncompleteRead(HTTPError, httplib_IncompleteRead):
    """
    Response length doesn't match expected Content-Length

    Subclass of :class:`http.client.IncompleteRead` to allow int value
    for ``partial`` to avoid creating large objects on streamed reads.
    """

    def __init__(self, partial, expected):
        super(IncompleteRead, self).__init__(partial, expected)

    def __repr__(self):
        # ``partial`` may be an int byte count here (not the bytes object
        # http.client stores), hence the custom %i formatting.
        return "IncompleteRead(%i bytes read, %i more expected)" % (
            self.partial,
            self.expected,
        )


class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
    """Invalid chunk length in a chunked response."""

    def __init__(self, response, length):
        # Initialize the IncompleteRead base with bytes read so far
        # (response.tell()) and bytes still expected (length_remaining).
        super(InvalidChunkLength, self).__init__(
            response.tell(), response.length_remaining
        )
        self.response = response
        self.length = length

    def __repr__(self):
        return "InvalidChunkLength(got length %r, %i bytes read)" % (
            self.length,
            self.partial,
        )
278 |
279 |
class InvalidHeader(HTTPError):
    """The header provided was somehow invalid."""

    pass


class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
    """ProxyManager does not support the supplied scheme"""

    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

    def __init__(self, scheme):
        # 'localhost' is here because our URL parser parses
        # localhost:8080 -> scheme=localhost, remove if we fix this.
        if scheme == "localhost":
            scheme = None
        if scheme is None:
            message = "Proxy URL had no scheme, should start with http:// or https://"
        else:
            message = (
                "Proxy URL had unsupported scheme %s, should use http:// or https://"
                % scheme
            )
        super(ProxySchemeUnknown, self).__init__(message)


class ProxySchemeUnsupported(ValueError):
    """Fetching HTTPS resources through HTTPS proxies is unsupported"""

    pass


class HeaderParsingError(HTTPError):
    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""

    def __init__(self, defects, unparsed_data):
        # Summarize the parser defects (or "Unknown") plus any leftover bytes.
        message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
        super(HeaderParsingError, self).__init__(message)


class UnrewindableBodyError(HTTPError):
    """urllib3 encountered an error when trying to rewind a body"""

    pass
324 |
--------------------------------------------------------------------------------
/src/lib/urllib3/fields.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import email.utils
4 | import mimetypes
5 | import re
6 |
7 | from .packages import six
8 |
9 |
def guess_content_type(filename, default="application/octet-stream"):
    """
    Guess the "Content-Type" of a file.

    :param filename:
        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
    :param default:
        If no "Content-Type" can be guessed, default to `default`.
    """
    # Empty/None filenames carry no extension information to guess from.
    if not filename:
        return default
    guessed_type, _ = mimetypes.guess_type(filename)
    return guessed_type or default
22 |
23 |
def format_header_param_rfc2231(name, value):
    """
    Helper function to format and quote a single header parameter using the
    strategy defined in RFC 2231.

    Particularly useful for header parameters which might contain
    non-ASCII values, like file names. This follows
    `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as ``bytes`` or ``str``.
    :ret:
        An RFC-2231-formatted unicode string.
    """
    if isinstance(value, six.binary_type):
        value = value.decode("utf-8")

    # Fast path: values free of quotes, backslashes and CR/LF that also
    # survive ASCII encoding can be emitted as a plain quoted string.
    if not any(ch in value for ch in '"\\\r\n'):
        result = u'%s="%s"' % (name, value)
        try:
            result.encode("ascii")
        except (UnicodeEncodeError, UnicodeDecodeError):
            pass
        else:
            return result

    if six.PY2:  # Python 2:
        value = value.encode("utf-8")

    # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
    # string in Python 2 but accepts and returns unicode strings in Python 3
    value = email.utils.encode_rfc2231(value, "utf-8")
    value = "%s*=%s" % (name, value)

    if six.PY2:  # Python 2:
        value = value.decode("utf-8")

    return value
64 |
65 |
# Character substitutions applied by ``format_header_param_html5`` when
# serializing a header parameter value.
_HTML5_REPLACEMENTS = {
    # Replace '"' with "%22".
    u"\u0022": u"%22",
    # Replace "\" with "\\".
    u"\u005C": u"\u005C\u005C",
}

# All control characters from 0x00 to 0x1F *except* 0x1B.
_HTML5_REPLACEMENTS.update(
    {
        six.unichr(cc): u"%{:02X}".format(cc)
        for cc in range(0x00, 0x1F + 1)
        if cc not in (0x1B,)
    }
)
80 |
81 |
82 | def _replace_multiple(value, needles_and_replacements):
83 | def replacer(match):
84 | return needles_and_replacements[match.group(0)]
85 |
86 | pattern = re.compile(
87 | r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
88 | )
89 |
90 | result = pattern.sub(replacer, value)
91 |
92 | return result
93 |
94 |
def format_header_param_html5(name, value):
    """
    Helper function to format and quote a single header parameter using the
    HTML5 strategy.

    Particularly useful for header parameters which might contain
    non-ASCII values, like file names. This follows the `HTML5 Working Draft
    Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.

    .. _HTML5 Working Draft Section 4.10.22.7:
        https://w3c.github.io/html/sec-forms.html#multipart-form-data

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as ``bytes`` or ``str``.
    :ret:
        A unicode string, stripped of troublesome characters.
    """
    # Raw bytes are assumed to be UTF-8 text.
    if isinstance(value, six.binary_type):
        value = value.decode("utf-8")

    sanitized = _replace_multiple(value, _HTML5_REPLACEMENTS)
    return u'%s="%s"' % (name, sanitized)


# For backwards-compatibility.
format_header_param = format_header_param_html5
124 |
125 |
class RequestField(object):
    """
    A data container for request body parameters.

    :param name:
        The name of this request field. Must be unicode.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field. Must be unicode.
    :param headers:
        An optional dict-like object of headers to initially use for the field.
    :param header_formatter:
        An optional callable that is used to encode and format the headers. By
        default, this is :func:`format_header_param_html5`.
    """

    def __init__(
        self,
        name,
        data,
        filename=None,
        headers=None,
        header_formatter=format_header_param_html5,
    ):
        self._name = name
        self._filename = filename
        self.data = data
        # Copy the caller's mapping so later mutation of this field's headers
        # does not affect the caller.
        self.headers = {}
        if headers:
            self.headers = dict(headers)
        self.header_formatter = header_formatter

    @classmethod
    def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
        """
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Supports constructing :class:`~urllib3.fields.RequestField` from
        parameter of key/value strings AND key/filetuple. A filetuple is a
        (filename, data, MIME type) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        """
        if isinstance(value, tuple):
            if len(value) == 3:
                filename, data, content_type = value
            else:
                # Two-tuple: the MIME type is guessed from the filename.
                filename, data = value
                content_type = guess_content_type(filename)
        else:
            # Bare value: a simple (non-file) form field.
            filename = None
            content_type = None
            data = value

        request_param = cls(
            fieldname, data, filename=filename, header_formatter=header_formatter
        )
        request_param.make_multipart(content_type=content_type)

        return request_param

    def _render_part(self, name, value):
        """
        Overridable helper function to format a single header parameter. By
        default, this calls ``self.header_formatter``.

        :param name:
            The name of the parameter, a string expected to be ASCII only.
        :param value:
            The value of the parameter, provided as a unicode string.
        """

        return self.header_formatter(name, value)

    def _render_parts(self, header_parts):
        """
        Helper function to format and quote a single header.

        Useful for single headers that are composed of multiple items. E.g.,
        'Content-Disposition' fields.

        :param header_parts:
            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
            as `k1="v1"; k2="v2"; ...`.
        """
        parts = []
        iterable = header_parts
        if isinstance(header_parts, dict):
            iterable = header_parts.items()

        for name, value in iterable:
            # ``None`` values are skipped entirely (e.g. a missing filename).
            if value is not None:
                parts.append(self._render_part(name, value))

        return u"; ".join(parts)

    def render_headers(self):
        """
        Renders the headers for this request field.
        """
        lines = []

        # Emit the well-known headers first, in a fixed order, then any
        # remaining custom headers.
        sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
        for sort_key in sort_keys:
            if self.headers.get(sort_key, False):
                lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))

        for header_name, header_value in self.headers.items():
            if header_name not in sort_keys:
                if header_value:
                    lines.append(u"%s: %s" % (header_name, header_value))

        # Trailing blank line terminates the header section.
        lines.append(u"\r\n")
        return u"\r\n".join(lines)

    def make_multipart(
        self, content_disposition=None, content_type=None, content_location=None
    ):
        """
        Makes this request field into a multipart request field.

        This method overrides "Content-Disposition", "Content-Type" and
        "Content-Location" headers to the request parameter.

        :param content_disposition:
            The 'Content-Disposition' of the request body. Defaults to
            'form-data' when not provided.
        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.

        """
        self.headers["Content-Disposition"] = content_disposition or u"form-data"
        self.headers["Content-Disposition"] += u"; ".join(
            [
                u"",
                self._render_parts(
                    ((u"name", self._name), (u"filename", self._filename))
                ),
            ]
        )
        self.headers["Content-Type"] = content_type
        self.headers["Content-Location"] = content_location
275 |
--------------------------------------------------------------------------------
/src/lib/urllib3/filepost.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import binascii
4 | import codecs
5 | import os
6 | from io import BytesIO
7 |
8 | from .fields import RequestField
9 | from .packages import six
10 | from .packages.six import b
11 |
# The StreamWriter factory of the utf-8 codec: wrapping a binary stream with
# it lets unicode text be written out utf-8 encoded.
writer = codecs.lookup("utf-8")[3]
13 |
14 |
def choose_boundary():
    """
    Our embarrassingly-simple replacement for mimetools.choose_boundary.
    """
    # 16 random bytes hex-encoded -> a 32-character boundary token.
    token = binascii.hexlify(os.urandom(16))
    if six.PY2:
        return token
    return token.decode("ascii")
23 |
24 |
def iter_field_objects(fields):
    """
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.

    """
    items = six.iteritems(fields) if isinstance(fields, dict) else iter(fields)

    for item in items:
        if isinstance(item, RequestField):
            yield item
        else:
            # Anything else is assumed to be an old-style (key, value) tuple.
            yield RequestField.from_tuples(*item)
43 |
44 |
def iter_fields(fields):
    """
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    """
    source = six.iteritems(fields) if isinstance(fields, dict) else fields
    return ((k, v) for k, v in source)
61 |
62 |
def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`urllib3.filepost.choose_boundary`.

    :returns:
        A ``(body, content_type)`` tuple: the encoded body as bytes and the
        matching 'Content-Type' header value including the boundary.
    """
    body = BytesIO()
    if boundary is None:
        boundary = choose_boundary()

    for field in iter_field_objects(fields):
        # Each part starts with a boundary delimiter line.
        body.write(b("--%s\r\n" % (boundary)))

        # Headers are text; ``writer`` utf-8 encodes them into the buffer.
        writer(body).write(field.render_headers())
        data = field.data

        if isinstance(data, int):
            data = str(data)  # Backwards compatibility

        if isinstance(data, six.text_type):
            writer(body).write(data)
        else:
            # Bytes-like data is written through unchanged.
            body.write(data)

        body.write(b"\r\n")

    # The closing delimiter ("--boundary--") ends the multipart body.
    body.write(b("--%s--\r\n" % (boundary)))

    content_type = str("multipart/form-data; boundary=%s" % boundary)

    return body.getvalue(), content_type
99 |
--------------------------------------------------------------------------------
/src/lib/urllib3/packages/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/lib/urllib3/packages/__init__.py
--------------------------------------------------------------------------------
/src/lib/urllib3/packages/backports/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhlsthrm/alfred-crypto-tracker/c73071bc70c9c206bcb532c873b0e3a395e0412b/src/lib/urllib3/packages/backports/__init__.py
--------------------------------------------------------------------------------
/src/lib/urllib3/packages/backports/makefile.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | backports.makefile
4 | ~~~~~~~~~~~~~~~~~~
5 |
6 | Backports the Python 3 ``socket.makefile`` method for use with anything that
7 | wants to create a "fake" socket object.
8 | """
9 | import io
10 | from socket import SocketIO
11 |
12 |
def backport_makefile(
    self, mode="r", buffering=None, encoding=None, errors=None, newline=None
):
    """
    Backport of ``socket.makefile`` from Python 3.5.

    Returns a file object wrapping the socket: the raw ``SocketIO``, a
    buffered reader/writer, or a text wrapper depending on ``mode`` and
    ``buffering``.
    """
    if not set(mode) <= {"r", "w", "b"}:
        raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
    writing = "w" in mode
    # Absent an explicit "w", the stream defaults to reading.
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    rawmode = ""
    if reading:
        rawmode += "r"
    if writing:
        rawmode += "w"
    raw = SocketIO(self, rawmode)
    # Count outstanding file objects so the socket isn't closed while any
    # of them are still alive.
    self._makefile_refs += 1
    if buffering is None:
        buffering = -1
    if buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        # Unbuffered access is only meaningful for binary streams.
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffer = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffer = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffer = io.BufferedWriter(raw, buffering)
    if binary:
        return buffer
    # Text modes get an additional encoding/decoding layer.
    text = io.TextIOWrapper(buffer, encoding, errors, newline)
    text.mode = mode
    return text
52 |
--------------------------------------------------------------------------------
/src/lib/urllib3/request.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from .filepost import encode_multipart_formdata
4 | from .packages.six.moves.urllib.parse import urlencode
5 |
6 | __all__ = ["RequestMethods"]
7 |
8 |
class RequestMethods(object):
    """
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`urllib3.HTTPConnectionPool` and
    :class:`urllib3.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
    (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request, it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    # HTTP methods whose fields belong in the URL's query string rather than
    # in a request body.
    _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}

    def __init__(self, headers=None):
        self.headers = headers or {}

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        encode_multipart=True,
        multipart_boundary=None,
        **kw
    ):  # Abstract
        # Subclasses (connection pools, pool managers) supply the actual
        # transport implementation.
        raise NotImplementedError(
            "Classes extending RequestMethods must implement "
            "their own ``urlopen`` method."
        )

    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        """
        method = method.upper()

        urlopen_kw["request_url"] = url

        if method in self._encode_url_methods:
            return self.request_encode_url(
                method, url, fields=fields, headers=headers, **urlopen_kw
            )
        else:
            return self.request_encode_body(
                method, url, fields=fields, headers=headers, **urlopen_kw
            )

    def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {"headers": headers}
        extra_kw.update(urlopen_kw)

        if fields:
            url += "?" + urlencode(fields)

        return self.urlopen(method, url, **extra_kw)

    def request_encode_body(
        self,
        method,
        url,
        fields=None,
        headers=None,
        encode_multipart=True,
        multipart_boundary=None,
        **urlopen_kw
    ):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :func:`urllib3.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :func:`urllib.parse.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
        safe to use it in other times too. However, it may break request
        signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {"headers": {}}

        if fields:
            if "body" in urlopen_kw:
                raise TypeError(
                    "request got values for both 'fields' and 'body', can only specify one."
                )

            if encode_multipart:
                body, content_type = encode_multipart_formdata(
                    fields, boundary=multipart_boundary
                )
            else:
                body, content_type = (
                    urlencode(fields),
                    "application/x-www-form-urlencoded",
                )

            extra_kw["body"] = body
            extra_kw["headers"] = {"Content-Type": content_type}

        # Caller-supplied headers and urlopen kwargs take precedence over the
        # generated 'Content-Type'.
        extra_kw["headers"].update(headers)
        extra_kw.update(urlopen_kw)

        return self.urlopen(method, url, **extra_kw)
171 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | # For backwards compatibility, provide imports that used to be here.
4 | from .connection import is_connection_dropped
5 | from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
6 | from .response import is_fp_closed
7 | from .retry import Retry
8 | from .ssl_ import (
9 | ALPN_PROTOCOLS,
10 | HAS_SNI,
11 | IS_PYOPENSSL,
12 | IS_SECURETRANSPORT,
13 | PROTOCOL_TLS,
14 | SSLContext,
15 | assert_fingerprint,
16 | resolve_cert_reqs,
17 | resolve_ssl_version,
18 | ssl_wrap_socket,
19 | )
20 | from .timeout import Timeout, current_time
21 | from .url import Url, get_host, parse_url, split_first
22 | from .wait import wait_for_read, wait_for_write
23 |
24 | __all__ = (
25 | "HAS_SNI",
26 | "IS_PYOPENSSL",
27 | "IS_SECURETRANSPORT",
28 | "SSLContext",
29 | "PROTOCOL_TLS",
30 | "ALPN_PROTOCOLS",
31 | "Retry",
32 | "Timeout",
33 | "Url",
34 | "assert_fingerprint",
35 | "current_time",
36 | "is_connection_dropped",
37 | "is_fp_closed",
38 | "get_host",
39 | "parse_url",
40 | "make_headers",
41 | "resolve_cert_reqs",
42 | "resolve_ssl_version",
43 | "split_first",
44 | "ssl_wrap_socket",
45 | "wait_for_read",
46 | "wait_for_write",
47 | "SKIP_HEADER",
48 | "SKIPPABLE_HEADERS",
49 | )
50 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/connection.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import socket
4 |
5 | from ..contrib import _appengine_environ
6 | from ..exceptions import LocationParseError
7 | from ..packages import six
8 | from .wait import NoWayToWaitForSocketError, wait_for_read
9 |
10 |
def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`http.client.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    # The ``False`` default distinguishes "no sock attribute at all"
    # (AppEngine) from "sock attribute set to None" (already closed).
    sock = getattr(conn, "sock", False)
    if sock is None:  # Connection already closed (such as by httplib).
        return True
    if sock is False:  # Platform-specific: AppEngine
        return False
    try:
        # A socket that polls readable while idle has been closed by the
        # peer (or has unexpected data) -- treat it as dropped.
        return wait_for_read(sock, timeout=0.0)
    except NoWayToWaitForSocketError:  # Platform-specific: AppEngine
        return False
31 |
32 |
33 | # This function is copied from socket.py in the Python 2.7 standard
34 | # library test suite. Added to its signature is only `socket_options`.
35 | # One additional modification is that we avoid binding to IPv6 servers
36 | # discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(
    address,
    timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
    source_address=None,
    socket_options=None,
):
    """Connect to *address* and return the socket object.

    Convenience function. Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object. Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect. If no *timeout* is supplied, the
    global default timeout setting returned by :func:`socket.getdefaulttimeout`
    is used. If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.

    :param socket_options:
        Optional sequence of ``setsockopt`` argument tuples applied to the
        socket before connecting.
    """

    host, port = address
    # Strip the brackets from IPv6 literal addresses like "[::1]".
    if host.startswith("["):
        host = host.strip("[]")
    err = None

    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
    # The original create_connection function always returns all records.
    family = allowed_gai_family()

    # Reject hostnames that cannot be IDNA-encoded before resolving them.
    try:
        host.encode("idna")
    except UnicodeError:
        return six.raise_from(
            LocationParseError(u"'%s', label empty or too long" % host), None
        )

    # Try each resolved address in turn until one connects.
    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            _set_socket_options(sock, socket_options)

            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock

        except socket.error as e:
            # Remember the last error and move on to the next address.
            err = e
            if sock is not None:
                sock.close()
                sock = None

    if err is not None:
        raise err

    raise socket.error("getaddrinfo returns an empty list")
98 |
99 |
100 | def _set_socket_options(sock, options):
101 | if options is None:
102 | return
103 |
104 | for opt in options:
105 | sock.setsockopt(*opt)
106 |
107 |
def allowed_gai_family():
    """This function is designed to work in the context of
    getaddrinfo, where family=socket.AF_UNSPEC is the default and
    will perform a DNS search for both IPv6 and IPv4 records."""
    # Restrict lookups to IPv4 unless the system demonstrably supports IPv6.
    return socket.AF_UNSPEC if HAS_IPV6 else socket.AF_INET
117 |
118 |
def _has_ipv6(host):
    """Returns True if the system can bind an IPv6 address."""
    sock = None
    has_ipv6 = False

    # App Engine doesn't support IPV6 sockets and actually has a quota on the
    # number of sockets that can be used, so just early out here instead of
    # creating a socket needlessly.
    # See https://github.com/urllib3/urllib3/issues/1446
    if _appengine_environ.is_appengine_sandbox():
        return False

    if socket.has_ipv6:
        # has_ipv6 returns true if cPython was compiled with IPv6 support.
        # It does not tell us if the system has IPv6 support enabled. To
        # determine that we must bind to an IPv6 address.
        # https://github.com/urllib3/urllib3/pull/611
        # https://bugs.python.org/issue658327
        try:
            sock = socket.socket(socket.AF_INET6)
            sock.bind((host, 0))
            has_ipv6 = True
        except Exception:
            # Any failure (no IPv6 stack, permissions, ...) means "no IPv6".
            pass

    if sock:
        sock.close()
    return has_ipv6
147 |
148 |
# Probed once at import time: binding the IPv6 loopback address is the check
# used to decide whether getaddrinfo may return IPv6 records.
HAS_IPV6 = _has_ipv6("::1")
150 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/proxy.py:
--------------------------------------------------------------------------------
1 | from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
2 |
3 |
def connection_requires_http_tunnel(
    proxy_url=None, proxy_config=None, destination_scheme=None
):
    """
    Returns True if the connection requires an HTTP CONNECT through the proxy.

    :param URL proxy_url:
        URL of the proxy.
    :param ProxyConfig proxy_config:
        Proxy configuration from poolmanager.py
    :param str destination_scheme:
        The scheme of the destination. (i.e https, http, etc)
    """
    # No proxy at all -> nothing to tunnel through.
    if proxy_url is None:
        return False

    # Plain-HTTP destinations are always forwarded, never tunneled.
    if destination_scheme == "http":
        return False

    # An HTTPS proxy explicitly configured to forward HTTPS traffic also
    # skips the tunnel.
    if proxy_url.scheme == "https" and proxy_config:
        if proxy_config.use_forwarding_for_https:
            return False

    # Everything else (typically an HTTPS destination behind an HTTP proxy)
    # needs a CONNECT tunnel.
    return True
35 |
36 |
def create_proxy_ssl_context(
    ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
):
    """
    Generates a default proxy ssl context if one hasn't been provided by the
    user.
    """
    ssl_context = create_urllib3_context(
        ssl_version=resolve_ssl_version(ssl_version),
        cert_reqs=resolve_cert_reqs(cert_reqs),
    )

    # With no explicit CA material, fall back to the system trust store where
    # the ssl implementation supports it.
    no_custom_cas = not (ca_certs or ca_cert_dir or ca_cert_data)
    if no_custom_cas and hasattr(ssl_context, "load_default_certs"):
        ssl_context.load_default_certs()

    return ssl_context
58 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/queue.py:
--------------------------------------------------------------------------------
1 | import collections
2 |
3 | from ..packages import six
4 | from ..packages.six.moves import queue
5 |
6 | if six.PY2:
7 | # Queue is imported for side effects on MS Windows. See issue #229.
8 | import Queue as _unused_module_Queue # noqa: F401
9 |
10 |
class LifoQueue(queue.Queue):
    # A last-in-first-out variant of ``queue.Queue``. Overriding the private
    # ``_init``/``_qsize``/``_put``/``_get`` hooks is the documented way to
    # change the queue's ordering discipline.

    def _init(self, _):
        # The maxsize argument is ignored here; a deque holds the items.
        self.queue = collections.deque()

    def _qsize(self, len=len):
        # ``len`` is bound as a default argument for a faster local lookup.
        return len(self.queue)

    def _put(self, item):
        self.queue.append(item)

    def _get(self):
        # Popping from the same end ``_put`` appends to yields LIFO order.
        return self.queue.pop()
23 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/request.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from base64 import b64encode
4 |
5 | from ..exceptions import UnrewindableBodyError
6 | from ..packages.six import b, integer_types
7 |
# Pass as a value within ``headers`` to skip
# emitting some HTTP headers that are added automatically.
# The only headers that are supported are ``Accept-Encoding``,
# ``Host``, and ``User-Agent``.
SKIP_HEADER = "@@@SKIP_HEADER@@@"
SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])

ACCEPT_ENCODING = "gzip,deflate"
# Advertise brotli support only when a brotli decoder is importable
# (brotlicffi preferred, plain brotli as a fallback).
try:
    try:
        import brotlicffi as _unused_module_brotli  # noqa: F401
    except ImportError:
        import brotli as _unused_module_brotli  # noqa: F401
except ImportError:
    pass
else:
    ACCEPT_ENCODING += ",br"

# Sentinel recorded when ``body.tell()`` raised, so the position is unknown --
# distinct from ``None``, which means no position was recorded at all.
_FAILEDTELL = object()
27 |
28 |
def make_headers(
    keep_alive=None,
    accept_encoding=None,
    user_agent=None,
    basic_auth=None,
    proxy_basic_auth=None,
    disable_cache=None,
):
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    """
    headers = {}

    if accept_encoding:
        if isinstance(accept_encoding, list):
            accept_encoding = ",".join(accept_encoding)
        elif not isinstance(accept_encoding, str):
            # Any other truthy value (e.g. ``True``) selects the default.
            accept_encoding = ACCEPT_ENCODING
        headers["accept-encoding"] = accept_encoding

    if user_agent:
        headers["user-agent"] = user_agent

    if keep_alive:
        headers["connection"] = "keep-alive"

    if basic_auth:
        encoded = b64encode(b(basic_auth)).decode("utf-8")
        headers["authorization"] = "Basic " + encoded

    if proxy_basic_auth:
        encoded = b64encode(b(proxy_basic_auth)).decode("utf-8")
        headers["proxy-authorization"] = "Basic " + encoded

    if disable_cache:
        headers["cache-control"] = "no-cache"

    return headers
99 |
100 |
def set_file_position(body, pos):
    """
    If a position is provided, move file to that point.
    Otherwise, we'll attempt to record a position for future use.
    """
    if pos is not None:
        rewind_body(body, pos)
        return pos

    tell = getattr(body, "tell", None)
    if tell is None:
        # Body has no position concept at all.
        return None

    try:
        return tell()
    except (IOError, OSError):
        # This differentiates from None, allowing us to catch
        # a failed `tell()` later when trying to rewind the body.
        return _FAILEDTELL
117 |
118 |
def rewind_body(body, body_pos):
    """
    Attempt to rewind body to a certain position.
    Primarily used for request redirects and retries.

    :param body:
        File-like object that supports seek.

    :param int body_pos:
        Position to seek to in file.
    """
    body_seek = getattr(body, "seek", None)

    if body_seek is None or not isinstance(body_pos, integer_types):
        if body_pos is _FAILEDTELL:
            # ``tell()`` failed earlier, so there is nothing to seek back to.
            raise UnrewindableBodyError(
                "Unable to record file position for rewinding "
                "request body during a redirect/retry."
            )
        raise ValueError(
            "body_pos must be of type integer, instead it was %s." % type(body_pos)
        )

    try:
        body_seek(body_pos)
    except (IOError, OSError):
        raise UnrewindableBodyError(
            "An error occurred when rewinding request body for redirect/retry."
        )
147 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/response.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
4 |
5 | from ..exceptions import HeaderParsingError
6 | from ..packages.six.moves import http_client as httplib
7 |
8 |
def is_fp_closed(obj):
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    """
    # Probe the known conventions in order, falling through on AttributeError
    # until one of them can answer:
    #   1. httplib responses expose ``isclosed()`` (and may not set ``closed``
    #      on Python 3 -- GH Issue #928).
    #   2. the official file-like-object ``closed`` attribute.
    #   3. containers for another file-like object that is released on
    #      exhaustion (e.g. HTTPResponse) expose ``fp``.
    for probe in (
        lambda: obj.isclosed(),
        lambda: obj.closed,
        lambda: obj.fp is None,
    ):
        try:
            return probe()
        except AttributeError:
            pass

    raise ValueError("Unable to determine whether fp is closed.")
38 |
39 |
def assert_header_parsing(headers):
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param http.client.HTTPMessage headers: Headers to verify.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """

    # This will fail silently if we pass in the wrong kind of parameter.
    # To make debugging easier add an explicit check.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError("expected httplib.Message, got {0}.".format(type(headers)))

    # Read both attributes defensively; older/other message implementations
    # may not provide them.
    defects = getattr(headers, "defects", None)
    get_payload = getattr(headers, "get_payload", None)

    unparsed_data = None
    if get_payload:
        # get_payload is actually email.message.Message.get_payload;
        # we're only interested in the result if it's not a multipart message
        if not headers.is_multipart():
            payload = get_payload()

            # A leftover payload here means the header parser stopped early
            # and treated the remainder as body data — i.e. malformed headers.
            if isinstance(payload, (bytes, str)):
                unparsed_data = payload
    if defects:
        # httplib is assuming a response body is available
        # when parsing headers even when httplib only sends
        # header data to parse_headers() This results in
        # defects on multipart responses in particular.
        # See: https://github.com/urllib3/urllib3/issues/800

        # So we ignore the following defects:
        # - StartBoundaryNotFoundDefect:
        #     The claimed start boundary was never found.
        # - MultipartInvariantViolationDefect:
        #     A message claimed to be a multipart but no subparts were found.
        defects = [
            defect
            for defect in defects
            if not isinstance(
                defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
            )
        ]

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
92 |
93 |
def is_response_to_head(response):
    """
    Return True when the response was produced by a HEAD request.

    AppEngine reports the request method as an integer constant (3 for
    HEAD) rather than a string, so both representations are handled.

    :param http.client.HTTPResponse response:
        Response whose originating request method is inspected.
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    method = response._method
    # Platform-specific: AppEngine encodes HEAD as the integer 3.
    return method == 3 if isinstance(method, int) else method.upper() == "HEAD"
108 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/ssl_match_hostname.py:
--------------------------------------------------------------------------------
1 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
2 |
3 | # Note: This file is under the PSF license as the code comes from the python
4 | # stdlib. http://docs.python.org/3/license.html
5 |
6 | import re
7 | import sys
8 |
9 | # ipaddress has been backported to 2.6+ in pypi. If it is installed on the
10 | # system, use it to handle IPAddress ServerAltnames (this was added in
11 | # python-3.5) otherwise only do DNS matching. This allows
12 | # util.ssl_match_hostname to continue to be used in Python 2.7.
13 | try:
14 | import ipaddress
15 | except ImportError:
16 | ipaddress = None
17 |
18 | __version__ = "3.5.0.1"
19 |
20 |
class CertificateError(ValueError):
    """Raised when a certificate does not match the expected hostname."""

    pass
23 |
24 |
25 | def _dnsname_match(dn, hostname, max_wildcards=1):
26 | """Matching according to RFC 6125, section 6.4.3
27 |
28 | http://tools.ietf.org/html/rfc6125#section-6.4.3
29 | """
30 | pats = []
31 | if not dn:
32 | return False
33 |
34 | # Ported from python3-syntax:
35 | # leftmost, *remainder = dn.split(r'.')
36 | parts = dn.split(r".")
37 | leftmost = parts[0]
38 | remainder = parts[1:]
39 |
40 | wildcards = leftmost.count("*")
41 | if wildcards > max_wildcards:
42 | # Issue #17980: avoid denials of service by refusing more
43 | # than one wildcard per fragment. A survey of established
44 | # policy among SSL implementations showed it to be a
45 | # reasonable choice.
46 | raise CertificateError(
47 | "too many wildcards in certificate DNS name: " + repr(dn)
48 | )
49 |
50 | # speed up common case w/o wildcards
51 | if not wildcards:
52 | return dn.lower() == hostname.lower()
53 |
54 | # RFC 6125, section 6.4.3, subitem 1.
55 | # The client SHOULD NOT attempt to match a presented identifier in which
56 | # the wildcard character comprises a label other than the left-most label.
57 | if leftmost == "*":
58 | # When '*' is a fragment by itself, it matches a non-empty dotless
59 | # fragment.
60 | pats.append("[^.]+")
61 | elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
62 | # RFC 6125, section 6.4.3, subitem 3.
63 | # The client SHOULD NOT attempt to match a presented identifier
64 | # where the wildcard character is embedded within an A-label or
65 | # U-label of an internationalized domain name.
66 | pats.append(re.escape(leftmost))
67 | else:
68 | # Otherwise, '*' matches any dotless string, e.g. www*
69 | pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
70 |
71 | # add the remaining fragments, ignore any wildcards
72 | for frag in remainder:
73 | pats.append(re.escape(frag))
74 |
75 | pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
76 | return pat.match(hostname)
77 |
78 |
79 | def _to_unicode(obj):
80 | if isinstance(obj, str) and sys.version_info < (3,):
81 | # ignored flake8 # F821 to support python 2.7 function
82 | obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821
83 | return obj
84 |
85 |
def _ipaddress_match(ipname, host_ip):
    """Exact matching of IP addresses.

    RFC 6125 explicitly doesn't define an algorithm for this
    (section 1.7.2 - "Out of Scope").
    """
    # OpenSSL may append a trailing newline to a subjectAltName IP address,
    # hence the rstrip(). Divergence from upstream: ipaddress can't handle
    # byte str, so normalize through _to_unicode first.
    candidate = ipaddress.ip_address(_to_unicode(ipname).rstrip())
    return candidate == host_ip
96 |
97 |
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError(
            "empty or no certificate, match_hostname needs a "
            "SSL socket or SSL context with either "
            "CERT_OPTIONAL or CERT_REQUIRED"
        )
    try:
        # Divergence from upstream: ipaddress can't handle byte str
        host_ip = ipaddress.ip_address(_to_unicode(hostname))
    except (UnicodeError, ValueError):
        # ValueError: Not an IP address (common case)
        # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking
        # byte strings. addresses should be all ascii, so we consider it not
        # an ipaddress in this case
        host_ip = None
    except AttributeError:
        # Divergence from upstream: Make ipaddress library optional
        if ipaddress is None:
            host_ip = None
        else:  # Defensive
            raise
    # Collect every identity we examined so a failure can report them all.
    dnsnames = []
    san = cert.get("subjectAltName", ())
    for key, value in san:
        if key == "DNS":
            # Only attempt DNS matching when the hostname is not itself an
            # IP address.
            if host_ip is None and _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
        elif key == "IP Address":
            if host_ip is not None and _ipaddress_match(value, host_ip):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get("subject", ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == "commonName":
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    # No identity matched: raise with the candidates seen, so the error
    # message is actionable.
    if len(dnsnames) > 1:
        raise CertificateError(
            "hostname %r "
            "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
        )
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
    else:
        raise CertificateError(
            "no appropriate commonName or subjectAltName fields were found"
        )
160 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/ssltransport.py:
--------------------------------------------------------------------------------
1 | import io
2 | import socket
3 | import ssl
4 |
5 | from ..exceptions import ProxySchemeUnsupported
6 | from ..packages import six
7 |
8 | SSL_BLOCKSIZE = 16384
9 |
10 |
class SSLTransport:
    """
    The SSLTransport wraps an existing socket and establishes an SSL connection.

    Contrary to Python's implementation of SSLSocket, it allows you to chain
    multiple TLS connections together. It's particularly useful if you need to
    implement TLS within TLS.

    The class supports most of the socket API operations.
    """

    @staticmethod
    def _validate_ssl_context_for_tls_in_tls(ssl_context):
        """
        Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
        for TLS in TLS.

        The only requirement is that the ssl_context provides the 'wrap_bio'
        methods.
        """

        if not hasattr(ssl_context, "wrap_bio"):
            if six.PY2:
                raise ProxySchemeUnsupported(
                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
                    "supported on Python 2"
                )
            else:
                raise ProxySchemeUnsupported(
                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
                    "available on non-native SSLContext"
                )

    def __init__(
        self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
    ):
        """
        Create an SSLTransport around socket using the provided ssl_context.
        """
        # Memory BIOs shuttle raw TLS records between the SSL object and the
        # wrapped socket, which is what allows the "socket" to itself be
        # another TLS stream.
        self.incoming = ssl.MemoryBIO()
        self.outgoing = ssl.MemoryBIO()

        self.suppress_ragged_eofs = suppress_ragged_eofs
        self.socket = socket

        self.sslobj = ssl_context.wrap_bio(
            self.incoming, self.outgoing, server_hostname=server_hostname
        )

        # Perform initial handshake.
        self._ssl_io_loop(self.sslobj.do_handshake)

    def __enter__(self):
        return self

    def __exit__(self, *_):
        self.close()

    def fileno(self):
        # Expose the underlying socket's descriptor for select()/poll().
        return self.socket.fileno()

    def read(self, len=1024, buffer=None):
        return self._wrap_ssl_read(len, buffer)

    def recv(self, len=1024, flags=0):
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to recv")
        return self._wrap_ssl_read(len)

    def recv_into(self, buffer, nbytes=None, flags=0):
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to recv_into")
        if buffer and (nbytes is None):
            nbytes = len(buffer)
        elif nbytes is None:
            nbytes = 1024
        return self.read(nbytes, buffer)

    def sendall(self, data, flags=0):
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to sendall")
        count = 0
        # Repeatedly call send() until the entire (byte-cast) view is written.
        with memoryview(data) as view, view.cast("B") as byte_view:
            amount = len(byte_view)
            while count < amount:
                v = self.send(byte_view[count:])
                count += v

    def send(self, data, flags=0):
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to send")
        response = self._ssl_io_loop(self.sslobj.write, data)
        return response

    def makefile(
        self, mode="r", buffering=None, encoding=None, errors=None, newline=None
    ):
        """
        Python's httpclient uses makefile and buffered io when reading HTTP
        messages and we need to support it.

        This is unfortunately a copy and paste of socket.py makefile with small
        changes to point to the socket directly.
        """
        if not set(mode) <= {"r", "w", "b"}:
            raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))

        writing = "w" in mode
        reading = "r" in mode or not writing
        assert reading or writing
        binary = "b" in mode
        rawmode = ""
        if reading:
            rawmode += "r"
        if writing:
            rawmode += "w"
        # SocketIO calls back into our recv_into/send, so the file object's
        # reads and writes pass through the TLS layer, not the raw socket.
        raw = socket.SocketIO(self, rawmode)
        self.socket._io_refs += 1
        if buffering is None:
            buffering = -1
        if buffering < 0:
            buffering = io.DEFAULT_BUFFER_SIZE
        if buffering == 0:
            if not binary:
                raise ValueError("unbuffered streams must be binary")
            return raw
        if reading and writing:
            buffer = io.BufferedRWPair(raw, raw, buffering)
        elif reading:
            buffer = io.BufferedReader(raw, buffering)
        else:
            assert writing
            buffer = io.BufferedWriter(raw, buffering)
        if binary:
            return buffer
        text = io.TextIOWrapper(buffer, encoding, errors, newline)
        text.mode = mode
        return text

    def unwrap(self):
        # Shut down the TLS layer (sends close_notify) without closing the
        # underlying socket.
        self._ssl_io_loop(self.sslobj.unwrap)

    def close(self):
        self.socket.close()

    def getpeercert(self, binary_form=False):
        return self.sslobj.getpeercert(binary_form)

    def version(self):
        return self.sslobj.version()

    def cipher(self):
        return self.sslobj.cipher()

    def selected_alpn_protocol(self):
        return self.sslobj.selected_alpn_protocol()

    def selected_npn_protocol(self):
        return self.sslobj.selected_npn_protocol()

    def shared_ciphers(self):
        return self.sslobj.shared_ciphers()

    def compression(self):
        return self.sslobj.compression()

    def settimeout(self, value):
        self.socket.settimeout(value)

    def gettimeout(self):
        return self.socket.gettimeout()

    def _decref_socketios(self):
        # Mirror socket.socket's reference bookkeeping so file objects from
        # makefile() can release their hold on the wrapped socket.
        self.socket._decref_socketios()

    def _wrap_ssl_read(self, len, buffer=None):
        # Translate a ragged EOF (peer vanished mid-record) into a normal
        # end-of-stream when the caller asked for that behavior.
        try:
            return self._ssl_io_loop(self.sslobj.read, len, buffer)
        except ssl.SSLError as e:
            if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
                return 0  # eof, return 0.
            else:
                raise

    def _ssl_io_loop(self, func, *args):
        """Performs an I/O loop between incoming/outgoing and the socket."""
        should_loop = True
        ret = None

        while should_loop:
            errno = None
            try:
                ret = func(*args)
            except ssl.SSLError as e:
                if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
                    # WANT_READ, and WANT_WRITE are expected, others are not.
                    raise e
                errno = e.errno

            # Flush whatever TLS records the SSL object produced — even on
            # WANT_READ/WANT_WRITE — before deciding whether to loop again.
            buf = self.outgoing.read()
            self.socket.sendall(buf)

            if errno is None:
                should_loop = False
            elif errno == ssl.SSL_ERROR_WANT_READ:
                buf = self.socket.recv(SSL_BLOCKSIZE)
                if buf:
                    self.incoming.write(buf)
                else:
                    # Peer closed the connection: signal EOF to the SSL
                    # object so the pending operation can terminate.
                    self.incoming.write_eof()
        return ret
222 |
--------------------------------------------------------------------------------
/src/lib/urllib3/util/wait.py:
--------------------------------------------------------------------------------
1 | import errno
2 | import select
3 | import sys
4 | from functools import partial
5 |
6 | try:
7 | from time import monotonic
8 | except ImportError:
9 | from time import time as monotonic
10 |
11 | __all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
12 |
13 |
class NoWayToWaitForSocketError(Exception):
    """Raised when the platform offers no select()-equivalent primitive."""

    pass
16 |
17 |
18 | # How should we wait on sockets?
19 | #
20 | # There are two types of APIs you can use for waiting on sockets: the fancy
21 | # modern stateful APIs like epoll/kqueue, and the older stateless APIs like
22 | # select/poll. The stateful APIs are more efficient when you have a lots of
23 | # sockets to keep track of, because you can set them up once and then use them
24 | # lots of times. But we only ever want to wait on a single socket at a time
25 | # and don't want to keep track of state, so the stateless APIs are actually
26 | # more efficient. So we want to use select() or poll().
27 | #
28 | # Now, how do we choose between select() and poll()? On traditional Unixes,
29 | # select() has a strange calling convention that makes it slow, or fail
30 | # altogether, for high-numbered file descriptors. The point of poll() is to fix
31 | # that, so on Unixes, we prefer poll().
32 | #
33 | # On Windows, there is no poll() (or at least Python doesn't provide a wrapper
34 | # for it), but that's OK, because on Windows, select() doesn't have this
35 | # strange calling convention; plain select() works fine.
36 | #
37 | # So: on Windows we use select(), and everywhere else we use poll(). We also
38 | # fall back to select() in case poll() is somehow broken or missing.
39 |
if sys.version_info >= (3, 5):
    # Modern Python, that retries syscalls by default (PEP 475: an EINTR
    # from select()/poll() is retried by the interpreter), so no wrapper
    # logic is needed.
    def _retry_on_intr(fn, timeout):
        return fn(timeout)


else:
    # Old and broken Pythons.
    def _retry_on_intr(fn, timeout):
        # Track an absolute deadline so each retry waits only the time
        # remaining, not the full original timeout.
        if timeout is None:
            deadline = float("inf")
        else:
            deadline = monotonic() + timeout

        while True:
            try:
                return fn(timeout)
            # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
            except (OSError, select.error) as e:
                # 'e.args[0]' incantation works for both OSError and select.error
                if e.args[0] != errno.EINTR:
                    raise
                else:
                    # Interrupted by a signal: recompute the remaining wait
                    # and try again.
                    timeout = deadline - monotonic()
                    if timeout < 0:
                        timeout = 0
                    if timeout == float("inf"):
                        timeout = None
                    continue
69 |
70 |
def select_wait_for_socket(sock, read=False, write=False, timeout=None):
    """Wait on ``sock`` with select(); True if any watched state is ready."""
    if not read and not write:
        raise RuntimeError("must specify at least one of read=True, write=True")
    rcheck = [sock] if read else []
    wcheck = [sock] if write else []
    # A non-blocking connect is signalled as "writable" on most systems but
    # as "exceptional" on Windows, so the write sockets are watched for both
    # conditions. (The stdlib selectors module does the same thing.)
    fn = partial(select.select, rcheck, wcheck, wcheck)
    # select() returns (readable, writable, exceptional) lists; any
    # non-empty list means the socket is ready.
    return any(_retry_on_intr(fn, timeout))
88 |
89 |
def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
    """Wait on ``sock`` with poll(); True if the requested events arrive."""
    if not read and not write:
        raise RuntimeError("must specify at least one of read=True, write=True")
    events = (select.POLLIN if read else 0) | (select.POLLOUT if write else 0)
    poll_obj = select.poll()
    poll_obj.register(sock, events)

    def do_poll(t):
        # For some reason, poll() takes its timeout in milliseconds.
        return poll_obj.poll(t * 1000 if t is not None else t)

    return bool(_retry_on_intr(do_poll, timeout))
108 |
109 |
def null_wait_for_socket(*args, **kwargs):
    # Fallback used when the platform has neither poll() nor select()
    # (e.g. AppEngine); always fails.
    raise NoWayToWaitForSocketError("no select-equivalent available")
112 |
113 |
def _have_working_poll():
    # Some platforms expose select.poll but it fails as soon as it is used,
    # either due to strange configuration or broken monkeypatching from
    # libraries like eventlet/greenlet — so actually exercise it once
    # instead of trusting hasattr().
    try:
        _retry_on_intr(select.poll().poll, 0)
    except (AttributeError, OSError):
        return False
    return True
125 |
126 |
def wait_for_socket(*args, **kwargs):
    # We delay choosing which implementation to use until the first time we're
    # called. We could do it at import time, but then we might make the wrong
    # decision if someone goes wild with monkeypatching select.poll after
    # we're imported.
    global wait_for_socket
    # Rebind this module-level name to the chosen implementation so later
    # calls skip the probe entirely.
    if _have_working_poll():
        wait_for_socket = poll_wait_for_socket
    elif hasattr(select, "select"):
        wait_for_socket = select_wait_for_socket
    else:  # Platform-specific: Appengine.
        wait_for_socket = null_wait_for_socket
    return wait_for_socket(*args, **kwargs)
140 |
141 |
def wait_for_read(sock, timeout=None):
    """Waits for reading to be available on a given socket.
    Returns True if the socket is readable, or False if the timeout expired.

    :param sock: Socket (or file descriptor) to wait on.
    :param timeout: Seconds to wait; None blocks indefinitely.
    """
    return wait_for_socket(sock, read=True, timeout=timeout)
147 |
148 |
def wait_for_write(sock, timeout=None):
    """Waits for writing to be available on a given socket.
    Returns True if the socket is writable, or False if the timeout expired.

    :param sock: Socket (or file descriptor) to wait on.
    :param timeout: Seconds to wait; None blocks indefinitely.
    """
    return wait_for_socket(sock, write=True, timeout=timeout)
154 |
--------------------------------------------------------------------------------
/src/requirements.txt:
--------------------------------------------------------------------------------
1 | requests==2.27.1
2 |
--------------------------------------------------------------------------------