├── source ├── requests │ ├── packages │ │ ├── urllib3 │ │ │ ├── contrib │ │ │ │ ├── __init__.py │ │ │ │ ├── __init__.pyc │ │ │ │ ├── ntlmpool.pyc │ │ │ │ ├── pyopenssl.pyc │ │ │ │ └── ntlmpool.py │ │ │ ├── packages │ │ │ │ ├── __init__.py │ │ │ │ ├── six.pyc │ │ │ │ ├── __init__.pyc │ │ │ │ ├── ordered_dict.pyc │ │ │ │ └── ssl_match_hostname │ │ │ │ │ ├── __init__.pyc │ │ │ │ │ ├── _implementation.pyc │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── _implementation.py │ │ │ ├── fields.pyc │ │ │ ├── __init__.pyc │ │ │ ├── filepost.pyc │ │ │ ├── request.pyc │ │ │ ├── response.pyc │ │ │ ├── util │ │ │ │ ├── ssl_.pyc │ │ │ │ ├── url.pyc │ │ │ │ ├── retry.pyc │ │ │ │ ├── __init__.pyc │ │ │ │ ├── request.pyc │ │ │ │ ├── response.pyc │ │ │ │ ├── timeout.pyc │ │ │ │ ├── connection.pyc │ │ │ │ ├── __init__.py │ │ │ │ ├── response.py │ │ │ │ ├── request.py │ │ │ │ ├── connection.py │ │ │ │ ├── ssl_.py │ │ │ │ └── url.py │ │ │ ├── connection.pyc │ │ │ ├── exceptions.pyc │ │ │ ├── poolmanager.pyc │ │ │ ├── _collections.pyc │ │ │ ├── connectionpool.pyc │ │ │ ├── __init__.py │ │ │ ├── filepost.py │ │ │ ├── exceptions.py │ │ │ ├── request.py │ │ │ ├── fields.py │ │ │ ├── _collections.py │ │ │ └── connection.py │ │ ├── __init__.py │ │ ├── __init__.pyc │ │ └── chardet │ │ │ ├── compat.pyc │ │ │ ├── escsm.pyc │ │ │ ├── jpcntx.pyc │ │ │ ├── mbcssm.pyc │ │ │ ├── __init__.pyc │ │ │ ├── big5freq.pyc │ │ │ ├── constants.pyc │ │ │ ├── escprober.pyc │ │ │ ├── euckrfreq.pyc │ │ │ ├── euctwfreq.pyc │ │ │ ├── jisfreq.pyc │ │ │ ├── big5prober.pyc │ │ │ ├── chardetect.pyc │ │ │ ├── cp949prober.pyc │ │ │ ├── eucjpprober.pyc │ │ │ ├── euckrprober.pyc │ │ │ ├── euctwprober.pyc │ │ │ ├── gb2312freq.pyc │ │ │ ├── sjisprober.pyc │ │ │ ├── utf8prober.pyc │ │ │ ├── charsetprober.pyc │ │ │ ├── gb2312prober.pyc │ │ │ ├── hebrewprober.pyc │ │ │ ├── langgreekmodel.pyc │ │ │ ├── langthaimodel.pyc │ │ │ ├── latin1prober.pyc │ │ │ ├── chardistribution.pyc │ │ │ ├── langhebrewmodel.pyc │ │ │ ├── mbcharsetprober.pyc │ │ │ ├── mbcsgroupprober.pyc │ │ │ ├── sbcharsetprober.pyc │ │ │ ├── sbcsgroupprober.pyc │ │ │ ├── charsetgroupprober.pyc │ │ │ ├── codingstatemachine.pyc │ │ │ ├── langbulgarianmodel.pyc │ │ │ ├── langcyrillicmodel.pyc │ │ │ ├── langhungarianmodel.pyc │ │ │ ├── universaldetector.pyc │ │ │ ├── compat.py │ │ │ ├── chardetect.py │ │ │ ├── __init__.py │ │ │ ├── constants.py │ │ │ ├── euckrprober.py │ │ │ ├── euctwprober.py │ │ │ ├── gb2312prober.py │ │ │ ├── big5prober.py │ │ │ ├── cp949prober.py │ │ │ ├── charsetprober.py │ │ │ ├── mbcsgroupprober.py │ │ │ ├── codingstatemachine.py │ │ │ ├── utf8prober.py │ │ │ ├── escprober.py │ │ │ ├── sbcsgroupprober.py │ │ │ ├── mbcharsetprober.py │ │ │ ├── eucjpprober.py │ │ │ ├── sjisprober.py │ │ │ ├── charsetgroupprober.py │ │ │ ├── sbcharsetprober.py │ │ │ ├── latin1prober.py │ │ │ ├── universaldetector.py │ │ │ └── escsm.py │ ├── api.pyc │ ├── auth.pyc │ ├── certs.pyc │ ├── compat.pyc │ ├── hooks.pyc │ ├── models.pyc │ ├── utils.pyc │ ├── __init__.pyc │ ├── adapters.pyc │ ├── cookies.pyc │ ├── sessions.pyc │ ├── exceptions.pyc │ ├── structures.pyc │ ├── status_codes.pyc │ ├── certs.py │ ├── hooks.py │ ├── __init__.py │ ├── exceptions.py │ ├── compat.py │ ├── structures.py │ ├── status_codes.py │ ├── api.py │ └── auth.py ├── lib │ └── requests-2.4.1.dist-info │ │ ├── top_level.txt │ │ ├── WHEEL │ │ └── pydist.json ├── icon.png ├── do_poweron.py ├── do_reboot.py ├── do_shutdown.py ├── do_power_cycle.py ├── do_snapshot.py ├── do_snapshot_check.py ├── do_alfred.py └── workflow │ ├── 
__init__.py │ └── background.py ├── workflow └── Digital Ocean status.alfredworkflow └── README.md /source/requests/packages/urllib3/contrib/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /source/lib/requests-2.4.1.dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | requests 2 | -------------------------------------------------------------------------------- /source/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/icon.png -------------------------------------------------------------------------------- /source/requests/api.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/api.pyc -------------------------------------------------------------------------------- /source/requests/packages/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from . import urllib3 4 | -------------------------------------------------------------------------------- /source/requests/auth.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/auth.pyc -------------------------------------------------------------------------------- /source/requests/certs.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/certs.pyc -------------------------------------------------------------------------------- /source/requests/compat.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/compat.pyc -------------------------------------------------------------------------------- /source/requests/hooks.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/hooks.pyc -------------------------------------------------------------------------------- /source/requests/models.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/models.pyc -------------------------------------------------------------------------------- /source/requests/utils.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/utils.pyc -------------------------------------------------------------------------------- /source/requests/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/__init__.pyc -------------------------------------------------------------------------------- /source/requests/adapters.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/adapters.pyc -------------------------------------------------------------------------------- /source/requests/cookies.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/cookies.pyc -------------------------------------------------------------------------------- /source/requests/sessions.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/sessions.pyc -------------------------------------------------------------------------------- /source/requests/exceptions.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/exceptions.pyc -------------------------------------------------------------------------------- /source/requests/structures.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/structures.pyc -------------------------------------------------------------------------------- /source/requests/status_codes.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/status_codes.pyc -------------------------------------------------------------------------------- /source/requests/packages/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/__init__.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/packages/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from . 
import ssl_match_hostname 4 | 5 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/compat.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/compat.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/escsm.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/escsm.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/jpcntx.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/jpcntx.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/mbcssm.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/mbcssm.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/fields.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/fields.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/__init__.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/big5freq.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/big5freq.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/constants.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/constants.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/escprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/escprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/euckrfreq.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/euckrfreq.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/euctwfreq.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/euctwfreq.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/jisfreq.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/jisfreq.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/__init__.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/filepost.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/filepost.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/request.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/request.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/response.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/response.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/ssl_.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/util/ssl_.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/url.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/util/url.pyc -------------------------------------------------------------------------------- /workflow/Digital Ocean status.alfredworkflow: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/workflow/Digital Ocean status.alfredworkflow -------------------------------------------------------------------------------- /source/requests/packages/chardet/big5prober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/big5prober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/chardetect.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/chardetect.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/cp949prober.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/cp949prober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/eucjpprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/eucjpprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/euckrprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/euckrprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/euctwprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/euctwprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/gb2312freq.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/gb2312freq.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/sjisprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/sjisprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/utf8prober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/utf8prober.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/connection.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/connection.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/exceptions.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/exceptions.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/poolmanager.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/poolmanager.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/retry.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/util/retry.pyc 
-------------------------------------------------------------------------------- /source/requests/packages/chardet/charsetprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/charsetprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/gb2312prober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/gb2312prober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/hebrewprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/hebrewprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/langgreekmodel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/langgreekmodel.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/langthaimodel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/langthaimodel.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/latin1prober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/latin1prober.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/_collections.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/_collections.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/connectionpool.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/connectionpool.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/packages/six.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/packages/six.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/util/__init__.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/request.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/util/request.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/response.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/util/response.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/timeout.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/util/timeout.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/chardistribution.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/chardistribution.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/langhebrewmodel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/langhebrewmodel.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/mbcharsetprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/mbcharsetprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/mbcsgroupprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/mbcsgroupprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/sbcharsetprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/sbcharsetprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/sbcsgroupprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/sbcsgroupprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/contrib/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/contrib/__init__.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/contrib/ntlmpool.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/contrib/ntlmpool.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/connection.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/util/connection.pyc -------------------------------------------------------------------------------- /source/lib/requests-2.4.1.dist-info/WHEEL: -------------------------------------------------------------------------------- 1 | Wheel-Version: 1.0 2 | Generator: bdist_wheel (0.22.0) 3 | Root-Is-Purelib: true 4 | Tag: py2-none-any 5 | Tag: py3-none-any 6 | 7 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/charsetgroupprober.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/charsetgroupprober.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/codingstatemachine.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/codingstatemachine.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/langbulgarianmodel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/langbulgarianmodel.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/langcyrillicmodel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/langcyrillicmodel.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/langhungarianmodel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/langhungarianmodel.pyc -------------------------------------------------------------------------------- /source/requests/packages/chardet/universaldetector.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/chardet/universaldetector.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/contrib/pyopenssl.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/contrib/pyopenssl.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/packages/__init__.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/packages/__init__.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/packages/ordered_dict.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/packages/ordered_dict.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fspinillo/alfred-digital-ocean/HEAD/source/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyc -------------------------------------------------------------------------------- /source/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py: -------------------------------------------------------------------------------- 1 | try: 2 | # Python 3.2+ 3 | from ssl import CertificateError, match_hostname 4 | except ImportError: 5 | try: 6 | # Backport of the function from a pypi module 7 | from backports.ssl_match_hostname import CertificateError, match_hostname 8 | except ImportError: 9 | # Our vendored copy 10 | from ._implementation import CertificateError, match_hostname 11 | 12 | # Not needed, but documenting what we provide. 13 | __all__ = ('CertificateError', 'match_hostname') 14 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/__init__.py: -------------------------------------------------------------------------------- 1 | # For backwards compatibility, provide imports that used to be here. 2 | from .connection import is_connection_dropped 3 | from .request import make_headers 4 | from .response import is_fp_closed 5 | from .ssl_ import ( 6 | SSLContext, 7 | HAS_SNI, 8 | assert_fingerprint, 9 | resolve_cert_reqs, 10 | resolve_ssl_version, 11 | ssl_wrap_socket, 12 | ) 13 | from .timeout import ( 14 | current_time, 15 | Timeout, 16 | ) 17 | 18 | from .retry import Retry 19 | from .url import ( 20 | get_host, 21 | parse_url, 22 | split_first, 23 | Url, 24 | ) 25 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/response.py: -------------------------------------------------------------------------------- 1 | def is_fp_closed(obj): 2 | """ 3 | Checks whether a given file-like object is closed. 4 | 5 | :param obj: 6 | The file-like object to check. 7 | """ 8 | 9 | try: 10 | # Check via the official file-like-object way. 11 | return obj.closed 12 | except AttributeError: 13 | pass 14 | 15 | try: 16 | # Check if the object is a container for another file-like object that 17 | # gets released on exhaustion (e.g. HTTPResponse). 
18 | return obj.fp is None 19 | except AttributeError: 20 | pass 21 | 22 | raise ValueError("Unable to determine whether fp is closed.") 23 | -------------------------------------------------------------------------------- /source/requests/certs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | certs.py 6 | ~~~~~~~~ 7 | 8 | This module returns the preferred default CA certificate bundle. 9 | 10 | If you are packaging Requests, e.g., for a Linux distribution or a managed 11 | environment, you can change the definition of where() to return a separately 12 | packaged CA bundle. 13 | """ 14 | import os.path 15 | 16 | try: 17 | from certifi import where 18 | except ImportError: 19 | def where(): 20 | """Return the preferred certificate bundle.""" 21 | # vendored bundle inside Requests 22 | return os.path.join(os.path.dirname(__file__), 'cacert.pem') 23 | 24 | if __name__ == '__main__': 25 | print(where()) 26 | -------------------------------------------------------------------------------- /source/lib/requests-2.4.1.dist-info/pydist.json: -------------------------------------------------------------------------------- 1 | {"license": "Apache 2.0", "document_names": {"description": "DESCRIPTION.rst"}, "name": "requests", "metadata_version": "2.0", "contacts": [{"role": "author", "email": "me@kennethreitz.com", "name": "Kenneth Reitz"}], "generator": "bdist_wheel (0.22.0)", "summary": "Python HTTP for Humans.", "project_urls": {"Home": "http://python-requests.org"}, "run_requires": [{"requires": ["pyOpenSSL", "ndg-httpsclient", "pyasn1"], "extra": "security"}], "version": "2.4.1", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Natural Language :: English", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4"], "extras": ["security"]} -------------------------------------------------------------------------------- /source/do_poweron.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from workflow import Workflow, web, PasswordNotFound 3 | import requests 4 | 5 | def main(wf): 6 | #grab digital ocean token 7 | try: 8 | api_key = wf.get_password('digitalocean_api_key') 9 | except PasswordNotFound: 10 | wf.add_item('No API key set.', 11 | 'Please use dotoken to set your API key.', 12 | valid = False) 13 | 14 | # Pass droplet ID into workflow 15 | if len(wf.args): 16 | query = wf.args[0] 17 | 18 | # Build URL based on droplet ID, and generate header information from API key 19 | url = 'https://api.digitalocean.com/v2/droplets/' + query + '/actions' 20 | header = {'Authorization': 'Bearer ' + api_key + '', 'Content-Type': 'application/json'} 21 | payload = {'type': 'power_on'} 22 | 23 | requests.post(url, params = payload, headers = header) 24 | 25 | if __name__=='__main__': 26 | wf = Workflow() 27 | sys.exit(wf.run(main)) -------------------------------------------------------------------------------- /source/do_reboot.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from workflow import Workflow, web, PasswordNotFound 3 | import requests 4 | 5 | def main(wf): 6 | #grab digital ocean token 7 | try: 8 | 
api_key = wf.get_password('digitalocean_api_key') 9 | except PasswordNotFound: 10 | wf.add_item('No API key set.', 11 | 'Please use dotoken to set your API key.', 12 | valid = False) 13 | 14 | # Pass droplet ID into workflow 15 | if len(wf.args): 16 | query = wf.args[0] 17 | 18 | # Build URL based on droplet ID, and generate header information from API key 19 | url = 'https://api.digitalocean.com/v2/droplets/' + query + '/actions' 20 | header = {'Authorization': 'Bearer ' + api_key + '', 'Content-Type': 'application/json'} 21 | payload = {'type': 'reboot'} 22 | 23 | requests.post(url, params = payload, headers = header) 24 | 25 | if __name__=='__main__': 26 | wf = Workflow() 27 | sys.exit(wf.run(main)) -------------------------------------------------------------------------------- /source/do_shutdown.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from workflow import Workflow, web, PasswordNotFound 3 | import requests 4 | 5 | def main(wf): 6 | #grab digital ocean token 7 | try: 8 | api_key = wf.get_password('digitalocean_api_key') 9 | except PasswordNotFound: 10 | wf.add_item('No API key set.', 11 | 'Please use dotoken to set your API key.', 12 | valid = False) 13 | 14 | # pass droplet ID into workflow 15 | if len(wf.args): 16 | query = wf.args[0] 17 | 18 | # Build URL based on droplet ID, and generate header information from API key 19 | url = 'https://api.digitalocean.com/v2/droplets/' + query + '/actions' 20 | header = {'Authorization': 'Bearer ' + api_key + '', 'Content-Type': 'application/json'} 21 | payload = {'type': 'shutdown'} 22 | 23 | requests.post(url, params = payload, headers = header) 24 | 25 | if __name__=='__main__': 26 | wf = Workflow() 27 | sys.exit(wf.run(main)) -------------------------------------------------------------------------------- /source/do_power_cycle.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from workflow import Workflow, web, PasswordNotFound 3 | import requests 4 | 5 | def main(wf): 6 | #grab digital ocean token 7 | try: 8 | api_key = wf.get_password('digitalocean_api_key') 9 | except PasswordNotFound: 10 | wf.add_item('No API key set.', 11 | 'Please use dotoken to set your API key.', 12 | valid = False) 13 | 14 | # pass droplet ID into workflow 15 | if len(wf.args): 16 | query = wf.args[0] 17 | 18 | # Build URL based on droplet ID, and generate header information from API key 19 | url = 'https://api.digitalocean.com/v2/droplets/' + query + '/actions' 20 | header = {'Authorization': 'Bearer ' + api_key + '', 'Content-Type': 'application/json'} 21 | payload = {'type': 'power_cycle'} 22 | 23 | requests.post(url, params = payload, headers = header) 24 | 25 | if __name__=='__main__': 26 | wf = Workflow() 27 | sys.exit(wf.run(main)) -------------------------------------------------------------------------------- /source/requests/hooks.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.hooks 5 | ~~~~~~~~~~~~~~ 6 | 7 | This module provides the capabilities for the Requests hooks system. 8 | 9 | Available hooks: 10 | 11 | ``response``: 12 | The response generated from a Request. 
13 | 14 | """ 15 | 16 | 17 | HOOKS = ['response'] 18 | 19 | 20 | def default_hooks(): 21 | hooks = {} 22 | for event in HOOKS: 23 | hooks[event] = [] 24 | return hooks 25 | 26 | # TODO: response is the only one 27 | 28 | 29 | def dispatch_hook(key, hooks, hook_data, **kwargs): 30 | """Dispatches a hook dictionary on a given piece of data.""" 31 | 32 | hooks = hooks or dict() 33 | 34 | if key in hooks: 35 | hooks = hooks.get(key) 36 | 37 | if hasattr(hooks, '__call__'): 38 | hooks = [hooks] 39 | 40 | for hook in hooks: 41 | _hook_data = hook(hook_data, **kwargs) 42 | if _hook_data is not None: 43 | hook_data = _hook_data 44 | 45 | return hook_data 46 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/compat.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # Contributor(s): 3 | # Ian Cordasco - port to Python 4 | # 5 | # This library is free software; you can redistribute it and/or 6 | # modify it under the terms of the GNU Lesser General Public 7 | # License as published by the Free Software Foundation; either 8 | # version 2.1 of the License, or (at your option) any later version. 9 | # 10 | # This library is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 13 | # Lesser General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU Lesser General Public 16 | # License along with this library; if not, write to the Free Software 17 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 18 | # 02110-1301 USA 19 | ######################### END LICENSE BLOCK ######################### 20 | 21 | import sys 22 | 23 | 24 | if sys.version_info < (3, 0): 25 | base_str = (str, unicode) 26 | else: 27 | base_str = (bytes, str) 28 | 29 | 30 | def wrap_ord(a): 31 | if sys.version_info < (3, 0) and isinstance(a, base_str): 32 | return ord(a) 33 | else: 34 | return a 35 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/chardetect.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Script which takes one or more file paths and reports on their detected 4 | encodings 5 | 6 | Example:: 7 | 8 | % chardetect somefile someotherfile 9 | somefile: windows-1252 with confidence 0.5 10 | someotherfile: ascii with confidence 1.0 11 | 12 | If no paths are provided, it takes its input from stdin. 
13 | 14 | """ 15 | from io import open 16 | from sys import argv, stdin 17 | 18 | from chardet.universaldetector import UniversalDetector 19 | 20 | 21 | def description_of(file, name='stdin'): 22 | """Return a string describing the probable encoding of a file.""" 23 | u = UniversalDetector() 24 | for line in file: 25 | u.feed(line) 26 | u.close() 27 | result = u.result 28 | if result['encoding']: 29 | return '%s: %s with confidence %s' % (name, 30 | result['encoding'], 31 | result['confidence']) 32 | else: 33 | return '%s: no result' % name 34 | 35 | 36 | def main(): 37 | if len(argv) <= 1: 38 | print(description_of(stdin)) 39 | else: 40 | for path in argv[1:]: 41 | with open(path, 'rb') as f: 42 | print(description_of(f, path)) 43 | 44 | 45 | if __name__ == '__main__': 46 | main() 47 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/__init__.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # This library is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 2.1 of the License, or (at your option) any later version. 6 | # 7 | # This library is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | # 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with this library; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 15 | # 02110-1301 USA 16 | ######################### END LICENSE BLOCK ######################### 17 | 18 | __version__ = "2.2.1" 19 | from sys import version_info 20 | 21 | 22 | def detect(aBuf): 23 | if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or 24 | (version_info >= (3, 0) and not isinstance(aBuf, bytes))): 25 | raise ValueError('Expected a bytes object, not a unicode object') 26 | 27 | from . import universaldetector 28 | u = universaldetector.UniversalDetector() 29 | u.reset() 30 | u.feed(aBuf) 31 | u.close() 32 | return u.result 33 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/constants.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 
17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | _debug = 0 30 | 31 | eDetecting = 0 32 | eFoundIt = 1 33 | eNotMe = 2 34 | 35 | eStart = 0 36 | eError = 1 37 | eItsMe = 2 38 | 39 | SHORTCUT_THRESHOLD = 0.95 40 | -------------------------------------------------------------------------------- /source/do_snapshot.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from datetime import datetime 3 | from workflow import Workflow, web, PasswordNotFound 4 | import requests 5 | 6 | def main(wf): 7 | #grab digital ocean token 8 | try: 9 | api_key = wf.get_password('digitalocean_api_key') 10 | except PasswordNotFound: 11 | wf.add_item('No API key set.', 12 | 'Please use dotoken to set your API key.', 13 | valid = False) 14 | 15 | # get droplet id from previous script 16 | if len(wf.args): 17 | query = wf.args[0] 18 | 19 | # gather droplet information 20 | dropletData = requests.get('https://api.digitalocean.com/v2/droplets/' + query + '/', headers = {'Authorization': 'Bearer ' + api_key + ''}).json() 21 | 22 | # set the current date in YYYY-MM-DD format 23 | now = datetime.now() 24 | date = "%s-%s-%s" % (now.year, now.strftime("%m"), now.strftime("%d")) 25 | 26 | # set snapshot name based on droplet name and the date: dropletNAME-YYYY-MM-DD 27 | snapshotName = "%s-%s" % (dropletData['droplet']['name'], date) 28 | 29 | # build the URL and header information for creating a snapshot 30 | snapshotUrl = 'https://api.digitalocean.com/v2/droplets/' + query + '/actions' 31 | snapshotHeader = {'Authorization': 'Bearer ' + api_key + '', 'Content-Type': 'application/json'} 32 | snapshotPayload = {'type': 'snapshot', 'name': snapshotName} 33 | 34 | requests.post(snapshotUrl, params = snapshotPayload, headers = snapshotHeader) 35 | 36 | if __name__=='__main__': 37 | wf = Workflow() 38 | sys.exit(wf.run(main)) -------------------------------------------------------------------------------- /source/requests/packages/chardet/euckrprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import EUCKRDistributionAnalysis 31 | from .mbcssm import EUCKRSMModel 32 | 33 | 34 | class EUCKRProber(MultiByteCharSetProber): 35 | def __init__(self): 36 | MultiByteCharSetProber.__init__(self) 37 | self._mCodingSM = CodingStateMachine(EUCKRSMModel) 38 | self._mDistributionAnalyzer = EUCKRDistributionAnalysis() 39 | self.reset() 40 | 41 | def get_charset_name(self): 42 | return "EUC-KR" 43 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/euctwprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import EUCTWDistributionAnalysis 31 | from .mbcssm import EUCTWSMModel 32 | 33 | class EUCTWProber(MultiByteCharSetProber): 34 | def __init__(self): 35 | MultiByteCharSetProber.__init__(self) 36 | self._mCodingSM = CodingStateMachine(EUCTWSMModel) 37 | self._mDistributionAnalyzer = EUCTWDistributionAnalysis() 38 | self.reset() 39 | 40 | def get_charset_name(self): 41 | return "EUC-TW" 42 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/gb2312prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 
8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import GB2312DistributionAnalysis 31 | from .mbcssm import GB2312SMModel 32 | 33 | class GB2312Prober(MultiByteCharSetProber): 34 | def __init__(self): 35 | MultiByteCharSetProber.__init__(self) 36 | self._mCodingSM = CodingStateMachine(GB2312SMModel) 37 | self._mDistributionAnalyzer = GB2312DistributionAnalysis() 38 | self.reset() 39 | 40 | def get_charset_name(self): 41 | return "GB2312" 42 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/big5prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Communicator client code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import Big5DistributionAnalysis 31 | from .mbcssm import Big5SMModel 32 | 33 | 34 | class Big5Prober(MultiByteCharSetProber): 35 | def __init__(self): 36 | MultiByteCharSetProber.__init__(self) 37 | self._mCodingSM = CodingStateMachine(Big5SMModel) 38 | self._mDistributionAnalyzer = Big5DistributionAnalysis() 39 | self.reset() 40 | 41 | def get_charset_name(self): 42 | return "Big5" 43 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/cp949prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import EUCKRDistributionAnalysis 31 | from .mbcssm import CP949SMModel 32 | 33 | 34 | class CP949Prober(MultiByteCharSetProber): 35 | def __init__(self): 36 | MultiByteCharSetProber.__init__(self) 37 | self._mCodingSM = CodingStateMachine(CP949SMModel) 38 | # NOTE: CP949 is a superset of EUC-KR, so the distribution should be 39 | # not different. 40 | self._mDistributionAnalyzer = EUCKRDistributionAnalysis() 41 | self.reset() 42 | 43 | def get_charset_name(self): 44 | return "CP949" 45 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | DigitalOcean's API is still in beta, so this will be evolving as their API access evolves. 2 | 3 | This workflow allows you to interact with your droplets to perform various functions. 
Currently you can do the following: 4 | 5 | * Get the current status of your droplets 6 | * Reboot a droplet 7 | * Shut down a droplet 8 | * Power cycle a droplet 9 | * Power on a droplet 10 | * Create snapshots of a droplet 11 | 12 | The workflow uses standard Python packages, except for Workflow, so no additional requirements are needed. Workflow is bundled in the file, and was created by [Dean Jackson (deanishe)](https://github.com/deanishe/alfred-workflow/). 13 | 14 | Setup 15 | --- 16 | 1. To set this up you'll first need to get a token from your [settings](https://cloud.digitalocean.com/settings/applications) page. 17 | 18 | 2. In order to control droplets you need to set your token scope to **write**. If it is not set to write, you won't be able to power on, shut down, or take snapshots. 19 | 20 | 3. Run 'dotoken', and this will let you paste your token. 21 | 22 | 4. A notification will display, letting you know the token was saved. 23 | 24 | Using the workflow 25 | --- 26 | Status: 27 | 'dos' will return information pertaining to your droplets. You will get back activity status, name, CPUs, RAM, and HDD size. If the droplet is in the middle of a task it will inform you. 28 | 29 | Shutdown: 30 | After running 'dos', hold down ctrl to pick a droplet to shut down. 31 | 32 | Reboot: 33 | After running 'dos', hold down alt to pick a droplet to reboot. 34 | 35 | Power-on: 36 | After running 'dos', hold down cmd to pick a droplet to power on. 37 | 38 | Power-cycle: 39 | After running 'dos', hold down fn to pick a droplet to power-cycle. 40 | 41 | Snapshots: 42 | 'sshot' will check to see which droplets are currently off. If a droplet is not off, it will inform you to power it down first. Select a powered-down droplet and hit enter to initiate the snapshot. Snapshots are named based on the droplet name and date. Example: DropletName-YYYY-MM-DD 43 | 44 | Todo 45 | --- 46 | 1. Store the droplet information locally for quicker access 47 | 2. Return more information in the notification 48 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | urllib3 - Thread-safe connection pooling and re-using. 3 | """ 4 | 5 | __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' 6 | __license__ = 'MIT' 7 | __version__ = 'dev' 8 | 9 | 10 | from .connectionpool import ( 11 | HTTPConnectionPool, 12 | HTTPSConnectionPool, 13 | connection_from_url 14 | ) 15 | 16 | from . import exceptions 17 | from .filepost import encode_multipart_formdata 18 | from .poolmanager import PoolManager, ProxyManager, proxy_from_url 19 | from .response import HTTPResponse 20 | from .util.request import make_headers 21 | from .util.url import get_host 22 | from .util.timeout import Timeout 23 | from .util.retry import Retry 24 | 25 | 26 | # Set default logging handler to avoid "No handler found" warnings. 27 | import logging 28 | try: # Python 2.7+ 29 | from logging import NullHandler 30 | except ImportError: 31 | class NullHandler(logging.Handler): 32 | def emit(self, record): 33 | pass 34 | 35 | logging.getLogger(__name__).addHandler(NullHandler()) 36 | 37 | def add_stderr_logger(level=logging.DEBUG): 38 | """ 39 | Helper for quickly adding a StreamHandler to the logger. Useful for 40 | debugging. 41 | 42 | Returns the handler after adding it. 43 | """ 44 | # This method needs to be in this __init__.py to get the __name__ correct 45 | # even if urllib3 is vendored within another package. 
46 | logger = logging.getLogger(__name__) 47 | handler = logging.StreamHandler() 48 | handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) 49 | logger.addHandler(handler) 50 | logger.setLevel(level) 51 | logger.debug('Added a stderr logging handler to logger: %s' % __name__) 52 | return handler 53 | 54 | # ... Clean up. 55 | del NullHandler 56 | 57 | 58 | # Set security warning to only go off once by default. 59 | import warnings 60 | warnings.simplefilter('module', exceptions.SecurityWarning) 61 | 62 | def disable_warnings(category=exceptions.HTTPWarning): 63 | """ 64 | Helper for quickly disabling all urllib3 warnings. 65 | """ 66 | warnings.simplefilter('ignore', category) 67 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/charsetprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | from . import constants 30 | import re 31 | 32 | 33 | class CharSetProber: 34 | def __init__(self): 35 | pass 36 | 37 | def reset(self): 38 | self._mState = constants.eDetecting 39 | 40 | def get_charset_name(self): 41 | return None 42 | 43 | def feed(self, aBuf): 44 | pass 45 | 46 | def get_state(self): 47 | return self._mState 48 | 49 | def get_confidence(self): 50 | return 0.0 51 | 52 | def filter_high_bit_only(self, aBuf): 53 | aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf) 54 | return aBuf 55 | 56 | def filter_without_english_letters(self, aBuf): 57 | aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf) 58 | return aBuf 59 | 60 | def filter_with_english_letters(self, aBuf): 61 | # TODO 62 | return aBuf 63 | -------------------------------------------------------------------------------- /source/requests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # __ 4 | # /__) _ _ _ _ _/ _ 5 | # / ( (- (/ (/ (- _) / _) 6 | # / 7 | 8 | """ 9 | requests HTTP library 10 | ~~~~~~~~~~~~~~~~~~~~~ 11 | 12 | Requests is an HTTP library, written in Python, for human beings. 
Basic GET 13 | usage: 14 | 15 | >>> import requests 16 | >>> r = requests.get('http://python.org') 17 | >>> r.status_code 18 | 200 19 | >>> 'Python is a programming language' in r.content 20 | True 21 | 22 | ... or POST: 23 | 24 | >>> payload = dict(key1='value1', key2='value2') 25 | >>> r = requests.post("http://httpbin.org/post", data=payload) 26 | >>> print(r.text) 27 | { 28 | ... 29 | "form": { 30 | "key2": "value2", 31 | "key1": "value1" 32 | }, 33 | ... 34 | } 35 | 36 | The other HTTP methods are supported - see `requests.api`. Full documentation 37 | is at . 38 | 39 | :copyright: (c) 2014 by Kenneth Reitz. 40 | :license: Apache 2.0, see LICENSE for more details. 41 | 42 | """ 43 | 44 | __title__ = 'requests' 45 | __version__ = '2.4.1' 46 | __build__ = 0x020401 47 | __author__ = 'Kenneth Reitz' 48 | __license__ = 'Apache 2.0' 49 | __copyright__ = 'Copyright 2014 Kenneth Reitz' 50 | 51 | # Attempt to enable urllib3's SNI support, if possible 52 | try: 53 | from .packages.urllib3.contrib import pyopenssl 54 | pyopenssl.inject_into_urllib3() 55 | except ImportError: 56 | pass 57 | 58 | from . import utils 59 | from .models import Request, Response, PreparedRequest 60 | from .api import request, get, head, post, patch, put, delete, options 61 | from .sessions import session, Session 62 | from .status_codes import codes 63 | from .exceptions import ( 64 | RequestException, Timeout, URLRequired, 65 | TooManyRedirects, HTTPError, ConnectionError 66 | ) 67 | 68 | # Set default logging handler to avoid "No handler found" warnings. 69 | import logging 70 | try: # Python 2.7+ 71 | from logging import NullHandler 72 | except ImportError: 73 | class NullHandler(logging.Handler): 74 | def emit(self, record): 75 | pass 76 | 77 | logging.getLogger(__name__).addHandler(NullHandler()) 78 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/mbcsgroupprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # Proofpoint, Inc. 13 | # 14 | # This library is free software; you can redistribute it and/or 15 | # modify it under the terms of the GNU Lesser General Public 16 | # License as published by the Free Software Foundation; either 17 | # version 2.1 of the License, or (at your option) any later version. 18 | # 19 | # This library is distributed in the hope that it will be useful, 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 22 | # Lesser General Public License for more details. 
23 | # 24 | # You should have received a copy of the GNU Lesser General Public 25 | # License along with this library; if not, write to the Free Software 26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 27 | # 02110-1301 USA 28 | ######################### END LICENSE BLOCK ######################### 29 | 30 | from .charsetgroupprober import CharSetGroupProber 31 | from .utf8prober import UTF8Prober 32 | from .sjisprober import SJISProber 33 | from .eucjpprober import EUCJPProber 34 | from .gb2312prober import GB2312Prober 35 | from .euckrprober import EUCKRProber 36 | from .cp949prober import CP949Prober 37 | from .big5prober import Big5Prober 38 | from .euctwprober import EUCTWProber 39 | 40 | 41 | class MBCSGroupProber(CharSetGroupProber): 42 | def __init__(self): 43 | CharSetGroupProber.__init__(self) 44 | self._mProbers = [ 45 | UTF8Prober(), 46 | SJISProber(), 47 | EUCJPProber(), 48 | GB2312Prober(), 49 | EUCKRProber(), 50 | CP949Prober(), 51 | Big5Prober(), 52 | EUCTWProber() 53 | ] 54 | self.reset() 55 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/request.py: -------------------------------------------------------------------------------- 1 | from base64 import b64encode 2 | 3 | from ..packages.six import b 4 | 5 | ACCEPT_ENCODING = 'gzip,deflate' 6 | 7 | 8 | def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, 9 | basic_auth=None, proxy_basic_auth=None, disable_cache=None): 10 | """ 11 | Shortcuts for generating request headers. 12 | 13 | :param keep_alive: 14 | If ``True``, adds 'connection: keep-alive' header. 15 | 16 | :param accept_encoding: 17 | Can be a boolean, list, or string. 18 | ``True`` translates to 'gzip,deflate'. 19 | List will get joined by comma. 20 | String will be used as provided. 21 | 22 | :param user_agent: 23 | String representing the user-agent you want, such as 24 | "python-urllib3/0.6" 25 | 26 | :param basic_auth: 27 | Colon-separated username:password string for 'authorization: basic ...' 28 | auth header. 29 | 30 | :param proxy_basic_auth: 31 | Colon-separated username:password string for 'proxy-authorization: basic ...' 32 | auth header. 33 | 34 | :param disable_cache: 35 | If ``True``, adds 'cache-control: no-cache' header. 
36 | 37 | Example:: 38 | 39 | >>> make_headers(keep_alive=True, user_agent="Batman/1.0") 40 | {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} 41 | >>> make_headers(accept_encoding=True) 42 | {'accept-encoding': 'gzip,deflate'} 43 | """ 44 | headers = {} 45 | if accept_encoding: 46 | if isinstance(accept_encoding, str): 47 | pass 48 | elif isinstance(accept_encoding, list): 49 | accept_encoding = ','.join(accept_encoding) 50 | else: 51 | accept_encoding = ACCEPT_ENCODING 52 | headers['accept-encoding'] = accept_encoding 53 | 54 | if user_agent: 55 | headers['user-agent'] = user_agent 56 | 57 | if keep_alive: 58 | headers['connection'] = 'keep-alive' 59 | 60 | if basic_auth: 61 | headers['authorization'] = 'Basic ' + \ 62 | b64encode(b(basic_auth)).decode('utf-8') 63 | 64 | if proxy_basic_auth: 65 | headers['proxy-authorization'] = 'Basic ' + \ 66 | b64encode(b(proxy_basic_auth)).decode('utf-8') 67 | 68 | if disable_cache: 69 | headers['cache-control'] = 'no-cache' 70 | 71 | return headers 72 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/codingstatemachine.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .constants import eStart 29 | from .compat import wrap_ord 30 | 31 | 32 | class CodingStateMachine: 33 | def __init__(self, sm): 34 | self._mModel = sm 35 | self._mCurrentBytePos = 0 36 | self._mCurrentCharLen = 0 37 | self.reset() 38 | 39 | def reset(self): 40 | self._mCurrentState = eStart 41 | 42 | def next_state(self, c): 43 | # for each byte we get its class 44 | # if it is first byte, we also get byte length 45 | # PY3K: aBuf is a byte stream, so c is an int, not a byte 46 | byteCls = self._mModel['classTable'][wrap_ord(c)] 47 | if self._mCurrentState == eStart: 48 | self._mCurrentBytePos = 0 49 | self._mCurrentCharLen = self._mModel['charLenTable'][byteCls] 50 | # from byte's class and stateTable, we get its next state 51 | curr_state = (self._mCurrentState * self._mModel['classFactor'] 52 | + byteCls) 53 | self._mCurrentState = self._mModel['stateTable'][curr_state] 54 | self._mCurrentBytePos += 1 55 | return self._mCurrentState 56 | 57 | def get_current_charlen(self): 58 | return self._mCurrentCharLen 59 | 60 | def get_coding_state_machine(self): 61 | return self._mModel['name'] 62 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/filepost.py: -------------------------------------------------------------------------------- 1 | import codecs 2 | 3 | from uuid import uuid4 4 | from io import BytesIO 5 | 6 | from .packages import six 7 | from .packages.six import b 8 | from .fields import RequestField 9 | 10 | writer = codecs.lookup('utf-8')[3] 11 | 12 | 13 | def choose_boundary(): 14 | """ 15 | Our embarassingly-simple replacement for mimetools.choose_boundary. 16 | """ 17 | return uuid4().hex 18 | 19 | 20 | def iter_field_objects(fields): 21 | """ 22 | Iterate over fields. 23 | 24 | Supports list of (k, v) tuples and dicts, and lists of 25 | :class:`~urllib3.fields.RequestField`. 26 | 27 | """ 28 | if isinstance(fields, dict): 29 | i = six.iteritems(fields) 30 | else: 31 | i = iter(fields) 32 | 33 | for field in i: 34 | if isinstance(field, RequestField): 35 | yield field 36 | else: 37 | yield RequestField.from_tuples(*field) 38 | 39 | 40 | def iter_fields(fields): 41 | """ 42 | .. deprecated:: 1.6 43 | 44 | Iterate over fields. 45 | 46 | The addition of :class:`~urllib3.fields.RequestField` makes this function 47 | obsolete. Instead, use :func:`iter_field_objects`, which returns 48 | :class:`~urllib3.fields.RequestField` objects. 49 | 50 | Supports list of (k, v) tuples and dicts. 51 | """ 52 | if isinstance(fields, dict): 53 | return ((k, v) for k, v in six.iteritems(fields)) 54 | 55 | return ((k, v) for k, v in fields) 56 | 57 | 58 | def encode_multipart_formdata(fields, boundary=None): 59 | """ 60 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format. 61 | 62 | :param fields: 63 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). 64 | 65 | :param boundary: 66 | If not specified, then a random boundary will be generated using 67 | :func:`mimetools.choose_boundary`. 
68 | """ 69 | body = BytesIO() 70 | if boundary is None: 71 | boundary = choose_boundary() 72 | 73 | for field in iter_field_objects(fields): 74 | body.write(b('--%s\r\n' % (boundary))) 75 | 76 | writer(body).write(field.render_headers()) 77 | data = field.data 78 | 79 | if isinstance(data, int): 80 | data = str(data) # Backwards compatibility 81 | 82 | if isinstance(data, six.text_type): 83 | writer(body).write(data) 84 | else: 85 | body.write(data) 86 | 87 | body.write(b'\r\n') 88 | 89 | body.write(b('--%s--\r\n' % (boundary))) 90 | 91 | content_type = str('multipart/form-data; boundary=%s' % boundary) 92 | 93 | return body.getvalue(), content_type 94 | -------------------------------------------------------------------------------- /source/requests/exceptions.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.exceptions 5 | ~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module contains the set of Requests' exceptions. 8 | 9 | """ 10 | from .packages.urllib3.exceptions import HTTPError as BaseHTTPError 11 | 12 | 13 | class RequestException(IOError): 14 | """There was an ambiguous exception that occurred while handling your 15 | request.""" 16 | 17 | def __init__(self, *args, **kwargs): 18 | """ 19 | Initialize RequestException with `request` and `response` objects. 20 | """ 21 | response = kwargs.pop('response', None) 22 | self.response = response 23 | self.request = kwargs.pop('request', None) 24 | if (response is not None and not self.request and 25 | hasattr(response, 'request')): 26 | self.request = self.response.request 27 | super(RequestException, self).__init__(*args, **kwargs) 28 | 29 | 30 | class HTTPError(RequestException): 31 | """An HTTP error occurred.""" 32 | 33 | 34 | class ConnectionError(RequestException): 35 | """A Connection error occurred.""" 36 | 37 | 38 | class ProxyError(ConnectionError): 39 | """A proxy error occurred.""" 40 | 41 | 42 | class SSLError(ConnectionError): 43 | """An SSL error occurred.""" 44 | 45 | 46 | class Timeout(RequestException): 47 | """The request timed out. 48 | 49 | Catching this error will catch both 50 | :exc:`~requests.exceptions.ConnectTimeout` and 51 | :exc:`~requests.exceptions.ReadTimeout` errors. 52 | """ 53 | 54 | 55 | class ConnectTimeout(ConnectionError, Timeout): 56 | """The request timed out while trying to connect to the remote server. 57 | 58 | Requests that produced this error are safe to retry. 59 | """ 60 | 61 | 62 | class ReadTimeout(Timeout): 63 | """The server did not send any data in the allotted amount of time.""" 64 | 65 | 66 | class URLRequired(RequestException): 67 | """A valid URL is required to make a request.""" 68 | 69 | 70 | class TooManyRedirects(RequestException): 71 | """Too many redirects.""" 72 | 73 | 74 | class MissingSchema(RequestException, ValueError): 75 | """The URL schema (e.g. http or https) is missing.""" 76 | 77 | 78 | class InvalidSchema(RequestException, ValueError): 79 | """See defaults.py for valid schemas.""" 80 | 81 | 82 | class InvalidURL(RequestException, ValueError): 83 | """ The URL provided was somehow invalid. 
""" 84 | 85 | 86 | class ChunkedEncodingError(RequestException): 87 | """The server declared chunked encoding but sent an invalid chunk.""" 88 | 89 | 90 | class ContentDecodingError(RequestException, BaseHTTPError): 91 | """Failed to decode response content""" 92 | -------------------------------------------------------------------------------- /source/do_snapshot_check.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import argparse 3 | from workflow import Workflow, ICON_WEB, ICON_WARNING, web, PasswordNotFound 4 | 5 | def main(wf): 6 | 7 | # parser to gather args from the acript 8 | parser = argparse.ArgumentParser() 9 | 10 | # saves the key to 'apikey' 11 | parser.add_argument('--setkey', dest='apikey', nargs='?', default=None) 12 | args = parser.parse_args(wf.args) 13 | 14 | # saving the api key 15 | if args.apikey: 16 | wf.save_password('digitalocean_api_key', args.apikey) 17 | return 0 18 | 19 | # verify that an API key has been set 20 | try: 21 | api_key = wf.get_password('digitalocean_api_key') 22 | except PasswordNotFound: 23 | wf.add_item('No API key set.', 24 | 'Please use dotoken to set your API key.', 25 | valid=False, 26 | icon=ICON_WARNING) 27 | 28 | url = 'https://api.digitalocean.com/v2/droplets' 29 | header = {'Authorization': 'Bearer ' + api_key + ''} 30 | 31 | # gather the data, store the JSON and builds the array 32 | r = web.get(url, headers=header) 33 | data = r.json() 34 | droplet_array = data['droplets'] 35 | 36 | for droplet in droplet_array: 37 | # Check to see if droplet is active. If it is active it lets the user # know they need to power off the droplet first. 38 | # After the check, the droplet status is returned 39 | # Selecting a droplet will pass its ID into Alfred for the next script 40 | 41 | if droplet['status'] == 'active': 42 | wf.add_item(title = '%s is still active' % droplet['name'], 43 | subtitle = 'Please power off before creating snapshot') 44 | else: 45 | if droplet['memory'] == 512: 46 | wf.add_item(title ='%s is %s on %s' % ( 47 | droplet['name'], 48 | droplet['status'], 49 | droplet['region']['name']), 50 | subtitle = 'CPU(s): %s || Memory: %sMB || Size: %sGB' % ( 51 | droplet['vcpus'], 52 | droplet['memory'], 53 | droplet['disk']), 54 | arg = str(droplet['id']), 55 | valid = True) 56 | else: 57 | memory_string = str(droplet['memory']) 58 | wf.add_item(title ='%s is %s on %s' % ( 59 | droplet['name'], 60 | droplet['status'], 61 | droplet['region']['name']), 62 | subtitle = 'CPU(s): %s || Memory: %sGB || Size: %sGB' % ( 63 | droplet['vcpus'], 64 | memory_string[0], 65 | droplet['disk']), 66 | arg = str(droplet['id']), 67 | valid = True) 68 | 69 | wf.send_feedback() 70 | 71 | if __name__=='__main__': 72 | wf = Workflow() 73 | sys.exit(wf.run(main)) -------------------------------------------------------------------------------- /source/requests/packages/chardet/utf8prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 
8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from . import constants 29 | from .charsetprober import CharSetProber 30 | from .codingstatemachine import CodingStateMachine 31 | from .mbcssm import UTF8SMModel 32 | 33 | ONE_CHAR_PROB = 0.5 34 | 35 | 36 | class UTF8Prober(CharSetProber): 37 | def __init__(self): 38 | CharSetProber.__init__(self) 39 | self._mCodingSM = CodingStateMachine(UTF8SMModel) 40 | self.reset() 41 | 42 | def reset(self): 43 | CharSetProber.reset(self) 44 | self._mCodingSM.reset() 45 | self._mNumOfMBChar = 0 46 | 47 | def get_charset_name(self): 48 | return "utf-8" 49 | 50 | def feed(self, aBuf): 51 | for c in aBuf: 52 | codingState = self._mCodingSM.next_state(c) 53 | if codingState == constants.eError: 54 | self._mState = constants.eNotMe 55 | break 56 | elif codingState == constants.eItsMe: 57 | self._mState = constants.eFoundIt 58 | break 59 | elif codingState == constants.eStart: 60 | if self._mCodingSM.get_current_charlen() >= 2: 61 | self._mNumOfMBChar += 1 62 | 63 | if self.get_state() == constants.eDetecting: 64 | if self.get_confidence() > constants.SHORTCUT_THRESHOLD: 65 | self._mState = constants.eFoundIt 66 | 67 | return self.get_state() 68 | 69 | def get_confidence(self): 70 | unlike = 0.99 71 | if self._mNumOfMBChar < 6: 72 | for i in range(0, self._mNumOfMBChar): 73 | unlike = unlike * ONE_CHAR_PROB 74 | return 1.0 - unlike 75 | else: 76 | return unlike 77 | -------------------------------------------------------------------------------- /source/requests/compat.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | pythoncompat 5 | """ 6 | 7 | from .packages import chardet 8 | 9 | import sys 10 | 11 | # ------- 12 | # Pythons 13 | # ------- 14 | 15 | # Syntax sugar. 16 | _ver = sys.version_info 17 | 18 | #: Python 2.x? 19 | is_py2 = (_ver[0] == 2) 20 | 21 | #: Python 3.x? 22 | is_py3 = (_ver[0] == 3) 23 | 24 | #: Python 3.0.x 25 | is_py30 = (is_py3 and _ver[1] == 0) 26 | 27 | #: Python 3.1.x 28 | is_py31 = (is_py3 and _ver[1] == 1) 29 | 30 | #: Python 3.2.x 31 | is_py32 = (is_py3 and _ver[1] == 2) 32 | 33 | #: Python 3.3.x 34 | is_py33 = (is_py3 and _ver[1] == 3) 35 | 36 | #: Python 3.4.x 37 | is_py34 = (is_py3 and _ver[1] == 4) 38 | 39 | #: Python 2.7.x 40 | is_py27 = (is_py2 and _ver[1] == 7) 41 | 42 | #: Python 2.6.x 43 | is_py26 = (is_py2 and _ver[1] == 6) 44 | 45 | #: Python 2.5.x 46 | is_py25 = (is_py2 and _ver[1] == 5) 47 | 48 | #: Python 2.4.x 49 | is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice. 
50 | 51 | 52 | # --------- 53 | # Platforms 54 | # --------- 55 | 56 | 57 | # Syntax sugar. 58 | _ver = sys.version.lower() 59 | 60 | is_pypy = ('pypy' in _ver) 61 | is_jython = ('jython' in _ver) 62 | is_ironpython = ('iron' in _ver) 63 | 64 | # Assume CPython, if nothing else. 65 | is_cpython = not any((is_pypy, is_jython, is_ironpython)) 66 | 67 | # Windows-based system. 68 | is_windows = 'win32' in str(sys.platform).lower() 69 | 70 | # Standard Linux 2+ system. 71 | is_linux = ('linux' in str(sys.platform).lower()) 72 | is_osx = ('darwin' in str(sys.platform).lower()) 73 | is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess. 74 | is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess. 75 | 76 | try: 77 | import simplejson as json 78 | except (ImportError, SyntaxError): 79 | # simplejson does not support Python 3.2, it thows a SyntaxError 80 | # because of u'...' Unicode literals. 81 | import json 82 | 83 | # --------- 84 | # Specifics 85 | # --------- 86 | 87 | if is_py2: 88 | from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass 89 | from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag 90 | from urllib2 import parse_http_list 91 | import cookielib 92 | from Cookie import Morsel 93 | from StringIO import StringIO 94 | from .packages.urllib3.packages.ordered_dict import OrderedDict 95 | 96 | builtin_str = str 97 | bytes = str 98 | str = unicode 99 | basestring = basestring 100 | numeric_types = (int, long, float) 101 | 102 | 103 | elif is_py3: 104 | from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag 105 | from urllib.request import parse_http_list, getproxies, proxy_bypass 106 | from http import cookiejar as cookielib 107 | from http.cookies import Morsel 108 | from io import StringIO 109 | from collections import OrderedDict 110 | 111 | builtin_str = str 112 | str = str 113 | bytes = bytes 114 | basestring = (str, bytes) 115 | numeric_types = (int, float) 116 | -------------------------------------------------------------------------------- /source/do_alfred.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import argparse 3 | from workflow import Workflow, ICON_WEB, ICON_WARNING, web, PasswordNotFound 4 | import requests 5 | 6 | def main(wf): 7 | 8 | # parser to gather args from the script 9 | parser = argparse.ArgumentParser() 10 | 11 | # saves the key to 'apikey' 12 | parser.add_argument('--setkey', dest='apikey', nargs='?', default=None) 13 | args = parser.parse_args(wf.args) 14 | 15 | # saving the api key 16 | if args.apikey: 17 | wf.save_password('digitalocean_api_key', args.apikey) 18 | return 0 19 | 20 | # verify that an API key has been set 21 | try: 22 | api_key = wf.get_password('digitalocean_api_key') 23 | except PasswordNotFound: 24 | wf.add_item('No API key set.', 25 | 'Please use dotoken to set your API key.', 26 | valid=False, 27 | icon=ICON_WARNING) 28 | wf.send_feedback() 29 | return 0 30 | 31 | # build URL and header info for API request 32 | url = 'https://api.digitalocean.com/v2/droplets' 33 | header = {'Authorization': 'Bearer ' + api_key + ''} 34 | 35 | # gather the data, store the JSON and build the array 36 | data = web.get(url, headers=header).json() 37 | 38 | droplet_array = data['droplets'] 39 | 40 | # This is a multi-step process for checking on the status 41 | # It first checks the individual status of each droplet 42 | # If a droplet 
is currently "in-progress" it will return what it is doing 43 | # If the droplet is not in an "in-progress" state, it returns general information 44 | 45 | for droplet in droplet_array: 46 | 47 | # get the current status of the individual droplets 48 | dropletID = droplet['id'] 49 | statusURL = 'https://api.digitalocean.com/v2/droplets/%s/actions' % dropletID 50 | statusData = requests.get(statusURL, headers=header).json() 51 | 52 | # Returns what the droplet is doing if "in-progress" 53 | # Else it checks for memory size first, then returns basic information 54 | if statusData['actions'][0]['status'] == "in-progress": 55 | wf.add_item("%s is in the middle of a %s" % (droplet['name'], statusData['actions'][0]['type'])) 56 | elif droplet['memory'] == 512: 57 | wf.add_item(title ='%s is %s on %s' % ( 58 | droplet['name'], 59 | droplet['status'], 60 | droplet['region']['name']), 61 | subtitle = 'CPU(s): %s || Memory: %sMB || Size: %sGB' % ( 62 | droplet['vcpus'], 63 | droplet['memory'], 64 | droplet['disk']), 65 | arg = str(droplet['id']), 66 | valid = True) 67 | else: 68 | memory_string = str(droplet['memory']) 69 | wf.add_item(title ='%s is %s on %s' % ( 70 | droplet['name'], 71 | droplet['status'], 72 | droplet['region']['name']), 73 | subtitle = 'CPU(s): %s || Memory: %sGB || Size: %sGB' % ( 74 | droplet['vcpus'], 75 | memory_string[0], 76 | droplet['disk']), 77 | arg = str(droplet['id']), 78 | valid = True) 79 | 80 | wf.send_feedback() 81 | 82 | if __name__=='__main__': 83 | wf = Workflow() 84 | sys.exit(wf.run(main)) -------------------------------------------------------------------------------- /source/requests/structures.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.structures 5 | ~~~~~~~~~~~~~~~~~~~ 6 | 7 | Data structures that power Requests. 8 | 9 | """ 10 | 11 | import collections 12 | 13 | 14 | class CaseInsensitiveDict(collections.MutableMapping): 15 | """ 16 | A case-insensitive ``dict``-like object. 17 | 18 | Implements all methods and operations of 19 | ``collections.MutableMapping`` as well as dict's ``copy``. Also 20 | provides ``lower_items``. 21 | 22 | All keys are expected to be strings. The structure remembers the 23 | case of the last key to be set, and ``iter(instance)``, 24 | ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` 25 | will contain case-sensitive keys. However, querying and contains 26 | testing is case insensitive:: 27 | 28 | cid = CaseInsensitiveDict() 29 | cid['Accept'] = 'application/json' 30 | cid['aCCEPT'] == 'application/json' # True 31 | list(cid) == ['Accept'] # True 32 | 33 | For example, ``headers['content-encoding']`` will return the 34 | value of a ``'Content-Encoding'`` response header, regardless 35 | of how the header name was originally stored. 36 | 37 | If the constructor, ``.update``, or equality comparison 38 | operations are given keys that have equal ``.lower()``s, the 39 | behavior is undefined. 40 | 41 | """ 42 | def __init__(self, data=None, **kwargs): 43 | self._store = dict() 44 | if data is None: 45 | data = {} 46 | self.update(data, **kwargs) 47 | 48 | def __setitem__(self, key, value): 49 | # Use the lowercased key for lookups, but store the actual 50 | # key alongside the value. 
51 | self._store[key.lower()] = (key, value) 52 | 53 | def __getitem__(self, key): 54 | return self._store[key.lower()][1] 55 | 56 | def __delitem__(self, key): 57 | del self._store[key.lower()] 58 | 59 | def __iter__(self): 60 | return (casedkey for casedkey, mappedvalue in self._store.values()) 61 | 62 | def __len__(self): 63 | return len(self._store) 64 | 65 | def lower_items(self): 66 | """Like iteritems(), but with all lowercase keys.""" 67 | return ( 68 | (lowerkey, keyval[1]) 69 | for (lowerkey, keyval) 70 | in self._store.items() 71 | ) 72 | 73 | def __eq__(self, other): 74 | if isinstance(other, collections.Mapping): 75 | other = CaseInsensitiveDict(other) 76 | else: 77 | return NotImplemented 78 | # Compare insensitively 79 | return dict(self.lower_items()) == dict(other.lower_items()) 80 | 81 | # Copy is required 82 | def copy(self): 83 | return CaseInsensitiveDict(self._store.values()) 84 | 85 | def __repr__(self): 86 | return str(dict(self.items())) 87 | 88 | class LookupDict(dict): 89 | """Dictionary lookup object.""" 90 | 91 | def __init__(self, name=None): 92 | self.name = name 93 | super(LookupDict, self).__init__() 94 | 95 | def __repr__(self): 96 | return '' % (self.name) 97 | 98 | def __getitem__(self, key): 99 | # We allow fall-through here, so values default to None 100 | 101 | return self.__dict__.get(key, None) 102 | 103 | def get(self, key, default=None): 104 | return self.__dict__.get(key, default) 105 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/escprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from . 
import constants 29 | from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel, 30 | ISO2022KRSMModel) 31 | from .charsetprober import CharSetProber 32 | from .codingstatemachine import CodingStateMachine 33 | from .compat import wrap_ord 34 | 35 | 36 | class EscCharSetProber(CharSetProber): 37 | def __init__(self): 38 | CharSetProber.__init__(self) 39 | self._mCodingSM = [ 40 | CodingStateMachine(HZSMModel), 41 | CodingStateMachine(ISO2022CNSMModel), 42 | CodingStateMachine(ISO2022JPSMModel), 43 | CodingStateMachine(ISO2022KRSMModel) 44 | ] 45 | self.reset() 46 | 47 | def reset(self): 48 | CharSetProber.reset(self) 49 | for codingSM in self._mCodingSM: 50 | if not codingSM: 51 | continue 52 | codingSM.active = True 53 | codingSM.reset() 54 | self._mActiveSM = len(self._mCodingSM) 55 | self._mDetectedCharset = None 56 | 57 | def get_charset_name(self): 58 | return self._mDetectedCharset 59 | 60 | def get_confidence(self): 61 | if self._mDetectedCharset: 62 | return 0.99 63 | else: 64 | return 0.00 65 | 66 | def feed(self, aBuf): 67 | for c in aBuf: 68 | # PY3K: aBuf is a byte array, so c is an int, not a byte 69 | for codingSM in self._mCodingSM: 70 | if not codingSM: 71 | continue 72 | if not codingSM.active: 73 | continue 74 | codingState = codingSM.next_state(wrap_ord(c)) 75 | if codingState == constants.eError: 76 | codingSM.active = False 77 | self._mActiveSM -= 1 78 | if self._mActiveSM <= 0: 79 | self._mState = constants.eNotMe 80 | return self.get_state() 81 | elif codingState == constants.eItsMe: 82 | self._mState = constants.eFoundIt 83 | self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8 84 | return self.get_state() 85 | 86 | return self.get_state() 87 | -------------------------------------------------------------------------------- /source/requests/status_codes.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from .structures import LookupDict 4 | 5 | _codes = { 6 | 7 | # Informational. 8 | 100: ('continue',), 9 | 101: ('switching_protocols',), 10 | 102: ('processing',), 11 | 103: ('checkpoint',), 12 | 122: ('uri_too_long', 'request_uri_too_long'), 13 | 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), 14 | 201: ('created',), 15 | 202: ('accepted',), 16 | 203: ('non_authoritative_info', 'non_authoritative_information'), 17 | 204: ('no_content',), 18 | 205: ('reset_content', 'reset'), 19 | 206: ('partial_content', 'partial'), 20 | 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), 21 | 208: ('already_reported',), 22 | 226: ('im_used',), 23 | 24 | # Redirection. 25 | 300: ('multiple_choices',), 26 | 301: ('moved_permanently', 'moved', '\\o-'), 27 | 302: ('found',), 28 | 303: ('see_other', 'other'), 29 | 304: ('not_modified',), 30 | 305: ('use_proxy',), 31 | 306: ('switch_proxy',), 32 | 307: ('temporary_redirect', 'temporary_moved', 'temporary'), 33 | 308: ('permanent_redirect', 34 | 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 35 | 36 | # Client Error. 
37 | 400: ('bad_request', 'bad'), 38 | 401: ('unauthorized',), 39 | 402: ('payment_required', 'payment'), 40 | 403: ('forbidden',), 41 | 404: ('not_found', '-o-'), 42 | 405: ('method_not_allowed', 'not_allowed'), 43 | 406: ('not_acceptable',), 44 | 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), 45 | 408: ('request_timeout', 'timeout'), 46 | 409: ('conflict',), 47 | 410: ('gone',), 48 | 411: ('length_required',), 49 | 412: ('precondition_failed', 'precondition'), 50 | 413: ('request_entity_too_large',), 51 | 414: ('request_uri_too_large',), 52 | 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), 53 | 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), 54 | 417: ('expectation_failed',), 55 | 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), 56 | 422: ('unprocessable_entity', 'unprocessable'), 57 | 423: ('locked',), 58 | 424: ('failed_dependency', 'dependency'), 59 | 425: ('unordered_collection', 'unordered'), 60 | 426: ('upgrade_required', 'upgrade'), 61 | 428: ('precondition_required', 'precondition'), 62 | 429: ('too_many_requests', 'too_many'), 63 | 431: ('header_fields_too_large', 'fields_too_large'), 64 | 444: ('no_response', 'none'), 65 | 449: ('retry_with', 'retry'), 66 | 450: ('blocked_by_windows_parental_controls', 'parental_controls'), 67 | 451: ('unavailable_for_legal_reasons', 'legal_reasons'), 68 | 499: ('client_closed_request',), 69 | 70 | # Server Error. 71 | 500: ('internal_server_error', 'server_error', '/o\\', '✗'), 72 | 501: ('not_implemented',), 73 | 502: ('bad_gateway',), 74 | 503: ('service_unavailable', 'unavailable'), 75 | 504: ('gateway_timeout',), 76 | 505: ('http_version_not_supported', 'http_version'), 77 | 506: ('variant_also_negotiates',), 78 | 507: ('insufficient_storage',), 79 | 509: ('bandwidth_limit_exceeded', 'bandwidth'), 80 | 510: ('not_extended',), 81 | } 82 | 83 | codes = LookupDict(name='status_codes') 84 | 85 | for (code, titles) in list(_codes.items()): 86 | for title in titles: 87 | setattr(codes, title, code) 88 | if not title.startswith('\\'): 89 | setattr(codes, title.upper(), code) 90 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/sbcsgroupprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 
22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | from .charsetgroupprober import CharSetGroupProber 30 | from .sbcharsetprober import SingleByteCharSetProber 31 | from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, 32 | Latin5CyrillicModel, MacCyrillicModel, 33 | Ibm866Model, Ibm855Model) 34 | from .langgreekmodel import Latin7GreekModel, Win1253GreekModel 35 | from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel 36 | from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel 37 | from .langthaimodel import TIS620ThaiModel 38 | from .langhebrewmodel import Win1255HebrewModel 39 | from .hebrewprober import HebrewProber 40 | 41 | 42 | class SBCSGroupProber(CharSetGroupProber): 43 | def __init__(self): 44 | CharSetGroupProber.__init__(self) 45 | self._mProbers = [ 46 | SingleByteCharSetProber(Win1251CyrillicModel), 47 | SingleByteCharSetProber(Koi8rModel), 48 | SingleByteCharSetProber(Latin5CyrillicModel), 49 | SingleByteCharSetProber(MacCyrillicModel), 50 | SingleByteCharSetProber(Ibm866Model), 51 | SingleByteCharSetProber(Ibm855Model), 52 | SingleByteCharSetProber(Latin7GreekModel), 53 | SingleByteCharSetProber(Win1253GreekModel), 54 | SingleByteCharSetProber(Latin5BulgarianModel), 55 | SingleByteCharSetProber(Win1251BulgarianModel), 56 | SingleByteCharSetProber(Latin2HungarianModel), 57 | SingleByteCharSetProber(Win1250HungarianModel), 58 | SingleByteCharSetProber(TIS620ThaiModel), 59 | ] 60 | hebrewProber = HebrewProber() 61 | logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, 62 | False, hebrewProber) 63 | visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True, 64 | hebrewProber) 65 | hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber) 66 | self._mProbers.extend([hebrewProber, logicalHebrewProber, 67 | visualHebrewProber]) 68 | 69 | self.reset() 70 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/mbcharsetprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # Proofpoint, Inc. 13 | # 14 | # This library is free software; you can redistribute it and/or 15 | # modify it under the terms of the GNU Lesser General Public 16 | # License as published by the Free Software Foundation; either 17 | # version 2.1 of the License, or (at your option) any later version. 18 | # 19 | # This library is distributed in the hope that it will be useful, 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 22 | # Lesser General Public License for more details. 
23 | # 24 | # You should have received a copy of the GNU Lesser General Public 25 | # License along with this library; if not, write to the Free Software 26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 27 | # 02110-1301 USA 28 | ######################### END LICENSE BLOCK ######################### 29 | 30 | import sys 31 | from . import constants 32 | from .charsetprober import CharSetProber 33 | 34 | 35 | class MultiByteCharSetProber(CharSetProber): 36 | def __init__(self): 37 | CharSetProber.__init__(self) 38 | self._mDistributionAnalyzer = None 39 | self._mCodingSM = None 40 | self._mLastChar = [0, 0] 41 | 42 | def reset(self): 43 | CharSetProber.reset(self) 44 | if self._mCodingSM: 45 | self._mCodingSM.reset() 46 | if self._mDistributionAnalyzer: 47 | self._mDistributionAnalyzer.reset() 48 | self._mLastChar = [0, 0] 49 | 50 | def get_charset_name(self): 51 | pass 52 | 53 | def feed(self, aBuf): 54 | aLen = len(aBuf) 55 | for i in range(0, aLen): 56 | codingState = self._mCodingSM.next_state(aBuf[i]) 57 | if codingState == constants.eError: 58 | if constants._debug: 59 | sys.stderr.write(self.get_charset_name() 60 | + ' prober hit error at byte ' + str(i) 61 | + '\n') 62 | self._mState = constants.eNotMe 63 | break 64 | elif codingState == constants.eItsMe: 65 | self._mState = constants.eFoundIt 66 | break 67 | elif codingState == constants.eStart: 68 | charLen = self._mCodingSM.get_current_charlen() 69 | if i == 0: 70 | self._mLastChar[1] = aBuf[0] 71 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen) 72 | else: 73 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], 74 | charLen) 75 | 76 | self._mLastChar[0] = aBuf[aLen - 1] 77 | 78 | if self.get_state() == constants.eDetecting: 79 | if (self._mDistributionAnalyzer.got_enough_data() and 80 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): 81 | self._mState = constants.eFoundIt 82 | 83 | return self.get_state() 84 | 85 | def get_confidence(self): 86 | return self._mDistributionAnalyzer.get_confidence() 87 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/connection.py: -------------------------------------------------------------------------------- 1 | import socket 2 | try: 3 | from select import poll, POLLIN 4 | except ImportError: # `poll` doesn't exist on OSX and other platforms 5 | poll = False 6 | try: 7 | from select import select 8 | except ImportError: # `select` doesn't exist on AppEngine. 9 | select = False 10 | 11 | 12 | def is_connection_dropped(conn): # Platform-specific 13 | """ 14 | Returns True if the connection is dropped and should be closed. 15 | 16 | :param conn: 17 | :class:`httplib.HTTPConnection` object. 18 | 19 | Note: For platforms like AppEngine, this will always return ``False`` to 20 | let the platform handle connection recycling transparently for us. 21 | """ 22 | sock = getattr(conn, 'sock', False) 23 | if sock is False: # Platform-specific: AppEngine 24 | return False 25 | if sock is None: # Connection already closed (such as by httplib). 26 | return True 27 | 28 | if not poll: 29 | if not select: # Platform-specific: AppEngine 30 | return False 31 | 32 | try: 33 | return select([sock], [], [], 0.0)[0] 34 | except socket.error: 35 | return True 36 | 37 | # This version is better on platforms that support it. 38 | p = poll() 39 | p.register(sock, POLLIN) 40 | for (fno, ev) in p.poll(0.0): 41 | if fno == sock.fileno(): 42 | # Either data is buffered (bad), or the connection is dropped. 
43 | return True 44 | 45 | 46 | # This function is copied from socket.py in the Python 2.7 standard 47 | # library test suite. Added to its signature is only `socket_options`. 48 | def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, 49 | source_address=None, socket_options=None): 50 | """Connect to *address* and return the socket object. 51 | 52 | Convenience function. Connect to *address* (a 2-tuple ``(host, 53 | port)``) and return the socket object. Passing the optional 54 | *timeout* parameter will set the timeout on the socket instance 55 | before attempting to connect. If no *timeout* is supplied, the 56 | global default timeout setting returned by :func:`getdefaulttimeout` 57 | is used. If *source_address* is set it must be a tuple of (host, port) 58 | for the socket to bind as a source address before making the connection. 59 | An host of '' or port 0 tells the OS to use the default. 60 | """ 61 | 62 | host, port = address 63 | err = None 64 | for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): 65 | af, socktype, proto, canonname, sa = res 66 | sock = None 67 | try: 68 | sock = socket.socket(af, socktype, proto) 69 | 70 | # If provided, set socket level options before connecting. 71 | # This is the only addition urllib3 makes to this function. 72 | _set_socket_options(sock, socket_options) 73 | 74 | if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: 75 | sock.settimeout(timeout) 76 | if source_address: 77 | sock.bind(source_address) 78 | sock.connect(sa) 79 | return sock 80 | 81 | except socket.error as _: 82 | err = _ 83 | if sock is not None: 84 | sock.close() 85 | 86 | if err is not None: 87 | raise err 88 | else: 89 | raise socket.error("getaddrinfo returns an empty list") 90 | 91 | 92 | def _set_socket_options(sock, options): 93 | if options is None: 94 | return 95 | 96 | for opt in options: 97 | sock.setsockopt(*opt) 98 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/eucjpprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | import sys 29 | from . 
import constants 30 | from .mbcharsetprober import MultiByteCharSetProber 31 | from .codingstatemachine import CodingStateMachine 32 | from .chardistribution import EUCJPDistributionAnalysis 33 | from .jpcntx import EUCJPContextAnalysis 34 | from .mbcssm import EUCJPSMModel 35 | 36 | 37 | class EUCJPProber(MultiByteCharSetProber): 38 | def __init__(self): 39 | MultiByteCharSetProber.__init__(self) 40 | self._mCodingSM = CodingStateMachine(EUCJPSMModel) 41 | self._mDistributionAnalyzer = EUCJPDistributionAnalysis() 42 | self._mContextAnalyzer = EUCJPContextAnalysis() 43 | self.reset() 44 | 45 | def reset(self): 46 | MultiByteCharSetProber.reset(self) 47 | self._mContextAnalyzer.reset() 48 | 49 | def get_charset_name(self): 50 | return "EUC-JP" 51 | 52 | def feed(self, aBuf): 53 | aLen = len(aBuf) 54 | for i in range(0, aLen): 55 | # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte 56 | codingState = self._mCodingSM.next_state(aBuf[i]) 57 | if codingState == constants.eError: 58 | if constants._debug: 59 | sys.stderr.write(self.get_charset_name() 60 | + ' prober hit error at byte ' + str(i) 61 | + '\n') 62 | self._mState = constants.eNotMe 63 | break 64 | elif codingState == constants.eItsMe: 65 | self._mState = constants.eFoundIt 66 | break 67 | elif codingState == constants.eStart: 68 | charLen = self._mCodingSM.get_current_charlen() 69 | if i == 0: 70 | self._mLastChar[1] = aBuf[0] 71 | self._mContextAnalyzer.feed(self._mLastChar, charLen) 72 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen) 73 | else: 74 | self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen) 75 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], 76 | charLen) 77 | 78 | self._mLastChar[0] = aBuf[aLen - 1] 79 | 80 | if self.get_state() == constants.eDetecting: 81 | if (self._mContextAnalyzer.got_enough_data() and 82 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): 83 | self._mState = constants.eFoundIt 84 | 85 | return self.get_state() 86 | 87 | def get_confidence(self): 88 | contxtCf = self._mContextAnalyzer.get_confidence() 89 | distribCf = self._mDistributionAnalyzer.get_confidence() 90 | return max(contxtCf, distribCf) 91 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/sjisprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | import sys 29 | from .mbcharsetprober import MultiByteCharSetProber 30 | from .codingstatemachine import CodingStateMachine 31 | from .chardistribution import SJISDistributionAnalysis 32 | from .jpcntx import SJISContextAnalysis 33 | from .mbcssm import SJISSMModel 34 | from . import constants 35 | 36 | 37 | class SJISProber(MultiByteCharSetProber): 38 | def __init__(self): 39 | MultiByteCharSetProber.__init__(self) 40 | self._mCodingSM = CodingStateMachine(SJISSMModel) 41 | self._mDistributionAnalyzer = SJISDistributionAnalysis() 42 | self._mContextAnalyzer = SJISContextAnalysis() 43 | self.reset() 44 | 45 | def reset(self): 46 | MultiByteCharSetProber.reset(self) 47 | self._mContextAnalyzer.reset() 48 | 49 | def get_charset_name(self): 50 | return "SHIFT_JIS" 51 | 52 | def feed(self, aBuf): 53 | aLen = len(aBuf) 54 | for i in range(0, aLen): 55 | codingState = self._mCodingSM.next_state(aBuf[i]) 56 | if codingState == constants.eError: 57 | if constants._debug: 58 | sys.stderr.write(self.get_charset_name() 59 | + ' prober hit error at byte ' + str(i) 60 | + '\n') 61 | self._mState = constants.eNotMe 62 | break 63 | elif codingState == constants.eItsMe: 64 | self._mState = constants.eFoundIt 65 | break 66 | elif codingState == constants.eStart: 67 | charLen = self._mCodingSM.get_current_charlen() 68 | if i == 0: 69 | self._mLastChar[1] = aBuf[0] 70 | self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:], 71 | charLen) 72 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen) 73 | else: 74 | self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3 75 | - charLen], charLen) 76 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], 77 | charLen) 78 | 79 | self._mLastChar[0] = aBuf[aLen - 1] 80 | 81 | if self.get_state() == constants.eDetecting: 82 | if (self._mContextAnalyzer.got_enough_data() and 83 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): 84 | self._mState = constants.eFoundIt 85 | 86 | return self.get_state() 87 | 88 | def get_confidence(self): 89 | contxtCf = self._mContextAnalyzer.get_confidence() 90 | distribCf = self._mDistributionAnalyzer.get_confidence() 91 | return max(contxtCf, distribCf) 92 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/charsetgroupprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Communicator client code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 
16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from . import constants 29 | import sys 30 | from .charsetprober import CharSetProber 31 | 32 | 33 | class CharSetGroupProber(CharSetProber): 34 | def __init__(self): 35 | CharSetProber.__init__(self) 36 | self._mActiveNum = 0 37 | self._mProbers = [] 38 | self._mBestGuessProber = None 39 | 40 | def reset(self): 41 | CharSetProber.reset(self) 42 | self._mActiveNum = 0 43 | for prober in self._mProbers: 44 | if prober: 45 | prober.reset() 46 | prober.active = True 47 | self._mActiveNum += 1 48 | self._mBestGuessProber = None 49 | 50 | def get_charset_name(self): 51 | if not self._mBestGuessProber: 52 | self.get_confidence() 53 | if not self._mBestGuessProber: 54 | return None 55 | # self._mBestGuessProber = self._mProbers[0] 56 | return self._mBestGuessProber.get_charset_name() 57 | 58 | def feed(self, aBuf): 59 | for prober in self._mProbers: 60 | if not prober: 61 | continue 62 | if not prober.active: 63 | continue 64 | st = prober.feed(aBuf) 65 | if not st: 66 | continue 67 | if st == constants.eFoundIt: 68 | self._mBestGuessProber = prober 69 | return self.get_state() 70 | elif st == constants.eNotMe: 71 | prober.active = False 72 | self._mActiveNum -= 1 73 | if self._mActiveNum <= 0: 74 | self._mState = constants.eNotMe 75 | return self.get_state() 76 | return self.get_state() 77 | 78 | def get_confidence(self): 79 | st = self.get_state() 80 | if st == constants.eFoundIt: 81 | return 0.99 82 | elif st == constants.eNotMe: 83 | return 0.01 84 | bestConf = 0.0 85 | self._mBestGuessProber = None 86 | for prober in self._mProbers: 87 | if not prober: 88 | continue 89 | if not prober.active: 90 | if constants._debug: 91 | sys.stderr.write(prober.get_charset_name() 92 | + ' not active\n') 93 | continue 94 | cf = prober.get_confidence() 95 | if constants._debug: 96 | sys.stderr.write('%s confidence = %s\n' % 97 | (prober.get_charset_name(), cf)) 98 | if bestConf < cf: 99 | bestConf = cf 100 | self._mBestGuessProber = prober 101 | if not self._mBestGuessProber: 102 | return 0.0 103 | return bestConf 104 | # else: 105 | # self._mBestGuessProber = self._mProbers[0] 106 | # return self._mBestGuessProber.get_confidence() 107 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py: -------------------------------------------------------------------------------- 1 | """The match_hostname() function from Python 3.3.3, essential when using SSL.""" 2 | 3 | # Note: This file is under the PSF license as the code comes from the python 4 | # stdlib. 
http://docs.python.org/3/license.html 5 | 6 | import re 7 | 8 | __version__ = '3.4.0.2' 9 | 10 | class CertificateError(ValueError): 11 | pass 12 | 13 | 14 | def _dnsname_match(dn, hostname, max_wildcards=1): 15 | """Matching according to RFC 6125, section 6.4.3 16 | 17 | http://tools.ietf.org/html/rfc6125#section-6.4.3 18 | """ 19 | pats = [] 20 | if not dn: 21 | return False 22 | 23 | # Ported from python3-syntax: 24 | # leftmost, *remainder = dn.split(r'.') 25 | parts = dn.split(r'.') 26 | leftmost = parts[0] 27 | remainder = parts[1:] 28 | 29 | wildcards = leftmost.count('*') 30 | if wildcards > max_wildcards: 31 | # Issue #17980: avoid denials of service by refusing more 32 | # than one wildcard per fragment. A survey of established 33 | # policy among SSL implementations showed it to be a 34 | # reasonable choice. 35 | raise CertificateError( 36 | "too many wildcards in certificate DNS name: " + repr(dn)) 37 | 38 | # speed up common case w/o wildcards 39 | if not wildcards: 40 | return dn.lower() == hostname.lower() 41 | 42 | # RFC 6125, section 6.4.3, subitem 1. 43 | # The client SHOULD NOT attempt to match a presented identifier in which 44 | # the wildcard character comprises a label other than the left-most label. 45 | if leftmost == '*': 46 | # When '*' is a fragment by itself, it matches a non-empty dotless 47 | # fragment. 48 | pats.append('[^.]+') 49 | elif leftmost.startswith('xn--') or hostname.startswith('xn--'): 50 | # RFC 6125, section 6.4.3, subitem 3. 51 | # The client SHOULD NOT attempt to match a presented identifier 52 | # where the wildcard character is embedded within an A-label or 53 | # U-label of an internationalized domain name. 54 | pats.append(re.escape(leftmost)) 55 | else: 56 | # Otherwise, '*' matches any dotless string, e.g. www* 57 | pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) 58 | 59 | # add the remaining fragments, ignore any wildcards 60 | for frag in remainder: 61 | pats.append(re.escape(frag)) 62 | 63 | pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) 64 | return pat.match(hostname) 65 | 66 | 67 | def match_hostname(cert, hostname): 68 | """Verify that *cert* (in decoded format as returned by 69 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 70 | rules are followed, but IP addresses are not accepted for *hostname*. 71 | 72 | CertificateError is raised on failure. On success, the function 73 | returns nothing. 74 | """ 75 | if not cert: 76 | raise ValueError("empty or no certificate") 77 | dnsnames = [] 78 | san = cert.get('subjectAltName', ()) 79 | for key, value in san: 80 | if key == 'DNS': 81 | if _dnsname_match(value, hostname): 82 | return 83 | dnsnames.append(value) 84 | if not dnsnames: 85 | # The subject is only checked when there is no dNSName entry 86 | # in subjectAltName 87 | for sub in cert.get('subject', ()): 88 | for key, value in sub: 89 | # XXX according to RFC 2818, the most specific Common Name 90 | # must be used. 
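                # For instance, a subject entry of
                # commonName='www.example.com' matches the hostname
                # 'www.example.com', and a wildcard commonName such as
                # '*.example.com' matches a single extra label
                # ('foo.example.com' but not 'a.b.example.com').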
91 | if key == 'commonName': 92 | if _dnsname_match(value, hostname): 93 | return 94 | dnsnames.append(value) 95 | if len(dnsnames) > 1: 96 | raise CertificateError("hostname %r " 97 | "doesn't match either of %s" 98 | % (hostname, ', '.join(map(repr, dnsnames)))) 99 | elif len(dnsnames) == 1: 100 | raise CertificateError("hostname %r " 101 | "doesn't match %r" 102 | % (hostname, dnsnames[0])) 103 | else: 104 | raise CertificateError("no appropriate commonName or " 105 | "subjectAltName fields were found") 106 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/exceptions.py: -------------------------------------------------------------------------------- 1 | 2 | ## Base Exceptions 3 | 4 | class HTTPError(Exception): 5 | "Base exception used by this module." 6 | pass 7 | 8 | class HTTPWarning(Warning): 9 | "Base warning used by this module." 10 | pass 11 | 12 | 13 | 14 | class PoolError(HTTPError): 15 | "Base exception for errors caused within a pool." 16 | def __init__(self, pool, message): 17 | self.pool = pool 18 | HTTPError.__init__(self, "%s: %s" % (pool, message)) 19 | 20 | def __reduce__(self): 21 | # For pickling purposes. 22 | return self.__class__, (None, None) 23 | 24 | 25 | class RequestError(PoolError): 26 | "Base exception for PoolErrors that have associated URLs." 27 | def __init__(self, pool, url, message): 28 | self.url = url 29 | PoolError.__init__(self, pool, message) 30 | 31 | def __reduce__(self): 32 | # For pickling purposes. 33 | return self.__class__, (None, self.url, None) 34 | 35 | 36 | class SSLError(HTTPError): 37 | "Raised when SSL certificate fails in an HTTPS connection." 38 | pass 39 | 40 | 41 | class ProxyError(HTTPError): 42 | "Raised when the connection to a proxy fails." 43 | pass 44 | 45 | 46 | class DecodeError(HTTPError): 47 | "Raised when automatic decoding based on Content-Type fails." 48 | pass 49 | 50 | 51 | class ProtocolError(HTTPError): 52 | "Raised when something unexpected happens mid-request/response." 53 | pass 54 | 55 | 56 | #: Renamed to ProtocolError but aliased for backwards compatibility. 57 | ConnectionError = ProtocolError 58 | 59 | 60 | ## Leaf Exceptions 61 | 62 | class MaxRetryError(RequestError): 63 | """Raised when the maximum number of retries is exceeded. 64 | 65 | :param pool: The connection pool 66 | :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` 67 | :param string url: The requested Url 68 | :param exceptions.Exception reason: The underlying error 69 | 70 | """ 71 | 72 | def __init__(self, pool, url, reason=None): 73 | self.reason = reason 74 | 75 | message = "Max retries exceeded with url: %s" % url 76 | if reason: 77 | message += " (Caused by %r)" % reason 78 | else: 79 | message += " (Caused by redirect)" 80 | 81 | RequestError.__init__(self, pool, url, message) 82 | 83 | 84 | class HostChangedError(RequestError): 85 | "Raised when an existing pool gets a request for a foreign host." 86 | 87 | def __init__(self, pool, url, retries=3): 88 | message = "Tried to open a foreign host with url: %s" % url 89 | RequestError.__init__(self, pool, url, message) 90 | self.retries = retries 91 | 92 | 93 | class TimeoutStateError(HTTPError): 94 | """ Raised when passing an invalid state to a timeout """ 95 | pass 96 | 97 | 98 | class TimeoutError(HTTPError): 99 | """ Raised when a socket timeout error occurs. 100 | 101 | Catching this error will catch both :exc:`ReadTimeoutErrors 102 | ` and :exc:`ConnectTimeoutErrors `. 
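    A rough usage sketch (``pool`` stands for any already-constructed
    :class:`~urllib3.connectionpool.HTTPConnectionPool`; the path and
    timeout value are illustrative)::

        try:
            pool.urlopen('GET', '/slow-endpoint', timeout=1.0)
        except TimeoutError:
            pass  # covers both ConnectTimeoutError and ReadTimeoutError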
103 | """ 104 | pass 105 | 106 | 107 | class ReadTimeoutError(TimeoutError, RequestError): 108 | "Raised when a socket timeout occurs while receiving data from a server" 109 | pass 110 | 111 | 112 | # This timeout error does not have a URL attached and needs to inherit from the 113 | # base HTTPError 114 | class ConnectTimeoutError(TimeoutError): 115 | "Raised when a socket timeout occurs while connecting to a server" 116 | pass 117 | 118 | 119 | class EmptyPoolError(PoolError): 120 | "Raised when a pool runs out of connections and no more are allowed." 121 | pass 122 | 123 | 124 | class ClosedPoolError(PoolError): 125 | "Raised when a request enters a pool after the pool has been closed." 126 | pass 127 | 128 | 129 | class LocationValueError(ValueError, HTTPError): 130 | "Raised when there is something wrong with a given URL input." 131 | pass 132 | 133 | 134 | class LocationParseError(LocationValueError): 135 | "Raised when get_host or similar fails to parse the URL input." 136 | 137 | def __init__(self, location): 138 | message = "Failed to parse: %s" % location 139 | HTTPError.__init__(self, message) 140 | 141 | self.location = location 142 | 143 | 144 | class SecurityWarning(HTTPWarning): 145 | "Warned when perfoming security reducing actions" 146 | pass 147 | 148 | 149 | class InsecureRequestWarning(SecurityWarning): 150 | "Warned when making an unverified HTTPS request." 151 | pass 152 | 153 | 154 | class SystemTimeWarning(SecurityWarning): 155 | "Warned when system time is suspected to be wrong" 156 | pass 157 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/ssl_.py: -------------------------------------------------------------------------------- 1 | from binascii import hexlify, unhexlify 2 | from hashlib import md5, sha1 3 | 4 | from ..exceptions import SSLError 5 | 6 | 7 | try: # Test for SSL features 8 | SSLContext = None 9 | HAS_SNI = False 10 | 11 | import ssl 12 | from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 13 | from ssl import SSLContext # Modern SSL? 14 | from ssl import HAS_SNI # Has SNI? 15 | except ImportError: 16 | pass 17 | 18 | 19 | def assert_fingerprint(cert, fingerprint): 20 | """ 21 | Checks if given fingerprint matches the supplied certificate. 22 | 23 | :param cert: 24 | Certificate as bytes object. 25 | :param fingerprint: 26 | Fingerprint as string of hexdigits, can be interspersed by colons. 27 | """ 28 | 29 | # Maps the length of a digest to a possible hash function producing 30 | # this digest. 31 | hashfunc_map = { 32 | 16: md5, 33 | 20: sha1 34 | } 35 | 36 | fingerprint = fingerprint.replace(':', '').lower() 37 | digest_length, odd = divmod(len(fingerprint), 2) 38 | 39 | if odd or digest_length not in hashfunc_map: 40 | raise SSLError('Fingerprint is of invalid length.') 41 | 42 | # We need encode() here for py32; works on py2 and p33. 43 | fingerprint_bytes = unhexlify(fingerprint.encode()) 44 | 45 | hashfunc = hashfunc_map[digest_length] 46 | 47 | cert_digest = hashfunc(cert).digest() 48 | 49 | if not cert_digest == fingerprint_bytes: 50 | raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' 51 | .format(hexlify(fingerprint_bytes), 52 | hexlify(cert_digest))) 53 | 54 | 55 | def resolve_cert_reqs(candidate): 56 | """ 57 | Resolves the argument to a numeric constant, which can be passed to 58 | the wrap_socket function/method from the ssl module. 59 | Defaults to :data:`ssl.CERT_NONE`. 
60 | If given a string it is assumed to be the name of the constant in the 61 | :mod:`ssl` module or its abbrevation. 62 | (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. 63 | If it's neither `None` nor a string we assume it is already the numeric 64 | constant which can directly be passed to wrap_socket. 65 | """ 66 | if candidate is None: 67 | return CERT_NONE 68 | 69 | if isinstance(candidate, str): 70 | res = getattr(ssl, candidate, None) 71 | if res is None: 72 | res = getattr(ssl, 'CERT_' + candidate) 73 | return res 74 | 75 | return candidate 76 | 77 | 78 | def resolve_ssl_version(candidate): 79 | """ 80 | like resolve_cert_reqs 81 | """ 82 | if candidate is None: 83 | return PROTOCOL_SSLv23 84 | 85 | if isinstance(candidate, str): 86 | res = getattr(ssl, candidate, None) 87 | if res is None: 88 | res = getattr(ssl, 'PROTOCOL_' + candidate) 89 | return res 90 | 91 | return candidate 92 | 93 | 94 | if SSLContext is not None: # Python 3.2+ 95 | def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, 96 | ca_certs=None, server_hostname=None, 97 | ssl_version=None): 98 | """ 99 | All arguments except `server_hostname` have the same meaning as for 100 | :func:`ssl.wrap_socket` 101 | 102 | :param server_hostname: 103 | Hostname of the expected certificate 104 | """ 105 | context = SSLContext(ssl_version) 106 | context.verify_mode = cert_reqs 107 | 108 | # Disable TLS compression to migitate CRIME attack (issue #309) 109 | OP_NO_COMPRESSION = 0x20000 110 | context.options |= OP_NO_COMPRESSION 111 | 112 | if ca_certs: 113 | try: 114 | context.load_verify_locations(ca_certs) 115 | # Py32 raises IOError 116 | # Py33 raises FileNotFoundError 117 | except Exception as e: # Reraise as SSLError 118 | raise SSLError(e) 119 | if certfile: 120 | # FIXME: This block needs a test. 121 | context.load_cert_chain(certfile, keyfile) 122 | if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI 123 | return context.wrap_socket(sock, server_hostname=server_hostname) 124 | return context.wrap_socket(sock) 125 | 126 | else: # Python 3.1 and earlier 127 | def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, 128 | ca_certs=None, server_hostname=None, 129 | ssl_version=None): 130 | return wrap_socket(sock, keyfile=keyfile, certfile=certfile, 131 | ca_certs=ca_certs, cert_reqs=cert_reqs, 132 | ssl_version=ssl_version) 133 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/contrib/ntlmpool.py: -------------------------------------------------------------------------------- 1 | """ 2 | NTLM authenticating pool, contributed by erikcederstran 3 | 4 | Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 5 | """ 6 | 7 | try: 8 | from http.client import HTTPSConnection 9 | except ImportError: 10 | from httplib import HTTPSConnection 11 | from logging import getLogger 12 | from ntlm import ntlm 13 | 14 | from urllib3 import HTTPSConnectionPool 15 | 16 | 17 | log = getLogger(__name__) 18 | 19 | 20 | class NTLMConnectionPool(HTTPSConnectionPool): 21 | """ 22 | Implements an NTLM authentication version of an urllib3 connection pool 23 | """ 24 | 25 | scheme = 'https' 26 | 27 | def __init__(self, user, pw, authurl, *args, **kwargs): 28 | """ 29 | authurl is a random URL on the server that is protected by NTLM. 30 | user is the Windows user, probably in the DOMAIN\\username format. 31 | pw is the password for the user. 
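        A construction sketch (the account, password, URL and host are
        illustrative)::

            pool = NTLMConnectionPool(user='EXAMPLE\\jdoe', pw='secret',
                                      authurl='/protected/',
                                      host='intranet.example.com')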
32 | """ 33 | super(NTLMConnectionPool, self).__init__(*args, **kwargs) 34 | self.authurl = authurl 35 | self.rawuser = user 36 | user_parts = user.split('\\', 1) 37 | self.domain = user_parts[0].upper() 38 | self.user = user_parts[1] 39 | self.pw = pw 40 | 41 | def _new_conn(self): 42 | # Performs the NTLM handshake that secures the connection. The socket 43 | # must be kept open while requests are performed. 44 | self.num_connections += 1 45 | log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' % 46 | (self.num_connections, self.host, self.authurl)) 47 | 48 | headers = {} 49 | headers['Connection'] = 'Keep-Alive' 50 | req_header = 'Authorization' 51 | resp_header = 'www-authenticate' 52 | 53 | conn = HTTPSConnection(host=self.host, port=self.port) 54 | 55 | # Send negotiation message 56 | headers[req_header] = ( 57 | 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) 58 | log.debug('Request headers: %s' % headers) 59 | conn.request('GET', self.authurl, None, headers) 60 | res = conn.getresponse() 61 | reshdr = dict(res.getheaders()) 62 | log.debug('Response status: %s %s' % (res.status, res.reason)) 63 | log.debug('Response headers: %s' % reshdr) 64 | log.debug('Response data: %s [...]' % res.read(100)) 65 | 66 | # Remove the reference to the socket, so that it can not be closed by 67 | # the response object (we want to keep the socket open) 68 | res.fp = None 69 | 70 | # Server should respond with a challenge message 71 | auth_header_values = reshdr[resp_header].split(', ') 72 | auth_header_value = None 73 | for s in auth_header_values: 74 | if s[:5] == 'NTLM ': 75 | auth_header_value = s[5:] 76 | if auth_header_value is None: 77 | raise Exception('Unexpected %s response header: %s' % 78 | (resp_header, reshdr[resp_header])) 79 | 80 | # Send authentication message 81 | ServerChallenge, NegotiateFlags = \ 82 | ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) 83 | auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, 84 | self.user, 85 | self.domain, 86 | self.pw, 87 | NegotiateFlags) 88 | headers[req_header] = 'NTLM %s' % auth_msg 89 | log.debug('Request headers: %s' % headers) 90 | conn.request('GET', self.authurl, None, headers) 91 | res = conn.getresponse() 92 | log.debug('Response status: %s %s' % (res.status, res.reason)) 93 | log.debug('Response headers: %s' % dict(res.getheaders())) 94 | log.debug('Response data: %s [...]' % res.read()[:100]) 95 | if res.status != 200: 96 | if res.status == 401: 97 | raise Exception('Server rejected request: wrong ' 98 | 'username or password') 99 | raise Exception('Wrong server response: %s %s' % 100 | (res.status, res.reason)) 101 | 102 | res.fp = None 103 | log.debug('Connection established') 104 | return conn 105 | 106 | def urlopen(self, method, url, body=None, headers=None, retries=3, 107 | redirect=True, assert_same_host=True): 108 | if headers is None: 109 | headers = {} 110 | headers['Connection'] = 'Keep-Alive' 111 | return super(NTLMConnectionPool, self).urlopen(method, url, body, 112 | headers, retries, 113 | redirect, 114 | assert_same_host) 115 | -------------------------------------------------------------------------------- /source/requests/api.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.api 5 | ~~~~~~~~~~~~ 6 | 7 | This module implements the Requests API. 8 | 9 | :copyright: (c) 2012 by Kenneth Reitz. 10 | :license: Apache2, see LICENSE for more details. 11 | 12 | """ 13 | 14 | from . 
import sessions 15 | 16 | 17 | def request(method, url, **kwargs): 18 | """Constructs and sends a :class:`Request `. 19 | Returns :class:`Response ` object. 20 | 21 | :param method: method for the new :class:`Request` object. 22 | :param url: URL for the new :class:`Request` object. 23 | :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. 24 | :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. 25 | :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. 26 | :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. 27 | :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload. 28 | :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. 29 | :param timeout: (optional) How long to wait for the server to send data 30 | before giving up, as a float, or a (`connect timeout, read timeout 31 | `_) tuple. 32 | :type timeout: float or tuple 33 | :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. 34 | :type allow_redirects: bool 35 | :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. 36 | :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. 37 | :param stream: (optional) if ``False``, the response content will be immediately downloaded. 38 | :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. 39 | 40 | Usage:: 41 | 42 | >>> import requests 43 | >>> req = requests.request('GET', 'http://httpbin.org/get') 44 | 45 | """ 46 | 47 | session = sessions.Session() 48 | return session.request(method=method, url=url, **kwargs) 49 | 50 | 51 | def get(url, **kwargs): 52 | """Sends a GET request. Returns :class:`Response` object. 53 | 54 | :param url: URL for the new :class:`Request` object. 55 | :param \*\*kwargs: Optional arguments that ``request`` takes. 56 | """ 57 | 58 | kwargs.setdefault('allow_redirects', True) 59 | return request('get', url, **kwargs) 60 | 61 | 62 | def options(url, **kwargs): 63 | """Sends a OPTIONS request. Returns :class:`Response` object. 64 | 65 | :param url: URL for the new :class:`Request` object. 66 | :param \*\*kwargs: Optional arguments that ``request`` takes. 67 | """ 68 | 69 | kwargs.setdefault('allow_redirects', True) 70 | return request('options', url, **kwargs) 71 | 72 | 73 | def head(url, **kwargs): 74 | """Sends a HEAD request. Returns :class:`Response` object. 75 | 76 | :param url: URL for the new :class:`Request` object. 77 | :param \*\*kwargs: Optional arguments that ``request`` takes. 78 | """ 79 | 80 | kwargs.setdefault('allow_redirects', False) 81 | return request('head', url, **kwargs) 82 | 83 | 84 | def post(url, data=None, **kwargs): 85 | """Sends a POST request. Returns :class:`Response` object. 86 | 87 | :param url: URL for the new :class:`Request` object. 88 | :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. 89 | :param \*\*kwargs: Optional arguments that ``request`` takes. 90 | """ 91 | 92 | return request('post', url, data=data, **kwargs) 93 | 94 | 95 | def put(url, data=None, **kwargs): 96 | """Sends a PUT request. Returns :class:`Response` object. 97 | 98 | :param url: URL for the new :class:`Request` object. 
99 | :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. 100 | :param \*\*kwargs: Optional arguments that ``request`` takes. 101 | """ 102 | 103 | return request('put', url, data=data, **kwargs) 104 | 105 | 106 | def patch(url, data=None, **kwargs): 107 | """Sends a PATCH request. Returns :class:`Response` object. 108 | 109 | :param url: URL for the new :class:`Request` object. 110 | :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. 111 | :param \*\*kwargs: Optional arguments that ``request`` takes. 112 | """ 113 | 114 | return request('patch', url, data=data, **kwargs) 115 | 116 | 117 | def delete(url, **kwargs): 118 | """Sends a DELETE request. Returns :class:`Response` object. 119 | 120 | :param url: URL for the new :class:`Request` object. 121 | :param \*\*kwargs: Optional arguments that ``request`` takes. 122 | """ 123 | 124 | return request('delete', url, **kwargs) 125 | -------------------------------------------------------------------------------- /source/workflow/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | # 4 | # Copyright (c) 2014 Dean Jackson 5 | # 6 | # MIT Licence. See http://opensource.org/licenses/MIT 7 | # 8 | # Created on 2014-02-15 9 | # 10 | 11 | """ 12 | A Python helper library for `Alfred 2 `_ Workflow 13 | authors. 14 | 15 | Alfred Workflows typically take user input, fetch data from the Web or 16 | elsewhere, filter them and display results to the user. **Alfred-Workflow** 17 | helps you do these things. 18 | 19 | There are convenience methods for: 20 | 21 | - Parsing script arguments. 22 | - Text decoding/normalisation. 23 | - Storing data and settings. 24 | - Caching data from, e.g., web services with a simple API for updating expired 25 | data. 26 | - Securely storing (and syncing) passwords using OS X Keychain. 27 | - Generating XML output for Alfred. 28 | - Including external libraries (adding directories to ``sys.path``). 29 | - Filtering results using an Alfred-like, fuzzy search algorithm. 30 | - Generating log output for debugging. 31 | - Running background processes to keep your workflow responsive. 32 | - Capturing errors, so the workflow doesn't fail silently. 33 | 34 | Quick Example 35 | ============= 36 | 37 | Here's how to show recent `Pinboard.in `_ posts in Alfred. 38 | 39 | Create a new Workflow in Alfred's preferences. Add a **Script Filter** with 40 | Language ``/usr/bin/python`` and paste the following into the **Script** field 41 | (changing ``API_KEY``): 42 | 43 | .. code-block:: python 44 | :emphasize-lines: 4 45 | 46 | import sys 47 | from workflow import Workflow, ICON_WEB, web 48 | 49 | API_KEY = 'your-pinboard-api-key' 50 | 51 | def main(wf): 52 | url = 'https://api.pinboard.in/v1/posts/recent' 53 | params = dict(auth_token=API_KEY, count=20, format='json') 54 | r = web.get(url, params) 55 | r.raise_for_status() 56 | for post in r.json()['posts']: 57 | wf.add_item(post['description'], post['href'], arg=post['href'], 58 | uid=post['hash'], valid=True, icon=ICON_WEB) 59 | wf.send_feedback() 60 | 61 | 62 | if __name__ == u"__main__": 63 | wf = Workflow() 64 | sys.exit(wf.run(main)) 65 | 66 | 67 | Add an **Open URL** action to your Workflow with ``{query}`` as the **URL**, 68 | connect your **Script Filter** to it, and you can now hit **ENTER** on a 69 | Pinboard item in Alfred to open it in your browser. 
70 | 71 | Installation 72 | ============ 73 | 74 | With pip 75 | -------- 76 | 77 | You can install **Alfred-Workflow** directly into your workflow with:: 78 | 79 | pip install --target=/path/to/my/workflow Alfred-Workflow 80 | 81 | 82 | **Note**: If you intend to distribute your workflow to other users, you should 83 | include **Alfred-Workflow** (and other Python libraries your workflow requires) 84 | within your workflow as described. Do not ask users to install anything into 85 | their system Python. 86 | 87 | From source 88 | ----------- 89 | 90 | Download the ``alfred-workflow-X.X.zip`` file from the 91 | `GitHub releases page `_ 92 | and either extract the ZIP to the root directory of your workflow (where 93 | ``info.plist`` is) or place the ZIP in the root directory and add 94 | ``sys.path.insert(0, 'alfred-workflow-X.X.zip')`` to the top of your 95 | Python scripts. 96 | 97 | Alternatively, you can download 98 | `the source code `_ 99 | from the `GitHub repository `_ and 100 | copy the ``workflow`` subfolder to the root directory of your Workflow. 101 | 102 | Your Workflow directory should look something like this (where 103 | ``yourscript.py`` contains your Workflow code and ``info.plist`` is 104 | the Workflow information file generated by Alfred):: 105 | 106 | Your Workflow/ 107 | info.plist 108 | icon.png 109 | workflow/ 110 | __init__.py 111 | background.py 112 | workflow.py 113 | web.py 114 | yourscript.py 115 | etc. 116 | 117 | 118 | Or like this:: 119 | 120 | Your Workflow/ 121 | info.plist 122 | icon.png 123 | workflow-1.X.X.zip 124 | yourscript.py 125 | etc. 126 | 127 | 128 | """ 129 | 130 | __version__ = '1.8.5' 131 | 132 | 133 | from .workflow import Workflow, PasswordNotFound, KeychainError 134 | from .workflow import (ICON_ERROR, ICON_WARNING, ICON_NOTE, ICON_INFO, 135 | ICON_FAVORITE, ICON_FAVOURITE, ICON_USER, ICON_GROUP, 136 | ICON_HELP, ICON_NETWORK, ICON_WEB, ICON_COLOR, 137 | ICON_COLOUR, ICON_SYNC, ICON_SETTINGS, ICON_TRASH, 138 | ICON_MUSIC, ICON_BURN, ICON_ACCOUNT, ICON_ERROR) 139 | from .workflow import (MATCH_ALL, MATCH_ALLCHARS, MATCH_ATOM, 140 | MATCH_CAPITALS, MATCH_INITIALS, 141 | MATCH_INITIALS_CONTAIN, MATCH_INITIALS_STARTSWITH, 142 | MATCH_STARTSWITH, MATCH_SUBSTRING) 143 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/util/url.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | 3 | from ..exceptions import LocationParseError 4 | 5 | 6 | url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] 7 | 8 | 9 | class Url(namedtuple('Url', url_attrs)): 10 | """ 11 | Datastructure for representing an HTTP URL. Used as a return value for 12 | :func:`parse_url`. 13 | """ 14 | slots = () 15 | 16 | def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, 17 | query=None, fragment=None): 18 | return super(Url, cls).__new__(cls, scheme, auth, host, port, path, 19 | query, fragment) 20 | 21 | @property 22 | def hostname(self): 23 | """For backwards-compatibility with urlparse. We're nice like that.""" 24 | return self.host 25 | 26 | @property 27 | def request_uri(self): 28 | """Absolute path including the query string.""" 29 | uri = self.path or '/' 30 | 31 | if self.query is not None: 32 | uri += '?' 
+ self.query 33 | 34 | return uri 35 | 36 | @property 37 | def netloc(self): 38 | """Network location including host and port""" 39 | if self.port: 40 | return '%s:%d' % (self.host, self.port) 41 | return self.host 42 | 43 | 44 | def split_first(s, delims): 45 | """ 46 | Given a string and an iterable of delimiters, split on the first found 47 | delimiter. Return two split parts and the matched delimiter. 48 | 49 | If not found, then the first part is the full input string. 50 | 51 | Example:: 52 | 53 | >>> split_first('foo/bar?baz', '?/=') 54 | ('foo', 'bar?baz', '/') 55 | >>> split_first('foo/bar?baz', '123') 56 | ('foo/bar?baz', '', None) 57 | 58 | Scales linearly with number of delims. Not ideal for large number of delims. 59 | """ 60 | min_idx = None 61 | min_delim = None 62 | for d in delims: 63 | idx = s.find(d) 64 | if idx < 0: 65 | continue 66 | 67 | if min_idx is None or idx < min_idx: 68 | min_idx = idx 69 | min_delim = d 70 | 71 | if min_idx is None or min_idx < 0: 72 | return s, '', None 73 | 74 | return s[:min_idx], s[min_idx+1:], min_delim 75 | 76 | 77 | def parse_url(url): 78 | """ 79 | Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is 80 | performed to parse incomplete urls. Fields not provided will be None. 81 | 82 | Partly backwards-compatible with :mod:`urlparse`. 83 | 84 | Example:: 85 | 86 | >>> parse_url('http://google.com/mail/') 87 | Url(scheme='http', host='google.com', port=None, path='/', ...) 88 | >>> parse_url('google.com:80') 89 | Url(scheme=None, host='google.com', port=80, path=None, ...) 90 | >>> parse_url('/foo?bar') 91 | Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 92 | """ 93 | 94 | # While this code has overlap with stdlib's urlparse, it is much 95 | # simplified for our needs and less annoying. 96 | # Additionally, this implementations does silly things to be optimal 97 | # on CPython. 98 | 99 | if not url: 100 | # Empty 101 | return Url() 102 | 103 | scheme = None 104 | auth = None 105 | host = None 106 | port = None 107 | path = None 108 | fragment = None 109 | query = None 110 | 111 | # Scheme 112 | if '://' in url: 113 | scheme, url = url.split('://', 1) 114 | 115 | # Find the earliest Authority Terminator 116 | # (http://tools.ietf.org/html/rfc3986#section-3.2) 117 | url, path_, delim = split_first(url, ['/', '?', '#']) 118 | 119 | if delim: 120 | # Reassemble the path 121 | path = delim + path_ 122 | 123 | # Auth 124 | if '@' in url: 125 | # Last '@' denotes end of auth part 126 | auth, url = url.rsplit('@', 1) 127 | 128 | # IPv6 129 | if url and url[0] == '[': 130 | host, url = url.split(']', 1) 131 | host += ']' 132 | 133 | # Port 134 | if ':' in url: 135 | _host, port = url.split(':', 1) 136 | 137 | if not host: 138 | host = _host 139 | 140 | if port: 141 | # If given, ports must be integers. 142 | if not port.isdigit(): 143 | raise LocationParseError(url) 144 | port = int(port) 145 | else: 146 | # Blank ports are cool, too. (rfc3986#section-3.2.3) 147 | port = None 148 | 149 | elif not host and url: 150 | host = url 151 | 152 | if not path: 153 | return Url(scheme, auth, host, port, path, query, fragment) 154 | 155 | # Fragment 156 | if '#' in path: 157 | path, fragment = path.split('#', 1) 158 | 159 | # Query 160 | if '?' in path: 161 | path, query = path.split('?', 1) 162 | 163 | return Url(scheme, auth, host, port, path, query, fragment) 164 | 165 | 166 | def get_host(url): 167 | """ 168 | Deprecated. Use :func:`.parse_url` instead. 
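    Example (illustrative, mirroring the :func:`.parse_url` examples above)::

        >>> get_host('http://google.com/mail/')
        ('http', 'google.com', None)
        >>> get_host('google.com:80')
        ('http', 'google.com', 80)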
169 | """ 170 | p = parse_url(url) 171 | return p.scheme or 'http', p.hostname, p.port 172 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/sbcharsetprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | import sys 30 | from . import constants 31 | from .charsetprober import CharSetProber 32 | from .compat import wrap_ord 33 | 34 | SAMPLE_SIZE = 64 35 | SB_ENOUGH_REL_THRESHOLD = 1024 36 | POSITIVE_SHORTCUT_THRESHOLD = 0.95 37 | NEGATIVE_SHORTCUT_THRESHOLD = 0.05 38 | SYMBOL_CAT_ORDER = 250 39 | NUMBER_OF_SEQ_CAT = 4 40 | POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1 41 | #NEGATIVE_CAT = 0 42 | 43 | 44 | class SingleByteCharSetProber(CharSetProber): 45 | def __init__(self, model, reversed=False, nameProber=None): 46 | CharSetProber.__init__(self) 47 | self._mModel = model 48 | # TRUE if we need to reverse every pair in the model lookup 49 | self._mReversed = reversed 50 | # Optional auxiliary prober for name decision 51 | self._mNameProber = nameProber 52 | self.reset() 53 | 54 | def reset(self): 55 | CharSetProber.reset(self) 56 | # char order of last character 57 | self._mLastOrder = 255 58 | self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT 59 | self._mTotalSeqs = 0 60 | self._mTotalChar = 0 61 | # characters that fall in our sampling range 62 | self._mFreqChar = 0 63 | 64 | def get_charset_name(self): 65 | if self._mNameProber: 66 | return self._mNameProber.get_charset_name() 67 | else: 68 | return self._mModel['charsetName'] 69 | 70 | def feed(self, aBuf): 71 | if not self._mModel['keepEnglishLetter']: 72 | aBuf = self.filter_without_english_letters(aBuf) 73 | aLen = len(aBuf) 74 | if not aLen: 75 | return self.get_state() 76 | for c in aBuf: 77 | order = self._mModel['charToOrderMap'][wrap_ord(c)] 78 | if order < SYMBOL_CAT_ORDER: 79 | self._mTotalChar += 1 80 | if order < SAMPLE_SIZE: 81 | self._mFreqChar += 1 82 | if self._mLastOrder < SAMPLE_SIZE: 83 | self._mTotalSeqs += 1 84 | if not self._mReversed: 85 | i = (self._mLastOrder * SAMPLE_SIZE) + order 86 | model = self._mModel['precedenceMatrix'][i] 87 | else: # reverse the order of the letters in the lookup 88 | i = 
(order * SAMPLE_SIZE) + self._mLastOrder 89 | model = self._mModel['precedenceMatrix'][i] 90 | self._mSeqCounters[model] += 1 91 | self._mLastOrder = order 92 | 93 | if self.get_state() == constants.eDetecting: 94 | if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD: 95 | cf = self.get_confidence() 96 | if cf > POSITIVE_SHORTCUT_THRESHOLD: 97 | if constants._debug: 98 | sys.stderr.write('%s confidence = %s, we have a' 99 | 'winner\n' % 100 | (self._mModel['charsetName'], cf)) 101 | self._mState = constants.eFoundIt 102 | elif cf < NEGATIVE_SHORTCUT_THRESHOLD: 103 | if constants._debug: 104 | sys.stderr.write('%s confidence = %s, below negative' 105 | 'shortcut threshhold %s\n' % 106 | (self._mModel['charsetName'], cf, 107 | NEGATIVE_SHORTCUT_THRESHOLD)) 108 | self._mState = constants.eNotMe 109 | 110 | return self.get_state() 111 | 112 | def get_confidence(self): 113 | r = 0.01 114 | if self._mTotalSeqs > 0: 115 | r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs 116 | / self._mModel['mTypicalPositiveRatio']) 117 | r = r * self._mFreqChar / self._mTotalChar 118 | if r >= 1.0: 119 | r = 0.99 120 | return r 121 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/latin1prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 
22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | from .charsetprober import CharSetProber 30 | from .constants import eNotMe 31 | from .compat import wrap_ord 32 | 33 | FREQ_CAT_NUM = 4 34 | 35 | UDF = 0 # undefined 36 | OTH = 1 # other 37 | ASC = 2 # ascii capital letter 38 | ASS = 3 # ascii small letter 39 | ACV = 4 # accent capital vowel 40 | ACO = 5 # accent capital other 41 | ASV = 6 # accent small vowel 42 | ASO = 7 # accent small other 43 | CLASS_NUM = 8 # total classes 44 | 45 | Latin1_CharToClass = ( 46 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 47 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F 48 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 49 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F 50 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 51 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F 52 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 53 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F 54 | OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 55 | ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F 56 | ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 57 | ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F 58 | OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 59 | ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F 60 | ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 61 | ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F 62 | OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 63 | OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F 64 | UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 65 | OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F 66 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 67 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF 68 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 69 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF 70 | ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 71 | ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF 72 | ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 73 | ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF 74 | ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 75 | ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF 76 | ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 77 | ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF 78 | ) 79 | 80 | # 0 : illegal 81 | # 1 : very unlikely 82 | # 2 : normal 83 | # 3 : very likely 84 | Latin1ClassModel = ( 85 | # UDF OTH ASC ASS ACV ACO ASV ASO 86 | 0, 0, 0, 0, 0, 0, 0, 0, # UDF 87 | 0, 3, 3, 3, 3, 3, 3, 3, # OTH 88 | 0, 3, 3, 3, 3, 3, 3, 3, # ASC 89 | 0, 3, 3, 3, 1, 1, 3, 3, # ASS 90 | 0, 3, 3, 3, 1, 2, 1, 2, # ACV 91 | 0, 3, 3, 3, 3, 3, 3, 3, # ACO 92 | 0, 3, 1, 3, 1, 1, 1, 3, # ASV 93 | 0, 3, 1, 3, 1, 1, 3, 3, # ASO 94 | ) 95 | 96 | 97 | class Latin1Prober(CharSetProber): 98 | def __init__(self): 99 | CharSetProber.__init__(self) 100 | self.reset() 101 | 102 | def reset(self): 103 | self._mLastCharClass = OTH 104 | self._mFreqCounter = [0] * FREQ_CAT_NUM 105 | CharSetProber.reset(self) 106 | 107 | def get_charset_name(self): 108 | return "windows-1252" 109 | 110 | def feed(self, aBuf): 111 | aBuf = self.filter_with_english_letters(aBuf) 112 | for c in aBuf: 113 | charClass = Latin1_CharToClass[wrap_ord(c)] 114 | freq = Latin1ClassModel[(self._mLastCharClass * 
CLASS_NUM) 115 | + charClass] 116 | if freq == 0: 117 | self._mState = eNotMe 118 | break 119 | self._mFreqCounter[freq] += 1 120 | self._mLastCharClass = charClass 121 | 122 | return self.get_state() 123 | 124 | def get_confidence(self): 125 | if self.get_state() == eNotMe: 126 | return 0.01 127 | 128 | total = sum(self._mFreqCounter) 129 | if total < 0.01: 130 | confidence = 0.0 131 | else: 132 | confidence = ((self._mFreqCounter[3] / total) 133 | - (self._mFreqCounter[1] * 20.0 / total)) 134 | if confidence < 0.0: 135 | confidence = 0.0 136 | # lower the confidence of latin1 so that other more accurate 137 | # detector can take priority. 138 | confidence = confidence * 0.5 139 | return confidence 140 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/request.py: -------------------------------------------------------------------------------- 1 | try: 2 | from urllib.parse import urlencode 3 | except ImportError: 4 | from urllib import urlencode 5 | 6 | from .filepost import encode_multipart_formdata 7 | 8 | 9 | __all__ = ['RequestMethods'] 10 | 11 | 12 | class RequestMethods(object): 13 | """ 14 | Convenience mixin for classes who implement a :meth:`urlopen` method, such 15 | as :class:`~urllib3.connectionpool.HTTPConnectionPool` and 16 | :class:`~urllib3.poolmanager.PoolManager`. 17 | 18 | Provides behavior for making common types of HTTP request methods and 19 | decides which type of request field encoding to use. 20 | 21 | Specifically, 22 | 23 | :meth:`.request_encode_url` is for sending requests whose fields are 24 | encoded in the URL (such as GET, HEAD, DELETE). 25 | 26 | :meth:`.request_encode_body` is for sending requests whose fields are 27 | encoded in the *body* of the request using multipart or www-form-urlencoded 28 | (such as for POST, PUT, PATCH). 29 | 30 | :meth:`.request` is for making any kind of request, it will look up the 31 | appropriate encoding format and use one of the above two methods to make 32 | the request. 33 | 34 | Initializer parameters: 35 | 36 | :param headers: 37 | Headers to include with all requests, unless other headers are given 38 | explicitly. 39 | """ 40 | 41 | _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) 42 | 43 | def __init__(self, headers=None): 44 | self.headers = headers or {} 45 | 46 | def urlopen(self, method, url, body=None, headers=None, 47 | encode_multipart=True, multipart_boundary=None, 48 | **kw): # Abstract 49 | raise NotImplemented("Classes extending RequestMethods must implement " 50 | "their own ``urlopen`` method.") 51 | 52 | def request(self, method, url, fields=None, headers=None, **urlopen_kw): 53 | """ 54 | Make a request using :meth:`urlopen` with the appropriate encoding of 55 | ``fields`` based on the ``method`` used. 56 | 57 | This is a convenience method that requires the least amount of manual 58 | effort. It can be used in most situations, while still having the 59 | option to drop down to more specific methods when necessary, such as 60 | :meth:`request_encode_url`, :meth:`request_encode_body`, 61 | or even the lowest level :meth:`urlopen`. 
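        A usage sketch (the URL and field values are illustrative; any
        :class:`~urllib3.poolmanager.PoolManager` mixes in this method)::

            http = PoolManager()
            # GET-style methods encode ``fields`` into the query string
            r = http.request('GET', 'http://example.com/api', fields={'q': 'urllib3'})
            # everything else encodes ``fields`` into the request body
            r = http.request('POST', 'http://example.com/api', fields={'q': 'urllib3'})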
62 | """ 63 | method = method.upper() 64 | 65 | if method in self._encode_url_methods: 66 | return self.request_encode_url(method, url, fields=fields, 67 | headers=headers, 68 | **urlopen_kw) 69 | else: 70 | return self.request_encode_body(method, url, fields=fields, 71 | headers=headers, 72 | **urlopen_kw) 73 | 74 | def request_encode_url(self, method, url, fields=None, **urlopen_kw): 75 | """ 76 | Make a request using :meth:`urlopen` with the ``fields`` encoded in 77 | the url. This is useful for request methods like GET, HEAD, DELETE, etc. 78 | """ 79 | if fields: 80 | url += '?' + urlencode(fields) 81 | return self.urlopen(method, url, **urlopen_kw) 82 | 83 | def request_encode_body(self, method, url, fields=None, headers=None, 84 | encode_multipart=True, multipart_boundary=None, 85 | **urlopen_kw): 86 | """ 87 | Make a request using :meth:`urlopen` with the ``fields`` encoded in 88 | the body. This is useful for request methods like POST, PUT, PATCH, etc. 89 | 90 | When ``encode_multipart=True`` (default), then 91 | :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode 92 | the payload with the appropriate content type. Otherwise 93 | :meth:`urllib.urlencode` is used with the 94 | 'application/x-www-form-urlencoded' content type. 95 | 96 | Multipart encoding must be used when posting files, and it's reasonably 97 | safe to use it in other times too. However, it may break request 98 | signing, such as with OAuth. 99 | 100 | Supports an optional ``fields`` parameter of key/value strings AND 101 | key/filetuple. A filetuple is a (filename, data, MIME type) tuple where 102 | the MIME type is optional. For example:: 103 | 104 | fields = { 105 | 'foo': 'bar', 106 | 'fakefile': ('foofile.txt', 'contents of foofile'), 107 | 'realfile': ('barfile.txt', open('realfile').read()), 108 | 'typedfile': ('bazfile.bin', open('bazfile').read(), 109 | 'image/jpeg'), 110 | 'nonamefile': 'contents of nonamefile field', 111 | } 112 | 113 | When uploading a file, providing a filename (the first parameter of the 114 | tuple) is optional but recommended to best mimick behavior of browsers. 115 | 116 | Note that if ``headers`` are supplied, the 'Content-Type' header will 117 | be overwritten because it depends on the dynamic random boundary string 118 | which is used to compose the body of the request. The random boundary 119 | string can be explicitly set with the ``multipart_boundary`` parameter. 120 | """ 121 | if encode_multipart: 122 | body, content_type = encode_multipart_formdata( 123 | fields or {}, boundary=multipart_boundary) 124 | else: 125 | body, content_type = (urlencode(fields or {}), 126 | 'application/x-www-form-urlencoded') 127 | 128 | if headers is None: 129 | headers = self.headers 130 | 131 | headers_ = {'Content-Type': content_type} 132 | headers_.update(headers) 133 | 134 | return self.urlopen(method, url, body=body, headers=headers_, 135 | **urlopen_kw) 136 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/fields.py: -------------------------------------------------------------------------------- 1 | import email.utils 2 | import mimetypes 3 | 4 | from .packages import six 5 | 6 | 7 | def guess_content_type(filename, default='application/octet-stream'): 8 | """ 9 | Guess the "Content-Type" of a file. 10 | 11 | :param filename: 12 | The filename to guess the "Content-Type" of using :mod:`mimetypes`. 13 | :param default: 14 | If no "Content-Type" can be guessed, default to `default`. 
15 | """ 16 | if filename: 17 | return mimetypes.guess_type(filename)[0] or default 18 | return default 19 | 20 | 21 | def format_header_param(name, value): 22 | """ 23 | Helper function to format and quote a single header parameter. 24 | 25 | Particularly useful for header parameters which might contain 26 | non-ASCII values, like file names. This follows RFC 2231, as 27 | suggested by RFC 2388 Section 4.4. 28 | 29 | :param name: 30 | The name of the parameter, a string expected to be ASCII only. 31 | :param value: 32 | The value of the parameter, provided as a unicode string. 33 | """ 34 | if not any(ch in value for ch in '"\\\r\n'): 35 | result = '%s="%s"' % (name, value) 36 | try: 37 | result.encode('ascii') 38 | except UnicodeEncodeError: 39 | pass 40 | else: 41 | return result 42 | if not six.PY3: # Python 2: 43 | value = value.encode('utf-8') 44 | value = email.utils.encode_rfc2231(value, 'utf-8') 45 | value = '%s*=%s' % (name, value) 46 | return value 47 | 48 | 49 | class RequestField(object): 50 | """ 51 | A data container for request body parameters. 52 | 53 | :param name: 54 | The name of this request field. 55 | :param data: 56 | The data/value body. 57 | :param filename: 58 | An optional filename of the request field. 59 | :param headers: 60 | An optional dict-like object of headers to initially use for the field. 61 | """ 62 | def __init__(self, name, data, filename=None, headers=None): 63 | self._name = name 64 | self._filename = filename 65 | self.data = data 66 | self.headers = {} 67 | if headers: 68 | self.headers = dict(headers) 69 | 70 | @classmethod 71 | def from_tuples(cls, fieldname, value): 72 | """ 73 | A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. 74 | 75 | Supports constructing :class:`~urllib3.fields.RequestField` from 76 | parameter of key/value strings AND key/filetuple. A filetuple is a 77 | (filename, data, MIME type) tuple where the MIME type is optional. 78 | For example:: 79 | 80 | 'foo': 'bar', 81 | 'fakefile': ('foofile.txt', 'contents of foofile'), 82 | 'realfile': ('barfile.txt', open('realfile').read()), 83 | 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 84 | 'nonamefile': 'contents of nonamefile field', 85 | 86 | Field names and filenames must be unicode. 87 | """ 88 | if isinstance(value, tuple): 89 | if len(value) == 3: 90 | filename, data, content_type = value 91 | else: 92 | filename, data = value 93 | content_type = guess_content_type(filename) 94 | else: 95 | filename = None 96 | content_type = None 97 | data = value 98 | 99 | request_param = cls(fieldname, data, filename=filename) 100 | request_param.make_multipart(content_type=content_type) 101 | 102 | return request_param 103 | 104 | def _render_part(self, name, value): 105 | """ 106 | Overridable helper function to format a single header parameter. 107 | 108 | :param name: 109 | The name of the parameter, a string expected to be ASCII only. 110 | :param value: 111 | The value of the parameter, provided as a unicode string. 112 | """ 113 | return format_header_param(name, value) 114 | 115 | def _render_parts(self, header_parts): 116 | """ 117 | Helper function to format and quote a single header. 118 | 119 | Useful for single headers that are composed of multiple items. E.g., 120 | 'Content-Disposition' fields. 121 | 122 | :param header_parts: 123 | A sequence of (k, v) typles or a :class:`dict` of (k, v) to format 124 | as `k1="v1"; k2="v2"; ...`. 
125 | """ 126 | parts = [] 127 | iterable = header_parts 128 | if isinstance(header_parts, dict): 129 | iterable = header_parts.items() 130 | 131 | for name, value in iterable: 132 | if value: 133 | parts.append(self._render_part(name, value)) 134 | 135 | return '; '.join(parts) 136 | 137 | def render_headers(self): 138 | """ 139 | Renders the headers for this request field. 140 | """ 141 | lines = [] 142 | 143 | sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] 144 | for sort_key in sort_keys: 145 | if self.headers.get(sort_key, False): 146 | lines.append('%s: %s' % (sort_key, self.headers[sort_key])) 147 | 148 | for header_name, header_value in self.headers.items(): 149 | if header_name not in sort_keys: 150 | if header_value: 151 | lines.append('%s: %s' % (header_name, header_value)) 152 | 153 | lines.append('\r\n') 154 | return '\r\n'.join(lines) 155 | 156 | def make_multipart(self, content_disposition=None, content_type=None, 157 | content_location=None): 158 | """ 159 | Makes this request field into a multipart request field. 160 | 161 | This method overrides "Content-Disposition", "Content-Type" and 162 | "Content-Location" headers to the request parameter. 163 | 164 | :param content_type: 165 | The 'Content-Type' of the request body. 166 | :param content_location: 167 | The 'Content-Location' of the request body. 168 | 169 | """ 170 | self.headers['Content-Disposition'] = content_disposition or 'form-data' 171 | self.headers['Content-Disposition'] += '; '.join([ 172 | '', self._render_parts( 173 | (('name', self._name), ('filename', self._filename)) 174 | ) 175 | ]) 176 | self.headers['Content-Type'] = content_type 177 | self.headers['Content-Location'] = content_location 178 | -------------------------------------------------------------------------------- /source/requests/auth.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.auth 5 | ~~~~~~~~~~~~~ 6 | 7 | This module contains the authentication handlers for Requests. 
8 | """ 9 | 10 | import os 11 | import re 12 | import time 13 | import hashlib 14 | 15 | from base64 import b64encode 16 | 17 | from .compat import urlparse, str 18 | from .cookies import extract_cookies_to_jar 19 | from .utils import parse_dict_header, to_native_string 20 | 21 | CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' 22 | CONTENT_TYPE_MULTI_PART = 'multipart/form-data' 23 | 24 | 25 | def _basic_auth_str(username, password): 26 | """Returns a Basic Auth string.""" 27 | 28 | authstr = 'Basic ' + to_native_string( 29 | b64encode(('%s:%s' % (username, password)).encode('latin1')).strip() 30 | ) 31 | 32 | return authstr 33 | 34 | 35 | class AuthBase(object): 36 | """Base class that all auth implementations derive from""" 37 | 38 | def __call__(self, r): 39 | raise NotImplementedError('Auth hooks must be callable.') 40 | 41 | 42 | class HTTPBasicAuth(AuthBase): 43 | """Attaches HTTP Basic Authentication to the given Request object.""" 44 | def __init__(self, username, password): 45 | self.username = username 46 | self.password = password 47 | 48 | def __call__(self, r): 49 | r.headers['Authorization'] = _basic_auth_str(self.username, self.password) 50 | return r 51 | 52 | 53 | class HTTPProxyAuth(HTTPBasicAuth): 54 | """Attaches HTTP Proxy Authentication to a given Request object.""" 55 | def __call__(self, r): 56 | r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) 57 | return r 58 | 59 | 60 | class HTTPDigestAuth(AuthBase): 61 | """Attaches HTTP Digest Authentication to the given Request object.""" 62 | def __init__(self, username, password): 63 | self.username = username 64 | self.password = password 65 | self.last_nonce = '' 66 | self.nonce_count = 0 67 | self.chal = {} 68 | self.pos = None 69 | 70 | def build_digest_header(self, method, url): 71 | 72 | realm = self.chal['realm'] 73 | nonce = self.chal['nonce'] 74 | qop = self.chal.get('qop') 75 | algorithm = self.chal.get('algorithm') 76 | opaque = self.chal.get('opaque') 77 | 78 | if algorithm is None: 79 | _algorithm = 'MD5' 80 | else: 81 | _algorithm = algorithm.upper() 82 | # lambdas assume digest modules are imported at the top level 83 | if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': 84 | def md5_utf8(x): 85 | if isinstance(x, str): 86 | x = x.encode('utf-8') 87 | return hashlib.md5(x).hexdigest() 88 | hash_utf8 = md5_utf8 89 | elif _algorithm == 'SHA': 90 | def sha_utf8(x): 91 | if isinstance(x, str): 92 | x = x.encode('utf-8') 93 | return hashlib.sha1(x).hexdigest() 94 | hash_utf8 = sha_utf8 95 | 96 | KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) 97 | 98 | if hash_utf8 is None: 99 | return None 100 | 101 | # XXX not implemented yet 102 | entdig = None 103 | p_parsed = urlparse(url) 104 | path = p_parsed.path 105 | if p_parsed.query: 106 | path += '?' 
+ p_parsed.query 107 | 108 | A1 = '%s:%s:%s' % (self.username, realm, self.password) 109 | A2 = '%s:%s' % (method, path) 110 | 111 | HA1 = hash_utf8(A1) 112 | HA2 = hash_utf8(A2) 113 | 114 | if nonce == self.last_nonce: 115 | self.nonce_count += 1 116 | else: 117 | self.nonce_count = 1 118 | ncvalue = '%08x' % self.nonce_count 119 | s = str(self.nonce_count).encode('utf-8') 120 | s += nonce.encode('utf-8') 121 | s += time.ctime().encode('utf-8') 122 | s += os.urandom(8) 123 | 124 | cnonce = (hashlib.sha1(s).hexdigest()[:16]) 125 | noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, HA2) 126 | if _algorithm == 'MD5-SESS': 127 | HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) 128 | 129 | if qop is None: 130 | respdig = KD(HA1, "%s:%s" % (nonce, HA2)) 131 | elif qop == 'auth' or 'auth' in qop.split(','): 132 | respdig = KD(HA1, noncebit) 133 | else: 134 | # XXX handle auth-int. 135 | return None 136 | 137 | self.last_nonce = nonce 138 | 139 | # XXX should the partial digests be encoded too? 140 | base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ 141 | 'response="%s"' % (self.username, realm, nonce, path, respdig) 142 | if opaque: 143 | base += ', opaque="%s"' % opaque 144 | if algorithm: 145 | base += ', algorithm="%s"' % algorithm 146 | if entdig: 147 | base += ', digest="%s"' % entdig 148 | if qop: 149 | base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) 150 | 151 | return 'Digest %s' % (base) 152 | 153 | def handle_401(self, r, **kwargs): 154 | """Takes the given response and tries digest-auth, if needed.""" 155 | 156 | if self.pos is not None: 157 | # Rewind the file position indicator of the body to where 158 | # it was to resend the request. 159 | r.request.body.seek(self.pos) 160 | num_401_calls = getattr(self, 'num_401_calls', 1) 161 | s_auth = r.headers.get('www-authenticate', '') 162 | 163 | if 'digest' in s_auth.lower() and num_401_calls < 2: 164 | 165 | setattr(self, 'num_401_calls', num_401_calls + 1) 166 | pat = re.compile(r'digest ', flags=re.IGNORECASE) 167 | self.chal = parse_dict_header(pat.sub('', s_auth, count=1)) 168 | 169 | # Consume content and release the original connection 170 | # to allow our new request to reuse the same one. 
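            # Accessing ``r.content`` reads any unread body bytes so that the
            # underlying urllib3 connection is safe to release and reuse for
            # the follow-up request that carries the Authorization header.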
171 | r.content 172 | r.raw.release_conn() 173 | prep = r.request.copy() 174 | extract_cookies_to_jar(prep._cookies, r.request, r.raw) 175 | prep.prepare_cookies(prep._cookies) 176 | 177 | prep.headers['Authorization'] = self.build_digest_header( 178 | prep.method, prep.url) 179 | _r = r.connection.send(prep, **kwargs) 180 | _r.history.append(r) 181 | _r.request = prep 182 | 183 | return _r 184 | 185 | setattr(self, 'num_401_calls', 1) 186 | return r 187 | 188 | def __call__(self, r): 189 | # If we have a saved nonce, skip the 401 190 | if self.last_nonce: 191 | r.headers['Authorization'] = self.build_digest_header(r.method, r.url) 192 | try: 193 | self.pos = r.body.tell() 194 | except AttributeError: 195 | pass 196 | r.register_hook('response', self.handle_401) 197 | return r 198 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/_collections.py: -------------------------------------------------------------------------------- 1 | from collections import Mapping, MutableMapping 2 | try: 3 | from threading import RLock 4 | except ImportError: # Platform-specific: No threads available 5 | class RLock: 6 | def __enter__(self): 7 | pass 8 | 9 | def __exit__(self, exc_type, exc_value, traceback): 10 | pass 11 | 12 | 13 | try: # Python 2.7+ 14 | from collections import OrderedDict 15 | except ImportError: 16 | from .packages.ordered_dict import OrderedDict 17 | from .packages.six import itervalues 18 | 19 | 20 | __all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] 21 | 22 | 23 | _Null = object() 24 | 25 | 26 | class RecentlyUsedContainer(MutableMapping): 27 | """ 28 | Provides a thread-safe dict-like container which maintains up to 29 | ``maxsize`` keys while throwing away the least-recently-used keys beyond 30 | ``maxsize``. 31 | 32 | :param maxsize: 33 | Maximum number of recent elements to retain. 34 | 35 | :param dispose_func: 36 | Every time an item is evicted from the container, 37 | ``dispose_func(value)`` is called. Callback which will get called 38 | """ 39 | 40 | ContainerCls = OrderedDict 41 | 42 | def __init__(self, maxsize=10, dispose_func=None): 43 | self._maxsize = maxsize 44 | self.dispose_func = dispose_func 45 | 46 | self._container = self.ContainerCls() 47 | self.lock = RLock() 48 | 49 | def __getitem__(self, key): 50 | # Re-insert the item, moving it to the end of the eviction line. 51 | with self.lock: 52 | item = self._container.pop(key) 53 | self._container[key] = item 54 | return item 55 | 56 | def __setitem__(self, key, value): 57 | evicted_value = _Null 58 | with self.lock: 59 | # Possibly evict the existing value of 'key' 60 | evicted_value = self._container.get(key, _Null) 61 | self._container[key] = value 62 | 63 | # If we didn't evict an existing value, we might have to evict the 64 | # least recently used item from the beginning of the container. 
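            # ``OrderedDict.popitem(last=False)`` removes the oldest entry;
            # because __getitem__ re-inserts a key on every access, that
            # oldest entry is the least recently used one.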
65 | if len(self._container) > self._maxsize: 66 | _key, evicted_value = self._container.popitem(last=False) 67 | 68 | if self.dispose_func and evicted_value is not _Null: 69 | self.dispose_func(evicted_value) 70 | 71 | def __delitem__(self, key): 72 | with self.lock: 73 | value = self._container.pop(key) 74 | 75 | if self.dispose_func: 76 | self.dispose_func(value) 77 | 78 | def __len__(self): 79 | with self.lock: 80 | return len(self._container) 81 | 82 | def __iter__(self): 83 | raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') 84 | 85 | def clear(self): 86 | with self.lock: 87 | # Copy pointers to all values, then wipe the mapping 88 | # under Python 2, this copies the list of values twice :-| 89 | values = list(self._container.values()) 90 | self._container.clear() 91 | 92 | if self.dispose_func: 93 | for value in values: 94 | self.dispose_func(value) 95 | 96 | def keys(self): 97 | with self.lock: 98 | return self._container.keys() 99 | 100 | 101 | class HTTPHeaderDict(MutableMapping): 102 | """ 103 | :param headers: 104 | An iterable of field-value pairs. Must not contain multiple field names 105 | when compared case-insensitively. 106 | 107 | :param kwargs: 108 | Additional field-value pairs to pass in to ``dict.update``. 109 | 110 | A ``dict`` like container for storing HTTP Headers. 111 | 112 | Field names are stored and compared case-insensitively in compliance with 113 | RFC 7230. Iteration provides the first case-sensitive key seen for each 114 | case-insensitive pair. 115 | 116 | Using ``__setitem__`` syntax overwrites fields that compare equal 117 | case-insensitively in order to maintain ``dict``'s api. For fields that 118 | compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` 119 | in a loop. 120 | 121 | If multiple fields that are equal case-insensitively are passed to the 122 | constructor or ``.update``, the behavior is undefined and some will be 123 | lost. 124 | 125 | >>> headers = HTTPHeaderDict() 126 | >>> headers.add('Set-Cookie', 'foo=bar') 127 | >>> headers.add('set-cookie', 'baz=quxx') 128 | >>> headers['content-length'] = '7' 129 | >>> headers['SET-cookie'] 130 | 'foo=bar, baz=quxx' 131 | >>> headers['Content-Length'] 132 | '7' 133 | 134 | If you want to access the raw headers with their original casing 135 | for debugging purposes you can access the private ``._data`` attribute 136 | which is a normal python ``dict`` that maps the case-insensitive key to a 137 | list of tuples stored as (case-sensitive-original-name, value). Using the 138 | structure from above as our example: 139 | 140 | >>> headers._data 141 | {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')], 142 | 'content-length': [('content-length', '7')]} 143 | """ 144 | 145 | def __init__(self, headers=None, **kwargs): 146 | self._data = {} 147 | if headers is None: 148 | headers = {} 149 | self.update(headers, **kwargs) 150 | 151 | def add(self, key, value): 152 | """Adds a (name, value) pair, doesn't overwrite the value if it already 153 | exists. 154 | 155 | >>> headers = HTTPHeaderDict(foo='bar') 156 | >>> headers.add('Foo', 'baz') 157 | >>> headers['foo'] 158 | 'bar, baz' 159 | """ 160 | self._data.setdefault(key.lower(), []).append((key, value)) 161 | 162 | def getlist(self, key): 163 | """Returns a list of all the values for the named field. 
Returns an 164 | empty list if the key doesn't exist.""" 165 | return self[key].split(', ') if key in self else [] 166 | 167 | def copy(self): 168 | h = HTTPHeaderDict() 169 | for key in self._data: 170 | for rawkey, value in self._data[key]: 171 | h.add(rawkey, value) 172 | return h 173 | 174 | def __eq__(self, other): 175 | if not isinstance(other, Mapping): 176 | return False 177 | other = HTTPHeaderDict(other) 178 | return dict((k1, self[k1]) for k1 in self._data) == \ 179 | dict((k2, other[k2]) for k2 in other._data) 180 | 181 | def __getitem__(self, key): 182 | values = self._data[key.lower()] 183 | return ', '.join(value[1] for value in values) 184 | 185 | def __setitem__(self, key, value): 186 | self._data[key.lower()] = [(key, value)] 187 | 188 | def __delitem__(self, key): 189 | del self._data[key.lower()] 190 | 191 | def __len__(self): 192 | return len(self._data) 193 | 194 | def __iter__(self): 195 | for headers in itervalues(self._data): 196 | yield headers[0][0] 197 | 198 | def __repr__(self): 199 | return '%s(%r)' % (self.__class__.__name__, dict(self.items())) 200 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/universaldetector.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # 13 | # This library is free software; you can redistribute it and/or 14 | # modify it under the terms of the GNU Lesser General Public 15 | # License as published by the Free Software Foundation; either 16 | # version 2.1 of the License, or (at your option) any later version. 17 | # 18 | # This library is distributed in the hope that it will be useful, 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 21 | # Lesser General Public License for more details. 22 | # 23 | # You should have received a copy of the GNU Lesser General Public 24 | # License along with this library; if not, write to the Free Software 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 26 | # 02110-1301 USA 27 | ######################### END LICENSE BLOCK ######################### 28 | 29 | from . import constants 30 | import sys 31 | import codecs 32 | from .latin1prober import Latin1Prober # windows-1252 33 | from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets 34 | from .sbcsgroupprober import SBCSGroupProber # single-byte character sets 35 | from .escprober import EscCharSetProber # ISO-2122, etc. 
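# A minimal usage sketch (illustrative variable ``some_bytes``; the
# module-level ``chardet.detect()`` helper wraps this class for the
# common case):
#
#     detector = UniversalDetector()
#     detector.feed(some_bytes)
#     detector.close()
#     detector.result  # -> {'encoding': ..., 'confidence': ...}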
36 | import re 37 | 38 | MINIMUM_THRESHOLD = 0.20 39 | ePureAscii = 0 40 | eEscAscii = 1 41 | eHighbyte = 2 42 | 43 | 44 | class UniversalDetector: 45 | def __init__(self): 46 | self._highBitDetector = re.compile(b'[\x80-\xFF]') 47 | self._escDetector = re.compile(b'(\033|~{)') 48 | self._mEscCharSetProber = None 49 | self._mCharSetProbers = [] 50 | self.reset() 51 | 52 | def reset(self): 53 | self.result = {'encoding': None, 'confidence': 0.0} 54 | self.done = False 55 | self._mStart = True 56 | self._mGotData = False 57 | self._mInputState = ePureAscii 58 | self._mLastChar = b'' 59 | if self._mEscCharSetProber: 60 | self._mEscCharSetProber.reset() 61 | for prober in self._mCharSetProbers: 62 | prober.reset() 63 | 64 | def feed(self, aBuf): 65 | if self.done: 66 | return 67 | 68 | aLen = len(aBuf) 69 | if not aLen: 70 | return 71 | 72 | if not self._mGotData: 73 | # If the data starts with BOM, we know it is UTF 74 | if aBuf[:3] == codecs.BOM: 75 | # EF BB BF UTF-8 with BOM 76 | self.result = {'encoding': "UTF-8", 'confidence': 1.0} 77 | elif aBuf[:4] == codecs.BOM_UTF32_LE: 78 | # FF FE 00 00 UTF-32, little-endian BOM 79 | self.result = {'encoding': "UTF-32LE", 'confidence': 1.0} 80 | elif aBuf[:4] == codecs.BOM_UTF32_BE: 81 | # 00 00 FE FF UTF-32, big-endian BOM 82 | self.result = {'encoding': "UTF-32BE", 'confidence': 1.0} 83 | elif aBuf[:4] == b'\xFE\xFF\x00\x00': 84 | # FE FF 00 00 UCS-4, unusual octet order BOM (3412) 85 | self.result = { 86 | 'encoding': "X-ISO-10646-UCS-4-3412", 87 | 'confidence': 1.0 88 | } 89 | elif aBuf[:4] == b'\x00\x00\xFF\xFE': 90 | # 00 00 FF FE UCS-4, unusual octet order BOM (2143) 91 | self.result = { 92 | 'encoding': "X-ISO-10646-UCS-4-2143", 93 | 'confidence': 1.0 94 | } 95 | elif aBuf[:2] == codecs.BOM_LE: 96 | # FF FE UTF-16, little endian BOM 97 | self.result = {'encoding': "UTF-16LE", 'confidence': 1.0} 98 | elif aBuf[:2] == codecs.BOM_BE: 99 | # FE FF UTF-16, big endian BOM 100 | self.result = {'encoding': "UTF-16BE", 'confidence': 1.0} 101 | 102 | self._mGotData = True 103 | if self.result['encoding'] and (self.result['confidence'] > 0.0): 104 | self.done = True 105 | return 106 | 107 | if self._mInputState == ePureAscii: 108 | if self._highBitDetector.search(aBuf): 109 | self._mInputState = eHighbyte 110 | elif ((self._mInputState == ePureAscii) and 111 | self._escDetector.search(self._mLastChar + aBuf)): 112 | self._mInputState = eEscAscii 113 | 114 | self._mLastChar = aBuf[-1:] 115 | 116 | if self._mInputState == eEscAscii: 117 | if not self._mEscCharSetProber: 118 | self._mEscCharSetProber = EscCharSetProber() 119 | if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt: 120 | self.result = {'encoding': self._mEscCharSetProber.get_charset_name(), 121 | 'confidence': self._mEscCharSetProber.get_confidence()} 122 | self.done = True 123 | elif self._mInputState == eHighbyte: 124 | if not self._mCharSetProbers: 125 | self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(), 126 | Latin1Prober()] 127 | for prober in self._mCharSetProbers: 128 | if prober.feed(aBuf) == constants.eFoundIt: 129 | self.result = {'encoding': prober.get_charset_name(), 130 | 'confidence': prober.get_confidence()} 131 | self.done = True 132 | break 133 | 134 | def close(self): 135 | if self.done: 136 | return 137 | if not self._mGotData: 138 | if constants._debug: 139 | sys.stderr.write('no data received!\n') 140 | return 141 | self.done = True 142 | 143 | if self._mInputState == ePureAscii: 144 | self.result = {'encoding': 'ascii', 'confidence': 1.0} 145 | 
return self.result 146 | 147 | if self._mInputState == eHighbyte: 148 | proberConfidence = None 149 | maxProberConfidence = 0.0 150 | maxProber = None 151 | for prober in self._mCharSetProbers: 152 | if not prober: 153 | continue 154 | proberConfidence = prober.get_confidence() 155 | if proberConfidence > maxProberConfidence: 156 | maxProberConfidence = proberConfidence 157 | maxProber = prober 158 | if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD): 159 | self.result = {'encoding': maxProber.get_charset_name(), 160 | 'confidence': maxProber.get_confidence()} 161 | return self.result 162 | 163 | if constants._debug: 164 | sys.stderr.write('no probers hit minimum threshhold\n') 165 | for prober in self._mCharSetProbers[0].mProbers: 166 | if not prober: 167 | continue 168 | sys.stderr.write('%s confidence = %s\n' % 169 | (prober.get_charset_name(), 170 | prober.get_confidence())) 171 | -------------------------------------------------------------------------------- /source/workflow/background.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | # 4 | # Copyright © 2014 deanishe@deanishe.net 5 | # 6 | # MIT Licence. See http://opensource.org/licenses/MIT 7 | # 8 | # Created on 2014-04-06 9 | # 10 | 11 | """ 12 | 13 | .. versionadded:: 1.4 14 | 15 | Run scripts in the background. 16 | 17 | This module allows your workflow to execute longer-running processes, e.g. 18 | updating the data cache from a webservice, in the background, allowing 19 | the workflow to remain responsive in Alfred. 20 | 21 | For example, if your workflow requires up-to-date exchange rates, you might 22 | write a script ``update_exchange_rates.py`` to retrieve the data from the 23 | relevant webservice, and call it from your main workflow script: 24 | 25 | .. code-block:: python 26 | :linenos: 27 | 28 | from workflow import Workflow, ICON_INFO 29 | from workflow.background import run_in_background, is_running 30 | 31 | def main(wf): 32 | # Is cache over 6 hours old or non-existent? 33 | if not wf.cached_data_fresh('exchange-rates', 3600): 34 | run_in_background('update', 35 | ['/usr/bin/python', 36 | wf.workflowfile('update_exchange_rates.py')]) 37 | 38 | # Add a notification if the script is running 39 | if is_running('update'): 40 | wf.add_item('Updating exchange rates...', icon=ICON_INFO) 41 | 42 | exchange_rates = wf.cached_data('exchage-rates') 43 | 44 | # Display (possibly stale) cache data 45 | if exchange_rates: 46 | for rate in exchange_rates: 47 | wf.add_item(rate) 48 | 49 | # Send results to Alfred 50 | wf.send_feedback() 51 | 52 | if __name__ == '__main__': 53 | wf = Workflow() 54 | wf.run(main) 55 | 56 | 57 | For a working example, see :ref:`Part 2 of the tutorial `. 
58 | 59 | """ 60 | 61 | from __future__ import print_function, unicode_literals 62 | 63 | import sys 64 | import os 65 | import subprocess 66 | import pickle 67 | 68 | from workflow import Workflow 69 | 70 | __all__ = ['is_running', 'run_in_background'] 71 | 72 | wf = Workflow() 73 | log = wf.logger 74 | 75 | 76 | def _arg_cache(name): 77 | """Return path to pickle cache file for arguments 78 | 79 | :param name: name of task 80 | :type name: ``unicode`` 81 | :returns: Path to cache file 82 | :rtype: ``unicode`` filepath 83 | 84 | """ 85 | 86 | return wf.cachefile('{}.argcache'.format(name)) 87 | 88 | 89 | def _pid_file(name): 90 | """Return path to PID file for ``name`` 91 | 92 | :param name: name of task 93 | :type name: ``unicode`` 94 | :returns: Path to PID file for task 95 | :rtype: ``unicode`` filepath 96 | 97 | """ 98 | 99 | return wf.cachefile('{}.pid'.format(name)) 100 | 101 | 102 | def _process_exists(pid): 103 | """Check if a process with PID ``pid`` exists 104 | 105 | :param pid: PID to check 106 | :type pid: ``int`` 107 | :returns: ``True`` if process exists, else ``False`` 108 | :rtype: ``Boolean`` 109 | """ 110 | 111 | try: 112 | os.kill(pid, 0) 113 | except OSError: # not running 114 | return False 115 | return True 116 | 117 | 118 | def is_running(name): 119 | """ 120 | Test whether task is running under ``name`` 121 | 122 | :param name: name of task 123 | :type name: ``unicode`` 124 | :returns: ``True`` if task with name ``name`` is running, else ``False`` 125 | :rtype: ``Boolean`` 126 | 127 | """ 128 | pidfile = _pid_file(name) 129 | if not os.path.exists(pidfile): 130 | return False 131 | 132 | with open(pidfile, 'rb') as file: 133 | pid = int(file.read().strip()) 134 | 135 | if _process_exists(pid): 136 | return True 137 | 138 | elif os.path.exists(pidfile): 139 | os.unlink(pidfile) 140 | 141 | return False 142 | 143 | 144 | def _background(stdin='/dev/null', stdout='/dev/null', 145 | stderr='/dev/null'): # pragma: no cover 146 | """Fork the current process into a background daemon. 147 | 148 | :param stdin: where to read input 149 | :type stdin: filepath 150 | :param stdout: where to write stdout output 151 | :type stdout: filepath 152 | :param stderr: where to write stderr output 153 | :type stderr: filepath 154 | 155 | """ 156 | 157 | # Do first fork. 158 | try: 159 | pid = os.fork() 160 | if pid > 0: 161 | sys.exit(0) # Exit first parent. 162 | except OSError as e: 163 | log.critical("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror)) 164 | sys.exit(1) 165 | # Decouple from parent environment. 166 | os.chdir(wf.workflowdir) 167 | os.umask(0) 168 | os.setsid() 169 | # Do second fork. 170 | try: 171 | pid = os.fork() 172 | if pid > 0: 173 | sys.exit(0) # Exit second parent. 174 | except OSError as e: 175 | log.critical("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror)) 176 | sys.exit(1) 177 | # Now I am a daemon! 178 | # Redirect standard file descriptors. 179 | si = file(stdin, 'r', 0) 180 | so = file(stdout, 'a+', 0) 181 | se = file(stderr, 'a+', 0) 182 | if hasattr(sys.stdin, 'fileno'): 183 | os.dup2(si.fileno(), sys.stdin.fileno()) 184 | if hasattr(sys.stdout, 'fileno'): 185 | os.dup2(so.fileno(), sys.stdout.fileno()) 186 | if hasattr(sys.stderr, 'fileno'): 187 | os.dup2(se.fileno(), sys.stderr.fileno()) 188 | 189 | 190 | def run_in_background(name, args, **kwargs): 191 | """Pickle arguments to cache file, then call this script again via 192 | :func:`subprocess.call`. 
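    For example, reusing the illustrative task from the module docstring::

        run_in_background('update',
                          ['/usr/bin/python',
                           wf.workflowfile('update_exchange_rates.py')])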
193 | 194 | :param name: name of task 195 | :type name: ``unicode`` 196 | :param args: arguments passed as first argument to :func:`subprocess.call` 197 | :param \**kwargs: keyword arguments to :func:`subprocess.call` 198 | :returns: exit code of sub-process 199 | :rtype: ``int`` 200 | 201 | """ 202 | 203 | if is_running(name): 204 | log.info('Task `{}` is already running'.format(name)) 205 | return 206 | 207 | argcache = _arg_cache(name) 208 | 209 | # Cache arguments 210 | with open(argcache, 'wb') as file: 211 | pickle.dump({'args': args, 'kwargs': kwargs}, file) 212 | 213 | # Call this script 214 | cmd = ['/usr/bin/python', __file__, name] 215 | log.debug('Calling {!r} ...'.format(cmd)) 216 | retcode = subprocess.call(cmd) 217 | if retcode: # pragma: no cover 218 | log.error('Failed to call task in background') 219 | else: 220 | log.debug('Executing task `{}` in background...'.format(name)) 221 | return retcode 222 | 223 | 224 | def main(wf): # pragma: no cover 225 | """ 226 | Load cached arguments, fork into background, then call 227 | :meth:`subprocess.call` with cached arguments 228 | 229 | """ 230 | 231 | name = wf.args[0] 232 | argcache = _arg_cache(name) 233 | if not os.path.exists(argcache): 234 | log.critical('No arg cache found : {!r}'.format(argcache)) 235 | return 1 236 | 237 | # Load cached arguments 238 | with open(argcache, 'rb') as file: 239 | data = pickle.load(file) 240 | 241 | # Cached arguments 242 | args = data['args'] 243 | kwargs = data['kwargs'] 244 | 245 | # Delete argument cache file 246 | os.unlink(argcache) 247 | 248 | pidfile = _pid_file(name) 249 | 250 | # Fork to background 251 | _background() 252 | 253 | # Write PID to file 254 | with open(pidfile, 'wb') as file: 255 | file.write('{}'.format(os.getpid())) 256 | 257 | # Run the command 258 | try: 259 | log.debug('Task `{}` running'.format(name)) 260 | log.debug('cmd : {!r}'.format(args)) 261 | 262 | retcode = subprocess.call(args, **kwargs) 263 | 264 | if retcode: 265 | log.error('Command failed with [{}] : {!r}'.format(retcode, args)) 266 | 267 | finally: 268 | if os.path.exists(pidfile): 269 | os.unlink(pidfile) 270 | log.debug('Task `{}` finished'.format(name)) 271 | 272 | 273 | if __name__ == '__main__': # pragma: no cover 274 | wf.run(main) 275 | -------------------------------------------------------------------------------- /source/requests/packages/chardet/escsm.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .constants import eStart, eError, eItsMe 29 | 30 | HZ_cls = ( 31 | 1,0,0,0,0,0,0,0, # 00 - 07 32 | 0,0,0,0,0,0,0,0, # 08 - 0f 33 | 0,0,0,0,0,0,0,0, # 10 - 17 34 | 0,0,0,1,0,0,0,0, # 18 - 1f 35 | 0,0,0,0,0,0,0,0, # 20 - 27 36 | 0,0,0,0,0,0,0,0, # 28 - 2f 37 | 0,0,0,0,0,0,0,0, # 30 - 37 38 | 0,0,0,0,0,0,0,0, # 38 - 3f 39 | 0,0,0,0,0,0,0,0, # 40 - 47 40 | 0,0,0,0,0,0,0,0, # 48 - 4f 41 | 0,0,0,0,0,0,0,0, # 50 - 57 42 | 0,0,0,0,0,0,0,0, # 58 - 5f 43 | 0,0,0,0,0,0,0,0, # 60 - 67 44 | 0,0,0,0,0,0,0,0, # 68 - 6f 45 | 0,0,0,0,0,0,0,0, # 70 - 77 46 | 0,0,0,4,0,5,2,0, # 78 - 7f 47 | 1,1,1,1,1,1,1,1, # 80 - 87 48 | 1,1,1,1,1,1,1,1, # 88 - 8f 49 | 1,1,1,1,1,1,1,1, # 90 - 97 50 | 1,1,1,1,1,1,1,1, # 98 - 9f 51 | 1,1,1,1,1,1,1,1, # a0 - a7 52 | 1,1,1,1,1,1,1,1, # a8 - af 53 | 1,1,1,1,1,1,1,1, # b0 - b7 54 | 1,1,1,1,1,1,1,1, # b8 - bf 55 | 1,1,1,1,1,1,1,1, # c0 - c7 56 | 1,1,1,1,1,1,1,1, # c8 - cf 57 | 1,1,1,1,1,1,1,1, # d0 - d7 58 | 1,1,1,1,1,1,1,1, # d8 - df 59 | 1,1,1,1,1,1,1,1, # e0 - e7 60 | 1,1,1,1,1,1,1,1, # e8 - ef 61 | 1,1,1,1,1,1,1,1, # f0 - f7 62 | 1,1,1,1,1,1,1,1, # f8 - ff 63 | ) 64 | 65 | HZ_st = ( 66 | eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07 67 | eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f 68 | eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17 69 | 5,eError, 6,eError, 5, 5, 4,eError,# 18-1f 70 | 4,eError, 4, 4, 4,eError, 4,eError,# 20-27 71 | 4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f 72 | ) 73 | 74 | HZCharLenTable = (0, 0, 0, 0, 0, 0) 75 | 76 | HZSMModel = {'classTable': HZ_cls, 77 | 'classFactor': 6, 78 | 'stateTable': HZ_st, 79 | 'charLenTable': HZCharLenTable, 80 | 'name': "HZ-GB-2312"} 81 | 82 | ISO2022CN_cls = ( 83 | 2,0,0,0,0,0,0,0, # 00 - 07 84 | 0,0,0,0,0,0,0,0, # 08 - 0f 85 | 0,0,0,0,0,0,0,0, # 10 - 17 86 | 0,0,0,1,0,0,0,0, # 18 - 1f 87 | 0,0,0,0,0,0,0,0, # 20 - 27 88 | 0,3,0,0,0,0,0,0, # 28 - 2f 89 | 0,0,0,0,0,0,0,0, # 30 - 37 90 | 0,0,0,0,0,0,0,0, # 38 - 3f 91 | 0,0,0,4,0,0,0,0, # 40 - 47 92 | 0,0,0,0,0,0,0,0, # 48 - 4f 93 | 0,0,0,0,0,0,0,0, # 50 - 57 94 | 0,0,0,0,0,0,0,0, # 58 - 5f 95 | 0,0,0,0,0,0,0,0, # 60 - 67 96 | 0,0,0,0,0,0,0,0, # 68 - 6f 97 | 0,0,0,0,0,0,0,0, # 70 - 77 98 | 0,0,0,0,0,0,0,0, # 78 - 7f 99 | 2,2,2,2,2,2,2,2, # 80 - 87 100 | 2,2,2,2,2,2,2,2, # 88 - 8f 101 | 2,2,2,2,2,2,2,2, # 90 - 97 102 | 2,2,2,2,2,2,2,2, # 98 - 9f 103 | 2,2,2,2,2,2,2,2, # a0 - a7 104 | 2,2,2,2,2,2,2,2, # a8 - af 105 | 2,2,2,2,2,2,2,2, # b0 - b7 106 | 2,2,2,2,2,2,2,2, # b8 - bf 107 | 2,2,2,2,2,2,2,2, # c0 - c7 108 | 2,2,2,2,2,2,2,2, # c8 - cf 109 | 2,2,2,2,2,2,2,2, # d0 - d7 110 | 2,2,2,2,2,2,2,2, # d8 - df 111 | 2,2,2,2,2,2,2,2, # e0 - e7 112 | 2,2,2,2,2,2,2,2, # e8 - ef 113 | 2,2,2,2,2,2,2,2, # f0 - f7 114 | 2,2,2,2,2,2,2,2, # f8 - ff 115 | ) 116 | 117 | ISO2022CN_st = ( 118 | eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07 119 | eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f 120 | eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17 121 | eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f 122 | eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27 123 | 5, 6,eError,eError,eError,eError,eError,eError,# 28-2f 124 | 
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37 125 | eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f 126 | ) 127 | 128 | ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0) 129 | 130 | ISO2022CNSMModel = {'classTable': ISO2022CN_cls, 131 | 'classFactor': 9, 132 | 'stateTable': ISO2022CN_st, 133 | 'charLenTable': ISO2022CNCharLenTable, 134 | 'name': "ISO-2022-CN"} 135 | 136 | ISO2022JP_cls = ( 137 | 2,0,0,0,0,0,0,0, # 00 - 07 138 | 0,0,0,0,0,0,2,2, # 08 - 0f 139 | 0,0,0,0,0,0,0,0, # 10 - 17 140 | 0,0,0,1,0,0,0,0, # 18 - 1f 141 | 0,0,0,0,7,0,0,0, # 20 - 27 142 | 3,0,0,0,0,0,0,0, # 28 - 2f 143 | 0,0,0,0,0,0,0,0, # 30 - 37 144 | 0,0,0,0,0,0,0,0, # 38 - 3f 145 | 6,0,4,0,8,0,0,0, # 40 - 47 146 | 0,9,5,0,0,0,0,0, # 48 - 4f 147 | 0,0,0,0,0,0,0,0, # 50 - 57 148 | 0,0,0,0,0,0,0,0, # 58 - 5f 149 | 0,0,0,0,0,0,0,0, # 60 - 67 150 | 0,0,0,0,0,0,0,0, # 68 - 6f 151 | 0,0,0,0,0,0,0,0, # 70 - 77 152 | 0,0,0,0,0,0,0,0, # 78 - 7f 153 | 2,2,2,2,2,2,2,2, # 80 - 87 154 | 2,2,2,2,2,2,2,2, # 88 - 8f 155 | 2,2,2,2,2,2,2,2, # 90 - 97 156 | 2,2,2,2,2,2,2,2, # 98 - 9f 157 | 2,2,2,2,2,2,2,2, # a0 - a7 158 | 2,2,2,2,2,2,2,2, # a8 - af 159 | 2,2,2,2,2,2,2,2, # b0 - b7 160 | 2,2,2,2,2,2,2,2, # b8 - bf 161 | 2,2,2,2,2,2,2,2, # c0 - c7 162 | 2,2,2,2,2,2,2,2, # c8 - cf 163 | 2,2,2,2,2,2,2,2, # d0 - d7 164 | 2,2,2,2,2,2,2,2, # d8 - df 165 | 2,2,2,2,2,2,2,2, # e0 - e7 166 | 2,2,2,2,2,2,2,2, # e8 - ef 167 | 2,2,2,2,2,2,2,2, # f0 - f7 168 | 2,2,2,2,2,2,2,2, # f8 - ff 169 | ) 170 | 171 | ISO2022JP_st = ( 172 | eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07 173 | eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f 174 | eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17 175 | eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f 176 | eError, 5,eError,eError,eError, 4,eError,eError,# 20-27 177 | eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f 178 | eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37 179 | eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f 180 | eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47 181 | ) 182 | 183 | ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) 184 | 185 | ISO2022JPSMModel = {'classTable': ISO2022JP_cls, 186 | 'classFactor': 10, 187 | 'stateTable': ISO2022JP_st, 188 | 'charLenTable': ISO2022JPCharLenTable, 189 | 'name': "ISO-2022-JP"} 190 | 191 | ISO2022KR_cls = ( 192 | 2,0,0,0,0,0,0,0, # 00 - 07 193 | 0,0,0,0,0,0,0,0, # 08 - 0f 194 | 0,0,0,0,0,0,0,0, # 10 - 17 195 | 0,0,0,1,0,0,0,0, # 18 - 1f 196 | 0,0,0,0,3,0,0,0, # 20 - 27 197 | 0,4,0,0,0,0,0,0, # 28 - 2f 198 | 0,0,0,0,0,0,0,0, # 30 - 37 199 | 0,0,0,0,0,0,0,0, # 38 - 3f 200 | 0,0,0,5,0,0,0,0, # 40 - 47 201 | 0,0,0,0,0,0,0,0, # 48 - 4f 202 | 0,0,0,0,0,0,0,0, # 50 - 57 203 | 0,0,0,0,0,0,0,0, # 58 - 5f 204 | 0,0,0,0,0,0,0,0, # 60 - 67 205 | 0,0,0,0,0,0,0,0, # 68 - 6f 206 | 0,0,0,0,0,0,0,0, # 70 - 77 207 | 0,0,0,0,0,0,0,0, # 78 - 7f 208 | 2,2,2,2,2,2,2,2, # 80 - 87 209 | 2,2,2,2,2,2,2,2, # 88 - 8f 210 | 2,2,2,2,2,2,2,2, # 90 - 97 211 | 2,2,2,2,2,2,2,2, # 98 - 9f 212 | 2,2,2,2,2,2,2,2, # a0 - a7 213 | 2,2,2,2,2,2,2,2, # a8 - af 214 | 2,2,2,2,2,2,2,2, # b0 - b7 215 | 2,2,2,2,2,2,2,2, # b8 - bf 216 | 2,2,2,2,2,2,2,2, # c0 - c7 217 | 2,2,2,2,2,2,2,2, # c8 - cf 218 | 2,2,2,2,2,2,2,2, # d0 - d7 219 | 2,2,2,2,2,2,2,2, # d8 - df 220 | 2,2,2,2,2,2,2,2, # e0 - e7 221 | 2,2,2,2,2,2,2,2, # e8 - ef 222 | 2,2,2,2,2,2,2,2, # f0 - f7 223 | 2,2,2,2,2,2,2,2, # f8 - ff 224 | ) 225 | 226 | ISO2022KR_st = ( 227 | eStart, 
3,eError,eStart,eStart,eStart,eError,eError,# 00-07 228 | eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f 229 | eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17 230 | eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f 231 | eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27 232 | ) 233 | 234 | ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0) 235 | 236 | ISO2022KRSMModel = {'classTable': ISO2022KR_cls, 237 | 'classFactor': 6, 238 | 'stateTable': ISO2022KR_st, 239 | 'charLenTable': ISO2022KRCharLenTable, 240 | 'name': "ISO-2022-KR"} 241 | 242 | # flake8: noqa 243 | -------------------------------------------------------------------------------- /source/requests/packages/urllib3/connection.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import sys 3 | import socket 4 | from socket import timeout as SocketTimeout 5 | import warnings 6 | 7 | try: # Python 3 8 | from http.client import HTTPConnection as _HTTPConnection, HTTPException 9 | except ImportError: 10 | from httplib import HTTPConnection as _HTTPConnection, HTTPException 11 | 12 | 13 | class DummyConnection(object): 14 | "Used to detect a failed ConnectionCls import." 15 | pass 16 | 17 | 18 | try: # Compiled with SSL? 19 | HTTPSConnection = DummyConnection 20 | import ssl 21 | BaseSSLError = ssl.SSLError 22 | except (ImportError, AttributeError): # Platform-specific: No SSL. 23 | ssl = None 24 | 25 | class BaseSSLError(BaseException): 26 | pass 27 | 28 | 29 | from .exceptions import ( 30 | ConnectTimeoutError, 31 | SystemTimeWarning, 32 | ) 33 | from .packages.ssl_match_hostname import match_hostname 34 | from .packages import six 35 | 36 | from .util.ssl_ import ( 37 | resolve_cert_reqs, 38 | resolve_ssl_version, 39 | ssl_wrap_socket, 40 | assert_fingerprint, 41 | ) 42 | 43 | from .util import connection 44 | 45 | 46 | port_by_scheme = { 47 | 'http': 80, 48 | 'https': 443, 49 | } 50 | 51 | RECENT_DATE = datetime.date(2014, 1, 1) 52 | 53 | 54 | class HTTPConnection(_HTTPConnection, object): 55 | """ 56 | Based on httplib.HTTPConnection but provides an extra constructor 57 | backwards-compatibility layer between older and newer Pythons. 58 | 59 | Additional keyword parameters are used to configure attributes of the connection. 60 | Accepted parameters include: 61 | 62 | - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` 63 | - ``source_address``: Set the source address for the current connection. 64 | 65 | .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x 66 | 67 | - ``socket_options``: Set specific options on the underlying socket. If not specified, then 68 | defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling 69 | Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. 70 | 71 | For example, if you wish to enable TCP Keep Alive in addition to the defaults, 72 | you might pass:: 73 | 74 | HTTPConnection.default_socket_options + [ 75 | (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), 76 | ] 77 | 78 | Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). 79 | """ 80 | 81 | default_port = port_by_scheme['http'] 82 | 83 | #: Disable Nagle's algorithm by default. 84 | #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` 85 | default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] 86 | 87 | #: Whether this connection verifies the host's certificate. 
88 | is_verified = False 89 | 90 | def __init__(self, *args, **kw): 91 | if six.PY3: # Python 3 92 | kw.pop('strict', None) 93 | 94 | # Pre-set source_address in case we have an older Python like 2.6. 95 | self.source_address = kw.get('source_address') 96 | 97 | if sys.version_info < (2, 7): # Python 2.6 98 | # _HTTPConnection on Python 2.6 will balk at this keyword arg, but 99 | # not newer versions. We can still use it when creating a 100 | # connection though, so we pop it *after* we have saved it as 101 | # self.source_address. 102 | kw.pop('source_address', None) 103 | 104 | #: The socket options provided by the user. If no options are 105 | #: provided, we use the default options. 106 | self.socket_options = kw.pop('socket_options', self.default_socket_options) 107 | 108 | # Superclass also sets self.source_address in Python 2.7+. 109 | _HTTPConnection.__init__(self, *args, **kw) 110 | 111 | def _new_conn(self): 112 | """ Establish a socket connection and set nodelay settings on it. 113 | 114 | :return: New socket connection. 115 | """ 116 | extra_kw = {} 117 | if self.source_address: 118 | extra_kw['source_address'] = self.source_address 119 | 120 | if self.socket_options: 121 | extra_kw['socket_options'] = self.socket_options 122 | 123 | try: 124 | conn = connection.create_connection( 125 | (self.host, self.port), self.timeout, **extra_kw) 126 | 127 | except SocketTimeout: 128 | raise ConnectTimeoutError( 129 | self, "Connection to %s timed out. (connect timeout=%s)" % 130 | (self.host, self.timeout)) 131 | 132 | return conn 133 | 134 | def _prepare_conn(self, conn): 135 | self.sock = conn 136 | # the _tunnel_host attribute was added in python 2.6.3 (via 137 | # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do 138 | # not have them. 139 | if getattr(self, '_tunnel_host', None): 140 | # TODO: Fix tunnel so it doesn't depend on self.sock state. 141 | self._tunnel() 142 | # Mark this connection as not reusable 143 | self.auto_open = 0 144 | 145 | def connect(self): 146 | conn = self._new_conn() 147 | self._prepare_conn(conn) 148 | 149 | 150 | class HTTPSConnection(HTTPConnection): 151 | default_port = port_by_scheme['https'] 152 | 153 | def __init__(self, host, port=None, key_file=None, cert_file=None, 154 | strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw): 155 | 156 | HTTPConnection.__init__(self, host, port, strict=strict, 157 | timeout=timeout, **kw) 158 | 159 | self.key_file = key_file 160 | self.cert_file = cert_file 161 | 162 | # Required property for Google AppEngine 1.9.0 which otherwise causes 163 | # HTTPS requests to go out as HTTP. (See Issue #356) 164 | self._protocol = 'https' 165 | 166 | def connect(self): 167 | conn = self._new_conn() 168 | self._prepare_conn(conn) 169 | self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file) 170 | 171 | 172 | class VerifiedHTTPSConnection(HTTPSConnection): 173 | """ 174 | Based on httplib.HTTPSConnection but wraps the socket with 175 | SSL certification. 
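    A rough usage sketch (illustrative host and CA bundle path; the connection
    pool classes normally drive this for you)::

        conn = VerifiedHTTPSConnection('example.com', 443)
        conn.set_cert(cert_reqs='CERT_REQUIRED', ca_certs='/path/to/cacert.pem')
        conn.connect()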
176 | """ 177 | cert_reqs = None 178 | ca_certs = None 179 | ssl_version = None 180 | assert_fingerprint = None 181 | 182 | def set_cert(self, key_file=None, cert_file=None, 183 | cert_reqs=None, ca_certs=None, 184 | assert_hostname=None, assert_fingerprint=None): 185 | 186 | self.key_file = key_file 187 | self.cert_file = cert_file 188 | self.cert_reqs = cert_reqs 189 | self.ca_certs = ca_certs 190 | self.assert_hostname = assert_hostname 191 | self.assert_fingerprint = assert_fingerprint 192 | 193 | def connect(self): 194 | # Add certificate verification 195 | conn = self._new_conn() 196 | 197 | resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs) 198 | resolved_ssl_version = resolve_ssl_version(self.ssl_version) 199 | 200 | hostname = self.host 201 | if getattr(self, '_tunnel_host', None): 202 | # _tunnel_host was added in Python 2.6.3 203 | # (See: http://hg.python.org/cpython/rev/0f57b30a152f) 204 | 205 | self.sock = conn 206 | # Calls self._set_hostport(), so self.host is 207 | # self._tunnel_host below. 208 | self._tunnel() 209 | # Mark this connection as not reusable 210 | self.auto_open = 0 211 | 212 | # Override the host with the one we're requesting data from. 213 | hostname = self._tunnel_host 214 | 215 | is_time_off = datetime.date.today() < RECENT_DATE 216 | if is_time_off: 217 | warnings.warn(( 218 | 'System time is way off (before {0}). This will probably ' 219 | 'lead to SSL verification errors').format(RECENT_DATE), 220 | SystemTimeWarning 221 | ) 222 | 223 | # Wrap socket using verification with the root certs in 224 | # trusted_root_certs 225 | self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file, 226 | cert_reqs=resolved_cert_reqs, 227 | ca_certs=self.ca_certs, 228 | server_hostname=hostname, 229 | ssl_version=resolved_ssl_version) 230 | 231 | if self.assert_fingerprint: 232 | assert_fingerprint(self.sock.getpeercert(binary_form=True), 233 | self.assert_fingerprint) 234 | elif resolved_cert_reqs != ssl.CERT_NONE \ 235 | and self.assert_hostname is not False: 236 | match_hostname(self.sock.getpeercert(), 237 | self.assert_hostname or hostname) 238 | 239 | self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED 240 | or self.assert_fingerprint is not None) 241 | 242 | 243 | if ssl: 244 | # Make a copy for testing. 245 | UnverifiedHTTPSConnection = HTTPSConnection 246 | HTTPSConnection = VerifiedHTTPSConnection 247 | --------------------------------------------------------------------------------