├── .gitignore
├── LICENSE
├── Makefile
├── README.md
├── screenshot.gif
└── src
├── config.py
├── dropbox
├── __init__.py
├── client.py
├── datastore.py
├── rest.py
├── session.py
├── six.py
└── trusted-certs.crt
├── dropbox_filter.py
├── dropbox_handler.py
├── dropbox_prefetch.py
├── helpers.py
├── icon.png
├── icons
├── desktop.png
├── download.png
├── folder.png
├── folder_app.png
├── folder_camera.png
├── folder_photos.png
├── folder_public.png
├── folder_user.png
├── keynote.png
├── numbers.png
├── package.png
├── page_white.png
├── page_white_acrobat.png
├── page_white_actionscript.png
├── page_white_c.png
├── page_white_code.png
├── page_white_compressed.png
├── page_white_cplusplus.png
├── page_white_csharp.png
├── page_white_cup.png
├── page_white_dvd.png
├── page_white_excel.png
├── page_white_film.png
├── page_white_flash.png
├── page_white_gear.png
├── page_white_h.png
├── page_white_js.png
├── page_white_paint.png
├── page_white_php.png
├── page_white_picture.png
├── page_white_powerpoint.png
├── page_white_ruby.png
├── page_white_sound.png
├── page_white_text.png
├── page_white_tux.png
├── page_white_vector.png
├── page_white_visualstudio.png
├── page_white_word.png
└── pages.png
├── info.plist
├── urllib3
├── __init__.py
├── _collections.py
├── connection.py
├── connectionpool.py
├── contrib
│ ├── __init__.py
│ ├── ntlmpool.py
│ └── pyopenssl.py
├── exceptions.py
├── fields.py
├── filepost.py
├── packages
│ ├── __init__.py
│ ├── ordered_dict.py
│ ├── six.py
│ └── ssl_match_hostname
│ │ ├── __init__.py
│ │ └── _implementation.py
├── poolmanager.py
├── request.py
├── response.py
└── util
│ ├── __init__.py
│ ├── connection.py
│ ├── request.py
│ ├── response.py
│ ├── retry.py
│ ├── ssl_.py
│ ├── timeout.py
│ └── url.py
├── version
└── workflow
├── Notify.tgz
├── __init__.py
├── background.py
├── notify.py
├── update.py
├── version
├── web.py
├── workflow.py
└── workflow3.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[cod]
2 |
3 | # C extensions
4 | *.so
5 |
6 | # Packages
7 | *.egg
8 | *.egg-info
9 | dist
10 | build
11 | eggs
12 | parts
13 | bin
14 | var
15 | sdist
16 | develop-eggs
17 | .installed.cfg
18 | lib
19 | lib64
20 | __pycache__
21 |
22 | # Installer logs
23 | pip-log.txt
24 |
25 | # Unit test / coverage reports
26 | .coverage
27 | .tox
28 | nosetests.xml
29 |
30 | # Translations
31 | *.mo
32 |
33 | # Mr Developer
34 | .mr.developer.cfg
35 | .project
36 | .pydevproject
37 |
38 | src/dbicons/*
39 | *.alfredworkflow
40 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 Fabio Niephaus
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | all: clean build
2 |
3 | build:
4 | cd src ; \
5 | zip ../Dropbox-Client-for-Alfred.alfredworkflow . -r --exclude=*.DS_Store* --exclude=*.pyc*
6 |
7 | clean:
8 | rm -f *.alfredworkflow
9 |
10 | update-lib:
11 | /usr/bin/python -m pip install --target src --upgrade Alfred-Workflow
12 | rm -rf src/Alfred_Workflow-*.dist-info/
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Dropbox Client for Alfred
2 | ==============
3 |
4 | This workflow lets you quickly access multiple Dropbox accounts with [Alfred](http://www.alfredapp.com/).
5 |
6 | 
7 |
8 |
9 | ## Features
10 |
11 | - Copy Dropbox link to clipboard to share files quickly
12 | - Download and delete files from Dropbox
13 | - Supports Dropbox API search
14 | - Supports multiple Dropbox accounts
15 | - Supports notifications
16 | - Uses OAuth 2.0 to authorize the workflow
17 | - Saves your access tokens securely in OS X's keychain
18 |
19 |
20 | ## Credits
21 |
22 | This workflow uses [Dropbox's Core API](https://www.dropbox.com/developers/core), [urllib3](https://pypi.python.org/pypi/urllib3) and [alfred-workflow](https://github.com/deanishe/alfred-workflow).
23 | The icons have been crawled from [dropbox.com](https://www.dropbox.com/static/images/icons64/page_white.png).
24 |
--------------------------------------------------------------------------------
/screenshot.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/screenshot.gif
--------------------------------------------------------------------------------
/src/config.py:
--------------------------------------------------------------------------------
# Dropbox API app credentials for this workflow (OAuth 2 "app key"/"app secret").
# NOTE(review): these are committed to the repository. App secrets shipped in a
# distributed client cannot be kept confidential, but consider rotating the key
# pair if it is ever abused.
APP_KEY = 'aehc68b78l1x0xf'
APP_SECRET = 'q864ds64e65550p'
3 |
--------------------------------------------------------------------------------
/src/dropbox/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from . import client, rest, session
4 |
--------------------------------------------------------------------------------
/src/dropbox/session.py:
--------------------------------------------------------------------------------
1 | """
2 | dropbox.session.DropboxSession is responsible for holding OAuth authentication
3 | info (app key/secret, request key/secret, access key/secret). It knows how to
4 | use all of this information to craft properly constructed requests to Dropbox.
5 |
6 | A DropboxSession object must be passed to a dropbox.client.DropboxClient object upon
7 | initialization.
8 |
9 | """
10 | from __future__ import absolute_import
11 |
12 | import random
13 | import sys
14 | import time
15 | import urllib
16 |
17 | try:
18 | from urlparse import parse_qs
19 | except ImportError:
20 | # fall back for Python 2.5
21 | from cgi import parse_qs
22 |
23 | from . import rest
24 |
class OAuthToken(object):
    """An OAuth key/secret pair.

    Attributes:
        key: the public token string.
        secret: the secret string paired with ``key``.
    """

    def __init__(self, key, secret):
        self.key, self.secret = key, secret
33 |
class BaseSession(object):
    # Dropbox Core API v1 endpoints; every request is made over HTTPS
    # (see build_url below).
    API_VERSION = 1

    API_HOST = "api.dropbox.com"
    WEB_HOST = "www.dropbox.com"
    API_CONTENT_HOST = "api-content.dropbox.com"
    API_NOTIFICATION_HOST = "api-notify.dropbox.com"

    def __init__(self, consumer_key, consumer_secret, access_type="auto", locale=None, rest_client=rest.RESTClient):
        """Initialize a DropboxSession object.

        Your consumer key and secret are available
        at https://www.dropbox.com/developers/apps

        Args:

          - ``access_type``: Either 'auto' (the default), 'dropbox', or
            'app_folder'. You probably don't need to specify this and should
            just use the default.
          - ``locale``: A locale string ('en', 'pt_PT', etc.) [optional]
            The locale setting will be used to translate any user-facing error
            messages that the server generates. At this time Dropbox supports
            'en', 'es', 'fr', 'de', and 'ja', though we will be supporting more
            languages in the future. If you send a language the server doesn't
            support, messages will remain in English. Look for these translated
            messages in rest.ErrorResponse exceptions as e.user_error_msg.

        """
        # NOTE(review): the assert message mentions only two of the three
        # accepted values; the validation itself is correct.
        assert access_type in ['dropbox', 'app_folder', 'auto'], "expected access_type of 'dropbox' or 'app_folder'"
        self.consumer_creds = OAuthToken(consumer_key, consumer_secret)
        self.token = None           # access token (OAuthToken) once linked
        self.request_token = None   # request token during the OAuth 1 dance
        # The server-side name for app-folder access is 'sandbox'.
        self.root = 'sandbox' if access_type == 'app_folder' else access_type
        self.locale = locale
        self.rest_client = rest_client

    def is_linked(self):
        """Return whether the DropboxSession has an access token attached."""
        return bool(self.token)

    def unlink(self):
        """Remove any attached access token from the DropboxSession."""
        self.token = None

    def build_path(self, target, params=None):
        """Build the path component for an API URL.

        This method urlencodes the parameters, adds them
        to the end of the target url, and puts a marker for the API
        version in front.

        Args:
            - ``target``: A target url (e.g. '/files') to build upon.
            - ``params``: A dictionary of parameters (name to value). [optional]

        Returns:
            - The path and parameters components of an API URL.
        """
        # Python 2 only: quote() needs a byte string, so encode unicode first.
        # The version check short-circuits, so the `unicode` name is never
        # evaluated under Python 3.
        if sys.version_info < (3,) and type(target) == unicode:
            target = target.encode("utf8")

        # NOTE(review): urllib.quote / urllib.urlencode are the Python 2
        # locations of these functions; this module is Python 2 code.
        target_path = urllib.quote(target)

        # Copy so the caller's dict is never mutated.
        params = params or {}
        params = params.copy()

        if self.locale:
            params['locale'] = self.locale

        if params:
            return "/%s%s?%s" % (self.API_VERSION, target_path, urllib.urlencode(params))
        else:
            return "/%s%s" % (self.API_VERSION, target_path)

    def build_url(self, host, target, params=None):
        """Build an API URL.

        This method adds scheme and hostname to the path
        returned from build_path.

        Args:
            - ``target``: A target url (e.g. '/files') to build upon.
            - ``params``: A dictionary of parameters (name to value). [optional]

        Returns:
            - The full API URL.
        """
        return "https://%s%s" % (host, self.build_path(target, params))
122 |
class DropboxSession(BaseSession):
    # OAuth 1 session: implements the request-token -> user-authorize ->
    # access-token dance on top of BaseSession.

    def set_token(self, access_token, access_token_secret):
        """Attach an access token to the DropboxSession.

        Note that the access 'token' is made up of both a token string
        and a secret string.
        """
        self.token = OAuthToken(access_token, access_token_secret)

    def set_request_token(self, request_token, request_token_secret):
        """Attach an request token to the DropboxSession.

        Note that the request 'token' is made up of both a token string
        and a secret string.
        """
        self.request_token = OAuthToken(request_token, request_token_secret)

    def build_authorize_url(self, request_token, oauth_callback=None):
        """Build a request token authorization URL.

        After obtaining a request token, you'll need to send the user to
        the URL returned from this function so that they can confirm that
        they want to connect their account to your app.

        Args:
            - ``request_token``: A request token from obtain_request_token.
            - ``oauth_callback``: A url to redirect back to with the authorized
              request token.

        Returns:
            - An authorization for the given request token.
        """
        params = {'oauth_token': request_token.key,
                  }

        if oauth_callback:
            params['oauth_callback'] = oauth_callback

        return self.build_url(self.WEB_HOST, '/oauth/authorize', params)

    def obtain_request_token(self):
        """Obtain a request token from the Dropbox API.

        This is your first step in the OAuth process. You call this to get a
        request_token from the Dropbox server that you can then use with
        DropboxSession.build_authorize_url() to get the user to authorize it.
        After it's authorized you use this token with
        DropboxSession.obtain_access_token() to get an access token.

        NOTE: You should only need to do this once for each user, and then you
        can store the access token for that user for later operations.

        Returns:
            - An :py:class:`OAuthToken` object representing the
              request token Dropbox assigned to this app. Also attaches the
              request token as self.request_token.
        """
        self.token = None # clear any token currently on the request
        url = self.build_url(self.API_HOST, '/oauth/request_token')
        headers, params = self.build_access_headers('POST', url)

        # raw_response=True: the body is an urlencoded token, not JSON.
        response = self.rest_client.POST(url, headers=headers, params=params, raw_response=True)
        self.request_token = self._parse_token(response.read())
        return self.request_token

    def obtain_access_token(self, request_token=None):
        """Obtain an access token for a user.

        After you get a request token, and then send the user to the authorize
        URL, you can use the authorized request token with this method to get the
        access token to use for future operations. The access token is stored on
        the session object.

        Args:
            - ``request_token``: A request token from obtain_request_token. [optional]
              The request_token should have been authorized via the
              authorization url from build_authorize_url. If you don't pass
              a request_token, the fallback is self.request_token, which
              will exist if you previously called obtain_request_token on this
              DropboxSession instance.

        Returns:
            - An :py:class:`OAuthToken` object with fields ``key`` and ``secret``
              representing the access token Dropbox assigned to this app and
              user. Also attaches the access token as self.token.
        """
        request_token = request_token or self.request_token
        assert request_token, "No request_token available on the session. Please pass one."
        url = self.build_url(self.API_HOST, '/oauth/access_token')
        headers, params = self.build_access_headers('POST', url, request_token=request_token)

        response = self.rest_client.POST(url, headers=headers, params=params, raw_response=True)
        self.token = self._parse_token(response.read())
        return self.token

    def build_access_headers(self, method, resource_url, params=None, request_token=None):
        """Build OAuth access headers for a future request.

        Args:
            - ``method``: The HTTP method being used (e.g. 'GET' or 'POST').
            - ``resource_url``: The full url the request will be made to.
            - ``params``: A dictionary of parameters to add to what's already on the url.
              Typically, this would consist of POST parameters.

        Returns:
            - A tuple of (header_dict, params) where header_dict is a dictionary
              of header names and values appropriate for passing into dropbox.rest.RESTClient
              and params is a dictionary like the one that was passed in, but augmented with
              oauth-related parameters as appropriate.
        """
        # Copy so the caller's dict is never mutated.
        if params is None:
            params = {}
        else:
            params = params.copy()

        oauth_params = {
            'oauth_consumer_key' : self.consumer_creds.key,
            'oauth_timestamp' : self._generate_oauth_timestamp(),
            'oauth_nonce' : self._generate_oauth_nonce(),
            'oauth_version' : self._oauth_version(),
        }

        # An explicit request_token (during the handshake) beats the stored
        # access token.
        token = request_token if request_token is not None else self.token

        if token:
            oauth_params['oauth_token'] = token.key

        self._oauth_sign_request(oauth_params, self.consumer_creds, token)

        params.update(oauth_params)

        # OAuth 1 parameters travel in the query string / body here, so no
        # HTTP headers are needed.
        return {}, params

    @classmethod
    def _oauth_sign_request(cls, params, consumer_pair, token_pair):
        # PLAINTEXT signing: the "signature" is just the two secrets joined
        # by '&' (token secret omitted when there is no token yet). This is
        # only acceptable because every request goes over HTTPS.
        params.update({'oauth_signature_method' : 'PLAINTEXT',
                       'oauth_signature' : ('%s&%s' % (consumer_pair.secret, token_pair.secret)
                                            if token_pair is not None else
                                            '%s&' % (consumer_pair.secret,))})

    @classmethod
    def _generate_oauth_timestamp(cls):
        # Seconds since the epoch, as required by OAuth 1.
        return int(time.time())

    @classmethod
    def _generate_oauth_nonce(cls, length=8):
        # 8 random decimal digits. NOTE(review): uses `random`, not a CSPRNG;
        # a nonce only needs per-request uniqueness, not unpredictability.
        return ''.join([str(random.randint(0, 9)) for i in range(length)])

    @classmethod
    def _oauth_version(cls):
        return '1.0'

    @classmethod
    def _parse_token(cls, s):
        # Parse an urlencoded token response body such as
        # "oauth_token=...&oauth_token_secret=..." into an OAuthToken.
        if not s:
            raise ValueError("Invalid parameter string.")

        params = parse_qs(s, keep_blank_values=False)
        if not params:
            raise ValueError("Invalid parameter string: %r" % s)

        try:
            key = params['oauth_token'][0]
        except Exception:
            raise ValueError("'oauth_token' not found in OAuth request.")

        try:
            secret = params['oauth_token_secret'][0]
        except Exception:
            raise ValueError("'oauth_token_secret' not found in "
                             "OAuth request.")

        return OAuthToken(key, secret)
297 |
# Don't use this class directly.
class DropboxOAuth2Session(BaseSession):
    """Session variant that authenticates with an OAuth 2 bearer token."""

    def __init__(self, oauth2_access_token, locale, rest_client=rest.RESTClient):
        # OAuth 2 needs no consumer key/secret, hence the empty strings.
        super(DropboxOAuth2Session, self).__init__(
            "", "", "auto", locale=locale, rest_client=rest_client)
        self.access_token = oauth2_access_token

    def build_access_headers(self, method, resource_url, params=None, token=None):
        """Return ({'Authorization': 'Bearer ...'}, params); no signing needed."""
        assert token is None
        return {"Authorization": "Bearer " + self.access_token}, params
309 |
--------------------------------------------------------------------------------
/src/dropbox/six.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
def b(str_):
    """Return *str_* as latin-1 bytes on Python 3; unchanged on Python 2."""
    if sys.version_info < (3,):
        return str_
    return str_.encode('latin1')
7 |
def u(str_):
    """Return *str_* as latin-1 text on Python 2; unchanged on Python 3."""
    if sys.version_info >= (3,):
        return str_
    return str_.decode('latin1')
12 |
--------------------------------------------------------------------------------
/src/dropbox_filter.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import time
4 | from email.utils import parsedate
5 |
6 | import config
7 | from helpers import get_resource, get_hash, get_account_info, uid_exists
8 |
9 | from dropbox import client
10 | from workflow import Workflow, PasswordNotFound, ICON_TRASH
11 | from workflow.background import run_in_background
12 |
13 |
def main(wf):
    # Entry point run via wf.run(); builds the Alfred result list and emits it
    # with wf.send_feedback(). wf.args[0] is "<command> [<query>]".
    if wf.update_available:
        wf.add_item("An update is available!",
                    autocomplete='workflow:update', valid=False)

    user_input = wf.args[0]
    # Split off the first word as the command; the rest is the query.
    command = query = ''
    if len(user_input) > 0:
        command = user_input.split()[0]
        query = user_input[len(command) + 1:]

    try:
        # get_password() raises PasswordNotFound when no account is linked yet.
        # NOTE(review): accounts cached for 360 s here, but dropbox_prefetch.py
        # uses 60*60 for the same cache key — confirm which TTL is intended.
        wf.get_password('dropbox_access_tokens')
        accounts = wf.cached_data(
            'dropbox_accounts', data_func=get_account_info, max_age=360)
    except PasswordNotFound:
        accounts = None

    if command == 'auth':
        if query == '':
            # No code typed yet: offer to open the authorization URL.
            wf.add_item(
                'Please enter your authorization code',
                'If you don\'t have one, simply press enter.',
                arg='url %s' % get_auth_url(), valid=True)
        else:
            wf.add_item(
                'Authorize with "%s"' % query, 'Press enter to proceed',
                arg='auth %s' % query, valid=True)

    elif accounts is not None and command == 'remove':
        for account in accounts:
            wf.add_item(get_title(account), account[
                'email'], arg='remove %s' % account['uid'], valid=True)
    elif (accounts is not None and len(user_input) > 0 and
            uid_exists(command, accounts)):
        # Command is a linked account uid: browse that account's files.
        file_or_folder = get_file_or_folder(command, query)
        if isinstance(file_or_folder, dict):  # file
            # Offer the file actions; `arg` is re-parsed by dropbox_handler.py.
            wf.add_item(
                'Share', 'Copy link to clipboard',
                arg='share %s %s' % (command, file_or_folder['path']),
                icon='icons/folder_public.png', valid=True)
            wf.add_item(
                'Save to Downloads',
                arg='download %s %s' % (command, file_or_folder['path']),
                icon='icons/download.png', valid=True)
            wf.add_item(
                'Save to Desktop',
                arg='desktop %s %s' % (command, file_or_folder['path']),
                icon='icons/desktop.png', valid=True)
            wf.add_item(
                'Delete',
                arg='delete %s %s' % (command, file_or_folder['path']),
                icon=ICON_TRASH, valid=True)
        elif isinstance(file_or_folder, list) and file_or_folder:  # folder
            if query and query != '/':
                # Derive the parent directory from the first entry's path.
                path = file_or_folder[0]['path'].split('/')
                path = '/'.join(path[:-2])
                wf.add_item(
                    '..', 'Change to parent directory',
                    icon='icons/folder.png',
                    autocomplete='%s %s/' % (command, path), valid=False)
            for f in file_or_folder:
                title = os.path.basename(f['path'])
                # f['modified'] is an RFC 2822 date string; reformat it.
                subtitle = 'Modified: %s' % time.strftime(
                    '%Y-%m-%d %H:%M:%S', parsedate(f['modified']))

                # Fall back to the generic icon when no bundled one matches.
                icon = 'icons/%s.png' % f['icon']
                if not os.path.isfile(icon):
                    icon = 'icons/page_white.png'

                if f['is_dir']:
                    title += '/'
                    wf.add_item(
                        title, subtitle, icon=icon,
                        autocomplete='%s %s/' % (command, f['path']),
                        valid=False)
                else:
                    title += ' (%s)' % f['size']
                    wf.add_item(
                        title, subtitle, icon=icon,
                        autocomplete='%s %s' % (command, f['path']),
                        valid=False)
        else:
            wf.add_item(
                'No files were found', 'Try a different request.', valid=False)
    else:
        # Default view: list linked accounts and account management actions.
        if accounts is not None:
            for account in accounts:
                wf.add_item(get_title(account),
                            account['email'],
                            autocomplete='%s ' % account['uid'],
                            valid=False)

        wf.add_item('Add another Dropbox account',
                    '', autocomplete='auth ', valid=False)
        if accounts is not None and len(accounts) > 0:
            wf.add_item('Remove an existing Dropbox account',
                        '', autocomplete='remove', valid=False)

    wf.send_feedback()
114 |
115 |
def prefetch(wf, uid, path):
    """Spawn a background job that warms the cache below (uid, path)."""
    job = 'dropbox_prefetch_%s' % get_hash(uid, path)
    script = wf.workflowfile('dropbox_prefetch.py')
    run_in_background(job, ['/usr/bin/python', script, uid, path])
120 |
121 |
def get_file_or_folder(uid, query):
    """Return cached Dropbox metadata for *query* in account *uid* (2 min TTL)."""
    path = query or '/'
    # Drop a trailing slash, but keep the bare root path intact.
    if len(path) > 1 and path.endswith('/'):
        path = path[:-1]

    # Warm the caches of sub-folders in the background while Alfred renders.
    prefetch(wf, uid, path)

    return wf.cached_data(
        get_hash(uid, path), lambda: get_resource(uid, path), max_age=120)
134 |
135 |
def get_auth_url():
    """Return the Dropbox URL where the user approves this workflow."""
    return client.DropboxOAuth2FlowNoRedirect(
        config.APP_KEY, config.APP_SECRET).start()
140 |
141 |
def get_title(account):
    """Format an account row title: display name plus percentage of quota used."""
    quota_info = account['quota_info']
    used = quota_info['normal'] + quota_info['shared']
    percentage = round(100.0 * used / quota_info['quota'], 2)
    return '%s (%s%% of %s used)' % (
        account['display_name'], percentage, sizeof(quota_info['quota']))
150 |
151 |
def sizeof(num):
    """Return *num* bytes as a human-readable string, e.g. '2.0 KB'.

    Walks the binary unit ladder (1024 per step). Bug fix: the original
    fell off the end of the unit list and returned None for values of a
    petabyte or more; such values are now reported in PB.
    """
    for unit in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%3.1f %s" % (num, unit)
        num /= 1024.0
    return "%3.1f %s" % (num, 'PB')  # >= 1024 TB
157 |
158 |
if __name__ == '__main__':
    # Workflow wires up logging, caching, keychain access and update checks.
    # `wf` is also read as a module global by the helper functions above.
    wf = Workflow(
        update_settings={'github_slug': 'fniephaus/alfred-dropbox'},
        help_url='https://github.com/fniephaus/alfred-dropbox/issues'
    )
    log = wf.logger  # alias; not referenced elsewhere in this file
    sys.exit(wf.run(main))
166 |
--------------------------------------------------------------------------------
/src/dropbox_handler.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import subprocess
4 | import sys
5 | import webbrowser
6 |
7 | import config
8 |
9 | from dropbox import client, rest
10 | from workflow import Workflow, PasswordNotFound
11 |
12 |
def main(wf):
    # Alfred hands over one string: "<command> [<uid> [<path-or-query>]]".
    user_input = ''.join(wf.args)

    command = user_input.split()[0]
    query = user_input[len(command) + 1:]
    # For file commands the query is "<uid> <path>"; look up the uid's token.
    access_token = uid = path = ''
    if len(query.split()) > 0:
        uid = query.split()[0]
        path = query[len(uid) + 1:]
        try:
            access_tokens = json.loads(
                wf.get_password('dropbox_access_tokens'))
            if uid in access_tokens:
                access_token = access_tokens[uid]
        except PasswordNotFound:
            pass

    if access_token:
        # Per-file actions require a resolved access token.
        if command == "share":
            return share_path(path, access_token)
        elif command == "download":
            return download_path(path, access_token)
        elif command == "desktop":
            return download_path(path, access_token, '~/Desktop/')
        elif command == "delete":
            return delete_path(path, access_token)
        else:
            print 'Invalid command: %s' % command
    elif command == "url":
        # Open the OAuth authorization page in the default browser.
        webbrowser.open(query)
        return 0
    elif command == "auth":
        return authorize(query)
    elif command == "remove":
        return remove(query)

    # NOTE(review): "occured" is a typo for "occurred"; string kept as-is here.
    print 'An error occured.'
    return 0
51 |
52 |
def copy_to_clipboard(text):
    """Put *text* on the OS X clipboard by piping it to `pbcopy`.

    Bug fix: the argument list used to be ['pbcopy', 'w'] -- the 'w' was a
    leftover from an os.popen('pbcopy', 'w') call, where it was the file
    *mode*, not a command-line argument. pbcopy takes no such positional
    argument, so it is dropped.
    """
    p = subprocess.Popen(['pbcopy'],
                         stdin=subprocess.PIPE, close_fds=True)
    p.communicate(input=text.encode('utf-8'))
57 |
58 |
def share_path(path, access_token):
    """Create a share link for *path* and put it on the clipboard."""
    api_client = client.DropboxClient(access_token)
    try:
        link = api_client.share(path)
        copy_to_clipboard(link)
        print('Link copied to clipboard')
    except rest.ErrorResponse as e:
        print(e.user_error_msg or str(e))

    return 0
69 |
70 |
def download_path(path, access_token, target='~/Downloads/'):
    """Download the Dropbox file at *path* into *target* and reveal it.

    Picks a non-clashing local name ("name-1.ext", "name-2.ext", ...) when
    the file already exists. Bug fix: the local file handle was opened
    before the API call and never closed; it is now opened with `with`
    (so it is always closed) and only after the download succeeded (so a
    failed API call no longer leaves an empty file behind).
    """
    api_client = client.DropboxClient(access_token)

    try:
        filename = os.path.basename(path)
        to_file_path = os.path.expanduser('%s%s' % (target, filename))
        # Avoid clobbering an existing file: append -1, -2, ... to the stem.
        i = 1
        while os.path.isfile(to_file_path):
            (root, ext) = os.path.splitext(filename)
            to_file_path = os.path.expanduser(
                '%s%s%s%s' % (target, root, '-' + str(i), ext))
            i += 1

        f, metadata = api_client.get_file_and_metadata(path)
        with open(to_file_path, "wb") as to_file:
            to_file.write(f.read())

        # Reveal the downloaded file in Finder.
        os.popen('open -R "%s"' % to_file_path)

        print('%s saved to %s' % (filename, target))

    except rest.ErrorResponse as e:
        print(e.user_error_msg or str(e))

    return 0
97 |
98 |
def delete_path(path, access_token):
    """Delete *path* from the Dropbox account tied to *access_token*."""
    api_client = client.DropboxClient(access_token)
    try:
        api_client.file_delete(path)
        print('File deleted successfully')
    except rest.ErrorResponse as e:
        print(e.user_error_msg or str(e))

    return 0
108 |
109 |
def authorize(auth_code):
    """Trade *auth_code* for an access token and store it in the keychain."""
    flow = client.DropboxOAuth2FlowNoRedirect(
        config.APP_KEY, config.APP_SECRET)
    try:
        access_token, user_id = flow.finish(auth_code)

        # Merge into any tokens already stored for other accounts.
        try:
            access_tokens = json.loads(
                wf.get_password('dropbox_access_tokens'))
        except PasswordNotFound:
            access_tokens = {}

        access_tokens[user_id] = access_token
        wf.save_password('dropbox_access_tokens', json.dumps(access_tokens))
        wf.clear_cache()
        print('Authorization successful')
    except rest.ErrorResponse as e:
        print('Error: %s' % (e,))

    return 0
130 |
131 |
def remove(uid):
    """Deauthorize account *uid* and drop its token from the keychain.

    Bug fixes: the original called DropboxClient(None) and crashed when
    *uid* had no stored token (the revoke step is now skipped in that
    case), and the error message read "Not access tokens found."
    """
    try:
        access_tokens = json.loads(wf.get_password('dropbox_access_tokens'))
        access_token = access_tokens.pop(uid, None)
        if access_token is not None:
            # Best effort: revoke the token server-side before forgetting it.
            api_client = client.DropboxClient(access_token)
            api_client.disable_access_token()
        wf.save_password('dropbox_access_tokens', json.dumps(access_tokens))
        wf.clear_cache()
        print('Deauthorization successful')
    except PasswordNotFound:
        print('No access tokens found.')

    return 0
145 |
146 |
if __name__ == '__main__':
    # `wf` is read as a module global by the handler functions above.
    wf = Workflow()
    sys.exit(wf.run(main))
150 |
--------------------------------------------------------------------------------
/src/dropbox_prefetch.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | from workflow import Workflow
4 | from helpers import get_resource, get_hash, get_account_info, uid_exists
5 |
6 | wf = Workflow()
7 |
8 |
def cache_resource(uid, path):
    """Refresh the cached Dropbox listing for (uid, path)."""
    wf.cached_data(get_hash(uid, path), lambda: get_resource(uid, path))
14 |
15 |
def prefetch(uid, path):
    """Warm the cache for every sub-folder of the listing at (uid, path)."""
    accounts = wf.cached_data(
        'dropbox_accounts', data_func=get_account_info, max_age=60 * 60)
    if uid is None or path is None or not uid_exists(uid, accounts):
        return 0
    listing = wf.cached_data(get_hash(uid, path))
    if listing is None:
        return 0
    for entry in listing:
        if entry['is_dir']:
            cache_resource(uid, entry['path'])
27 |
if __name__ == '__main__':
    # Invoked as: dropbox_prefetch.py <uid> <path>; silently exit otherwise.
    if len(sys.argv) != 3:
        sys.exit()
    [_, uid, path] = sys.argv
    prefetch(uid, path)
33 |
--------------------------------------------------------------------------------
/src/helpers.py:
--------------------------------------------------------------------------------
1 | import json
2 | import hashlib
3 | from workflow import Workflow
4 | from dropbox import client, rest
5 |
6 | wf = Workflow()
7 |
8 |
def get_resource(uid, path):
    """Return the metadata for *path* in Dropbox account *uid*.

    Sends the cached listing's 'hash' so Dropbox can answer 304 (not
    modified), in which case the cached copy is returned. Returns a list
    of entries for a folder, a metadata dict for a single file, [] on
    404, and None (after logging) on any other API error.
    """
    cached_resource = wf.cached_data(get_hash(uid, path), max_age=0)  # any age
    hash_value = None
    if cached_resource and 'hash' in cached_resource:
        hash_value = cached_resource['hash']
    access_tokens = json.loads(wf.get_password('dropbox_access_tokens'))
    api_client = client.DropboxClient(access_tokens[uid])
    try:
        resp = api_client.metadata(path, hash=hash_value, file_limit=1000)
        if 'contents' in resp:
            return resp['contents']  # folder: return just the child entries
        else:
            return resp  # single file metadata dict
    except rest.ErrorResponse, e:
        # NOTE(review): statuses are compared as *strings* — confirm that
        # rest.ErrorResponse.status is a str here and not an int.
        if e.status == '304':
            return cached_resource
        elif e.status == '404':
            return []
        else:
            wf.logger.debug(e)
29 |
30 |
def get_hash(uid, path):
    """Build a cache key of the form '<uid>_<md5 hexdigest of utf-8 path>'."""
    digest = hashlib.md5(path.encode('utf-8')).hexdigest()
    return '%s_%s' % (uid, digest)
33 |
34 |
def get_account_info():
    """Fetch account_info() for every access token stored in the keychain."""
    tokens = json.loads(wf.get_password('dropbox_access_tokens')).values()
    return [client.DropboxClient(token).account_info() for token in tokens]
42 |
43 |
def uid_exists(uid, accounts):
    """Return True if string *uid* names one of *accounts* (int 'uid' keys).

    A non-numeric *uid* can never match, so it yields False.
    """
    try:
        numeric_uid = int(uid)
    except ValueError:
        return False
    return any(account['uid'] == numeric_uid for account in accounts)
52 |
--------------------------------------------------------------------------------
/src/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icon.png
--------------------------------------------------------------------------------
/src/icons/desktop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/desktop.png
--------------------------------------------------------------------------------
/src/icons/download.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/download.png
--------------------------------------------------------------------------------
/src/icons/folder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/folder.png
--------------------------------------------------------------------------------
/src/icons/folder_app.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/folder_app.png
--------------------------------------------------------------------------------
/src/icons/folder_camera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/folder_camera.png
--------------------------------------------------------------------------------
/src/icons/folder_photos.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/folder_photos.png
--------------------------------------------------------------------------------
/src/icons/folder_public.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/folder_public.png
--------------------------------------------------------------------------------
/src/icons/folder_user.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/folder_user.png
--------------------------------------------------------------------------------
/src/icons/keynote.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/keynote.png
--------------------------------------------------------------------------------
/src/icons/numbers.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/numbers.png
--------------------------------------------------------------------------------
/src/icons/package.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/package.png
--------------------------------------------------------------------------------
/src/icons/page_white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white.png
--------------------------------------------------------------------------------
/src/icons/page_white_acrobat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_acrobat.png
--------------------------------------------------------------------------------
/src/icons/page_white_actionscript.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_actionscript.png
--------------------------------------------------------------------------------
/src/icons/page_white_c.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_c.png
--------------------------------------------------------------------------------
/src/icons/page_white_code.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_code.png
--------------------------------------------------------------------------------
/src/icons/page_white_compressed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_compressed.png
--------------------------------------------------------------------------------
/src/icons/page_white_cplusplus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_cplusplus.png
--------------------------------------------------------------------------------
/src/icons/page_white_csharp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_csharp.png
--------------------------------------------------------------------------------
/src/icons/page_white_cup.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_cup.png
--------------------------------------------------------------------------------
/src/icons/page_white_dvd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_dvd.png
--------------------------------------------------------------------------------
/src/icons/page_white_excel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_excel.png
--------------------------------------------------------------------------------
/src/icons/page_white_film.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_film.png
--------------------------------------------------------------------------------
/src/icons/page_white_flash.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_flash.png
--------------------------------------------------------------------------------
/src/icons/page_white_gear.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_gear.png
--------------------------------------------------------------------------------
/src/icons/page_white_h.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_h.png
--------------------------------------------------------------------------------
/src/icons/page_white_js.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_js.png
--------------------------------------------------------------------------------
/src/icons/page_white_paint.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_paint.png
--------------------------------------------------------------------------------
/src/icons/page_white_php.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_php.png
--------------------------------------------------------------------------------
/src/icons/page_white_picture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_picture.png
--------------------------------------------------------------------------------
/src/icons/page_white_powerpoint.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_powerpoint.png
--------------------------------------------------------------------------------
/src/icons/page_white_ruby.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_ruby.png
--------------------------------------------------------------------------------
/src/icons/page_white_sound.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_sound.png
--------------------------------------------------------------------------------
/src/icons/page_white_text.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_text.png
--------------------------------------------------------------------------------
/src/icons/page_white_tux.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_tux.png
--------------------------------------------------------------------------------
/src/icons/page_white_vector.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_vector.png
--------------------------------------------------------------------------------
/src/icons/page_white_visualstudio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_visualstudio.png
--------------------------------------------------------------------------------
/src/icons/page_white_word.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/page_white_word.png
--------------------------------------------------------------------------------
/src/icons/pages.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/icons/pages.png
--------------------------------------------------------------------------------
/src/info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | bundleid
6 | com.fniephaus.dropbox
7 | category
8 | Internet
9 | connections
10 |
11 | 739C48C5-6C99-4B62-AAB4-E23DCD2035B5
12 |
13 |
14 | destinationuid
15 | 94B5A64D-BE8D-43B8-847A-F84CA4214C44
16 | modifiers
17 | 0
18 | modifiersubtext
19 |
20 |
21 |
22 | 96CE9469-D48F-491F-8D8D-BFD154DB2783
23 |
24 |
25 | destinationuid
26 | E037422F-BB78-4C52-9EC9-D23FCF6CA31A
27 | modifiers
28 | 0
29 | modifiersubtext
30 |
31 |
32 |
33 | E037422F-BB78-4C52-9EC9-D23FCF6CA31A
34 |
35 |
36 | destinationuid
37 | 739C48C5-6C99-4B62-AAB4-E23DCD2035B5
38 | modifiers
39 | 0
40 | modifiersubtext
41 |
42 |
43 |
44 |
45 | createdby
46 | Fabio Niephaus
47 | description
48 |
49 | disabled
50 |
51 | name
52 | Dropbox for Alfred
53 | objects
54 |
55 |
56 | config
57 |
58 | argumenttype
59 | 1
60 | escaping
61 | 102
62 | keyword
63 | db
64 | runningsubtext
65 | Contacting dropbox.com...
66 | script
67 | python dropbox_filter.py "{query}"
68 | title
69 | Dropbox
70 | type
71 | 0
72 | withspace
73 |
74 |
75 | type
76 | alfred.workflow.input.scriptfilter
77 | uid
78 | E037422F-BB78-4C52-9EC9-D23FCF6CA31A
79 | version
80 | 0
81 |
82 |
83 | config
84 |
85 | action
86 | 0
87 | argument
88 | 0
89 | hotkey
90 | 44
91 | hotmod
92 | 262144
93 | hotstring
94 | /
95 | leftcursor
96 |
97 | modsmode
98 | 0
99 | relatedApps
100 |
101 | com.googlecode.iterm2
102 |
103 | relatedAppsMode
104 | 2
105 |
106 | type
107 | alfred.workflow.trigger.hotkey
108 | uid
109 | 96CE9469-D48F-491F-8D8D-BFD154DB2783
110 | version
111 | 1
112 |
113 |
114 | config
115 |
116 | escaping
117 | 102
118 | script
119 | python dropbox_handler.py "{query}"
120 | type
121 | 0
122 |
123 | type
124 | alfred.workflow.action.script
125 | uid
126 | 739C48C5-6C99-4B62-AAB4-E23DCD2035B5
127 | version
128 | 0
129 |
130 |
131 | config
132 |
133 | lastpathcomponent
134 |
135 | onlyshowifquerypopulated
136 |
137 | output
138 | 0
139 | removeextension
140 |
141 | sticky
142 |
143 | text
144 | {query}
145 | title
146 | Dropbox for Alfred
147 |
148 | type
149 | alfred.workflow.output.notification
150 | uid
151 | 94B5A64D-BE8D-43B8-847A-F84CA4214C44
152 | version
153 | 0
154 |
155 |
156 | readme
157 |
158 | uidata
159 |
160 | 739C48C5-6C99-4B62-AAB4-E23DCD2035B5
161 |
162 | ypos
163 | 10
164 |
165 | 94B5A64D-BE8D-43B8-847A-F84CA4214C44
166 |
167 | ypos
168 | 10
169 |
170 | 96CE9469-D48F-491F-8D8D-BFD154DB2783
171 |
172 | ypos
173 | 10
174 |
175 | E037422F-BB78-4C52-9EC9-D23FCF6CA31A
176 |
177 | ypos
178 | 10
179 |
180 |
181 | webaddress
182 | https://github.com/fniephaus/alfred-dropbox/
183 |
184 |
185 |
--------------------------------------------------------------------------------
/src/urllib3/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | urllib3 - Thread-safe connection pooling and re-using.
3 | """
4 |
5 | __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
6 | __license__ = 'MIT'
7 | __version__ = '1.9'
8 |
9 |
10 | from .connectionpool import (
11 | HTTPConnectionPool,
12 | HTTPSConnectionPool,
13 | connection_from_url
14 | )
15 |
16 | from . import exceptions
17 | from .filepost import encode_multipart_formdata
18 | from .poolmanager import PoolManager, ProxyManager, proxy_from_url
19 | from .response import HTTPResponse
20 | from .util.request import make_headers
21 | from .util.url import get_host
22 | from .util.timeout import Timeout
23 | from .util.retry import Retry
24 |
25 |
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+ ships logging.NullHandler
    from logging import NullHandler
except ImportError:
    # Minimal stand-in for older Pythons: a handler that drops every record.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger(__name__).addHandler(NullHandler())
36 |
def add_stderr_logger(level=logging.DEBUG):
    """Attach a ``StreamHandler`` (stderr) to this module's logger.

    Handy for quick debugging of urllib3 itself. The logger's level is set
    to *level* and the newly attached handler is returned.
    """
    # Must live in this __init__.py so __name__ resolves to the right logger
    # even when urllib3 is vendored inside another package.
    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.setLevel(level)
    logger.debug('Added a stderr logging handler to logger: %s' % __name__)
    return handler
53 |
# ... Clean up: NullHandler was only needed to seed the logger above.
del NullHandler


# Set security warning to only go off once by default.
# (The 'module' action shows a given warning once per triggering module.)
import warnings
warnings.simplefilter('module', exceptions.InsecureRequestWarning)
61 |
def disable_warnings(category=exceptions.HTTPWarning):
    """Suppress urllib3 warnings of *category* (default: all of them)."""
    warnings.simplefilter('ignore', category)
--------------------------------------------------------------------------------
/src/urllib3/_collections.py:
--------------------------------------------------------------------------------
1 | from collections import Mapping, MutableMapping
2 | try:
3 | from threading import RLock
4 | except ImportError: # Platform-specific: No threads available
5 | class RLock:
6 | def __enter__(self):
7 | pass
8 |
9 | def __exit__(self, exc_type, exc_value, traceback):
10 | pass
11 |
12 |
13 | try: # Python 2.7+
14 | from collections import OrderedDict
15 | except ImportError:
16 | from .packages.ordered_dict import OrderedDict
17 | from .packages.six import itervalues
18 |
19 |
__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']


# Sentinel distinguishing "entry absent" from a stored value of None.
_Null = object()
24 |
25 |
class RecentlyUsedContainer(MutableMapping):
    """
    Provides a thread-safe dict-like container which maintains up to
    ``maxsize`` keys while throwing away the least-recently-used keys beyond
    ``maxsize``.

    :param maxsize:
        Maximum number of recent elements to retain.

    :param dispose_func:
        Every time an item is evicted from the container,
        ``dispose_func(value)`` is called. Callback which will get called
    """

    # Ordered mapping class used for storage; insertion order doubles as the
    # recency order (least recently used first).
    ContainerCls = OrderedDict

    def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock()

    def __getitem__(self, key):
        # Re-insert the item, moving it to the end of the eviction line.
        with self.lock:
            item = self._container.pop(key)
            self._container[key] = item
            return item

    def __setitem__(self, key, value):
        evicted_value = _Null
        with self.lock:
            # Possibly evict the existing value of 'key'
            evicted_value = self._container.get(key, _Null)
            self._container[key] = value

            # If we didn't evict an existing value, we might have to evict the
            # least recently used item from the beginning of the container.
            if len(self._container) > self._maxsize:
                _key, evicted_value = self._container.popitem(last=False)

        # NOTE: dispose_func runs after the lock has been released.
        if self.dispose_func and evicted_value is not _Null:
            self.dispose_func(evicted_value)

    def __delitem__(self, key):
        with self.lock:
            value = self._container.pop(key)

        # As in __setitem__, the dispose callback runs outside the lock.
        if self.dispose_func:
            self.dispose_func(value)

    def __len__(self):
        with self.lock:
            return len(self._container)

    def __iter__(self):
        raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')

    def clear(self):
        """Remove every entry, invoking ``dispose_func`` on each value."""
        with self.lock:
            # Copy pointers to all values, then wipe the mapping
            # under Python 2, this copies the list of values twice :-|
            values = list(self._container.values())
            self._container.clear()

        # Dispose of all evicted values outside the lock.
        if self.dispose_func:
            for value in values:
                self.dispose_func(value)

    def keys(self):
        # Returns the backing mapping's keys (a list on Python 2, a view on
        # Python 3).
        with self.lock:
            return self._container.keys()
100 |
class HTTPHeaderDict(MutableMapping):
    """A ``dict``-like container for HTTP headers.

    Field names are stored and compared case-insensitively in compliance
    with RFC 7230; iteration yields the first case-sensitive spelling seen
    for each case-insensitive name.

    ``__setitem__`` overwrites any field that compares equal
    case-insensitively, maintaining ``dict``'s api; to keep multiple values
    for equal field names, create a fresh ``HTTPHeaderDict`` and call
    ``.add`` in a loop. Passing several case-insensitively-equal names to
    the constructor or ``.update`` is undefined and some values may be lost.

    :param headers:
        An iterable of field-value pairs. Must not contain multiple field
        names when compared case-insensitively.
    :param kwargs:
        Additional field-value pairs to pass in to ``dict.update``.

    >>> headers = HTTPHeaderDict()
    >>> headers.add('Set-Cookie', 'foo=bar')
    >>> headers.add('set-cookie', 'baz=quxx')
    >>> headers['content-length'] = '7'
    >>> headers['SET-cookie']
    'foo=bar, baz=quxx'
    >>> headers['Content-Length']
    '7'

    The raw headers with their original casing live in the private
    ``._data`` dict, which maps each lowercased name to a list of
    ``(original-name, value)`` tuples.
    """

    def __init__(self, headers=None, **kwargs):
        # lowercased field name -> list of (raw name, value) pairs
        self._data = {}
        self.update({} if headers is None else headers, **kwargs)

    def add(self, key, value):
        """Add a ``(name, value)`` pair without overwriting existing values.

        >>> headers = HTTPHeaderDict(foo='bar')
        >>> headers.add('Foo', 'baz')
        >>> headers['foo']
        'bar, baz'
        """
        bucket = self._data.setdefault(key.lower(), [])
        bucket.append((key, value))

    def getlist(self, key):
        """Return all values stored for *key*, or ``[]`` if it is absent."""
        if key not in self:
            return []
        return self[key].split(', ')

    def copy(self):
        """Return a new ``HTTPHeaderDict`` holding the same raw pairs."""
        duplicate = HTTPHeaderDict()
        for pairs in self._data.values():
            for raw_name, value in pairs:
                duplicate.add(raw_name, value)
        return duplicate

    def __eq__(self, other):
        if not isinstance(other, Mapping):
            return False
        normalized = HTTPHeaderDict(other)
        mine = dict((name, self[name]) for name in self._data)
        theirs = dict((name, normalized[name]) for name in normalized._data)
        return mine == theirs

    def __getitem__(self, key):
        # Multiple stored values are joined per the RFC comma convention.
        pairs = self._data[key.lower()]
        return ', '.join(pair[1] for pair in pairs)

    def __setitem__(self, key, value):
        self._data[key.lower()] = [(key, value)]

    def __delitem__(self, key):
        del self._data[key.lower()]

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        # Yield the first raw spelling recorded for each field name.
        for pairs in self._data.values():
            yield pairs[0][0]

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
200 |
--------------------------------------------------------------------------------
/src/urllib3/connection.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import socket
3 | from socket import timeout as SocketTimeout
4 |
5 | try: # Python 3
6 | from http.client import HTTPConnection as _HTTPConnection, HTTPException
7 | except ImportError:
8 | from httplib import HTTPConnection as _HTTPConnection, HTTPException
9 |
10 |
class DummyConnection(object):
    """Placeholder type signalling that a ConnectionCls import failed."""
14 |
15 |
16 | try: # Compiled with SSL?
17 | HTTPSConnection = DummyConnection
18 | import ssl
19 | BaseSSLError = ssl.SSLError
20 | except (ImportError, AttributeError): # Platform-specific: No SSL.
21 | ssl = None
22 |
23 | class BaseSSLError(BaseException):
24 | pass
25 |
26 |
27 | from .exceptions import (
28 | ConnectTimeoutError,
29 | )
30 | from .packages.ssl_match_hostname import match_hostname
31 | from .packages import six
32 |
33 | from .util.ssl_ import (
34 | resolve_cert_reqs,
35 | resolve_ssl_version,
36 | ssl_wrap_socket,
37 | assert_fingerprint,
38 | )
39 |
40 | from .util import connection
41 |
42 |
# Default TCP port for each supported URL scheme.
port_by_scheme = {
    'http': 80,
    'https': 443,
}
47 |
48 |
class HTTPConnection(_HTTPConnection, object):
    """
    Based on httplib.HTTPConnection but provides an extra constructor
    backwards-compatibility layer between older and newer Pythons.

    Additional keyword parameters are used to configure attributes of the
    connection. Accepted parameters include:

    - ``strict``: See the documentation on
      :class:`urllib3.connectionpool.HTTPConnectionPool`
    - ``source_address``: Set the source address for the current connection.

      .. note:: This is ignored for Python 2.6. It is only applied for 2.7
         and 3.x

    - ``socket_options``: Set specific options on the underlying socket.
      If not specified, then defaults are loaded from
      ``HTTPConnection.default_socket_options`` which includes disabling
      Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is
      behind a proxy.

      For example, if you wish to enable TCP Keep Alive in addition to the
      defaults, you might pass::

          HTTPConnection.default_socket_options + [
              (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
          ]

      Or you may want to disable the defaults by passing an empty list
      (e.g., ``[]``).
    """

    default_port = port_by_scheme['http']

    #: Disable Nagle's algorithm by default.
    #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
    default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]

    #: Whether this connection verifies the host's certificate.
    is_verified = False

    def __init__(self, *args, **kw):
        if six.PY3:  # Python 3's HTTPConnection rejects 'strict'
            kw.pop('strict', None)

        # Pre-set source_address in case we have an older Python like 2.6.
        self.source_address = kw.get('source_address')

        if sys.version_info < (2, 7):  # Python 2.6
            # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
            # not newer versions. We can still use it when creating a
            # connection though, so we pop it *after* we have saved it as
            # self.source_address.
            kw.pop('source_address', None)

        #: The socket options provided by the user. If no options are
        #: provided, we use the default options.
        self.socket_options = kw.pop('socket_options', self.default_socket_options)

        # Superclass also sets self.source_address in Python 2.7+.
        _HTTPConnection.__init__(self, *args, **kw)

    def _new_conn(self):
        """ Establish a socket connection and set nodelay settings on it.

        :return: New socket connection.
        :raises ConnectTimeoutError: if the TCP connect times out.
        """
        extra_kw = {}
        if self.source_address:
            extra_kw['source_address'] = self.source_address

        if self.socket_options:
            extra_kw['socket_options'] = self.socket_options

        try:
            conn = connection.create_connection(
                (self.host, self.port), self.timeout, **extra_kw)

        except SocketTimeout:
            raise ConnectTimeoutError(
                self, "Connection to %s timed out. (connect timeout=%s)" %
                (self.host, self.timeout))

        return conn

    def _prepare_conn(self, conn):
        """Adopt *conn* as this connection's socket; set up a proxy tunnel
        when a tunnel host has been configured."""
        self.sock = conn
        # the _tunnel_host attribute was added in python 2.6.3 (via
        # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
        # not have them.
        if getattr(self, '_tunnel_host', None):
            # TODO: Fix tunnel so it doesn't depend on self.sock state.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0

    def connect(self):
        """Create a new socket connection and prepare it (incl. tunneling)."""
        conn = self._new_conn()
        self._prepare_conn(conn)
143 |
144 |
class HTTPSConnection(HTTPConnection):
    """HTTPS connection without certificate verification.

    ``connect`` wraps the socket with ``ssl.wrap_socket`` using only the
    optional client key/cert; no ``cert_reqs``/CA bundle is supplied, so the
    server's certificate is not checked. See ``VerifiedHTTPSConnection``
    below for the verifying variant.
    """
    default_port = port_by_scheme['https']

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):

        HTTPConnection.__init__(self, host, port, strict=strict,
                                timeout=timeout, **kw)

        # Optional client-side key/certificate for SSL.
        self.key_file = key_file
        self.cert_file = cert_file

        # Required property for Google AppEngine 1.9.0 which otherwise causes
        # HTTPS requests to go out as HTTP. (See Issue #356)
        self._protocol = 'https'

    def connect(self):
        # Establish the TCP connection (and proxy tunnel, if configured),
        # then wrap the socket with SSL — no certificate verification here.
        conn = self._new_conn()
        self._prepare_conn(conn)
        self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
165 |
166 |
class VerifiedHTTPSConnection(HTTPSConnection):
    """
    Based on httplib.HTTPSConnection but wraps the socket with
    SSL certification.
    """
    # Verification settings; populated via set_cert() before connect().
    cert_reqs = None
    ca_certs = None
    ssl_version = None

    def set_cert(self, key_file=None, cert_file=None,
                 cert_reqs=None, ca_certs=None,
                 assert_hostname=None, assert_fingerprint=None):
        """Store the key/cert and verification options applied on connect()."""
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def connect(self):
        # Add certificate verification
        conn = self._new_conn()

        resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
        resolved_ssl_version = resolve_ssl_version(self.ssl_version)

        hostname = self.host
        if getattr(self, '_tunnel_host', None):
            # _tunnel_host was added in Python 2.6.3
            # (See: http://hg.python.org/cpython/rev/0f57b30a152f)

            self.sock = conn
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0

            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host

        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
                                    cert_reqs=resolved_cert_reqs,
                                    ca_certs=self.ca_certs,
                                    server_hostname=hostname,
                                    ssl_version=resolved_ssl_version)

        if resolved_cert_reqs != ssl.CERT_NONE:
            if self.assert_fingerprint:
                # Pin by certificate fingerprint when one was supplied.
                assert_fingerprint(self.sock.getpeercert(binary_form=True),
                                   self.assert_fingerprint)
            elif self.assert_hostname is not False:
                # Otherwise check the certificate's hostname, unless the
                # caller explicitly disabled it with assert_hostname=False.
                match_hostname(self.sock.getpeercert(),
                               self.assert_hostname or hostname)

        self.is_verified = resolved_cert_reqs == ssl.CERT_REQUIRED
226 |
227 |
if ssl:
    # Make a copy for testing: keep the unverified implementation reachable
    # as UnverifiedHTTPSConnection, then expose the verifying class under
    # the public HTTPSConnection name.
    UnverifiedHTTPSConnection = HTTPSConnection
    HTTPSConnection = VerifiedHTTPSConnection
232 |
--------------------------------------------------------------------------------
/src/urllib3/contrib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/urllib3/contrib/__init__.py
--------------------------------------------------------------------------------
/src/urllib3/contrib/ntlmpool.py:
--------------------------------------------------------------------------------
1 | """
2 | NTLM authenticating pool, contributed by erikcederstran
3 |
4 | Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
5 | """
6 |
7 | try:
8 | from http.client import HTTPSConnection
9 | except ImportError:
10 | from httplib import HTTPSConnection
11 | from logging import getLogger
12 | from ntlm import ntlm
13 |
14 | from urllib3 import HTTPSConnectionPool
15 |
16 |
17 | log = getLogger(__name__)
18 |
19 |
class NTLMConnectionPool(HTTPSConnectionPool):
    """
    Implements an NTLM authentication version of an urllib3 connection pool
    """

    scheme = 'https'

    def __init__(self, user, pw, authurl, *args, **kwargs):
        """
        authurl is a random URL on the server that is protected by NTLM.
        user is the Windows user, probably in the DOMAIN\\username format.
        pw is the password for the user.
        """
        super(NTLMConnectionPool, self).__init__(*args, **kwargs)
        self.authurl = authurl
        self.rawuser = user
        # Split at the first backslash: DOMAIN\username.
        user_parts = user.split('\\', 1)
        self.domain = user_parts[0].upper()
        self.user = user_parts[1]
        self.pw = pw

    def _new_conn(self):
        # Performs the NTLM handshake that secures the connection. The socket
        # must be kept open while requests are performed.
        self.num_connections += 1
        # Lazy %-style logging args: messages are only formatted when DEBUG
        # is actually enabled.
        log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
                  self.num_connections, self.host, self.authurl)

        headers = {'Connection': 'Keep-Alive'}
        req_header = 'Authorization'
        resp_header = 'www-authenticate'

        conn = HTTPSConnection(host=self.host, port=self.port)

        # Send negotiation message
        headers[req_header] = (
            'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
        log.debug('Request headers: %s', headers)
        conn.request('GET', self.authurl, None, headers)
        res = conn.getresponse()
        reshdr = dict(res.getheaders())
        log.debug('Response status: %s %s', res.status, res.reason)
        log.debug('Response headers: %s', reshdr)
        log.debug('Response data: %s [...]', res.read(100))

        # Remove the reference to the socket, so that it can not be closed by
        # the response object (we want to keep the socket open)
        res.fp = None

        # Server should respond with a challenge message
        auth_header_values = reshdr[resp_header].split(', ')
        auth_header_value = None
        for s in auth_header_values:
            if s[:5] == 'NTLM ':
                auth_header_value = s[5:]
        if auth_header_value is None:
            raise Exception('Unexpected %s response header: %s' %
                            (resp_header, reshdr[resp_header]))

        # Send authentication message
        ServerChallenge, NegotiateFlags = \
            ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
        auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
                                                         self.user,
                                                         self.domain,
                                                         self.pw,
                                                         NegotiateFlags)
        headers[req_header] = 'NTLM %s' % auth_msg
        log.debug('Request headers: %s', headers)
        conn.request('GET', self.authurl, None, headers)
        res = conn.getresponse()
        log.debug('Response status: %s %s', res.status, res.reason)
        log.debug('Response headers: %s', dict(res.getheaders()))
        log.debug('Response data: %s [...]', res.read()[:100])
        if res.status != 200:
            if res.status == 401:
                raise Exception('Server rejected request: wrong '
                                'username or password')
            raise Exception('Wrong server response: %s %s' %
                            (res.status, res.reason))

        res.fp = None
        log.debug('Connection established')
        return conn

    def urlopen(self, method, url, body=None, headers=None, retries=3,
                redirect=True, assert_same_host=True):
        if headers is None:
            headers = {}
        else:
            # Work on a copy so the caller's dict is never mutated.
            headers = headers.copy()
        headers['Connection'] = 'Keep-Alive'
        return super(NTLMConnectionPool, self).urlopen(method, url, body,
                                                       headers, retries,
                                                       redirect,
                                                       assert_same_host)
115 |
--------------------------------------------------------------------------------
/src/urllib3/contrib/pyopenssl.py:
--------------------------------------------------------------------------------
1 | '''SSL with SNI_-support for Python 2. Follow these instructions if you would
2 | like to verify SSL certificates in Python 2. Note, the default libraries do
3 | *not* do certificate checking; you need to do additional work to validate
4 | certificates yourself.
5 |
6 | This needs the following packages installed:
7 |
8 | * pyOpenSSL (tested with 0.13)
9 | * ndg-httpsclient (tested with 0.3.2)
10 | * pyasn1 (tested with 0.1.6)
11 |
12 | You can install them with the following command:
13 |
14 | pip install pyopenssl ndg-httpsclient pyasn1
15 |
16 | To activate certificate checking, call
17 | :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
18 | before you begin making HTTP requests. This can be done in a ``sitecustomize``
19 | module, or at any other time before your application begins using ``urllib3``,
20 | like this::
21 |
22 | try:
23 | import urllib3.contrib.pyopenssl
24 | urllib3.contrib.pyopenssl.inject_into_urllib3()
25 | except ImportError:
26 | pass
27 |
28 | Now you can use :mod:`urllib3` as you normally would, and it will support SNI
29 | when the required modules are installed.
30 |
31 | Activating this module also has the positive side effect of disabling SSL/TLS
32 | compression in Python 2 (see `CRIME attack`_).
33 |
34 | If you want to configure the default list of supported cipher suites, you can
35 | set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
36 |
37 | Module Variables
38 | ----------------
39 |
40 | :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
41 | Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
42 | ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``
43 |
44 | .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
45 | .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
46 |
47 | '''
48 |
49 | from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
50 | from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
51 | import OpenSSL.SSL
52 | from pyasn1.codec.der import decoder as der_decoder
53 | from pyasn1.type import univ, constraint
54 | from socket import _fileobject, timeout
55 | import ssl
56 | import select
57 |
58 | from .. import connection
59 | from .. import util
60 |
61 | __all__ = ['inject_into_urllib3', 'extract_from_urllib3']
62 |
63 | # SNI only *really* works if we can read the subjectAltName of certificates.
64 | HAS_SNI = SUBJ_ALT_NAME_SUPPORT
65 |
66 | # Map from urllib3 to PyOpenSSL compatible parameter-values.
67 | _openssl_versions = {
68 | ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
69 | ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD,
70 | ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
71 | }
72 | _openssl_verify = {
73 | ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
74 | ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
75 | ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
76 | + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
77 | }
78 |
79 | # A secure default.
80 | # Sources for more information on TLS ciphers:
81 | #
82 | # - https://wiki.mozilla.org/Security/Server_Side_TLS
83 | # - https://www.ssllabs.com/projects/best-practices/index.html
84 | # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
85 | #
86 | # The general intent is:
87 | # - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
88 | # - prefer ECDHE over DHE for better performance,
89 | # - prefer any AES-GCM over any AES-CBC for better performance and security,
90 | # - use 3DES as fallback which is secure but slow,
91 | # - disable NULL authentication, MD5 MACs and DSS for security reasons.
92 | DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
93 | "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
94 | "!aNULL:!MD5:!DSS"
95 |
96 |
97 | orig_util_HAS_SNI = util.HAS_SNI
98 | orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
99 |
100 |
def inject_into_urllib3():
    'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'

    util.HAS_SNI = HAS_SNI
    connection.ssl_wrap_socket = ssl_wrap_socket
106 |
107 |
def extract_from_urllib3():
    'Undo monkey-patching by :func:`inject_into_urllib3`.'

    util.HAS_SNI = orig_util_HAS_SNI
    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
113 |
114 |
115 | ### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
    '''ASN.1 implementation for subjectAltNames support'''

    # There is no limit to how many SAN certificates a certificate may have,
    # however this needs to have some limit so we'll set an arbitrarily high
    # limit.
    # NOTE(review): overrides the base class's sizeSpec with a 1..1024
    # constraint so pyasn1 decoding of very large SAN lists still succeeds.
    sizeSpec = univ.SequenceOf.sizeSpec + \
        constraint.ValueSizeConstraint(1, 1024)
124 |
125 |
126 | ### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
    """Return the dNSName entries of *peer_cert*'s subjectAltName extension.

    Returns an empty list when SAN support is unavailable or the
    certificate carries no subjectAltName extension.
    """
    # Search through extensions
    dns_name = []
    if not SUBJ_ALT_NAME_SUPPORT:
        return dns_name

    general_names = SubjectAltName()
    for i in range(peer_cert.get_extension_count()):
        ext = peer_cert.get_extension(i)
        ext_name = ext.get_short_name()
        if ext_name != 'subjectAltName':
            continue

        # PyOpenSSL returns extension data in ASN.1 encoded form
        ext_dat = ext.get_data()
        decoded_dat = der_decoder.decode(ext_dat,
                                         asn1Spec=general_names)

        # NOTE(review): pyasn1's decode() presumably returns a
        # (value, remainder) tuple; the isinstance() filter below keeps
        # only the decoded SubjectAltName value — confirm against pyasn1.
        for name in decoded_dat:
            if not isinstance(name, SubjectAltName):
                continue
            for entry in range(len(name)):
                component = name.getComponentByPosition(entry)
                if component.getName() != 'dNSName':
                    continue
                dns_name.append(str(component.getComponent()))

    return dns_name
155 |
156 |
class WrappedSocket(object):
    '''API-compatibility wrapper for Python OpenSSL's Connection-class.'''

    def __init__(self, connection, socket, suppress_ragged_eofs=True):
        # connection: an OpenSSL.SSL.Connection layered over `socket`.
        # suppress_ragged_eofs: treat an abrupt peer close ("ragged EOF")
        # as a normal end-of-stream, mirroring the stdlib ssl module.
        self.connection = connection
        self.socket = socket
        self.suppress_ragged_eofs = suppress_ragged_eofs

    def fileno(self):
        # Delegate to the underlying OS socket.
        return self.socket.fileno()

    def makefile(self, mode, bufsize=-1):
        # Python 2's socket._fileobject supplies the file-like API.
        return _fileobject(self, mode, bufsize)

    def recv(self, *args, **kwargs):
        try:
            data = self.connection.recv(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            # Peer closed without a proper SSL shutdown.
            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
                return b''
            else:
                raise
        except OpenSSL.SSL.WantReadError:
            # OpenSSL needs more TLS records: wait until the raw socket is
            # readable (bounded by its timeout), then retry recursively.
            rd, wd, ed = select.select(
                [self.socket], [], [], self.socket.gettimeout())
            if not rd:
                raise timeout()
            else:
                return self.recv(*args, **kwargs)
        else:
            return data

    def settimeout(self, timeout):
        # NOTE(review): the parameter shadows the module-level `timeout`
        # exception inside this method; harmless here, but easy to trip over.
        return self.socket.settimeout(timeout)

    def sendall(self, data):
        return self.connection.sendall(data)

    def close(self):
        # Initiates the TLS shutdown handshake; does not close the raw socket.
        return self.connection.shutdown()

    def getpeercert(self, binary_form=False):
        # Mimic ssl.SSLSocket.getpeercert(): DER bytes when binary_form,
        # otherwise a dict populated with commonName and DNS SAN entries only.
        x509 = self.connection.get_peer_certificate()

        if not x509:
            return x509

        if binary_form:
            return OpenSSL.crypto.dump_certificate(
                OpenSSL.crypto.FILETYPE_ASN1,
                x509)

        return {
            'subject': (
                (('commonName', x509.get_subject().CN),),
            ),
            'subjectAltName': [
                ('DNS', value)
                for value in get_subj_alt_name(x509)
            ]
        }
218 |
219 |
220 | def _verify_callback(cnx, x509, err_no, err_depth, return_code):
221 | return err_no == 0
222 |
223 |
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None):
    """Drop-in replacement for urllib3's ssl_wrap_socket() built on
    pyOpenSSL, providing SNI via set_tlsext_host_name() on Python 2."""
    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
    if certfile:
        ctx.use_certificate_file(certfile)
    if keyfile:
        ctx.use_privatekey_file(keyfile)
    if cert_reqs != ssl.CERT_NONE:
        ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
    if ca_certs:
        try:
            ctx.load_verify_locations(ca_certs, None)
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
    else:
        # No CA bundle given: fall back to OpenSSL's default CA paths.
        ctx.set_default_verify_paths()

    # Disable TLS compression to mitigate CRIME attack (issue #309)
    OP_NO_COMPRESSION = 0x20000
    ctx.set_options(OP_NO_COMPRESSION)

    # Set list of supported ciphersuites.
    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)

    cnx = OpenSSL.SSL.Connection(ctx, sock)
    cnx.set_tlsext_host_name(server_hostname)
    cnx.set_connect_state()
    # Drive the handshake to completion; WantReadError means OpenSSL needs
    # more data, so block on the raw socket and retry.
    while True:
        try:
            cnx.do_handshake()
        except OpenSSL.SSL.WantReadError:
            select.select([sock], [], [])
            continue
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad handshake', e)
        break

    return WrappedSocket(cnx, sock)
263 |
--------------------------------------------------------------------------------
/src/urllib3/exceptions.py:
--------------------------------------------------------------------------------
1 |
2 | ## Base Exceptions
3 |
class HTTPError(Exception):
    "Base exception used by this module."
    # Root of the urllib3 exception hierarchy; the module's errors subclass it.
    pass

class HTTPWarning(Warning):
    "Base warning used by this module."
    pass
11 |
12 |
13 |
class PoolError(HTTPError):
    "Base exception for errors caused within a pool."

    def __init__(self, pool, message):
        HTTPError.__init__(self, "%s: %s" % (pool, message))
        self.pool = pool

    def __reduce__(self):
        # For pickling purposes: the pool argument is dropped.
        return self.__class__, (None, None)
23 |
24 |
class RequestError(PoolError):
    "Base exception for PoolErrors that have associated URLs."

    def __init__(self, pool, url, message):
        PoolError.__init__(self, pool, message)
        self.url = url

    def __reduce__(self):
        # For pickling purposes: only the URL survives the round-trip.
        return self.__class__, (None, self.url, None)
34 |
35 |
class SSLError(HTTPError):
    "Raised when SSL certificate fails in an HTTPS connection."
    # Distinct from the stdlib ssl.SSLError: this one is an HTTPError.
    pass


class ProxyError(HTTPError):
    "Raised when the connection to a proxy fails."
    pass


class DecodeError(HTTPError):
    "Raised when automatic decoding based on Content-Type fails."
    pass


class ProtocolError(HTTPError):
    "Raised when something unexpected happens mid-request/response."
    pass


#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
58 |
59 |
60 | ## Leaf Exceptions
61 |
class MaxRetryError(RequestError):
    "Raised when the maximum number of retries is exceeded."

    def __init__(self, pool, url, reason=None):
        self.reason = reason

        # A missing reason means the retries were consumed by redirects.
        if reason:
            cause = " (Caused by %r)" % reason
        else:
            cause = " (Caused by redirect)"
        message = ("Max retries exceeded with url: %s" % url) + cause

        RequestError.__init__(self, pool, url, message)
75 |
76 |
class HostChangedError(RequestError):
    "Raised when an existing pool gets a request for a foreign host."

    def __init__(self, pool, url, retries=3):
        self.retries = retries
        RequestError.__init__(
            self, pool, url, "Tried to open a foreign host with url: %s" % url)
84 |
85 |
class TimeoutStateError(HTTPError):
    """ Raised when passing an invalid state to a timeout """
    pass


class TimeoutError(HTTPError):
    """ Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutError` and
    :exc:`ConnectTimeoutError`.
    """
    pass


class ReadTimeoutError(TimeoutError, RequestError):
    "Raised when a socket timeout occurs while receiving data from a server"
    pass


# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
    "Raised when a socket timeout occurs while connecting to a server"
    pass


class EmptyPoolError(PoolError):
    "Raised when a pool runs out of connections and no more are allowed."
    pass


class ClosedPoolError(PoolError):
    "Raised when a request enters a pool after the pool has been closed."
    pass
120 |
121 |
class LocationValueError(ValueError, HTTPError):
    "Raised when there is something wrong with a given URL input."
    # Doubly inherits so callers may catch it as either ValueError or HTTPError.
    pass
125 |
126 |
class LocationParseError(LocationValueError):
    "Raised when get_host or similar fails to parse the URL input."

    def __init__(self, location):
        self.location = location
        # Bypasses ValueError.__init__; the message is set via HTTPError.
        HTTPError.__init__(self, "Failed to parse: %s" % location)
135 |
136 |
class InsecureRequestWarning(HTTPWarning):
    "Warned when making an unverified HTTPS request."
    pass
140 |
--------------------------------------------------------------------------------
/src/urllib3/fields.py:
--------------------------------------------------------------------------------
1 | import email.utils
2 | import mimetypes
3 |
4 | from .packages import six
5 |
6 |
def guess_content_type(filename, default='application/octet-stream'):
    """
    Guess the "Content-Type" of a file.

    :param filename:
        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
    :param default:
        If no "Content-Type" can be guessed, default to `default`.
    """
    if not filename:
        return default
    guessed, _ = mimetypes.guess_type(filename)
    return guessed or default
19 |
20 |
def format_header_param(name, value):
    """
    Format and quote a single header parameter.

    Pure-ASCII values without quote/backslash/CR/LF are emitted as a plain
    quoted string; anything else falls back to RFC 2231 encoding, as
    suggested by RFC 2388 Section 4.4.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as a unicode string.
    """
    needs_rfc2231 = any(ch in value for ch in '"\\\r\n')
    if not needs_rfc2231:
        candidate = '%s="%s"' % (name, value)
        try:
            candidate.encode('ascii')
        except UnicodeEncodeError:
            pass
        else:
            return candidate
    if not six.PY3:  # Python 2:
        value = value.encode('utf-8')
    value = email.utils.encode_rfc2231(value, 'utf-8')
    return '%s*=%s' % (name, value)
47 |
48 |
class RequestField(object):
    """
    A data container for request body parameters.

    :param name:
        The name of this request field.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field.
    :param headers:
        An optional dict-like object of headers to initially use for the field.
    """
    def __init__(self, name, data, filename=None, headers=None):
        self._name = name
        self._filename = filename
        self.data = data
        self.headers = dict(headers) if headers else {}

    @classmethod
    def from_tuples(cls, fieldname, value):
        """
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Supports constructing :class:`~urllib3.fields.RequestField` from
        parameter of key/value strings AND key/filetuple. A filetuple is a
        (filename, data, MIME type) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        """
        if not isinstance(value, tuple):
            filename, content_type, data = None, None, value
        elif len(value) == 3:
            filename, data, content_type = value
        else:
            filename, data = value
            content_type = guess_content_type(filename)

        field = cls(fieldname, data, filename=filename)
        field.make_multipart(content_type=content_type)
        return field

    def _render_part(self, name, value):
        """
        Overridable hook for formatting a single header parameter; defaults
        to :func:`format_header_param`.

        :param name:
            The name of the parameter, a string expected to be ASCII only.
        :param value:
            The value of the parameter, provided as a unicode string.
        """
        return format_header_param(name, value)

    def _render_parts(self, header_parts):
        """
        Format a dict or sequence of (k, v) tuples as
        ``k1="v1"; k2="v2"; ...``, skipping parameters with falsy values.
        """
        if isinstance(header_parts, dict):
            header_parts = header_parts.items()
        return '; '.join(self._render_part(key, val)
                         for key, val in header_parts if val)

    def render_headers(self):
        """
        Render this field's headers: well-known headers first (in a fixed
        order), then the rest, terminated by a blank line.
        """
        rendered = []

        sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
        for known in sort_keys:
            known_value = self.headers.get(known)
            if known_value:
                rendered.append('%s: %s' % (known, known_value))

        for header_name, header_value in self.headers.items():
            if header_name not in sort_keys and header_value:
                rendered.append('%s: %s' % (header_name, header_value))

        rendered.append('\r\n')
        return '\r\n'.join(rendered)

    def make_multipart(self, content_disposition=None, content_type=None,
                       content_location=None):
        """
        Make this request field into a multipart request field by setting
        the "Content-Disposition", "Content-Type" and "Content-Location"
        headers.

        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.
        """
        disposition = content_disposition or 'form-data'
        disposition += '; '.join([
            '', self._render_parts(
                (('name', self._name), ('filename', self._filename))
            )
        ])
        self.headers['Content-Disposition'] = disposition
        self.headers['Content-Type'] = content_type
        self.headers['Content-Location'] = content_location
178 |
--------------------------------------------------------------------------------
/src/urllib3/filepost.py:
--------------------------------------------------------------------------------
1 | import codecs
2 |
3 | from uuid import uuid4
4 | from io import BytesIO
5 |
6 | from .packages import six
7 | from .packages.six import b
8 | from .fields import RequestField
9 |
10 | writer = codecs.lookup('utf-8')[3]
11 |
12 |
def choose_boundary():
    """
    Our embarassingly-simple replacement for mimetools.choose_boundary:
    a random 32-character hex string.
    """
    token = uuid4()
    return token.hex
18 |
19 |
def iter_field_objects(fields):
    """
    Iterate over fields, yielding :class:`~urllib3.fields.RequestField`
    objects; plain (k, v) tuples are promoted via
    :meth:`RequestField.from_tuples`.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.
    """
    if isinstance(fields, dict):
        items = six.iteritems(fields)
    else:
        items = iter(fields)

    for item in items:
        if isinstance(item, RequestField):
            yield item
        else:
            yield RequestField.from_tuples(*item)
38 |
39 |
def iter_fields(fields):
    """
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    """
    if isinstance(fields, dict):
        fields = six.iteritems(fields)
    return ((k, v) for k, v in fields)
56 |
57 |
def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`mimetools.choose_boundary`.
    """
    if boundary is None:
        boundary = choose_boundary()

    body = BytesIO()
    for part in iter_field_objects(fields):
        body.write(b('--%s\r\n' % (boundary)))

        writer(body).write(part.render_headers())

        payload = part.data
        if isinstance(payload, int):
            payload = str(payload)  # Backwards compatibility

        # Text goes through the utf-8 incremental writer; bytes verbatim.
        if isinstance(payload, six.text_type):
            writer(body).write(payload)
        else:
            body.write(payload)

        body.write(b'\r\n')

    body.write(b('--%s--\r\n' % (boundary)))

    content_type = str('multipart/form-data; boundary=%s' % boundary)

    return body.getvalue(), content_type
94 |
--------------------------------------------------------------------------------
/src/urllib3/packages/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from . import ssl_match_hostname
4 |
5 |
--------------------------------------------------------------------------------
/src/urllib3/packages/ordered_dict.py:
--------------------------------------------------------------------------------
1 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
2 | # Passes Python2.7's test suite and incorporates all the latest updates.
3 | # Copyright 2009 Raymond Hettinger, released under the MIT License.
4 | # http://code.activestate.com/recipes/576693/
5 | try:
6 | from thread import get_ident as _get_ident
7 | except ImportError:
8 | from dummy_thread import get_ident as _get_ident
9 |
10 | try:
11 | from _abcoll import KeysView, ValuesView, ItemsView
12 | except ImportError:
13 | pass
14 |
15 |
16 | class OrderedDict(dict):
17 | 'Dictionary that remembers insertion order'
18 | # An inherited dict maps keys to values.
19 | # The inherited dict provides __getitem__, __len__, __contains__, and get.
20 | # The remaining methods are order-aware.
21 | # Big-O running times for all methods are the same as for regular dictionaries.
22 |
23 | # The internal self.__map dictionary maps keys to links in a doubly linked list.
24 | # The circular doubly linked list starts and ends with a sentinel element.
25 | # The sentinel element never gets deleted (this simplifies the algorithm).
26 | # Each link is stored as a list of length three: [PREV, NEXT, KEY].
27 |
    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary. Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Re-initializing a live instance must not rebuild the linked
            # list, or existing links would be orphaned.
            self.__root
        except AttributeError:
            self.__root = root = []  # sentinel node
            # Circular [PREV, NEXT, KEY] link; the sentinel never holds a key.
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)
43 |
    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            # Splice the new [PREV, NEXT, KEY] link in just before the sentinel.
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)
53 |
    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor
        # nodes. dict_delitem raises KeyError first if the key is missing.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev
62 |
63 | def __iter__(self):
64 | 'od.__iter__() <==> iter(od)'
65 | root = self.__root
66 | curr = root[1]
67 | while curr is not root:
68 | yield curr[2]
69 | curr = curr[1]
70 |
71 | def __reversed__(self):
72 | 'od.__reversed__() <==> reversed(od)'
73 | root = self.__root
74 | curr = root[0]
75 | while curr is not root:
76 | yield curr[2]
77 | curr = curr[0]
78 |
    def clear(self):
        'od.clear() -> None. Remove all items from od.'
        try:
            # Break the reference cycles between links before dropping them.
            # NOTE(review): dict has no itervalues() on Python 3, so there
            # the AttributeError path skips this teardown (still correct,
            # just leaves cycles to the GC).
            for node in self.__map.itervalues():
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            # __root/__map may not exist yet if __init__ has not run.
            pass
        dict.clear(self)
90 |
    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the node just before the sentinel (most recent key).
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            # Unlink the node just after the sentinel (oldest key).
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value
113 |
114 | # -- the following methods do not depend on the internal structure --
115 |
116 | def keys(self):
117 | 'od.keys() -> list of keys in od'
118 | return list(self)
119 |
120 | def values(self):
121 | 'od.values() -> list of values in od'
122 | return [self[key] for key in self]
123 |
124 | def items(self):
125 | 'od.items() -> list of (key, value) pairs in od'
126 | return [(key, self[key]) for key in self]
127 |
128 | def iterkeys(self):
129 | 'od.iterkeys() -> an iterator over the keys in od'
130 | return iter(self)
131 |
132 | def itervalues(self):
133 | 'od.itervalues -> an iterator over the values in od'
134 | for k in self:
135 | yield self[k]
136 |
137 | def iteritems(self):
138 | 'od.iteritems -> an iterator over the (key, value) items in od'
139 | for k in self:
140 | yield (k, self[k])
141 |
    def update(*args, **kwds):
        '''od.update(E, **F) -> None. Update od from dict/iterable E and F.

        If E is a dict instance, does: for k in E: od[k] = E[k]
        If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does: for k, v in E: od[k] = v
        In either case, this is followed by: for k, v in F.items(): od[k] = v

        '''
        # 'self' is taken positionally from *args (not as a named parameter)
        # so that a literal 'self' key can still be passed in kwds.
        if len(args) > 2:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        elif not args:
            raise TypeError('update() takes at least 1 argument (0 given)')
        self = args[0]
        # Make progressively weaker assumptions about "other"
        other = ()
        if len(args) == 2:
            other = args[1]
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__
174 |
175 | __marker = object()
176 |
177 | def pop(self, key, default=__marker):
178 | '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
179 | If key is not found, d is returned if given, otherwise KeyError is raised.
180 |
181 | '''
182 | if key in self:
183 | result = self[key]
184 | del self[key]
185 | return result
186 | if default is self.__marker:
187 | raise KeyError(key)
188 | return default
189 |
    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        # Missing key: insert it (at the end, preserving order) and return it.
        self[key] = default
        return default
196 |
    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        # _repr_running is a deliberately shared mutable default: a registry of
        # (object id, thread id) pairs used to detect recursive repr calls
        # (e.g. an OrderedDict that contains itself) and print '...' instead.
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            # Always unregister, even if rendering the items raised.
            del _repr_running[call_key]
209 |
    def __reduce__(self):
        'Return state information for pickling'
        # Capture items as [key, value] pairs in insertion order.
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        # Drop attributes every OrderedDict carries so that only
        # subclass-added instance state is pickled.
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)
219 |
    def copy(self):
        'od.copy() -> a shallow copy of od'
        # Re-running the constructor on self preserves insertion order.
        return self.__class__(self)
223 |
    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        '''
        d = cls()
        # Keys end up in the iteration order of the source iterable.
        for key in iterable:
            d[key] = value
        return d
234 |
    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            # Length check first is cheap; items() comparison enforces order.
            return len(self)==len(other) and self.items() == other.items()
        return dict.__eq__(self, other)
243 |
    def __ne__(self, other):
        # Python 2 does not derive != from ==, so define it explicitly.
        return not self == other
246 |
    # -- the following methods are only used in Python 2.7 --
    # KeysView/ValuesView/ItemsView are the ABC view classes imported at the
    # top of this module; they provide dynamic, set-like views over the dict.

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
260 |
--------------------------------------------------------------------------------
/src/urllib3/packages/six.py:
--------------------------------------------------------------------------------
1 | """Utilities for writing code that runs on Python 2 and 3"""
2 |
3 | #Copyright (c) 2010-2011 Benjamin Peterson
4 |
5 | #Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | #this software and associated documentation files (the "Software"), to deal in
7 | #the Software without restriction, including without limitation the rights to
8 | #use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | #the Software, and to permit persons to whom the Software is furnished to do so,
10 | #subject to the following conditions:
11 |
12 | #The above copyright notice and this permission notice shall be included in all
13 | #copies or substantial portions of the Software.
14 |
15 | #THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | #IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | #FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | #COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | #IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | #CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
22 | import operator
23 | import sys
24 | import types
25 |
__author__ = "Benjamin Peterson "
__version__ = "1.2.0" # Revision 41c74fef2ded


# True if we are running on Python 3.
PY3 = sys.version_info[0] == 3

if PY3:
    # Canonical aliases so calling code never has to mention py2-only names.
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        # Probe the platform: len() must return a Py_ssize_t, so a __len__
        # of 2**31 overflows on a 32-bit build and raises OverflowError.
        class X(object):
            def __len__(self):
                return 1 << 31
        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
65 |
66 |
67 | def _add_doc(func, doc):
68 | """Add documentation to a function."""
69 | func.__doc__ = doc
70 |
71 |
72 | def _import_module(name):
73 | """Import module, returning the module after the last dot."""
74 | __import__(name)
75 | return sys.modules[name]
76 |
77 |
class _LazyDescr(object):
    """Descriptor that resolves its target lazily, on first attribute access."""

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, tp):
        # Subclasses provide _resolve() to produce the real object.
        result = self._resolve()
        # Cache the resolved object on the instance under the same name.
        setattr(obj, self.name, result)
        # This is a bit ugly, but it avoids running this again.
        delattr(tp, self.name)
        return result
89 |
90 |
class MovedModule(_LazyDescr):
    """Lazy reference to a module that was renamed between Python 2 and 3.

    *old* is the Python 2 module name; *new* (default: *name*) is the
    Python 3 name.
    """

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        # Import happens only when the attribute is first touched.
        return _import_module(self.mod)
104 |
105 |
class MovedAttribute(_LazyDescr):
    """Lazy reference to an attribute that moved between Python 2 and 3.

    Stores the (module, attribute) pair appropriate for the running
    interpreter; the import and getattr happen on first access.
    """

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # Default the py3 attribute name to the py2 one, then to *name*.
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
129 |
130 |
131 |
class _MovedItems(types.ModuleType):
    """Lazy loading of moved objects"""


# Registry of every module/attribute whose location differs between
# Python 2 and 3; each entry becomes a lazy attribute of six.moves.
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),

    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("winreg", "_winreg"),
]
# Install each lazy descriptor as a class attribute of _MovedItems.
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
del attr

# Register the pseudo-module so "import six.moves" style access works.
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
187 |
188 |
def add_move(move):
    """Add an item to six.moves."""
    # *move* is a MovedModule/MovedAttribute; installing it on the class
    # makes it visible (lazily) on the shared `moves` pseudo-module.
    setattr(_MovedItems, move.name, move)
192 |
193 |
def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        # Already resolved moves live in the instance dict of `moves`.
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
203 |
204 |
# Version-specific attribute names used by the accessor helpers below
# (get_method_function, iterkeys, ...) via operator.attrgetter/getattr.
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_code = "__code__"
    _func_defaults = "__defaults__"

    _iterkeys = "keys"
    _itervalues = "values"
    _iteritems = "items"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_code = "func_code"
    _func_defaults = "func_defaults"

    _iterkeys = "iterkeys"
    _itervalues = "itervalues"
    _iteritems = "iteritems"
225 |
226 |
# Portable next(): the builtin exists on 2.6+/3.x; on older interpreters
# fall back to calling the iterator's .next() method directly.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator
233 |
234 |
if PY3:
    def get_unbound_function(unbound):
        # Python 3 has no unbound methods; the function is returned as-is.
        return unbound

    # No compatibility shim needed: classes defining __next__ just work.
    Iterator = object

    def callable(obj):
        # Re-implementation of the builtin removed in early Python 3.
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    # Base class that maps the py2 .next() protocol onto __next__.
    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
255 |
256 |
# Version-neutral accessors for method/function internals; the attribute
# names were chosen above based on the running interpreter.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
261 |
262 |
# Dict iteration helpers: dispatch to keys/values/items (py3) or
# iterkeys/itervalues/iteritems (py2) via the module-level name constants.

def iterkeys(d):
    """Return an iterator over the keys of a dictionary."""
    return iter(getattr(d, _iterkeys)())

def itervalues(d):
    """Return an iterator over the values of a dictionary."""
    return iter(getattr(d, _itervalues)())

def iteritems(d):
    """Return an iterator over the (key, value) pairs of a dictionary."""
    return iter(getattr(d, _iteritems)())
274 |
275 |
if PY3:
    def b(s):
        # Byte literal: latin-1 maps code points 0-255 one-to-one to bytes.
        return s.encode("latin-1")
    def u(s):
        return s
    if sys.version_info[1] <= 1:
        # Python 3.0/3.1: bytes((i,)) is the only spelling available.
        def int2byte(i):
            return bytes((i,))
    else:
        # This is about 2x faster than the implementation above on 3.2+
        int2byte = operator.methodcaller("to_bytes", 1, "big")
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
else:
    def b(s):
        return s
    def u(s):
        # Interpret backslash escapes so u("\\u00e9") matches py3 behavior.
        return unicode(s, "unicode_escape")
    int2byte = chr
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
300 |
301 |
if PY3:
    # exec is a real function on py3; grab it via getattr so this module
    # still parses on py2 where `exec` is a keyword.
    import builtins
    exec_ = getattr(builtins, "exec")


    def reraise(tp, value, tb=None):
        # Preserve an explicitly supplied traceback; otherwise re-raise as-is.
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value


    print_ = getattr(builtins, "print")
    del builtins

else:
    def exec_(code, globs=None, locs=None):
        """Execute code in a namespace."""
        if globs is None:
            # Default to the caller's globals/locals, one frame up.
            frame = sys._getframe(1)
            globs = frame.f_globals
            if locs is None:
                locs = frame.f_locals
            del frame
        elif locs is None:
            locs = globs
        # Wrapped in exec_ of a string so the py2-only statement syntax
        # does not break parsing of this file under py3.
        exec("""exec code in globs, locs""")


    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")


    def print_(*args, **kwargs):
        """The new-style print function."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            # Matches py3 print(): a None file is a silent no-op.
            return
        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            # Promote the separators to unicode if any argument is unicode,
            # to avoid implicit-encoding UnicodeErrors when joining.
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)

_add_doc(reraise, """Reraise an exception.""")
381 |
382 |
def with_metaclass(meta, base=object):
    """Create a base class with a metaclass.

    Returns a fresh class named ``NewBase`` built by *meta* on top of
    *base*, usable on both Python 2 and 3 as an inheritance shim.
    """
    namespace = {}
    return meta("NewBase", (base,), namespace)
386 |
--------------------------------------------------------------------------------
/src/urllib3/packages/ssl_match_hostname/__init__.py:
--------------------------------------------------------------------------------
# Prefer implementations in this order: stdlib (fast, maintained) ->
# pypi backport -> the vendored copy shipped alongside this package.
try:
    # Python 3.2+
    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        # Backport of the function from a pypi module
        from backports.ssl_match_hostname import CertificateError, match_hostname
    except ImportError:
        # Our vendored copy
        from ._implementation import CertificateError, match_hostname

# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostname')
14 |
--------------------------------------------------------------------------------
/src/urllib3/packages/ssl_match_hostname/_implementation.py:
--------------------------------------------------------------------------------
1 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
2 |
3 | # Note: This file is under the PSF license as the code comes from the python
4 | # stdlib. http://docs.python.org/3/license.html
5 |
6 | import re
7 |
8 | __version__ = '3.4.0.2'
9 |
class CertificateError(ValueError):
    """Raised when a certificate does not match the expected hostname."""
    pass
12 |
13 |
14 | def _dnsname_match(dn, hostname, max_wildcards=1):
15 | """Matching according to RFC 6125, section 6.4.3
16 |
17 | http://tools.ietf.org/html/rfc6125#section-6.4.3
18 | """
19 | pats = []
20 | if not dn:
21 | return False
22 |
23 | # Ported from python3-syntax:
24 | # leftmost, *remainder = dn.split(r'.')
25 | parts = dn.split(r'.')
26 | leftmost = parts[0]
27 | remainder = parts[1:]
28 |
29 | wildcards = leftmost.count('*')
30 | if wildcards > max_wildcards:
31 | # Issue #17980: avoid denials of service by refusing more
32 | # than one wildcard per fragment. A survey of established
33 | # policy among SSL implementations showed it to be a
34 | # reasonable choice.
35 | raise CertificateError(
36 | "too many wildcards in certificate DNS name: " + repr(dn))
37 |
38 | # speed up common case w/o wildcards
39 | if not wildcards:
40 | return dn.lower() == hostname.lower()
41 |
42 | # RFC 6125, section 6.4.3, subitem 1.
43 | # The client SHOULD NOT attempt to match a presented identifier in which
44 | # the wildcard character comprises a label other than the left-most label.
45 | if leftmost == '*':
46 | # When '*' is a fragment by itself, it matches a non-empty dotless
47 | # fragment.
48 | pats.append('[^.]+')
49 | elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
50 | # RFC 6125, section 6.4.3, subitem 3.
51 | # The client SHOULD NOT attempt to match a presented identifier
52 | # where the wildcard character is embedded within an A-label or
53 | # U-label of an internationalized domain name.
54 | pats.append(re.escape(leftmost))
55 | else:
56 | # Otherwise, '*' matches any dotless string, e.g. www*
57 | pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
58 |
59 | # add the remaining fragments, ignore any wildcards
60 | for frag in remainder:
61 | pats.append(re.escape(frag))
62 |
63 | pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
64 | return pat.match(hostname)
65 |
66 |
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate")
    dnsnames = []
    # Prefer subjectAltName DNS entries; any single match succeeds.
    san = cert.get('subjectAltName', ())
    for key, value in san:
        if key == 'DNS':
            if _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    # No candidate matched: report all the names we tried, for diagnostics.
    if len(dnsnames) > 1:
        raise CertificateError("hostname %r "
                               "doesn't match either of %s"
                               % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r "
                               "doesn't match %r"
                               % (hostname, dnsnames[0]))
    else:
        raise CertificateError("no appropriate commonName or "
                               "subjectAltName fields were found")
106 |
--------------------------------------------------------------------------------
/src/urllib3/poolmanager.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | try: # Python 3
4 | from urllib.parse import urljoin
5 | except ImportError:
6 | from urlparse import urljoin
7 |
8 | from ._collections import RecentlyUsedContainer
9 | from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
10 | from .connectionpool import port_by_scheme
11 | from .exceptions import LocationValueError
12 | from .request import RequestMethods
13 | from .util.url import parse_url
14 | from .util.retry import Retry
15 |
16 |
__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']


# Maps a URL scheme to the ConnectionPool subclass that can talk to it.
pool_classes_by_scheme = {
    'http': HTTPConnectionPool,
    'https': HTTPSConnectionPool,
}

log = logging.getLogger(__name__)

# Keyword arguments meaningful only for HTTPS pools; stripped before
# constructing plain HTTP pools (see PoolManager._new_pool).
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version')
29 |
30 |
class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    """

    # Overridden by ProxyManager; None means requests go direct to the host.
    proxy = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        # LRU container of pools; evicted pools are closed by dispose_func.
        self.pools = RecentlyUsedContainer(num_pools,
                                           dispose_func=lambda p: p.close())

    def _new_pool(self, scheme, host, port):
        """
        Create a new :class:`ConnectionPool` based on host, port and scheme.

        This method is used to actually create the connection pools handed out
        by :meth:`connection_from_url` and companion methods. It is intended
        to be overridden for customization.
        """
        pool_cls = pool_classes_by_scheme[scheme]
        kwargs = self.connection_pool_kw
        if scheme == 'http':
            # Copy before mutating so the shared kwargs stay intact, then
            # drop SSL-only options that HTTPConnectionPool would reject.
            kwargs = self.connection_pool_kw.copy()
            for kw in SSL_KEYWORDS:
                kwargs.pop(kw, None)

        return pool_cls(host, port, **kwargs)

    def clear(self):
        """
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()

    def connection_from_host(self, host, port=None, scheme='http'):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``.
        """

        if not host:
            raise LocationValueError("No host specified.")

        scheme = scheme or 'http'
        port = port or port_by_scheme.get(scheme, 80)
        pool_key = (scheme, host, port)

        # Lock so concurrent callers cannot create duplicate pools.
        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            # Make a fresh ConnectionPool of the desired type
            pool = self._new_pool(scheme, host, port)
            self.pools[pool_key] = pool

        return pool

    def connection_from_url(self, url):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url` but
        doesn't pass any additional parameters to the
        :class:`urllib3.connectionpool.ConnectionPool` constructor.

        Additional parameters are taken from the :class:`.PoolManager`
        constructor.
        """
        u = parse_url(url)
        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        # Redirects are handled here (cross-host capable), not by the pool.
        kw['assert_same_host'] = False
        kw['redirect'] = False
        if 'headers' not in kw:
            kw['headers'] = self.headers

        if self.proxy is not None and u.scheme == "http":
            # Proxied plain-HTTP requests must send the absolute URL.
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 7231, Section 6.4.4
        if response.status == 303:
            method = 'GET'

        retries = kw.get('retries')
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect)

        # increment() raises MaxRetryError once the redirect budget is spent.
        kw['retries'] = retries.increment(method, redirect_location)
        kw['redirect'] = redirect

        log.info("Redirecting %s -> %s" % (url, redirect_location))
        return self.urlopen(method, redirect_location, **kw)
175 |
176 |
class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    """

    def __init__(self, proxy_url, num_pools=10, headers=None,
                 proxy_headers=None, **connection_pool_kw):

        # Accept an existing pool object as the proxy specification.
        if isinstance(proxy_url, HTTPConnectionPool):
            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
                                        proxy_url.port)
        proxy = parse_url(proxy_url)
        if not proxy.port:
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        assert proxy.scheme in ("http", "https"), \
            'Not supported proxy scheme %s' % proxy.scheme

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}

        # Threaded through to every ConnectionPool this manager creates.
        connection_pool_kw['_proxy'] = self.proxy
        connection_pool_kw['_proxy_headers'] = self.proxy_headers

        super(ProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw)

    def connection_from_host(self, host, port=None, scheme='http'):
        # HTTPS tunnels (CONNECT) need a pool per destination host.
        if scheme == "https":
            return super(ProxyManager, self).connection_from_host(
                host, port, scheme)

        # Plain HTTP is sent to the proxy itself, so one pool suffices.
        return super(ProxyManager, self).connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme)

    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {'Accept': '*/*'}

        netloc = parse_url(url).netloc
        if netloc:
            headers_['Host'] = netloc

        # User-supplied headers win over the defaults above.
        if headers:
            headers_.update(headers)
        return headers_

    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)

        if u.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For HTTP, we'll definitely
            # need to set 'Host' at the very least.
            headers = kw.get('headers', self.headers)
            kw['headers'] = self._set_proxy_headers(url, headers)

        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
262 |
263 |
def proxy_from_url(url, **kw):
    """Convenience shortcut: build a :class:`ProxyManager` for *url*."""
    manager = ProxyManager(proxy_url=url, **kw)
    return manager
266 |
--------------------------------------------------------------------------------
/src/urllib3/request.py:
--------------------------------------------------------------------------------
1 | try:
2 | from urllib.parse import urlencode
3 | except ImportError:
4 | from urllib import urlencode
5 |
6 | from .filepost import encode_multipart_formdata
7 |
8 |
9 | __all__ = ['RequestMethods']
10 |
11 |
class RequestMethods(object):
    """
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
    (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request, it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    # HTTP methods whose fields are encoded into the URL query string.
    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])

    def __init__(self, headers=None):
        self.headers = headers or {}

    def urlopen(self, method, url, body=None, headers=None,
                encode_multipart=True, multipart_boundary=None,
                **kw): # Abstract
        # BUGFIX: was `raise NotImplemented(...)` -- NotImplemented is a
        # non-callable singleton, not an exception, so that raised a
        # confusing TypeError instead of signalling the abstract method.
        raise NotImplementedError(
            "Classes extending RequestMethods must implement "
            "their own ``urlopen`` method.")

    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        """
        method = method.upper()

        if method in self._encode_url_methods:
            return self.request_encode_url(method, url, fields=fields,
                                           headers=headers,
                                           **urlopen_kw)
        else:
            return self.request_encode_body(method, url, fields=fields,
                                            headers=headers,
                                            **urlopen_kw)

    def request_encode_url(self, method, url, fields=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        """
        if fields:
            url += '?' + urlencode(fields)
        return self.urlopen(method, url, **urlopen_kw)

    def request_encode_body(self, method, url, fields=None, headers=None,
                            encode_multipart=True, multipart_boundary=None,
                            **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
        safe to use it in other times too. However, it may break request
        signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        """
        if encode_multipart:
            body, content_type = encode_multipart_formdata(
                fields or {}, boundary=multipart_boundary)
        else:
            body, content_type = (urlencode(fields or {}),
                                  'application/x-www-form-urlencoded')

        if headers is None:
            headers = self.headers

        # Content-Type must reflect the encoding chosen above; caller
        # headers are merged on top but cannot sensibly override it.
        headers_ = {'Content-Type': content_type}
        headers_.update(headers)

        return self.urlopen(method, url, body=body, headers=headers_,
                            **urlopen_kw)
136 |
--------------------------------------------------------------------------------
/src/urllib3/response.py:
--------------------------------------------------------------------------------
1 | import zlib
2 | import io
3 | from socket import timeout as SocketTimeout
4 |
5 | from ._collections import HTTPHeaderDict
6 | from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
7 | from .packages.six import string_types as basestring, binary_type
8 | from .connection import HTTPException, BaseSSLError
9 | from .util.response import is_fp_closed
10 |
11 |
12 |
class DeflateDecoder(object):
    """Incremental decoder for ``Content-Encoding: deflate`` bodies.

    Some servers send raw DEFLATE data (RFC 1951) instead of the
    zlib-wrapped stream (RFC 1950) the encoding name implies. We first try
    the zlib-wrapped format; if the very first decode attempt raises
    ``zlib.error``, the buffered input is replayed through a raw-deflate
    decompressor instead.
    """

    def __init__(self):
        self._first_try = True  # still probing which deflate flavour this is
        self._data = b''        # input buffered while probing
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        # Delegate everything else (e.g. ``flush``, ``unused_data``) to the
        # underlying zlib decompressor object.
        return getattr(self._obj, name)

    def decompress(self, data):
        """Decompress ``data``, auto-detecting raw vs. zlib-wrapped deflate."""
        if not data:
            return data

        if not self._first_try:
            return self._obj.decompress(data)

        # Buffer the raw input so it can be replayed through a raw-deflate
        # decompressor if the zlib-wrapped attempt turns out to be wrong.
        self._data += data
        try:
            decompressed = self._obj.decompress(data)
            if decompressed:
                # Format settled: drop the probe buffer. Previously the
                # buffer kept growing for the whole stream, retaining the
                # entire compressed body in memory.
                self._first_try = False
                self._data = None
            return decompressed
        except zlib.error:
            # Not zlib-wrapped; switch to raw deflate and replay the input.
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None
37 |
38 |
39 | def _get_decoder(mode):
40 | if mode == 'gzip':
41 | return zlib.decompressobj(16 + zlib.MAX_WBITS)
42 |
43 | return DeflateDecoder()
44 |
45 |
class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, attempts to decode specific content-encoding's based on
        headers (like 'gzip' and 'deflate') will be made and decoded data
        returned; if False, the raw data is used instead.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.
    """

    #: Content-encoding values that ``read()`` can decode transparently.
    CONTENT_DECODERS = ['gzip', 'deflate']
    #: Status codes treated as redirects by ``get_redirect_location()``.
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None):

        # Copy incoming headers into a case-insensitive, multi-value mapping.
        self.headers = HTTPHeaderDict()
        if headers:
            self.headers.update(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content

        self._decoder = None  # created lazily in read() from content-encoding
        self._body = None  # cached (possibly decoded) body
        self._fp = None  # underlying file-like object, if body is a stream
        self._original_response = original_response
        self._fp_bytes_read = 0  # raw (wire) bytes consumed so far

        # A string/bytes body is cached directly; a file-like body is kept
        # as a stream in self._fp below.
        if body and isinstance(body, (basestring, binary_type)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, 'read'):
            self._fp = body

        # Eagerly consume the stream when requested and nothing cached yet.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def get_redirect_location(self):
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get('location')

        return False

    def release_conn(self):
        """Return the underlying connection to its pool, if we hold both a
        pool and a connection; afterwards the connection is forgotten."""
        if not self._pool or not self._connection:
            return

        self._pool._put_conn(self._connection)
        self._connection = None

    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
        # NOTE(review): an empty (falsy) cached body falls through to a
        # re-read of the file object — confirm this is intended.
        if self._body:
            return self._body

        if self._fp:
            return self.read(cache_content=True)

    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get('content-encoding', '').lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
        if decode_content is None:
            # Fall back to the instance-wide default set at construction.
            decode_content = self.decode_content

        if self._fp is None:
            return

        # True once the underlying stream is exhausted, so any data still
        # buffered in the decoder gets flushed into the result below.
        flush_decoder = False

        try:
            try:
                if amt is None:
                    # cStringIO doesn't like amt=None
                    data = self._fp.read()
                    flush_decoder = True
                else:
                    cache_content = False
                    data = self._fp.read(amt)
                    if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                        # Close the connection when no data is returned
                        #
                        # This is redundant to what httplib/http.client _should_
                        # already do. However, versions of python released before
                        # December 15, 2012 (http://bugs.python.org/issue16298) do
                        # not properly close the connection in all cases. There is
                        # no harm in redundantly calling close.
                        self._fp.close()
                        flush_decoder = True

            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if not 'read operation timed out' in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise

                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except HTTPException as e:
                # This includes IncompleteRead.
                raise ProtocolError('Connection broken: %r' % e, e)

            self._fp_bytes_read += len(data)

            try:
                if decode_content and self._decoder:
                    data = self._decoder.decompress(data)
            except (IOError, zlib.error) as e:
                raise DecodeError(
                    "Received response with content-encoding: %s, but "
                    "failed to decode it." % content_encoding, e)

            # Stream is exhausted: drain whatever the decoder still holds.
            if flush_decoder and decode_content and self._decoder:
                buf = self._decoder.decompress(binary_type())
                data += buf + self._decoder.flush()

            if cache_content:
                self._body = data

            return data

        finally:
            # Hand the connection back to the pool once httplib reports the
            # original response fully consumed/closed.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        while not is_fp_closed(self._fp):
            data = self.read(amt=amt, decode_content=decode_content)

            if data:
                yield data

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """

        # getheaders() may repeat a header name; add() keeps every value.
        headers = HTTPHeaderDict()
        for k, v in r.getheaders():
            headers.add(k, v)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        return ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        """Return the full (case-insensitive) header mapping."""
        return self.headers

    def getheader(self, name, default=None):
        """Return a single header value, or ``default`` if absent."""
        return self.headers.get(name, default)

    # Overrides from io.IOBase
    def close(self):
        """Close the underlying file object (the pooled connection, if any,
        is not released here — see :meth:`release_conn`)."""
        if not self.closed:
            self._fp.close()

    @property
    def closed(self):
        # No file object at all counts as closed.
        if self._fp is None:
            return True
        elif hasattr(self._fp, 'closed'):
            return self._fp.closed
        elif hasattr(self._fp, 'isclosed'):  # Python 2
            return self._fp.isclosed()
        else:
            # Unknown file-like object: assume closed rather than readable.
            return True

    def fileno(self):
        """Return the file descriptor of the underlying file object.

        :raises IOError: if there is no file object or it has no descriptor.
        """
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError("The file-like object this HTTPResponse is wrapped "
                          "around has no file descriptor")

    def flush(self):
        """Flush the underlying file object, when it supports flushing."""
        if self._fp is not None and hasattr(self._fp, 'flush'):
            return self._fp.flush()

    def readable(self):
        """io.IOBase hook: this response is always readable."""
        return True
321 |
--------------------------------------------------------------------------------
/src/urllib3/util/__init__.py:
--------------------------------------------------------------------------------
1 | # For backwards compatibility, provide imports that used to be here.
2 | from .connection import is_connection_dropped
3 | from .request import make_headers
4 | from .response import is_fp_closed
5 | from .ssl_ import (
6 | SSLContext,
7 | HAS_SNI,
8 | assert_fingerprint,
9 | resolve_cert_reqs,
10 | resolve_ssl_version,
11 | ssl_wrap_socket,
12 | )
13 | from .timeout import (
14 | current_time,
15 | Timeout,
16 | )
17 |
18 | from .retry import Retry
19 | from .url import (
20 | get_host,
21 | parse_url,
22 | split_first,
23 | Url,
24 | )
25 |
--------------------------------------------------------------------------------
/src/urllib3/util/connection.py:
--------------------------------------------------------------------------------
1 | import socket
2 | try:
3 | from select import poll, POLLIN
4 | except ImportError: # `poll` doesn't exist on OSX and other platforms
5 | poll = False
6 | try:
7 | from select import select
8 | except ImportError: # `select` doesn't exist on AppEngine.
9 | select = False
10 |
11 |
def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if sock is False:  # Platform-specific: AppEngine exposes no socket.
        return False
    if sock is None:  # Connection already closed (such as by httplib).
        return True

    if poll:
        # Preferred path: poll() scales better than select() and is
        # available on this platform.
        watcher = poll()
        watcher.register(sock, POLLIN)
        for (descriptor, _event) in watcher.poll(0.0):
            if descriptor == sock.fileno():
                # Either data is buffered (bad), or the connection is dropped.
                return True
        return

    if not select:  # Platform-specific: AppEngine has neither poll nor select.
        return False

    try:
        return select([sock], [], [], 0.0)[0]
    except socket.error:
        return True
44 |
45 |
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None, socket_options=None):
    """Connect to *address* and return the socket object.

    Convenience function. Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object. Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect. If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used. If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    last_error = None
    for family, socktype, proto, _canonname, sockaddr in socket.getaddrinfo(
            host, port, 0, socket.SOCK_STREAM):
        sock = None
        try:
            sock = socket.socket(family, socktype, proto)
            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            # Apply caller-supplied socket options before connecting.
            # This is the only addition urllib3 makes to this function.
            for opt in (socket_options or ()):
                sock.setsockopt(*opt)
            sock.connect(sockaddr)
            return sock

        except socket.error as e:
            last_error = e
            if sock is not None:
                sock.close()

    if last_error is not None:
        raise last_error
    raise socket.error("getaddrinfo returns an empty list")
88 |
89 |
90 | def _set_socket_options(sock, options):
91 | if options is None:
92 | return
93 |
94 | for opt in options:
95 | sock.setsockopt(*opt)
96 |
--------------------------------------------------------------------------------
/src/urllib3/util/request.py:
--------------------------------------------------------------------------------
1 | from base64 import b64encode
2 |
3 | from ..packages.six import b
4 |
ACCEPT_ENCODING = 'gzip,deflate'


def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
    """
    Build a dict of request headers from common shortcut options.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    """
    headers = {}

    if accept_encoding:
        if isinstance(accept_encoding, list):
            accept_encoding = ','.join(accept_encoding)
        elif not isinstance(accept_encoding, str):
            # A truthy non-string (e.g. ``True``) selects the default set.
            accept_encoding = ACCEPT_ENCODING
        headers['accept-encoding'] = accept_encoding

    if user_agent:
        headers['user-agent'] = user_agent

    if keep_alive:
        headers['connection'] = 'keep-alive'

    if disable_cache:
        headers['cache-control'] = 'no-cache'

    if basic_auth:
        encoded = b64encode(b(basic_auth)).decode('utf-8')
        headers['authorization'] = 'Basic ' + encoded

    if proxy_basic_auth:
        encoded = b64encode(b(proxy_basic_auth)).decode('utf-8')
        headers['proxy-authorization'] = 'Basic ' + encoded

    return headers
72 |
--------------------------------------------------------------------------------
/src/urllib3/util/response.py:
--------------------------------------------------------------------------------
def is_fp_closed(obj):
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    """
    sentinel = object()
    inner = getattr(obj, 'fp', sentinel)
    if inner is not sentinel:
        # Containers such as httplib's HTTPResponse release their inner
        # file object (``fp``) on exhaustion; a missing inner object means
        # the response is done.
        return inner is None

    return obj.closed
14 |
--------------------------------------------------------------------------------
/src/urllib3/util/retry.py:
--------------------------------------------------------------------------------
1 | import time
2 | import logging
3 |
4 | from ..exceptions import (
5 | ProtocolError,
6 | ConnectTimeoutError,
7 | ReadTimeoutError,
8 | MaxRetryError,
9 | )
10 | from ..packages import six
11 |
12 |
13 | log = logging.getLogger(__name__)
14 |
15 |
class Retry(object):
    """ Retry configuration.

    Each retry attempt will create a new Retry object with updated values, so
    they can be safely reused.

    Retries can be defined as a default for a pool::

        retries = Retry(connect=5, read=2, redirect=5)
        http = PoolManager(retries=retries)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', retries=Retry(10))

    Retries can be disabled by passing ``False``::

        response = http.request('GET', 'http://example.com/', retries=False)

    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
    retries are disabled, in which case the causing exception will be raised.


    :param int total:
        Total number of retries to allow. Takes precedence over other counts.

        Set to ``None`` to remove this constraint and fall back on other
        counts. It's a good idea to set this to some sensibly-high value to
        account for unexpected edge cases and avoid infinite retry loops.

        Set to ``0`` to fail on the first retry.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int connect:
        How many connection-related errors to retry on.

        These are errors raised before the request is sent to the remote server,
        which we assume has not triggered the server to process the request.

        Set to ``0`` to fail on the first retry of this type.

    :param int read:
        How many times to retry on read errors.

        These errors are raised after the request was sent to the server, so the
        request may have side-effects.

        Set to ``0`` to fail on the first retry of this type.

    :param int redirect:
        How many redirects to perform. Limit this to avoid infinite redirect
        loops.

        A redirect is a HTTP response with a status code 301, 302, 303, 307 or
        308.

        Set to ``0`` to fail on the first retry of this type.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param iterable method_whitelist:
        Set of uppercased HTTP method verbs that we should retry on.

        By default, we only retry on methods which are considered to be
        idempotent (multiple requests with the same parameters end with the
        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.

    :param iterable status_forcelist:
        A set of HTTP status codes that we should force a retry on.

        By default, this is disabled with ``None``.

    :param float backoff_factor:
        A backoff factor to apply between attempts. urllib3 will sleep for::

            {backoff factor} * (2 ^ ({number of total retries} - 1))

        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
        for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
        than :attr:`Retry.MAX_BACKOFF`.

        By default, backoff is disabled (set to 0).

    :param bool raise_on_redirect: Whether, if the number of redirects is
        exhausted, to raise a MaxRetryError, or to return a response with a
        response code in the 3xx range.
    """

    #: Methods considered safe to retry (idempotent verbs).
    DEFAULT_METHOD_WHITELIST = frozenset([
        'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])

    #: Maximum backoff time.
    BACKOFF_MAX = 120

    def __init__(self, total=10, connect=None, read=None, redirect=None,
                 method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
                 backoff_factor=0, raise_on_redirect=True, _observed_errors=0):

        self.total = total
        self.connect = connect
        self.read = read

        # Disabling retries entirely (total=False) or redirects explicitly
        # (redirect=False) means: never follow, and don't raise for it.
        if redirect is False or total is False:
            redirect = 0
            raise_on_redirect = False

        self.redirect = redirect
        self.status_forcelist = status_forcelist or set()
        self.method_whitelist = method_whitelist
        self.backoff_factor = backoff_factor
        self.raise_on_redirect = raise_on_redirect
        self._observed_errors = _observed_errors  # TODO: use .history instead?

    def new(self, **kw):
        """Return a copy of this Retry with the given fields overridden.

        Used by :meth:`increment` to produce the next immutable state.
        """
        params = dict(
            total=self.total,
            connect=self.connect, read=self.read, redirect=self.redirect,
            method_whitelist=self.method_whitelist,
            status_forcelist=self.status_forcelist,
            backoff_factor=self.backoff_factor,
            raise_on_redirect=self.raise_on_redirect,
            _observed_errors=self._observed_errors,
        )
        params.update(kw)
        return type(self)(**params)

    @classmethod
    def from_int(cls, retries, redirect=True, default=None):
        """ Backwards-compatibility for the old retries format."""
        if retries is None:
            retries = default if default is not None else cls.DEFAULT

        if isinstance(retries, Retry):
            return retries

        # ``bool(x) and None`` yields None when truthy (defer to ``total``)
        # and False when falsy (disable redirects outright).
        redirect = bool(redirect) and None
        new_retries = cls(retries, redirect=redirect)
        log.debug("Converted retries value: %r -> %r" % (retries, new_retries))
        return new_retries

    def get_backoff_time(self):
        """ Formula for computing the current backoff

        :rtype: float
        """
        # No backoff before the second observed error.
        if self._observed_errors <= 1:
            return 0

        # Exponential: factor * 2^(errors - 1), capped at BACKOFF_MAX.
        backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
        return min(self.BACKOFF_MAX, backoff_value)

    def sleep(self):
        """ Sleep between retry attempts using an exponential backoff.

        By default, the backoff factor is 0 and this method will return
        immediately.
        """
        backoff = self.get_backoff_time()
        if backoff <= 0:
            return
        time.sleep(backoff)

    def _is_connection_error(self, err):
        """ Errors when we're fairly sure that the server did not receive the
        request, so it should be safe to retry.
        """
        return isinstance(err, ConnectTimeoutError)

    def _is_read_error(self, err):
        """ Errors that occur after the request has been started, so we can't
        assume that the server did not process any of it.
        """
        return isinstance(err, (ReadTimeoutError, ProtocolError))

    def is_forced_retry(self, method, status_code):
        """ Is this method/response retryable? (Based on method/codes whitelists)
        """
        # Non-whitelisted methods are never force-retried.
        if self.method_whitelist and method.upper() not in self.method_whitelist:
            return False

        return self.status_forcelist and status_code in self.status_forcelist

    def is_exhausted(self):
        """ Are we out of retries?
        """
        retry_counts = (self.total, self.connect, self.read, self.redirect)
        # NOTE(review): filter(None, ...) drops 0 as well as None, so a count
        # sitting exactly at 0 is ignored here; exhaustion only triggers once
        # some count has been decremented below zero.
        retry_counts = list(filter(None, retry_counts))
        if not retry_counts:
            return False

        return min(retry_counts) < 0

    def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None):
        """ Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.

        :return: A new ``Retry`` object.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)

        total = self.total
        if total is not None:
            total -= 1

        _observed_errors = self._observed_errors
        connect = self.connect
        read = self.read
        redirect = self.redirect

        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
            _observed_errors += 1

        elif error and self._is_read_error(error):
            # Read retry?
            if read is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif read is not None:
                read -= 1
            _observed_errors += 1

        elif response and response.get_redirect_location():
            # Redirect retry?
            # Redirects do not count as observed errors (no backoff growth).
            if redirect is not None:
                redirect -= 1

        else:
            # FIXME: Nothing changed, scenario doesn't make sense.
            _observed_errors += 1

        new_retry = self.new(
            total=total,
            connect=connect, read=read, redirect=redirect,
            _observed_errors=_observed_errors)

        if new_retry.is_exhausted():
            raise MaxRetryError(_pool, url, error)

        log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry))

        return new_retry


    def __repr__(self):
        return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
                'read={self.read}, redirect={self.redirect})').format(
            cls=type(self), self=self)
276 |
277 |
# For backwards compatibility (equivalent to pre-v1.9): urllib3 used to
# accept a bare integer retry count; three total retries mirrors that.
Retry.DEFAULT = Retry(3)
280 |
--------------------------------------------------------------------------------
/src/urllib3/util/ssl_.py:
--------------------------------------------------------------------------------
1 | from binascii import hexlify, unhexlify
2 | from hashlib import md5, sha1
3 |
4 | from ..exceptions import SSLError
5 |
6 |
7 | try: # Test for SSL features
8 | SSLContext = None
9 | HAS_SNI = False
10 |
11 | import ssl
12 | from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
13 | from ssl import SSLContext # Modern SSL?
14 | from ssl import HAS_SNI # Has SNI?
15 | except ImportError:
16 | pass
17 |
18 |
def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    """

    # Digest length (in bytes) -> hash constructor producing that length.
    known_digests = {
        16: md5,
        20: sha1
    }

    normalized = fingerprint.replace(':', '').lower()
    nbytes, remainder = divmod(len(normalized), 2)

    if remainder or nbytes not in known_digests:
        raise SSLError('Fingerprint is of invalid length.')

    # We need encode() here for py32; works on py2 and p33.
    expected = unhexlify(normalized.encode())

    actual = known_digests[nbytes](cert).digest()

    if actual != expected:
        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
                       .format(hexlify(expected),
                               hexlify(actual)))
53 |
54 |
def resolve_cert_reqs(candidate):
    """
    Resolves the argument to a numeric constant, which can be passed to
    the wrap_socket function/method from the ssl module.
    Defaults to :data:`ssl.CERT_NONE`.
    If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbrevation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.
    If it's neither `None` nor a string we assume it is already the numeric
    constant which can directly be passed to wrap_socket.
    """
    if candidate is None:
        return CERT_NONE

    if not isinstance(candidate, str):
        # Already a numeric ssl constant; hand it straight back.
        return candidate

    resolved = getattr(ssl, candidate, None)
    if resolved is not None:
        return resolved
    # Allow the abbreviated spelling, e.g. 'REQUIRED' for 'CERT_REQUIRED'.
    return getattr(ssl, 'CERT_' + candidate)
76 |
77 |
def resolve_ssl_version(candidate):
    """
    like resolve_cert_reqs
    """
    if candidate is None:
        return PROTOCOL_SSLv23

    if not isinstance(candidate, str):
        # Already a numeric ssl protocol constant.
        return candidate

    resolved = getattr(ssl, candidate, None)
    if resolved is not None:
        return resolved
    # Allow the abbreviated spelling, e.g. 'TLSv1' for 'PROTOCOL_TLSv1'.
    return getattr(ssl, 'PROTOCOL_' + candidate)
92 |
93 |
if SSLContext is not None:  # Python 3.2+
    def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                        ca_certs=None, server_hostname=None,
                        ssl_version=None):
        """
        All arguments except `server_hostname` have the same meaning as for
        :func:`ssl.wrap_socket`

        :param server_hostname:
            Hostname of the expected certificate
        """
        context = SSLContext(ssl_version)
        context.verify_mode = cert_reqs

        # Disable TLS compression to migitate CRIME attack (issue #309).
        # The flag value is hardcoded because ssl.OP_NO_COMPRESSION is not
        # available on all Python versions this file supports.
        OP_NO_COMPRESSION = 0x20000
        context.options |= OP_NO_COMPRESSION

        if ca_certs:
            try:
                context.load_verify_locations(ca_certs)
            # Py32 raises IOError
            # Py33 raises FileNotFoundError
            except Exception as e:  # Reraise as SSLError
                raise SSLError(e)
        if certfile:
            # FIXME: This block needs a test.
            context.load_cert_chain(certfile, keyfile)
        if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
            return context.wrap_socket(sock, server_hostname=server_hostname)
        return context.wrap_socket(sock)

else:  # Python 3.1 and earlier
    # Fallback: plain ssl.wrap_socket; no SNI support, so server_hostname
    # is accepted for interface parity but ignored.
    def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                        ca_certs=None, server_hostname=None,
                        ssl_version=None):
        return wrap_socket(sock, keyfile=keyfile, certfile=certfile,
                           ca_certs=ca_certs, cert_reqs=cert_reqs,
                           ssl_version=ssl_version)
133 |
--------------------------------------------------------------------------------
/src/urllib3/util/timeout.py:
--------------------------------------------------------------------------------
1 | # The default socket timeout, used by httplib to indicate that no timeout was
2 | # specified by the user
3 | from socket import _GLOBAL_DEFAULT_TIMEOUT
4 | import time
5 |
6 | from ..exceptions import TimeoutStateError
7 |
8 | # A sentinel value to indicate that no timeout was specified by the user in
9 | # urllib3
10 | _Default = object()
11 |
def current_time():
    """Return the current wall-clock time (monkeypatched in unit tests)."""
    return time.time()
17 |
18 |
class Timeout(object):
    """ Timeout configuration.

    Timeouts can be defined as a default for a pool::

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``::

        no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/', timeout=no_timeout)


    :param total:
        This combines the connect and read timeouts into one; the read timeout
        will be set to the time leftover from the connect attempt. In the
        event that both a connect timeout and a total are specified, or a read
        timeout and a total are specified, the shorter timeout will be applied.

        Defaults to None.

    :type total: integer, float, or None

    :param connect:
        The maximum amount of time to wait for a connection attempt to a server
        to succeed. Omitting the parameter will default the connect timeout to
        the system default, probably `the global default timeout in socket.py
        `_.
        None will set an infinite timeout for connection attempts.

    :type connect: integer, float, or None

    :param read:
        The maximum amount of time to wait between consecutive
        read operations for a response from the server. Omitting
        the parameter will default the read timeout to the system
        default, probably `the global default timeout in socket.py
        `_.
        None will set an infinite timeout.

    :type read: integer, float, or None

    .. note::

        Many factors can affect the total amount of time for urllib3 to return
        an HTTP response.

        For example, Python's DNS resolver does not obey the timeout specified
        on the socket. Other factors that can affect total request time include
        high CPU load, high swap, the program running at a low priority level,
        or other behaviors.

        In addition, the read and total timeouts only measure the time between
        read operations on the socket connecting the client and the server,
        not the total amount of time for the request to return a complete
        response. For most requests, the timeout is raised because the server
        has not sent the first byte in the specified time. This is not always
        the case; if a server streams one byte every fifteen seconds, a timeout
        of 20 seconds will not trigger, even though the request will take
        several minutes to complete.

        If your goal is to cut off any request after a set amount of wall clock
        time, consider having a second "watcher" thread to cut off a slow
        request.
    """

    #: A sentinel object representing the default timeout value
    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT

    def __init__(self, total=None, connect=_Default, read=_Default):
        # Validate everything up front so misconfiguration fails fast at
        # construction time rather than mid-request. ``_Default`` (module
        # sentinel) means "use the system default socket timeout".
        self._connect = self._validate_timeout(connect, 'connect')
        self._read = self._validate_timeout(read, 'read')
        self.total = self._validate_timeout(total, 'total')
        # Set by start_connect(); used to compute the remaining total budget.
        self._start_connect = None

    def __str__(self):
        return '%s(connect=%r, read=%r, total=%r)' % (
            type(self).__name__, self._connect, self._read, self.total)

    @classmethod
    def _validate_timeout(cls, value, name):
        """ Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If the type is not an integer or a float, or if it
            is a numeric value less than zero.
        """
        # The two sentinels pass through untouched.
        if value is _Default:
            return cls.DEFAULT_TIMEOUT

        if value is None or value is cls.DEFAULT_TIMEOUT:
            return value

        try:
            float(value)
        except (TypeError, ValueError):
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        try:
            if value < 0:
                raise ValueError("Attempted to set %s timeout to %s, but the "
                                 "timeout cannot be set to a value less "
                                 "than 0." % (name, value))
        except TypeError:  # Python 3
            # On Python 3, comparing a non-numeric type with 0 raises
            # TypeError; translate it into the same ValueError as above.
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        return value

    @classmethod
    def from_float(cls, timeout):
        """ Create a new Timeout from a legacy timeout value.

        The timeout value used by httplib.py sets the same timeout on the
        connect(), and recv() socket requests. This creates a :class:`Timeout`
        object that sets the individual timeouts to the ``timeout`` value
        passed to this function.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, sentinel default object, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        """
        return Timeout(read=timeout, connect=timeout)

    def clone(self):
        """ Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        """
        # We can't use copy.deepcopy because that will also create a new object
        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
        # detect the user default.
        return Timeout(connect=self._connect, read=self._read,
                       total=self.total)

    def start_connect(self):
        """ Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        """
        if self._start_connect is not None:
            raise TimeoutStateError("Timeout timer has already been started.")
        self._start_connect = current_time()
        return self._start_connect

    def get_connect_duration(self):
        """ Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        """
        if self._start_connect is None:
            raise TimeoutStateError("Can't get connect duration for timer "
                                    "that has not started.")
        return current_time() - self._start_connect

    @property
    def connect_timeout(self):
        """ Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        """
        if self.total is None:
            return self._connect

        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
            return self.total

        # Both connect and total were given: apply the stricter of the two.
        return min(self._connect, self.total)

    @property
    def read_timeout(self):
        """ Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

        If self.total is set, the read timeout is dependent on the amount of
        time taken by the connect timeout. If the connection time has not been
        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
        raised.

        :return: Value to use for the read timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        """
        # Both total and read are real numbers: remaining budget is
        # total minus time already spent connecting, capped by read.
        if (self.total is not None and
            self.total is not self.DEFAULT_TIMEOUT and
            self._read is not None and
            self._read is not self.DEFAULT_TIMEOUT):
            # In case the connect timeout has not yet been established.
            if self._start_connect is None:
                return self._read
            return max(0, min(self.total - self.get_connect_duration(),
                              self._read))
        # Only total is a real number: whatever is left of it.
        elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
            return max(0, self.total - self.get_connect_duration())
        else:
            return self._read
241 |
--------------------------------------------------------------------------------
/src/urllib3/util/url.py:
--------------------------------------------------------------------------------
1 | from collections import namedtuple
2 |
3 | from ..exceptions import LocationParseError
4 |
5 |
url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']


class Url(namedtuple('Url', url_attrs)):
    """
    Datastructure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`.
    """
    # Bug fix: this was previously ``slots = ()``, a typo that merely
    # created a useless ordinary class attribute. The special name is
    # ``__slots__``; declaring it empty stops instances from carrying a
    # per-instance ``__dict__``, as intended for a namedtuple subclass.
    __slots__ = ()

    def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
                query=None, fragment=None):
        # Allow any subset of components; missing ones default to None.
        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                       query, fragment)

    @property
    def hostname(self):
        """For backwards-compatibility with urlparse. We're nice like that."""
        return self.host

    @property
    def request_uri(self):
        """Absolute path including the query string."""
        uri = self.path or '/'

        if self.query is not None:
            uri += '?' + self.query

        return uri

    @property
    def netloc(self):
        """Network location including host and port"""
        if self.port:
            return '%s:%d' % (self.host, self.port)
        return self.host
42 |
43 |
def split_first(s, delims):
    """
    Split ``s`` on whichever delimiter in ``delims`` occurs first.

    Returns the part before the delimiter, the part after it, and the
    delimiter itself. If no delimiter is found, the first part is the
    whole input string and the delimiter is ``None``.

    Example::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    """
    best = None
    for delim in delims:
        pos = s.find(delim)
        if pos < 0:
            continue
        if best is None or pos < best[0]:
            best = (pos, delim)

    if best is None:
        return s, '', None

    pos, delim = best
    return s[:pos], s[pos + 1:], delim
75 |
76 |
def parse_url(url):
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.

    Partly backwards-compatible with :mod:`urlparse`.

    Example::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)

    :raises LocationParseError: if a non-numeric port is given.
    """

    # While this code has overlap with stdlib's urlparse, it is much
    # simplified for our needs and less annoying.
    # Additionally, this implementation does silly things to be optimal
    # on CPython.

    if not url:
        # Empty string (or None): every field is None.
        return Url()

    scheme = None
    auth = None
    host = None
    port = None
    path = None
    fragment = None
    query = None

    # Scheme, e.g. 'http' from 'http://host/...'
    if '://' in url:
        scheme, url = url.split('://', 1)

    # Find the earliest Authority Terminator
    # (http://tools.ietf.org/html/rfc3986#section-3.2)
    url, path_, delim = split_first(url, ['/', '?', '#'])

    if delim:
        # Reassemble the path (the matched delimiter belongs to it).
        path = delim + path_

    # Auth
    if '@' in url:
        # Last '@' denotes end of auth part
        auth, url = url.rsplit('@', 1)

    # IPv6 literal, e.g. '[::1]': keep the brackets as part of the host.
    if url and url[0] == '[':
        host, url = url.split(']', 1)
        host += ']'

    # Port
    if ':' in url:
        _host, port = url.split(':', 1)

        if not host:
            host = _host

        if port:
            # If given, ports must be integers.
            # NOTE(review): str.isdigit() also accepts non-ASCII digit
            # characters that int() rejects, which would raise ValueError
            # here instead of LocationParseError -- confirm if that matters.
            if not port.isdigit():
                raise LocationParseError(url)
            port = int(port)
        else:
            # Blank ports are cool, too. (rfc3986#section-3.2.3)
            port = None

    elif not host and url:
        host = url

    # No path at all: nothing left to split into fragment/query.
    if not path:
        return Url(scheme, auth, host, port, path, query, fragment)

    # Fragment
    if '#' in path:
        path, fragment = path.split('#', 1)

    # Query
    if '?' in path:
        path, query = path.split('?', 1)

    return Url(scheme, auth, host, port, path, query, fragment)
164 |
165 |
def get_host(url):
    """
    Deprecated. Use :func:`.parse_url` instead.

    Returns a ``(scheme, host, port)`` tuple; the scheme falls back
    to ``'http'`` when the URL does not specify one.
    """
    parsed = parse_url(url)
    return parsed.scheme or 'http', parsed.hostname, parsed.port
172 |
--------------------------------------------------------------------------------
/src/version:
--------------------------------------------------------------------------------
1 | v2.3
--------------------------------------------------------------------------------
/src/workflow/Notify.tgz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fniephaus/alfred-dropbox/635945c766f543f77e6310f47c9150fa061a8015/src/workflow/Notify.tgz
--------------------------------------------------------------------------------
/src/workflow/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding: utf-8
3 | #
4 | # Copyright (c) 2014 Dean Jackson
5 | #
6 | # MIT Licence. See http://opensource.org/licenses/MIT
7 | #
8 | # Created on 2014-02-15
9 | #
10 |
11 | """A helper library for `Alfred `_ workflows."""
12 |
13 | import os
14 |
15 | # Workflow objects
16 | from .workflow import Workflow, manager
17 | from .workflow3 import Variables, Workflow3
18 |
19 | # Exceptions
20 | from .workflow import PasswordNotFound, KeychainError
21 |
22 | # Icons
23 | from .workflow import (
24 | ICON_ACCOUNT,
25 | ICON_BURN,
26 | ICON_CLOCK,
27 | ICON_COLOR,
28 | ICON_COLOUR,
29 | ICON_EJECT,
30 | ICON_ERROR,
31 | ICON_FAVORITE,
32 | ICON_FAVOURITE,
33 | ICON_GROUP,
34 | ICON_HELP,
35 | ICON_HOME,
36 | ICON_INFO,
37 | ICON_NETWORK,
38 | ICON_NOTE,
39 | ICON_SETTINGS,
40 | ICON_SWIRL,
41 | ICON_SWITCH,
42 | ICON_SYNC,
43 | ICON_TRASH,
44 | ICON_USER,
45 | ICON_WARNING,
46 | ICON_WEB,
47 | )
48 |
49 | # Filter matching rules
50 | from .workflow import (
51 | MATCH_ALL,
52 | MATCH_ALLCHARS,
53 | MATCH_ATOM,
54 | MATCH_CAPITALS,
55 | MATCH_INITIALS,
56 | MATCH_INITIALS_CONTAIN,
57 | MATCH_INITIALS_STARTSWITH,
58 | MATCH_STARTSWITH,
59 | MATCH_SUBSTRING,
60 | )
61 |
62 |
__title__ = 'Alfred-Workflow'
# Read the version string from the bundled `version` file. A context
# manager is used so the file handle is closed deterministically instead
# of leaking until garbage collection (the original bare
# ``open(...).read()`` triggers ResourceWarning under CPython -W).
with open(os.path.join(os.path.dirname(__file__), 'version')) as _version_file:
    __version__ = _version_file.read()
__author__ = 'Dean Jackson'
__licence__ = 'MIT'
__copyright__ = 'Copyright 2014-2017 Dean Jackson'

# Explicit public API of the package.
__all__ = [
    'Variables',
    'Workflow',
    'Workflow3',
    'manager',
    'PasswordNotFound',
    'KeychainError',
    'ICON_ACCOUNT',
    'ICON_BURN',
    'ICON_CLOCK',
    'ICON_COLOR',
    'ICON_COLOUR',
    'ICON_EJECT',
    'ICON_ERROR',
    'ICON_FAVORITE',
    'ICON_FAVOURITE',
    'ICON_GROUP',
    'ICON_HELP',
    'ICON_HOME',
    'ICON_INFO',
    'ICON_NETWORK',
    'ICON_NOTE',
    'ICON_SETTINGS',
    'ICON_SWIRL',
    'ICON_SWITCH',
    'ICON_SYNC',
    'ICON_TRASH',
    'ICON_USER',
    'ICON_WARNING',
    'ICON_WEB',
    'MATCH_ALL',
    'MATCH_ALLCHARS',
    'MATCH_ATOM',
    'MATCH_CAPITALS',
    'MATCH_INITIALS',
    'MATCH_INITIALS_CONTAIN',
    'MATCH_INITIALS_STARTSWITH',
    'MATCH_STARTSWITH',
    'MATCH_SUBSTRING',
]
109 |
--------------------------------------------------------------------------------
/src/workflow/background.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding: utf-8
3 | #
4 | # Copyright (c) 2014 deanishe@deanishe.net
5 | #
6 | # MIT Licence. See http://opensource.org/licenses/MIT
7 | #
8 | # Created on 2014-04-06
9 | #
10 |
11 | """
12 | This module provides an API to run commands in background processes.
13 | Combine with the :ref:`caching API ` to work from cached data
14 | while you fetch fresh data in the background.
15 |
16 | See :ref:`the User Manual ` for more information
17 | and examples.
18 | """
19 |
20 | from __future__ import print_function, unicode_literals
21 |
22 | import sys
23 | import os
24 | import subprocess
25 | import pickle
26 |
27 | from workflow import Workflow
28 |
29 | __all__ = ['is_running', 'run_in_background']
30 |
31 | _wf = None
32 |
33 |
def wf():
    """Return the lazily-created module-level :class:`Workflow` object."""
    global _wf
    _wf = Workflow() if _wf is None else _wf
    return _wf
39 |
40 |
def _log():
    """Return the logger belonging to the module-level Workflow."""
    return wf().logger
43 |
44 |
def _arg_cache(name):
    """Return path to the pickle cache file for a task's arguments.

    :param name: name of task
    :type name: ``unicode``
    :returns: Path to cache file
    :rtype: ``unicode`` filepath

    """
    return wf().cachefile('{0}.argcache'.format(name))
55 |
56 |
def _pid_file(name):
    """Return path to the PID file used to track task ``name``.

    :param name: name of task
    :type name: ``unicode``
    :returns: Path to PID file for task
    :rtype: ``unicode`` filepath

    """
    return wf().cachefile('{0}.pid'.format(name))
67 |
68 |
69 | def _process_exists(pid):
70 | """Check if a process with PID ``pid`` exists.
71 |
72 | :param pid: PID to check
73 | :type pid: ``int``
74 | :returns: ``True`` if process exists, else ``False``
75 | :rtype: ``Boolean``
76 |
77 | """
78 | try:
79 | os.kill(pid, 0)
80 | except OSError: # not running
81 | return False
82 | return True
83 |
84 |
def is_running(name):
    """Test whether task ``name`` is currently running.

    Reads the task's PID file and checks the process table. A stale
    PID file (process no longer alive) is removed as a side effect.

    :param name: name of task
    :type name: unicode
    :returns: ``True`` if task with name ``name`` is running, else ``False``
    :rtype: bool

    """
    pidfile = _pid_file(name)
    if not os.path.exists(pidfile):
        return False

    with open(pidfile, 'rb') as fp:
        pid = int(fp.read().strip())

    if _process_exists(pid):
        return True

    # Process is gone; clean up the stale PID file.
    if os.path.exists(pidfile):
        os.unlink(pidfile)

    return False
108 |
109 |
def _background(stdin='/dev/null', stdout='/dev/null',
                stderr='/dev/null'):  # pragma: no cover
    """Fork the current process into a background daemon.

    Uses the classic UNIX double-fork plus ``setsid`` sequence so the
    daemon is fully detached from its controlling terminal and parent
    session, then redirects the standard streams.

    :param stdin: where to read input
    :type stdin: filepath
    :param stdout: where to write stdout output
    :type stdout: filepath
    :param stderr: where to write stderr output
    :type stderr: filepath

    """
    def _fork_and_exit_parent(errmsg):
        # Fork; the parent half exits immediately, the child continues.
        try:
            pid = os.fork()
            if pid > 0:
                os._exit(0)
        except OSError as err:
            _log().critical('%s: (%d) %s', errmsg, err.errno, err.strerror)
            raise err

    # Do first fork.
    _fork_and_exit_parent('fork #1 failed')

    # Decouple from parent environment.
    os.chdir(wf().workflowdir)
    os.setsid()

    # Do second fork, so the daemon can never reacquire a controlling
    # terminal (it is no longer a session leader).
    _fork_and_exit_parent('fork #2 failed')

    # Now I am a daemon!
    # Redirect standard file descriptors.
    # NOTE(review): ``open(..., 0)`` (unbuffered) with text mode is only
    # valid on Python 2; under Python 3 these calls would raise ValueError.
    si = open(stdin, 'r', 0)
    so = open(stdout, 'a+', 0)
    se = open(stderr, 'a+', 0)
    if hasattr(sys.stdin, 'fileno'):
        os.dup2(si.fileno(), sys.stdin.fileno())
    if hasattr(sys.stdout, 'fileno'):
        os.dup2(so.fileno(), sys.stdout.fileno())
    if hasattr(sys.stderr, 'fileno'):
        os.dup2(se.fileno(), sys.stderr.fileno())
152 |
153 |
def run_in_background(name, args, **kwargs):
    r"""Cache arguments then call this script again via :func:`subprocess.call`.

    :param name: name of task
    :type name: unicode
    :param args: arguments passed as first argument to :func:`subprocess.call`
    :param \**kwargs: keyword arguments to :func:`subprocess.call`
    :returns: exit code of sub-process
    :rtype: int

    When you call this function, it caches its arguments and then calls
    ``background.py`` in a subprocess. The Python subprocess will load the
    cached arguments, fork into the background, and then run the command you
    specified.

    This function will return as soon as the ``background.py`` subprocess has
    forked, returning the exit code of *that* process (i.e. not of the command
    you're trying to run).

    If that process fails, an error will be written to the log file.

    If a process is already running under the same name, this function will
    return immediately and will not run the specified command.

    """
    if is_running(name):
        _log().info('[%s] job already running', name)
        return

    argcache = _arg_cache(name)

    # Cache arguments for the child process to pick up in main()
    with open(argcache, 'wb') as file_obj:
        pickle.dump({'args': args, 'kwargs': kwargs}, file_obj)
    _log().debug('[%s] command cached: %s', name, argcache)

    # Call this script
    cmd = ['/usr/bin/python', __file__, name]
    _log().debug('[%s] passing job to background runner: %r', name, cmd)
    retcode = subprocess.call(cmd)
    if retcode:  # pragma: no cover
        # Bug fix: the original passed only ``retcode`` for the two
        # '%s'/'%d' placeholders, causing a logging formatting error.
        _log().error('[%s] background runner failed with %d', name, retcode)
    else:
        _log().debug('[%s] background job started', name)
    return retcode
199 |
200 |
def main(wf):  # pragma: no cover
    """Run command in a background process.

    Load cached arguments, fork into background, then call
    :meth:`subprocess.call` with cached arguments.

    :param wf: workflow object supplying ``args`` and ``logger``

    """
    log = wf.logger
    name = wf.args[0]
    argcache = _arg_cache(name)
    if not os.path.exists(argcache):
        log.critical('[%s] command cache not found: %r', name, argcache)
        return 1

    # Load cached arguments
    with open(argcache, 'rb') as file_obj:
        data = pickle.load(file_obj)

    # Cached arguments
    args = data['args']
    kwargs = data['kwargs']

    # Delete argument cache file so the job is only ever run once
    os.unlink(argcache)

    pidfile = _pid_file(name)

    # Fork to background
    _background()

    # Write PID to file *after* forking so it records the daemon's PID,
    # not the short-lived parent's.
    with open(pidfile, 'wb') as file_obj:
        file_obj.write(str(os.getpid()))

    # Run the command
    try:
        log.debug('[%s] running command: %r', name, args)

        retcode = subprocess.call(args, **kwargs)

        if retcode:
            log.error('[%s] command failed with status %d', name, retcode)

    finally:
        # Always remove the PID file so is_running() reports correctly.
        if os.path.exists(pidfile):
            os.unlink(pidfile)
        log.debug('[%s] job complete', name)
248 |
249 |
if __name__ == '__main__':  # pragma: no cover
    # Entry point when this file is invoked as the background-runner
    # subprocess by run_in_background() above.
    wf().run(main)
252 |
--------------------------------------------------------------------------------
/src/workflow/notify.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding: utf-8
3 | #
4 | # Copyright (c) 2015 deanishe@deanishe.net
5 | #
6 | # MIT Licence. See http://opensource.org/licenses/MIT
7 | #
8 | # Created on 2015-11-26
9 | #
10 |
11 | # TODO: Exclude this module from test and code coverage in py2.6
12 |
13 | """
14 | Post notifications via the macOS Notification Center. This feature
15 | is only available on Mountain Lion (10.8) and later. It will
16 | silently fail on older systems.
17 |
18 | The main API is a single function, :func:`~workflow.notify.notify`.
19 |
20 | It works by copying a simple application to your workflow's data
21 | directory. It replaces the application's icon with your workflow's
22 | icon and then calls the application to post notifications.
23 | """
24 |
25 | from __future__ import print_function, unicode_literals
26 |
27 | import os
28 | import plistlib
29 | import shutil
30 | import subprocess
31 | import sys
32 | import tarfile
33 | import tempfile
34 | import uuid
35 |
36 | import workflow
37 |
38 |
39 | _wf = None
40 | _log = None
41 |
42 |
#: Available system sounds from System Preferences > Sound > Sound Effects.
#: Used by :func:`validate_sound` for case-insensitive validation.
SOUNDS = (
    'Basso',
    'Blow',
    'Bottle',
    'Frog',
    'Funk',
    'Glass',
    'Hero',
    'Morse',
    'Ping',
    'Pop',
    'Purr',
    'Sosumi',
    'Submarine',
    'Tink',
)
60 |
61 |
def wf():
    """Return the shared :class:`workflow.Workflow` for this module.

    Returns:
        workflow.Workflow: lazily-created Workflow for current workflow.
    """
    global _wf
    _wf = workflow.Workflow() if _wf is None else _wf
    return _wf
72 |
73 |
def log():
    """Return the module logger, creating it on first use.

    Returns:
        logging.Logger: Logger shared by this module.
    """
    global _log
    _log = wf().logger if _log is None else _log
    return _log
84 |
85 |
def notifier_program():
    """Return path to the installed Notify.app applet executable.

    Returns:
        unicode: Path to Notify.app ``applet`` executable.
    """
    return wf().datafile('Notify.app/Contents/MacOS/applet')
93 |
94 |
def notifier_icon_path():
    """Return path to the icon file inside the installed Notify.app.

    Returns:
        unicode: Path to ``applet.icns`` within the app bundle.
    """
    return wf().datafile('Notify.app/Contents/Resources/applet.icns')
102 |
103 |
def install_notifier():
    """Extract ``Notify.app`` from the workflow to data directory.

    Changes the bundle ID of the installed app and gives it the
    workflow's icon.
    """
    archive = os.path.join(os.path.dirname(__file__), 'Notify.tgz')
    destdir = wf().datadir
    app_path = os.path.join(destdir, 'Notify.app')
    n = notifier_program()
    log().debug('installing Notify.app to %r ...', destdir)
    # NOTE(review): extractall() trusts the archive's member paths; the
    # bundled Notify.tgz ships with the workflow, so this is assumed safe
    # -- revisit if the archive is ever fetched from elsewhere.
    tgz = tarfile.open(archive, 'r:gz')
    tgz.extractall(destdir)
    assert os.path.exists(n), \
        'Notify.app could not be installed in %s' % destdir

    # Replace applet icon with this workflow's own icon
    icon = notifier_icon_path()
    workflow_icon = wf().workflowfile('icon.png')
    if os.path.exists(icon):
        os.unlink(icon)

    png_to_icns(workflow_icon, icon)

    # Set file icon
    # PyObjC isn't available for 2.6, so this is 2.7 only. Actually,
    # none of this code will "work" on pre-10.8 systems. Let it run
    # until I figure out a better way of excluding this module
    # from coverage in py2.6.
    if sys.version_info >= (2, 7):  # pragma: no cover
        from AppKit import NSWorkspace, NSImage

        ws = NSWorkspace.sharedWorkspace()
        img = NSImage.alloc().init()
        img.initWithContentsOfFile_(icon)
        ws.setIcon_forFile_options_(img, app_path, 0)

    # Change bundle ID of installed app so each workflow's notifications
    # are attributed to its own copy of the applet.
    ip_path = os.path.join(app_path, 'Contents/Info.plist')
    bundle_id = '{0}.{1}'.format(wf().bundleid, uuid.uuid4().hex)
    # NOTE(review): plistlib.readPlist/writePlist were removed in
    # Python 3.9; fine under the Python 2 this workflow targets.
    data = plistlib.readPlist(ip_path)
    log().debug('changing bundle ID to %r', bundle_id)
    data['CFBundleIdentifier'] = bundle_id
    plistlib.writePlist(data, ip_path)
150 |
151 |
def validate_sound(sound):
    """Coerce ``sound`` to valid sound name.

    Returns ``None`` for invalid sounds. Sound names can be found
    in ``System Preferences > Sound > Sound Effects``.

    Args:
        sound (str): Name of system sound.

    Returns:
        str: Proper name of sound or ``None``.
    """
    if not sound:
        return None

    # Compare case-insensitively against the known system sounds;
    # title-case is the canonical spelling for all of them.
    known = {s.lower() for s in SOUNDS}
    return sound.title() if sound.lower() in known else None
172 |
173 |
def notify(title='', text='', sound=None):
    """Post notification via Notify.app helper.

    Args:
        title (str, optional): Notification title.
        text (str, optional): Notification body text.
        sound (str, optional): Name of sound to play.

    Raises:
        ValueError: Raised if both ``title`` and ``text`` are empty.

    Returns:
        bool: ``True`` if notification was posted, else ``False``.
    """
    if title == text == '':
        raise ValueError('Empty notification')

    sound = validate_sound(sound) or ''

    n = notifier_program()
    if not os.path.exists(n):
        install_notifier()

    # Pass the notification parameters to the applet via its environment.
    enc = 'utf-8'
    env = os.environ.copy()
    env['NOTIFY_TITLE'] = title.encode(enc)
    env['NOTIFY_MESSAGE'] = text.encode(enc)
    env['NOTIFY_SOUND'] = sound.encode(enc)

    retcode = subprocess.call([n], env=env)
    if retcode != 0:
        log().error('Notify.app exited with status {0}.'.format(retcode))
        return False

    return True
210 |
211 |
def convert_image(inpath, outpath, size):
    """Convert an image file using ``sips``.

    Args:
        inpath (str): Path of source file.
        outpath (str): Path to destination file.
        size (int): Width and height of destination image in pixels.

    Raises:
        RuntimeError: Raised if ``sips`` exits with non-zero status.
    """
    dim = str(size)
    cmd = [b'sips', b'-z', dim, dim, inpath, b'--out', outpath]
    # Discard sips's output; only the exit status matters here.
    with open(os.devnull, 'w') as devnull:
        retcode = subprocess.call(cmd, stdout=devnull,
                                  stderr=subprocess.STDOUT)

    if retcode != 0:
        raise RuntimeError('sips exited with %d' % retcode)
234 |
235 |
def png_to_icns(png_path, icns_path):
    """Convert PNG file to ICNS using ``iconutil``.

    Create an iconset from the source PNG file. Generate PNG files
    in each size required by macOS, then call ``iconutil`` to turn
    them into a single ICNS file.

    Args:
        png_path (str): Path to source PNG file.
        icns_path (str): Path to destination ICNS file.

    Raises:
        RuntimeError: Raised if ``iconutil`` or ``sips`` fail.
    """
    # Work in a throwaway directory so a failed run leaves no litter.
    tempdir = tempfile.mkdtemp(prefix='aw-', dir=wf().datadir)

    try:
        iconset = os.path.join(tempdir, 'Icon.iconset')

        assert not os.path.exists(iconset), \
            'iconset already exists: ' + iconset
        os.makedirs(iconset)

        # Copy source icon to icon set and generate all the other
        # sizes needed (each base size plus its @2x retina variant)
        configs = []
        for i in (16, 32, 128, 256, 512):
            configs.append(('icon_{0}x{0}.png'.format(i), i))
            configs.append((('icon_{0}x{0}@2x.png'.format(i), i * 2)))

        # 256x256 and 128x128@2x match the source size, so copy directly
        # instead of re-converting through sips.
        shutil.copy(png_path, os.path.join(iconset, 'icon_256x256.png'))
        shutil.copy(png_path, os.path.join(iconset, 'icon_128x128@2x.png'))

        for name, size in configs:
            outpath = os.path.join(iconset, name)
            if os.path.exists(outpath):
                continue
            convert_image(png_path, outpath, size)

        cmd = [
            b'iconutil',
            b'-c', b'icns',
            b'-o', icns_path,
            iconset]

        retcode = subprocess.call(cmd)
        if retcode != 0:
            # NOTE(review): message says 'iconset' but the tool invoked
            # is `iconutil` -- runtime string left as-is.
            raise RuntimeError('iconset exited with %d' % retcode)

        assert os.path.exists(icns_path), \
            'generated ICNS file not found: ' + repr(icns_path)
    finally:
        # Best-effort cleanup of the scratch directory.
        try:
            shutil.rmtree(tempdir)
        except OSError:  # pragma: no cover
            pass
292 |
293 |
if __name__ == '__main__':  # pragma: nocover
    # Simple command-line script to test module with
    # This won't work on 2.6, as `argparse` isn't available
    # by default.
    import argparse

    from unicodedata import normalize

    def ustr(s):
        """Coerce `s` to normalised Unicode."""
        return normalize('NFD', s.decode('utf-8'))

    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--png', help="PNG image to convert to ICNS.")
    parser.add_argument('-l', '--list-sounds', help="Show available sounds.",
                        action='store_true')
    parser.add_argument('-t', '--title',
                        help="Notification title.", type=ustr,
                        default='')
    parser.add_argument('-s', '--sound', type=ustr,
                        help="Optional notification sound.", default='')
    parser.add_argument('text', type=ustr,
                        help="Notification body text.", default='', nargs='?')
    opts = parser.parse_args()

    # List available sounds
    if opts.list_sounds:
        for name in SOUNDS:
            print(name)
        sys.exit(0)

    # Convert PNG to ICNS
    if opts.png:
        # Destination ICNS sits next to the source PNG
        stem = os.path.splitext(os.path.basename(opts.png))[0]
        icns = os.path.join(os.path.dirname(opts.png), stem + '.icns')

        print('converting {0!r} to {1!r} ...'.format(opts.png, icns),
              file=sys.stderr)

        assert not os.path.exists(icns), \
            'destination file already exists: ' + icns

        png_to_icns(opts.png, icns)
        sys.exit(0)

    # Post notification
    if opts.title == opts.text == '':
        print('ERROR: empty notification.', file=sys.stderr)
        sys.exit(1)

    notify(opts.title, opts.text, opts.sound)
346 |
--------------------------------------------------------------------------------
/src/workflow/update.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding: utf-8
3 | #
4 | # Copyright (c) 2014 Fabio Niephaus <fabio.niephaus@gmail.com>,
5 | # Dean Jackson <deanishe@deanishe.net>
6 | #
7 | # MIT Licence. See http://opensource.org/licenses/MIT
8 | #
9 | # Created on 2014-08-16
10 | #
11 |
12 | """Self-updating from GitHub.
13 |
14 | .. versionadded:: 1.9
15 |
16 | .. note::
17 |
18 | This module is not intended to be used directly. Automatic updates
19 | are controlled by the ``update_settings`` :class:`dict` passed to
20 | :class:`~workflow.workflow.Workflow` objects.
21 |
22 | """
23 |
24 | from __future__ import print_function, unicode_literals
25 |
26 | import os
27 | import tempfile
28 | import re
29 | import subprocess
30 |
31 | import workflow
32 | import web
33 |
34 | # __all__ = []
35 |
36 |
37 | RELEASES_BASE = 'https://api.github.com/repos/{0}/releases'
38 |
39 |
40 | _wf = None
41 |
42 |
def wf():
    """Return the lazily-created, module-wide `Workflow` object."""
    global _wf
    if _wf is not None:
        return _wf
    # First call: create and memoise the Workflow instance
    _wf = workflow.Workflow()
    return _wf
49 |
50 |
class Version(object):
    """Mostly semantic versioning.

    The main difference to proper :ref:`semantic versioning <semver>`
    is that this implementation doesn't require a minor or patch version.

    Version strings may also be prefixed with "v", e.g.:

    >>> v = Version('v1.1.1')
    >>> v.tuple
    (1, 1, 1, '')

    >>> v = Version('2.0')
    >>> v.tuple
    (2, 0, 0, '')

    >>> Version('3.1-beta').tuple
    (3, 1, 0, 'beta')

    >>> Version('1.0.1') > Version('0.0.1')
    True
    """

    #: Match version and pre-release/build information in version strings
    match_version = re.compile(r'([0-9\.]+)(.+)?').match

    def __init__(self, vstr):
        """Create new `Version` object.

        Args:
            vstr (basestring): Semantic version string.

        Raises:
            ValueError: If ``vstr`` is not a valid version string.
        """
        self.vstr = vstr
        # Missing minor/patch components default to 0
        self.major = 0
        self.minor = 0
        self.patch = 0
        # Pre-release identifier (text after "-"), e.g. "beta" in "1.0-beta"
        self.suffix = ''
        # Build metadata (text after "+"), e.g. "84" in "1.0+84"
        self.build = ''
        self._parse(vstr)

    def _parse(self, vstr):
        # Populate major/minor/patch/suffix/build from ``vstr``.
        # A leading "v" (common in Git tags) is stripped before matching.
        if vstr.startswith('v'):
            m = self.match_version(vstr[1:])
        else:
            m = self.match_version(vstr)
        if not m:
            raise ValueError('invalid version number: {0}'.format(vstr))

        version, suffix = m.groups()
        parts = self._parse_dotted_string(version)
        self.major = parts.pop(0)
        if len(parts):
            self.minor = parts.pop(0)
        if len(parts):
            self.patch = parts.pop(0)
        if not len(parts) == 0:
            # More than three dotted components, e.g. "1.2.3.4"
            raise ValueError('invalid version (too long) : {0}'.format(vstr))

        if suffix:
            # Build info: everything after "+" is build metadata
            idx = suffix.find('+')
            if idx > -1:
                self.build = suffix[idx+1:]
                suffix = suffix[:idx]
            if suffix:
                # What remains is the pre-release identifier, which must
                # be introduced by "-", e.g. "-beta"
                if not suffix.startswith('-'):
                    raise ValueError(
                        'suffix must start with - : {0}'.format(suffix))
                self.suffix = suffix[1:]

        # wf().logger.debug('version str `{}` -> {}'.format(vstr, repr(self)))

    def _parse_dotted_string(self, s):
        """Parse string ``s`` into list of ints and strings."""
        parsed = []
        parts = s.split('.')
        for p in parts:
            # Purely numeric components are compared numerically;
            # anything else is kept as a string
            if p.isdigit():
                p = int(p)
            parsed.append(p)
        return parsed

    @property
    def tuple(self):
        """Version number as a tuple of major, minor, patch, pre-release."""
        # NOTE: build metadata is not included, so versions differing
        # only in build compare equal via __eq__
        return (self.major, self.minor, self.patch, self.suffix)

    def __lt__(self, other):
        """Implement comparison."""
        if not isinstance(other, Version):
            raise ValueError('not a Version instance: {0!r}'.format(other))
        # Compare major/minor/patch first; suffixes only break ties
        t = self.tuple[:3]
        o = other.tuple[:3]
        if t < o:
            return True
        if t == o:  # We need to compare suffixes
            # A pre-release sorts before the corresponding release
            if self.suffix and not other.suffix:
                return True
            if other.suffix and not self.suffix:
                return False
            return (self._parse_dotted_string(self.suffix) <
                    self._parse_dotted_string(other.suffix))
        # t > o
        return False

    def __eq__(self, other):
        """Implement comparison."""
        if not isinstance(other, Version):
            raise ValueError('not a Version instance: {0!r}'.format(other))
        return self.tuple == other.tuple

    def __ne__(self, other):
        """Implement comparison."""
        return not self.__eq__(other)

    def __gt__(self, other):
        """Implement comparison."""
        if not isinstance(other, Version):
            raise ValueError('not a Version instance: {0!r}'.format(other))
        return other.__lt__(self)

    def __le__(self, other):
        """Implement comparison."""
        if not isinstance(other, Version):
            raise ValueError('not a Version instance: {0!r}'.format(other))
        return not other.__lt__(self)

    def __ge__(self, other):
        """Implement comparison."""
        # Delegates to __lt__, which performs the isinstance check
        return not self.__lt__(other)

    def __str__(self):
        """Return semantic version string."""
        vstr = '{0}.{1}.{2}'.format(self.major, self.minor, self.patch)
        if self.suffix:
            vstr = '{0}-{1}'.format(vstr, self.suffix)
        if self.build:
            vstr = '{0}+{1}'.format(vstr, self.build)
        return vstr

    def __repr__(self):
        """Return 'code' representation of `Version`."""
        return "Version('{0}')".format(str(self))
194 |
195 |
def download_workflow(url):
    """Download workflow at ``url`` to a local temporary file.

    :param url: URL to .alfredworkflow file in GitHub repo
    :returns: path to downloaded file

    """
    filename = url.split('/')[-1]

    # Only GitHub release assets that are workflow bundles are accepted
    if not filename.endswith(('.alfredworkflow', '.alfred3workflow')):
        raise ValueError('attachment not a workflow: {0}'.format(filename))

    local_path = os.path.join(tempfile.gettempdir(), filename)

    wf().logger.debug(
        'downloading updated workflow from `%s` to `%s` ...', url, local_path)

    response = web.get(url)

    with open(local_path, 'wb') as output:
        output.write(response.content)

    return local_path
220 |
221 |
def build_api_url(slug):
    """Generate releases URL from GitHub slug.

    :param slug: Repo name in form ``username/repo``
    :returns: URL to the API endpoint for the repo's releases

    :raises ValueError: if ``slug`` is not of the form ``username/repo``

    """
    parts = slug.split('/')
    # Require exactly two non-empty components. Previously a slug like
    # "user/" passed validation and produced a malformed API URL.
    if len(parts) != 2 or not all(parts):
        raise ValueError('invalid GitHub slug: {0}'.format(slug))

    return RELEASES_BASE.format(slug)
233 |
234 |
def _validate_release(release):
    """Return release for running version of Alfred."""
    running_alfred3 = wf().alfred_version.major == 3
    version = release['tag_name']

    # Collect workflow-file download URLs, grouped by extension
    downloads = {'.alfredworkflow': [], '.alfred3workflow': []}
    for asset in release.get('assets', []):
        url = asset.get('browser_download_url')
        if not url:  # pragma: nocover
            continue

        ext = os.path.splitext(url)[1].lower()
        if ext not in downloads:
            continue

        # Ignore Alfred 3-only files if Alfred 2 is running
        if ext == '.alfred3workflow' and not running_alfred3:
            continue

        downloads[ext].append(url)

    if not any(downloads.values()):
        wf().logger.warning(
            'invalid release (no workflow file): %s', version)
        return None

    for ext, urls in downloads.items():
        if len(urls) > 1:
            wf().logger.warning(
                'invalid release (multiple %s files): %s', ext, version)
            return None

    # Prefer .alfred3workflow file if there is one and Alfred 3 is
    # running.
    if running_alfred3 and downloads['.alfred3workflow']:
        download_url = downloads['.alfred3workflow'][0]
    else:
        download_url = downloads['.alfredworkflow'][0]

    wf().logger.debug('release %s: %s', version, download_url)

    return {
        'version': version,
        'download_url': download_url,
        'prerelease': release['prerelease']
    }
287 |
288 |
def get_valid_releases(github_slug, prereleases=False):
    """Return list of all valid releases.

    :param github_slug: ``username/repo`` for workflow's GitHub repo
    :param prereleases: Whether to include pre-releases.
    :returns: list of dicts. Each :class:`dict` has the form
        ``{'version': '1.1', 'download_url': 'http://github.com/...',
        'prerelease': False }``


    A valid release is one that contains one ``.alfredworkflow`` file.

    If the GitHub version (i.e. tag) is of the form ``v1.1``, the leading
    ``v`` will be stripped.

    """
    api_url = build_api_url(github_slug)
    releases = []

    wf().logger.debug('retrieving releases list: %s', api_url)

    def retrieve_releases():
        # Fetched lazily; result is cached by `cached_data` below
        wf().logger.info(
            'retrieving releases: %s', github_slug)
        return web.get(api_url).json()

    slug = github_slug.replace('/', '-')
    for release in wf().cached_data('gh-releases-' + slug, retrieve_releases):
        # Bug fix: keep the raw release data in its own name. Previously
        # `release` was overwritten by the validation result before the
        # log call, so the log always read "invalid release: None".
        valid = _validate_release(release)
        if valid is None:
            wf().logger.debug('invalid release: %r', release)
            continue

        if valid['prerelease'] and not prereleases:
            wf().logger.debug('ignoring prerelease: %s', valid['version'])
            continue

        wf().logger.debug('release: %r', valid)

        releases.append(valid)

    return releases
332 |
333 |
def check_update(github_slug, current_version, prereleases=False):
    """Check whether a newer release is available on GitHub.

    :param github_slug: ``username/repo`` for workflow's GitHub repo
    :param current_version: the currently installed version of the
        workflow. :ref:`Semantic versioning <semver>` is required.
    :param prereleases: Whether to include pre-releases.
    :type current_version: ``unicode``
    :returns: ``True`` if an update is available, else ``False``

    If an update is available, its version number and download URL will
    be cached.

    """
    releases = get_valid_releases(github_slug, prereleases)

    if not releases:
        # Bug fix: github_slug was previously passed as a second
        # positional argument to ValueError (logging-style), so the
        # message was never interpolated.
        raise ValueError('no valid releases for {0}'.format(github_slug))

    wf().logger.info('%d releases for %s', len(releases), github_slug)

    # GitHub returns releases newest-first
    latest_release = releases[0]

    vr = Version(latest_release['version'])
    vl = Version(current_version)
    wf().logger.debug('latest=%r, installed=%r', vr, vl)

    if vr > vl:
        # Cache details so install_update() can act on them later
        wf().cache_data('__workflow_update_status', {
            'version': latest_release['version'],
            'download_url': latest_release['download_url'],
            'available': True
        })

        return True

    wf().cache_data('__workflow_update_status', {'available': False})
    return False
374 |
375 |
def install_update():
    """If a newer release is available, download and install it.

    :returns: ``True`` if an update is installed, else ``False``

    """
    # Status was cached by a previous check_update() run; max_age=0
    # means never expire it here
    status = wf().cached_data('__workflow_update_status', max_age=0)

    if not status or not status.get('available'):
        wf().logger.info('no update available')
        return False

    local_file = download_workflow(status['download_url'])

    wf().logger.info('installing updated workflow ...')
    # `open` hands the .alfredworkflow file to Alfred for installation
    subprocess.call(['open', local_file])

    # Mark the update as consumed so it isn't installed twice
    status['available'] = False
    wf().cache_data('__workflow_update_status', status)
    return True
396 |
397 |
if __name__ == '__main__':  # pragma: nocover
    import sys

    def show_help(status=0):
        """Print help message and exit with ``status``."""
        # Bug fix: the usage message was missing the positional
        # argument placeholders, giving users no hint that a GitHub
        # slug and a version are required.
        print('Usage : update.py (check|install) '
              '[--prereleases] <github_slug> <version>')
        sys.exit(status)

    argv = sys.argv[:]
    if '-h' in argv or '--help' in argv:
        show_help()

    prereleases = '--prereleases' in argv

    if prereleases:
        argv.remove('--prereleases')

    # Exactly three positionals required: action, github_slug, version
    if len(argv) != 4:
        show_help(1)

    action, github_slug, version = argv[1:]

    if action == 'check':
        check_update(github_slug, version, prereleases)
    elif action == 'install':
        install_update()
    else:
        show_help(1)
427 |
--------------------------------------------------------------------------------
/src/workflow/version:
--------------------------------------------------------------------------------
1 | 1.27
--------------------------------------------------------------------------------