├── .gitignore ├── .travis.yml ├── AUTHORS.txt ├── CHANGES.rst ├── LICENSE.txt ├── MANIFEST.in ├── Makefile ├── README.rst ├── cassette ├── __init__.py ├── cassette_library.py ├── config.py ├── http_connection.py ├── http_response.py ├── mocked_response.py ├── patcher.py ├── player.py ├── tests │ ├── __init__.py │ ├── base.py │ ├── data │ │ ├── requests │ │ │ ├── httplib_GET_httpbin.org_80__unused.json │ │ │ ├── httplib_GET_httpbin.org_80__unused2.json │ │ │ └── httplib_GET_httpbin.org_80_get_01abfc750a0c942167651c40d088531d_c2585c6aafb8fa06dc8bb6de88f9de0b.json │ │ ├── responses.yaml │ │ └── responses_0.3.2.yaml │ ├── server │ │ ├── __init__.py │ │ ├── image.png │ │ └── run.py │ ├── test_cassette.py │ ├── test_cassette_library.py │ ├── test_cassette_performance.py │ ├── test_cassette_utils.py │ └── use_cases │ │ ├── __init__.py │ │ └── test_report_unused.py ├── unpatched.py └── utils.py ├── docs ├── Makefile ├── api.rst ├── changelog.rst ├── conf.py ├── development.rst ├── foreword.rst ├── index.rst ├── license.rst ├── make.bat ├── quickstart.rst └── usage.rst ├── requirements-dev.txt ├── requirements.txt ├── setup.cfg └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[co] 2 | 3 | # Packages 4 | *.egg 5 | *.egg-info 6 | dist 7 | build 8 | eggs 9 | parts 10 | bin 11 | var 12 | sdist 13 | develop-eggs 14 | .installed.cfg 15 | 16 | # Installer logs 17 | pip-log.txt 18 | 19 | # Unit test / coverage reports 20 | .coverage 21 | .tox 22 | 23 | # Translations 24 | *.mo 25 | 26 | # Temporary cassette libraries 27 | *.temp.* 28 | 29 | # Doc 30 | docs/_build 31 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | install: 5 | - "pip install -r requirements.txt -r requirements-dev.txt --use-mirrors" 6 | script: "py.test cassette" 7 | -------------------------------------------------------------------------------- /AUTHORS.txt: -------------------------------------------------------------------------------- 1 | Cassette is written and maintained by Charles-Axel Dein and various 2 | contributors: 3 | 4 | Development Lead 5 | ```````````````` 6 | 7 | - Charles-Axel Dein 8 | 9 | Patches and Suggestions 10 | ``````````````````````` 11 | 12 | - Todd Wolfson 13 | - Zack Heller 14 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | Changelog for Cassette 2 | ====================== 3 | 4 | 0.3.8 (2015-04-03) 5 | ------------------ 6 | 7 | - Add compatibility with Python 2.7.9 8 | 9 | 0.3.7 (2015-03-12) 10 | ------------------ 11 | 12 | - Add ability to report on unused cassette. 13 | 14 | 0.3.6 (2014-10-31) 15 | ------------------ 16 | 17 | - Fix NameError when using UL3CassetteHTTPConnection (thanks to @carolinevdh) 18 | - Fix HTTP Response to use cStringIO, adding Unicode support (thanks to 19 | @carolinevdh) 20 | 21 | 0.3.5 (2014-08-28) 22 | ------------------ 23 | 24 | - Fix error closing HTTPConnections directly 25 | 26 | 0.3.4 (2014-08-27) 27 | ------------------ 28 | 29 | - Improve backward compatibility with 0.3.2 30 | 31 | 0.3.3 (2014-08-27) 32 | ------------------ 33 | 34 | - Added support for `requests`. 
Note that libraries are not necessarily 35 | cross-compatible: requests cached with `urllib2` may not work with `requests` 36 | and vice versa. 37 | 38 | 0.3.2 (2014-06-26) 39 | ------------------ 40 | 41 | - Handle absent headers with httplib (thanks to @blampe) 42 | 43 | 0.3.1 (2014-06-04) 44 | ------------------ 45 | 46 | - Add the ability to read from a directory instead of from a single file 47 | (thanks to @anthonysutardja) 48 | 49 | 0.3 (2014-03-18) 50 | ---------------- 51 | 52 | - Respect request headers in cassette name. Requires regenerating cassette 53 | files. 54 | 55 | 0.2 (2013-05-14) 56 | ---------------- 57 | 58 | - Get rid of urlopen mocking, mock only at ``httplib`` level to circumvent 59 | the problem with urlopen raising exceptions when getting non-2XX codes 60 | - Clean up the docs, streamline their structure 61 | - **This is a backward incompatible release**, you'll need to delete your 62 | YAML file. 63 | 64 | 0.1.13 (2013-05-13) 65 | ------------------- 66 | 67 | - Fix binary file downloading (thanks to @twolfson) 68 | 69 | 0.1.12 (2013-04-26) 70 | ------------------- 71 | 72 | - Add performance tests (courtesy of @twolfson) 73 | - Cache the loaded file content to achieve significant performance improvements 74 | (thanks to @twolfson) 75 | 76 | 0.1.11 (2013-04-11) 77 | ------------------- 78 | 79 | - Lazily load YAML file 80 | 81 | 0.1.11 (2013-04-11) 82 | ------------------- 83 | 84 | - Started tracking changes 85 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2013 by Uber Technologies, Inc. and contributors. See 2 | AUTHORS.txt for more details. 3 | 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms of the software as well 7 | as documentation, with or without modification, are permitted provided 8 | that the following conditions are met: 9 | 10 | * Redistributions of source code must retain the above copyright 11 | notice, this list of conditions and the following disclaimer. 12 | 13 | * Redistributions in binary form must reproduce the above 14 | copyright notice, this list of conditions and the following 15 | disclaimer in the documentation and/or other materials provided 16 | with the distribution. 17 | 18 | * The names of the contributors may not be used to endorse or 19 | promote products derived from this software without specific 20 | prior written permission. 21 | 22 | THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND 23 | CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT 24 | NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 25 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER 26 | OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 27 | EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 28 | PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 29 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 30 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 31 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 32 | SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 33 | DAMAGE.
34 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE.txt 2 | include README.rst 3 | include requirements.txt -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: bootstrap develop test 2 | 3 | bootstrap: 4 | pip install -r requirements.txt 5 | pip install -r requirements-dev.txt 6 | 7 | develop: 8 | python setup.py develop 9 | 10 | test: clean develop lint 11 | py.test 12 | 13 | lint: 14 | flake8 --ignore=E501,E702 . 15 | 16 | clean: clean-build 17 | find . -name '*.py[co]' -exec rm -f {} + 18 | 19 | clean-build: 20 | rm -fr build/ 21 | rm -fr dist/ 22 | rm -fr *.egg-info 23 | 24 | release: clean clean-build test docs 25 | prerelease && release 26 | git push --tags 27 | git push 28 | 29 | doc: clean develop 30 | $(MAKE) -C docs clean 31 | $(MAKE) -C docs html 32 | 33 | open_doc: doc 34 | open docs/_build/html/index.html 35 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Cassette 2 | ======== 3 | 4 | **Deprecation Warning**: cassette has some known limitations and is not maintained anymore; we recommend using `vcrpy `_ instead. 5 | 6 | .. image:: https://img.shields.io/pypi/v/cassette.svg 7 | 8 | Cassette stores and replays HTTP requests made in your Python app. 9 | 10 | .. code:: python 11 | 12 | import urllib2 13 | 14 | import cassette 15 | 16 | with cassette.play("data/responses.yaml"): 17 | 18 | # If the request is not already stored in responses.yaml, cassette 19 | # will request the URL and store its response in the file. 20 | r = urllib2.urlopen("http://www.internic.net/domain/named.root") 21 | 22 | # This time, the request response must be in the file. The external 23 | # request is not made. cassette retrieves the response from the 24 | # file. 25 | r = urllib2.urlopen("http://www.internic.net/domain/named.root") 26 | 27 | assert "A.ROOT-SERVERS.NET" in r.read(10000) 28 | 29 | Cassette also supports the `requests `_ 30 | library. 31 | 32 | .. code:: python 33 | 34 | import requests 35 | 36 | with cassette.play("data/responses.yaml"): 37 | r = requests.get("http://www.internic.net/domain/named.root") 38 | 39 | Note that requests stored between different libraries may not be compatible with 40 | each other. That is, a request stored with ``urllib2`` might still trigger an external 41 | request if the same URL is requested with ``requests``. 42 | 43 | 44 | Installation 45 | ------------ 46 | 47 | .. code-block:: sh 48 | 49 | $ pip install cassette 50 | 51 | 52 | Documentation 53 | ------------- 54 | 55 | Latest documentation: `cassette.readthedocs.org `_ 56 | 57 | 58 | License 59 | ------- 60 | 61 | cassette is available under the MIT License. 62 | 63 | Copyright Uber 2013, Charles-Axel Dein 64 | -------------------------------------------------------------------------------- /cassette/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import contextlib 3 | import logging 4 | 5 | from cassette.player import Player 6 | 7 | player = None 8 | logging.getLogger("cassette").addHandler(logging.NullHandler()) 9 | 10 | 11 | def insert(filename, file_format=''): 12 | """Set up cassette.
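
    Inserting a cassette patches HTTP connections until ``eject()`` is
    called; the ``play()`` context manager below simply wraps this pair.
    A minimal sketch of manual use (the path and URL are illustrative,
    taken from the README)::

        cassette.insert("data/responses.yaml")
        r = urllib2.urlopen("http://www.internic.net/domain/named.root")
        cassette.eject()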
13 | 14 | :param filename: path to where requests and responses will be stored. 15 | """ 16 | global player 17 | 18 | player = Player(filename, file_format) 19 | player.__enter__() 20 | 21 | 22 | def eject(exc_type=None, exc_value=None, tb=None): 23 | """Remove cassette, unpatching HTTP requests.""" 24 | player.__exit__(exc_type, exc_value, tb) 25 | 26 | 27 | @contextlib.contextmanager 28 | def play(filename, file_format=''): 29 | """Use cassette.""" 30 | insert(filename, file_format=file_format) 31 | yield 32 | eject() 33 | -------------------------------------------------------------------------------- /cassette/cassette_library.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import hashlib 3 | import logging 4 | import os 5 | import sys 6 | from urlparse import urlparse 7 | 8 | from cassette.config import Config 9 | from cassette.http_response import MockedHTTPResponse 10 | from cassette.utils import Encoder 11 | 12 | log = logging.getLogger("cassette") 13 | 14 | 15 | def _hash(content): 16 | m = hashlib.md5() 17 | m.update(content) 18 | return m.digest() 19 | 20 | 21 | class CassetteName(unicode): 22 | 23 | """ 24 | A CassetteName represents an unique way to retrieve the cassette 25 | from the library. 26 | """ 27 | 28 | @classmethod 29 | def from_httplib_connection(cls, host, port, method, url, body, 30 | headers, will_hash_body=False): 31 | """Create an object from an httplib request.""" 32 | 33 | if headers: 34 | if 'Host' in headers: 35 | # 'Host' is already covered explicitly below, remove from the 36 | # hash so hostname/post are easy to change and sed the file 37 | del headers['Host'] 38 | 39 | headers = hashlib.md5(repr(sorted(headers.items()))).hexdigest() 40 | 41 | if will_hash_body: 42 | if body: 43 | body = hashlib.md5(body).hexdigest() 44 | else: 45 | body = '' 46 | 47 | if url: 48 | # instantiated to 0.0.0.0 so we can parse 49 | parsed_url = urlparse('http://0.0.0.0' + url) 50 | url = parsed_url.path 51 | query = hashlib.md5(parsed_url.query + '#' + 52 | parsed_url.fragment).hexdigest() 53 | else: 54 | # requests/urllib3 defaults to '/' while urllib2 is ''. So this 55 | # value should be '/' to ensure compatability. 56 | url = '/' 57 | query = '' 58 | name = ("httplib:{method} {host}:{port}{url} {query} " 59 | "{headers} {body}").format(**locals()) 60 | else: 61 | # note that old yaml files will not contain the correct matching 62 | # query and body 63 | name = ("httplib:{method} {host}:{port}{url} " 64 | "{headers} {body}").format(**locals()) 65 | 66 | name = name.strip() 67 | return name 68 | 69 | 70 | class CassetteLibrary(object): 71 | """ 72 | The CassetteLibrary holds the stored requests and manage them. 73 | 74 | This is an abstract class that needs to have several methods implemented. 75 | In addition, subclasses must store request/response pairs as a keys and 76 | values as a dictionary in the property `self.data` 77 | 78 | :param str filename: filename to use when storing and replaying requests. 
79 | :param Encoder encoder: the instantiated encodeder to use 80 | """ 81 | 82 | cache = {} 83 | 84 | def __init__(self, filename, encoder, config=None): 85 | self.filename = os.path.abspath(filename) 86 | self.is_dirty = False 87 | self.used = set() 88 | self.config = config or self.get_default_config() 89 | 90 | self.encoder = encoder 91 | 92 | def get_default_config(self): 93 | return Config() 94 | 95 | def add_response(self, cassette_name, response): 96 | """Add a new response to the mocked response. 97 | 98 | :param str cassette_name: 99 | :param response: 100 | """ 101 | 102 | if not cassette_name: 103 | raise TypeError("No cassette name provided.") 104 | 105 | mock_response_class = MockedHTTPResponse 106 | mocked = mock_response_class.from_response(response) 107 | self.data[cassette_name] = mocked 108 | 109 | # Mark the cassette changes as dirty for ejection 110 | self.is_dirty = True 111 | 112 | return mocked 113 | 114 | def save_to_cache(self, file_hash, data): 115 | """Save a decoded data object into cache.""" 116 | CassetteLibrary.cache[self.filename] = { 117 | 'hash': file_hash, 118 | 'data': data 119 | } 120 | 121 | def rewind(self): 122 | """Restore all responses to a re-seekable state.""" 123 | for k, v in self.data.items(): 124 | v.rewind() 125 | 126 | def _had_response(self): 127 | """Mark that the library already the response. 128 | 129 | This is for testing purposes (it's complicated to patch the unpatched 130 | version of urllib2/httplib). 131 | """ 132 | pass 133 | 134 | def cassette_name_for_httplib_connection(self, host, port, method, 135 | url, body, headers): 136 | """Create a cassette name from an httplib request.""" 137 | return CassetteName.from_httplib_connection( 138 | host, port, method, url, body, headers) 139 | 140 | def _log_contains(self, cassette_name, contains): 141 | """Logging for checking access to cassettes.""" 142 | if contains: 143 | self._had_response() # For testing purposes 144 | log.info('Library has %s', cassette_name) 145 | else: 146 | log.info('Library does not have %s', cassette_name) 147 | 148 | def log_cassette_used(self, path): 149 | """Log that a path was used.""" 150 | if self.config['log_cassette_used']: 151 | self.used.add(path) 152 | 153 | def report_unused_cassettes(self, output=sys.stdout): 154 | """Report unused path to a file.""" 155 | if not self.config['log_cassette_used']: 156 | raise ValueError('Need to activate log_cassette_used first.') 157 | available = set(self.get_all_available()) 158 | unused = available.difference(self.used) 159 | output.write('\n'.join(unused)) 160 | 161 | # Methods that need to be implemented by subclasses 162 | def write_to_file(self): 163 | """Write the response data to file.""" 164 | raise NotImplementedError('CassetteLibrary not implemented.') 165 | 166 | def __contains__(self, cassette_name): 167 | raise NotImplementedError('CassetteLibrary not implemented.') 168 | 169 | def __getitem__(self, cassette_name): 170 | raise NotImplementedError('CassetteLibrary not implemented.') 171 | 172 | @classmethod 173 | def create_new_cassette_library(cls, path, file_format, config=None): 174 | """Return an instantiated CassetteLibrary. 175 | 176 | Use this method to create new a CassetteLibrary. It will 177 | automatically determine if it should use a file or directory to 178 | back the cassette based on the filename. The method assumes that 179 | all file names with an extension (e.g. ``/file.json``) are files, 180 | and all file names without extensions are directories (e.g. 181 | ``/requests``). 
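
        For example (paths are illustrative)::

            # '.yaml' extension: a single-file library backed by data/responses.yaml
            CassetteLibrary.create_new_cassette_library('data/responses.yaml', 'yaml')

            # no extension: a directory-backed library under data/responses/
            CassetteLibrary.create_new_cassette_library('data/responses', 'json')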
182 | 183 | :param str path: filename of file or directory for storing requests 184 | :param str file_format: the file_format to use for storing requests 185 | :param dict config: configuration 186 | """ 187 | if not Encoder.is_supported_format(file_format): 188 | raise KeyError('%r is not a supported file_format.' % file_format) 189 | 190 | _, extension = os.path.splitext(path) 191 | if file_format: 192 | encoder = Encoder.get_encoder_from_file_format(file_format) 193 | else: 194 | encoder = Encoder.get_encoder_from_extension(extension) 195 | 196 | # Check if file has extension 197 | if extension: 198 | if os.path.isdir(path): 199 | raise IOError('Expected a file, but found a directory at %s' 200 | % path) 201 | klass = FileCassetteLibrary 202 | else: 203 | if os.path.isfile(path): 204 | raise IOError('Expected a directory, but found a file at %r' % 205 | path) 206 | klass = DirectoryCassetteLibrary 207 | 208 | return klass(path, encoder, config) 209 | 210 | 211 | class FileCassetteLibrary(CassetteLibrary): 212 | """Store and manage requests with a single file.""" 213 | 214 | @property 215 | def data(self): 216 | """Lazily loaded data.""" 217 | if not hasattr(self, "_data"): 218 | self._data = self.load_file() 219 | 220 | return self._data 221 | 222 | def write_to_file(self): 223 | """Write mocked responses to file.""" 224 | # Serialize the items via YAML 225 | data = {k: v.to_dict() for k, v in self.data.items()} 226 | encoded_str = self.encoder.dump(data) 227 | 228 | # Save the changes to file 229 | with open(self.filename, "w+") as f: 230 | f.write(encoded_str) 231 | 232 | # Update our hash 233 | self.save_to_cache(file_hash=_hash(encoded_str), data=self.data) 234 | 235 | self.is_dirty = False 236 | 237 | def load_file(self): 238 | """Load MockedResponses from YAML file.""" 239 | data = {} 240 | filename = self.filename 241 | 242 | if not os.path.exists(filename): 243 | log.info("File '{f}' does not exist.".format(f=filename)) 244 | return data 245 | 246 | # Open and read in the file 247 | with open(filename) as f: 248 | encoded_str = f.read() 249 | encoded_hash = _hash(encoded_str) 250 | 251 | # If the contents are cached, return them 252 | cached_result = CassetteLibrary.cache.get(filename, None) 253 | if cached_result and cached_result['hash'] == encoded_hash: 254 | return cached_result['data'] 255 | 256 | # Otherwise, parse the contents 257 | content = self.encoder.load(encoded_str) 258 | 259 | if content: 260 | for k, v in content.items(): 261 | mock_response_class = MockedHTTPResponse 262 | data[k] = mock_response_class.from_dict(v) 263 | 264 | # Cache the file for later 265 | self.save_to_cache(file_hash=encoded_hash, data=data) 266 | 267 | return data 268 | 269 | def __contains__(self, cassette_name): 270 | contains = cassette_name in self.data 271 | self._log_contains(cassette_name, contains) 272 | 273 | return contains 274 | 275 | def __getitem__(self, cassette_name): 276 | """Return the request from the loaded dictionary data.""" 277 | req = self.data.get(cassette_name, None) 278 | 279 | if not req: 280 | raise KeyError("Cassette '{c}' does not exist in \ 281 | library.".format(c=cassette_name)) 282 | 283 | req.rewind() 284 | return req 285 | 286 | def get_all_available(self): 287 | """Return all available cassette.""" 288 | return self.data.keys() 289 | 290 | 291 | class DirectoryCassetteLibrary(CassetteLibrary): 292 | """A CassetteLibrary that stores and manages requests with directory.""" 293 | 294 | def __init__(self, *args, **kwargs): 295 | 
super(DirectoryCassetteLibrary, self).__init__(*args, **kwargs) 296 | 297 | self.data = {} 298 | 299 | def generate_filename(self, cassette_name): 300 | """Generate the filename for a given cassette name.""" 301 | for character in ('/', ':', ' '): 302 | cassette_name = cassette_name.replace(character, '_') 303 | 304 | return cassette_name + self.encoder.file_ext 305 | 306 | def generate_path_from_cassette_name(self, cassette_name): 307 | """Generate the full path to cassette file.""" 308 | return os.path.join( 309 | self.filename, self.generate_filename(cassette_name)) 310 | 311 | def write_to_file(self): 312 | """Write mocked response to a directory of files.""" 313 | if not os.path.exists(self.filename): 314 | os.mkdir(self.filename) 315 | 316 | for cassette_name, response in self.data.items(): 317 | filename = self.generate_filename(cassette_name) 318 | encoded_str = self.encoder.dump(response.to_dict()) 319 | 320 | with open(os.path.join(self.filename, filename), 'w') as f: 321 | f.write(encoded_str) 322 | 323 | # Update our hash 324 | self.save_to_cache(file_hash=_hash(encoded_str), data=response) 325 | 326 | self.is_dirty = False 327 | 328 | def __contains__(self, cassette_name): 329 | """Return whether or not the cassette already exists. 330 | 331 | The method first checks if it is already stored in memory. If not, it 332 | will check if a file supporting the cassette name exists. 333 | """ 334 | contains = cassette_name in self.data 335 | if not contains: 336 | # Check file directory if it exists 337 | filename = self.generate_path_from_cassette_name(cassette_name) 338 | contains = os.path.exists(filename) 339 | 340 | self._log_contains(cassette_name, contains) 341 | 342 | return contains 343 | 344 | def __getitem__(self, cassette_name): 345 | """Return the request if it is in memory. Otherwise, look to disk.""" 346 | if cassette_name in self.data: 347 | req = self.data[cassette_name] 348 | else: 349 | # If not in self.data, need to fetch from disk 350 | req = self._load_request_from_file(cassette_name) 351 | 352 | if not req: 353 | raise KeyError('Cassette %s does not exist in library.' % 354 | cassette_name) 355 | 356 | req.rewind() 357 | return req 358 | 359 | def _load_request_from_file(self, cassette_name): 360 | """Return the mocked response object from the encoded file. 361 | 362 | If the cassette file is in the cache, then use it. Otherwise, read 363 | from the disk to fetch the particular request. 
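        The cache is keyed by the cassette file path and validated with an
        MD5 hash of the file contents, so a cassette that changed on disk is
        re-parsed rather than served stale from the cache.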
364 | """ 365 | filename = self.generate_path_from_cassette_name(cassette_name) 366 | 367 | with open(filename) as f: 368 | encoded_str = f.read() 369 | 370 | self.log_cassette_used(self.generate_filename(cassette_name)) 371 | encoded_hash = _hash(encoded_str) 372 | 373 | # If the contents are cached, return them 374 | cached_result = CassetteLibrary.cache.get(filename, None) 375 | if cached_result and cached_result['hash'] == encoded_hash: 376 | return cached_result['data'] 377 | 378 | # Otherwise, parse the contents 379 | content = self.encoder.load(encoded_str) 380 | 381 | if content: 382 | mock_response_class = MockedHTTPResponse 383 | req = mock_response_class.from_dict(content) 384 | 385 | # Cache the file for later 386 | self.save_to_cache(file_hash=encoded_hash, data=req) 387 | return req 388 | 389 | # Override 390 | def cassette_name_for_httplib_connection(self, host, port, method, 391 | url, body, headers): 392 | """Create a cassette name from an httplib request.""" 393 | return CassetteName.from_httplib_connection( 394 | host, port, method, url, body, headers, will_hash_body=True) 395 | 396 | def get_all_available(self): 397 | """Return all available cassette.""" 398 | return os.listdir(self.filename) 399 | -------------------------------------------------------------------------------- /cassette/config.py: -------------------------------------------------------------------------------- 1 | class Config(dict): 2 | 3 | def __init__(self): 4 | # Defaults 5 | self['log_cassette_used'] = False 6 | -------------------------------------------------------------------------------- /cassette/http_connection.py: -------------------------------------------------------------------------------- 1 | """Contains mock HTTPConnection objects that imitate the behavior of 2 | HTTPConnection from httplib. Cassette works by monkeypatching HTTPConnection 3 | to check if a certain request has been cached already. 4 | 5 | Note that although requests, urllib3 and urllib2 all use httplib, urllib3 6 | uses its own, subclassed, version of HTTPConnection. So to make requests work, 7 | we need to mock and patch this object. 
8 | """ 9 | import logging 10 | import socket 11 | import ssl 12 | from httplib import HTTPConnection, HTTPSConnection 13 | 14 | import semver 15 | 16 | log = logging.getLogger("cassette") 17 | 18 | 19 | class CassetteConnectionMixin(object): 20 | _delete_sock_when_returning_from_library = False 21 | 22 | def request(self, method, url, body=None, headers=None): 23 | """Send HTTP request.""" 24 | 25 | lib = self._cassette_library 26 | self._cassette_name = lib.cassette_name_for_httplib_connection( 27 | host=self.host, 28 | port=self.port, 29 | method=method, 30 | url=url, 31 | body=body, 32 | headers=headers, 33 | ) 34 | if self._cassette_name in lib: 35 | self._response = lib[self._cassette_name] 36 | 37 | if self._delete_sock_when_returning_from_library: 38 | if hasattr(self, 'sock') and self.sock is None: 39 | delattr(self, 'sock') 40 | 41 | return 42 | 43 | log.warning("Making external HTTP request: %s" % self._cassette_name) 44 | self._baseclass.request(self, method, url, body, headers or {}) 45 | 46 | def getresponse(self, buffering=False): 47 | """Return HTTP response.""" 48 | 49 | if buffering: 50 | raise NotImplemented("buffering not supported by cassette.") 51 | 52 | if hasattr(self, "_response"): 53 | return self._response 54 | 55 | lib = self._cassette_library 56 | response = self._baseclass.getresponse(self) 57 | 58 | # If we were just returning the response here, the file 59 | # descriptor would be at the end of the file, and read() would 60 | # return nothing. 61 | response = lib.add_response(self._cassette_name, response) 62 | 63 | return response 64 | 65 | 66 | class CassetteHTTPConnection(CassetteConnectionMixin, HTTPConnection): 67 | 68 | _baseclass = HTTPConnection 69 | 70 | def __init__(self, *args, **kwargs): 71 | HTTPConnection.__init__(self, *args, **kwargs) 72 | 73 | 74 | class CassetteHTTPSConnectionPre279(CassetteConnectionMixin, HTTPSConnection): 75 | 76 | _baseclass = HTTPSConnection 77 | 78 | def __init__(self, host, port=None, key_file=None, cert_file=None, 79 | strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, 80 | source_address=None): 81 | # Directly taken from httplib. 82 | HTTPConnection.__init__(self, host, port, strict, timeout, 83 | source_address) 84 | self.key_file = key_file 85 | self.cert_file = cert_file 86 | 87 | 88 | class CassetteHTTPSConnectionPost279(CassetteConnectionMixin, HTTPSConnection): 89 | 90 | _baseclass = HTTPSConnection 91 | 92 | def __init__(self, host, port=None, key_file=None, cert_file=None, 93 | strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, 94 | source_address=None, context=None): 95 | # Directly taken from httplib. 96 | HTTPConnection.__init__(self, host, port, strict, timeout, 97 | source_address) 98 | self.key_file = key_file 99 | self.cert_file = cert_file 100 | if context is None: 101 | context = ssl._create_default_https_context() 102 | if key_file or cert_file: 103 | context.load_cert_chain(cert_file, key_file) 104 | self._context = context 105 | 106 | def connect(self): 107 | "Connect to a host on a given (SSL) port." 
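        # Open the plain TCP connection first (including any CONNECT tunnel),
        # then wrap the socket in TLS below, using the tunnel host (when set)
        # as server_hostname so SNI and hostname checks use the right name.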
108 | 109 | HTTPConnection.connect(self) 110 | 111 | if self._tunnel_host: 112 | server_hostname = self._tunnel_host 113 | else: 114 | server_hostname = self.host 115 | 116 | self.sock = self._context.wrap_socket(self.sock, 117 | server_hostname=server_hostname) 118 | 119 | 120 | if hasattr(ssl, 'SSLContext'): 121 | CassetteHTTPSConnection = CassetteHTTPSConnectionPost279 122 | else: 123 | CassetteHTTPSConnection = CassetteHTTPSConnectionPre279 124 | 125 | try: 126 | from requests.packages import urllib3 as requests_urllib3 127 | import requests 128 | except ImportError: 129 | pass 130 | else: 131 | class UL3CassetteHTTPConnection(CassetteConnectionMixin, 132 | requests_urllib3.connection.HTTPConnection): 133 | 134 | _baseclass = requests_urllib3.connection.HTTPConnection 135 | # requests 2.3.0 and below have an issue where they get confused if 136 | # the HTTPConnection has a "sock" attribute which is set to None at 137 | # the end of a request/response cycle. So we delete the attribute in 138 | # those cases 139 | _delete_sock_when_returning_from_library = True if \ 140 | semver.compare(requests.__version__, '2.4.0') == -1 else False 141 | 142 | class UL3CassetteHTTPSConnection(UL3CassetteHTTPConnection, 143 | CassetteConnectionMixin, 144 | requests_urllib3.connection.HTTPSConnection): 145 | 146 | _baseclass = requests_urllib3.connection.HTTPSConnection 147 | -------------------------------------------------------------------------------- /cassette/http_response.py: -------------------------------------------------------------------------------- 1 | import cStringIO 2 | import io 3 | from httplib import HTTPMessage 4 | 5 | from cassette.mocked_response import MockedResponse 6 | 7 | 8 | class MockedHTTPResponse(MockedResponse): 9 | 10 | attrs = ("headers", "content", "status", "reason", "raw_headers", "length", 11 | "version") 12 | 13 | @classmethod 14 | def from_response(cls, response): 15 | """Create object from true response.""" 16 | 17 | d = { 18 | "headers": dict(response.getheaders()), 19 | "content": response.read(), 20 | "status": response.status, 21 | "reason": response.reason, 22 | "raw_headers": response.msg.headers, 23 | "length": response.length, 24 | "version": response.version, 25 | } 26 | return cls.from_dict(d) 27 | 28 | @classmethod 29 | def from_dict(cls, data): 30 | """Create object from dict.""" 31 | 32 | # Hack to ensure backwards compatibility with older versions of the 33 | # that did not have the length and version attributes. 
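        # Default 'length' to the stored content size and 'version' to 10,
        # httplib's integer code for HTTP/1.0 (11 would mean HTTP/1.1).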
34 | data.setdefault('length', len(data['content'])) 35 | data.setdefault('version', 10) 36 | 37 | obj = cls() 38 | 39 | for k in cls.attrs: 40 | setattr(obj, k, data[k]) 41 | 42 | obj.fp = cls.create_file_descriptor(obj.content) 43 | 44 | obj.msg = HTTPMessage(io.StringIO(unicode()), 0) 45 | for k, v in obj.headers.iteritems(): 46 | obj.msg.addheader(k, v) 47 | 48 | obj.msg.headers = data["raw_headers"] 49 | 50 | return obj 51 | 52 | @staticmethod 53 | def create_file_descriptor(content): 54 | """Create a file descriptor for content.""" 55 | 56 | fp = cStringIO.StringIO(content) 57 | 58 | return fp 59 | 60 | def read(self, chunked=None): 61 | return self.fp.read() 62 | 63 | def getheaders(self): 64 | return self.headers.items() 65 | 66 | def getheader(self, name): 67 | return self.headers.get(name) 68 | 69 | def stream(self, chunk_size, decode_content): 70 | yield self.read() 71 | 72 | def rewind(self): 73 | self.fp = self.create_file_descriptor(self.content) 74 | self.read = self.fp.read 75 | return self 76 | 77 | def close(self): 78 | self.fp = None 79 | 80 | def isclosed(self): 81 | return True 82 | -------------------------------------------------------------------------------- /cassette/mocked_response.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | class MockedResponse(object): 4 | 5 | def to_dict(self): 6 | """Return dict representation.""" 7 | 8 | if not hasattr(self, "attrs"): 9 | raise AttributeError("You need to have an 'attrs' class attr.") 10 | 11 | return {k: getattr(self, k) for k in self.attrs} 12 | 13 | @classmethod 14 | def from_response(self, response): 15 | raise NotImplementedError 16 | 17 | def rewind(self): 18 | """Make sure the file description is at the start.""" 19 | pass 20 | -------------------------------------------------------------------------------- /cassette/patcher.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import httplib 4 | 5 | try: 6 | import requests 7 | except ImportError: 8 | requests = None 9 | else: 10 | from cassette.http_connection import (UL3CassetteHTTPConnection, 11 | UL3CassetteHTTPSConnection) 12 | 13 | from cassette.http_connection import (CassetteHTTPConnection, 14 | CassetteHTTPSConnection) 15 | 16 | 17 | unpatched_HTTPConnection = httplib.HTTPConnection 18 | unpatched_HTTPSConnection = httplib.HTTPSConnection 19 | if requests: 20 | unpatched_requests_HTTPConnection = requests.packages.urllib3.connection.HTTPConnection 21 | unpatched_requests_HTTPSConnection = requests.packages.urllib3.connection.HTTPSConnection 22 | 23 | 24 | def patch(cassette_library): 25 | """Replace standard library.""" 26 | 27 | # Inspired by vcrpy 28 | 29 | CassetteHTTPConnection._cassette_library = cassette_library 30 | CassetteHTTPSConnection._cassette_library = cassette_library 31 | 32 | httplib.HTTPConnection = CassetteHTTPConnection 33 | httplib.HTTP._connection_class = CassetteHTTPConnection 34 | httplib.HTTPSConnection = CassetteHTTPSConnection 35 | httplib.HTTPS._connection_class = CassetteHTTPSConnection 36 | 37 | if requests: 38 | UL3CassetteHTTPConnection._cassette_library = cassette_library 39 | UL3CassetteHTTPSConnection._cassette_library = cassette_library 40 | 41 | requests.packages.urllib3.connectionpool.HTTPConnectionPool.ConnectionCls = \ 42 | UL3CassetteHTTPConnection 43 | requests.packages.urllib3.connectionpool.HTTPSConnectionPool.ConnectionCls = \ 44 | UL3CassetteHTTPSConnection 45 | 46 | 47 | def 
unpatch(): 48 | """Unpatch standard library.""" 49 | 50 | # Inspired by vcrpy 51 | 52 | httplib.HTTPConnection = unpatched_HTTPConnection 53 | httplib.HTTP._connection_class = unpatched_HTTPConnection 54 | httplib.HTTPSConnection = unpatched_HTTPSConnection 55 | httplib.HTTPS._connection_class = unpatched_HTTPSConnection 56 | 57 | if requests: 58 | requests.packages.urllib3.connection.HTTPConnection = \ 59 | unpatched_requests_HTTPConnection 60 | requests.packages.urllib3.connection.HTTPSConnection = \ 61 | unpatched_requests_HTTPSConnection 62 | -------------------------------------------------------------------------------- /cassette/player.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from cassette.cassette_library import CassetteLibrary 4 | from cassette.patcher import patch, unpatch 5 | 6 | 7 | class Player(object): 8 | 9 | def __init__(self, path, file_format='', config=None): 10 | self.library = CassetteLibrary.create_new_cassette_library( 11 | path, file_format, config) 12 | 13 | def play(self): 14 | """Return contextenv.""" 15 | return self 16 | 17 | def __enter__(self): 18 | patch(self.library) 19 | 20 | def __exit__(self, exc_type, exc_value, tb): 21 | # If the cassette items have changed, save the changes to file 22 | if self.library.is_dirty: 23 | self.library.write_to_file() 24 | # Remove our overrides 25 | unpatch() 26 | 27 | def report_unused_cassettes(self, output=sys.stdout): 28 | """Report unused cassettes to file.""" 29 | self.library.report_unused_cassettes(output) 30 | -------------------------------------------------------------------------------- /cassette/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uber/cassette/475b02010f05a183d01abe8034fb320fd0e2f635/cassette/tests/__init__.py -------------------------------------------------------------------------------- /cassette/tests/base.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | TEMPORARY_RESPONSES_FILENAME = "./cassette/tests/data/responses.temp" 4 | TEMPORARY_RESPONSES_DIRECTORY = "./cassette/tests/data/responsedir" 5 | TEMPORARY_RESPONSES_ROOT = "./cassette/tests/data/" 6 | 7 | 8 | class TestCase(unittest.TestCase): 9 | pass 10 | -------------------------------------------------------------------------------- /cassette/tests/data/requests/httplib_GET_httpbin.org_80__unused.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uber/cassette/475b02010f05a183d01abe8034fb320fd0e2f635/cassette/tests/data/requests/httplib_GET_httpbin.org_80__unused.json -------------------------------------------------------------------------------- /cassette/tests/data/requests/httplib_GET_httpbin.org_80__unused2.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uber/cassette/475b02010f05a183d01abe8034fb320fd0e2f635/cassette/tests/data/requests/httplib_GET_httpbin.org_80__unused2.json -------------------------------------------------------------------------------- /cassette/tests/data/requests/httplib_GET_httpbin.org_80_get_01abfc750a0c942167651c40d088531d_c2585c6aafb8fa06dc8bb6de88f9de0b.json: -------------------------------------------------------------------------------- 1 | { 2 | "status": 200, 3 | "raw_headers": [ 4 | "Server: nginx\r\n", 5 | "Date: Sat, 06 Jun 2015 00:30:37 
GMT\r\n", 6 | "Content-Type: application/json\r\n", 7 | "Content-Length: 207\r\n", 8 | "Connection: close\r\n", 9 | "Access-Control-Allow-Origin: *\r\n", 10 | "Access-Control-Allow-Credentials: true\r\n" 11 | ], 12 | "content": "{\n \"args\": {}, \n \"headers\": {\n \"Accept-Encoding\": \"identity\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"Python-urllib/2.7\"\n }, \n \"origin\": \"67.165.177.180\", \n \"url\": \"http://httpbin.org/get\"\n}\n", 13 | "headers": { 14 | "content-length": "207", 15 | "server": "nginx", 16 | "connection": "close", 17 | "access-control-allow-credentials": "true", 18 | "date": "Sat, 06 Jun 2015 00:30:37 GMT", 19 | "access-control-allow-origin": "*", 20 | "content-type": "application/json" 21 | }, 22 | "reason": "OK", 23 | "version": 11, 24 | "length": 0 25 | } -------------------------------------------------------------------------------- /cassette/tests/data/responses.yaml: -------------------------------------------------------------------------------- 1 | httplib:GET 127.0.0.1:5000/headers 917ed38bcb583a67c891f9fa59792bc0 None: 2 | content: "{\n \"json\": true\n}" 3 | headers: {content-length: '18', content-type: application/json, date: 'Mon, 18 Aug 4 | 2014 23:27:09 GMT', server: Werkzeug/0.9.6 Python/2.7.5} 5 | length: 0 6 | raw_headers: ["Content-Type: application/json\r\n", "Content-Length: 18\r\n", "Server:\ 7 | \ Werkzeug/0.9.6 Python/2.7.5\r\n", "Date: Mon, 18 Aug 2014 23:27:09 GMT\r\n"] 8 | reason: OK 9 | status: 200 10 | version: 10 11 | httplib:GET 127.0.0.1:5000/headers c2585c6aafb8fa06dc8bb6de88f9de0b None: 12 | content: not json 13 | headers: {content-length: '8', content-type: text/html; charset=utf-8, date: 'Mon, 14 | 18 Aug 2014 23:27:09 GMT', server: Werkzeug/0.9.6 Python/2.7.5} 15 | length: 0 16 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 8\r\ 17 | \n", "Server: Werkzeug/0.9.6 Python/2.7.5\r\n", "Date: Mon, 18 Aug 2014 23:27:09\ 18 | \ GMT\r\n"] 19 | reason: OK 20 | status: 200 21 | version: 10 22 | httplib:GET 127.0.0.1:5000/index None None: 23 | content: hello world 24 | headers: {content-length: '11', content-type: text/html; charset=utf-8, date: 'Mon, 25 | 18 Aug 2014 23:27:08 GMT', server: Werkzeug/0.9.6 Python/2.7.5} 26 | length: 0 27 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 11\r\ 28 | \n", "Server: Werkzeug/0.9.6 Python/2.7.5\r\n", "Date: Mon, 18 Aug 2014 23:27:08\ 29 | \ GMT\r\n"] 30 | reason: OK 31 | status: 200 32 | version: 10 33 | httplib:GET 127.0.0.1:5000/index c2585c6aafb8fa06dc8bb6de88f9de0b None: 34 | content: hello world 35 | headers: {content-length: '11', content-type: text/html; charset=utf-8, date: 'Mon, 36 | 18 Aug 2014 23:27:08 GMT', server: Werkzeug/0.9.6 Python/2.7.5} 37 | length: 0 38 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 11\r\ 39 | \n", "Server: Werkzeug/0.9.6 Python/2.7.5\r\n", "Date: Mon, 18 Aug 2014 23:27:08\ 40 | \ GMT\r\n"] 41 | reason: OK 42 | status: 200 43 | version: 10 44 | httplib:GET 127.0.0.1:5000/non-ascii-content c2585c6aafb8fa06dc8bb6de88f9de0b None: 45 | content: !!python/str "Le Mexicain l'avait achet\xE9e en viager \xE0 un procureur\ 46 | \ \xE0 la retraite. Apr\xE8s trois mois, l'accident b\xEAte. Une affaire." 
47 | headers: {content-length: '120', content-type: text/html; charset=utf-8, date: 'Mon, 48 | 18 Aug 2014 23:27:08 GMT', server: Werkzeug/0.9.6 Python/2.7.5} 49 | length: 0 50 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 120\r\ 51 | \n", "Server: Werkzeug/0.9.6 Python/2.7.5\r\n", "Date: Mon, 18 Aug 2014 23:27:08\ 52 | \ GMT\r\n"] 53 | reason: OK 54 | status: 200 55 | version: 10 56 | httplib:GET 127.0.0.1:5000/redirected c2585c6aafb8fa06dc8bb6de88f9de0b None: 57 | content: hello world redirected 58 | headers: {content-length: '22', content-type: text/html; charset=utf-8, date: 'Mon, 59 | 18 Aug 2014 23:27:09 GMT', server: Werkzeug/0.9.6 Python/2.7.5} 60 | length: 0 61 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 22\r\ 62 | \n", "Server: Werkzeug/0.9.6 Python/2.7.5\r\n", "Date: Mon, 18 Aug 2014 23:27:09\ 63 | \ GMT\r\n"] 64 | reason: OK 65 | status: 200 66 | version: 10 67 | httplib:GET 127.0.0.1:5000/will_redirect c2585c6aafb8fa06dc8bb6de88f9de0b None: 68 | content: ' 69 | 70 | Redirecting... 71 | 72 |

Redirecting...

73 | 74 |

You should be redirected automatically to target URL: /redirected. If 75 | not click the link.' 76 | headers: {content-length: '229', content-type: text/html; charset=utf-8, date: 'Mon, 77 | 18 Aug 2014 23:27:09 GMT', location: 'http://127.0.0.1:5000/redirected', server: Werkzeug/0.9.6 78 | Python/2.7.5} 79 | length: 0 80 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 229\r\ 81 | \n", "Location: http://127.0.0.1:5000/redirected\r\n", "Server: Werkzeug/0.9.6\ 82 | \ Python/2.7.5\r\n", "Date: Mon, 18 Aug 2014 23:27:09 GMT\r\n"] 83 | reason: FOUND 84 | status: 302 85 | version: 10 86 | httplib:GET httpbin.org:443/ip c2585c6aafb8fa06dc8bb6de88f9de0b None: 87 | content: "{\n \"origin\": \"8.26.157.128\"\n}" 88 | headers: {access-control-allow-credentials: 'true', access-control-allow-origin: '*', 89 | connection: Close, content-length: '30', content-type: application/json, date: 'Mon, 90 | 18 Aug 2014 23:27:09 GMT', server: gunicorn/18.0} 91 | length: 0 92 | raw_headers: ["Access-Control-Allow-Credentials: true\r\n", "Access-Control-Allow-Origin:\ 93 | \ *\r\n", "Content-Type: application/json\r\n", "Date: Mon, 18 Aug 2014 23:27:09\ 94 | \ GMT\r\n", "Server: gunicorn/18.0\r\n", "Content-Length: 30\r\n", "Connection:\ 95 | \ Close\r\n"] 96 | reason: OK 97 | status: 200 98 | version: 11 99 | -------------------------------------------------------------------------------- /cassette/tests/data/responses_0.3.2.yaml: -------------------------------------------------------------------------------- 1 | httplib:GET 127.0.0.1:5000/headers 917ed38bcb583a67c891f9fa59792bc0 None: 2 | content: "{\n \"json\": true\n}" 3 | headers: {content-length: '18', content-type: application/json, date: 'Mon, 16 Jun 4 | 2014 18:55:54 GMT', server: Werkzeug/0.9.6 Python/2.7.2} 5 | raw_headers: ["Content-Type: application/json\r\n", "Content-Length: 18\r\n", "Server:\ 6 | \ Werkzeug/0.9.6 Python/2.7.2\r\n", "Date: Mon, 16 Jun 2014 18:55:54 GMT\r\n"] 7 | reason: OK 8 | status: 200 9 | httplib:GET 127.0.0.1:5000/headers c2585c6aafb8fa06dc8bb6de88f9de0b None: 10 | content: not json 11 | headers: {content-length: '8', content-type: text/html; charset=utf-8, date: 'Mon, 12 | 16 Jun 2014 18:55:54 GMT', server: Werkzeug/0.9.6 Python/2.7.2} 13 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 8\r\ 14 | \n", "Server: Werkzeug/0.9.6 Python/2.7.2\r\n", "Date: Mon, 16 Jun 2014 18:55:54\ 15 | \ GMT\r\n"] 16 | reason: OK 17 | status: 200 18 | httplib:GET 127.0.0.1:5000/index None None: 19 | content: hello world 20 | headers: {content-length: '11', content-type: text/html; charset=utf-8, date: 'Mon, 21 | 16 Jun 2014 18:55:53 GMT', server: Werkzeug/0.9.6 Python/2.7.2} 22 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 11\r\ 23 | \n", "Server: Werkzeug/0.9.6 Python/2.7.2\r\n", "Date: Mon, 16 Jun 2014 18:55:53\ 24 | \ GMT\r\n"] 25 | reason: OK 26 | status: 200 27 | httplib:GET 127.0.0.1:5000/index c2585c6aafb8fa06dc8bb6de88f9de0b None: 28 | content: hello world 29 | headers: {content-length: '11', content-type: text/html; charset=utf-8, date: 'Mon, 30 | 16 Jun 2014 18:55:54 GMT', server: Werkzeug/0.9.6 Python/2.7.2} 31 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 11\r\ 32 | \n", "Server: Werkzeug/0.9.6 Python/2.7.2\r\n", "Date: Mon, 16 Jun 2014 18:55:54\ 33 | \ GMT\r\n"] 34 | reason: OK 35 | status: 200 36 | httplib:GET 127.0.0.1:5000/non-ascii-content c2585c6aafb8fa06dc8bb6de88f9de0b None: 37 | content: !!python/str 
"Le Mexicain l'avait achet\xE9e en viager \xE0 un procureur\ 38 | \ \xE0 la retraite. Apr\xE8s trois mois, l'accident b\xEAte. Une affaire." 39 | headers: {content-length: '120', content-type: text/html; charset=utf-8, date: 'Mon, 40 | 16 Jun 2014 18:55:54 GMT', server: Werkzeug/0.9.6 Python/2.7.2} 41 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 120\r\ 42 | \n", "Server: Werkzeug/0.9.6 Python/2.7.2\r\n", "Date: Mon, 16 Jun 2014 18:55:54\ 43 | \ GMT\r\n"] 44 | reason: OK 45 | status: 200 46 | httplib:GET 127.0.0.1:5000/redirected c2585c6aafb8fa06dc8bb6de88f9de0b None: 47 | content: hello world redirected 48 | headers: {content-length: '22', content-type: text/html; charset=utf-8, date: 'Mon, 49 | 16 Jun 2014 18:55:54 GMT', server: Werkzeug/0.9.6 Python/2.7.2} 50 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 22\r\ 51 | \n", "Server: Werkzeug/0.9.6 Python/2.7.2\r\n", "Date: Mon, 16 Jun 2014 18:55:54\ 52 | \ GMT\r\n"] 53 | reason: OK 54 | status: 200 55 | httplib:GET 127.0.0.1:5000/will_redirect c2585c6aafb8fa06dc8bb6de88f9de0b None: 56 | content: ' 57 | 58 | Redirecting... 59 | 60 |

Redirecting...

61 | 62 |

You should be redirected automatically to target URL: /redirected. If 63 | not click the link.' 64 | headers: {content-length: '229', content-type: text/html; charset=utf-8, date: 'Mon, 65 | 16 Jun 2014 18:55:54 GMT', location: 'http://127.0.0.1:5000/redirected', server: Werkzeug/0.9.6 66 | Python/2.7.2} 67 | raw_headers: ["Content-Type: text/html; charset=utf-8\r\n", "Content-Length: 229\r\ 68 | \n", "Location: http://127.0.0.1:5000/redirected\r\n", "Server: Werkzeug/0.9.6\ 69 | \ Python/2.7.2\r\n", "Date: Mon, 16 Jun 2014 18:55:54 GMT\r\n"] 70 | reason: FOUND 71 | status: 302 72 | httplib:GET httpbin.org:443/ip c2585c6aafb8fa06dc8bb6de88f9de0b None: 73 | content: "{\n \"origin\": \"8.26.157.128\"\n}" 74 | headers: {access-control-allow-origin: '*', connection: Close, content-length: '30', 75 | content-type: application/json, date: 'Mon, 16 Jun 2014 18:55:50 GMT', server: gunicorn/18.0} 76 | raw_headers: ["Access-Control-Allow-Origin: *\r\n", "Content-Type: application/json\r\ 77 | \n", "Date: Mon, 16 Jun 2014 18:55:50 GMT\r\n", "Server: gunicorn/18.0\r\n", 78 | "Content-Length: 30\r\n", "Connection: Close\r\n"] 79 | reason: OK 80 | status: 200 81 | -------------------------------------------------------------------------------- /cassette/tests/server/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uber/cassette/475b02010f05a183d01abe8034fb320fd0e2f635/cassette/tests/server/__init__.py -------------------------------------------------------------------------------- /cassette/tests/server/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uber/cassette/475b02010f05a183d01abe8034fb320fd0e2f635/cassette/tests/server/image.png -------------------------------------------------------------------------------- /cassette/tests/server/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from flask import Flask, jsonify, redirect, request, url_for 3 | 4 | app = Flask(__name__) 5 | 6 | IMAGE_FILENAME = "./cassette/tests/server/image.png" 7 | 8 | 9 | @app.route("/index") 10 | def index(): 11 | return "hello world" 12 | 13 | 14 | @app.route("/non-ascii-content") 15 | def non_ascii_content(): 16 | return (u"Le Mexicain l'avait achetée en viager " 17 | u"à un procureur à la retraite. Après trois mois, " 18 | u"l'accident bête. Une affaire.") 19 | 20 | 21 | @app.route("/image") 22 | def image(): 23 | with open(IMAGE_FILENAME) as image_handle: 24 | return image_handle.read() 25 | 26 | 27 | @app.route("/will_redirect") 28 | def will_redirect(): 29 | return redirect(url_for("redirected")) 30 | 31 | 32 | @app.route("/redirected") 33 | def redirected(): 34 | return "hello world redirected" 35 | 36 | 37 | @app.route("/get") 38 | def get(): 39 | return jsonify(args=request.args) 40 | 41 | 42 | @app.route("/headers") 43 | def headers(): 44 | if request.headers.get("Accept") == "application/json": 45 | return jsonify(json=True) 46 | else: 47 | return "not json" 48 | 49 | if __name__ == "__main__": 50 | app.run() 51 | -------------------------------------------------------------------------------- /cassette/tests/test_cassette.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf8 -*- 2 | """ 3 | Test the cassette behavior. 
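
Each test performs the same request twice against a throwaway cassette file:
the first pass is expected to hit the network and record, while the second
pass must be served from the cassette library, which is detected through the
mocked CassetteLibrary._had_response hook.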
4 | """ 5 | 6 | import httplib 7 | import json 8 | import os 9 | import shutil 10 | import threading 11 | import urllib 12 | import urllib2 13 | import requests 14 | 15 | import mock 16 | 17 | import cassette 18 | from cassette.cassette_library import CassetteLibrary 19 | from cassette.tests.base import (TEMPORARY_RESPONSES_DIRECTORY, 20 | TEMPORARY_RESPONSES_FILENAME, TestCase) 21 | from cassette.tests.server.run import app 22 | 23 | IMAGE_FILENAME = "./cassette/tests/server/image.png" 24 | TEST_HOST = "http://127.0.0.1:5000/" 25 | TEST_URL = "http://127.0.0.1:5000/index" 26 | TEST_URL_HTTPS = "https://httpbin.org/ip" 27 | TEST_URL_REDIRECT = "http://127.0.0.1:5000/will_redirect" 28 | TEST_URL_IMAGE = "http://127.0.0.1:5000/image" 29 | TEST_URL_404 = "http://127.0.0.1:5000/404" 30 | TEST_URL_HEADERS = "http://127.0.0.1:5000/headers" 31 | 32 | 33 | # Taken from requests 34 | def to_key_val_list(value): 35 | """Take an object and test to see if it can be represented as a 36 | dictionary. If it can be, return a list of tuples, e.g., 37 | 38 | :: 39 | 40 | >>> to_key_val_list([('key', 'val')]) 41 | [('key', 'val')] 42 | >>> to_key_val_list({'key': 'val'}) 43 | [('key', 'val')] 44 | >>> to_key_val_list('string') 45 | Traceback (most recent call last): 46 | ... 47 | ValueError: cannot encode objects that are not 2-tuples 48 | """ 49 | 50 | if value is None: 51 | return None 52 | 53 | if isinstance(value, (str, bytes, bool, int)): 54 | raise ValueError('cannot encode objects that are not 2-tuples') 55 | 56 | if isinstance(value, dict): 57 | value = value.items() 58 | 59 | return list(value) 60 | 61 | 62 | def _encode_params(data): 63 | """Encode parameters in a piece of data. 64 | 65 | Will successfully encode parameters when passed as a dict or a list of 66 | 2-tuples. Order is retained if data is a list of 2-tuples but abritrary 67 | if parameters are supplied as a dict. 68 | """ 69 | 70 | if isinstance(data, (str, bytes)): 71 | return data 72 | elif hasattr(data, 'read'): 73 | return data 74 | elif hasattr(data, '__iter__'): 75 | result = [] 76 | for k, vs in to_key_val_list(data): 77 | if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): 78 | vs = [vs] 79 | for v in vs: 80 | if v is not None: 81 | result.append( 82 | (k.encode('utf-8') if isinstance(k, str) else k, 83 | v.encode('utf-8') if isinstance(v, str) else v)) 84 | return urllib.urlencode(result, doseq=True) 85 | 86 | 87 | def url_for(endpoint): 88 | """Return full URL for endpoint.""" 89 | return TEST_HOST + endpoint 90 | 91 | 92 | class TestCassette(TestCase): 93 | """Testing the whole flow with a temporary response file.""" 94 | 95 | # Keep track of a single HTTP server instance here so base classes don't 96 | # try to re-use the address. 97 | server_thread = None 98 | 99 | def setUp(self): 100 | self.filename = TEMPORARY_RESPONSES_FILENAME 101 | self.file_format = 'yaml' 102 | 103 | # This is a dummy method that we use to check if cassette had 104 | # the response. 105 | patcher = mock.patch.object(CassetteLibrary, "_had_response") 106 | self.had_response = patcher.start() 107 | self.addCleanup(patcher.stop) 108 | 109 | if os.path.exists(self.filename): 110 | os.remove(self.filename) 111 | 112 | @classmethod 113 | def setUpClass(cls): 114 | if cls.server_thread is None: 115 | cls.server_thread = threading.Thread( 116 | target=app.run, 117 | ) 118 | # Daemonizing will kill the thread when python exits. 
119 | cls.server_thread.daemon = True 120 | cls.server_thread.start() 121 | 122 | def tearDown(self): 123 | if os.path.exists(self.filename): 124 | os.remove(self.filename) 125 | 126 | def check_urllib2_flow(self, url, expected_content=None, 127 | allow_incomplete_match=False, 128 | data=None): 129 | """Verify the urllib2 flow.""" 130 | 131 | if not url.startswith("http"): 132 | url = url_for(url) 133 | 134 | # First run 135 | with cassette.play(self.filename, file_format=self.file_format): 136 | r = urllib2.urlopen(url, data) # 1st run 137 | 138 | self.assertEqual(self.had_response.called, False) 139 | if expected_content: 140 | content = unicode(r.read(), "utf-8") 141 | if allow_incomplete_match: 142 | self.assertIn(expected_content, content) 143 | else: 144 | self.assertEqual(content, expected_content) 145 | 146 | self.had_response.reset_mock() 147 | 148 | # Second run 149 | with cassette.play(self.filename, file_format=self.file_format): 150 | r = urllib2.urlopen(url, data) # 2nd run 151 | 152 | self.assertEqual(self.had_response.called, True) 153 | if expected_content: 154 | content = unicode(r.read(), "utf-8") 155 | if allow_incomplete_match: 156 | self.assertIn(expected_content, content) 157 | else: 158 | self.assertEqual(content, expected_content) 159 | 160 | if r.headers["Content-Type"] == "application/json": 161 | try: 162 | r.json = json.loads(r.read()) 163 | except ValueError: 164 | pass 165 | 166 | return r 167 | 168 | def test_flow(self): 169 | """Verify that cassette works when using urllib2.urlopen.""" 170 | self.check_urllib2_flow(TEST_URL, "hello world") 171 | 172 | def test_flow_redirected(self): 173 | """Verify that cassette works when redirected.""" 174 | self.check_urllib2_flow(TEST_URL_REDIRECT, "hello world redirected") 175 | 176 | def test_flow_httplib(self): 177 | """Verify that cassette works when using httplib directly.""" 178 | 179 | # First run 180 | with cassette.play(self.filename, file_format=self.file_format): 181 | conn = httplib.HTTPConnection("127.0.0.1", 5000) 182 | conn.request("GET", "/index") 183 | r = conn.getresponse() 184 | conn.close() 185 | 186 | self.assertEqual(r.status, 200) 187 | self.assertEqual(r.reason, "OK") 188 | self.assertEqual(r.read(), "hello world") 189 | self.assertEqual(self.had_response.called, False) 190 | 191 | self.had_response.reset_mock() 192 | 193 | # Second run 194 | with cassette.play(self.filename, file_format=self.file_format): 195 | conn = httplib.HTTPConnection("127.0.0.1", 5000) 196 | conn.request("GET", "/index") 197 | r = conn.getresponse() 198 | conn.close() 199 | 200 | self.assertEqual(r.status, 200) 201 | self.assertEqual(r.reason, "OK") 202 | self.assertEqual(r.read(), "hello world") 203 | self.assertEqual(self.had_response.called, True) 204 | 205 | def test_flow_https(self): 206 | """Verify the cassette behavior for HTTPS.""" 207 | self.check_urllib2_flow(TEST_URL_HTTPS, "origin", 208 | allow_incomplete_match=True) 209 | 210 | def test_flow_manual_context(self): 211 | """Verify the cassette behavior when setting up the context.""" 212 | 213 | # First run 214 | cassette.insert(self.filename, file_format=self.file_format) 215 | r = urllib2.urlopen(TEST_URL + '?manual') 216 | cassette.eject() 217 | 218 | self.assertEqual(self.had_response.called, False) 219 | self.assertEqual(r.read(), "hello world") 220 | 221 | self.had_response.reset_mock() 222 | 223 | # Second run 224 | cassette.insert(self.filename, file_format=self.file_format) 225 | r = urllib2.urlopen(TEST_URL + '?manual') 226 | cassette.eject() 227 | 
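        # On this second pass the response must come from the cassette file;
        # no external request should be made.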
228 | self.assertEqual(self.had_response.called, True) 229 | self.assertEqual(r.read(), "hello world") 230 | 231 | def test_flow_get_non_ascii_page(self): 232 | """Verify that cassette can store a non-ascii page.""" 233 | self.check_urllib2_flow( 234 | url="http://127.0.0.1:5000/non-ascii-content", 235 | expected_content=u"Le Mexicain l'avait achetée en viager " 236 | u"à un procureur à la retraite. Après trois mois, " 237 | u"l'accident bête. Une affaire.") 238 | 239 | def test_flow_get_with_array_args(self): 240 | """Verify that cassette can store array args.""" 241 | 242 | param = { 243 | "dict1": {"dict2": "dict3", "dict4": "dict5"}, 244 | "array": ["item1", "item2"], 245 | "int": 1, 246 | "param": "1", 247 | } 248 | 249 | url = "get?" 250 | url += _encode_params(param) 251 | r = self.check_urllib2_flow(url=url) 252 | self.assertEqual(r.json["args"]["param"], "1") 253 | 254 | def test_download_binary_file(self): 255 | """Verify that cassette can store a binary file (e.g. a picture).""" 256 | 257 | # An image 258 | with open(IMAGE_FILENAME) as image_handle: 259 | expected_image = image_handle.read() 260 | 261 | # downloaded via urllib 262 | cassette.insert(self.filename, file_format=self.file_format) 263 | actual_image = urllib2.urlopen(TEST_URL_IMAGE).read() 264 | cassette.eject() 265 | 266 | # has a matching image 267 | self.assertEqual(self.had_response.called, False) 268 | self.assertEqual(expected_image, actual_image) 269 | 270 | self.had_response.reset_mock() 271 | 272 | # downloaded again via urllib 273 | cassette.insert(self.filename, file_format=self.file_format) 274 | actual_image = urllib2.urlopen(TEST_URL_IMAGE).read() 275 | cassette.eject() 276 | 277 | # still has a matching image 278 | self.assertEqual(self.had_response.called, True) 279 | self.assertEqual(expected_image, actual_image) 280 | 281 | def test_flow_404(self): 282 | """Verify that cassette can return a 404 from file.""" 283 | 284 | # First run 285 | with cassette.play(self.filename, file_format=self.file_format): 286 | self.assertRaises(urllib2.HTTPError, urllib2.urlopen, TEST_URL_404) 287 | 288 | self.assertEqual(self.had_response.called, False) 289 | 290 | # Second run, it has the response. 
291 | with cassette.play(self.filename, file_format=self.file_format): 292 | self.assertRaises(urllib2.HTTPError, urllib2.urlopen, TEST_URL_404) 293 | 294 | self.assertEqual(self.had_response.called, True) 295 | 296 | def helper_requestslib(self, url): 297 | with mock.patch.object(CassetteLibrary, '_had_response', autospec=True) as was_cached: 298 | with cassette.play(self.filename, file_format=self.file_format): 299 | r0 = requests.get(url) 300 | 301 | assert not was_cached.called 302 | 303 | with mock.patch.object(CassetteLibrary, '_had_response', autospec=True) as was_cached: 304 | with cassette.play(self.filename, file_format=self.file_format): 305 | r1 = requests.get(url) 306 | 307 | assert was_cached.called 308 | 309 | assert r0.text == r1.text 310 | return r1 311 | 312 | def test_requestslib_http(self): 313 | """Test that normal HTTP requests work using requests.""" 314 | resp = self.helper_requestslib(TEST_URL) 315 | assert resp.headers['content-length'] == '11' 316 | assert resp.text == 'hello world' 317 | 318 | def test_requestslib_https(self): 319 | """Test that HTTPS requests work using requests.""" 320 | resp = self.helper_requestslib(TEST_URL_HTTPS) 321 | # len('{\n "origin":"0.0.0.0"\n}') # 24 322 | assert int(resp.headers['content-length']) >= 24 323 | # len('{\n "origin": "255.255.255.255"\n}') # 34 324 | assert int(resp.headers['content-length']) <= 34 325 | assert 'origin' in resp.json() 326 | 327 | def test_requestslib_redir(self): 328 | """Test that redirect behavior works using requests.""" 329 | resp = self.helper_requestslib(TEST_URL_REDIRECT) 330 | assert resp.headers['content-length'] == '22' 331 | assert resp.text == 'hello world redirected' 332 | 333 | 334 | class TestCassetteJson(TestCassette): 335 | """Perform the same test but in JSON.""" 336 | 337 | def setUp(self): 338 | self.filename = TEMPORARY_RESPONSES_FILENAME 339 | self.file_format = 'json' 340 | 341 | # This is a dummy method that we use to check if cassette had 342 | # the response. 343 | patcher = mock.patch.object(CassetteLibrary, "_had_response") 344 | self.had_response = patcher.start() 345 | self.addCleanup(patcher.stop) 346 | 347 | if os.path.exists(self.filename): 348 | os.remove(self.filename) 349 | 350 | 351 | class TestCassetteDirectory(TestCassette): 352 | """Testing the whole flow with a temporary response directory in yaml.""" 353 | 354 | def setUp(self): 355 | self.filename = TEMPORARY_RESPONSES_DIRECTORY 356 | self.file_format = 'yaml' 357 | 358 | # This is a dummy method that we use to check if cassette had 359 | # the response. 360 | patcher = mock.patch.object(CassetteLibrary, "_had_response") 361 | self.had_response = patcher.start() 362 | self.addCleanup(patcher.stop) 363 | 364 | if os.path.exists(self.filename) and os.path.isdir(self.filename): 365 | shutil.rmtree(self.filename) 366 | 367 | def tearDown(self): 368 | if os.path.exists(self.filename) and os.path.isdir(self.filename): 369 | shutil.rmtree(self.filename) 370 | 371 | 372 | class TestCassetteDirectoryJson(TestCassetteDirectory): 373 | """Testing the whole flow with a temporary response directory in json.""" 374 | 375 | def setUp(self): 376 | self.filename = TEMPORARY_RESPONSES_DIRECTORY 377 | self.file_format = 'json' 378 | 379 | # This is a dummy method that we use to check if cassette had 380 | # the response. 
381 | patcher = mock.patch.object(CassetteLibrary, "_had_response") 382 | self.had_response = patcher.start() 383 | self.addCleanup(patcher.stop) 384 | 385 | if os.path.exists(self.filename) and os.path.isdir(self.filename): 386 | shutil.rmtree(self.filename) 387 | 388 | 389 | class TestCassetteFile(TestCase): 390 | """Verify that cassette can read from an existing file. This is also 391 | the base test case for regression testing older versions of the schema. 392 | To avoid breaking test suites, new versions of cassette should always 393 | work with older schemas. 394 | """ 395 | 396 | # The base class tests the most up to date schema. 397 | responses_filename = './cassette/tests/data/responses.yaml' 398 | 399 | def setUp(self): 400 | 401 | # This is a dummy method that we use to check if cassette had 402 | # the response. 403 | patcher = mock.patch.object(CassetteLibrary, "_had_response") 404 | self.had_response = patcher.start() 405 | self.addCleanup(patcher.stop) 406 | 407 | def check_read_from_file_flow(self, url, expected_content, 408 | allow_incomplete_match=False, 409 | request_headers=None): 410 | """Verify the flow when reading from an existing file.""" 411 | 412 | with cassette.play(self.responses_filename): 413 | request = urllib2.Request(url, headers=request_headers or {}) 414 | r = urllib2.urlopen(request) 415 | 416 | content = unicode(r.read(), "utf-8") 417 | if allow_incomplete_match: 418 | self.assertIn(expected_content, content) 419 | else: 420 | self.assertEqual(content, expected_content) 421 | self.assertEqual(self.had_response.called, True) 422 | 423 | return r 424 | 425 | def test_flow_urlopen(self): 426 | """Verify that cassette can read a file when using urlopen.""" 427 | self.check_read_from_file_flow(TEST_URL, "hello world") 428 | 429 | def test_flow_httplib(self): 430 | """Verify that cassette can read a file when using httplib.""" 431 | 432 | with cassette.play(self.responses_filename): 433 | conn = httplib.HTTPConnection("127.0.0.1", 5000) 434 | conn.request("GET", "/index") 435 | r = conn.getresponse() 436 | 437 | self.assertEqual(r.status, 200) 438 | self.assertEqual(r.reason, "OK") 439 | self.assertEqual(r.read(), "hello world") 440 | self.assertEqual(self.had_response.called, True) 441 | 442 | def test_httplib_getheader_with_present_header(self): 443 | with cassette.play(self.responses_filename): 444 | conn = httplib.HTTPConnection("127.0.0.1", 5000) 445 | conn.request("GET", "/index") 446 | r = conn.getresponse() 447 | 448 | assert r.getheader('content-length') 449 | 450 | def test_httplib_getheader_with_absent_header(self): 451 | with cassette.play(self.responses_filename): 452 | conn = httplib.HTTPConnection("127.0.0.1", 5000) 453 | conn.request("GET", "/index") 454 | r = conn.getresponse() 455 | 456 | assert r.getheader('X-FOOBAR') is None 457 | 458 | def test_read_twice(self): 459 | """Verify that response are not empty.""" 460 | 461 | url = TEST_URL 462 | expected_content = "hello world" 463 | 464 | with cassette.play(self.responses_filename): 465 | r = urllib2.urlopen(url) 466 | self.assertEqual(r.read(), expected_content) 467 | r = urllib2.urlopen(url) 468 | self.assertEqual(r.read(), expected_content) 469 | 470 | def test_read_file_https(self): 471 | """Verify that cassette can read a file for an HTTPS request.""" 472 | self.check_read_from_file_flow(TEST_URL_HTTPS, "origin", 473 | allow_incomplete_match=True) 474 | 475 | def test_redirects(self): 476 | """Verify that cassette can handle a redirect.""" 477 | 
self.check_read_from_file_flow(TEST_URL_REDIRECT, "hello world redirected") 478 | 479 | def test_non_ascii_content(self): 480 | """Verify that cassette can handle non-ascii content.""" 481 | 482 | self.check_read_from_file_flow( 483 | url="http://127.0.0.1:5000/non-ascii-content", 484 | expected_content=u"Le Mexicain l'avait achetée en viager " 485 | u"à un procureur à la retraite. Après trois mois, " 486 | u"l'accident bête. Une affaire.") 487 | 488 | def test_request_headers_json(self): 489 | """Verify that request headers are respected for application/json.""" 490 | 491 | self.check_read_from_file_flow( 492 | url=TEST_URL_HEADERS, 493 | request_headers={"Accept": "application/json"}, 494 | expected_content='"json": true', 495 | allow_incomplete_match=True) 496 | 497 | def test_request_headers_no_accept(self): 498 | """Verify that request headers are respected for default headers.""" 499 | 500 | self.check_read_from_file_flow( 501 | url=TEST_URL_HEADERS, 502 | expected_content="not json") 503 | 504 | 505 | class TestCassetteFile_0_3_2(TestCassetteFile): 506 | responses_filename = './cassette/tests/data/responses_0.3.2.yaml' 507 | -------------------------------------------------------------------------------- /cassette/tests/test_cassette_library.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import mock 4 | 5 | from cassette.cassette_library import (CassetteLibrary, 6 | DirectoryCassetteLibrary, 7 | FileCassetteLibrary) 8 | from cassette.tests.base import (TEMPORARY_RESPONSES_FILENAME, 9 | TEMPORARY_RESPONSES_ROOT, TestCase) 10 | from cassette.utils import JsonEncoder, YamlEncoder 11 | 12 | BAD_DIRECTORY = os.path.join(TEMPORARY_RESPONSES_ROOT, 'tmp.json') 13 | BAD_FILE = os.path.join(TEMPORARY_RESPONSES_ROOT, 'tmp') 14 | 15 | 16 | class TestFileCassetteLibrary(TestCase): 17 | 18 | @mock.patch.object(FileCassetteLibrary, "load_file") 19 | def test_lazy_load(self, mock_load): 20 | """Verify that the file is lazily loaded.""" 21 | 22 | FileCassetteLibrary(TEMPORARY_RESPONSES_FILENAME, 'yaml') 23 | self.assertEqual(mock_load.called, False) 24 | 25 | 26 | class TestCassetteLibrary(TestCase): 27 | """Verify that CassetteLibrary creates the correct subclasses.""" 28 | 29 | def setUp(self): 30 | self.clean_up() 31 | 32 | def tearDown(self): 33 | self.clean_up() 34 | 35 | def clean_up(self): 36 | """Clean up bad temporary files and directories.""" 37 | if os.path.isdir(BAD_DIRECTORY): 38 | os.rmdir(BAD_DIRECTORY) 39 | elif os.path.isfile(BAD_FILE): 40 | os.remove(BAD_FILE) 41 | 42 | def create_bad_files(self): 43 | """Generate bad examples of a directory and file. 44 | 45 | This will generate a directory and file that will not be able to be used 46 | by CassetteLibrary. 
47 | """ 48 | if not os.path.exists(BAD_DIRECTORY): 49 | os.mkdir(BAD_DIRECTORY) 50 | 51 | if not os.path.exists(BAD_FILE): 52 | with open(BAD_FILE, 'w') as f: 53 | f.write('') 54 | 55 | def test_create_new_cassette_library_with_extension(self): 56 | """Verify correct encoder is attached to a file CassetteLibrary.""" 57 | filename = os.path.join(TEMPORARY_RESPONSES_ROOT, 'tmp.json') 58 | lib = CassetteLibrary.create_new_cassette_library(filename, '') 59 | self.assertTrue(isinstance(lib, FileCassetteLibrary)) 60 | self.assertTrue(isinstance(lib.encoder, JsonEncoder)) 61 | 62 | filename = os.path.join(TEMPORARY_RESPONSES_ROOT, 'tmp.yaml') 63 | lib = CassetteLibrary.create_new_cassette_library(filename, '') 64 | self.assertTrue(isinstance(lib, FileCassetteLibrary)) 65 | self.assertTrue(isinstance(lib.encoder, YamlEncoder)) 66 | 67 | def test_create_new_cassette_library_with_directory(self): 68 | """Verify correct encoder is attached to a directory CassetteLibrary.""" 69 | filename = os.path.join(TEMPORARY_RESPONSES_ROOT, 'tmp') 70 | lib = CassetteLibrary.create_new_cassette_library(filename, '') 71 | self.assertTrue(isinstance(lib, DirectoryCassetteLibrary)) 72 | self.assertTrue(isinstance(lib.encoder, JsonEncoder)) 73 | 74 | def test_create_new_cassette_library_with_extension_and_file_type(self): 75 | """Verify correct encoder is attached with encoder override. 76 | 77 | Specifying file format takes precedent over the file extension. 78 | """ 79 | # Manual enforcement of encoding overrides file type 80 | filename = os.path.join(TEMPORARY_RESPONSES_ROOT, 'tmp.json') 81 | lib = CassetteLibrary.create_new_cassette_library(filename, 'yaml') 82 | self.assertTrue(isinstance(lib, FileCassetteLibrary)) 83 | self.assertTrue(isinstance(lib.encoder, YamlEncoder)) 84 | 85 | # Manual enforcement of encoding overrides file type 86 | filename = os.path.join(TEMPORARY_RESPONSES_ROOT, 'tmp.yaml') 87 | lib = CassetteLibrary.create_new_cassette_library(filename, 'json') 88 | self.assertTrue(isinstance(lib, FileCassetteLibrary)) 89 | self.assertTrue(isinstance(lib.encoder, JsonEncoder)) 90 | 91 | def test_create_new_cassette_library_errors(self): 92 | """Verify correct errors are raised.""" 93 | self.create_bad_files() 94 | 95 | # Check to see that proper error handling is occuring for malformed file 96 | with self.assertRaises(IOError): 97 | CassetteLibrary.create_new_cassette_library(BAD_FILE, '') 98 | 99 | with self.assertRaises(IOError): 100 | CassetteLibrary.create_new_cassette_library(BAD_DIRECTORY, '') 101 | 102 | # Check to see if unsupported encoding raises error 103 | with self.assertRaises(KeyError): 104 | CassetteLibrary.create_new_cassette_library(BAD_DIRECTORY, 'derp') 105 | -------------------------------------------------------------------------------- /cassette/tests/test_cassette_performance.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import urllib2 4 | from datetime import datetime, timedelta 5 | from unittest import skip 6 | 7 | import cassette 8 | from cassette.tests.base import TestCase 9 | 10 | TEST_URL = "http://127.0.0.1:5000/non-ascii-content" 11 | CASSETTE_FILE = './cassette/tests/data/performance.tmp' 12 | CASSETTE_DIRECTORY = './cassette/tests/data/performancedir/' 13 | 14 | 15 | @skip('Skipping performance tests') 16 | class TestCassettePerformanceSingleFile(TestCase): 17 | """Benchmark performance of a single file cassette.""" 18 | 19 | def setUp(self): 20 | self.filename = CASSETTE_FILE 21 | 22 | if 
os.path.exists(self.filename): 23 | os.remove(self.filename) 24 | 25 | def tearDown(self): 26 | # Tear down for every test case 27 | if os.path.exists(self.filename): 28 | os.remove(self.filename) 29 | 30 | def generate_large_cassette_yaml(self): 31 | """Generate a large set of responses and store in YAML.""" 32 | # Record every next request 33 | cassette.insert(self.filename) 34 | 35 | # Create 100 requests to load in 36 | for i in range(0, 100): 37 | url = '%s?%s' % (TEST_URL, i) 38 | urllib2.urlopen(url).read() 39 | 40 | # Write out to files 41 | cassette.eject() 42 | 43 | def generate_large_cassette_json(self): 44 | """Generate a large set of responses and store in JSON.""" 45 | # Record every next request 46 | cassette.insert(self.filename, file_format='json') 47 | 48 | # Create 100 requests to load in 49 | for i in range(0, 100): 50 | url = '%s?%s' % (TEST_URL, i) 51 | urllib2.urlopen(url).read() 52 | 53 | # Write out to files 54 | cassette.eject() 55 | 56 | def test_generate_speed_yaml(self): 57 | """Verify YAML generation of large response set takes under 2 secs.""" 58 | # Record how long it takes for the generation to take place 59 | start_time = datetime.now() 60 | self.generate_large_cassette_yaml() 61 | stop_time = datetime.now() 62 | 63 | # Verify the file generates in under 2 seconds 64 | two_seconds = timedelta(seconds=2) 65 | self.assertLess(stop_time - start_time, two_seconds) 66 | 67 | def fetch_frequent_cassette(self): 68 | """Make repeated fetches of the same url with YAML file storage.""" 69 | # 100 times in a row 70 | for i in range(0, 100): 71 | # Open cassette 72 | cassette.insert(self.filename, file_format='yaml') 73 | 74 | # Make a few requests 75 | for j in range(0, 5): 76 | url = '%s?%s' % (TEST_URL, j) 77 | urllib2.urlopen(url).read() 78 | 79 | # Close cassette 80 | cassette.eject() 81 | 82 | def fetch_frequent_cassette_json(self): 83 | """Make repeated fetches of the same url in JSON file storage.""" 84 | # 100 times in a row 85 | for i in range(0, 100): 86 | # Open cassette 87 | cassette.insert(self.filename, file_format='json') 88 | 89 | # Make a few requests 90 | for j in range(0, 5): 91 | url = '%s?%s' % (TEST_URL, j) 92 | urllib2.urlopen(url).read() 93 | 94 | # Close cassette 95 | cassette.eject() 96 | 97 | def test_fetch_speed_yaml(self): 98 | """Verify fetching repeated files in YAML takes under 2 secs.""" 99 | # Guarantee there is a large cassette to test against 100 | if not os.path.exists(self.filename): 101 | self.generate_large_cassette_yaml() 102 | 103 | # Record how long it takes to fetch from a file frequently 104 | start_time = datetime.now() 105 | self.fetch_frequent_cassette() 106 | stop_time = datetime.now() 107 | 108 | # Verify the frequent fetches can run in under 2 seconds 109 | two_seconds = timedelta(seconds=2) 110 | self.assertLess(stop_time - start_time, two_seconds) 111 | 112 | def test_generate_speed_json(self): 113 | """Verify JSON generation of large response set takes under 2 secs.""" 114 | # Record how long it takes for the generation to take place 115 | start_time = datetime.now() 116 | self.generate_large_cassette_json() 117 | stop_time = datetime.now() 118 | 119 | # Verify the file generates in under 2 seconds 120 | two_seconds = timedelta(seconds=2) 121 | self.assertLess(stop_time - start_time, two_seconds) 122 | 123 | def test_fetch_speed_json(self): 124 | """Verify fetching repeated files in JSON takes under 2 secs.""" 125 | # Guarantee there is a large cassette to test against 126 | if not os.path.exists(self.filename): 
127 | self.generate_large_cassette_json() 128 | 129 | # Record how long it takes to fetch from a file frequently 130 | start_time = datetime.now() 131 | self.fetch_frequent_cassette_json() 132 | stop_time = datetime.now() 133 | 134 | # Verify the frequent fetches can run in under 2 seconds 135 | two_seconds = timedelta(seconds=2) 136 | self.assertLess(stop_time - start_time, two_seconds) 137 | 138 | 139 | class TestCassettePerformanceDirectory(TestCassettePerformanceSingleFile): 140 | """Perform the same tests but with a cassette backed by a directory.""" 141 | 142 | def setUp(self): 143 | self.filename = CASSETTE_DIRECTORY 144 | 145 | if os.path.exists(self.filename) and os.path.isdir(self.filename): 146 | shutil.rmtree(self.filename) 147 | 148 | os.mkdir(self.filename) 149 | 150 | def tearDown(self): 151 | # Tear down for every test case 152 | if os.path.exists(self.filename) and os.path.isdir(self.filename): 153 | shutil.rmtree(self.filename) 154 | -------------------------------------------------------------------------------- /cassette/tests/test_cassette_utils.py: -------------------------------------------------------------------------------- 1 | from cassette.tests.base import TestCase 2 | from cassette.utils import Encoder, JsonEncoder, YamlEncoder 3 | 4 | TEST_DATA = { 5 | 'binary_data': '\x89\x70\x00', 6 | 'normal_str': 'ABCDEFG', 7 | 'some_list': [1, 2, 3, 4], 8 | 'deep_list': [ 9 | {'a': 3}, 10 | {'b': 4}, 11 | {'c': 6}, 12 | {'d': 7}, 13 | ] 14 | } 15 | 16 | 17 | class TestEncoder(TestCase): 18 | 19 | def test_implementation_errors(self): 20 | """Verify that we cannot call dump/load since they are yet not implemented.""" 21 | with self.assertRaises(NotImplementedError): 22 | Encoder().dump({}) 23 | 24 | with self.assertRaises(NotImplementedError): 25 | Encoder().load('') 26 | 27 | 28 | class CommonEncoderTest(object): 29 | """Common test cases shared by TestJsonEncoder and TestYamlEncoder.""" 30 | 31 | def test_dump_and_load(self): 32 | """Verify dump and load are working. 
Used for implemented encoders.""" 33 | data = self.encoder.load(self.encoder.dump(TEST_DATA)) 34 | 35 | for key in TEST_DATA.keys(): 36 | self.assertEqual(data[key], TEST_DATA[key]) 37 | 38 | 39 | class TestJsonEncoder(TestCase, CommonEncoderTest): 40 | """Verify that the JSON dump/load is working.""" 41 | 42 | def setUp(self): 43 | self.encoder = JsonEncoder() 44 | 45 | 46 | class TestYamlEncoder(TestCase, CommonEncoderTest): 47 | """Verify that the YAML dump/load is working.""" 48 | 49 | def setUp(self): 50 | self.encoder = YamlEncoder() 51 | -------------------------------------------------------------------------------- /cassette/tests/use_cases/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uber/cassette/475b02010f05a183d01abe8034fb320fd0e2f635/cassette/tests/use_cases/__init__.py -------------------------------------------------------------------------------- /cassette/tests/use_cases/test_report_unused.py: -------------------------------------------------------------------------------- 1 | import urllib2 2 | from cStringIO import StringIO 3 | 4 | from cassette.player import Player 5 | 6 | 7 | def test_report_unused(): 8 | config = {'log_cassette_used': True} 9 | player = Player('./cassette/tests/data/requests/', config=config) 10 | with player.play(): 11 | urllib2.urlopen('http://httpbin.org/get') 12 | 13 | content = StringIO() 14 | player.report_unused_cassettes(content) 15 | content.seek(0) 16 | expected = ('httplib_GET_httpbin.org_80__unused.json\n' 17 | 'httplib_GET_httpbin.org_80__unused2.json') 18 | assert content.read() == expected 19 | -------------------------------------------------------------------------------- /cassette/unpatched.py: -------------------------------------------------------------------------------- 1 | # This rather useless module is needed to prevent cyclical imports 2 | from __future__ import absolute_import 3 | 4 | import contextlib 5 | 6 | 7 | @contextlib.contextmanager 8 | def unpatched_httplib_context(cassette_library): 9 | """Create a context in which httplib is unpatched.""" 10 | 11 | from cassette.patcher import patch, unpatch 12 | 13 | unpatch() 14 | yield 15 | patch(cassette_library) 16 | -------------------------------------------------------------------------------- /cassette/utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | utils.py 3 | 4 | Helper functions. 5 | """ 6 | import json 7 | 8 | import yaml 9 | 10 | TEXT_ENCODING = 'ISO-8859-1' 11 | 12 | 13 | class Encoder(object): 14 | """Abstract class for an encoder consumed by cassette.""" 15 | 16 | # Used for matching filenames that correspond to the encoder 17 | file_ext = '.file' 18 | 19 | @staticmethod 20 | def is_supported_format(file_format): 21 | """Return whether the file format is supported. 22 | 23 | :param str file_format: 24 | """ 25 | return file_format in SUPPORTED_FORMATS.keys() or file_format == '' 26 | 27 | @staticmethod 28 | def get_encoder_from_file_format(file_format): 29 | """Return the correct encoder that corresponds to the file format. 30 | 31 | :param str file_format: 32 | """ 33 | return SUPPORTED_FORMATS.get( 34 | file_format.lower(), DEFAULT_COMPATIBLE_ENCODER) 35 | 36 | @staticmethod 37 | def get_encoder_from_extension(extension): 38 | """Return the correct encoder that corresponds to the file extension. 39 | 40 | :param str extension: 41 | """ 42 | if not extension: 43 | # It's a dir. 
44 | return DEFAULT_ENCODER 45 | 46 | file_format = extension.replace('.', '') 47 | return Encoder.get_encoder_from_file_format(file_format) 48 | 49 | def dump(self, data): 50 | """Abstract method for dumping objects into an encoded form.""" 51 | raise NotImplementedError('Encoder not implemented.') 52 | 53 | def load(self, encoded_str): 54 | """Abstract method for loading an encoded string into objects.""" 55 | raise NotImplementedError('Encoder not implemented.') 56 | 57 | 58 | class JsonEncoder(Encoder): 59 | """JSON encoder for storing HTTP responses in plain text.""" 60 | 61 | file_ext = '.json' 62 | 63 | def dump(self, data): 64 | """Return a JSON encoded string of the data.""" 65 | return json.dumps(data, indent=4, ensure_ascii=False) 66 | 67 | def load(self, encoded_str): 68 | """Return an object from the encoded JSON string.""" 69 | return json.loads(encoded_str, TEXT_ENCODING, 70 | object_hook=JsonEncoder.json_str_decode_dict) 71 | 72 | @staticmethod 73 | def json_str_decode_list(data): 74 | """Decode the list portion of a JSON blob as a string.""" 75 | 76 | rv = [] 77 | for item in data: 78 | if isinstance(item, unicode): 79 | item = item.encode(TEXT_ENCODING) 80 | elif isinstance(item, list): 81 | item = JsonEncoder.json_str_decode_list(item) 82 | elif isinstance(item, dict): 83 | item = JsonEncoder.json_str_decode_dict(item) 84 | rv.append(item) 85 | return rv 86 | 87 | @staticmethod 88 | def json_str_decode_dict(data): 89 | """Decode the dictionary portion of a JSON blob as a string. 90 | 91 | This helper function is necessary to decode the data as an ASCII string 92 | instead of a unicode string, which is required in order to be consumed 93 | by the mock HTTP response. 94 | """ 95 | # Original code is from stackoverflow: 96 | # http://stackoverflow.com/questions/956867/how-to-get-string-objects-i 97 | # nstead-of-unicode-ones-from-json-in-python 98 | 99 | rv = {} 100 | for key, value in data.iteritems(): 101 | if isinstance(key, unicode): 102 | key = key.encode(TEXT_ENCODING) 103 | if isinstance(value, unicode): 104 | value = value.encode(TEXT_ENCODING) 105 | elif isinstance(value, list): 106 | value = JsonEncoder.json_str_decode_list(value) 107 | elif isinstance(value, dict): 108 | value = JsonEncoder.json_str_decode_dict(value) 109 | rv[key] = value 110 | return rv 111 | 112 | 113 | class YamlEncoder(Encoder): 114 | """YAML encoder for storing HTTP responses in plain text.""" 115 | 116 | file_ext = '.yaml' 117 | 118 | def dump(self, data): 119 | """Return a YAML encoded string of the data.""" 120 | return yaml.dump(data) 121 | 122 | def load(self, encoded_str): 123 | """Return an object from the encoded YAML string.""" 124 | return yaml.load(encoded_str) 125 | 126 | 127 | SUPPORTED_FORMATS = { 128 | 'json': JsonEncoder(), 129 | 'yaml': YamlEncoder() 130 | } 131 | 132 | DEFAULT_COMPATIBLE_ENCODER = SUPPORTED_FORMATS['yaml'] 133 | DEFAULT_ENCODER = SUPPORTED_FORMATS['json'] 134 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. 
Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Cassette.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Cassette.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Cassette" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Cassette" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. 
The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | API 2 | === 3 | 4 | cassette module 5 | --------------- 6 | 7 | .. automodule:: cassette 8 | :members: 9 | 10 | cassette_library module 11 | ----------------------- 12 | 13 | .. automodule:: cassette.cassette_library 14 | :members: 15 | 16 | patcher module 17 | -------------- 18 | 19 | .. automodule:: cassette.patcher 20 | :members: 21 | 22 | unpatched module 23 | ---------------- 24 | 25 | .. automodule:: cassette.unpatched 26 | :members: 27 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CHANGES.rst 2 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # flake8: noqa 3 | # 4 | # Cassette documentation build configuration file, created by 5 | # sphinx-quickstart on Thu Apr 11 10:52:01 2013. 6 | # 7 | # This file is execfile()d with the current directory set to its containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys, os 16 | 17 | # If extensions (or modules to document with autodoc) are in another directory, 18 | # add these directories to sys.path here. If the directory is relative to the 19 | # documentation root, use os.path.abspath to make it absolute, like shown here. 20 | #sys.path.insert(0, os.path.abspath('.')) 21 | 22 | # -- General configuration ----------------------------------------------------- 23 | 24 | # If your documentation needs a minimal Sphinx version, state it here. 25 | #needs_sphinx = '1.0' 26 | 27 | # Add any Sphinx extension module names here, as strings. They can be extensions 28 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 29 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.viewcode'] 30 | 31 | # Add any paths that contain templates here, relative to this directory. 32 | templates_path = ['_templates'] 33 | 34 | # The suffix of source filenames. 35 | source_suffix = '.rst' 36 | 37 | # The encoding of source files. 38 | #source_encoding = 'utf-8-sig' 39 | 40 | # The master toctree document. 41 | master_doc = 'index' 42 | 43 | # General information about the project. 44 | project = u'Cassette' 45 | copyright = u'2013, Charles-Axel Dein' 46 | 47 | # The version info for the project you're documenting, acts as replacement for 48 | # |version| and |release|, also used in various other places throughout the 49 | # built documents. 50 | import pkg_resources 51 | try: 52 | release = pkg_resources.get_distribution('cassette').version 53 | except pkg_resources.DistributionNotFound: 54 | print "Distribution information not found. Run 'setup.py develop'" 55 | sys.exit(1) 56 | del pkg_resources 57 | version = '.'.join(release.split('.')[:2]) 58 | 59 | # The language for content autogenerated by Sphinx. 
Refer to documentation 60 | # for a list of supported languages. 61 | #language = None 62 | 63 | # There are two options for replacing |today|: either, you set today to some 64 | # non-false value, then it is used: 65 | #today = '' 66 | # Else, today_fmt is used as the format for a strftime call. 67 | #today_fmt = '%B %d, %Y' 68 | 69 | # List of patterns, relative to source directory, that match files and 70 | # directories to ignore when looking for source files. 71 | exclude_patterns = ['_build'] 72 | 73 | # The reST default role (used for this markup: `text`) to use for all documents. 74 | #default_role = None 75 | 76 | # If true, '()' will be appended to :func: etc. cross-reference text. 77 | #add_function_parentheses = True 78 | 79 | # If true, the current module name will be prepended to all description 80 | # unit titles (such as .. function::). 81 | #add_module_names = True 82 | 83 | # If true, sectionauthor and moduleauthor directives will be shown in the 84 | # output. They are ignored by default. 85 | #show_authors = False 86 | 87 | # The name of the Pygments (syntax highlighting) style to use. 88 | pygments_style = 'sphinx' 89 | 90 | # A list of ignored prefixes for module index sorting. 91 | #modindex_common_prefix = [] 92 | 93 | # If true, keep warnings as "system message" paragraphs in the built documents. 94 | #keep_warnings = False 95 | 96 | 97 | # -- Options for HTML output --------------------------------------------------- 98 | 99 | # The theme to use for HTML and HTML Help pages. See the documentation for 100 | # a list of builtin themes. 101 | html_theme = 'default' 102 | 103 | # Theme options are theme-specific and customize the look and feel of a theme 104 | # further. For a list of options available for each theme, see the 105 | # documentation. 106 | #html_theme_options = {} 107 | 108 | # Add any paths that contain custom themes here, relative to this directory. 109 | #html_theme_path = [] 110 | 111 | # The name for this set of Sphinx documents. If None, it defaults to 112 | # " v documentation". 113 | #html_title = None 114 | 115 | # A shorter title for the navigation bar. Default is the same as html_title. 116 | #html_short_title = None 117 | 118 | # The name of an image file (relative to this directory) to place at the top 119 | # of the sidebar. 120 | #html_logo = None 121 | 122 | # The name of an image file (within the static path) to use as favicon of the 123 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 124 | # pixels large. 125 | #html_favicon = None 126 | 127 | # Add any paths that contain custom static files (such as style sheets) here, 128 | # relative to this directory. They are copied after the builtin static files, 129 | # so a file named "default.css" will overwrite the builtin "default.css". 130 | html_static_path = ['_static'] 131 | 132 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 133 | # using the given strftime format. 134 | #html_last_updated_fmt = '%b %d, %Y' 135 | 136 | # If true, SmartyPants will be used to convert quotes and dashes to 137 | # typographically correct entities. 138 | #html_use_smartypants = True 139 | 140 | # Custom sidebar templates, maps document names to template names. 141 | #html_sidebars = {} 142 | 143 | # Additional templates that should be rendered to pages, maps page names to 144 | # template names. 145 | #html_additional_pages = {} 146 | 147 | # If false, no module index is generated. 
148 | #html_domain_indices = True 149 | 150 | # If false, no index is generated. 151 | #html_use_index = True 152 | 153 | # If true, the index is split into individual pages for each letter. 154 | #html_split_index = False 155 | 156 | # If true, links to the reST sources are added to the pages. 157 | #html_show_sourcelink = True 158 | 159 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 160 | #html_show_sphinx = True 161 | 162 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 163 | #html_show_copyright = True 164 | 165 | # If true, an OpenSearch description file will be output, and all pages will 166 | # contain a tag referring to it. The value of this option must be the 167 | # base URL from which the finished HTML is served. 168 | #html_use_opensearch = '' 169 | 170 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 171 | #html_file_suffix = None 172 | 173 | # Output file base name for HTML help builder. 174 | htmlhelp_basename = 'Cassettedoc' 175 | 176 | 177 | # -- Options for LaTeX output -------------------------------------------------- 178 | 179 | latex_elements = { 180 | # The paper size ('letterpaper' or 'a4paper'). 181 | #'papersize': 'letterpaper', 182 | 183 | # The font size ('10pt', '11pt' or '12pt'). 184 | #'pointsize': '10pt', 185 | 186 | # Additional stuff for the LaTeX preamble. 187 | #'preamble': '', 188 | } 189 | 190 | # Grouping the document tree into LaTeX files. List of tuples 191 | # (source start file, target name, title, author, documentclass [howto/manual]). 192 | latex_documents = [ 193 | ('index', 'Cassette.tex', u'Cassette Documentation', 194 | u'Charles-Axel Dein', 'manual'), 195 | ] 196 | 197 | # The name of an image file (relative to this directory) to place at the top of 198 | # the title page. 199 | #latex_logo = None 200 | 201 | # For "manual" documents, if this is true, then toplevel headings are parts, 202 | # not chapters. 203 | #latex_use_parts = False 204 | 205 | # If true, show page references after internal links. 206 | #latex_show_pagerefs = False 207 | 208 | # If true, show URL addresses after external links. 209 | #latex_show_urls = False 210 | 211 | # Documents to append as an appendix to all manuals. 212 | #latex_appendices = [] 213 | 214 | # If false, no module index is generated. 215 | #latex_domain_indices = True 216 | 217 | 218 | # -- Options for manual page output -------------------------------------------- 219 | 220 | # One entry per manual page. List of tuples 221 | # (source start file, name, description, authors, manual section). 222 | man_pages = [ 223 | ('index', 'cassette', u'Cassette Documentation', 224 | [u'Charles-Axel Dein'], 1) 225 | ] 226 | 227 | # If true, show URL addresses after external links. 228 | #man_show_urls = False 229 | 230 | 231 | # -- Options for Texinfo output ------------------------------------------------ 232 | 233 | # Grouping the document tree into Texinfo files. List of tuples 234 | # (source start file, target name, title, author, 235 | # dir menu entry, description, category) 236 | texinfo_documents = [ 237 | ('index', 'Cassette', u'Cassette Documentation', 238 | u'Charles-Axel Dein', 'Cassette', 'One line description of project.', 239 | 'Miscellaneous'), 240 | ] 241 | 242 | # Documents to append as an appendix to all manuals. 243 | #texinfo_appendices = [] 244 | 245 | # If false, no module index is generated. 246 | #texinfo_domain_indices = True 247 | 248 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 
249 | #texinfo_show_urls = 'footnote' 250 | 251 | # If true, do not generate a @detailmenu in the "Top" node's menu. 252 | #texinfo_no_detailmenu = False 253 | 254 | 255 | # -- Options for Epub output --------------------------------------------------- 256 | 257 | # Bibliographic Dublin Core info. 258 | epub_title = u'Cassette' 259 | epub_author = u'Charles-Axel Dein' 260 | epub_publisher = u'Charles-Axel Dein' 261 | epub_copyright = u'2013, Charles-Axel Dein' 262 | 263 | # The language of the text. It defaults to the language option 264 | # or en if the language is not set. 265 | #epub_language = '' 266 | 267 | # The scheme of the identifier. Typical schemes are ISBN or URL. 268 | #epub_scheme = '' 269 | 270 | # The unique identifier of the text. This can be a ISBN number 271 | # or the project homepage. 272 | #epub_identifier = '' 273 | 274 | # A unique identification for the text. 275 | #epub_uid = '' 276 | 277 | # A tuple containing the cover image and cover page html template filenames. 278 | #epub_cover = () 279 | 280 | # A sequence of (type, uri, title) tuples for the guide element of content.opf. 281 | #epub_guide = () 282 | 283 | # HTML files that should be inserted before the pages created by sphinx. 284 | # The format is a list of tuples containing the path and title. 285 | #epub_pre_files = [] 286 | 287 | # HTML files shat should be inserted after the pages created by sphinx. 288 | # The format is a list of tuples containing the path and title. 289 | #epub_post_files = [] 290 | 291 | # A list of files that should not be packed into the epub file. 292 | #epub_exclude_files = [] 293 | 294 | # The depth of the table of contents in toc.ncx. 295 | #epub_tocdepth = 3 296 | 297 | # Allow duplicate toc entries. 298 | #epub_tocdup = True 299 | 300 | # Fix unsupported image types using the PIL. 301 | #epub_fix_images = False 302 | 303 | # Scale large images. 304 | #epub_max_image_width = 0 305 | 306 | # If 'no', URL addresses will not be shown. 307 | #epub_show_urls = 'inline' 308 | 309 | # If false, no index is generated. 310 | #epub_use_index = True 311 | -------------------------------------------------------------------------------- /docs/development.rst: -------------------------------------------------------------------------------- 1 | Development 2 | =========== 3 | 4 | Running cassette tests 5 | ---------------------- 6 | 7 | The quick and dirty way to run tests is through setup.py: 8 | 9 | :: 10 | 11 | $ python setup.py test 12 | 13 | For more involved development, you can create a virtual environment, install 14 | fabric (``pip install fabric``) then install requirements: 15 | 16 | :: 17 | 18 | $ fab bootstrap 19 | 20 | Start the test server and run tests: 21 | 22 | :: 23 | 24 | $ fab test 25 | 26 | Tests spin up a test server to bounce requests off of, but you can run this 27 | server manually: 28 | 29 | :: 30 | 31 | $ fab serve_test_server 32 | 33 | You will see an error about the test server's address being in use, but this is 34 | harmless. 
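If you are iterating on a single module, you can also point py.test at just that file rather than running the whole suite. This is a minimal example (it assumes the test server above is already running, so that live requests can be recorded on the first pass):

::

    $ py.test cassette/tests/test_cassette.py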
35 | -------------------------------------------------------------------------------- /docs/foreword.rst: -------------------------------------------------------------------------------- 1 | Foreword 2 | ======== 3 | 4 | Similar libraries 5 | ----------------- 6 | 7 | Cassette took a lot inspiration from the following packages: 8 | 9 | - `vcrpy `__: HTTP recording and playback library 10 | - `vcr `__: in Ruby 11 | 12 | Limitations 13 | ----------- 14 | 15 | Cassette should be considered **alpha**: 16 | 17 | - Only tested with ``urllib2`` and ``httplib`` 18 | - Does not work with ``urllib`` and ``requests`` 19 | - The format used is not compatible with ``vcr`` or ``vcrpy`` 20 | - Only tested with Python 2.7 21 | - File format **WILL** change. 22 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. Cassette documentation master file, created by 2 | sphinx-quickstart on Thu Apr 11 10:52:01 2013. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Cassette's documentation! 7 | ==================================== 8 | 9 | Cassette stores and replays HTTP requests made in your Python app. 10 | 11 | Latest documentation: `cassette.readthedocs.org/en/latest/ `_ 12 | 13 | User's Guide 14 | ------------ 15 | 16 | .. toctree:: 17 | :maxdepth: 2 18 | 19 | foreword 20 | quickstart 21 | usage 22 | 23 | API Reference 24 | ------------- 25 | 26 | .. toctree:: 27 | :maxdepth: 2 28 | 29 | api 30 | 31 | Additional Notes 32 | ---------------- 33 | 34 | .. toctree:: 35 | :maxdepth: 2 36 | 37 | development 38 | changelog 39 | license 40 | 41 | 42 | Indices and tables 43 | ================== 44 | 45 | * :ref:`genindex` 46 | * :ref:`modindex` 47 | * :ref:`search` 48 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | License 2 | ======= 3 | 4 | Cassette is licensed under a three clause BSD License. 5 | 6 | The full license text can be found below (:ref:`cassette-license`). 7 | 8 | .. _authors: 9 | 10 | Authors 11 | ------- 12 | 13 | .. include:: ../AUTHORS.txt 14 | 15 | 16 | .. _cassette-license: 17 | 18 | Cassette License 19 | ---------------- 20 | 21 | .. include:: ../LICENSE.txt 22 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. 
qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Cassette.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Cassette.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 
129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
239 | goto end
240 | )
241 | 
242 | :end
243 | 
--------------------------------------------------------------------------------
/docs/quickstart.rst:
--------------------------------------------------------------------------------
1 | Quickstart
2 | ==========
3 | 
4 | * The first time you run your tests, ``cassette`` will store all the
5 |   external requests' responses in a YAML file.
6 | * The next time you run your tests, ``cassette`` will fetch those responses
7 |   from the YAML file. You can then run your tests while offline.
8 | 
9 | .. code:: python
10 | 
11 |     import urllib2
12 | 
13 |     import cassette
14 | 
15 |     with cassette.play("data/responses.yaml"):
16 | 
17 |         # If the request is not already stored in responses.yaml, cassette
18 |         # will request the URL and store its response in the file.
19 |         r = urllib2.urlopen("http://www.internic.net/domain/named.root")
20 | 
21 |         # This time, the response is already in the file. The external
22 |         # request is not made; cassette retrieves the response from the
23 |         # file.
24 |         r = urllib2.urlopen("http://www.internic.net/domain/named.root")
25 | 
26 |     assert "A.ROOT-SERVERS.NET" in r.read(10000)
27 | 
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 | Usage
2 | =====
3 | 
4 | cassette provides a ``play`` context manager:
5 | 
6 | .. code:: python
7 | 
8 |     import cassette
9 | 
10 |     with cassette.play("./data/responses.yaml"):
11 |         urllib2.urlopen("http://...")
12 | 
13 | You can also set up the context manually:
14 | 
15 | .. code:: python
16 | 
17 |     import cassette
18 | 
19 |     cassette.insert("./data/responses.yaml")
20 |     urllib2.urlopen("http://...")
21 |     cassette.eject()
22 | 
23 | Storage backend
24 | ---------------
25 | 
26 | .. versionadded:: 0.3.1
27 |    Ability to read from a directory.
28 | 
29 | .. versionadded:: 0.3.1
30 |    Ability to read from JSON files.
31 | 
32 | cassette supports multiple storage backends:
33 | 
34 | * File-based (all the requests and responses are in the same file)
35 | * Directory-based (each request/response pair is in its own file)
36 | 
37 | Two formats are supported: JSON (faster) and YAML.
38 | 
39 | To read from a directory, just provide the path:
40 | 
41 | .. code:: python
42 | 
43 |     cassette.insert("./data/", file_format="json")
44 | 
45 | Report which cassettes are not used
46 | -----------------------------------
47 | 
48 | .. versionadded:: 0.3.7
49 |    Ability to report which cassettes are not used.
50 | 
51 | Here's a way to do it:
52 | 
53 | .. literalinclude:: ../cassette/tests/use_cases/test_report_unused.py
54 | 
55 | Here's how you would use a teardown with pytest to log those unused cassettes
56 | to a file:
57 | 
58 | .. code:: python
59 | 
60 |     @pytest.fixture(scope="session", autouse=True)
61 |     def report_unused_cassette(request):
62 |         """Report unused cassettes."""
63 |         def func():
64 |             with open('.unused_cassette.log', 'w') as f:
65 |                 cassette_player.report_unused_cassettes(f)
66 |         request.addfinalizer(func)
67 | 
68 | You would then ``cd`` to the directory containing the fixtures, and run::
69 | 
70 |     $ xargs rm < ../../../../.unused_cassette.log
71 | 
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | mock==1.0.1
2 | flake8==2.0
3 | Flask==0.9
4 | sphinx==1.2b1
5 | zest.releaser>=3.52
6 | requests>=2.3.0
7 | 
8 | # Test runner
9 | pytest==2.6.4
10 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | PyYAML>=3.10
2 | semver>=2.0
3 | 
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts = --tb=native --ignore=env --ignore=build --doctest-modules --ignore=runtests.py --ignore=setup.py
3 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from setuptools import setup, find_packages
3 | 
4 | 
5 | def read_long_description(filename="README.rst"):
6 |     with open(filename) as f:
7 |         return f.read().strip()
8 | 
9 | 
10 | def read_requirements(filename="requirements.txt"):
11 |     with open(filename) as f:
12 |         return f.readlines()
13 | 
14 | setup(
15 |     name="cassette",
16 |     version='0.3.8',
17 |     author="Charles-Axel Dein",
18 |     author_email="charles@uber.com",
19 |     url="http://cassette.readthedocs.org/",
20 |     license="MIT",
21 |     packages=find_packages(),
22 |     keywords=["http", "tests", "mock"],
23 |     description="Cassette stores and replays HTTP requests.",
24 |     long_description=read_long_description(),
25 |     install_requires=read_requirements(),
26 |     tests_require=[
27 |         'pytest',
28 |         'mock',
29 |         'flask',
30 |     ],
31 |     zip_safe=False,
32 |     classifiers=[
33 |         "Development Status :: 3 - Alpha",
34 |         "Environment :: Web Environment",
35 |         "Programming Language :: Python",
36 |         "Programming Language :: Python :: 2.7",
37 |         "Intended Audience :: Developers",
38 |         "License :: OSI Approved :: MIT License",
39 |         "Operating System :: OS Independent",
40 |         "Topic :: Internet :: WWW/HTTP",
41 |         "Topic :: Software Development :: Testing",
42 |         "Topic :: Software Development :: Libraries :: Python Modules"
43 |     ]
44 | )
45 | 
--------------------------------------------------------------------------------
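
As a closing illustration, here is a minimal end-to-end sketch combining the pieces documented above: the directory-based JSON backend with the manual ``insert``/``eject`` API. It is a sketch, not repository code: the ``./data/`` directory and the URL are placeholders reused from the examples above, and it assumes the directory backend records new responses as well as replaying them (the usage section above only demonstrates reading from a directory).

.. code:: python

    import urllib2

    import cassette

    # Directory-based backend: one JSON file per request/response under ./data/.
    cassette.insert("./data/", file_format="json")
    try:
        # If the response is not yet in the cassette, the request is made and
        # stored; on later runs it is replayed from disk without network access.
        r = urllib2.urlopen("http://www.internic.net/domain/named.root")
        body = r.read(10000)
    finally:
        # Always eject so subsequent requests are no longer intercepted.
        cassette.eject()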