├── paisley ├── test │ ├── __init__.py │ ├── test.changes │ ├── test_pjson.py │ ├── test_mapping.py │ ├── test_views.py │ ├── util.py │ ├── test.ini.template │ ├── test_changes.py │ └── test_client.py ├── __init__.py ├── views.py ├── pjson.py ├── changes.py ├── client.py └── mapping.py ├── .gitignore ├── Makefile ├── setup.py ├── README.md └── scripts ├── paisley_bench.py ├── show-coverage.py ├── paisley_example.py └── pep8.py /paisley/test/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | build 3 | _trial_temp 4 | PKG-INFO -------------------------------------------------------------------------------- /paisley/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2007-2008 5 | # See LICENSE for details. 
6 | 7 | from client import CouchDB 8 | -------------------------------------------------------------------------------- /paisley/test/test.changes: -------------------------------------------------------------------------------- 1 | {"seq":3934,"id":"cc4fadc922f11ffb5e358d5da2760de2","changes":[{"rev":"1-1e379f46917bc2fc9b9562a58afde75a"}]} 2 | {"changes": [{"rev": "12-7bfdb7016aa8aa0dd0279d3324b524d1"}],"id": "_design/couchdb", "seq": 5823} 3 | {"last_seq":3934} 4 | {"deleted": true,"changes": [{"rev": "2-5e8bd6dae4307ca6f8fcf8afa53e6bc4"}],"id": "27e74762ad0e64d4094f6feea800a826", "seq": 34} 5 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | trial: 2 | PYTHONPATH=`pwd` trial paisley.test 3 | 4 | coverage: paisley 5 | PYTHONPATH=`pwd` trial --coverage paisley.test 6 | find _trial_temp/coverage -name 'paisley.*.cover' | grep -v paisley.test | grep -v paisley.mapping | xargs scripts/show-coverage.py 7 | 8 | clean: 9 | rm paisley/*.pyc paisley/test/*.pyc 10 | 11 | pep8: 12 | find paisley -name '*.py' | grep -v paisley/mapping.py | xargs scripts/pep8.py --repeat 13 | 14 | check: trial pep8 15 | 16 | testkill: 17 | ps auxw | grep beam | grep paisley | cut -c10-16 | xargs kill 18 | 19 | trialall: 20 | for couchdb in ~/prefix/couchdb/*; do echo $$couchdb; PATH=$$couchdb:$$PATH trial paisley; done 21 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- Mode: Python -*- 3 | # vi:si:et:sw=4:sts=4:ts=4 4 | 5 | # Copyright (c) 2007-2008 6 | # See LICENSE for details. 
7 | 8 | from distutils.core import setup 9 | import setuptools 10 | 11 | 12 | def main(): 13 | setup( 14 | name="paisley", 15 | version="0.3.1", 16 | description=("Paisley is a CouchDB client written in Python to be used " 17 | "within a Twisted application."), 18 | author="Paisley Developers", 19 | author_email="", 20 | license="MIT", 21 | url="http://github.com/smcq/paisley", 22 | download_url="http://github.com/smcq/paisley/zipball/v0.3.1", 23 | packages = setuptools.find_packages (), 24 | package_data = { 25 | '': ['*.template'], 26 | }) 27 | 28 | if __name__ == "__main__": 29 | main() 30 | -------------------------------------------------------------------------------- /paisley/test/test_pjson.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test.test_pjson -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2011 5 | # See LICENSE for details. 6 | 7 | """ 8 | Test for Paisley JSON code. 9 | """ 10 | 11 | from paisley import pjson as json 12 | 13 | # uncomment to run test with non-strict JSON parsing 14 | # json.set_strict(False) 15 | 16 | from twisted.trial import unittest 17 | 18 | 19 | class JSONTestCase(unittest.TestCase): 20 | 21 | def testStrict(self): 22 | self.assertEquals(json.STRICT, True) 23 | 24 | def testStrToUnicode(self): 25 | u = json.loads('"str"') 26 | self.assertEquals(u, u'str') 27 | self.assertEquals(type(u), unicode) 28 | 29 | def testUnicodeToUnicode(self): 30 | u = json.loads(u'"str"') 31 | self.assertEquals(u, u'str') 32 | self.assertEquals(type(u), unicode) 33 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Implements the CouchDB API for twisted. 2 | 3 | ## Release Notes v0.3 4 | 5 | _Enhancements_ 6 | 7 | * Added support for CouchDB 1.1.0. 8 | * Added test cases against real CouchDB instance. 
9 | * Upgraded client interface to reflect CouchDB API changes (count -> limit). The changes occurred in CouchDB 0.8 -> 0.9. 10 | * Tests will run even if the admin party is closed in the system-wide couchdb configuration 11 | * Added CouchDB authentication support (supply username and password args when instantiating) 12 | * Re-factored underlying HTTP transport to use Twisted Web Agent (will allow connection pooling in the future). 13 | * Re-factored code into formal Python package. API compatible with old package layout. 14 | 15 | _Bug Fixes_ 16 | * Fixed attachment handling so it returns the attachment instead of the owning document (dwb) 17 | 18 | ## Release Notes (0.1->0.2) 19 | 20 | * Updated CouchDB support up to version CouchDB 1.0.1 21 | 22 | 23 | ## Known issues 24 | 25 | * Doesn't keep connections alive between requests. 26 | 27 | ## Notes 28 | 29 | This isn't under heavy maintenance by me, I only use a subset of the functionality and wrap the rest away in a non-portable internal library. Please fork and make it better. 30 | 31 | 32 | For David's initial repo, see https://launchpad.net/paisley. David has asked me to make github the official repo since we're actively keeping it up with CouchDB version bumps. 33 | 34 | testing buildbot integration 35 | -------------------------------------------------------------------------------- /paisley/views.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test.test_views -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2007-2008 5 | # See LICENSE for details. 6 | 7 | """ 8 | Object mapping view API. 9 | """ 10 | 11 | 12 | class View(object): 13 | 14 | def __init__(self, couch, dbName, docId, viewId, objectFactory, **options): 15 | """ 16 | objectFactory should implement fromDict, taking a dictionary containing 17 | key and value. 
18 | """ 19 | self._couch = couch 20 | self._dbName = dbName 21 | self._docId = docId 22 | self._viewId = viewId 23 | self._objectFactory = objectFactory 24 | self._options = options 25 | 26 | def __repr__(self): 27 | return "" % ( 28 | self._dbName, self._docId, self._viewId) 29 | 30 | def _mapObjects(self, result, **options): 31 | # result is a dict: 32 | # rows -> dict with id, key, value, [doc?] 33 | # total_rows 34 | # offset 35 | for x in result['rows']: 36 | obj = self._objectFactory() 37 | if options.get('include_docs', False): 38 | obj.fromDict(x['doc']) 39 | self._couch.mapped(self._dbName, x['id'], obj) 40 | else: 41 | obj.fromDict(x) 42 | yield obj 43 | 44 | # how do we know if it is bound already ? 45 | 46 | def queryView(self): 47 | d = self._couch.openView( 48 | self._dbName, 49 | self._docId, 50 | self._viewId, 51 | **self._options) 52 | d.addCallback(self._mapObjects, **self._options) 53 | return d 54 | -------------------------------------------------------------------------------- /paisley/test/test_mapping.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test.test_mapping -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2008 5 | # See LICENSE for details. 6 | 7 | """ 8 | Tests for the object mapping API. 
9 | """ 10 | 11 | from twisted.trial.unittest import TestCase 12 | from paisley import mapping, views 13 | from test_views import StubCouch 14 | 15 | # an object for a view result that includes docs 16 | 17 | 18 | class Tag(mapping.Document): 19 | name = mapping.TextField() 20 | count = mapping.IntegerField() 21 | 22 | def fromDict(self, dictionary): 23 | self._data = dictionary['doc'] 24 | 25 | 26 | class MappingTests(TestCase): 27 | 28 | def setUp(self): 29 | # this StubCouch is different than in test_views; it replies to 30 | # include_docs=true, hence it has an additional key/value pair 31 | # for doc from which the object can be mapped 32 | self.fc = StubCouch(views={'all_tags?include_docs=true': { 33 | 'total_rows': 3, 34 | 'offset': 0, 35 | 'rows': [ 36 | {'key':'foo', 'value':3, 'doc': {'name':'foo', 'count':3}}, 37 | {'key':'bar', 'value':2, 'doc': {'name':'foo', 'count':3}}, 38 | {'key':'baz', 'value':1, 'doc': {'name':'foo', 'count':3}}, 39 | ]}}) 40 | 41 | def test_queryView(self): 42 | """ 43 | Test that a querying a view gives us an iterable of our user defined 44 | objects. 45 | """ 46 | v = views.View(self.fc, None, None, 'all_tags?include_docs=true', Tag) 47 | 48 | def _checkResults(results): 49 | results = list(results) 50 | self.assertEquals(len(results), 3) 51 | 52 | # this used to be not executed because it worked on the empty 53 | # generator; so guard against that 54 | looped = False 55 | for tag in results: 56 | looped = True 57 | self.assertIn(tag.name, ['foo', 'bar', 'baz']) 58 | self.failUnless(looped) 59 | 60 | d = v.queryView() 61 | d.addCallback(_checkResults) 62 | return d 63 | -------------------------------------------------------------------------------- /scripts/paisley_bench.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2007-2008 5 | # See LICENSE for details. 
6 | 7 | 8 | import time 9 | import sys 10 | import numpy 11 | 12 | import paisley 13 | 14 | from twisted.internet import reactor 15 | from twisted.internet.defer import inlineCallbacks, waitForDeferred 16 | 17 | def benchmark(times, timer=time.time, timeStore=None, progressDest=sys.stdout): 18 | def _decorator(f): 19 | def _decorated(*args, **kwargs): 20 | for x in xrange(times): 21 | startTime=timer() 22 | result = yield f(*args, **kwargs) 23 | timeStore.setdefault(f.__name__, []).append(timer()-startTime) 24 | 25 | if x%(times*.10) == 0.0: 26 | progressDest.write('.') 27 | progressDest.flush() 28 | progressDest.write('\n') 29 | 30 | _decorated.__name__ = f.__name__ 31 | 32 | return inlineCallbacks(_decorated) 33 | 34 | return _decorator 35 | 36 | RUN_TIMES = 1000 37 | TIMES = {} 38 | 39 | benchmarkDecorator = benchmark(RUN_TIMES, timeStore=TIMES) 40 | 41 | 42 | @benchmarkDecorator 43 | def bench_saveDoc(server): 44 | d = server.saveDoc('benchmarks', """ 45 | { 46 | "Subject":"I like Planktion", 47 | "Author":"Rusty", 48 | "PostedDate":"2006-08-15T17:30:12-04:00", 49 | "Tags":["plankton", "baseball", "decisions"], 50 | "Body":"I decided today that I don't like baseball. I like plankton." 
51 | } 52 | """) 53 | return d 54 | 55 | 56 | @inlineCallbacks 57 | def run_tests(server): 58 | for bench in [bench_saveDoc]: 59 | print "benchmarking %s" % (bench.__name__,) 60 | result = yield bench(server).addCallback(_printCb) 61 | print " avg: %r" % ( 62 | sum(TIMES[bench.__name__])/len(TIMES[bench.__name__]),) 63 | print " std: %r" % ( 64 | numpy.std(TIMES[bench.__name__]),) 65 | print " min: %r" % ( 66 | min(TIMES[bench.__name__]),) 67 | print " max: %r" % ( 68 | max(TIMES[bench.__name__]),) 69 | print " total: %r" % ( 70 | sum(TIMES[bench.__name__]),) 71 | 72 | 73 | def run(): 74 | s = paisley.CouchDB('localhost') 75 | d = s.createDB('benchmarks') 76 | d.addBoth(_printCb) 77 | d.addCallback(lambda _: run_tests(s)) 78 | 79 | return d 80 | 81 | 82 | def _printCb(msg): 83 | if msg is not None: 84 | print msg 85 | 86 | 87 | if __name__ == '__main__': 88 | def _run(): 89 | d = run() 90 | d.addBoth(_printCb) 91 | d.addBoth(lambda _: reactor.stop()) 92 | 93 | reactor.callWhenRunning(_run) 94 | reactor.run() 95 | -------------------------------------------------------------------------------- /paisley/pjson.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test.test_pjson -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2011 5 | # See LICENSE for details. 6 | 7 | """ 8 | Paisley JSON compatibility code. 9 | 10 | json is the stdlib JSON library. 11 | It has an unfortunate bug in 2.7: http://bugs.python.org/issue10038 12 | where the C-based implementation returns str instead of unicode for text. 13 | 14 | This caused 15 | import json; json.loads('"abc"') 16 | to return 17 | "abc" 18 | instead of 19 | u"abc" 20 | when using the C implementation, but not the python implementation. 21 | 22 | If json is not installed, this falls back to simplejson, which is 23 | also not strict and will return str instead of unicode. 24 | 25 | In that case, STRICT will be set to True. 
26 | """ 27 | 28 | STRICT = True 29 | 30 | 31 | def set_strict(strict=True): 32 | """ 33 | Set strictness of the loads function. 34 | Can be called after importing to change strictness level. 35 | 36 | Recommended to use only at startup. 37 | """ 38 | global loads 39 | loads = _get_loads(strict) 40 | global STRICT 41 | STRICT = strict 42 | 43 | 44 | def _get_loads(strict=STRICT): 45 | if not strict: 46 | try: 47 | from simplejson import loads 48 | except ImportError: 49 | from json import loads 50 | return loads 51 | 52 | # If we don't have json, we can only fall back to simplejson, non-strict 53 | try: 54 | from json import decoder 55 | except ImportError: 56 | global STRICT 57 | STRICT = False 58 | from simplejson import loads 59 | return loads 60 | try: 61 | res = decoder.c_scanstring('"str"', 1) 62 | except TypeError: 63 | # github issue #33: pypy may not have c_scanstring 64 | res = decoder.scanstring('"str"', 1) 65 | 66 | if type(res[0]) is unicode: 67 | from json import loads 68 | return loads 69 | 70 | import json as _myjson 71 | from json import scanner 72 | 73 | class MyJSONDecoder(_myjson.JSONDecoder): 74 | 75 | def __init__(self, *args, **kwargs): 76 | _myjson.JSONDecoder.__init__(self, *args, **kwargs) 77 | 78 | # reset scanner to python-based one using python scanstring 79 | self.parse_string = decoder.py_scanstring 80 | self.scan_once = scanner.py_make_scanner(self) 81 | 82 | def loads(s, *args, **kwargs): 83 | if 'cls' not in kwargs: 84 | kwargs['cls'] = MyJSONDecoder 85 | return _myjson.loads(s, *args, **kwargs) 86 | 87 | return loads 88 | 89 | 90 | def _get_dumps(strict=STRICT): 91 | if not strict: 92 | try: 93 | from simplejson import dumps 94 | except ImportError: 95 | from json import dumps 96 | return dumps 97 | 98 | 99 | try: 100 | from json import dumps 101 | return dumps 102 | except ImportError: 103 | global STRICT 104 | STRICT = False 105 | from simplejson import dumps 106 | return dumps 107 | 108 | dumps = _get_dumps() 109 | loads = 
_get_loads() 110 | -------------------------------------------------------------------------------- /paisley/test/test_views.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test.test_views -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2008 5 | # See LICENSE for details. 6 | 7 | """ 8 | Tests for the object mapping view API. 9 | """ 10 | 11 | from twisted.trial.unittest import TestCase 12 | from twisted.internet.defer import succeed 13 | 14 | from paisley.test import util 15 | 16 | from paisley.views import View 17 | 18 | 19 | class StubCouch(object): 20 | """ 21 | A stub couchdb object that will return preset dictionaries 22 | """ 23 | 24 | def __init__(self, views=None): 25 | self._views = views 26 | 27 | def openView(self, dbName, docId, viewId, **kwargs): 28 | return succeed(self._views[viewId]) 29 | 30 | # an object for a view result not including docs 31 | 32 | 33 | class Tag(object): 34 | 35 | def fromDict(self, dictionary): 36 | self.name = dictionary['key'] 37 | self.count = dictionary['value'] 38 | 39 | ROWS = [ 40 | {'key':'foo', 'value':3}, 41 | {'key':'bar', 'value':2}, 42 | {'key':'baz', 'value':1}, 43 | ] 44 | 45 | 46 | class CommonTestCase: 47 | """ 48 | These tests are executed both against the stub couch and the real couch. 49 | """ 50 | 51 | def test_queryView(self): 52 | """ 53 | Test that querying a view gives us an iterable of our user defined 54 | objects. 
55 | """ 56 | v = View(self.db, 'test', 'design_doc', 'all_tags', Tag) 57 | 58 | def _checkResults(results): 59 | results = list(results) 60 | self.assertEquals(len(results), 3) 61 | 62 | # this used to be not executed because it worked on the empty 63 | # generator; so guard against that 64 | looped = False 65 | for tag in results: 66 | looped = True 67 | self.assertIn({'key': tag.name, 'value': tag.count}, 68 | ROWS) 69 | self.failUnless(looped) 70 | 71 | d = v.queryView() 72 | d.addCallback(_checkResults) 73 | return d 74 | 75 | 76 | class StubViewTests(CommonTestCase, TestCase): 77 | 78 | def setUp(self): 79 | self.db = StubCouch(views={'all_tags': { 80 | 'total_rows': 3, 81 | 'offset': 0, 82 | 'rows': ROWS, 83 | }}) 84 | 85 | 86 | class RealViewTests(CommonTestCase, util.CouchDBTestCase): 87 | 88 | def setUp(self): 89 | util.CouchDBTestCase.setUp(self) 90 | 91 | d = self.db.createDB('test') 92 | 93 | for row in ROWS: 94 | d.addCallback(lambda _, r: self.db.saveDoc('test', r), row) 95 | 96 | 97 | viewmapjs = """ 98 | function(doc) { 99 | emit(doc.key, doc.value); 100 | } 101 | """ 102 | 103 | d.addCallback(lambda _: self.db.saveDoc('test', 104 | { 105 | 'views': { 106 | "all_tags": { 107 | "map": viewmapjs, 108 | }, 109 | }, 110 | }, 111 | '_design/design_doc')) 112 | 113 | return d 114 | -------------------------------------------------------------------------------- /scripts/show-coverage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os 4 | import re 5 | import sys 6 | 7 | 8 | class Presentation: 9 | 10 | def __init__(self, name, lines, covered): 11 | self.name = name 12 | self.lines = lines 13 | self.covered = covered 14 | 15 | if self.covered == 0: 16 | self.percent = 0 17 | else: 18 | self.percent = 100 * self.covered / float(self.lines) 19 | 20 | def show(self, maxlen=20): 21 | format = '%%-%ds %%3d %%%% (%%4d / %%4d)' % maxlen 22 | print format % (self.name, self.percent, 
self.covered, self.lines) 23 | 24 | 25 | class Coverage: 26 | 27 | def __init__(self): 28 | self.files = [] 29 | self.total_lines = 0 30 | self.total_covered = 0 31 | 32 | # The python Trace class prints coverage results by prefixing 33 | # lines that got executed with a couple of spaces, the number 34 | # of times it has been executed and a colon. Uncovered lines 35 | # get prefixed with six angle brackets. Lines like comments 36 | # and blank lines just get indented. 37 | # This regexp will match executed and executable-but-not-covered lines. 38 | self.codeline_matcher = re.compile(r'^(>>>>>>)|(\s*\d+:)') 39 | 40 | def _strip_filename(self, filename): 41 | filename = os.path.basename(filename) 42 | if filename.endswith('.cover'): 43 | filename = filename[:-6] 44 | return filename 45 | 46 | def add_file(self, file): 47 | self.files.append(file) 48 | 49 | def show_results(self): 50 | if not hasattr(self, 'files'): 51 | print 'No coverage data' 52 | return 53 | 54 | self.maxlen = max(map(lambda f: len(self._strip_filename(f)), 55 | self.files)) 56 | print 'Coverage report:' 57 | print '-' * (self.maxlen + 23) 58 | for file in self.files: 59 | self.show_one(file) 60 | print '-' * (self.maxlen + 23) 61 | 62 | p = Presentation('Total', self.total_lines, self.total_covered) 63 | p.show(self.maxlen) 64 | 65 | def show_one(self, filename): 66 | f = open(filename) 67 | # Grab all executables lines 68 | lines = [line for line in f.readlines() 69 | if self.codeline_matcher.match(line)] 70 | 71 | # Find out which of them were not executed 72 | uncovered_lines = [line for line in lines 73 | if line.startswith('>>>>>>')] 74 | if not lines: 75 | return 76 | 77 | filename = self._strip_filename(filename) 78 | 79 | p = Presentation(filename, 80 | len(lines), 81 | len(lines) - len(uncovered_lines)) 82 | p.show(self.maxlen) 83 | 84 | self.total_lines += p.lines 85 | self.total_covered += p.covered 86 | 87 | 88 | def main(args): 89 | c = Coverage() 90 | files = args[1:] 91 | 
files.sort() 92 | for file in files: 93 | if 'flumotion.test' in file: 94 | continue 95 | if '__init__' in file: 96 | continue 97 | c.add_file(file) 98 | 99 | c.show_results() 100 | 101 | if __name__ == '__main__': 102 | sys.exit(main(sys.argv)) 103 | -------------------------------------------------------------------------------- /scripts/paisley_example.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2007-2008 5 | # See LICENSE for details. 6 | 7 | 8 | import paisley 9 | 10 | import sys 11 | from twisted.internet import reactor, defer 12 | from twisted.python import log 13 | from twisted.web import client, error, http 14 | 15 | client.HTTPClientFactory.noisy = False 16 | 17 | 18 | 19 | def test(): 20 | foo = paisley.CouchDB('localhost') 21 | 22 | print "\nCreate database 'mydb':" 23 | d = foo.createDB('mydb') 24 | wfd = defer.waitForDeferred(d) 25 | yield wfd 26 | 27 | try: 28 | print wfd.getResult() 29 | except error.Error, e: 30 | # FIXME: not sure why Error.status is a str compared to http constants 31 | if e.status == str(http.UNAUTHORIZED): 32 | print "\nError: not allowed to create databases" 33 | reactor.stop() 34 | return 35 | else: 36 | raise 37 | 38 | print "\nList databases on server:" 39 | d = foo.listDB() 40 | wfd = defer.waitForDeferred(d) 41 | yield wfd 42 | print wfd.getResult() 43 | 44 | print "\nCreate a document 'mydoc' in database 'mydb':" 45 | doc = """ 46 | { 47 | "value": 48 | { 49 | "Subject":"I like Planktion", 50 | "Author":"Rusty", 51 | "PostedDate":"2006-08-15T17:30:12-04:00", 52 | "Tags":["plankton", "baseball", "decisions"], 53 | "Body":"I decided today that I don't like baseball. I like plankton." 
54 | } 55 | } 56 | """ 57 | d = foo.saveDoc('mydb', doc, 'mydoc') 58 | wfd = defer.waitForDeferred(d) 59 | yield wfd 60 | mydoc = wfd.getResult() 61 | print mydoc 62 | 63 | print "\nCreate a document, using an assigned docId:" 64 | d = foo.saveDoc('mydb', doc) 65 | wfd = defer.waitForDeferred(d) 66 | yield wfd 67 | print wfd.getResult() 68 | 69 | print "\nList all documents in database 'mydb'" 70 | d = foo.listDoc('mydb') 71 | wfd = defer.waitForDeferred(d) 72 | yield wfd 73 | print wfd.getResult() 74 | 75 | print "\nRetrieve document 'mydoc' in database 'mydb':" 76 | d = foo.openDoc('mydb', 'mydoc') 77 | wfd = defer.waitForDeferred(d) 78 | yield wfd 79 | print wfd.getResult() 80 | 81 | print "\nDelete document 'mydoc' in database 'mydb':" 82 | d = foo.deleteDoc('mydb', 'mydoc', mydoc['rev']) 83 | wfd = defer.waitForDeferred(d) 84 | yield wfd 85 | print wfd.getResult() 86 | 87 | print "\nList all documents in database 'mydb'" 88 | d = foo.listDoc('mydb') 89 | wfd = defer.waitForDeferred(d) 90 | yield wfd 91 | print wfd.getResult() 92 | 93 | print "\nList info about database 'mydb':" 94 | d = foo.infoDB('mydb') 95 | wfd = defer.waitForDeferred(d) 96 | yield wfd 97 | print wfd.getResult() 98 | 99 | print "\nDelete database 'mydb':" 100 | d = foo.deleteDB('mydb') 101 | wfd = defer.waitForDeferred(d) 102 | yield wfd 103 | print wfd.getResult() 104 | 105 | print "\nList databases on server:" 106 | d = foo.listDB() 107 | wfd = defer.waitForDeferred(d) 108 | yield wfd 109 | print wfd.getResult() 110 | 111 | reactor.stop() 112 | test = defer.deferredGenerator(test) 113 | 114 | 115 | if __name__ == "__main__": 116 | log.startLogging(sys.stdout) 117 | reactor.callWhenRunning(test) 118 | reactor.run() 119 | -------------------------------------------------------------------------------- /paisley/test/util.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 
2007-2008 5 | # See LICENSE for details. 6 | 7 | import re 8 | import os 9 | import tempfile 10 | import subprocess 11 | import time 12 | 13 | from twisted.trial import unittest 14 | 15 | from paisley import client 16 | 17 | 18 | class CouchDBWrapper(object): 19 | """ 20 | I wrap an external CouchDB instance started and stopped for testing. 21 | 22 | @ivar tempdir: the temporary directory used for logging and running 23 | @ivar process: the CouchDB process 24 | @type process: L{subprocess.Popen} 25 | @ivar port: the randomly assigned port on which CouchDB listens 26 | @type port: str 27 | @ivar db: the CouchDB client to this server 28 | @type db: L{client.CouchDB} 29 | """ 30 | 31 | def start(self): 32 | self.tempdir = tempfile.mkdtemp(suffix='.paisley.test') 33 | 34 | path = os.path.join(os.path.dirname(__file__), 35 | 'test.ini.template') 36 | handle = open(path) 37 | 38 | conf = handle.read() % { 39 | 'tempdir': self.tempdir, 40 | } 41 | 42 | confPath = os.path.join(self.tempdir, 'test.ini') 43 | handle = open(confPath, 'w') 44 | handle.write(conf) 45 | handle.close() 46 | 47 | # create the dirs from the template 48 | os.mkdir(os.path.join(self.tempdir, 'lib')) 49 | os.mkdir(os.path.join(self.tempdir, 'log')) 50 | 51 | args = ['couchdb', '-a', confPath] 52 | null = open('/dev/null', 'w') 53 | self.process = subprocess.Popen( 54 | args, env=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 55 | 56 | # find port 57 | logPath = os.path.join(self.tempdir, 'log', 'couch.log') 58 | while not os.path.exists(logPath): 59 | if self.process.poll() is not None: 60 | raise Exception(""" 61 | couchdb exited with code %d. 
62 | stdout: 63 | %s 64 | stderr: 65 | %s""" % ( 66 | self.process.returncode, self.process.stdout.read(), 67 | self.process.stderr.read())) 68 | time.sleep(0.01) 69 | 70 | while os.stat(logPath).st_size == 0: 71 | time.sleep(0.01) 72 | 73 | PORT_RE = re.compile( 74 | 'Apache CouchDB has started on http://127.0.0.1:(?P\d+)') 75 | 76 | handle = open(logPath) 77 | line = handle.read() 78 | m = PORT_RE.search(line) 79 | if not m: 80 | self.stop() 81 | raise Exception("Cannot find port in line %s" % line) 82 | 83 | self.port = int(m.group('port')) 84 | self.db = client.CouchDB(host='localhost', port=self.port, 85 | username='testpaisley', password='testpaisley') 86 | 87 | def stop(self): 88 | self.process.terminate() 89 | 90 | os.system("rm -rf %s" % self.tempdir) 91 | 92 | 93 | class CouchDBTestCase(unittest.TestCase): 94 | """ 95 | I am a TestCase base class for tests against a real CouchDB server. 96 | I start a server during setup and stop it during teardown. 97 | 98 | @ivar db: the CouchDB client 99 | @type db: L{client.CouchDB} 100 | """ 101 | 102 | def setUp(self): 103 | self.wrapper = CouchDBWrapper() 104 | self.wrapper.start() 105 | self.db = self.wrapper.db 106 | 107 | def tearDown(self): 108 | self.wrapper.stop() 109 | 110 | # helper callbacks 111 | 112 | def checkDatabaseEmpty(self, result): 113 | self.assertEquals(result['rows'], []) 114 | self.assertEquals(result['total_rows'], 0) 115 | self.assertEquals(result['offset'], 0) 116 | 117 | def checkInfoNewDatabase(self, result): 118 | self.assertEquals(result['update_seq'], 0) 119 | self.assertEquals(result['purge_seq'], 0) 120 | self.assertEquals(result['doc_count'], 0) 121 | self.assertEquals(result['db_name'], 'test') 122 | self.assertEquals(result['doc_del_count'], 0) 123 | self.assertEquals(result['committed_update_seq'], 0) 124 | 125 | def checkResultOk(self, result): 126 | self.assertEquals(result, {'ok': True}) 127 | 128 | def checkResultEmptyView(self, result): 129 | 
self.assertEquals(result['rows'], []) 130 | self.assertEquals(result['total_rows'], 0) 131 | self.assertEquals(result['offset'], 0) 132 | 133 | 134 | def eight_bit_test_string(): 135 | return ''.join(chr(cn) for cn in xrange(0x100)) * 2 136 | -------------------------------------------------------------------------------- /paisley/changes.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test.test_changes -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2011 5 | # See LICENSE for details. 6 | 7 | from urllib import urlencode 8 | 9 | from twisted.internet import error, defer 10 | from twisted.protocols import basic 11 | 12 | from paisley.client import json 13 | 14 | 15 | class ChangeReceiver(basic.LineReceiver): 16 | # figured out by checking the last two characters on actually received 17 | # lines 18 | delimiter = '\n' 19 | 20 | def __init__(self, notifier): 21 | self._notifier = notifier 22 | 23 | def lineReceived(self, line): 24 | if not line: 25 | return 26 | 27 | change = json.loads(line) 28 | 29 | if not 'id' in change: 30 | return 31 | 32 | self._notifier.changed(change) 33 | 34 | def connectionLost(self, reason): 35 | self._notifier.connectionLost(reason) 36 | 37 | 38 | class ChangeListener: 39 | """ 40 | I am an interface for receiving changes from a L{ChangeNotifier}. 41 | """ 42 | 43 | def changed(self, change): 44 | """ 45 | @type change: dict of str -> str 46 | 47 | The given change was received. 48 | Only changes that contain an id get received. 
49 | 50 | A change is a dictionary with: 51 | - id: document id 52 | - seq: sequence number of change 53 | - changes: list of dict containing document revisions 54 | - deleted (optional) 55 | """ 56 | pass 57 | 58 | def connectionLost(self, reason): 59 | """ 60 | @type reason: L{twisted.python.failure.Failure} 61 | """ 62 | pass 63 | 64 | 65 | class ChangeNotifier(object): 66 | 67 | def __init__(self, db, dbName, since=None): 68 | self._db = db 69 | self._dbName = dbName 70 | 71 | self._caches = [] 72 | self._listeners = [] 73 | self._prot = None 74 | 75 | self._since = since 76 | 77 | self._running = False 78 | 79 | def addCache(self, cache): 80 | self._caches.append(cache) 81 | 82 | def addListener(self, listener): 83 | self._listeners.append(listener) 84 | 85 | def isRunning(self): 86 | return self._running 87 | 88 | def start(self, **kwargs): 89 | """ 90 | Start listening and notifying of changes. 91 | Separated from __init__ so you can add caches and listeners. 92 | 93 | By default, I will start listening from the most recent change. 94 | """ 95 | assert 'feed' not in kwargs, \ 96 | "ChangeNotifier always listens continuously." 
# Methods of ChangeNotifier (the class statement and the head of start()
# begin before this chunk).

def stop(self):
    """
    Stop listening to the changes feed.

    FIXME: this should produce a clean stop, but it does not.
    From http://twistedmatrix.com/documents/current/web/howto/client.html
    "If it is decided that the rest of the response body is not desired,
    stopProducing can be used to stop delivery permanently; after this,
    the protocol's connectionLost method will be called."
    """
    self._running = False
    self._prot.stopProducing()

# called by receiver

def changed(self, change):
    """
    Handle one parsed change: remember its sequence number, drop the
    changed document from all registered caches, then fan the change
    out to every listener.
    """
    seq = change.get('seq', None)
    if seq:
        self._since = seq

    for cache in self._caches:
        cache.delete(change['id'])

    for listener in self._listeners:
        listener.changed(change)

def connectionLost(self, reason):
    """
    Tell every listener that the feed connection went away.

    Even if we asked to stop, we still get a
    twisted.web._newclient.ResponseFailed containing
    twisted.internet.error.ConnectionDone and
    twisted.web.http._DataLoss.
    If we actually asked to stop, unwrap the failure and pass through
    only the inner ConnectionDone.
    """
    # FIXME: poking at internals to get failures ? Yuck!
    from twisted.web import _newclient
    if reason.check(_newclient.ResponseFailed):
        inner = reason.value.reasons[0]
        if inner.check(error.ConnectionDone) and not self.isRunning():
            reason = inner

    self._prot = None
    self._running = False
    for listener in self._listeners:
        listener.connectionLost(reason)
12 | max_dbs_open = 100 13 | delayed_commits = true ; set this to false to ensure an fsync before 201 Created is returned 14 | uri_file = %(tempdir)s/lib/couch.uri 15 | 16 | [httpd] 17 | port = 0 18 | bind_address = 127.0.0.1 19 | max_connections = 2048 20 | authentication_handlers = {couch_httpd_oauth, oauth_authentication_handler}, {couch_httpd_auth, cookie_authentication_handler}, {couch_httpd_auth, default_authentication_handler} 21 | default_handler = {couch_httpd_db, handle_request} 22 | secure_rewrites = true 23 | vhost_global_handlers = _utils, _uuids, _session, _oauth, _users 24 | allow_jsonp = false 25 | 26 | [log] 27 | file = %(tempdir)s/log/couch.log 28 | level = info 29 | include_sasl = true 30 | 31 | [couch_httpd_auth] 32 | authentication_db = _users 33 | authentication_redirect = /_utils/session.html 34 | require_valid_user = false 35 | timeout = 600 ; number of seconds before automatic logout 36 | auth_cache_size = 50 ; size is number of cache entries 37 | 38 | [query_servers] 39 | ; javascript = bindir/couchjs_dev datadir/server/main.js 40 | 41 | ; Changing reduce_limit to false will disable reduce_limit. 42 | ; If you think you're hitting reduce_limit with a "good" reduce function, 43 | ; please let us know on the mailing list so we can fine tune the heuristic. 44 | [query_server_config] 45 | reduce_limit = true 46 | 47 | ; enable external as an httpd handler, then link it with commands here. 48 | ; note, this api is still under consideration. 
49 | ; [external] 50 | ; mykey = /path/to/mycommand 51 | 52 | [daemons] 53 | view_manager={couch_view, start_link, []} 54 | external_manager={couch_external_manager, start_link, []} 55 | db_update_notifier={couch_db_update_notifier_sup, start_link, []} 56 | query_servers={couch_query_servers, start_link, []} 57 | httpd={couch_httpd, start_link, []} 58 | stats_aggregator={couch_stats_aggregator, start, []} 59 | stats_collector={couch_stats_collector, start, []} 60 | uuids={couch_uuids, start, []} 61 | auth_cache={couch_auth_cache, start_link, []} 62 | 63 | [httpd_global_handlers] 64 | / = {couch_httpd_misc_handlers, handle_welcome_req, <<"Welcome">>} 65 | ; favicon.ico = {couch_httpd_misc_handlers, handle_favicon_req, "datadir/www"} 66 | 67 | ; _utils = {couch_httpd_misc_handlers, handle_utils_dir_req, "datadir/www"} 68 | _all_dbs = {couch_httpd_misc_handlers, handle_all_dbs_req} 69 | _active_tasks = {couch_httpd_misc_handlers, handle_task_status_req} 70 | _config = {couch_httpd_misc_handlers, handle_config_req} 71 | _replicate = {couch_httpd_misc_handlers, handle_replicate_req} 72 | _uuids = {couch_httpd_misc_handlers, handle_uuids_req} 73 | _restart = {couch_httpd_misc_handlers, handle_restart_req} 74 | _stats = {couch_httpd_stats_handlers, handle_stats_req} 75 | _log = {couch_httpd_misc_handlers, handle_log_req} 76 | _session = {couch_httpd_auth, handle_session_req} 77 | _oauth = {couch_httpd_oauth, handle_oauth_req} 78 | 79 | [httpd_db_handlers] 80 | _view_cleanup = {couch_httpd_db, handle_view_cleanup_req} 81 | _compact = {couch_httpd_db, handle_compact_req} 82 | _design = {couch_httpd_db, handle_design_req} 83 | _temp_view = {couch_httpd_view, handle_temp_view_req} 84 | _changes = {couch_httpd_db, handle_changes_req} 85 | 86 | ; The external module takes an optional argument allowing you to narrow it to a 87 | ; single script. Otherwise the script name is inferred from the first path section 88 | ; after _external's own path. 
89 | ; _mypath = {couch_httpd_external, handle_external_req, <<"mykey">>} 90 | ; _external = {couch_httpd_external, handle_external_req} 91 | 92 | [httpd_design_handlers] 93 | _view = {couch_httpd_view, handle_view_req} 94 | _show = {couch_httpd_show, handle_doc_show_req} 95 | _list = {couch_httpd_show, handle_view_list_req} 96 | _info = {couch_httpd_db, handle_design_info_req} 97 | _rewrite = {couch_httpd_rewrite, handle_rewrite_req} 98 | _update = {couch_httpd_show, handle_doc_update_req} 99 | 100 | [uuids] 101 | ; Known algorithms: 102 | ; random - 128 bits of random awesome 103 | ; All awesome, all the time. 104 | ; sequential - monotonically increasing ids with random increments 105 | ; First 26 hex characters are random. Last 6 increment in 106 | ; random amounts until an overflow occurs. On overflow, the 107 | ; random prefix is regenerated and the process starts over. 108 | ; utc_random - Time since Jan 1, 1970 UTC with microseconds 109 | ; First 14 characters are the time in hex. Last 18 are random. 
110 | algorithm = sequential 111 | 112 | [stats] 113 | ; rate is in milliseconds 114 | rate = 1000 115 | ; sample intervals are in seconds 116 | samples = [0, 60, 300, 900] 117 | 118 | [attachments] 119 | compression_level = 8 ; from 1 (lowest, fastest) to 9 (highest, slowest), 0 to disable compression 120 | compressible_types = text/*, application/javascript, application/json, application/xml 121 | 122 | [replicator] 123 | max_http_sessions = 20 124 | max_http_pipeline_size = 50 125 | ; set to true to validate peer certificates 126 | verify_ssl_certificates = false 127 | ; file containing a list of peer trusted certificates (PEM format) 128 | ; ssl_trusted_certificates_file = /etc/ssl/certs/ca-certificates.crt 129 | ; maximum peer certificate depth (must be set even if certificate validation is off) 130 | ssl_certificate_max_depth = 3 131 | 132 | [admins] 133 | testpaisley = -hashed-2f07ef57064262c2fe4540e4a099278b62182141,0b816cd603a91a48cc6634e062e9d50f 134 | -------------------------------------------------------------------------------- /paisley/test/test_changes.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test.test_changes -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2011 5 | # See LICENSE for details. 
import os

from twisted.internet import defer, reactor, error
from twisted.trial import unittest

from paisley import client, changes

from paisley.test import util


class FakeNotifier(object):
    """
    A stand-in notifier that just records every change it is told about
    in self.changes, for inspection by tests.
    """

    def __init__(self):
        self.changes = []

    def changed(self, change):
        self.changes.append(change)


class TestStubChangeReceiver(unittest.TestCase):

    def testChanges(self):
        """
        Feed the canned test.changes file line by line to a ChangeReceiver
        and check that the three document changes (but not the last_seq
        line) reach the notifier.
        """
        notifier = FakeNotifier()
        receiver = changes.ChangeReceiver(notifier)

        # ChangeNotifier test lines
        path = os.path.join(os.path.dirname(__file__),
                            'test.changes')
        # close the file handle instead of leaking it until GC
        handle = open(path)
        try:
            text = handle.read()
        finally:
            handle.close()

        for line in text.split("\n"):
            receiver.lineReceived(line)

        self.assertEquals(len(notifier.changes), 3)
        self.assertEquals(notifier.changes[0]["seq"], 3934)
        self.assertEquals(notifier.changes[2]["deleted"], True)
59 | # Useful after telling the notifier to stop, since the actual 60 | # shutdown is triggered on one of the next cycles 61 | # 0 is not enough though 62 | d = defer.Deferred() 63 | reactor.callLater(0.01, d.callback, None) 64 | return d 65 | 66 | 67 | class ChangeReceiverTestCase(BaseTestCase, changes.ChangeListener): 68 | 69 | lastChange = None 70 | _deferred = None 71 | 72 | ### ChangeListener interface 73 | 74 | def changed(self, change): 75 | self.lastChange = change 76 | if self._deferred is not None: 77 | # reset self._deferred before callback because this can be 78 | # called recursively 79 | d = self._deferred 80 | self._deferred = None 81 | d.callback(change) 82 | 83 | def connectionLost(self, reason): 84 | # make sure we lost the connection cleanly 85 | self.failIf(self.tearing, 86 | 'connectionLost should be called before teardown ' 87 | 'through notifier.stop') 88 | self.failUnless(reason.check(error.ConnectionDone)) 89 | 90 | ### method for subclasses 91 | 92 | def waitForChange(self): 93 | self._deferred = defer.Deferred() 94 | return self._deferred 95 | 96 | 97 | class ListenerChangeReceiverTestCase(ChangeReceiverTestCase): 98 | 99 | def setUp(self): 100 | ChangeReceiverTestCase.setUp(self) 101 | 102 | return self.db.createDB('test') 103 | 104 | def testChanges(self): 105 | notifier = changes.ChangeNotifier(self.db, 'test') 106 | notifier.addListener(self) 107 | 108 | 109 | d = notifier.start() 110 | 111 | def create(_): 112 | changeD = self.waitForChange() 113 | 114 | saveD = self.db.saveDoc('test', {'key': 'value'}) 115 | saveD.addCallback(lambda r: setattr(self, 'firstid', r['id'])) 116 | 117 | dl = defer.DeferredList([saveD, changeD]) 118 | 119 | def check(_): 120 | c = self.lastChange 121 | self.assertEquals(c['id'], self.firstid) 122 | self.assertEquals(len(c['changes']), 1) 123 | self.assertEquals(c['changes'][0]['rev'][:2], '1-') 124 | dl.addCallback(check) 125 | 126 | dl.addCallback(lambda _: self.db.openDoc('test', self.firstid)) 127 | 
dl.addCallback(lambda r: setattr(self, 'first', r)) 128 | 129 | return dl 130 | d.addCallback(create) 131 | 132 | def update(_): 133 | changeD = self.waitForChange() 134 | 135 | self.first['key'] = 'othervalue' 136 | saveD = self.db.saveDoc('test', self.first, docId=self.firstid) 137 | 138 | dl = defer.DeferredList([saveD, changeD]) 139 | 140 | def check(_): 141 | c = self.lastChange 142 | self.assertEquals(c['id'], self.firstid) 143 | self.assertEquals(len(c['changes']), 1) 144 | self.assertEquals(c['changes'][0]['rev'][:2], '2-') 145 | dl.addCallback(check) 146 | 147 | return dl 148 | d.addCallback(update) 149 | 150 | d.addCallback(lambda _: notifier.stop()) 151 | d.addCallback(lambda _: self.waitForNextCycle()) 152 | return d 153 | 154 | def testChangesFiltered(self): 155 | """ 156 | This tests that we can use a filter to only receive notifications 157 | for documents that interest us. 158 | """ 159 | notifier = changes.ChangeNotifier(self.db, 'test') 160 | notifier.addListener(self) 161 | 162 | d = defer.Deferred() 163 | 164 | filterjs = """ 165 | function(doc, req) { 166 | log(req.query); 167 | var docids = eval('(' + req.query.docids + ')'); 168 | log(docids); 169 | if (docids.indexOf(doc._id) > -1) { 170 | return true; 171 | } else { 172 | return false; 173 | } 174 | } 175 | """ 176 | 177 | d.addCallback(lambda _: self.db.saveDoc('test', 178 | { 179 | 'filters': { 180 | "test": filterjs, 181 | }, 182 | }, 183 | '_design/design_doc')) 184 | 185 | 186 | d.addCallback(lambda _: notifier.start( 187 | filter='design_doc/test', 188 | docids=client.json.dumps(['one', ]))) 189 | 190 | def create(_): 191 | changeD = self.waitForChange() 192 | 193 | saveD = self.db.saveDoc('test', {'key': 'value'}, docId='one') 194 | saveD.addCallback(lambda r: setattr(self, 'firstid', r['id'])) 195 | 196 | dl = defer.DeferredList([saveD, changeD]) 197 | 198 | def check(_): 199 | c = self.lastChange 200 | self.assertEquals(c['id'], self.firstid) 201 | 
self.assertEquals(len(c['changes']), 1) 202 | self.assertEquals(c['changes'][0]['rev'][:2], '1-') 203 | self.assertEquals(c['seq'], 2) 204 | dl.addCallback(check) 205 | 206 | dl.addCallback(lambda _: self.db.openDoc('test', self.firstid)) 207 | dl.addCallback(lambda r: setattr(self, 'first', r)) 208 | 209 | return dl 210 | d.addCallback(create) 211 | 212 | def update(_): 213 | changeD = self.waitForChange() 214 | 215 | self.first['key'] = 'othervalue' 216 | saveD = self.db.saveDoc('test', self.first, docId=self.firstid) 217 | saveD.addCallback(lambda r: setattr(self, 'firstrev', r['rev'])) 218 | 219 | dl = defer.DeferredList([saveD, changeD]) 220 | 221 | def check(_): 222 | c = self.lastChange 223 | self.assertEquals(c['id'], self.firstid) 224 | self.assertEquals(len(c['changes']), 1) 225 | self.assertEquals(c['changes'][0]['rev'][:2], '2-') 226 | self.assertEquals(c['seq'], 3) 227 | dl.addCallback(check) 228 | 229 | return dl 230 | d.addCallback(update) 231 | 232 | def createTwoAndUpdateOne(_): 233 | # since createTwo is not supposed to trigger a change, we can't 234 | # assert that it didn't until we make another change that is 235 | # detected. 
class RestartingNotifierTest(ChangeReceiverTestCase):
    """
    Check that a ChangeNotifier created with since=0 can be started,
    stopped and restarted, and delivers changes made while it was down.
    """

    def setUp(self):
        ChangeReceiverTestCase.setUp(self)
        # get database with some history
        d = self.db.createDB('test')
        d.addCallback(self._createDoc, 'mydoc')
        return d

    def testStartingWithSinceParam(self):
        '''
        Here we start the notifier from the beginning of the history and
        assert we get the historical change.
        Then we update the database once the notifier is stopped, restart
        the notifier and assert we got the change.
        '''

        notifier = self._createNotifier(since=0)
        self.assertFalse(notifier.isRunning())

        d = defer.succeed(None)
        d.addCallback(self._start, notifier)
        d.addCallback(self._assertNotification, 'mydoc')
        d.addCallback(self._stop, notifier)
        # now create other document while notifier is not working
        d.addCallback(self._createDoc, 'other_doc')
        d.addCallback(self._start, notifier)
        # assert that we receive notification after reconnecting
        d.addCallback(self._assertNotification, 'other_doc')
        d.addCallback(self._stop, notifier)
        return d

    def _start(self, _, notifier):
        # wait for both the start deferred and the first notification
        d = self.waitForChange()
        d2 = notifier.start()
        d2.addCallback(lambda _: d)
        d2.addCallback(lambda _: self.assertTrue(notifier.isRunning()))
        return d2

    def _stop(self, _, notifier):
        notifier.stop()
        d = self.waitForNextCycle()
        d.addCallback(lambda _: self.assertFalse(notifier.isRunning()))
        return d

    def _assertNotification(self, _, expected_id):
        self.assertEqual(expected_id, self.lastChange['id'])

    def _createNotifier(self, **options):
        notifier = changes.ChangeNotifier(self.db, 'test', **options)
        notifier.addListener(self)
        return notifier

    def _createDoc(self, _, doc_id):
        return self.db.saveDoc('test', {'key': 'value'}, doc_id)
self.expect_tearing, 354 | 'connectionLost should be called before teardown') 355 | 356 | self.failIf(reason.check(error.ConnectionDone)) 357 | 358 | from twisted.web import _newclient 359 | self.failUnless(reason.check(_newclient.ResponseFailed)) 360 | 361 | def testKill(self): 362 | self.expect_tearing = True 363 | self.wrapper.process.terminate() 364 | return self.waitForNextCycle() 365 | -------------------------------------------------------------------------------- /paisley/client.py: -------------------------------------------------------------------------------- 1 | # -*- Mode: Python; test-case-name: paisley.test.test_client -*- 2 | # vi:si:et:sw=4:sts=4:ts=4 3 | 4 | # Copyright (c) 2007-2008 5 | # See LICENSE for details. 6 | 7 | """ 8 | CouchDB client. 9 | """ 10 | 11 | from paisley import pjson as json 12 | 13 | from encodings import utf_8 14 | import logging 15 | import new 16 | 17 | from urllib import urlencode, quote 18 | from zope.interface import implements 19 | 20 | from twisted.web.http_headers import Headers 21 | from twisted.web.iweb import IBodyProducer 22 | 23 | from twisted.internet.defer import Deferred, maybeDeferred 24 | from twisted.internet.protocol import Protocol 25 | 26 | try: 27 | from base64 import b64encode 28 | except ImportError: 29 | import base64 30 | 31 | def b64encode(s): 32 | return "".join(base64.encodestring(s).split("\n")) 33 | 34 | 35 | def short_print(body, trim=255): 36 | # don't go nuts on possibly huge log entries 37 | # since we're a library we should try to avoid calling this and instead 38 | # write awesome logs 39 | if not isinstance(body, basestring): 40 | body = str(body) 41 | if len(body) < trim: 42 | return body.replace('\n', '\\n') 43 | else: 44 | return body[:trim].replace('\n', '\\n') + '...' 
45 | 46 | try: 47 | from functools import partial 48 | except ImportError: 49 | 50 | class partial(object): 51 | 52 | def __init__(self, fn, *args, **kw): 53 | self.fn = fn 54 | self.args = args 55 | self.kw = kw 56 | 57 | def __call__(self, *args, **kw): 58 | if kw and self.kw: 59 | d = self.kw.copy() 60 | d.update(kw) 61 | else: 62 | d = kw or self.kw 63 | return self.fn(*(self.args + args), **d) 64 | 65 | SOCK_TIMEOUT = 300 66 | 67 | 68 | class StringProducer(object): 69 | """ 70 | Body producer for t.w.c.Agent 71 | """ 72 | implements(IBodyProducer) 73 | 74 | def __init__(self, body): 75 | self.body = body 76 | self.length = len(body) 77 | 78 | def startProducing(self, consumer): 79 | return maybeDeferred(consumer.write, self.body) 80 | 81 | def pauseProducing(self): 82 | pass 83 | 84 | def stopProducing(self): 85 | pass 86 | 87 | 88 | class ResponseReceiver(Protocol): 89 | """ 90 | Assembles HTTP response from return stream. 91 | """ 92 | 93 | def __init__(self, deferred, decode_utf8): 94 | self.recv_chunks = [] 95 | self.decoder = utf_8.IncrementalDecoder() if decode_utf8 else None 96 | self.deferred = deferred 97 | 98 | def dataReceived(self, bytes, final=False): 99 | if self.decoder: 100 | bytes = self.decoder.decode(bytes, final) 101 | self.recv_chunks.append(bytes) 102 | 103 | def connectionLost(self, reason): 104 | # _newclient and http import reactor 105 | from twisted.web._newclient import ResponseDone 106 | from twisted.web.http import PotentialDataLoss 107 | 108 | if reason.check(ResponseDone) or reason.check(PotentialDataLoss): 109 | self.dataReceived('', final=True) 110 | self.deferred.callback(''.join(self.recv_chunks)) 111 | else: 112 | self.deferred.errback(reason) 113 | 114 | 115 | class CouchDB(object): 116 | """ 117 | CouchDB client: hold methods for accessing a couchDB. 
118 | """ 119 | 120 | def __init__(self, host, port=5984, dbName=None, 121 | username=None, password=None, disable_log=False, 122 | version=(1, 0, 1)): 123 | """ 124 | Initialize the client for given host. 125 | 126 | @param host: address of the server. 127 | @type host: C{str} 128 | 129 | @param port: if specified, the port of the server. 130 | @type port: C{int} 131 | 132 | @param dbName: if specified, all calls needing a database name will use 133 | this one by default. 134 | @type dbName: C{str} 135 | """ 136 | from twisted.internet import reactor 137 | # t.w.c imports reactor 138 | from twisted.web.client import Agent 139 | self.client = Agent(reactor) 140 | self.host = host 141 | self.port = int(port) 142 | self.username = username 143 | self.password =password 144 | self.url_template = "http://%s:%s%%s" % (self.host, self.port) 145 | if dbName is not None: 146 | self.bindToDB(dbName) 147 | 148 | if disable_log: 149 | # since this is the db layer, and we generate a lot of logs, 150 | # let people disable them completely if they want to. 151 | levels = ['trace', 'debug', 'info', 'warn', 'error', 'exception'] 152 | 153 | class FakeLog(object): 154 | pass 155 | 156 | def nullfn(self, *a, **k): 157 | pass 158 | self.log = FakeLog() 159 | for level in levels: 160 | self.log.__dict__[level] = new.instancemethod(nullfn, self.log) 161 | else: 162 | self.log = logging.getLogger('paisley') 163 | 164 | 165 | self.log.debug("[%s%s:%s/%s] init new db client", 166 | '%s@' % (username, ) if username else '', 167 | host, 168 | port, 169 | dbName if dbName else '') 170 | self.version = version 171 | 172 | def parseResult(self, result): 173 | """ 174 | Parse JSON result from the DB. 175 | """ 176 | return json.loads(result) 177 | 178 | def bindToDB(self, dbName): 179 | """ 180 | Bind all operations asking for a DB name to the given DB. 
181 | """ 182 | for methname in ["createDB", "deleteDB", "infoDB", "listDoc", 183 | "openDoc", "saveDoc", "deleteDoc", "openView", 184 | "tempView"]: 185 | method = getattr(self, methname) 186 | newMethod = partial(method, dbName) 187 | setattr(self, methname, newMethod) 188 | 189 | # Database operations 190 | 191 | def createDB(self, dbName): 192 | """ 193 | Creates a new database on the server. 194 | 195 | @type dbName: str 196 | """ 197 | # Responses: {u'ok': True}, 409 Conflict, 500 Internal Server Error, 198 | # 401 Unauthorized 199 | # 400 {"error":"illegal_database_name","reason":"Only lowercase 200 | # characters (a-z), digits (0-9), and any of the characters _, $, (, 201 | # ), +, -, and / are allowed. Must begin with a letter."} 202 | 203 | return self.put("/%s/" % (dbName, ), "", descr='CreateDB' 204 | ).addCallback(self.parseResult) 205 | 206 | def deleteDB(self, dbName): 207 | """ 208 | Deletes the database on the server. 209 | 210 | @type dbName: str 211 | """ 212 | # Responses: {u'ok': True}, 404 Object Not Found 213 | return self.delete("/%s/" % (dbName, ) 214 | ).addCallback(self.parseResult) 215 | 216 | def listDB(self): 217 | """ 218 | List the databases on the server. 219 | """ 220 | # Responses: list of db names 221 | return self.get("/_all_dbs", descr='listDB').addCallback( 222 | self.parseResult) 223 | 224 | def getVersion(self): 225 | """ 226 | Returns the couchDB version. 
def _parseVersion(self, versionString):
    """
    Parse a couchDB version string like '1.1.0' or '1.1.1a1162549' into
    a tuple of ints, e.g. (1, 1, 0) or (1, 1, 1).

    A part with no leading digits yields None in that position.
    """
    import re
    # compile once instead of on every dot-separated part
    intRegexp = re.compile("^(\d+)")

    def onlyInt(part):
        m = intRegexp.search(part)
        if not m:
            return None
        return int(m.group(1))

    return tuple(onlyInt(_) for _ in versionString.split('.'))

def infoDB(self, dbName):
    """
    Return info about the given database, parsed from JSON.
    """
    # Responses: {u'update_seq': 0, u'db_name': u'mydb', u'doc_count': 0}
    # 404 Object Not Found
    return self.get("/%s/" % (dbName, ), descr='infoDB'
        ).addCallback(self.parseResult)
265 | """ 266 | # Responses: {u'rows': [{u'_rev': -1825937535, u'_id': u'mydoc'}], 267 | # u'view': u'_all_docs'}, 404 Object Not Found 268 | import warnings 269 | if 'count' in obsolete: 270 | warnings.warn("listDoc 'count' parameter has been renamed to " 271 | "'limit' to reflect changing couchDB api", 272 | DeprecationWarning) 273 | limit = obsolete.pop('count') 274 | if obsolete: 275 | raise AttributeError("Unknown attribute(s): %r" % ( 276 | obsolete.keys(), )) 277 | uri = "/%s/_all_docs" % (dbName, ) 278 | args = {} 279 | if reverse: 280 | args["reverse"] = "true" 281 | if startkey: 282 | args["startkey"] = json.dumps(startkey) 283 | if endkey: 284 | args["endkey"] = json.dumps(endkey) 285 | if include_docs: 286 | args["include_docs"] = True 287 | if limit >= 0: 288 | args["limit"] = int(limit) 289 | if args: 290 | uri += "?%s" % (urlencode(args), ) 291 | return self.get(uri, descr='listDoc').addCallback(self.parseResult) 292 | 293 | def openDoc(self, dbName, docId, revision=None, full=False, attachment=""): 294 | """ 295 | Open a document in a given database. 296 | 297 | @type docId: C{unicode} 298 | 299 | @param revision: if specified, the revision of the document desired. 300 | @type revision: C{unicode} 301 | 302 | @param full: if specified, return the list of all the revisions of the 303 | document, along with the document itself. 304 | @type full: C{bool} 305 | 306 | @param attachment: if specified, return the named attachment from the 307 | document. 
308 | @type attachment: C{str} 309 | """ 310 | # Responses: {u'_rev': -1825937535, u'_id': u'mydoc', ...} 311 | # 404 Object Not Found 312 | 313 | # FIXME: remove these conversions and have our callers do them 314 | docId = unicode(docId) 315 | assert type(docId) is unicode, \ 316 | 'docId is %r instead of unicode' % (type(docId), ) 317 | 318 | if revision: 319 | revision = unicode(revision) 320 | assert type(revision) is unicode, \ 321 | 'revision is %r instead of unicode' % (type(revision), ) 322 | 323 | uri = "/%s/%s" % (dbName, quote(docId.encode('utf-8'))) 324 | if revision is not None: 325 | uri += "?%s" % (urlencode({"rev": revision.encode('utf-8')}), ) 326 | elif full: 327 | uri += "?%s" % (urlencode({"full": "true"}), ) 328 | elif attachment: 329 | uri += "/%s" % quote(attachment) 330 | # No parsing 331 | return self.get(uri, descr='openDoc', isJson=False) 332 | return self.get(uri, descr='openDoc').addCallback(self.parseResult) 333 | 334 | def addAttachments(self, document, attachments): 335 | """ 336 | Add attachments to a document, before sending it to the DB. 337 | 338 | @param document: the document to modify. 339 | @type document: C{dict} 340 | 341 | @param attachments: the attachments to add. 342 | @type attachments: C{dict} 343 | """ 344 | document.setdefault("_attachments", {}) 345 | for name, data in attachments.iteritems(): 346 | data = b64encode(data) 347 | document["_attachments"][name] = {"type": "base64", "data": data} 348 | 349 | def saveDoc(self, dbName, body, docId=None): 350 | """ 351 | Save/create a document to/in a given database. 352 | 353 | @param dbName: identifier of the database. 354 | @type dbName: C{str} 355 | 356 | @param body: content of the document. 357 | @type body: C{str} or any structured object 358 | 359 | @param docId: if specified, the identifier to be used in the database. 
360 | @type docId: C{unicode} 361 | """ 362 | # Responses: {'rev': '1-9dd776365618752ddfaf79d9079edf84', 363 | # 'ok': True, 'id': '198abfee8852816bc112992564000295'} 364 | 365 | # 404 Object not found (if database does not exist) 366 | # 409 Conflict, 500 Internal Server Error 367 | if docId: 368 | # FIXME: remove these conversions and have our callers do them 369 | docId = unicode(docId) 370 | assert type(docId) is unicode, \ 371 | 'docId is %r instead of unicode' % (type(docId), ) 372 | 373 | if not isinstance(body, (str, unicode)): 374 | body = json.dumps(body) 375 | if docId is not None: 376 | d = self.put("/%s/%s" % (dbName, quote(docId.encode('utf-8'))), 377 | body, descr='saveDoc') 378 | else: 379 | d = self.post("/%s/" % (dbName, ), body, descr='saveDoc') 380 | return d.addCallback(self.parseResult) 381 | 382 | def deleteDoc(self, dbName, docId, revision): 383 | """ 384 | Delete a document on given database. 385 | 386 | @param dbName: identifier of the database. 387 | @type dbName: C{str} 388 | 389 | @param docId: the document identifier to be used in the database. 390 | @type docId: C{unicode} 391 | 392 | @param revision: the revision of the document to delete. 393 | @type revision: C{unicode} 394 | 395 | """ 396 | # Responses: {u'_rev': 1469561101, u'ok': True} 397 | # 500 Internal Server Error 398 | 399 | docId = unicode(docId) 400 | assert type(docId) is unicode, \ 401 | 'docId is %r instead of unicode' % (type(docId), ) 402 | 403 | revision = unicode(revision) 404 | assert type(revision) is unicode, \ 405 | 'revision is %r instead of unicode' % (type(revision), ) 406 | 407 | 408 | return self.delete("/%s/%s?%s" % ( 409 | dbName, 410 | quote(docId.encode('utf-8')), 411 | urlencode({'rev': revision.encode('utf-8')}))).addCallback( 412 | self.parseResult) 413 | 414 | # View operations 415 | 416 | def openView(self, dbName, docId, viewId, **kwargs): 417 | """ 418 | Open a view of a document in a given database. 
419 | """ 420 | # Responses: 421 | # 500 Internal Server Error (illegal database name) 422 | 423 | def buildUri(dbName=dbName, docId=docId, viewId=viewId, kwargs=kwargs): 424 | return "/%s/_design/%s/_view/%s?%s" % ( 425 | dbName, quote(docId), viewId, urlencode(kwargs)) 426 | 427 | # if there is a "keys" argument, remove it from the kwargs 428 | # dictionary now so that it doesn't get double JSON-encoded 429 | body = None 430 | if "keys" in kwargs: 431 | body = json.dumps({"keys": kwargs.pop("keys")}) 432 | 433 | # encode the rest of the values with JSON for use as query 434 | # arguments in the URI 435 | for k, v in kwargs.iteritems(): 436 | if k == 'keys': # we do this below, for the full body 437 | pass 438 | else: 439 | kwargs[k] = json.dumps(v) 440 | # we keep the paisley API, but couchdb uses limit now 441 | if 'count' in kwargs: 442 | kwargs['limit'] = kwargs.pop('count') 443 | 444 | # If there's a list of keys to send, POST the 445 | # query so that we can upload the keys as the body of 446 | # the POST request, otherwise use a GET request 447 | if body: 448 | return self.post( 449 | buildUri(), body=body, descr='openView').addCallback( 450 | self.parseResult) 451 | else: 452 | return self.get( 453 | buildUri(), descr='openView').addCallback( 454 | self.parseResult) 455 | 456 | def addViews(self, document, views): 457 | """ 458 | Add views to a document. 459 | 460 | @param document: the document to modify. 461 | @type document: C{dict} 462 | 463 | @param views: the views to add. 464 | @type views: C{dict} 465 | """ 466 | document.setdefault("views", {}) 467 | for name, data in views.iteritems(): 468 | document["views"][name] = data 469 | 470 | def tempView(self, dbName, view): 471 | """ 472 | Make a temporary view on the server. 
473 | """ 474 | if not isinstance(view, (str, unicode)): 475 | view = json.dumps(view) 476 | d = self.post("/%s/_temp_view" % (dbName, ), view, descr='tempView') 477 | return d.addCallback(self.parseResult) 478 | 479 | # Basic http methods 480 | 481 | def _getPage(self, uri, method="GET", postdata=None, headers=None, 482 | isJson=True): 483 | """ 484 | C{getPage}-like. 485 | """ 486 | 487 | def cb_recv_resp(response): 488 | d_resp_recvd = Deferred() 489 | content_type = response.headers.getRawHeaders('Content-Type', 490 | [''])[0].lower().strip() 491 | decode_utf8 = 'charset=utf-8' in content_type or \ 492 | content_type == 'application/json' 493 | response.deliverBody(ResponseReceiver(d_resp_recvd, 494 | decode_utf8=decode_utf8)) 495 | return d_resp_recvd.addCallback(cb_process_resp, response) 496 | 497 | def cb_process_resp(body, response): 498 | # twisted.web.error imports reactor 499 | from twisted.web import error as tw_error 500 | 501 | # Emulate HTTPClientFactory and raise t.w.e.Error 502 | # and PageRedirect if we have errors. 503 | if response.code > 299 and response.code < 400: 504 | raise tw_error.PageRedirect(response.code, body) 505 | elif response.code > 399: 506 | raise tw_error.Error(response.code, body) 507 | 508 | return body 509 | 510 | uurl = unicode(self.url_template % (uri, )) 511 | url = uurl.encode('utf-8') 512 | 513 | if not headers: 514 | headers = {} 515 | 516 | if isJson: 517 | headers["Accept"] = ["application/json"] 518 | headers["Content-Type"] = ["application/json"] 519 | 520 | if self.username: 521 | headers["Authorization"] = ["Basic %s" % b64encode( 522 | "%s:%s" % (self.username, self.password))] 523 | 524 | body = StringProducer(postdata) if postdata else None 525 | 526 | d = self.client.request(method, url, Headers(headers), body) 527 | 528 | d.addCallback(cb_recv_resp) 529 | 530 | return d 531 | 532 | def get(self, uri, descr='', isJson=True): 533 | """ 534 | Execute a C{GET} at C{uri}. 
535 | """ 536 | self.log.debug("[%s:%s%s] GET %s", 537 | self.host, self.port, short_print(uri), descr) 538 | return self._getPage(uri, method="GET", isJson=isJson) 539 | 540 | def post(self, uri, body, descr=''): 541 | """ 542 | Execute a C{POST} of C{body} at C{uri}. 543 | """ 544 | self.log.debug("[%s:%s%s] POST %s: %s", 545 | self.host, self.port, short_print(uri), descr, 546 | short_print(repr(body))) 547 | return self._getPage(uri, method="POST", postdata=body) 548 | 549 | def put(self, uri, body, descr=''): 550 | """ 551 | Execute a C{PUT} of C{body} at C{uri}. 552 | """ 553 | self.log.debug("[%s:%s%s] PUT %s: %s", 554 | self.host, self.port, short_print(uri), descr, 555 | short_print(repr(body))) 556 | return self._getPage(uri, method="PUT", postdata=body) 557 | 558 | def delete(self, uri, descr=''): 559 | """ 560 | Execute a C{DELETE} at C{uri}. 561 | """ 562 | self.log.debug("[%s:%s%s] DELETE %s", 563 | self.host, self.port, short_print(uri), descr) 564 | return self._getPage(uri, method="DELETE") 565 | -------------------------------------------------------------------------------- /paisley/mapping.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2007-2009 Christopher Lenz 4 | # All rights reserved. 5 | # 6 | # This software is licensed as described in the file COPYING, which 7 | # you should have received as part of this distribution. 8 | 9 | """Mapping from raw JSON data structures to Python objects and vice versa. 10 | 11 | >>> from couchdb import Server 12 | >>> server = Server('http://localhost:5984/') 13 | >>> db = server.create('python-tests') 14 | 15 | To define a document mapping, you declare a Python class inherited from 16 | `Document`, and add any number of `Field` attributes: 17 | 18 | >>> class Person(Document): 19 | ... name = TextField() 20 | ... age = IntegerField() 21 | ... 
added = DateTimeField(default=datetime.now) 22 | >>> person = Person(name='John Doe', age=42) 23 | >>> person.store(db) #doctest: +ELLIPSIS 24 | 25 | >>> person.age 26 | 42 27 | 28 | You can then load the data from the CouchDB server through your `Document` 29 | subclass, and conveniently access all attributes: 30 | 31 | >>> person = Person.load(db, person.id) 32 | >>> old_rev = person.rev 33 | >>> person.name 34 | u'John Doe' 35 | >>> person.age 36 | 42 37 | >>> person.added #doctest: +ELLIPSIS 38 | datetime.datetime(...) 39 | 40 | To update a document, simply set the attributes, and then call the ``store()`` 41 | method: 42 | 43 | >>> person.name = 'John R. Doe' 44 | >>> person.store(db) #doctest: +ELLIPSIS 45 | 46 | 47 | If you retrieve the document from the server again, you should be getting the 48 | updated data: 49 | 50 | >>> person = Person.load(db, person.id) 51 | >>> person.name 52 | u'John R. Doe' 53 | >>> person.rev != old_rev 54 | True 55 | 56 | >>> del server['python-tests'] 57 | """ 58 | 59 | import copy 60 | 61 | from calendar import timegm 62 | from datetime import date, datetime, time 63 | from decimal import Decimal 64 | from time import strptime, struct_time 65 | 66 | __all__ = ['Mapping', 'Document', 'Field', 'TextField', 'FloatField', 67 | 'IntegerField', 'LongField', 'BooleanField', 'DecimalField', 68 | 'DateField', 'DateTimeField', 'TimeField', 'DictField', 'ListField', 69 | 'TupleField'] 70 | __docformat__ = 'restructuredtext en' 71 | 72 | DEFAULT = object() 73 | 74 | 75 | class Field(object): 76 | """Basic unit for mapping a piece of data between Python and JSON. 77 | 78 | Instances of this class can be added to subclasses of `Document` to describe 79 | the mapping of a document. 
80 | """ 81 | 82 | def __init__(self, name=None, default=None): 83 | self.name = name 84 | self.default = default 85 | 86 | def __get__(self, instance, owner): 87 | if instance is None: 88 | return self 89 | value = instance._data.get(self.name) 90 | if value is not None: 91 | value = self._to_python(value) 92 | elif self.default is not None: 93 | default = self.default 94 | if callable(default): 95 | default = default() 96 | value = default 97 | return value 98 | 99 | def __set__(self, instance, value): 100 | if value is not None: 101 | value = self._to_json(value) 102 | instance._data[self.name] = value 103 | 104 | def _to_python(self, value): 105 | return unicode(value) 106 | 107 | def _to_json(self, value): 108 | return self._to_python(value) 109 | 110 | 111 | class MappingMeta(type): 112 | 113 | def __new__(cls, name, bases, d): 114 | fields = {} 115 | for base in bases: 116 | if hasattr(base, '_fields'): 117 | fields.update(base._fields) 118 | for attrname, attrval in d.items(): 119 | if isinstance(attrval, Field): 120 | if not attrval.name: 121 | attrval.name = attrname 122 | fields[attrname] = attrval 123 | d['_fields'] = fields 124 | return type.__new__(cls, name, bases, d) 125 | 126 | 127 | class Mapping(object): 128 | __metaclass__ = MappingMeta 129 | 130 | def __init__(self, **values): 131 | self._data = {} 132 | for attrname, field in self._fields.items(): 133 | if attrname in values: 134 | setattr(self, attrname, values.pop(attrname)) 135 | else: 136 | setattr(self, attrname, getattr(self, attrname)) 137 | 138 | def __iter__(self): 139 | return iter(self._data) 140 | 141 | def __len__(self): 142 | return len(self._data or ()) 143 | 144 | def __delitem__(self, name): 145 | del self._data[name] 146 | 147 | def __getitem__(self, name): 148 | return self._data[name] 149 | 150 | def __setitem__(self, name, value): 151 | self._data[name] = value 152 | 153 | def get(self, name, default): 154 | return self._data.get(name, default) 155 | 156 | def 
setdefault(self, name, default): 157 | return self._data.setdefault(name, default) 158 | 159 | def unwrap(self): 160 | return self._data 161 | 162 | @classmethod 163 | def build(cls, **d): 164 | fields = {} 165 | for attrname, attrval in d.items(): 166 | if not attrval.name: 167 | attrval.name = attrname 168 | fields[attrname] = attrval 169 | d['_fields'] = fields 170 | return type('AnonymousStruct', (cls,), d) 171 | 172 | @classmethod 173 | def wrap(cls, data): 174 | instance = cls() 175 | instance._data = data 176 | return instance 177 | 178 | def _to_python(self, value): 179 | return self.wrap(value) 180 | 181 | def _to_json(self, value): 182 | return self.unwrap() 183 | 184 | 185 | class DocumentMeta(MappingMeta): 186 | pass 187 | 188 | 189 | class Document(Mapping): 190 | __metaclass__ = DocumentMeta 191 | 192 | def __init__(self, id=None, **values): 193 | Mapping.__init__(self, **values) 194 | if id is not None: 195 | self.id = id 196 | 197 | def __repr__(self): 198 | return '<%s %r@%r %r>' % (type(self).__name__, self.id, self.rev, 199 | dict([(k, v) for k, v in self._data.items() 200 | if k not in ('_id', '_rev')])) 201 | 202 | def _get_id(self): 203 | if hasattr(self._data, 'id'): # When data is client.Document 204 | return self._data.id 205 | return self._data.get('_id') 206 | def _set_id(self, value): 207 | if self.id is not None: 208 | raise AttributeError('id can only be set on new documents') 209 | self._data['_id'] = value 210 | id = property(_get_id, _set_id, doc='The document ID') 211 | 212 | @property 213 | def rev(self): 214 | """The document revision. 215 | 216 | :rtype: basestring 217 | """ 218 | if hasattr(self._data, 'rev'): # When data is client.Document 219 | return self._data.rev 220 | return self._data.get('_rev') 221 | 222 | def items(self): 223 | """Return the fields as a list of ``(name, value)`` tuples. 
224 | 225 | This method is provided to enable easy conversion to native dictionary 226 | objects, for example to allow use of `mapping.Document` instances with 227 | `client.Database.update`. 228 | 229 | >>> class Post(Document): 230 | ... title = TextField() 231 | ... author = TextField() 232 | >>> post = Post(id='foo-bar', title='Foo bar', author='Joe') 233 | >>> sorted(post.items()) 234 | [('_id', 'foo-bar'), ('author', u'Joe'), ('title', u'Foo bar')] 235 | 236 | :return: a list of ``(name, value)`` tuples 237 | """ 238 | retval = [] 239 | if self.id is not None: 240 | retval.append(('_id', self.id)) 241 | if self.rev is not None: 242 | retval.append(('_rev', self.rev)) 243 | for name, value in self._data.items(): 244 | if name not in ('_id', '_rev'): 245 | retval.append((name, value)) 246 | return retval 247 | 248 | @classmethod 249 | def load(cls, db, id): 250 | """Load a specific document from the given database. 251 | 252 | :param db: the `Database` object to retrieve the document from 253 | :param id: the document ID 254 | :return: the `Document` instance, or `None` if no document with the 255 | given ID was found 256 | """ 257 | doc = db.get(id) 258 | if doc is None: 259 | return None 260 | return cls.wrap(doc) 261 | 262 | def store(self, db): 263 | """Store the document in the given database.""" 264 | db.save(self._data) 265 | return self 266 | 267 | @classmethod 268 | def query(cls, db, map_fun, reduce_fun, language='javascript', **options): 269 | """Execute a CouchDB temporary view and map the result values back to 270 | objects of this mapping. 271 | 272 | Note that by default, any properties of the document that are not 273 | included in the values of the view will be treated as if they were 274 | missing from the document. If you want to load the full document for 275 | every row, set the ``include_docs`` option to ``True``. 
276 | """ 277 | def _wrapper(row): 278 | if row.doc is not None: 279 | return cls.wrap(row.doc) 280 | data = row.value 281 | data['_id'] = row.id 282 | return cls.wrap(data) 283 | return db.query(map_fun, reduce_fun=reduce_fun, language=language, 284 | wrapper=_wrapper, **options) 285 | 286 | @classmethod 287 | def view(cls, db, viewname, **options): 288 | """Execute a CouchDB named view and map the result values back to 289 | objects of this mapping. 290 | 291 | Note that by default, any properties of the document that are not 292 | included in the values of the view will be treated as if they were 293 | missing from the document. If you want to load the full document for 294 | every row, set the ``include_docs`` option to ``True``. 295 | """ 296 | def _wrapper(row): 297 | if row.doc is not None: # include_docs=True 298 | return cls.wrap(row.doc) 299 | data = row.value 300 | data['_id'] = row.id 301 | return cls.wrap(data) 302 | return db.view(viewname, wrapper=_wrapper, **options) 303 | 304 | def fromDict(self, d): 305 | """ 306 | Set the object from the given result dictionary obtained from CouchDB. 307 | """ 308 | # FIXME: this is poking at internals of python-couchdb 309 | # FIXME: do we need copy ? 
310 | self._data = d.copy() 311 | return 312 | 313 | class TextField(Field): 314 | """Mapping field for string values.""" 315 | _to_python = unicode 316 | 317 | 318 | class FloatField(Field): 319 | """Mapping field for float values.""" 320 | _to_python = float 321 | 322 | 323 | class IntegerField(Field): 324 | """Mapping field for integer values.""" 325 | _to_python = int 326 | 327 | 328 | class LongField(Field): 329 | """Mapping field for long integer values.""" 330 | _to_python = long 331 | 332 | 333 | class BooleanField(Field): 334 | """Mapping field for boolean values.""" 335 | _to_python = bool 336 | 337 | 338 | class DecimalField(Field): 339 | """Mapping field for decimal values.""" 340 | 341 | def _to_python(self, value): 342 | return Decimal(value) 343 | 344 | def _to_json(self, value): 345 | return unicode(value) 346 | 347 | 348 | class DateField(Field): 349 | """Mapping field for storing dates. 350 | 351 | >>> field = DateField() 352 | >>> field._to_python('2007-04-01') 353 | datetime.date(2007, 4, 1) 354 | >>> field._to_json(date(2007, 4, 1)) 355 | '2007-04-01' 356 | >>> field._to_json(datetime(2007, 4, 1, 15, 30)) 357 | '2007-04-01' 358 | """ 359 | 360 | def _to_python(self, value): 361 | if isinstance(value, basestring): 362 | try: 363 | value = date(*strptime(value, '%Y-%m-%d')[:3]) 364 | except ValueError: 365 | raise ValueError('Invalid ISO date %r' % value) 366 | return value 367 | 368 | def _to_json(self, value): 369 | if isinstance(value, datetime): 370 | value = value.date() 371 | return value.isoformat() 372 | 373 | 374 | class DateTimeField(Field): 375 | """Mapping field for storing date/time values. 
376 | 377 | >>> field = DateTimeField() 378 | >>> field._to_python('2007-04-01T15:30:00Z') 379 | datetime.datetime(2007, 4, 1, 15, 30) 380 | >>> field._to_json(datetime(2007, 4, 1, 15, 30, 0, 9876)) 381 | '2007-04-01T15:30:00Z' 382 | >>> field._to_json(date(2007, 4, 1)) 383 | '2007-04-01T00:00:00Z' 384 | """ 385 | 386 | def _to_python(self, value): 387 | if isinstance(value, basestring): 388 | try: 389 | value = value.split('.', 1)[0] # strip out microseconds 390 | value = value.rstrip('Z') # remove timezone separator 391 | timestamp = timegm(strptime(value, '%Y-%m-%dT%H:%M:%S')) 392 | value = datetime.utcfromtimestamp(timestamp) 393 | except ValueError: 394 | raise ValueError('Invalid ISO date/time %r' % value) 395 | return value 396 | 397 | def _to_json(self, value): 398 | if isinstance(value, struct_time): 399 | value = datetime.utcfromtimestamp(timegm(value)) 400 | elif not isinstance(value, datetime): 401 | value = datetime.combine(value, time(0)) 402 | return value.replace(microsecond=0).isoformat() + 'Z' 403 | 404 | 405 | class TimeField(Field): 406 | """Mapping field for storing times. 407 | 408 | >>> field = TimeField() 409 | >>> field._to_python('15:30:00') 410 | datetime.time(15, 30) 411 | >>> field._to_json(time(15, 30)) 412 | '15:30:00' 413 | >>> field._to_json(datetime(2007, 4, 1, 15, 30)) 414 | '15:30:00' 415 | """ 416 | 417 | def _to_python(self, value): 418 | if isinstance(value, basestring): 419 | try: 420 | value = value.split('.', 1)[0] # strip out microseconds 421 | value = time(*strptime(value, '%H:%M:%S')[3:6]) 422 | except ValueError: 423 | raise ValueError('Invalid ISO time %r' % value) 424 | return value 425 | 426 | def _to_json(self, value): 427 | if isinstance(value, datetime): 428 | value = value.time() 429 | return value.replace(microsecond=0).isoformat() 430 | 431 | 432 | class DictField(Field): 433 | """Field type for nested dictionaries. 
434 | 435 | >>> from couchdb import Server 436 | >>> server = Server('http://localhost:5984/') 437 | >>> db = server.create('python-tests') 438 | 439 | >>> class Post(Document): 440 | ... title = TextField() 441 | ... content = TextField() 442 | ... author = DictField(Mapping.build( 443 | ... name = TextField(), 444 | ... email = TextField() 445 | ... )) 446 | ... extra = DictField() 447 | 448 | >>> post = Post( 449 | ... title='Foo bar', 450 | ... author=dict(name='John Doe', 451 | ... email='john@doe.com'), 452 | ... extra=dict(foo='bar'), 453 | ... ) 454 | >>> post.store(db) #doctest: +ELLIPSIS 455 | 456 | >>> post = Post.load(db, post.id) 457 | >>> post.author.name 458 | u'John Doe' 459 | >>> post.author.email 460 | u'john@doe.com' 461 | >>> post.extra 462 | {'foo': 'bar'} 463 | 464 | >>> del server['python-tests'] 465 | """ 466 | def __init__(self, mapping=None, name=None, default=None): 467 | default = default or {} 468 | Field.__init__(self, name=name, default=lambda: default.copy()) 469 | self.mapping = mapping 470 | 471 | def _to_python(self, value): 472 | if self.mapping is None: 473 | return value 474 | else: 475 | return self.mapping.wrap(value) 476 | 477 | def _to_json(self, value): 478 | if self.mapping is None: 479 | return value 480 | if not isinstance(value, Mapping): 481 | value = self.mapping(**value) 482 | return value.unwrap() 483 | 484 | 485 | class ListField(Field): 486 | """Field type for sequences of other fields. 487 | 488 | >>> from couchdb import Server 489 | >>> server = Server('http://localhost:5984/') 490 | >>> db = server.create('python-tests') 491 | 492 | >>> class Post(Document): 493 | ... title = TextField() 494 | ... content = TextField() 495 | ... pubdate = DateTimeField(default=datetime.now) 496 | ... comments = ListField(DictField(Mapping.build( 497 | ... author = TextField(), 498 | ... content = TextField(), 499 | ... time = DateTimeField() 500 | ... 
))) 501 | 502 | >>> post = Post(title='Foo bar') 503 | >>> post.comments.append(author='myself', content='Bla bla', 504 | ... time=datetime.now()) 505 | >>> len(post.comments) 506 | 1 507 | >>> post.store(db) #doctest: +ELLIPSIS 508 | 509 | >>> post = Post.load(db, post.id) 510 | >>> comment = post.comments[0] 511 | >>> comment['author'] 512 | 'myself' 513 | >>> comment['content'] 514 | 'Bla bla' 515 | >>> comment['time'] #doctest: +ELLIPSIS 516 | '...T...Z' 517 | 518 | >>> del server['python-tests'] 519 | """ 520 | 521 | def __init__(self, field, name=None, default=None): 522 | default = default or [] 523 | Field.__init__(self, name=name, default=lambda: copy.copy(default)) 524 | if type(field) is type: 525 | if issubclass(field, Field): 526 | field = field() 527 | elif issubclass(field, Mapping): 528 | field = DictField(field) 529 | self.field = field 530 | 531 | def _to_python(self, value): 532 | return self.Proxy(value, self.field) 533 | 534 | def _to_json(self, value): 535 | return [self.field._to_json(item) for item in value] 536 | 537 | 538 | class Proxy(list): 539 | 540 | def __init__(self, list, field): 541 | self.list = list 542 | self.field = field 543 | 544 | def __lt__(self, other): 545 | return self.list < other 546 | 547 | def __le__(self, other): 548 | return self.list <= other 549 | 550 | def __eq__(self, other): 551 | return self.list == other 552 | 553 | def __ne__(self, other): 554 | return self.list != other 555 | 556 | def __gt__(self, other): 557 | return self.list > other 558 | 559 | def __ge__(self, other): 560 | return self.list >= other 561 | 562 | def __repr__(self): 563 | return repr(self.list) 564 | 565 | def __str__(self): 566 | return str(self.list) 567 | 568 | def __unicode__(self): 569 | return unicode(self.list) 570 | 571 | def __delitem__(self, index): 572 | del self.list[index] 573 | 574 | def __getitem__(self, index): 575 | return self.field._to_python(self.list[index]) 576 | 577 | def __setitem__(self, index, value): 578 | 
self.list[index] = self.field._to_json(value) 579 | 580 | def __delslice__(self, i, j): 581 | del self.list[i:j] 582 | 583 | def __getslice__(self, i, j): 584 | return ListField.Proxy(self.list[i:j], self.field) 585 | 586 | def __setslice__(self, i, j, seq): 587 | self.list[i:j] = (self.field._to_json(v) for v in seq) 588 | 589 | def __contains__(self, value): 590 | for item in self.list: 591 | if self.field._to_python(item) == value: 592 | return True 593 | return False 594 | 595 | def __iter__(self): 596 | for index in range(len(self)): 597 | yield self[index] 598 | 599 | def __len__(self): 600 | return len(self.list) 601 | 602 | def __nonzero__(self): 603 | return bool(self.list) 604 | 605 | def append(self, *args, **kwargs): 606 | if args or not isinstance(self.field, DictField): 607 | if len(args) != 1: 608 | raise TypeError('append() takes exactly one argument ' 609 | '(%s given)' % len(args)) 610 | value = args[0] 611 | else: 612 | value = kwargs 613 | self.list.append(self.field._to_json(value)) 614 | 615 | def count(self, value): 616 | return [i for i in self].count(value) 617 | 618 | def extend(self, list): 619 | for item in list: 620 | self.append(item) 621 | 622 | def index(self, value): 623 | return self.list.index(self.field._to_json(value)) 624 | 625 | def insert(self, idx, *args, **kwargs): 626 | if args or not isinstance(self.field, DictField): 627 | if len(args) != 1: 628 | raise TypeError('insert() takes exactly 2 arguments ' 629 | '(%s given)' % len(args)) 630 | value = args[0] 631 | else: 632 | value = kwargs 633 | self.list.insert(idx, self.field._to_json(value)) 634 | 635 | def remove(self, value): 636 | return self.list.remove(self.field._to_json(value)) 637 | 638 | def pop(self, *args): 639 | return self.field._to_python(self.list.pop(*args)) 640 | 641 | class TupleField(Field): 642 | """Field type for tuple of other fields, with possibly different types. 
643 | 644 | >>> from couchdb import Server 645 | >>> server = Server('http://localhost:5984/') 646 | >>> db = server.create('python-tests') 647 | 648 | >>> class Post(Document): 649 | ... title = TextField() 650 | ... content = TextField() 651 | ... pubdate = DateTimeField(default=datetime.now) 652 | ... comments = ListField(TupleField(( 653 | ... TextField(), 654 | ... TextField(), 655 | ... DateTimeField() 656 | ... ))) 657 | 658 | >>> post = Post(title='Foo bar') 659 | >>> post.comments.append(('myself', 'Bla bla', 660 | ... datetime.now())) 661 | >>> len(post.comments) 662 | 1 663 | >>> post.store(db) #doctest: +ELLIPSIS 664 | 665 | >>> post = Post.load(db, post.id) 666 | >>> comment = post.comments[0] 667 | >>> comment[0] 668 | u'myself' 669 | >>> comment[1] 670 | u'Bla bla' 671 | >>> comment[2] #doctest: +ELLIPSIS 672 | datetime.datetime(...) 673 | 674 | >>> del server['python-tests'] 675 | """ 676 | 677 | def __init__(self, fields, name=None, default=None): 678 | Field.__init__(self, name=name, 679 | default=default or (None, ) * len(fields)) 680 | 681 | res = [] 682 | for field in fields: 683 | if type(field) is type: 684 | if issubclass(field, Field): 685 | field = field() 686 | elif issubclass(field, Mapping): 687 | field = DictField(field) 688 | res.append(field) 689 | self.fields = tuple(res) 690 | 691 | def _to_python(self, value): 692 | return tuple([self.fields[i]._to_python(m) 693 | for i, m in enumerate(value)]) 694 | 695 | def _to_json(self, value): 696 | # value is a tuple with python values to be converted 697 | assert len(self.fields) == len(value) 698 | return [self.fields[i]._to_json(m) for i, m in enumerate(value)] 699 | 700 | 701 | -------------------------------------------------------------------------------- /scripts/pep8.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # pep8.py - Check Python source code formatting, according to PEP 8 3 | # Copyright (C) 2006 Johann C. 
Rocholl 4 | # 5 | # Permission is hereby granted, free of charge, to any person 6 | # obtaining a copy of this software and associated documentation files 7 | # (the "Software"), to deal in the Software without restriction, 8 | # including without limitation the rights to use, copy, modify, merge, 9 | # publish, distribute, sublicense, and/or sell copies of the Software, 10 | # and to permit persons to whom the Software is furnished to do so, 11 | # subject to the following conditions: 12 | # 13 | # The above copyright notice and this permission notice shall be 14 | # included in all copies or substantial portions of the Software. 15 | # 16 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS 20 | # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN 21 | # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 22 | # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | # SOFTWARE. 24 | 25 | """ 26 | Check Python source code formatting, according to PEP 8: 27 | http://www.python.org/dev/peps/pep-0008/ 28 | 29 | For usage and a list of options, try this: 30 | $ python pep8.py -h 31 | 32 | This program and its regression test suite live here: 33 | http://svn.browsershots.org/trunk/devtools/pep8/ 34 | http://trac.browsershots.org/browser/trunk/devtools/pep8/ 35 | 36 | Groups of errors and warnings: 37 | E errors 38 | W warnings 39 | 100 indentation 40 | 200 whitespace 41 | 300 blank lines 42 | 400 imports 43 | 500 line length 44 | 600 deprecation 45 | 700 statements 46 | 47 | You can add checks to this program by writing plugins. Each plugin is 48 | a simple function that is called for each line of source code, either 49 | physical or logical. 
50 | 51 | Physical line: 52 | - Raw line of text from the input file. 53 | 54 | Logical line: 55 | - Multi-line statements converted to a single line. 56 | - Stripped left and right. 57 | - Contents of strings replaced with 'xxx' of same length. 58 | - Comments removed. 59 | 60 | The check function requests physical or logical lines by the name of 61 | the first argument: 62 | 63 | def maximum_line_length(physical_line) 64 | def extraneous_whitespace(logical_line) 65 | def blank_lines(logical_line, blank_lines, indent_level, line_number) 66 | 67 | The last example above demonstrates how check plugins can request 68 | additional information with extra arguments. All attributes of the 69 | Checker object are available. Some examples: 70 | 71 | lines: a list of the raw lines from the input file 72 | tokens: the tokens that contribute to this logical line 73 | line_number: line number in the input file 74 | blank_lines: blank lines before this one 75 | indent_char: first indentation character in this file (' ' or '\t') 76 | indent_level: indentation (with tabs expanded to multiples of 8) 77 | previous_indent_level: indentation on previous line 78 | previous_logical: previous logical line 79 | 80 | The docstring of each check function shall be the relevant part of 81 | text from PEP 8. It is printed if the user enables --show-pep8. 
82 | 83 | """ 84 | 85 | import os 86 | import sys 87 | import re 88 | import time 89 | import inspect 90 | import tokenize 91 | from optparse import OptionParser 92 | from keyword import iskeyword 93 | from fnmatch import fnmatch 94 | 95 | __version__ = '0.2.0' 96 | __revision__ = '$Rev$' 97 | 98 | default_exclude = '.svn,CVS,*.pyc,*.pyo' 99 | 100 | indent_match = re.compile(r'([ \t]*)').match 101 | raise_comma_match = re.compile(r'raise\s+\w+\s*(,)').match 102 | 103 | operators = """ 104 | + - * / % ^ & | = < > >> << 105 | += -= *= /= %= ^= &= |= == <= >= >>= <<= 106 | != <> : 107 | in is or not and 108 | """.split() 109 | 110 | options = None 111 | args = None 112 | 113 | 114 | ############################################################################## 115 | # Plugins (check functions) for physical lines 116 | ############################################################################## 117 | 118 | 119 | def tabs_or_spaces(physical_line, indent_char): 120 | """ 121 | Never mix tabs and spaces. 122 | 123 | The most popular way of indenting Python is with spaces only. The 124 | second-most popular way is with tabs only. Code indented with a mixture 125 | of tabs and spaces should be converted to using spaces exclusively. When 126 | invoking the Python command line interpreter with the -t option, it issues 127 | warnings about code that illegally mixes tabs and spaces. When using -tt 128 | these warnings become errors. These options are highly recommended! 129 | """ 130 | indent = indent_match(physical_line).group(1) 131 | for offset, char in enumerate(indent): 132 | if char != indent_char: 133 | return offset, "E101 indentation contains mixed spaces and tabs" 134 | 135 | 136 | def tabs_obsolete(physical_line): 137 | """ 138 | For new projects, spaces-only are strongly recommended over tabs. Most 139 | editors have features that make this easy to do. 
140 | """ 141 | indent = indent_match(physical_line).group(1) 142 | if indent.count('\t'): 143 | return indent.index('\t'), "W191 indentation contains tabs" 144 | 145 | 146 | def trailing_whitespace(physical_line): 147 | """ 148 | JCR: Trailing whitespace is superfluous. 149 | """ 150 | physical_line = physical_line.rstrip('\n') # chr(10), newline 151 | physical_line = physical_line.rstrip('\r') # chr(13), carriage return 152 | physical_line = physical_line.rstrip('\x0c') # chr(12), form feed, ^L 153 | stripped = physical_line.rstrip() 154 | if physical_line != stripped: 155 | return len(stripped), "W291 trailing whitespace" 156 | 157 | 158 | def trailing_blank_lines(physical_line, lines, line_number): 159 | """ 160 | JCR: Trailing blank lines are superfluous. 161 | """ 162 | if physical_line.strip() == '' and line_number == len(lines): 163 | return 0, "W391 blank line at end of file" 164 | 165 | 166 | def missing_newline(physical_line): 167 | """ 168 | JCR: The last line should have a newline. 169 | """ 170 | if physical_line.rstrip() == physical_line: 171 | return len(physical_line), "W292 no newline at end of file" 172 | 173 | 174 | def maximum_line_length(physical_line): 175 | """ 176 | Limit all lines to a maximum of 79 characters. 177 | 178 | There are still many devices around that are limited to 80 character 179 | lines; plus, limiting windows to 80 characters makes it possible to have 180 | several windows side-by-side. The default wrapping on such devices looks 181 | ugly. Therefore, please limit all lines to a maximum of 79 characters. 182 | For flowing long blocks of text (docstrings or comments), limiting the 183 | length to 72 characters is recommended. 
184 | """ 185 | length = len(physical_line.rstrip()) 186 | if length > 79: 187 | return 79, "E501 line too long (%d characters)" % length 188 | 189 | 190 | ############################################################################## 191 | # Plugins (check functions) for logical lines 192 | ############################################################################## 193 | 194 | 195 | def blank_lines(logical_line, blank_lines, indent_level, line_number, 196 | previous_logical): 197 | """ 198 | Separate top-level function and class definitions with two blank lines. 199 | 200 | Method definitions inside a class are separated by a single blank line. 201 | 202 | Extra blank lines may be used (sparingly) to separate groups of related 203 | functions. Blank lines may be omitted between a bunch of related 204 | one-liners (e.g. a set of dummy implementations). 205 | 206 | Use blank lines in functions, sparingly, to indicate logical sections. 207 | """ 208 | if line_number == 1: 209 | return # Don't expect blank lines before the first line 210 | if previous_logical.startswith('@'): 211 | return # Don't expect blank lines after function decorator 212 | if (logical_line.startswith('def ') or 213 | logical_line.startswith('class ') or 214 | logical_line.startswith('@')): 215 | if indent_level > 0 and blank_lines != 1: 216 | return 0, "E301 expected 1 blank line, found %d" % blank_lines 217 | if indent_level == 0 and blank_lines != 2: 218 | return 0, "E302 expected 2 blank lines, found %d" % blank_lines 219 | if blank_lines > 2: 220 | return 0, "E303 too many blank lines (%d)" % blank_lines 221 | 222 | 223 | def extraneous_whitespace(logical_line): 224 | """ 225 | Avoid extraneous whitespace in the following situations: 226 | 227 | - Immediately inside parentheses, brackets or braces. 228 | 229 | - Immediately before a comma, semicolon, or colon. 
230 | """ 231 | line = logical_line 232 | for char in '([{': 233 | found = line.find(char + ' ') 234 | if found > -1: 235 | return found + 1, "E201 whitespace after '%s'" % char 236 | for char in '}])': 237 | found = line.find(' ' + char) 238 | if found > -1 and line[found - 1] != ',': 239 | return found, "E202 whitespace before '%s'" % char 240 | for char in ',;:': 241 | found = line.find(' ' + char) 242 | if found > -1: 243 | return found, "E203 whitespace before '%s'" % char 244 | 245 | 246 | def missing_whitespace(logical_line): 247 | """ 248 | JCR: Each comma, semicolon or colon should be followed by whitespace. 249 | """ 250 | line = logical_line 251 | for index in range(len(line) - 1): 252 | char = line[index] 253 | if char in ',;:' and line[index + 1] != ' ': 254 | before = line[:index] 255 | if char == ':' and before.count('[') > before.count(']'): 256 | continue # Slice syntax, no space required 257 | return index, "E231 missing whitespace after '%s'" % char 258 | 259 | 260 | def indentation(logical_line, previous_logical, indent_char, 261 | indent_level, previous_indent_level): 262 | """ 263 | Use 4 spaces per indentation level. 264 | 265 | For really old code that you don't want to mess up, you can continue to 266 | use 8-space tabs. 267 | """ 268 | if indent_char == ' ' and indent_level % 4: 269 | return 0, "E111 indentation is not a multiple of four" 270 | indent_expect = previous_logical.endswith(':') 271 | if indent_expect and indent_level <= previous_indent_level: 272 | return 0, "E112 expected an indented block" 273 | if indent_level > previous_indent_level and not indent_expect: 274 | return 0, "E113 unexpected indentation" 275 | 276 | 277 | def whitespace_before_parameters(logical_line, tokens): 278 | """ 279 | Avoid extraneous whitespace in the following situations: 280 | 281 | - Immediately before the open parenthesis that starts the argument 282 | list of a function call. 
    - Immediately before the open parenthesis that starts an indexing or
      slicing.
    """
    # Walk the token stream keeping the previous token's type/text/end so a
    # '(' or '[' that does not start where the previous NAME ended (i.e. has
    # whitespace before it) can be flagged.
    prev_type = tokens[0][0]
    prev_text = tokens[0][1]
    prev_end = tokens[0][3]
    for index in range(1, len(tokens)):
        token_type, text, start, end, line = tokens[index]
        if (token_type == tokenize.OP and
            text in '([' and
            start != prev_end and
            prev_type == tokenize.NAME and
            # 'class Foo (Base)' is excluded, as are keywords like
            # 'print (x)' or 'return (x)'.
            (index < 2 or tokens[index - 2][1] != 'class') and
            (not iskeyword(prev_text))):
            return prev_end, "E211 whitespace before '%s'" % text
        prev_type = token_type
        prev_text = text
        prev_end = end


def whitespace_around_operator(logical_line):
    """
    Avoid extraneous whitespace in the following situations:

    - More than one space around an assignment (or other) operator to
      align it with another.
    """
    line = logical_line
    for operator in operators:
        # Two-space literals: "multiple spaces" means two or more.
        # NOTE(review): the '  ' literals were reconstructed from a
        # whitespace-collapsed copy — confirm against upstream pep8.py.
        found = line.find('  ' + operator)
        if found > -1:
            return found, "E221 multiple spaces before operator"
        found = line.find(operator + '  ')
        if found > -1:
            return found, "E222 multiple spaces after operator"
        found = line.find('\t' + operator)
        if found > -1:
            return found, "E223 tab before operator"
        found = line.find(operator + '\t')
        if found > -1:
            return found, "E224 tab after operator"


def whitespace_around_comma(logical_line):
    """
    Avoid extraneous whitespace in the following situations:

    - More than one space around an assignment (or other) operator to
      align it with another.

    JCR: This should also be applied around comma etc.
335 | """ 336 | line = logical_line 337 | for separator in ',;:': 338 | found = line.find(separator + ' ') 339 | if found > -1: 340 | return found + 1, "E241 multiple spaces after '%s'" % separator 341 | found = line.find(separator + '\t') 342 | if found > -1: 343 | return found + 1, "E242 tab after '%s'" % separator 344 | 345 | 346 | def imports_on_separate_lines(logical_line): 347 | """ 348 | Imports should usually be on separate lines. 349 | """ 350 | line = logical_line 351 | if line.startswith('import '): 352 | found = line.find(',') 353 | if found > -1: 354 | return found, "E401 multiple imports on one line" 355 | 356 | 357 | def compound_statements(logical_line): 358 | """ 359 | Compound statements (multiple statements on the same line) are 360 | generally discouraged. 361 | """ 362 | line = logical_line 363 | found = line.find(':') 364 | if -1 < found < len(line) - 1: 365 | before = line[:found] 366 | if (before.count('{') <= before.count('}') and # {'a': 1} (dict) 367 | before.count('[') <= before.count(']') and # [1:2] (slice) 368 | not re.search(r'\blambda\b', before)): # lambda x: x 369 | return found, "E701 multiple statements on one line (colon)" 370 | found = line.find(';') 371 | if -1 < found: 372 | return found, "E702 multiple statements on one line (semicolon)" 373 | 374 | 375 | def python_3000_has_key(logical_line): 376 | """ 377 | The {}.has_key() method will be removed in the future version of 378 | Python. Use the 'in' operation instead, like: 379 | d = {"a": 1, "b": 2} 380 | if "b" in d: 381 | print d["b"] 382 | """ 383 | pos = logical_line.find('.has_key(') 384 | if pos > -1: 385 | return pos, "W601 .has_key() is deprecated, use 'in'" 386 | 387 | 388 | def python_3000_raise_comma(logical_line): 389 | """ 390 | When raising an exception, use "raise ValueError('message')" 391 | instead of the older form "raise ValueError, 'message'". 
392 | 393 | The paren-using form is preferred because when the exception arguments 394 | are long or include string formatting, you don't need to use line 395 | continuation characters thanks to the containing parentheses. The older 396 | form will be removed in Python 3000. 397 | """ 398 | match = raise_comma_match(logical_line) 399 | if match: 400 | return match.start(1), "W602 deprecated form of raising exception" 401 | 402 | 403 | ############################################################################## 404 | # Helper functions 405 | ############################################################################## 406 | 407 | 408 | def expand_indent(line): 409 | """ 410 | Return the amount of indentation. 411 | Tabs are expanded to the next multiple of 8. 412 | 413 | >>> expand_indent(' ') 414 | 4 415 | >>> expand_indent('\\t') 416 | 8 417 | >>> expand_indent(' \\t') 418 | 8 419 | >>> expand_indent(' \\t') 420 | 8 421 | >>> expand_indent(' \\t') 422 | 16 423 | """ 424 | result = 0 425 | for char in line: 426 | if char == '\t': 427 | result = result / 8 * 8 + 8 428 | elif char == ' ': 429 | result += 1 430 | else: 431 | break 432 | return result 433 | 434 | 435 | ############################################################################## 436 | # Framework to run all checks 437 | ############################################################################## 438 | 439 | 440 | def message(text): 441 | """Print a message.""" 442 | # print >> sys.stderr, options.prog + ': ' + text 443 | # print >> sys.stderr, text 444 | print text 445 | 446 | 447 | def find_checks(argument_name): 448 | """ 449 | Find all globally visible functions where the first argument name 450 | starts with argument_name. 
451 | """ 452 | checks = [] 453 | function_type = type(find_checks) 454 | for name, function in globals().iteritems(): 455 | if type(function) is function_type: 456 | args = inspect.getargspec(function)[0] 457 | if len(args) >= 1 and args[0].startswith(argument_name): 458 | checks.append((name, function, args)) 459 | checks.sort() 460 | return checks 461 | 462 | 463 | def mute_string(text): 464 | """ 465 | Replace contents with 'xxx' to prevent syntax matching. 466 | 467 | >>> mute_string('"abc"') 468 | '"xxx"' 469 | >>> mute_string("'''abc'''") 470 | "'''xxx'''" 471 | >>> mute_string("r'abc'") 472 | "r'xxx'" 473 | """ 474 | start = 1 475 | end = len(text) - 1 476 | # String modifiers (e.g. u or r) 477 | if text.endswith('"'): 478 | start += text.index('"') 479 | elif text.endswith("'"): 480 | start += text.index("'") 481 | # Triple quotes 482 | if text.endswith('"""') or text.endswith("'''"): 483 | start += 2 484 | end -= 2 485 | return text[:start] + 'x' * (end - start) + text[end:] 486 | 487 | 488 | class Checker: 489 | """ 490 | Load a Python source file, tokenize it, check coding style. 491 | """ 492 | 493 | def __init__(self, filename): 494 | self.filename = filename 495 | self.lines = file(filename).readlines() 496 | self.physical_checks = find_checks('physical_line') 497 | self.logical_checks = find_checks('logical_line') 498 | options.counters['physical lines'] = \ 499 | options.counters.get('physical lines', 0) + len(self.lines) 500 | 501 | def readline(self): 502 | """ 503 | Get the next line from the input buffer. 504 | """ 505 | self.line_number += 1 506 | if self.line_number > len(self.lines): 507 | return '' 508 | return self.lines[self.line_number - 1] 509 | 510 | def readline_check_physical(self): 511 | """ 512 | Check and return the next physical line. This method can be 513 | used to feed tokenize.generate_tokens. 
        """
        line = self.readline()
        if line:
            self.check_physical(line)
        return line

    def run_check(self, check, argument_names):
        """
        Run a check plugin.

        Each argument name is looked up as an attribute of self, so the
        plugin's signature selects the state it receives.
        """
        arguments = []
        for name in argument_names:
            arguments.append(getattr(self, name))
        return check(*arguments)

    def check_physical(self, line):
        """
        Run all physical checks on a raw input line.
        """
        self.physical_line = line
        # Remember the first indent character seen; used by the
        # tabs/spaces consistency check.
        if self.indent_char is None and len(line) and line[0] in ' \t':
            self.indent_char = line[0]
        for name, check, argument_names in self.physical_checks:
            result = self.run_check(check, argument_names)
            if result is not None:
                offset, text = result
                self.report_error(self.line_number, offset, text, check)

    def build_tokens_line(self):
        """
        Build a logical line from tokens.

        Fills self.logical_line with the joined token text and
        self.mapping with (offset-in-logical-line, token) pairs so that
        logical offsets can be mapped back to physical positions.
        """
        self.mapping = []
        logical = []
        length = 0
        previous = None
        for token in self.tokens:
            token_type, text = token[0:2]
            # Layout-only tokens do not contribute to the logical line.
            if token_type in (tokenize.COMMENT, tokenize.NL,
                              tokenize.INDENT, tokenize.DEDENT,
                              tokenize.NEWLINE):
                continue
            # Mask string contents so checks cannot match inside literals.
            if token_type == tokenize.STRING:
                text = mute_string(text)
            if previous:
                end_line, end = previous[3]
                start_line, start = token[2]
                if end_line != start_line:  # different row
                    if self.lines[end_line - 1][end - 1] not in '{[(':
                        logical.append(' ')
                        length += 1
                elif end != start:  # different column
                    # Preserve the exact inter-token text (spaces/tabs).
                    fill = self.lines[end_line - 1][end:start]
                    logical.append(fill)
                    length += len(fill)
            self.mapping.append((length, token))
            logical.append(text)
            length += len(text)
            previous = token
        self.logical_line = ''.join(logical)
        # The construction above must never add leading/trailing blanks.
        assert self.logical_line.lstrip() == self.logical_line
        assert self.logical_line.rstrip() == self.logical_line

    def check_logical(self):
        """
        Build a line from tokens and run all logical checks on it.
        """
        options.counters['logical lines'] = \
            options.counters.get('logical lines', 0) + 1
        self.build_tokens_line()
        # Indentation of the first token of the logical line.
        first_line = self.lines[self.mapping[0][1][2][0] - 1]
        indent = first_line[:self.mapping[0][1][2][1]]
        self.previous_indent_level = self.indent_level
        self.indent_level = expand_indent(indent)
        if options.verbose >= 2:
            print self.logical_line[:80].rstrip()
        for name, check, argument_names in self.logical_checks:
            if options.verbose >= 3:
                print '   ', name
            result = self.run_check(check, argument_names)
            if result is not None:
                offset, text = result
                if type(offset) is tuple:
                    original_number, original_offset = offset
                else:
                    # Map the logical-line offset back to a physical
                    # (line, column) via the token mapping.
                    # NOTE(review): if offset were smaller than the first
                    # token offset these would be unbound; presumably the
                    # mapping always starts at offset 0 — confirm.
                    for token_offset, token in self.mapping:
                        if offset >= token_offset:
                            original_number = token[2][0]
                            original_offset = (token[2][1]
                                               + offset - token_offset)
                self.report_error(original_number, original_offset,
                                  text, check)
        self.previous_logical = self.logical_line

    def check_all(self):
        """
        Run all checks on the input file.

        Returns the number of errors reported (self.file_errors).
        """
        self.file_errors = 0
        self.line_number = 0
        self.indent_char = None
        self.indent_level = 0
        self.previous_logical = ''
        self.blank_lines = 0
        self.tokens = []
        parens = 0
        for token in tokenize.generate_tokens(self.readline_check_physical):
            # print tokenize.tok_name[token[0]], repr(token)
            self.tokens.append(token)
            token_type, text = token[0:2]
            # Track bracket depth: a NEWLINE/NL inside brackets does not
            # terminate the logical line.
            if token_type == tokenize.OP and text in '([{':
                parens += 1
            if token_type == tokenize.OP and text in '}])':
                parens -= 1
            if token_type == tokenize.NEWLINE and not parens:
                self.check_logical()
                self.blank_lines = 0
                self.tokens = []
            if token_type == tokenize.NL and not parens:
                if len(self.tokens) <= 1:
                    # The physical line contains only this token.
                    self.blank_lines += 1
                self.tokens = []
            if token_type == tokenize.COMMENT:
                source_line = token[4]
                token_start = token[2][1]
                # A comment on a line of its own resets the blank count.
                if source_line[:token_start].strip() == '':
                    self.blank_lines = 0
                if text.endswith('\n') and not parens:
                    # The comment also ends a physical line. This works around
                    # Python < 2.6 behaviour, which does not generate NL after
                    # a comment which is on a line by itself.
                    self.tokens = []
        return self.file_errors

    def report_error(self, line_number, offset, text, check):
        """
        Report an error, according to options.
652 | """ 653 | if options.quiet == 1 and not self.file_errors: 654 | message(self.filename) 655 | self.file_errors += 1 656 | code = text[:4] 657 | options.counters[code] = options.counters.get(code, 0) + 1 658 | options.messages[code] = text[5:] 659 | if options.quiet: 660 | return 661 | if options.testsuite: 662 | base = os.path.basename(self.filename)[:4] 663 | if base == code: 664 | return 665 | if base[0] == 'E' and code[0] == 'W': 666 | return 667 | if ignore_code(code): 668 | return 669 | if options.counters[code] == 1 or options.repeat: 670 | message("%s:%s:%d: %s" % 671 | (self.filename, line_number, offset + 1, text)) 672 | if options.show_source: 673 | line = self.lines[line_number - 1] 674 | message(line.rstrip()) 675 | message(' ' * offset + '^') 676 | if options.show_pep8: 677 | message(check.__doc__.lstrip('\n').rstrip()) 678 | 679 | 680 | def input_file(filename): 681 | """ 682 | Run all checks on a Python source file. 683 | """ 684 | if excluded(filename) or not filename_match(filename): 685 | return {} 686 | if options.verbose: 687 | message('checking ' + filename) 688 | options.counters['files'] = options.counters.get('files', 0) + 1 689 | errors = Checker(filename).check_all() 690 | if options.testsuite and not errors: 691 | message("%s: %s" % (filename, "no errors found")) 692 | return errors 693 | 694 | 695 | def input_dir(dirname): 696 | """ 697 | Check all Python source files in this directory and all subdirectories. 
698 | """ 699 | dirname = dirname.rstrip('/') 700 | if excluded(dirname): 701 | return 0 702 | errors = 0 703 | for root, dirs, files in os.walk(dirname): 704 | if options.verbose: 705 | message('directory ' + root) 706 | options.counters['directories'] = \ 707 | options.counters.get('directories', 0) + 1 708 | dirs.sort() 709 | for subdir in dirs: 710 | if excluded(subdir): 711 | dirs.remove(subdir) 712 | files.sort() 713 | for filename in files: 714 | errors += input_file(os.path.join(root, filename)) 715 | return errors 716 | 717 | 718 | def excluded(filename): 719 | """ 720 | Check if options.exclude contains a pattern that matches filename. 721 | """ 722 | basename = os.path.basename(filename) 723 | for pattern in options.exclude: 724 | if fnmatch(basename, pattern): 725 | # print basename, 'excluded because it matches', pattern 726 | return True 727 | 728 | 729 | def filename_match(filename): 730 | """ 731 | Check if options.filename contains a pattern that matches filename. 732 | If options.filename is unspecified, this always returns True. 733 | """ 734 | if not options.filename: 735 | return True 736 | for pattern in options.filename: 737 | if fnmatch(filename, pattern): 738 | return True 739 | 740 | 741 | def ignore_code(code): 742 | """ 743 | Check if options.ignore contains a prefix of the error code. 744 | """ 745 | for ignore in options.ignore: 746 | if code.startswith(ignore): 747 | return True 748 | 749 | 750 | def get_error_statistics(): 751 | """Get error statistics.""" 752 | return get_statistics("E") 753 | 754 | 755 | def get_warning_statistics(): 756 | """Get warning statistics.""" 757 | return get_statistics("W") 758 | 759 | 760 | def get_statistics(prefix=''): 761 | """ 762 | Get statistics for message codes that start with the prefix. 
    prefix='' matches all errors and warnings
    prefix='E' matches all errors
    prefix='W' matches all warnings
    prefix='E4' matches all errors that have to do with imports
    """
    stats = []
    keys = options.messages.keys()
    keys.sort()
    for key in keys:
        if key.startswith(prefix):
            stats.append('%-7s %s %s' %
                         (options.counters[key], key, options.messages[key]))
    return stats


def print_statistics(prefix=''):
    """Print overall statistics (number of errors and warnings)."""
    for line in get_statistics(prefix):
        print line


def print_benchmark(elapsed):
    """
    Print benchmark numbers.

    elapsed is the wall-clock duration of the whole run in seconds.
    """
    print '%-7.2f %s' % (elapsed, 'seconds elapsed')
    keys = ['directories', 'files',
            'logical lines', 'physical lines']
    for key in keys:
        if key in options.counters:
            print '%-7d %s per second (%d total)' % (
                options.counters[key] / elapsed, key,
                options.counters[key])


def process_options(arglist=None):
    """
    Process options passed either via arglist or via command line args.

    Populates the module-level 'options' and 'args' globals used by the
    rest of the framework (counters/messages start out empty).
    """
    global options, args
    usage = "%prog [options] input ..."
    parser = OptionParser(usage)
    parser.add_option('-v', '--verbose', default=0, action='count',
                      help="print status messages, or debug with -vv")
    parser.add_option('-q', '--quiet', default=0, action='count',
                      help="report only file names, or nothing with -qq")
    parser.add_option('--exclude', metavar='patterns', default=default_exclude,
                      help="skip matches (default %s)" % default_exclude)
    parser.add_option('--filename', metavar='patterns',
                      help="only check matching files (e.g. *.py)")
    parser.add_option('--ignore', metavar='errors', default='',
                      help="skip errors and warnings (e.g. E4,W)")
    parser.add_option('--repeat', action='store_true',
                      help="show all occurrences of the same error")
    parser.add_option('--show-source', action='store_true',
                      help="show source code for each error")
    parser.add_option('--show-pep8', action='store_true',
                      help="show text of PEP 8 for each error")
    parser.add_option('--statistics', action='store_true',
                      help="count errors and warnings")
    parser.add_option('--benchmark', action='store_true',
                      help="measure processing speed")
    parser.add_option('--testsuite', metavar='dir',
                      help="run regression tests from dir")
    parser.add_option('--doctest', action='store_true',
                      help="run doctest on myself")
    options, args = parser.parse_args(arglist)
    if options.testsuite:
        args.append(options.testsuite)
    if len(args) == 0:
        parser.error('input not specified')
    options.prog = os.path.basename(sys.argv[0])
    options.exclude = options.exclude.split(',')
    # Normalize exclude patterns: drop trailing slashes.
    for index in range(len(options.exclude)):
        options.exclude[index] = options.exclude[index].rstrip('/')
    if options.filename:
        options.filename = options.filename.split(',')
    if options.ignore:
        options.ignore = options.ignore.split(',')
    else:
        options.ignore = []
    options.counters = {}
    options.messages = {}

    return options, args


def _main():
    """
    Parse options and run checks on Python source.

    Returns a truthy value (for sys.exit) when any error was found.
    """
    options, args = process_options()
    if options.doctest:
        import doctest
        return doctest.testmod()
    start_time = time.time()
    errors = 0
    for path in args:
        if os.path.isdir(path):
            errors += input_dir(path)
        else:
            errors += input_file(path)
    elapsed = time.time() - start_time
    if options.statistics:
        print_statistics()
    if options.benchmark:
        print_benchmark(elapsed)
    return errors > 0

if __name__ == '__main__':
    sys.exit(_main())
-------------------------------------------------------------------------------- /paisley/test/test_client.py: --------------------------------------------------------------------------------
# -*- Mode: Python; test-case-name: paisley.test.test_client -*-
# vi:si:et:sw=4:sts=4:ts=4

# Copyright (c) 2007-2008
# See LICENSE for details.

"""
Test for couchdb client.
"""

from paisley import pjson as json

import cgi

from twisted.internet import defer

from twisted.trial.unittest import TestCase
from twisted.internet.defer import Deferred
from twisted.internet import reactor
from twisted.web import resource, server
from twisted.web._newclient import ResponseDone
from twisted.python.failure import Failure

from paisley import client

from paisley.test import util


class TestableCouchDB(client.CouchDB):
    """
    A couchdb client that can be tested: override the getPage method.
    """

    def __init__(self, *args, **kwargs):
        """
        Initialize the client: forward parameters, and create attributes used
        in tests.
        """
        client.CouchDB.__init__(self, *args, **kwargs)
        # deferred returned by the fake _getPage; tests fire it manually.
        self.deferred = Deferred()
        self.uri = None
        self.kwargs = None
        self.called = False

    def _getPage(self, uri, *args, **kwargs):
        """
        Fake getPage that does nothing but save the arguments.
        """
        # One-shot: a second call means the test issued an unexpected
        # extra request.
        if self.called:
            raise RuntimeError("One shot client")
        self.called = True
        self.uri = uri
        self.kwargs = kwargs
        return self.deferred


class CouchDBTestCase(TestCase):
    """
    Test methods against a couchDB.
    """

    def setUp(self):
        """
        Create a fake client to be used in the tests.
        """
        self.client = TestableCouchDB("localhost")

    def test_disable_log(self):
        client = TestableCouchDB('localhost', disable_log=True)
        import logging
        log = logging.getLogger('paisley')
        self.assertNotEqual(log, client.log)

    def test_enable_log_and_defaults(self):
        client = TestableCouchDB('localhost')
        import logging
        log = logging.getLogger('paisley')
        self.assertEqual(log, client.log)

    def test_auth_init(self):
        """
        Test setting up client with authentication.
        """
        self.client_auth = client.CouchDB("localhost",
                                          username="test",
                                          password="testpass")

        self.assertEquals(self.client_auth.username, "test")
        self.assertEquals(self.client_auth.password, "testpass")

    def test_get(self):
        """
        Test get method.
        """
        self.client.get("foo")
        self.assertEquals(self.client.uri, "foo")
        self.assertEquals(self.client.kwargs["method"], "GET")

    def test_post(self):
        """
        Test post method.
        """
        self.client.post("bar", "egg")
        self.assertEquals(self.client.uri, "bar")
        self.assertEquals(self.client.kwargs["method"], "POST")
        self.assertEquals(self.client.kwargs["postdata"], "egg")

    def test_put(self):
        """
        Test put method.
        """
        self.client.put("bar", "egg")
        self.assertEquals(self.client.uri, "bar")
        self.assertEquals(self.client.kwargs["method"], "PUT")
        self.assertEquals(self.client.kwargs["postdata"], "egg")

    def test_delete(self):
        """
        Test delete method.
        """
        self.client.delete("foo")
        self.assertEquals(self.client.uri, "foo")
        self.assertEquals(self.client.kwargs["method"], "DELETE")

    def _checkParseDeferred(self, d):
        """
        Utility function to test that a Deferred is called with JSON parsing.
        """
        d.callback('["foo"]')

        def cb(res):
            self.assertEquals(res, ["foo"])
        return d.addCallback(cb)

    def test_createDB(self):
        """
        Test createDB: this should C{PUT} the DB name in the uri.
        """
        d = self.client.createDB("mydb")
        self.assertEquals(self.client.uri, "/mydb/")
        self.assertEquals(self.client.kwargs["method"], "PUT")
        return self._checkParseDeferred(d)

    def test_deleteDB(self):
        """
        Test deleteDB: this should C{DELETE} the DB name.
        """
        d = self.client.deleteDB("mydb")
        self.assertEquals(self.client.uri, "/mydb/")
        self.assertEquals(self.client.kwargs["method"], "DELETE")
        return self._checkParseDeferred(d)

    def test_listDB(self):
        """
        Test listDB: this should C{GET} a specific uri.
        """
        d = self.client.listDB()
        self.assertEquals(self.client.uri, "/_all_dbs")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_infoDB(self):
        """
        Test infoDB: this should C{GET} the DB name.
        """
        d = self.client.infoDB("mydb")
        self.assertEquals(self.client.uri, "/mydb/")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_listDoc(self):
        """
        Test listDoc.
        """
        d = self.client.listDoc("mydb")
        self.assertEquals(self.client.uri, "/mydb/_all_docs")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_listDocReversed(self):
        """
        Test listDoc reversed.
        """
        d = self.client.listDoc("mydb", reverse=True)
        self.assertEquals(self.client.uri, "/mydb/_all_docs?reverse=true")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_listDocStartKey(self):
        """
        Test listDoc with a start_key.
        """
        d = self.client.listDoc("mydb", startkey=2)
        self.assertEquals(self.client.uri, "/mydb/_all_docs?startkey=2")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_listDocLimit(self):
        """
        Test listDoc with a limit.
        """
        d = self.client.listDoc("mydb", limit=3)
        self.assertEquals(self.client.uri, "/mydb/_all_docs?limit=3")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_listDocMultipleArguments(self):
        """
        Test listDoc with all options activated.
        """
        d = self.client.listDoc("mydb", limit=3, startkey=1, reverse=True)
        self.assertEquals(self.client.uri,
                          "/mydb/_all_docs?startkey=1&limit=3&reverse=true")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_openDoc(self):
        """
        Test openDoc.
        """
        d = self.client.openDoc("mydb", "mydoc")
        self.assertEquals(self.client.uri, "/mydb/mydoc")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_openDocAtRevision(self):
        """
        Test openDoc with a specific revision.
        """
        d = self.client.openDoc("mydb", "mydoc", revision="ABC")
        self.assertEquals(self.client.uri, "/mydb/mydoc?rev=ABC")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_openDocWithRevisionHistory(self):
        """
        Test openDoc with revision history.
        """
        d = self.client.openDoc("mydb", "mydoc", full=True)
        self.assertEquals(self.client.uri, "/mydb/mydoc?full=true")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_openDocAttachment(self):
        """
        Test openDoc for an attachment.
        """
        d = self.client.openDoc("mydb", "mydoc", attachment="bar")
        self.assertEquals(self.client.uri, "/mydb/mydoc/bar")
        self.assertEquals(self.client.kwargs["method"], "GET")
        # Data is transfered without parsing
        d.callback("test")
        return d.addCallback(self.assertEquals, "test")

    def test_saveDocWithDocId(self):
        """
        Test saveDoc, giving an explicit document ID.
        """
        d = self.client.saveDoc("mydb", "mybody", "mydoc")
        self.assertEquals(self.client.uri, "/mydb/mydoc")
        self.assertEquals(self.client.kwargs["method"], "PUT")
        return self._checkParseDeferred(d)

    def test_saveDocWithoutDocId(self):
        """
        Test saveDoc without a document ID.
        """
        d = self.client.saveDoc("mydb", "mybody")
        self.assertEquals(self.client.uri, "/mydb/")
        self.assertEquals(self.client.kwargs["method"], "POST")
        return self._checkParseDeferred(d)

    def test_saveStructuredDoc(self):
        """
        saveDoc should automatically serialize a structured document.
        """
        d = self.client.saveDoc("mydb", {"value": "mybody", "_id": "foo"},
                                "mydoc")
        self.assertEquals(self.client.uri, "/mydb/mydoc")
        self.assertEquals(self.client.kwargs["method"], "PUT")
        return self._checkParseDeferred(d)

    def test_deleteDoc(self):
        """
        Test deleteDoc.
        """
        d = self.client.deleteDoc("mydb", "mydoc", "1234567890")
        self.assertEquals(self.client.uri, "/mydb/mydoc?rev=1234567890")
        self.assertEquals(self.client.kwargs["method"], "DELETE")
        return self._checkParseDeferred(d)

    def test_addAttachments(self):
        """
        Test addAttachments.
        """
        doc = {"value": "bar"}
        self.client.addAttachments(doc,
            {"file1": "value", "file2": "second value"})
        # Attachment payloads are base64-encoded by the client.
        self.assertEquals(doc["_attachments"],
            {'file2': {'data': 'c2Vjb25kIHZhbHVl', 'type': 'base64'},
             'file1': {'data': 'dmFsdWU=', 'type': 'base64'}})

    def test_openView(self):
        """
        Test openView.
        """
        d = self.client.openView("mydb", "viewdoc", "myview")
        self.assertEquals(self.client.uri,
                          "/mydb/_design/viewdoc/_view/myview?")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_openViewWithQuery(self):
        """
        Test openView with query arguments.
        """
        d = self.client.openView("mydb",
                                 "viewdoc",
                                 "myview",
                                 startkey="foo",
                                 limit=10)
        self.assertEquals(self.client.kwargs["method"], "GET")
        self.failUnless(
            self.client.uri.startswith("/mydb/_design/viewdoc/_view/myview"))
        query = cgi.parse_qs(self.client.uri.split('?', 1)[-1])
        # couchdb expects valid JSON as the query values, so a string of foo
        # should be serialized as "foo" explicitly
        # e.g., ?startkey=A would return
        # {"error":"bad_request","reason":"invalid UTF-8 JSON"}
        self.assertEquals(query["startkey"], ['"foo"'])
        self.assertEquals(query["limit"], ["10"])
        return self._checkParseDeferred(d)

    def test_openViewWithKeysQuery(self):
        """
        Test openView handles couchdb's strange requirements for keys
        arguments.
        """
        # NOTE(review): the returned deferred 'd' is never fired or
        # returned here, unlike the sibling tests — confirm intentional.
        d = self.client.openView("mydb2",
                                 "viewdoc2",
                                 "myview2",
                                 keys=[1, 3, 4, "hello, world", {1: 5}],
                                 limit=5)
        self.assertEquals(self.client.kwargs["method"], "POST")
        self.failUnless(
            self.client.uri.startswith(
                '/mydb2/_design/viewdoc2/_view/myview2'))
        query = cgi.parse_qs(self.client.uri.split('?', 1)[-1])
        self.assertEquals(query, dict(limit=['5']))
        self.assertEquals(self.client.kwargs['postdata'],
            '{"keys": [1, 3, 4, "hello, world", {"1": 5}]}')

    def test_tempView(self):
        """
        Test tempView.
        """
        d = self.client.tempView("mydb", "js code")
        self.assertEquals(self.client.uri, "/mydb/_temp_view")
        self.assertEquals(self.client.kwargs["postdata"], "js code")
        self.assertEquals(self.client.kwargs["method"], "POST")
        return self._checkParseDeferred(d)

    def test_addViews(self):
        """
        Test addViews.
        """
        doc = {"value": "bar"}
        self.client.addViews(doc, {"view1": "js code 1", "view2": "js code 2"})
        self.assertEquals(doc["views"],
                          {"view1": "js code 1", "view2": "js code 2"})

    def test_bindToDB(self):
        """
        Test bindToDB, calling a bind method afterwards.
        """
        self.client.bindToDB("mydb")
        d = self.client.listDoc()
        self.assertEquals(self.client.uri, "/mydb/_all_docs")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_escapeId(self):
        # Document ids must be url-quoted in the request path.
        d = self.client.openDoc("mydb", "my doc with spaces")
        self.assertEquals(self.client.uri, "/mydb/my%20doc%20with%20spaces")
        self.assertEquals(self.client.kwargs["method"], "GET")
        return self._checkParseDeferred(d)

    def test_parseVersion(self):
        # Trailing vendor/build suffixes are stripped from the last part.
        version = self.client._parseVersion('1.1.0')
        self.assertEquals(version, (1, 1, 0))
        version = self.client._parseVersion('1.1.1a1162549')
        self.assertEquals(version, (1, 1, 1))


class FakeCouchDBResource(resource.Resource):
    """
    Fake a couchDB resource.

    @ivar result: value set in tests to be returned by the resource.
    @type result: C{str}
    """
    result = ""

    def getChild(self, path, request):
        """
        Return self as only child.
        """
        return self

    def render(self, request):
        """
        Return C{result}.
        """
        return self.result


class ConnectedCouchDBTestCase(TestCase):
    """
    Test C{CouchDB} with a real web server.
    """

    def setUp(self):
        """
        Create a web server and a client bound to it.
        """
        self.resource = FakeCouchDBResource()
        site = server.Site(self.resource)
        # Port 0: let the OS pick a free port, then point the client at it.
        port = reactor.listenTCP(0, site, interface="127.0.0.1")
        self.addCleanup(port.stopListening)
        self.client = client.CouchDB("127.0.0.1", port.getHost().port)

    def test_createDB(self):
        """
        Test listDB.
        """
        data = [u"mydb"]
        self.resource.result = json.dumps(data)
        d = self.client.listDB()

        def cb(result):
            self.assertEquals(result, data)
        d.addCallback(cb)
        return d


class RealCouchDBTestCase(util.CouchDBTestCase):

    def setUp(self):
        util.CouchDBTestCase.setUp(self)
        self.bound = False
        self.db_name = 'test'
        return self._resetDatabase()

    def _resetDatabase(self):
        """
        Helper method to create an empty test database, deleting the existing
        one if required. Used to clean up before running each test.
        """
        d = defer.Deferred()
        d.addCallback(lambda _: self._deleteTestDatabaseIfExists())
        d.addCallback(lambda _: self.db.createDB(self.db_name))
        d.addCallback(self.checkResultOk)
        d.addCallback(lambda _: self.db.infoDB(self.db_name))

        d.addCallback(self.checkInfoNewDatabase)
        # We need to know the version to perform the tests
        # Ideally the client class would trigger this automatically
        d.addCallback(lambda _: self.db.getVersion())
        d.callback(None)
        return d

    def _deleteTestDatabaseIfExists(self):
        """
        Helper method to delete the test database, whether it exists or not.
        Used to clean up before running each test.
475 | """ 476 | d = defer.Deferred() 477 | if self.bound: 478 | d.addCallback(lambda _: self.db.deleteDB()) 479 | else: 480 | d.addCallback(lambda _: self.db.deleteDB(self.db_name)) 481 | 482 | def deleteFailedCb(failure): 483 | pass 484 | d.addCallbacks(self.checkResultOk, deleteFailedCb) 485 | d.callback(None) 486 | return d 487 | 488 | def _saveDoc(self, body, doc_id): 489 | """ 490 | Helper method to save a document, and verify that it was successfull. 491 | """ 492 | d = defer.Deferred() 493 | if self.bound: 494 | d.addCallback(lambda _: self.db.saveDoc(body, doc_id)) 495 | else: 496 | d.addCallback(lambda _: 497 | self.db.saveDoc(self.db_name, body, doc_id)) 498 | 499 | def checkDocumentCreated(result): 500 | self.assertEquals(result['ok'], True) 501 | if doc_id != None: 502 | self.assertEquals(result['id'], doc_id) 503 | self._rev = result['rev'] 504 | d.addCallback(checkDocumentCreated) 505 | d.callback(None) 506 | return d 507 | 508 | def testDB(self): 509 | d = defer.Deferred() 510 | d.addCallback(lambda _: self._deleteTestDatabaseIfExists()) 511 | d.addCallback(lambda _: self.db.getVersion()) 512 | d.addCallback(lambda _: self.db.createDB('test')) 513 | d.addCallback(self.checkResultOk) 514 | d.addCallback(lambda _: self.db.listDB()) 515 | 516 | def listCb(result): 517 | if self.db.version.__ge__((1, 1, 0)): 518 | self.assertEquals(len(result), 3) 519 | self.failUnless('_replicator' in result) 520 | else: 521 | self.assertEquals(len(result), 2) 522 | self.failUnless('test' in result) 523 | self.failUnless('_users' in result) 524 | d.addCallback(listCb) 525 | d.addCallback(lambda _: self.db.saveDoc('test', {'number': 1}, '1')) 526 | 527 | def saveDoc(result): 528 | self.assertEquals(result[u'ok'], True) 529 | self.assertEquals(result[u'id'], u'1') 530 | # save document revision for comparison later 531 | self.doc_rev = result[u'rev'] 532 | d.addCallback(saveDoc) 533 | doc = {} 534 | self.db.addViews(doc, {'test': 535 | {'map': 'function (doc) { 
emit(doc.number, doc) }'}}) 536 | d.addCallback(lambda _: self.db.saveDoc('test', doc, '_design/test')) 537 | 538 | def addViewCb(result): 539 | self.assertEquals(result[u'ok'], True) 540 | self.assertEquals(result[u'id'], u'_design/test') 541 | d.addCallback(addViewCb) 542 | d.addCallback(lambda _: self.db.openView('test', 'test', 'test')) 543 | 544 | def openViewCb(result): 545 | self.assertEquals(result[u'total_rows'], 1) 546 | self.assertEquals(result[u'offset'], 0) 547 | self.assertEquals(result[u'rows'][0][u'id'], u'1') 548 | self.assertEquals(result[u'rows'][0][u'key'], 1) 549 | self.assertEquals(result[u'rows'][0][u'value'][u'_id'], u'1') 550 | self.assertEquals(result[u'rows'][0][u'value'][u'number'], 1) 551 | self.assertEquals(result[u'rows'][0][u'value'][u'_rev'], 552 | self.doc_rev) 553 | d.addCallback(openViewCb) 554 | d.addCallback(lambda _: 555 | self.db.openView('test', 'test', 'test', keys=[1])) 556 | d.addCallback(openViewCb) 557 | d.addCallback(lambda _: 558 | self.db.openView('test', 'test', 'test', keys = [0])) 559 | 560 | def openView3Cb(result): 561 | self.assertEquals(result[u'total_rows'], 1) 562 | self.assertEquals(result[u'offset'], 0) 563 | self.assertEquals(result[u'update_seq'], 2) 564 | self.assertEquals(result[u'rows'], []) 565 | d.addCallback(openView3Cb) 566 | d.addCallback(lambda _: self.db.deleteDB('test')) 567 | d.addCallback(self.checkResultOk) 568 | d.addCallback(lambda _: self.db.listDB()) 569 | 570 | def listCbAgain(result): 571 | if self.db.version.__ge__((1, 1, 0)): 572 | self.assertEquals(len(result), 2) 573 | else: 574 | self.assertEquals(len(result), 1) 575 | self.failUnless('_users' in result) 576 | d.addCallback(listCbAgain) 577 | 578 | d.callback(None) 579 | return d 580 | 581 | def test_createDB(self): 582 | """ 583 | Test createDB: this should C{PUT} the DB name in the uri. 
584 | """ 585 | d = defer.Deferred() 586 | # Since during setUp we already create the database, and here we are 587 | # specifically testing the creation, we need to delete it first 588 | d.addCallback(lambda _: self._deleteTestDatabaseIfExists()) 589 | d.addCallback(lambda _: self.db.createDB(self.db_name)) 590 | d.addCallback(self.checkResultOk) 591 | d.callback(None) 592 | return d 593 | 594 | def test_deleteDB(self): 595 | """ 596 | Test deleteDB: this should C{DELETE} the DB name. 597 | """ 598 | d = defer.Deferred() 599 | d.addCallback(lambda _: self.db.deleteDB(self.db_name)) 600 | d.addCallback(self.checkResultOk) 601 | d.callback(None) 602 | return d 603 | 604 | def test_listDB(self): 605 | """ 606 | Test listDB: this should C{GET} a specific uri. 607 | """ 608 | d = defer.Deferred() 609 | d.addCallback(lambda _: self.db.listDB()) 610 | 611 | def listCb(result): 612 | if self.db.version.__ge__((1, 1, 0)): 613 | self.assertEquals(len(result), 3) 614 | self.failUnless('_replicator' in result) 615 | else: 616 | self.assertEquals(len(result), 2) 617 | self.failUnless('test' in result) 618 | self.failUnless('_users' in result) 619 | d.addCallback(listCb) 620 | d.callback(None) 621 | return d 622 | 623 | def test_infoDB(self): 624 | """ 625 | Test infoDB: this should C{GET} the DB name. 626 | """ 627 | d = defer.Deferred() 628 | # Get info about newly created database 629 | d.addCallback(lambda _: self.db.infoDB(self.db_name)) 630 | d.addCallback(self.checkInfoNewDatabase) 631 | d.callback(None) 632 | return d 633 | 634 | def test_listDoc(self): 635 | """ 636 | Test listDoc. 637 | """ 638 | d = defer.Deferred() 639 | # List documents in newly created database 640 | d.addCallback(lambda _: self.db.listDoc(self.db_name)) 641 | d.addCallback(self.checkDatabaseEmpty) 642 | d.callback(None) 643 | return d 644 | 645 | def test_listDocReversed(self): 646 | """ 647 | Test listDoc reversed. 
648 | """ 649 | d = defer.Deferred() 650 | # List documents in newly created database 651 | d.addCallback(lambda _: self.db.listDoc(self.db_name, reverse=True)) 652 | d.addCallback(self.checkDatabaseEmpty) 653 | d.callback(None) 654 | return d 655 | 656 | def test_listDocStartKey(self): 657 | """ 658 | Test listDoc with a startkey. 659 | """ 660 | d = defer.Deferred() 661 | # List documents in newly created database 662 | d.addCallback(lambda _: self.db.listDoc(self.db_name, startkey=u'2')) 663 | d.addCallback(self.checkDatabaseEmpty) 664 | d.callback(None) 665 | return d 666 | 667 | def test_listDocLimit(self): 668 | """ 669 | Test listDoc with a limit. 670 | """ 671 | d = defer.Deferred() 672 | # List documents in newly created database 673 | d.addCallback(lambda _: self.db.listDoc(self.db_name, limit=3)) 674 | d.addCallback(self.checkDatabaseEmpty) 675 | d.callback(None) 676 | return d 677 | 678 | def test_listDocMultipleArguments(self): 679 | """ 680 | Test listDoc with all options activated. 681 | """ 682 | d = defer.Deferred() 683 | # List documents in newly created database 684 | d.addCallback(lambda _: 685 | self.db.listDoc(self.db_name, limit=3, startkey=u'1', 686 | reverse=True)) 687 | d.addCallback(self.checkDatabaseEmpty) 688 | d.callback(None) 689 | return d 690 | 691 | def test_openDoc(self): 692 | """ 693 | Test openDoc. 694 | """ 695 | d = defer.Deferred() 696 | doc_id = 'foo' 697 | body = {"value": "mybody"} 698 | d.addCallback(lambda _: self._saveDoc(body, doc_id)) 699 | d.addCallback(lambda _: self.db.openDoc(self.db_name, doc_id)) 700 | 701 | def checkDoc(result): 702 | self.assertEquals(result['_id'], doc_id) 703 | self.assertEquals(result['value'], 'mybody') 704 | d.addCallback(checkDoc) 705 | d.callback(None) 706 | return d 707 | 708 | @defer.inlineCallbacks 709 | def test_openDocAttachment(self): 710 | """ 711 | Test opening an attachment with openDoc. 
712 | """ 713 | attachment_name = 'bindata.dat' 714 | attachment_data = util.eight_bit_test_string() 715 | 716 | doc_id = 'foo' 717 | body = {"value": "mybody"} 718 | self.db.addAttachments(body, {attachment_name: attachment_data}) 719 | 720 | yield self._saveDoc(body, doc_id) 721 | 722 | retrieved_data = yield self.db.openDoc(self.db_name, doc_id, 723 | attachment=attachment_name) 724 | self.assertEquals(retrieved_data, attachment_data) 725 | 726 | def test_saveDocWithDocId(self): 727 | """ 728 | Test saveDoc, giving an explicit document ID. 729 | """ 730 | d = defer.Deferred() 731 | # Save simple document and check the result 732 | doc_id = 'foo' 733 | body = {} 734 | d.addCallback(lambda _: self._saveDoc(body, doc_id)) 735 | d.callback(None) 736 | return d 737 | 738 | def test_saveDocWithoutDocId(self): 739 | """ 740 | Test saveDoc without a document ID. 741 | """ 742 | d = defer.Deferred() 743 | doc_id = None 744 | body = {} 745 | d.addCallback(lambda _: self._saveDoc(body, doc_id)) 746 | d.callback(None) 747 | return d 748 | 749 | def test_saveStructuredDoc(self): 750 | """ 751 | saveDoc should automatically serialize a structured document. 752 | """ 753 | d = defer.Deferred() 754 | doc_id = 'foo' 755 | body = {"value": "mybody", "_id": doc_id} 756 | d.addCallback(lambda _: self._saveDoc(body, doc_id)) 757 | d.addCallback(lambda _: self.db.openDoc(self.db_name, doc_id)) 758 | 759 | def checkDocumentContent(result): 760 | #self.assertEquals(result['_id'], "AAA") 761 | self.assertEquals(result['_id'], doc_id) 762 | self.assertEquals(result['value'], 'mybody') 763 | d.addCallback(checkDocumentContent) 764 | d.callback(None) 765 | return d 766 | 767 | def test_deleteDoc(self): 768 | """ 769 | Test deleteDoc. 
770 | """ 771 | d = defer.Deferred() 772 | doc_id = 'foo' 773 | body = {"value": "mybody", "_id": doc_id} 774 | d.addCallback(lambda _: self._saveDoc(body, doc_id)) 775 | d.addCallback(lambda _: 776 | self.db.deleteDoc(self.db_name, doc_id, self._rev)) 777 | 778 | def checkDocumentDeleted(result): 779 | self.assertEquals(result['id'], doc_id) 780 | self.assertEquals(result['ok'], True) 781 | d.addCallback(checkDocumentDeleted) 782 | d.callback(None) 783 | return d 784 | 785 | def test_addAttachments(self): 786 | """ 787 | Test addAttachments. 788 | """ 789 | doc_id = 'foo' 790 | d = defer.Deferred() 791 | body = {"value": "mybody", "_id": doc_id} 792 | attachments = {"file1": "value", "file2": "second value"} 793 | d.addCallback(lambda _: self.db.addAttachments(body, attachments)) 794 | d.addCallback(lambda _: self._saveDoc(body, doc_id)) 795 | d.addCallback(lambda _: self.db.openDoc(self.db_name, doc_id)) 796 | 797 | def checkAttachments(result): 798 | self.failUnless('file1' in result["_attachments"]) 799 | self.failUnless('file2' in result["_attachments"]) 800 | self.assertEquals(result['_id'], doc_id) 801 | self.assertEquals(result['value'], 'mybody') 802 | d.addCallback(checkAttachments) 803 | d.callback(None) 804 | return d 805 | 806 | #def test_openView(self): 807 | # This is already covered by test_addViews 808 | 809 | def test_openViewWithKeysQuery(self): 810 | """ 811 | Test openView handles couchdb's strange requirements for keys arguments 812 | """ 813 | d = defer.Deferred() 814 | #d = Deferred() 815 | doc_id = 'foo' 816 | body = {"value": "bar"} 817 | view1_id = 'view1' 818 | view1 = ''' function(doc) { 819 | emit(doc._id, doc); 820 | }''' 821 | views = {view1_id: {'map': view1}} 822 | d.addCallback(lambda _: self.db.addViews(body, views)) 823 | d.addCallback(lambda _: self._saveDoc(body, '_design/' + doc_id)) 824 | keys=[ 825 | { 826 | 'startkey': ["a", "b", "c"], 827 | 'endkey': ["x", "y", "z"], 828 | }, 829 | { 830 | 'startkey': ["a", "b", "c"], 831 
| 'endkey': ["x", "y", "z"], 832 | }, 833 | ] 834 | d.addCallback(lambda _: self.db.openView( 835 | self.db_name, doc_id, view1_id, keys=keys, limit=5)) 836 | d.addCallback(self.checkResultEmptyView) 837 | d.callback(None) 838 | return d 839 | 840 | def test_tempView(self): 841 | """ 842 | Test tempView. 843 | """ 844 | d = defer.Deferred() 845 | view1 = ''' function(doc) { emit(doc._id, doc); } ''' 846 | view1 = ''' function(doc) { 847 | emit(doc._id, doc); 848 | }''' 849 | doc = {'map': view1} 850 | d.addCallback(lambda _: self.db.tempView(self.db_name, doc)) 851 | d.addCallback(self.checkResultEmptyView) 852 | d.callback(None) 853 | return d 854 | 855 | def test_addViews(self): 856 | """ 857 | Test addViews. 858 | """ 859 | d = defer.Deferred() 860 | doc_id = 'foo' 861 | #d = Deferred() 862 | body = {"value": "bar"} 863 | view1 = ''' function(doc) { 864 | emit(doc._id, doc); 865 | }''' 866 | view2 = ''' function(doc) { 867 | emit(doc._id, doc); 868 | }''' 869 | views = {"view1": {'map': view1}, "view2": {'map': view2}} 870 | d.addCallback(lambda _: self.db.addViews(body, views)) 871 | d.addCallback(lambda _: self._saveDoc(body, '_design/' + doc_id)) 872 | d.addCallback(lambda _: 873 | self.db.openDoc(self.db_name, '_design/' + doc_id)) 874 | 875 | def checkViews(result): 876 | self.failUnless(result["views"]['view1']['map'] == view1) 877 | self.failUnless(result["views"]['view2']['map'] == view2) 878 | self.assertEquals(result['_id'], '_design/' + doc_id) 879 | self.assertEquals(result['value'], 'bar') 880 | d.addCallback(checkViews) 881 | d.addCallback(lambda _: 882 | self.db.openView(self.db_name, doc_id, 'view1')) 883 | 884 | def checkOpenView(result): 885 | self.assertEquals(result["rows"], []) 886 | self.assertEquals(result["total_rows"], 0) 887 | self.assertEquals(result["offset"], 0) 888 | d.addCallback(checkOpenView) 889 | d.addCallback(lambda _: 890 | self.db.openView(self.db_name, doc_id, 'view2')) 891 | d.addCallback(checkOpenView) 892 | 
d.callback(None) 893 | return d 894 | 895 | def test_bindToDB(self): 896 | """ 897 | Test bindToDB, calling a bind method afterwards. 898 | """ 899 | d = defer.Deferred() 900 | doc_id = 'foo' 901 | body = {"value": "bar"} 902 | self.db.bindToDB(self.db_name) 903 | self.bound = True 904 | d.addCallback(lambda _: self._saveDoc(body, '_design/' + doc_id)) 905 | d.addCallback(lambda _: self.db.listDoc(self.db_name)) 906 | 907 | def checkViews(result): 908 | self.assertEquals(result['total_rows'], 1) 909 | self.assertEquals(result['offset'], 0) 910 | d.addCallback(checkViews) 911 | d.callback(None) 912 | return d 913 | 914 | def test_escapeId(self): 915 | d = defer.Deferred() 916 | doc_id = 'my doc with spaces' 917 | body = {"value": "bar"} 918 | d.addCallback(lambda _: self._saveDoc(body, doc_id)) 919 | d.addCallback(lambda _: self.db.openDoc(self.db_name, doc_id)) 920 | 921 | def checkDoc(result): 922 | self.assertEquals(result['_id'], doc_id) 923 | self.assertEquals(result['value'], 'bar') 924 | d.addCallback(checkDoc) 925 | d.callback(None) 926 | return d 927 | 928 | 929 | class UnicodeTestCase(util.CouchDBTestCase): 930 | 931 | def setUp(self): 932 | util.CouchDBTestCase.setUp(self) 933 | d = self.db.createDB('test') 934 | d.addCallback(self.checkResultOk) 935 | return d 936 | 937 | def tearDown(self): 938 | d = self.db.deleteDB('test') 939 | d.addCallback(self.checkResultOk) 940 | d.addCallback(lambda _: util.CouchDBTestCase.tearDown(self)) 941 | return d 942 | 943 | def testUnicodeContents(self): 944 | name = u'\xc3\xa9preuve' 945 | 946 | d = defer.Deferred() 947 | 948 | d.addCallback(lambda _: self.db.saveDoc('test', { 949 | 'name': name, 950 | name: 'name', 951 | })) 952 | d.addCallback(lambda r: self.db.openDoc('test', r['id'])) 953 | 954 | def check(r): 955 | self.assertEquals(r['name'], name) 956 | self.assertEquals(r[name], u'name') 957 | self.assertEquals(type(r['name']), unicode) 958 | self.assertEquals(type(r[name]), unicode) 959 | d.addCallback(check) 
960 | d.callback(None) 961 | return d 962 | 963 | def testUnicodeId(self): 964 | docId = u'\xc3\xa9preuve' 965 | 966 | d = defer.Deferred() 967 | 968 | d.addCallback(lambda _: self.db.saveDoc('test', { 969 | 'name': 'name', 970 | }, docId=docId)) 971 | 972 | def saveDocCb(r): 973 | self.assertEquals(r['id'], docId) 974 | return self.db.openDoc('test', r['id']) 975 | d.addCallback(saveDocCb) 976 | 977 | def check(r): 978 | self.assertEquals(r[u'name'], u'name') 979 | self.assertEquals(type(r['name']), unicode) 980 | self.assertEquals(r[u'_id'], docId) 981 | self.assertEquals(type(r[u'_id']), unicode) 982 | self.assertEquals(type(r[u'_rev']), unicode) 983 | 984 | # open again, with revision 985 | return self.db.openDoc('test', r['_id'], revision=r['_rev']) 986 | d.addCallback(check) 987 | 988 | def checkRevisioned(r): 989 | self.assertEquals(r[u'name'], u'name') 990 | self.assertEquals(type(r['name']), unicode) 991 | self.assertEquals(r[u'_id'], docId) 992 | self.assertEquals(type(r[u'_id']), unicode) 993 | self.assertEquals(type(r[u'_rev']), unicode) 994 | return r 995 | d.addCallback(checkRevisioned) 996 | 997 | d.addCallback(lambda r: self.db.deleteDoc( 998 | 'test', r[u'_id'], r[u'_rev'])) 999 | 1000 | d.callback(None) 1001 | return d 1002 | 1003 | 1004 | class ResponseReceiverTestCase(TestCase): 1005 | 1006 | def test_utf8Receiving(self): 1007 | d = defer.Deferred() 1008 | rvr = client.ResponseReceiver(d, decode_utf8=True) 1009 | 1010 | # "Internationalization" string from 1011 | # http://rentzsch.tumblr.com 1012 | # /post/9133498042/howto-use-utf-8-throughout-your-web-stack 1013 | data = u'\u201cI\xf1t\xebrn\xe2ti\xf4n\xe0liz\xe6ti\xf8n\u201d' 1014 | d.addCallback(lambda encoded_out: self.assertEqual(encoded_out, data)) 1015 | 1016 | for c in data.encode('utf-8'): 1017 | rvr.dataReceived(c) 1018 | 1019 | rvr.connectionLost(Failure(ResponseDone())) 1020 | 1021 | def test_8bitReceiving(self): 1022 | d = defer.Deferred() 1023 | rvr = client.ResponseReceiver(d, 
decode_utf8=False) 1024 | 1025 | data = util.eight_bit_test_string() 1026 | d.addCallback(lambda out: self.assertEqual(out, data)) 1027 | 1028 | for c in data: 1029 | rvr.dataReceived(c) 1030 | 1031 | rvr.connectionLost(Failure(ResponseDone())) 1032 | --------------------------------------------------------------------------------