├── .gitignore
├── mixpanel_api
│   ├── __init__.py
│   ├── data_export.py
│   └── general.py
├── psycopg2
│   ├── tz.pyo
│   ├── _json.pyo
│   ├── _range.pyo
│   ├── extras.pyo
│   ├── pool.pyo
│   ├── __init__.pyo
│   ├── _psycopg.pyd
│   ├── psycopg1.pyo
│   ├── errorcodes.pyo
│   ├── extensions.pyo
│   ├── tests
│   │   ├── dbapi20.pyo
│   │   ├── __init__.pyo
│   │   ├── test_async.pyo
│   │   ├── test_copy.pyo
│   │   ├── test_dates.pyo
│   │   ├── test_green.pyo
│   │   ├── test_quote.pyo
│   │   ├── test_with.pyo
│   │   ├── testconfig.pyo
│   │   ├── testutils.pyo
│   │   ├── dbapi20_tpc.pyo
│   │   ├── test_bugX000.pyo
│   │   ├── test_bug_gc.pyo
│   │   ├── test_cancel.pyo
│   │   ├── test_cursor.pyo
│   │   ├── test_lobject.pyo
│   │   ├── test_module.pyo
│   │   ├── test_notify.pyo
│   │   ├── test_connection.pyo
│   │   ├── test_transaction.pyo
│   │   ├── test_types_basic.pyo
│   │   ├── test_types_extras.pyo
│   │   ├── test_extras_dictcursor.pyo
│   │   ├── test_psycopg2_dbapi20.pyo
│   │   ├── testconfig.py
│   │   ├── test_bugX000.py
│   │   ├── test_bug_gc.py
│   │   ├── test_psycopg2_dbapi20.py
│   │   ├── __init__.py
│   │   ├── test_cancel.py
│   │   ├── test_green.py
│   │   ├── dbapi20_tpc.py
│   │   ├── test_quote.py
│   │   ├── test_with.py
│   │   ├── test_notify.py
│   │   └── test_transaction.py
│   ├── psycopg1.py
│   ├── tz.py
│   ├── __init__.py
│   ├── extensions.py
│   ├── _json.py
│   └── pool.py
├── simplejson
│   ├── tests
│   │   ├── test_default.py
│   │   ├── test_pass2.py
│   │   ├── test_pass3.py
│   │   ├── test_check_circular.py
│   │   ├── test_separators.py
│   │   ├── test_encode_for_html.py
│   │   ├── test_speedups.py
│   │   ├── test_item_sort_key.py
│   │   ├── test_float.py
│   │   ├── test_errors.py
│   │   ├── test_recursion.py
│   │   ├── test_pass1.py
│   │   ├── test_tuple.py
│   │   ├── test_bigint_as_string.py
│   │   ├── test_encode_basestring_ascii.py
│   │   ├── test_bitsize_int_as_string.py
│   │   ├── test_decimal.py
│   │   ├── test_indent.py
│   │   ├── test_for_json.py
│   │   ├── test_tool.py
│   │   ├── __init__.py
│   │   ├── test_decode.py
│   │   ├── test_namedtuple.py
│   │   ├── test_dump.py
│   │   ├── test_fail.py
│   │   ├── test_unicode.py
│   │   └── test_scanstring.py
│   ├── compat.py
│   ├── tool.py
│   ├── ordered_dict.py
│   └── scanner.py
├── mixpanel_puller.py
├── to_postgres.py
├── funnels_script.py
├── README.md
└── raw_export_script.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 |
--------------------------------------------------------------------------------
/mixpanel_api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/psycopg2/tz.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tz.pyo
--------------------------------------------------------------------------------
/psycopg2/_json.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/_json.pyo
--------------------------------------------------------------------------------
/psycopg2/_range.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/_range.pyo
--------------------------------------------------------------------------------
/psycopg2/extras.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/extras.pyo
--------------------------------------------------------------------------------
/psycopg2/pool.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/pool.pyo
--------------------------------------------------------------------------------
/psycopg2/__init__.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/__init__.pyo
--------------------------------------------------------------------------------
/psycopg2/_psycopg.pyd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/_psycopg.pyd
--------------------------------------------------------------------------------
/psycopg2/psycopg1.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/psycopg1.pyo
--------------------------------------------------------------------------------
/psycopg2/errorcodes.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/errorcodes.pyo
--------------------------------------------------------------------------------
/psycopg2/extensions.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/extensions.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/dbapi20.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/dbapi20.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/__init__.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/__init__.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_async.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_async.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_copy.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_copy.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_dates.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_dates.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_green.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_green.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_quote.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_quote.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_with.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_with.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/testconfig.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/testconfig.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/testutils.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/testutils.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/dbapi20_tpc.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/dbapi20_tpc.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_bugX000.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_bugX000.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_bug_gc.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_bug_gc.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_cancel.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_cancel.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_cursor.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_cursor.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_lobject.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_lobject.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_module.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_module.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_notify.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_notify.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_connection.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_connection.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_transaction.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_transaction.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_types_basic.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_types_basic.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_types_extras.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_types_extras.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_extras_dictcursor.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_extras_dictcursor.pyo
--------------------------------------------------------------------------------
/psycopg2/tests/test_psycopg2_dbapi20.pyo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kimeshan/mixpanel-puller/HEAD/psycopg2/tests/test_psycopg2_dbapi20.pyo
--------------------------------------------------------------------------------
/simplejson/tests/test_default.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import simplejson as json
4 |
5 | class TestDefault(TestCase):
6 | def test_default(self):
7 | self.assertEqual(
8 | json.dumps(type, default=repr),
9 | json.dumps(repr(type)))
10 |
--------------------------------------------------------------------------------
/simplejson/tests/test_pass2.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | import simplejson as json
3 |
4 | # from http://json.org/JSON_checker/test/pass2.json
5 | JSON = r'''
6 | [[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]]
7 | '''
8 |
9 | class TestPass2(TestCase):
10 | def test_parse(self):
11 | # test in/out equivalence and parsing
12 | res = json.loads(JSON)
13 | out = json.dumps(res)
14 | self.assertEqual(res, json.loads(out))
15 |
--------------------------------------------------------------------------------
/simplejson/tests/test_pass3.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import simplejson as json
4 |
5 | # from http://json.org/JSON_checker/test/pass3.json
6 | JSON = r'''
7 | {
8 | "JSON Test Pattern pass3": {
9 | "The outermost value": "must be an object or array.",
10 | "In this test": "It is an object."
11 | }
12 | }
13 | '''
14 |
15 | class TestPass3(TestCase):
16 | def test_parse(self):
17 | # test in/out equivalence and parsing
18 | res = json.loads(JSON)
19 | out = json.dumps(res)
20 | self.assertEqual(res, json.loads(out))
21 |
--------------------------------------------------------------------------------
/simplejson/tests/test_check_circular.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | import simplejson as json
3 |
4 | def default_iterable(obj):
5 | return list(obj)
6 |
7 | class TestCheckCircular(TestCase):
8 | def test_circular_dict(self):
9 | dct = {}
10 | dct['a'] = dct
11 | self.assertRaises(ValueError, json.dumps, dct)
12 |
13 | def test_circular_list(self):
14 | lst = []
15 | lst.append(lst)
16 | self.assertRaises(ValueError, json.dumps, lst)
17 |
18 | def test_circular_composite(self):
19 | dct2 = {}
20 | dct2['a'] = []
21 | dct2['a'].append(dct2)
22 | self.assertRaises(ValueError, json.dumps, dct2)
23 |
24 | def test_circular_default(self):
25 | json.dumps([set()], default=default_iterable)
26 | self.assertRaises(TypeError, json.dumps, [set()])
27 |
28 | def test_circular_off_default(self):
29 | json.dumps([set()], default=default_iterable, check_circular=False)
30 | self.assertRaises(TypeError, json.dumps, [set()], check_circular=False)
31 |
--------------------------------------------------------------------------------
/simplejson/tests/test_separators.py:
--------------------------------------------------------------------------------
1 | import textwrap
2 | from unittest import TestCase
3 |
4 | import simplejson as json
5 |
6 |
7 | class TestSeparators(TestCase):
8 | def test_separators(self):
9 | h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth',
10 | {'nifty': 87}, {'field': 'yes', 'morefield': False} ]
11 |
12 | expect = textwrap.dedent("""\
13 | [
14 | [
15 | "blorpie"
16 | ] ,
17 | [
18 | "whoops"
19 | ] ,
20 | [] ,
21 | "d-shtaeou" ,
22 | "d-nthiouh" ,
23 | "i-vhbjkhnth" ,
24 | {
25 | "nifty" : 87
26 | } ,
27 | {
28 | "field" : "yes" ,
29 | "morefield" : false
30 | }
31 | ]""")
32 |
33 |
34 | d1 = json.dumps(h)
35 | d2 = json.dumps(h, indent=' ', sort_keys=True, separators=(' ,', ' : '))
36 |
37 | h1 = json.loads(d1)
38 | h2 = json.loads(d2)
39 |
40 | self.assertEqual(h1, h)
41 | self.assertEqual(h2, h)
42 | self.assertEqual(d2, expect)
43 |
--------------------------------------------------------------------------------
/simplejson/tests/test_encode_for_html.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import simplejson as json
4 |
5 | class TestEncodeForHTML(unittest.TestCase):
6 |
7 | def setUp(self):
8 | self.decoder = json.JSONDecoder()
9 | self.encoder = json.JSONEncoderForHTML()
10 |
11 | def test_basic_encode(self):
12 | self.assertEqual(r'"\u0026"', self.encoder.encode('&'))
13 | self.assertEqual(r'"\u003c"', self.encoder.encode('<'))
14 | self.assertEqual(r'"\u003e"', self.encoder.encode('>'))
15 |
16 | def test_basic_roundtrip(self):
17 | for char in '&<>':
18 | self.assertEqual(
19 | char, self.decoder.decode(
20 | self.encoder.encode(char)))
21 |
22 | def test_prevent_script_breakout(self):
23 | bad_string = '</script><script>alert("gotcha")</script>'
24 | self.assertEqual(
25 | r'"\u003c/script\u003e\u003cscript\u003e'
26 | r'alert(\"gotcha\")\u003c/script\u003e"',
27 | self.encoder.encode(bad_string))
28 | self.assertEqual(
29 | bad_string, self.decoder.decode(
30 | self.encoder.encode(bad_string)))
31 |
--------------------------------------------------------------------------------
/simplejson/tests/test_speedups.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import unittest
3 | from unittest import TestCase
4 |
5 | from simplejson import encoder, scanner
6 |
7 |
8 | def has_speedups():
9 | return encoder.c_make_encoder is not None
10 |
11 |
12 | def skip_if_speedups_missing(func):
13 | def wrapper(*args, **kwargs):
14 | if not has_speedups():
15 | if hasattr(unittest, 'SkipTest'):
16 | raise unittest.SkipTest("C Extension not available")
17 | else:
18 | sys.stdout.write("C Extension not available")
19 | return
20 | return func(*args, **kwargs)
21 |
22 | return wrapper
23 |
24 |
25 | class TestDecode(TestCase):
26 | @skip_if_speedups_missing
27 | def test_make_scanner(self):
28 | self.assertRaises(AttributeError, scanner.c_make_scanner, 1)
29 |
30 | @skip_if_speedups_missing
31 | def test_make_encoder(self):
32 | self.assertRaises(
33 | TypeError,
34 | encoder.c_make_encoder,
35 | None,
36 | ("\xCD\x7D\x3D\x4E\x12\x4C\xF9\x79\xD7"
37 | "\x52\xBA\x82\xF2\x27\x4A\x7D\xA0\xCA\x75"),
38 | None
39 | )
40 |
--------------------------------------------------------------------------------
/simplejson/tests/test_item_sort_key.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import simplejson as json
4 | from operator import itemgetter
5 |
6 | class TestItemSortKey(TestCase):
7 | def test_simple_first(self):
8 | a = {'a': 1, 'c': 5, 'jack': 'jill', 'pick': 'axe', 'array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'}
9 | self.assertEqual(
10 | '{"a": 1, "c": 5, "crate": "dog", "jack": "jill", "pick": "axe", "zeak": "oh", "array": [1, 5, 6, 9], "tuple": [83, 12, 3]}',
11 | json.dumps(a, item_sort_key=json.simple_first))
12 |
13 | def test_case(self):
14 | a = {'a': 1, 'c': 5, 'Jack': 'jill', 'pick': 'axe', 'Array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'}
15 | self.assertEqual(
16 | '{"Array": [1, 5, 6, 9], "Jack": "jill", "a": 1, "c": 5, "crate": "dog", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}',
17 | json.dumps(a, item_sort_key=itemgetter(0)))
18 | self.assertEqual(
19 | '{"a": 1, "Array": [1, 5, 6, 9], "c": 5, "crate": "dog", "Jack": "jill", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}',
20 | json.dumps(a, item_sort_key=lambda kv: kv[0].lower()))
21 |
--------------------------------------------------------------------------------
/simplejson/compat.py:
--------------------------------------------------------------------------------
1 | """Python 3 compatibility shims
2 | """
3 | import sys
4 | if sys.version_info[0] < 3:
5 | PY3 = False
6 | def b(s):
7 | return s
8 | def u(s):
9 | return unicode(s, 'unicode_escape')
10 | import cStringIO as StringIO
11 | StringIO = BytesIO = StringIO.StringIO
12 | text_type = unicode
13 | binary_type = str
14 | string_types = (basestring,)
15 | integer_types = (int, long)
16 | unichr = unichr
17 | reload_module = reload
18 | def fromhex(s):
19 | return s.decode('hex')
20 |
21 | else:
22 | PY3 = True
23 | if sys.version_info[:2] >= (3, 4):
24 | from importlib import reload as reload_module
25 | else:
26 | from imp import reload as reload_module
27 | import codecs
28 | def b(s):
29 | return codecs.latin_1_encode(s)[0]
30 | def u(s):
31 | return s
32 | import io
33 | StringIO = io.StringIO
34 | BytesIO = io.BytesIO
35 | text_type = str
36 | binary_type = bytes
37 | string_types = (str,)
38 | integer_types = (int,)
39 |
40 | def unichr(s):
41 | return u(chr(s))
42 |
43 | def fromhex(s):
44 | return bytes.fromhex(s)
45 |
46 | long_type = integer_types[-1]
47 |
--------------------------------------------------------------------------------
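Note on compat.py above: the shims give the rest of simplejson one set of names that behave the same on Python 2 and Python 3. A minimal usage sketch (the literal values below are illustrative, not from the repo):

    from simplejson.compat import PY3, b, u, BytesIO, text_type, integer_types

    raw = b('{"answer": 42}')                  # bytes on both Python 2 and 3
    assert isinstance(u('text'), text_type)    # unicode on 2, str on 3
    buf = BytesIO(raw)                         # binary stream either way
    assert isinstance(42, integer_types)       # covers long on Python 2
    print('Python 3' if PY3 else 'Python 2')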
/psycopg2/tests/testconfig.py:
--------------------------------------------------------------------------------
1 | # Configure the test suite from the env variables.
2 |
3 | import os
4 |
5 | dbname = os.environ.get('PSYCOPG2_TESTDB', 'psycopg2_test')
6 | dbhost = os.environ.get('PSYCOPG2_TESTDB_HOST', None)
7 | dbport = os.environ.get('PSYCOPG2_TESTDB_PORT', None)
8 | dbuser = os.environ.get('PSYCOPG2_TESTDB_USER', None)
9 | dbpass = os.environ.get('PSYCOPG2_TESTDB_PASSWORD', None)
10 |
11 | # Check if we want to test psycopg's green path.
12 | green = os.environ.get('PSYCOPG2_TEST_GREEN', None)
13 | if green:
14 | if green == '1':
15 | from psycopg2.extras import wait_select as wait_callback
16 | elif green == 'eventlet':
17 | from eventlet.support.psycopg2_patcher import eventlet_wait_callback \
18 | as wait_callback
19 | else:
20 | raise ValueError("please set 'PSYCOPG2_TEST_GREEN' to a valid value")
21 |
22 | import psycopg2.extensions
23 | psycopg2.extensions.set_wait_callback(wait_callback)
24 |
25 | # Construct a DSN to connect to the test database:
26 | dsn = 'dbname=%s' % dbname
27 | if dbhost is not None:
28 | dsn += ' host=%s' % dbhost
29 | if dbport is not None:
30 | dsn += ' port=%s' % dbport
31 | if dbuser is not None:
32 | dsn += ' user=%s' % dbuser
33 | if dbpass is not None:
34 | dsn += ' password=%s' % dbpass
35 |
36 |
37 |
--------------------------------------------------------------------------------
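Note on testconfig.py above: the dsn it assembles is consumed directly by the other test modules (e.g. `from testconfig import dsn`). A hedged sketch, assuming hypothetical values PSYCOPG2_TESTDB_HOST=localhost and PSYCOPG2_TESTDB_USER=postgres are exported, in which case dsn becomes 'dbname=psycopg2_test host=localhost user=postgres':

    import psycopg2
    from testconfig import dsn    # the module above, importable from psycopg2/tests

    conn = psycopg2.connect(dsn)  # connect to the configured test database
    conn.close()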
/simplejson/tool.py:
--------------------------------------------------------------------------------
1 | r"""Command-line tool to validate and pretty-print JSON
2 |
3 | Usage::
4 |
5 | $ echo '{"json":"obj"}' | python -m simplejson.tool
6 | {
7 | "json": "obj"
8 | }
9 | $ echo '{ 1.2:3.4}' | python -m simplejson.tool
10 | Expecting property name: line 1 column 2 (char 2)
11 |
12 | """
13 | from __future__ import with_statement
14 | import sys
15 | import simplejson as json
16 |
17 | def main():
18 | if len(sys.argv) == 1:
19 | infile = sys.stdin
20 | outfile = sys.stdout
21 | elif len(sys.argv) == 2:
22 | infile = open(sys.argv[1], 'r')
23 | outfile = sys.stdout
24 | elif len(sys.argv) == 3:
25 | infile = open(sys.argv[1], 'r')
26 | outfile = open(sys.argv[2], 'w')
27 | else:
28 | raise SystemExit(sys.argv[0] + " [infile [outfile]]")
29 | with infile:
30 | try:
31 | obj = json.load(infile,
32 | object_pairs_hook=json.OrderedDict,
33 | use_decimal=True)
34 | except ValueError:
35 | raise SystemExit(sys.exc_info()[1])
36 | with outfile:
37 | json.dump(obj, outfile, sort_keys=True, indent=' ', use_decimal=True)
38 | outfile.write('\n')
39 |
40 |
41 | if __name__ == '__main__':
42 | main()
43 |
--------------------------------------------------------------------------------
/simplejson/tests/test_float.py:
--------------------------------------------------------------------------------
1 | import math
2 | from unittest import TestCase
3 | from simplejson.compat import long_type, text_type
4 | import simplejson as json
5 | from simplejson.decoder import NaN, PosInf, NegInf
6 |
7 | class TestFloat(TestCase):
8 | def test_degenerates_allow(self):
9 | for inf in (PosInf, NegInf):
10 | self.assertEqual(json.loads(json.dumps(inf)), inf)
11 | # Python 2.5 doesn't have math.isnan
12 | nan = json.loads(json.dumps(NaN))
13 | self.assertTrue((0 + nan) != nan)
14 |
15 | def test_degenerates_ignore(self):
16 | for f in (PosInf, NegInf, NaN):
17 | self.assertEqual(json.loads(json.dumps(f, ignore_nan=True)), None)
18 |
19 | def test_degenerates_deny(self):
20 | for f in (PosInf, NegInf, NaN):
21 | self.assertRaises(ValueError, json.dumps, f, allow_nan=False)
22 |
23 | def test_floats(self):
24 | for num in [1617161771.7650001, math.pi, math.pi**100,
25 | math.pi**-100, 3.1]:
26 | self.assertEqual(float(json.dumps(num)), num)
27 | self.assertEqual(json.loads(json.dumps(num)), num)
28 | self.assertEqual(json.loads(text_type(json.dumps(num))), num)
29 |
30 | def test_ints(self):
31 | for num in [1, long_type(1), 1<<32, 1<<64]:
32 | self.assertEqual(json.dumps(num), str(num))
33 | self.assertEqual(int(json.dumps(num)), num)
34 | self.assertEqual(json.loads(json.dumps(num)), num)
35 | self.assertEqual(json.loads(text_type(json.dumps(num))), num)
36 |
--------------------------------------------------------------------------------
/mixpanel_puller.py:
--------------------------------------------------------------------------------
1 | # Module contains methods for exporting raw event data and pulling the funnel
2 | # list and funnel data. Uses the Mixpanel API (Python library) to request data.
3 | from mixpanel_api import data_export
4 | from mixpanel_api import general
5 |
6 |
7 | # Method to export raw data from Mixpanel and output JSON dump
8 | def pull_raw_export(start_date, end_date, api_key, api_secret):
9 | api_raw_export = data_export.Mixpanel(api_secret)  # bundled client authenticates with the API secret only
10 | print("""Exporting raw data from Mixpanel. This might take a while depending on the number of events!
11 | Grab a coffee in the meanwhile.""")
12 |
13 | exported_data = api_raw_export.request({
14 | 'from_date': start_date,
15 | 'to_date': end_date
16 | })
17 |
18 | return exported_data
19 |
20 |
21 | # Pull list of funnels and return it
22 | def list_funnels(api_key, api_secret):
23 | api = general.Mixpanel(api_secret)
24 | funnel_list = api.request("funnels/list", {})
25 |
26 | return funnel_list
27 | # Format:[{"funnel_id": 989319, "name": "Random"}] - array of objects/dicts
28 |
29 |
30 | # Use the list of funnels to pull each funnel and its details
31 | def pull_funnels(funnel_id, length, interval, from_date, to_date, api_key, api_secret):
32 | api = general.Mixpanel(api_secret)
33 | print("Data for funnel ID "+str(funnel_id)+" requested from Mixpanel.com...")
34 | funnel_data = api.request("funnels", {
35 | "funnel_id": funnel_id,
36 | "length": length,
37 | "interval": interval,
38 | "from_date": from_date,
39 | "to_date": to_date
40 | })
41 |
42 | return funnel_data
43 |
44 |
--------------------------------------------------------------------------------
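Note on mixpanel_puller.py above: a hedged usage sketch for its three helpers. The dates, funnel parameters, and the MIXPANEL_API_SECRET environment variable are illustrative assumptions, not values from the repo; the api_key arguments are accepted for signature compatibility but the bundled clients authenticate with the secret alone:

    import os
    import mixpanel_puller

    secret = os.environ['MIXPANEL_API_SECRET']   # hypothetical env var holding the API secret

    # Raw event export for a two-day window
    events = mixpanel_puller.pull_raw_export('2017-01-10', '2017-01-11', None, secret)

    # List funnels, then pull daily data for each one over the same window
    for funnel in mixpanel_puller.list_funnels(None, secret):
        data = mixpanel_puller.pull_funnels(funnel['funnel_id'], 14, 'day',
                                            '2017-01-10', '2017-01-11', None, secret)
        print(funnel['name'], data)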
/simplejson/tests/test_errors.py:
--------------------------------------------------------------------------------
1 | import sys, pickle
2 | from unittest import TestCase
3 |
4 | import simplejson as json
5 | from simplejson.compat import u, b
6 |
7 | class TestErrors(TestCase):
8 | def test_string_keys_error(self):
9 | data = [{'a': 'A', 'b': (2, 4), 'c': 3.0, ('d',): 'D tuple'}]
10 | self.assertRaises(TypeError, json.dumps, data)
11 |
12 | def test_decode_error(self):
13 | err = None
14 | try:
15 | json.loads('{}\na\nb')
16 | except json.JSONDecodeError:
17 | err = sys.exc_info()[1]
18 | else:
19 | self.fail('Expected JSONDecodeError')
20 | self.assertEqual(err.lineno, 2)
21 | self.assertEqual(err.colno, 1)
22 | self.assertEqual(err.endlineno, 3)
23 | self.assertEqual(err.endcolno, 2)
24 |
25 | def test_scan_error(self):
26 | err = None
27 | for t in (u, b):
28 | try:
29 | json.loads(t('{"asdf": "'))
30 | except json.JSONDecodeError:
31 | err = sys.exc_info()[1]
32 | else:
33 | self.fail('Expected JSONDecodeError')
34 | self.assertEqual(err.lineno, 1)
35 | self.assertEqual(err.colno, 10)
36 |
37 | def test_error_is_pickable(self):
38 | err = None
39 | try:
40 | json.loads('{}\na\nb')
41 | except json.JSONDecodeError:
42 | err = sys.exc_info()[1]
43 | else:
44 | self.fail('Expected JSONDecodeError')
45 | s = pickle.dumps(err)
46 | e = pickle.loads(s)
47 |
48 | self.assertEqual(err.msg, e.msg)
49 | self.assertEqual(err.doc, e.doc)
50 | self.assertEqual(err.pos, e.pos)
51 | self.assertEqual(err.end, e.end)
52 |
--------------------------------------------------------------------------------
/psycopg2/tests/test_bugX000.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # bugX000.py - test for DateTime object allocation bug
4 | #
5 | # Copyright (C) 2007-2011 Federico Di Gregorio
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 | import psycopg2
26 | import time
27 | import unittest
28 |
29 | class DateTimeAllocationBugTestCase(unittest.TestCase):
30 | def test_date_time_allocation_bug(self):
31 | d1 = psycopg2.Date(2002,12,25)
32 | d2 = psycopg2.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
33 | t1 = psycopg2.Time(13,45,30)
34 | t2 = psycopg2.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
35 | t1 = psycopg2.Timestamp(2002,12,25,13,45,30)
36 | t2 = psycopg2.TimestampFromTicks(
37 | time.mktime((2002,12,25,13,45,30,0,0,0)))
38 |
39 |
40 | def test_suite():
41 | return unittest.TestLoader().loadTestsFromName(__name__)
42 |
43 | if __name__ == "__main__":
44 | unittest.main()
45 |
--------------------------------------------------------------------------------
/psycopg2/tests/test_bug_gc.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # bug_gc.py - test for refcounting/GC bug
4 | #
5 | # Copyright (C) 2010-2011 Federico Di Gregorio
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 | import psycopg2
26 | import psycopg2.extensions
27 | import unittest
28 | import gc
29 |
30 | from testutils import ConnectingTestCase, skip_if_no_uuid
31 |
32 | class StolenReferenceTestCase(ConnectingTestCase):
33 | @skip_if_no_uuid
34 | def test_stolen_reference_bug(self):
35 | def fish(val, cur):
36 | gc.collect()
37 | return 42
38 | UUID = psycopg2.extensions.new_type((2950,), "UUID", fish)
39 | psycopg2.extensions.register_type(UUID, self.conn)
40 | curs = self.conn.cursor()
41 | curs.execute("select 'b5219e01-19ab-4994-b71e-149225dc51e4'::uuid")
42 | curs.fetchone()
43 |
44 | def test_suite():
45 | return unittest.TestLoader().loadTestsFromName(__name__)
46 |
47 | if __name__ == "__main__":
48 | unittest.main()
49 |
--------------------------------------------------------------------------------
/simplejson/tests/test_recursion.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import simplejson as json
4 |
5 | class JSONTestObject:
6 | pass
7 |
8 |
9 | class RecursiveJSONEncoder(json.JSONEncoder):
10 | recurse = False
11 | def default(self, o):
12 | if o is JSONTestObject:
13 | if self.recurse:
14 | return [JSONTestObject]
15 | else:
16 | return 'JSONTestObject'
17 | return json.JSONEncoder.default(o)
18 |
19 |
20 | class TestRecursion(TestCase):
21 | def test_listrecursion(self):
22 | x = []
23 | x.append(x)
24 | try:
25 | json.dumps(x)
26 | except ValueError:
27 | pass
28 | else:
29 | self.fail("didn't raise ValueError on list recursion")
30 | x = []
31 | y = [x]
32 | x.append(y)
33 | try:
34 | json.dumps(x)
35 | except ValueError:
36 | pass
37 | else:
38 | self.fail("didn't raise ValueError on alternating list recursion")
39 | y = []
40 | x = [y, y]
41 | # ensure that the marker is cleared
42 | json.dumps(x)
43 |
44 | def test_dictrecursion(self):
45 | x = {}
46 | x["test"] = x
47 | try:
48 | json.dumps(x)
49 | except ValueError:
50 | pass
51 | else:
52 | self.fail("didn't raise ValueError on dict recursion")
53 | x = {}
54 | y = {"a": x, "b": x}
55 | # ensure that the marker is cleared
56 | json.dumps(y)
57 |
58 | def test_defaultrecursion(self):
59 | enc = RecursiveJSONEncoder()
60 | self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"')
61 | enc.recurse = True
62 | try:
63 | enc.encode(JSONTestObject)
64 | except ValueError:
65 | pass
66 | else:
67 | self.fail("didn't raise ValueError on default recursion")
68 |
--------------------------------------------------------------------------------
/simplejson/tests/test_pass1.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import simplejson as json
4 |
5 | # from http://json.org/JSON_checker/test/pass1.json
6 | JSON = r'''
7 | [
8 | "JSON Test Pattern pass1",
9 | {"object with 1 member":["array with 1 element"]},
10 | {},
11 | [],
12 | -42,
13 | true,
14 | false,
15 | null,
16 | {
17 | "integer": 1234567890,
18 | "real": -9876.543210,
19 | "e": 0.123456789e-12,
20 | "E": 1.234567890E+34,
21 | "": 23456789012E66,
22 | "zero": 0,
23 | "one": 1,
24 | "space": " ",
25 | "quote": "\"",
26 | "backslash": "\\",
27 | "controls": "\b\f\n\r\t",
28 | "slash": "/ & \/",
29 | "alpha": "abcdefghijklmnopqrstuvwyz",
30 | "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ",
31 | "digit": "0123456789",
32 | "special": "`1~!@#$%^&*()_+-={':[,]}|;.>?",
33 | "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A",
34 | "true": true,
35 | "false": false,
36 | "null": null,
37 | "array":[ ],
38 | "object":{ },
39 | "address": "50 St. James Street",
40 | "url": "http://www.JSON.org/",
41 | "comment": "// /* */": " ",
43 | " s p a c e d " :[1,2 , 3
44 |
45 | ,
46 |
47 | 4 , 5 , 6 ,7 ],"compact": [1,2,3,4,5,6,7],
48 | "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}",
49 | "quotes": "" \u0022 %22 0x22 034 "",
50 | "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"
51 | : "A key can be any string"
52 | },
53 | 0.5 ,98.6
54 | ,
55 | 99.44
56 | ,
57 |
58 | 1066,
59 | 1e1,
60 | 0.1e1,
61 | 1e-1,
62 | 1e00,2e+00,2e-00
63 | ,"rosebud"]
64 | '''
65 |
66 | class TestPass1(TestCase):
67 | def test_parse(self):
68 | # test in/out equivalence and parsing
69 | res = json.loads(JSON)
70 | out = json.dumps(res)
71 | self.assertEqual(res, json.loads(out))
72 |
--------------------------------------------------------------------------------
/simplejson/tests/test_tuple.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from simplejson.compat import StringIO
4 | import simplejson as json
5 |
6 | class TestTuples(unittest.TestCase):
7 | def test_tuple_array_dumps(self):
8 | t = (1, 2, 3)
9 | expect = json.dumps(list(t))
10 | # Default is True
11 | self.assertEqual(expect, json.dumps(t))
12 | self.assertEqual(expect, json.dumps(t, tuple_as_array=True))
13 | self.assertRaises(TypeError, json.dumps, t, tuple_as_array=False)
14 | # Ensure that the "default" does not get called
15 | self.assertEqual(expect, json.dumps(t, default=repr))
16 | self.assertEqual(expect, json.dumps(t, tuple_as_array=True,
17 | default=repr))
18 | # Ensure that the "default" gets called
19 | self.assertEqual(
20 | json.dumps(repr(t)),
21 | json.dumps(t, tuple_as_array=False, default=repr))
22 |
23 | def test_tuple_array_dump(self):
24 | t = (1, 2, 3)
25 | expect = json.dumps(list(t))
26 | # Default is True
27 | sio = StringIO()
28 | json.dump(t, sio)
29 | self.assertEqual(expect, sio.getvalue())
30 | sio = StringIO()
31 | json.dump(t, sio, tuple_as_array=True)
32 | self.assertEqual(expect, sio.getvalue())
33 | self.assertRaises(TypeError, json.dump, t, StringIO(),
34 | tuple_as_array=False)
35 | # Ensure that the "default" does not get called
36 | sio = StringIO()
37 | json.dump(t, sio, default=repr)
38 | self.assertEqual(expect, sio.getvalue())
39 | sio = StringIO()
40 | json.dump(t, sio, tuple_as_array=True, default=repr)
41 | self.assertEqual(expect, sio.getvalue())
42 | # Ensure that the "default" gets called
43 | sio = StringIO()
44 | json.dump(t, sio, tuple_as_array=False, default=repr)
45 | self.assertEqual(
46 | json.dumps(repr(t)),
47 | sio.getvalue())
48 |
49 | class TestNamedTuple(unittest.TestCase):
50 | def test_namedtuple_dump(self):
51 | pass
52 |
--------------------------------------------------------------------------------
/psycopg2/tests/test_psycopg2_dbapi20.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # test_psycopg2_dbapi20.py - DB API conformance test for psycopg2
4 | #
5 | # Copyright (C) 2006-2011 Federico Di Gregorio
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 | import dbapi20
26 | import dbapi20_tpc
27 | from testutils import skip_if_tpc_disabled
28 | from testutils import unittest, decorate_all_tests
29 | import psycopg2
30 |
31 | from testconfig import dsn
32 |
33 | class Psycopg2Tests(dbapi20.DatabaseAPI20Test):
34 | driver = psycopg2
35 | connect_args = ()
36 | connect_kw_args = {'dsn': dsn}
37 |
38 | lower_func = 'lower' # For stored procedure test
39 |
40 | def test_setoutputsize(self):
41 | # psycopg2's setoutputsize() is a no-op
42 | pass
43 |
44 | def test_nextset(self):
45 | # psycopg2 does not implement nextset()
46 | pass
47 |
48 |
49 | class Psycopg2TPCTests(dbapi20_tpc.TwoPhaseCommitTests, unittest.TestCase):
50 | driver = psycopg2
51 |
52 | def connect(self):
53 | return psycopg2.connect(dsn=dsn)
54 |
55 | decorate_all_tests(Psycopg2TPCTests, skip_if_tpc_disabled)
56 |
57 |
58 | def test_suite():
59 | return unittest.TestLoader().loadTestsFromName(__name__)
60 |
61 | if __name__ == '__main__':
62 | unittest.main()
63 |
--------------------------------------------------------------------------------
/simplejson/tests/test_bigint_as_string.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import simplejson as json
4 |
5 |
6 | class TestBigintAsString(TestCase):
7 | # Python 2.5, at least the one that ships on Mac OS X, calculates
8 | # 2 ** 53 as 0! It manages to calculate 1 << 53 correctly.
9 | values = [(200, 200),
10 | ((1 << 53) - 1, 9007199254740991),
11 | ((1 << 53), '9007199254740992'),
12 | ((1 << 53) + 1, '9007199254740993'),
13 | (-100, -100),
14 | ((-1 << 53), '-9007199254740992'),
15 | ((-1 << 53) - 1, '-9007199254740993'),
16 | ((-1 << 53) + 1, -9007199254740991)]
17 |
18 | options = (
19 | {"bigint_as_string": True},
20 | {"int_as_string_bitcount": 53}
21 | )
22 |
23 | def test_ints(self):
24 | for opts in self.options:
25 | for val, expect in self.values:
26 | self.assertEqual(
27 | val,
28 | json.loads(json.dumps(val)))
29 | self.assertEqual(
30 | expect,
31 | json.loads(json.dumps(val, **opts)))
32 |
33 | def test_lists(self):
34 | for opts in self.options:
35 | for val, expect in self.values:
36 | val = [val, val]
37 | expect = [expect, expect]
38 | self.assertEqual(
39 | val,
40 | json.loads(json.dumps(val)))
41 | self.assertEqual(
42 | expect,
43 | json.loads(json.dumps(val, **opts)))
44 |
45 | def test_dicts(self):
46 | for opts in self.options:
47 | for val, expect in self.values:
48 | val = {'k': val}
49 | expect = {'k': expect}
50 | self.assertEqual(
51 | val,
52 | json.loads(json.dumps(val)))
53 | self.assertEqual(
54 | expect,
55 | json.loads(json.dumps(val, **opts)))
56 |
57 | def test_dict_keys(self):
58 | for opts in self.options:
59 | for val, _ in self.values:
60 | expect = {str(val): 'value'}
61 | val = {val: 'value'}
62 | self.assertEqual(
63 | expect,
64 | json.loads(json.dumps(val)))
65 | self.assertEqual(
66 | expect,
67 | json.loads(json.dumps(val, **opts)))
68 |
--------------------------------------------------------------------------------
/simplejson/tests/test_encode_basestring_ascii.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import simplejson.encoder
4 | from simplejson.compat import b
5 |
6 | CASES = [
7 | (u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
8 | (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
9 | (u'controls', '"controls"'),
10 | (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
11 | (u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
12 | (u' s p a c e d ', '" s p a c e d "'),
13 | (u'\U0001d120', '"\\ud834\\udd20"'),
14 | (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
15 | (b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
16 | (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
17 | (b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
18 | (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
19 | (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
20 | (u"`1~!@#$%^&*()_+-={':[,]}|;.>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.>?"'),
21 | (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
22 | (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
23 | ]
24 |
25 | class TestEncodeBaseStringAscii(TestCase):
26 | def test_py_encode_basestring_ascii(self):
27 | self._test_encode_basestring_ascii(simplejson.encoder.py_encode_basestring_ascii)
28 |
29 | def test_c_encode_basestring_ascii(self):
30 | if not simplejson.encoder.c_encode_basestring_ascii:
31 | return
32 | self._test_encode_basestring_ascii(simplejson.encoder.c_encode_basestring_ascii)
33 |
34 | def _test_encode_basestring_ascii(self, encode_basestring_ascii):
35 | fname = encode_basestring_ascii.__name__
36 | for input_string, expect in CASES:
37 | result = encode_basestring_ascii(input_string)
38 | #self.assertEqual(result, expect,
39 | # '{0!r} != {1!r} for {2}({3!r})'.format(
40 | # result, expect, fname, input_string))
41 | self.assertEqual(result, expect,
42 | '%r != %r for %s(%r)' % (result, expect, fname, input_string))
43 |
44 | def test_sorted_dict(self):
45 | items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
46 | s = simplejson.dumps(dict(items), sort_keys=True)
47 | self.assertEqual(s, '{"five": 5, "four": 4, "one": 1, "three": 3, "two": 2}')
48 |
--------------------------------------------------------------------------------
/simplejson/tests/test_bitsize_int_as_string.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import simplejson as json
4 |
5 |
6 | class TestBitSizeIntAsString(TestCase):
7 | # Python 2.5, at least the one that ships on Mac OS X, calculates
8 | # 2 ** 31 as 0! It manages to calculate 1 << 31 correctly.
9 | values = [
10 | (200, 200),
11 | ((1 << 31) - 1, (1 << 31) - 1),
12 | ((1 << 31), str(1 << 31)),
13 | ((1 << 31) + 1, str((1 << 31) + 1)),
14 | (-100, -100),
15 | ((-1 << 31), str(-1 << 31)),
16 | ((-1 << 31) - 1, str((-1 << 31) - 1)),
17 | ((-1 << 31) + 1, (-1 << 31) + 1),
18 | ]
19 |
20 | def test_invalid_counts(self):
21 | for n in ['foo', -1, 0, 1.0]:
22 | self.assertRaises(
23 | TypeError,
24 | json.dumps, 0, int_as_string_bitcount=n)
25 |
26 | def test_ints_outside_range_fails(self):
27 | self.assertNotEqual(
28 | str(1 << 15),
29 | json.loads(json.dumps(1 << 15, int_as_string_bitcount=16)),
30 | )
31 |
32 | def test_ints(self):
33 | for val, expect in self.values:
34 | self.assertEqual(
35 | val,
36 | json.loads(json.dumps(val)))
37 | self.assertEqual(
38 | expect,
39 | json.loads(json.dumps(val, int_as_string_bitcount=31)),
40 | )
41 |
42 | def test_lists(self):
43 | for val, expect in self.values:
44 | val = [val, val]
45 | expect = [expect, expect]
46 | self.assertEqual(
47 | val,
48 | json.loads(json.dumps(val)))
49 | self.assertEqual(
50 | expect,
51 | json.loads(json.dumps(val, int_as_string_bitcount=31)))
52 |
53 | def test_dicts(self):
54 | for val, expect in self.values:
55 | val = {'k': val}
56 | expect = {'k': expect}
57 | self.assertEqual(
58 | val,
59 | json.loads(json.dumps(val)))
60 | self.assertEqual(
61 | expect,
62 | json.loads(json.dumps(val, int_as_string_bitcount=31)))
63 |
64 | def test_dict_keys(self):
65 | for val, _ in self.values:
66 | expect = {str(val): 'value'}
67 | val = {val: 'value'}
68 | self.assertEqual(
69 | expect,
70 | json.loads(json.dumps(val)))
71 | self.assertEqual(
72 | expect,
73 | json.loads(json.dumps(val, int_as_string_bitcount=31)))
74 |
--------------------------------------------------------------------------------
/simplejson/tests/test_decimal.py:
--------------------------------------------------------------------------------
1 | import decimal
2 | from decimal import Decimal
3 | from unittest import TestCase
4 | from simplejson.compat import StringIO, reload_module
5 |
6 | import simplejson as json
7 |
8 | class TestDecimal(TestCase):
9 | NUMS = "1.0", "10.00", "1.1", "1234567890.1234567890", "500"
10 | def dumps(self, obj, **kw):
11 | sio = StringIO()
12 | json.dump(obj, sio, **kw)
13 | res = json.dumps(obj, **kw)
14 | self.assertEqual(res, sio.getvalue())
15 | return res
16 |
17 | def loads(self, s, **kw):
18 | sio = StringIO(s)
19 | res = json.loads(s, **kw)
20 | self.assertEqual(res, json.load(sio, **kw))
21 | return res
22 |
23 | def test_decimal_encode(self):
24 | for d in map(Decimal, self.NUMS):
25 | self.assertEqual(self.dumps(d, use_decimal=True), str(d))
26 |
27 | def test_decimal_decode(self):
28 | for s in self.NUMS:
29 | self.assertEqual(self.loads(s, parse_float=Decimal), Decimal(s))
30 |
31 | def test_stringify_key(self):
32 | for d in map(Decimal, self.NUMS):
33 | v = {d: d}
34 | self.assertEqual(
35 | self.loads(
36 | self.dumps(v, use_decimal=True), parse_float=Decimal),
37 | {str(d): d})
38 |
39 | def test_decimal_roundtrip(self):
40 | for d in map(Decimal, self.NUMS):
41 | # The type might not be the same (int and Decimal) but they
42 | # should still compare equal.
43 | for v in [d, [d], {'': d}]:
44 | self.assertEqual(
45 | self.loads(
46 | self.dumps(v, use_decimal=True), parse_float=Decimal),
47 | v)
48 |
49 | def test_decimal_defaults(self):
50 | d = Decimal('1.1')
51 | # use_decimal=True is the default
52 | self.assertRaises(TypeError, json.dumps, d, use_decimal=False)
53 | self.assertEqual('1.1', json.dumps(d))
54 | self.assertEqual('1.1', json.dumps(d, use_decimal=True))
55 | self.assertRaises(TypeError, json.dump, d, StringIO(),
56 | use_decimal=False)
57 | sio = StringIO()
58 | json.dump(d, sio)
59 | self.assertEqual('1.1', sio.getvalue())
60 | sio = StringIO()
61 | json.dump(d, sio, use_decimal=True)
62 | self.assertEqual('1.1', sio.getvalue())
63 |
64 | def test_decimal_reload(self):
65 | # Simulate a subinterpreter that reloads the Python modules but not
66 | # the C code https://github.com/simplejson/simplejson/issues/34
67 | global Decimal
68 | Decimal = reload_module(decimal).Decimal
69 | import simplejson.encoder
70 | simplejson.encoder.Decimal = Decimal
71 | self.test_decimal_roundtrip()
72 |
--------------------------------------------------------------------------------
/mixpanel_api/data_export.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | #
3 | # Based on Python 2 Version by Mixpanel Inc. available at https://mixpanel.com/site_media/api/v2/mixpanel.py
4 | #
5 | # Copyright 2018 Jan Kyri
6 | #
7 | # Licensed under the Apache License, Version 2.0 (the "License");
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 |
19 |
20 | import os
21 | import base64
22 | import json
23 | import urllib.parse  # explicit import; urllib.parse.urlencode is used below
24 | import requests
25 |
26 |
27 | class Mixpanel(object):
28 |
29 | ENDPOINT = 'https://data.mixpanel.com/api'
30 | VERSION = '2.0'
31 |
32 | def __init__(self, api_secret):
33 | self.api_secret = api_secret
34 |
35 | def request(self, params, http_method='GET', format='json'):
36 | """
37 | params - Extra parameters associated with method
38 | """
39 | params['format'] = format
40 |
41 | request_url = '/'.join([self.ENDPOINT, str(self.VERSION), 'export'])
42 | if http_method == 'GET':
43 | data = None
44 | request_url = request_url + '/?' + self.unicode_urlencode(params)
45 | else:
46 | data = self.unicode_urlencode(params)
47 |
48 | headers = {'Authorization': 'Basic {encoded_secret}'.format(
49 | encoded_secret=str(base64.b64encode(self.api_secret.encode()), 'utf-8'))}
50 |
51 | request = requests.get(request_url, headers=headers, timeout=120)
52 | response = request.text
53 |
54 | for line in response.split('\n'):
55 | return json.loads(line)
56 |
57 | def unicode_urlencode(self, params):
58 | """
59 | Convert lists to JSON encoded strings, and correctly handle any
60 | unicode URL parameters.
61 | """
62 | if isinstance(params, dict):
63 | params = list(params.items())
64 | for i, param in enumerate(params):
65 | if isinstance(param[1], list):
66 | params[i] = (param[0], json.dumps(param[1]),)
67 |
68 | return urllib.parse.urlencode(
69 | [(k, isinstance(v, str) and v.encode('utf-8') or v) for k, v in params]
70 | )
71 |
72 |
73 | if __name__ == '__main__':
74 | api = Mixpanel(api_secret=os.environ['MIXPANEL_SECRET'])
75 |
76 | param_dict = {
77 | 'event': ["ev1", "ev2", "ev3"],
78 | 'from_date': "2017-01-10",
79 | 'to_date': "2017-01-11",
80 | }
81 |
82 | data = api.request(param_dict)
83 |
84 | print(data)
--------------------------------------------------------------------------------
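Note on data_export.py above: Mixpanel's raw /export endpoint streams newline-delimited JSON (one event object per line), while the loop at the end of request() returns only the first parsed line. A minimal sketch of parsing the whole body, assuming response_text is the requests text body obtained the same way as in request():

    import json

    def parse_export(response_text):
        # one JSON event per non-empty line of the raw export response
        return [json.loads(line) for line in response_text.split('\n') if line.strip()]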
/simplejson/tests/test_indent.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | import textwrap
3 |
4 | import simplejson as json
5 | from simplejson.compat import StringIO
6 |
7 | class TestIndent(TestCase):
8 | def test_indent(self):
9 | h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh',
10 | 'i-vhbjkhnth',
11 | {'nifty': 87}, {'field': 'yes', 'morefield': False} ]
12 |
13 | expect = textwrap.dedent("""\
14 | [
15 | \t[
16 | \t\t"blorpie"
17 | \t],
18 | \t[
19 | \t\t"whoops"
20 | \t],
21 | \t[],
22 | \t"d-shtaeou",
23 | \t"d-nthiouh",
24 | \t"i-vhbjkhnth",
25 | \t{
26 | \t\t"nifty": 87
27 | \t},
28 | \t{
29 | \t\t"field": "yes",
30 | \t\t"morefield": false
31 | \t}
32 | ]""")
33 |
34 |
35 | d1 = json.dumps(h)
36 | d2 = json.dumps(h, indent='\t', sort_keys=True, separators=(',', ': '))
37 | d3 = json.dumps(h, indent=' ', sort_keys=True, separators=(',', ': '))
38 | d4 = json.dumps(h, indent=2, sort_keys=True, separators=(',', ': '))
39 |
40 | h1 = json.loads(d1)
41 | h2 = json.loads(d2)
42 | h3 = json.loads(d3)
43 | h4 = json.loads(d4)
44 |
45 | self.assertEqual(h1, h)
46 | self.assertEqual(h2, h)
47 | self.assertEqual(h3, h)
48 | self.assertEqual(h4, h)
49 | self.assertEqual(d3, expect.replace('\t', ' '))
50 | self.assertEqual(d4, expect.replace('\t', ' '))
51 | # NOTE: Python 2.4 textwrap.dedent converts tabs to spaces,
52 | # so the following is expected to fail. Python 2.4 is not a
53 | # supported platform in simplejson 2.1.0+.
54 | self.assertEqual(d2, expect)
55 |
56 | def test_indent0(self):
57 | h = {3: 1}
58 | def check(indent, expected):
59 | d1 = json.dumps(h, indent=indent)
60 | self.assertEqual(d1, expected)
61 |
62 | sio = StringIO()
63 | json.dump(h, sio, indent=indent)
64 | self.assertEqual(sio.getvalue(), expected)
65 |
66 | # indent=0 should emit newlines
67 | check(0, '{\n"3": 1\n}')
68 | # indent=None is more compact
69 | check(None, '{"3": 1}')
70 |
71 | def test_separators(self):
72 | lst = [1,2,3,4]
73 | expect = '[\n1,\n2,\n3,\n4\n]'
74 | expect_spaces = '[\n1, \n2, \n3, \n4\n]'
75 | # Ensure that separators still works
76 | self.assertEqual(
77 | expect_spaces,
78 | json.dumps(lst, indent=0, separators=(', ', ': ')))
79 | # Force the new defaults
80 | self.assertEqual(
81 | expect,
82 | json.dumps(lst, indent=0, separators=(',', ': ')))
83 | # Added in 2.1.4
84 | self.assertEqual(
85 | expect,
86 | json.dumps(lst, indent=0))
87 |
--------------------------------------------------------------------------------
/mixpanel_api/general.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | #
3 | # Based on Python 2 Version by Mixpanel Inc. available at https://mixpanel.com/site_media/api/v2/mixpanel.py
4 | #
5 | # Copyright 2018 Jan Kyri
6 | #
7 | # Licensed under the Apache License, Version 2.0 (the "License");
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 |
19 |
20 | import os
21 | import urllib.parse  # explicit import; urllib.parse.urlencode is used below
22 | import requests
23 | import base64
24 | import json
25 |
26 | """
27 | try:
28 | import json
29 | except ImportError:
30 | import simplejson as json
31 | """
32 |
33 | class Mixpanel(object):
34 |
35 | ENDPOINT = 'https://mixpanel.com/api'
36 | VERSION = '2.0'
37 |
38 | def __init__(self, api_secret):
39 | self.api_secret = api_secret
40 |
41 | def request(self, methods, params, format='json'):
42 | """
43 | methods - API method path as a single string, e.g. 'events' or 'events/properties/values',
44 | which gives us https://mixpanel.com/api/2.0/events/properties/values/
45 | params - Extra parameters associated with method
46 | """
47 |
48 | params['format'] = format
49 |
50 | request_url = '/'.join([self.ENDPOINT, str(self.VERSION), methods, '/?']) + self.unicode_urlencode(params)
51 |
52 | headers = {'Authorization': 'Basic {encoded_secret}'.format(
53 | encoded_secret=str(base64.b64encode(self.api_secret.encode()), 'utf-8'))}
54 |
55 | request = requests.get(request_url, headers=headers, timeout=120)
56 | response = request.text
57 |
58 | return json.loads(response)
59 |
60 | def unicode_urlencode(self, params):
61 | """
62 | Convert lists to JSON encoded strings, and correctly handle any
63 | unicode URL parameters.
64 | """
65 | if isinstance(params, dict):
66 | params = list(params.items())
67 | for i, param in enumerate(params):
68 | if isinstance(param[1], list):
69 | params[i] = (param[0], json.dumps(param[1]),)
70 |
71 | return urllib.parse.urlencode(
72 | [(k, isinstance(v, str) and v.encode('utf-8') or v) for k, v in params]
73 | )
74 |
75 |
76 | if __name__ == '__main__':
77 | api = Mixpanel(api_secret=os.environ['MIXPANEL_SECRET'])
78 |
79 | param_dict = {
80 | 'event': ["ev1", "ev2", "ev3"],
81 | 'type': "average",
82 | 'unit': "day",
83 | 'from_date': "2017-01-10",
84 | 'to_date': "2017-01-11",
85 | }
86 |
87 | data = api.request('events', param_dict)
88 |
89 | print(data)
90 |
--------------------------------------------------------------------------------
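A note on the Authorization header built in request() above: RFC 7617 Basic authentication encodes "user:password", and Mixpanel's query API conventionally takes the API secret as the username with an empty password. A minimal alternative sketch, not part of this repository (query_events and the hard-coded endpoint are illustrative only), lets requests construct that header itself:

    import requests

    def query_events(api_secret, params):
        # 'params' should already contain 'format' and any JSON-encoded list
        # values, mirroring what unicode_urlencode() produces above.
        resp = requests.get(
            'https://mixpanel.com/api/2.0/events',
            params=params,
            auth=(api_secret, ''),  # requests sends "Authorization: Basic base64(secret:)"
            timeout=120,
        )
        resp.raise_for_status()
        return resp.json()

Passing params as a dict also spares the manual URL encoding done in unicode_urlencode(), though list values would still need json.dumps first.
--------------------------------------------------------------------------------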
/simplejson/tests/test_for_json.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import simplejson as json
3 |
4 |
5 | class ForJson(object):
6 | def for_json(self):
7 | return {'for_json': 1}
8 |
9 |
10 | class NestedForJson(object):
11 | def for_json(self):
12 | return {'nested': ForJson()}
13 |
14 |
15 | class ForJsonList(object):
16 | def for_json(self):
17 | return ['list']
18 |
19 |
20 | class DictForJson(dict):
21 | def for_json(self):
22 | return {'alpha': 1}
23 |
24 |
25 | class ListForJson(list):
26 | def for_json(self):
27 | return ['list']
28 |
29 |
30 | class TestForJson(unittest.TestCase):
31 | def assertRoundTrip(self, obj, other, for_json=True):
32 | if for_json is None:
33 | # None will use the default
34 | s = json.dumps(obj)
35 | else:
36 | s = json.dumps(obj, for_json=for_json)
37 | self.assertEqual(
38 | json.loads(s),
39 | other)
40 |
41 | def test_for_json_encodes_stand_alone_object(self):
42 | self.assertRoundTrip(
43 | ForJson(),
44 | ForJson().for_json())
45 |
46 | def test_for_json_encodes_object_nested_in_dict(self):
47 | self.assertRoundTrip(
48 | {'hooray': ForJson()},
49 | {'hooray': ForJson().for_json()})
50 |
51 | def test_for_json_encodes_object_nested_in_list_within_dict(self):
52 | self.assertRoundTrip(
53 | {'list': [0, ForJson(), 2, 3]},
54 | {'list': [0, ForJson().for_json(), 2, 3]})
55 |
56 | def test_for_json_encodes_object_nested_within_object(self):
57 | self.assertRoundTrip(
58 | NestedForJson(),
59 | {'nested': {'for_json': 1}})
60 |
61 | def test_for_json_encodes_list(self):
62 | self.assertRoundTrip(
63 | ForJsonList(),
64 | ForJsonList().for_json())
65 |
66 | def test_for_json_encodes_list_within_object(self):
67 | self.assertRoundTrip(
68 | {'nested': ForJsonList()},
69 | {'nested': ForJsonList().for_json()})
70 |
71 | def test_for_json_encodes_dict_subclass(self):
72 | self.assertRoundTrip(
73 | DictForJson(a=1),
74 | DictForJson(a=1).for_json())
75 |
76 | def test_for_json_encodes_list_subclass(self):
77 | self.assertRoundTrip(
78 | ListForJson(['l']),
79 | ListForJson(['l']).for_json())
80 |
81 | def test_for_json_ignored_if_not_true_with_dict_subclass(self):
82 | for for_json in (None, False):
83 | self.assertRoundTrip(
84 | DictForJson(a=1),
85 | {'a': 1},
86 | for_json=for_json)
87 |
88 | def test_for_json_ignored_if_not_true_with_list_subclass(self):
89 | for for_json in (None, False):
90 | self.assertRoundTrip(
91 | ListForJson(['l']),
92 | ['l'],
93 | for_json=for_json)
94 |
95 | def test_raises_typeerror_if_for_json_not_true_with_object(self):
96 | self.assertRaises(TypeError, json.dumps, ForJson())
97 | self.assertRaises(TypeError, json.dumps, ForJson(), for_json=False)
98 |
--------------------------------------------------------------------------------
/simplejson/tests/test_tool.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 | import os
3 | import sys
4 | import textwrap
5 | import unittest
6 | import subprocess
7 | import tempfile
8 | try:
9 | # Python 3.x
10 | from test.support import strip_python_stderr
11 | except ImportError:
12 | # Python 2.6+
13 | try:
14 | from test.test_support import strip_python_stderr
15 | except ImportError:
16 | # Python 2.5
17 | import re
18 | def strip_python_stderr(stderr):
19 | return re.sub(
20 | r"\[\d+ refs\]\r?\n?$".encode(),
21 | "".encode(),
22 | stderr).strip()
23 |
24 | class TestTool(unittest.TestCase):
25 | data = """
26 |
27 | [["blorpie"],[ "whoops" ] , [
28 | ],\t"d-shtaeou",\r"d-nthiouh",
29 | "i-vhbjkhnth", {"nifty":87}, {"morefield" :\tfalse,"field"
30 | :"yes"} ]
31 | """
32 |
33 | expect = textwrap.dedent("""\
34 | [
35 | [
36 | "blorpie"
37 | ],
38 | [
39 | "whoops"
40 | ],
41 | [],
42 | "d-shtaeou",
43 | "d-nthiouh",
44 | "i-vhbjkhnth",
45 | {
46 | "nifty": 87
47 | },
48 | {
49 | "field": "yes",
50 | "morefield": false
51 | }
52 | ]
53 | """)
54 |
55 | def runTool(self, args=None, data=None):
56 | argv = [sys.executable, '-m', 'simplejson.tool']
57 | if args:
58 | argv.extend(args)
59 | proc = subprocess.Popen(argv,
60 | stdin=subprocess.PIPE,
61 | stderr=subprocess.PIPE,
62 | stdout=subprocess.PIPE)
63 | out, err = proc.communicate(data)
64 | self.assertEqual(strip_python_stderr(err), ''.encode())
65 | self.assertEqual(proc.returncode, 0)
66 | return out
67 |
68 | def test_stdin_stdout(self):
69 | self.assertEqual(
70 | self.runTool(data=self.data.encode()),
71 | self.expect.encode())
72 |
73 | def test_infile_stdout(self):
74 | with tempfile.NamedTemporaryFile() as infile:
75 | infile.write(self.data.encode())
76 | infile.flush()
77 | self.assertEqual(
78 | self.runTool(args=[infile.name]),
79 | self.expect.encode())
80 |
81 | def test_infile_outfile(self):
82 | with tempfile.NamedTemporaryFile() as infile:
83 | infile.write(self.data.encode())
84 | infile.flush()
85 | # outfile will get overwritten by tool, so the delete
86 | # may not work on some platforms. Do it manually.
87 | outfile = tempfile.NamedTemporaryFile()
88 | try:
89 | self.assertEqual(
90 | self.runTool(args=[infile.name, outfile.name]),
91 | ''.encode())
92 | with open(outfile.name, 'rb') as f:
93 | self.assertEqual(f.read(), self.expect.encode())
94 | finally:
95 | outfile.close()
96 | if os.path.exists(outfile.name):
97 | os.unlink(outfile.name)
98 |
--------------------------------------------------------------------------------
/psycopg2/tests/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # psycopg2 test suite
4 | #
5 | # Copyright (C) 2007-2011 Federico Di Gregorio
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 | import sys
26 | from testconfig import dsn
27 | from testutils import unittest
28 |
29 | import test_async
30 | import test_bugX000
31 | import test_bug_gc
32 | import test_cancel
33 | import test_connection
34 | import test_copy
35 | import test_cursor
36 | import test_dates
37 | import test_extras_dictcursor
38 | import test_green
39 | import test_lobject
40 | import test_module
41 | import test_notify
42 | import test_psycopg2_dbapi20
43 | import test_quote
44 | import test_transaction
45 | import test_types_basic
46 | import test_types_extras
47 |
48 | if sys.version_info[:2] >= (2, 5):
49 | import test_with
50 | else:
51 | test_with = None
52 |
53 | def test_suite():
54 | # If connection to test db fails, bail out early.
55 | import psycopg2
56 | try:
57 | cnn = psycopg2.connect(dsn)
58 | except Exception, e:
59 | print "Failed connection to test db:", e.__class__.__name__, e
60 | print "Please set env vars 'PSYCOPG2_TESTDB*' to valid values."
61 | sys.exit(1)
62 | else:
63 | cnn.close()
64 |
65 | suite = unittest.TestSuite()
66 | suite.addTest(test_async.test_suite())
67 | suite.addTest(test_bugX000.test_suite())
68 | suite.addTest(test_bug_gc.test_suite())
69 | suite.addTest(test_cancel.test_suite())
70 | suite.addTest(test_connection.test_suite())
71 | suite.addTest(test_copy.test_suite())
72 | suite.addTest(test_cursor.test_suite())
73 | suite.addTest(test_dates.test_suite())
74 | suite.addTest(test_extras_dictcursor.test_suite())
75 | suite.addTest(test_green.test_suite())
76 | suite.addTest(test_lobject.test_suite())
77 | suite.addTest(test_module.test_suite())
78 | suite.addTest(test_notify.test_suite())
79 | suite.addTest(test_psycopg2_dbapi20.test_suite())
80 | suite.addTest(test_quote.test_suite())
81 | suite.addTest(test_transaction.test_suite())
82 | suite.addTest(test_types_basic.test_suite())
83 | suite.addTest(test_types_extras.test_suite())
84 | if test_with:
85 | suite.addTest(test_with.test_suite())
86 | return suite
87 |
88 | if __name__ == '__main__':
89 | unittest.main(defaultTest='test_suite')
90 |
--------------------------------------------------------------------------------
/simplejson/tests/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | import unittest
3 | import doctest
4 | import sys
5 |
6 |
7 | class NoExtensionTestSuite(unittest.TestSuite):
8 | def run(self, result):
9 | import simplejson
10 | simplejson._toggle_speedups(False)
11 | result = unittest.TestSuite.run(self, result)
12 | simplejson._toggle_speedups(True)
13 | return result
14 |
15 |
16 | class TestMissingSpeedups(unittest.TestCase):
17 | def runTest(self):
18 | if hasattr(sys, 'pypy_translation_info'):
19 | "PyPy doesn't need speedups! :)"
20 | elif hasattr(self, 'skipTest'):
21 | self.skipTest('_speedups.so is missing!')
22 |
23 |
24 | def additional_tests(suite=None):
25 | import simplejson
26 | import simplejson.encoder
27 | import simplejson.decoder
28 | if suite is None:
29 | suite = unittest.TestSuite()
30 | for mod in (simplejson, simplejson.encoder, simplejson.decoder):
31 | suite.addTest(doctest.DocTestSuite(mod))
32 | suite.addTest(doctest.DocFileSuite('../../index.rst'))
33 | return suite
34 |
35 |
36 | def all_tests_suite():
37 | def get_suite():
38 | return additional_tests(
39 | unittest.TestLoader().loadTestsFromNames([
40 | 'simplejson.tests.test_bitsize_int_as_string',
41 | 'simplejson.tests.test_bigint_as_string',
42 | 'simplejson.tests.test_check_circular',
43 | 'simplejson.tests.test_decode',
44 | 'simplejson.tests.test_default',
45 | 'simplejson.tests.test_dump',
46 | 'simplejson.tests.test_encode_basestring_ascii',
47 | 'simplejson.tests.test_encode_for_html',
48 | 'simplejson.tests.test_errors',
49 | 'simplejson.tests.test_fail',
50 | 'simplejson.tests.test_float',
51 | 'simplejson.tests.test_indent',
52 | 'simplejson.tests.test_pass1',
53 | 'simplejson.tests.test_pass2',
54 | 'simplejson.tests.test_pass3',
55 | 'simplejson.tests.test_recursion',
56 | 'simplejson.tests.test_scanstring',
57 | 'simplejson.tests.test_separators',
58 | 'simplejson.tests.test_speedups',
59 | 'simplejson.tests.test_unicode',
60 | 'simplejson.tests.test_decimal',
61 | 'simplejson.tests.test_tuple',
62 | 'simplejson.tests.test_namedtuple',
63 | 'simplejson.tests.test_tool',
64 | 'simplejson.tests.test_for_json',
65 | ]))
66 | suite = get_suite()
67 | import simplejson
68 | if simplejson._import_c_make_encoder() is None:
69 | suite.addTest(TestMissingSpeedups())
70 | else:
71 | suite = unittest.TestSuite([
72 | suite,
73 | NoExtensionTestSuite([get_suite()]),
74 | ])
75 | return suite
76 |
77 |
78 | def main():
79 | runner = unittest.TextTestRunner(verbosity=1 + sys.argv.count('-v'))
80 | suite = all_tests_suite()
81 | raise SystemExit(not runner.run(suite).wasSuccessful())
82 |
83 |
84 | if __name__ == '__main__':
85 | import os
86 | import sys
87 | sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
88 | main()
89 |
--------------------------------------------------------------------------------
/to_postgres.py:
--------------------------------------------------------------------------------
1 | # Postgres database methods to connect to DB and create various tables
2 | import psycopg2
3 | import sys
4 |
5 |
6 | # Connect to a Postgres database using the psycopg2 adapter; returns (connection, cursor)
7 | def connect_db(hostname, db, name, pw):
8 | print("Connecting to database at "+str(hostname)+"...")
9 | con = None
10 | try:
11 | # Try to connect to the database, store connection object in 'con'
12 |         con = psycopg2.connect(host=hostname, database=db, user=name, password=pw)
13 | # Get the cursor object from connection, used to traverse records
14 | cur = con.cursor()
15 | print("Successfully connected to database!\n")
16 | return con, cur
17 |
18 | except psycopg2.DatabaseError as e:
19 | # If database error, let's rollback any changes.
20 | if con:
21 | con.rollback()
22 | con.close()
23 | print('Error %s' % e)
24 | sys.exit(1)
25 |
26 |
27 | def create_raw_export_tables(con, cur):
28 | try:
29 | # Creating tables
30 | print("Creating tables that do not exist already...")
31 | cur.execute("CREATE TABLE IF NOT EXISTS event_def (event_id serial PRIMARY KEY , event_name VARCHAR(50))")
32 | cur.execute("CREATE TABLE IF NOT EXISTS property_def (property_id serial PRIMARY KEY, property_name VARCHAR(50), event_id integer REFERENCES event_def);")
33 | cur.execute("CREATE TABLE IF NOT EXISTS property_trans (trans_id bigserial PRIMARY KEY, property_value text, property_id integer REFERENCES property_def);")
34 | cur.execute("CREATE TABLE IF NOT EXISTS event_trans (trans_id serial PRIMARY KEY, event_id integer REFERENCES event_def, timestamp bigint);")
35 | con.commit()
36 | print("Done.")
37 |
38 | except psycopg2.DatabaseError as e:
39 | # If database error, let's rollback any changes.
40 | if con:
41 | con.rollback()
42 | con.close()
43 | print('Error %s' % e)
44 | sys.exit(1)
45 |
46 |
47 | def create_funnel_table(con, cur):
48 | try:
49 | # Creating funnel_trans (funnel transaction) table: 25 columns
50 | print("Creating funnel_trans if it doesn't already exist...")
51 | cur.execute("CREATE TABLE IF NOT EXISTS funnel_trans (trans_id SERIAL PRIMARY KEY, funnel_id INTEGER, \
52 | funnel_name VARCHAR(200), from_date DATE, to_date DATE, completion INTEGER, \
53 | starting_amount INTEGER, steps INTEGER, worst INTEGER, \
54 | step_1_goal VARCHAR(50), step_1_count INTEGER, step_1_overall_conv_ratio DECIMAL, \
55 | step_1_step_conv_ratio DECIMAL, step_2_goal VARCHAR(50), step_2_count INTEGER, \
56 | step_2_overall_conv_ratio DECIMAL, step_2_step_conv_ratio DECIMAL, step_3_goal VARCHAR(50), \
57 | step_3_count INTEGER, step_3_overall_conv_ratio DECIMAL, step_3_step_conv_ratio DECIMAL,\
58 | step_4_goal VARCHAR(50), step_4_count INTEGER, step_4_overall_conv_ratio DECIMAL, \
59 | step_4_step_conv_ratio DECIMAL);")
60 | con.commit()
61 | print("Done.")
62 |
63 | except psycopg2.DatabaseError as e:
64 | # If database error, let's rollback any changes.
65 | if con:
66 | con.rollback()
67 | con.close()
68 | print('Error %s' % e)
69 | sys.exit(1)
70 |
71 |
--------------------------------------------------------------------------------
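The four raw-export tables created above split definitions (event_def, property_def) from occurrences (event_trans, property_trans), linked by foreign keys. A hypothetical population sketch, with placeholder connection details and data, might look like:

    import time
    import to_postgres

    # Placeholder connection details, for illustration only.
    con, cur = to_postgres.connect_db('localhost', 'analytics', 'user', 'pw')
    to_postgres.create_raw_export_tables(con, cur)

    # Register an event definition once, then record a single occurrence of it.
    cur.execute("INSERT INTO event_def (event_name) VALUES (%s) RETURNING event_id",
                ('Sign Up',))
    event_id = cur.fetchone()[0]
    cur.execute("INSERT INTO event_trans (event_id, timestamp) VALUES (%s, %s)",
                (event_id, int(time.time())))
    con.commit()
    con.close()
--------------------------------------------------------------------------------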
/psycopg2/psycopg1.py:
--------------------------------------------------------------------------------
1 | """psycopg 1.1.x compatibility module
2 |
3 | This module uses the new style connection and cursor types to build a psycopg
4 | 1.1.1.x compatibility layer. It should be considered a temporary hack to run
5 | old code while porting to psycopg 2. Import it as follows::
6 |
7 | from psycopg2 import psycopg1 as psycopg
8 | """
9 | # psycopg/psycopg1.py - psycopg 1.1.x compatibility module
10 | #
11 | # Copyright (C) 2003-2010 Federico Di Gregorio
12 | #
13 | # psycopg2 is free software: you can redistribute it and/or modify it
14 | # under the terms of the GNU Lesser General Public License as published
15 | # by the Free Software Foundation, either version 3 of the License, or
16 | # (at your option) any later version.
17 | #
18 | # In addition, as a special exception, the copyright holders give
19 | # permission to link this program with the OpenSSL library (or with
20 | # modified versions of OpenSSL that use the same license as OpenSSL),
21 | # and distribute linked combinations including the two.
22 | #
23 | # You must obey the GNU Lesser General Public License in all respects for
24 | # all of the code used other than OpenSSL.
25 | #
26 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
27 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
28 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
29 | # License for more details.
30 |
31 | import _psycopg as _2psycopg
32 | from psycopg2.extensions import cursor as _2cursor
33 | from psycopg2.extensions import connection as _2connection
34 |
35 | from psycopg2 import *
36 | import psycopg2.extensions as _ext
37 | _2connect = connect
38 |
39 | def connect(*args, **kwargs):
40 | """connect(dsn, ...) -> new psycopg 1.1.x compatible connection object"""
41 | kwargs['connection_factory'] = connection
42 | conn = _2connect(*args, **kwargs)
43 | conn.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
44 | return conn
45 |
46 | class connection(_2connection):
47 | """psycopg 1.1.x connection."""
48 |
49 | def cursor(self):
50 | """cursor() -> new psycopg 1.1.x compatible cursor object"""
51 | return _2connection.cursor(self, cursor_factory=cursor)
52 |
53 | def autocommit(self, on_off=1):
54 | """autocommit(on_off=1) -> switch autocommit on (1) or off (0)"""
55 | if on_off > 0:
56 | self.set_isolation_level(_ext.ISOLATION_LEVEL_AUTOCOMMIT)
57 | else:
58 | self.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
59 |
60 |
61 | class cursor(_2cursor):
62 | """psycopg 1.1.x cursor.
63 |
64 | Note that this cursor implements the exact procedure used by psycopg 1 to
65 | build dictionaries out of result rows. The DictCursor in the
66 | psycopg.extras modules implements a much better and faster algorithm.
67 | """
68 |
69 | def __build_dict(self, row):
70 | res = {}
71 | for i in range(len(self.description)):
72 | res[self.description[i][0]] = row[i]
73 | return res
74 |
75 | def dictfetchone(self):
76 | row = _2cursor.fetchone(self)
77 | if row:
78 | return self.__build_dict(row)
79 | else:
80 | return row
81 |
82 | def dictfetchmany(self, size):
83 | res = []
84 | rows = _2cursor.fetchmany(self, size)
85 | for row in rows:
86 | res.append(self.__build_dict(row))
87 | return res
88 |
89 | def dictfetchall(self):
90 | res = []
91 | rows = _2cursor.fetchall(self)
92 | for row in rows:
93 | res.append(self.__build_dict(row))
94 | return res
95 |
96 |
--------------------------------------------------------------------------------
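As the module docstring notes, this is only a porting aid. A minimal usage sketch of the dict-returning cursor, not repository code and assuming a Python 2 interpreter compatible with the vendored psycopg2 build plus a reachable database:

    from psycopg2 import psycopg1 as psycopg

    conn = psycopg.connect("dbname=test")    # placeholder DSN
    cur = conn.cursor()
    cur.execute("SELECT 1 AS one, 2 AS two")
    print(cur.dictfetchall())                # [{'one': 1, 'two': 2}]
    conn.close()
--------------------------------------------------------------------------------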
/simplejson/ordered_dict.py:
--------------------------------------------------------------------------------
1 | """Drop-in replacement for collections.OrderedDict by Raymond Hettinger
2 |
3 | http://code.activestate.com/recipes/576693/
4 |
5 | """
6 | from UserDict import DictMixin
7 |
8 | # Modified from original to support Python 2.4, see
9 | # http://code.google.com/p/simplejson/issues/detail?id=53
10 | try:
11 | all
12 | except NameError:
13 | def all(seq):
14 | for elem in seq:
15 | if not elem:
16 | return False
17 | return True
18 |
19 | class OrderedDict(dict, DictMixin):
20 |
21 | def __init__(self, *args, **kwds):
22 | if len(args) > 1:
23 | raise TypeError('expected at most 1 arguments, got %d' % len(args))
24 | try:
25 | self.__end
26 | except AttributeError:
27 | self.clear()
28 | self.update(*args, **kwds)
29 |
30 | def clear(self):
31 | self.__end = end = []
32 | end += [None, end, end] # sentinel node for doubly linked list
33 | self.__map = {} # key --> [key, prev, next]
34 | dict.clear(self)
35 |
36 | def __setitem__(self, key, value):
37 | if key not in self:
38 | end = self.__end
39 | curr = end[1]
40 | curr[2] = end[1] = self.__map[key] = [key, curr, end]
41 | dict.__setitem__(self, key, value)
42 |
43 | def __delitem__(self, key):
44 | dict.__delitem__(self, key)
45 | key, prev, next = self.__map.pop(key)
46 | prev[2] = next
47 | next[1] = prev
48 |
49 | def __iter__(self):
50 | end = self.__end
51 | curr = end[2]
52 | while curr is not end:
53 | yield curr[0]
54 | curr = curr[2]
55 |
56 | def __reversed__(self):
57 | end = self.__end
58 | curr = end[1]
59 | while curr is not end:
60 | yield curr[0]
61 | curr = curr[1]
62 |
63 | def popitem(self, last=True):
64 | if not self:
65 | raise KeyError('dictionary is empty')
66 | # Modified from original to support Python 2.4, see
67 | # http://code.google.com/p/simplejson/issues/detail?id=53
68 | if last:
69 | key = reversed(self).next()
70 | else:
71 | key = iter(self).next()
72 | value = self.pop(key)
73 | return key, value
74 |
75 | def __reduce__(self):
76 | items = [[k, self[k]] for k in self]
77 | tmp = self.__map, self.__end
78 | del self.__map, self.__end
79 | inst_dict = vars(self).copy()
80 | self.__map, self.__end = tmp
81 | if inst_dict:
82 | return (self.__class__, (items,), inst_dict)
83 | return self.__class__, (items,)
84 |
85 | def keys(self):
86 | return list(self)
87 |
88 | setdefault = DictMixin.setdefault
89 | update = DictMixin.update
90 | pop = DictMixin.pop
91 | values = DictMixin.values
92 | items = DictMixin.items
93 | iterkeys = DictMixin.iterkeys
94 | itervalues = DictMixin.itervalues
95 | iteritems = DictMixin.iteritems
96 |
97 | def __repr__(self):
98 | if not self:
99 | return '%s()' % (self.__class__.__name__,)
100 | return '%s(%r)' % (self.__class__.__name__, self.items())
101 |
102 | def copy(self):
103 | return self.__class__(self)
104 |
105 | @classmethod
106 | def fromkeys(cls, iterable, value=None):
107 | d = cls()
108 | for key in iterable:
109 | d[key] = value
110 | return d
111 |
112 | def __eq__(self, other):
113 | if isinstance(other, OrderedDict):
114 | return len(self)==len(other) and \
115 | all(p==q for p, q in zip(self.items(), other.items()))
116 | return dict.__eq__(self, other)
117 |
118 | def __ne__(self, other):
119 | return not self == other
120 |
--------------------------------------------------------------------------------
/psycopg2/tests/test_cancel.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | # test_cancel.py - unit test for query cancellation
5 | #
6 | # Copyright (C) 2010-2011 Jan Urbański
7 | #
8 | # psycopg2 is free software: you can redistribute it and/or modify it
9 | # under the terms of the GNU Lesser General Public License as published
10 | # by the Free Software Foundation, either version 3 of the License, or
11 | # (at your option) any later version.
12 | #
13 | # In addition, as a special exception, the copyright holders give
14 | # permission to link this program with the OpenSSL library (or with
15 | # modified versions of OpenSSL that use the same license as OpenSSL),
16 | # and distribute linked combinations including the two.
17 | #
18 | # You must obey the GNU Lesser General Public License in all respects for
19 | # all of the code used other than OpenSSL.
20 | #
21 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
22 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
23 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
24 | # License for more details.
25 |
26 | import threading
27 |
28 | import psycopg2
29 | import psycopg2.extensions
30 | from psycopg2 import extras
31 |
32 | from testconfig import dsn
33 | from testutils import unittest, ConnectingTestCase, skip_before_postgres
34 |
35 | class CancelTests(ConnectingTestCase):
36 |
37 | def setUp(self):
38 | ConnectingTestCase.setUp(self)
39 |
40 | cur = self.conn.cursor()
41 | cur.execute('''
42 | CREATE TEMPORARY TABLE table1 (
43 | id int PRIMARY KEY
44 | )''')
45 | self.conn.commit()
46 |
47 | def test_empty_cancel(self):
48 | self.conn.cancel()
49 |
50 | @skip_before_postgres(8, 2)
51 | def test_cancel(self):
52 | errors = []
53 |
54 | def neverending(conn):
55 | cur = conn.cursor()
56 | try:
57 | self.assertRaises(psycopg2.extensions.QueryCanceledError,
58 | cur.execute, "select pg_sleep(60)")
59 | # make sure the connection still works
60 | conn.rollback()
61 | cur.execute("select 1")
62 | self.assertEqual(cur.fetchall(), [(1, )])
63 | except Exception, e:
64 | errors.append(e)
65 | raise
66 |
67 | def canceller(conn):
68 | cur = conn.cursor()
69 | try:
70 | conn.cancel()
71 | except Exception, e:
72 | errors.append(e)
73 | raise
74 |
75 | thread1 = threading.Thread(target=neverending, args=(self.conn, ))
76 | # wait a bit to make sure that the other thread is already in
77 | # pg_sleep -- ugly and racy, but the chances are ridiculously low
78 | thread2 = threading.Timer(0.3, canceller, args=(self.conn, ))
79 | thread1.start()
80 | thread2.start()
81 | thread1.join()
82 | thread2.join()
83 |
84 | self.assertEqual(errors, [])
85 |
86 | @skip_before_postgres(8, 2)
87 | def test_async_cancel(self):
88 | async_conn = psycopg2.connect(dsn, async=True)
89 | self.assertRaises(psycopg2.OperationalError, async_conn.cancel)
90 | extras.wait_select(async_conn)
91 | cur = async_conn.cursor()
92 | cur.execute("select pg_sleep(10000)")
93 | self.assertTrue(async_conn.isexecuting())
94 | async_conn.cancel()
95 | self.assertRaises(psycopg2.extensions.QueryCanceledError,
96 | extras.wait_select, async_conn)
97 | cur.execute("select 1")
98 | extras.wait_select(async_conn)
99 | self.assertEqual(cur.fetchall(), [(1, )])
100 |
101 | def test_async_connection_cancel(self):
102 | async_conn = psycopg2.connect(dsn, async=True)
103 | async_conn.close()
104 | self.assertTrue(async_conn.closed)
105 |
106 |
107 | def test_suite():
108 | return unittest.TestLoader().loadTestsFromName(__name__)
109 |
110 | if __name__ == "__main__":
111 | unittest.main()
112 |
--------------------------------------------------------------------------------
/funnels_script.py:
--------------------------------------------------------------------------------
1 | # Script to pull funnel data and write to funnel_trans (funnel transactions)
2 | # table on Postgres DB
3 | from datetime import date, timedelta
4 | import mixpanel_puller
5 | import to_postgres
6 | import psycopg2
7 | import sys
8 | import os
9 |
10 | print("Starting script to pull funnel data from Mixpanel and write to database...")
11 |
12 | # Mixpanel credentials
13 | # CHECK AUTH METHOD, NOT VALID ANYMORE
14 | api_key = os.environ['MIXPANEL_KEY']
15 | api_secret = os.environ['MIXPANEL_SECRET']
16 |
17 | # Pull funnel list
18 | print("Pulling funnel list, request sent to Mixpanel:")
19 | funnel_list = mixpanel_puller.list_funnels(api_secret)
20 |
21 | # Set parameters to pull funnel data
22 | length = 60
23 | interval = 1
24 | seven_days_ago = date.today()-timedelta(days=7)
25 | yesterday = date.today()-timedelta(days=1)
26 |
27 | # Database operations
28 | # 1. Connect to Postgres database
29 | hostname = os.environ['POSTGRES_HOST']
30 | db = os.environ['POSTGRES_DB']
31 | user = os.environ['POSTGRES_USER']
32 | pw = os.environ['POSTGRES_PW']
33 | con, cur = to_postgres.connect_db(hostname, db, user, pw)
34 |
35 | # 2. Create funnel_trans table if it does not already exist
36 | to_postgres.create_funnel_table(con, cur)
37 |
38 | # 3. Insert funnel data into funnel_trans table
39 | try:
40 | # Update funnel_trans table
41 | print("Writing funnel data to funnel_trans table..")
42 |
43 | for each in funnel_list:
44 |
45 | funnel_id, funnel_name = each["funnel_id"], each["name"]
46 | funnel_data = mixpanel_puller.pull_funnels(
47 | funnel_id,
48 | length,
49 | interval,
50 | seven_days_ago,
51 | yesterday,
52 | api_key,
53 | api_secret
54 | )
55 | print("Funnel data received.\n")
56 |
57 | for date in funnel_data["meta"]["dates"]:
58 | date_data = funnel_data["data"][date]
59 | completion = funnel_data["data"][date]["analysis"]["completion"]
60 | starting_amount = funnel_data["data"][date]["analysis"]["starting_amount"]
61 | steps = funnel_data["data"][date]["analysis"]["steps"]
62 | worst = funnel_data["data"][date]["analysis"]["worst"]
63 | # Loop through steps, cater for max of 4 steps (to match database schema)
64 | step_data = [["", 0, 0.0, 0.0], ["", 0, 0.0, 0.0], ["", 0, 0.0, 0.0], ["", 0, 0.0, 0.0]]
65 | steps_dicts = date_data["steps"]
66 | for step in range(steps):
67 | step_data[step] = [steps_dicts[step]["goal"], steps_dicts[step]["count"],
68 | steps_dicts[step]["overall_conv_ratio"], steps_dicts[step]["step_conv_ratio"]]
69 | # Insert row for this date and funnel into table
70 | cur.execute("INSERT INTO funnel_trans(funnel_id, funnel_name, from_date, to_date, completion, starting_amount,\
71 | steps, worst, step_1_goal, step_1_count, step_1_overall_conv_ratio, step_1_step_conv_ratio,\
72 | step_2_goal, step_2_count, step_2_overall_conv_ratio, step_2_step_conv_ratio,\
73 | step_3_goal, step_3_count, step_3_overall_conv_ratio, step_3_step_conv_ratio,\
74 | step_4_goal, step_4_count, step_4_overall_conv_ratio, step_4_step_conv_ratio) \
75 | VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);",
76 | (funnel_id, funnel_name, date, date, completion, starting_amount, steps, worst,
77 | step_data[0][0], step_data[0][1], step_data[0][2], step_data[0][3], step_data[1][0],
78 | step_data[1][1], step_data[1][2], step_data[1][3], step_data[2][0], step_data[2][1],
79 | step_data[2][2], step_data[2][3], step_data[3][0], step_data[3][1], step_data[3][2],
80 | step_data[3][3]))
81 | con.commit()
82 | print("funnel_trans table updated successfully.")
83 |
84 | except psycopg2.DatabaseError as e:
85 | # If database error, let's rollback any changes.
86 | if con:
87 | con.rollback()
88 | print("Error %s" % e)
89 | sys.exit(1)
90 |
91 | finally:
92 | if con:
93 | con.close()
94 | print("Connection to database closed.")
95 |
96 |
--------------------------------------------------------------------------------
/simplejson/tests/test_decode.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | import decimal
3 | from unittest import TestCase
4 |
5 | import simplejson as json
6 | from simplejson.compat import StringIO
7 | from simplejson import OrderedDict
8 |
9 | class TestDecode(TestCase):
10 | if not hasattr(TestCase, 'assertIs'):
11 | def assertIs(self, a, b):
12 | self.assertTrue(a is b, '%r is %r' % (a, b))
13 |
14 | def test_decimal(self):
15 | rval = json.loads('1.1', parse_float=decimal.Decimal)
16 | self.assertTrue(isinstance(rval, decimal.Decimal))
17 | self.assertEqual(rval, decimal.Decimal('1.1'))
18 |
19 | def test_float(self):
20 | rval = json.loads('1', parse_int=float)
21 | self.assertTrue(isinstance(rval, float))
22 | self.assertEqual(rval, 1.0)
23 |
24 | def test_decoder_optimizations(self):
25 | # Several optimizations were made that skip over calls to
26 | # the whitespace regex, so this test is designed to try and
27 | # exercise the uncommon cases. The array cases are already covered.
28 | rval = json.loads('{ "key" : "value" , "k":"v" }')
29 | self.assertEqual(rval, {"key":"value", "k":"v"})
30 |
31 | def test_empty_objects(self):
32 | s = '{}'
33 | self.assertEqual(json.loads(s), eval(s))
34 | s = '[]'
35 | self.assertEqual(json.loads(s), eval(s))
36 | s = '""'
37 | self.assertEqual(json.loads(s), eval(s))
38 |
39 | def test_object_pairs_hook(self):
40 | s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
41 | p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4),
42 | ("qrt", 5), ("pad", 6), ("hoy", 7)]
43 | self.assertEqual(json.loads(s), eval(s))
44 | self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
45 | self.assertEqual(json.load(StringIO(s),
46 | object_pairs_hook=lambda x: x), p)
47 | od = json.loads(s, object_pairs_hook=OrderedDict)
48 | self.assertEqual(od, OrderedDict(p))
49 | self.assertEqual(type(od), OrderedDict)
50 | # the object_pairs_hook takes priority over the object_hook
51 | self.assertEqual(json.loads(s,
52 | object_pairs_hook=OrderedDict,
53 | object_hook=lambda x: None),
54 | OrderedDict(p))
55 |
56 | def check_keys_reuse(self, source, loads):
57 | rval = loads(source)
58 | (a, b), (c, d) = sorted(rval[0]), sorted(rval[1])
59 | self.assertIs(a, c)
60 | self.assertIs(b, d)
61 |
62 | def test_keys_reuse_str(self):
63 | s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'.encode('utf8')
64 | self.check_keys_reuse(s, json.loads)
65 |
66 | def test_keys_reuse_unicode(self):
67 | s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'
68 | self.check_keys_reuse(s, json.loads)
69 |
70 | def test_empty_strings(self):
71 | self.assertEqual(json.loads('""'), "")
72 | self.assertEqual(json.loads(u'""'), u"")
73 | self.assertEqual(json.loads('[""]'), [""])
74 | self.assertEqual(json.loads(u'[""]'), [u""])
75 |
76 | def test_raw_decode(self):
77 | cls = json.decoder.JSONDecoder
78 | self.assertEqual(
79 | ({'a': {}}, 9),
80 | cls().raw_decode("{\"a\": {}}"))
81 | # http://code.google.com/p/simplejson/issues/detail?id=85
82 | self.assertEqual(
83 | ({'a': {}}, 9),
84 | cls(object_pairs_hook=dict).raw_decode("{\"a\": {}}"))
85 | # https://github.com/simplejson/simplejson/pull/38
86 | self.assertEqual(
87 | ({'a': {}}, 11),
88 | cls().raw_decode(" \n{\"a\": {}}"))
89 |
90 | def test_bounds_checking(self):
91 | # https://github.com/simplejson/simplejson/issues/98
92 | j = json.decoder.JSONDecoder()
93 | for i in [4, 5, 6, -1, -2, -3, -4, -5, -6]:
94 | self.assertRaises(ValueError, j.scan_once, '1234', i)
95 | self.assertRaises(ValueError, j.raw_decode, '1234', i)
96 | x, y = sorted(['128931233', '472389423'], key=id)
97 | diff = id(x) - id(y)
98 | self.assertRaises(ValueError, j.scan_once, y, diff)
99 | self.assertRaises(ValueError, j.raw_decode, y, i)
100 |
--------------------------------------------------------------------------------
/simplejson/tests/test_namedtuple.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | import unittest
3 | import simplejson as json
4 | from simplejson.compat import StringIO
5 |
6 | try:
7 | from collections import namedtuple
8 | except ImportError:
9 | class Value(tuple):
10 | def __new__(cls, *args):
11 | return tuple.__new__(cls, args)
12 |
13 | def _asdict(self):
14 | return {'value': self[0]}
15 | class Point(tuple):
16 | def __new__(cls, *args):
17 | return tuple.__new__(cls, args)
18 |
19 | def _asdict(self):
20 | return {'x': self[0], 'y': self[1]}
21 | else:
22 | Value = namedtuple('Value', ['value'])
23 | Point = namedtuple('Point', ['x', 'y'])
24 |
25 | class DuckValue(object):
26 | def __init__(self, *args):
27 | self.value = Value(*args)
28 |
29 | def _asdict(self):
30 | return self.value._asdict()
31 |
32 | class DuckPoint(object):
33 | def __init__(self, *args):
34 | self.point = Point(*args)
35 |
36 | def _asdict(self):
37 | return self.point._asdict()
38 |
39 | class DeadDuck(object):
40 | _asdict = None
41 |
42 | class DeadDict(dict):
43 | _asdict = None
44 |
45 | CONSTRUCTORS = [
46 | lambda v: v,
47 | lambda v: [v],
48 | lambda v: [{'key': v}],
49 | ]
50 |
51 | class TestNamedTuple(unittest.TestCase):
52 | def test_namedtuple_dumps(self):
53 | for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]:
54 | d = v._asdict()
55 | self.assertEqual(d, json.loads(json.dumps(v)))
56 | self.assertEqual(
57 | d,
58 | json.loads(json.dumps(v, namedtuple_as_object=True)))
59 | self.assertEqual(d, json.loads(json.dumps(v, tuple_as_array=False)))
60 | self.assertEqual(
61 | d,
62 | json.loads(json.dumps(v, namedtuple_as_object=True,
63 | tuple_as_array=False)))
64 |
65 | def test_namedtuple_dumps_false(self):
66 | for v in [Value(1), Point(1, 2)]:
67 | l = list(v)
68 | self.assertEqual(
69 | l,
70 | json.loads(json.dumps(v, namedtuple_as_object=False)))
71 | self.assertRaises(TypeError, json.dumps, v,
72 | tuple_as_array=False, namedtuple_as_object=False)
73 |
74 | def test_namedtuple_dump(self):
75 | for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]:
76 | d = v._asdict()
77 | sio = StringIO()
78 | json.dump(v, sio)
79 | self.assertEqual(d, json.loads(sio.getvalue()))
80 | sio = StringIO()
81 | json.dump(v, sio, namedtuple_as_object=True)
82 | self.assertEqual(
83 | d,
84 | json.loads(sio.getvalue()))
85 | sio = StringIO()
86 | json.dump(v, sio, tuple_as_array=False)
87 | self.assertEqual(d, json.loads(sio.getvalue()))
88 | sio = StringIO()
89 | json.dump(v, sio, namedtuple_as_object=True,
90 | tuple_as_array=False)
91 | self.assertEqual(
92 | d,
93 | json.loads(sio.getvalue()))
94 |
95 | def test_namedtuple_dump_false(self):
96 | for v in [Value(1), Point(1, 2)]:
97 | l = list(v)
98 | sio = StringIO()
99 | json.dump(v, sio, namedtuple_as_object=False)
100 | self.assertEqual(
101 | l,
102 | json.loads(sio.getvalue()))
103 | self.assertRaises(TypeError, json.dump, v, StringIO(),
104 | tuple_as_array=False, namedtuple_as_object=False)
105 |
106 | def test_asdict_not_callable_dump(self):
107 | for f in CONSTRUCTORS:
108 | self.assertRaises(TypeError,
109 | json.dump, f(DeadDuck()), StringIO(), namedtuple_as_object=True)
110 | sio = StringIO()
111 | json.dump(f(DeadDict()), sio, namedtuple_as_object=True)
112 | self.assertEqual(
113 | json.dumps(f({})),
114 | sio.getvalue())
115 |
116 | def test_asdict_not_callable_dumps(self):
117 | for f in CONSTRUCTORS:
118 | self.assertRaises(TypeError,
119 | json.dumps, f(DeadDuck()), namedtuple_as_object=True)
120 | self.assertEqual(
121 | json.dumps(f({})),
122 | json.dumps(f(DeadDict()), namedtuple_as_object=True))
123 |
--------------------------------------------------------------------------------
/psycopg2/tests/test_green.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # test_green.py - unit test for async wait callback
4 | #
5 | # Copyright (C) 2010-2011 Daniele Varrazzo
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 | import unittest
26 | import psycopg2
27 | import psycopg2.extensions
28 | import psycopg2.extras
29 |
30 | from testutils import ConnectingTestCase
31 |
32 | class ConnectionStub(object):
33 | """A `connection` wrapper allowing analysis of the `poll()` calls."""
34 | def __init__(self, conn):
35 | self.conn = conn
36 | self.polls = []
37 |
38 | def fileno(self):
39 | return self.conn.fileno()
40 |
41 | def poll(self):
42 | rv = self.conn.poll()
43 | self.polls.append(rv)
44 | return rv
45 |
46 | class GreenTestCase(ConnectingTestCase):
47 | def setUp(self):
48 | self._cb = psycopg2.extensions.get_wait_callback()
49 | psycopg2.extensions.set_wait_callback(psycopg2.extras.wait_select)
50 | ConnectingTestCase.setUp(self)
51 |
52 | def tearDown(self):
53 | ConnectingTestCase.tearDown(self)
54 | psycopg2.extensions.set_wait_callback(self._cb)
55 |
56 | def set_stub_wait_callback(self, conn):
57 | stub = ConnectionStub(conn)
58 | psycopg2.extensions.set_wait_callback(
59 | lambda conn: psycopg2.extras.wait_select(stub))
60 | return stub
61 |
62 | def test_flush_on_write(self):
63 | # a very large query requires a flush loop to be sent to the backend
64 | conn = self.conn
65 | stub = self.set_stub_wait_callback(conn)
66 | curs = conn.cursor()
67 | for mb in 1, 5, 10, 20, 50:
68 | size = mb * 1024 * 1024
69 | del stub.polls[:]
70 | curs.execute("select %s;", ('x' * size,))
71 | self.assertEqual(size, len(curs.fetchone()[0]))
72 | if stub.polls.count(psycopg2.extensions.POLL_WRITE) > 1:
73 | return
74 |
75 | # This is more a testing glitch than an error: it happens
76 | # on high load on linux: probably because the kernel has more
77 | # buffers ready. A warning may be useful during development,
78 | # but an error is bad during regression testing.
79 | import warnings
80 | warnings.warn("sending a large query didn't trigger block on write.")
81 |
82 | def test_error_in_callback(self):
83 | # behaviour changed after issue #113: if there is an error in the
84 | # callback for the moment we don't have a way to reset the connection
85 | # without blocking (ticket #113) so just close it.
86 | conn = self.conn
87 | curs = conn.cursor()
88 | curs.execute("select 1") # have a BEGIN
89 | curs.fetchone()
90 |
91 | # now try to do something that will fail in the callback
92 | psycopg2.extensions.set_wait_callback(lambda conn: 1//0)
93 | self.assertRaises(ZeroDivisionError, curs.execute, "select 2")
94 |
95 | self.assert_(conn.closed)
96 |
97 | def test_dont_freak_out(self):
98 | # if there is an error in a green query, don't freak out and close
99 | # the connection
100 | conn = self.conn
101 | curs = conn.cursor()
102 | self.assertRaises(psycopg2.ProgrammingError,
103 | curs.execute, "select the unselectable")
104 |
105 | # check that the connection is left in an usable state
106 | self.assert_(not conn.closed)
107 | conn.rollback()
108 | curs.execute("select 1")
109 | self.assertEqual(curs.fetchone()[0], 1)
110 |
111 |
112 | def test_suite():
113 | return unittest.TestLoader().loadTestsFromName(__name__)
114 |
115 | if __name__ == "__main__":
116 | unittest.main()
117 |
--------------------------------------------------------------------------------
/psycopg2/tests/dbapi20_tpc.py:
--------------------------------------------------------------------------------
1 | """ Python DB API 2.0 driver Two Phase Commit compliance test suite.
2 |
3 | """
4 |
5 | import unittest
6 |
7 |
8 | class TwoPhaseCommitTests(unittest.TestCase):
9 |
10 | driver = None
11 |
12 | def connect(self):
13 | """Make a database connection."""
14 | raise NotImplementedError
15 |
16 | _last_id = 0
17 | _global_id_prefix = "dbapi20_tpc:"
18 |
19 | def make_xid(self, con):
20 | id = TwoPhaseCommitTests._last_id
21 | TwoPhaseCommitTests._last_id += 1
22 | return con.xid(42, "%s%d" % (self._global_id_prefix, id), "qualifier")
23 |
24 | def test_xid(self):
25 | con = self.connect()
26 | try:
27 | xid = con.xid(42, "global", "bqual")
28 | except self.driver.NotSupportedError:
29 | self.fail("Driver does not support transaction IDs.")
30 |
31 | self.assertEquals(xid[0], 42)
32 | self.assertEquals(xid[1], "global")
33 | self.assertEquals(xid[2], "bqual")
34 |
35 | # Try some extremes for the transaction ID:
36 | xid = con.xid(0, "", "")
37 | self.assertEquals(tuple(xid), (0, "", ""))
38 | xid = con.xid(0x7fffffff, "a" * 64, "b" * 64)
39 | self.assertEquals(tuple(xid), (0x7fffffff, "a" * 64, "b" * 64))
40 |
41 | def test_tpc_begin(self):
42 | con = self.connect()
43 | try:
44 | xid = self.make_xid(con)
45 | try:
46 | con.tpc_begin(xid)
47 | except self.driver.NotSupportedError:
48 | self.fail("Driver does not support tpc_begin()")
49 | finally:
50 | con.close()
51 |
52 | def test_tpc_commit_without_prepare(self):
53 | con = self.connect()
54 | try:
55 | xid = self.make_xid(con)
56 | con.tpc_begin(xid)
57 | cursor = con.cursor()
58 | cursor.execute("SELECT 1")
59 | con.tpc_commit()
60 | finally:
61 | con.close()
62 |
63 | def test_tpc_rollback_without_prepare(self):
64 | con = self.connect()
65 | try:
66 | xid = self.make_xid(con)
67 | con.tpc_begin(xid)
68 | cursor = con.cursor()
69 | cursor.execute("SELECT 1")
70 | con.tpc_rollback()
71 | finally:
72 | con.close()
73 |
74 | def test_tpc_commit_with_prepare(self):
75 | con = self.connect()
76 | try:
77 | xid = self.make_xid(con)
78 | con.tpc_begin(xid)
79 | cursor = con.cursor()
80 | cursor.execute("SELECT 1")
81 | con.tpc_prepare()
82 | con.tpc_commit()
83 | finally:
84 | con.close()
85 |
86 | def test_tpc_rollback_with_prepare(self):
87 | con = self.connect()
88 | try:
89 | xid = self.make_xid(con)
90 | con.tpc_begin(xid)
91 | cursor = con.cursor()
92 | cursor.execute("SELECT 1")
93 | con.tpc_prepare()
94 | con.tpc_rollback()
95 | finally:
96 | con.close()
97 |
98 | def test_tpc_begin_in_transaction_fails(self):
99 | con = self.connect()
100 | try:
101 | xid = self.make_xid(con)
102 |
103 | cursor = con.cursor()
104 | cursor.execute("SELECT 1")
105 | self.assertRaises(self.driver.ProgrammingError,
106 | con.tpc_begin, xid)
107 | finally:
108 | con.close()
109 |
110 | def test_tpc_begin_in_tpc_transaction_fails(self):
111 | con = self.connect()
112 | try:
113 | xid = self.make_xid(con)
114 |
115 | cursor = con.cursor()
116 | cursor.execute("SELECT 1")
117 | self.assertRaises(self.driver.ProgrammingError,
118 | con.tpc_begin, xid)
119 | finally:
120 | con.close()
121 |
122 | def test_commit_in_tpc_fails(self):
123 | # calling commit() within a TPC transaction fails with
124 | # ProgrammingError.
125 | con = self.connect()
126 | try:
127 | xid = self.make_xid(con)
128 | con.tpc_begin(xid)
129 |
130 | self.assertRaises(self.driver.ProgrammingError, con.commit)
131 | finally:
132 | con.close()
133 |
134 | def test_rollback_in_tpc_fails(self):
135 | # calling rollback() within a TPC transaction fails with
136 | # ProgrammingError.
137 | con = self.connect()
138 | try:
139 | xid = self.make_xid(con)
140 | con.tpc_begin(xid)
141 |
142 | self.assertRaises(self.driver.ProgrammingError, con.rollback)
143 | finally:
144 | con.close()
145 |
--------------------------------------------------------------------------------
/psycopg2/tz.py:
--------------------------------------------------------------------------------
1 | """tzinfo implementations for psycopg2
2 |
3 | This module holds two different tzinfo implementations that can be used as
4 | the 'tzinfo' argument to datetime constructors, directly passed to psycopg
5 | functions or used to set the .tzinfo_factory attribute in cursors.
6 | """
7 | # psycopg/tz.py - tzinfo implementation
8 | #
9 | # Copyright (C) 2003-2010 Federico Di Gregorio
10 | #
11 | # psycopg2 is free software: you can redistribute it and/or modify it
12 | # under the terms of the GNU Lesser General Public License as published
13 | # by the Free Software Foundation, either version 3 of the License, or
14 | # (at your option) any later version.
15 | #
16 | # In addition, as a special exception, the copyright holders give
17 | # permission to link this program with the OpenSSL library (or with
18 | # modified versions of OpenSSL that use the same license as OpenSSL),
19 | # and distribute linked combinations including the two.
20 | #
21 | # You must obey the GNU Lesser General Public License in all respects for
22 | # all of the code used other than OpenSSL.
23 | #
24 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
25 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
26 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
27 | # License for more details.
28 |
29 | import datetime
30 | import time
31 |
32 | ZERO = datetime.timedelta(0)
33 |
34 | class FixedOffsetTimezone(datetime.tzinfo):
35 | """Fixed offset in minutes east from UTC.
36 |
37 | This is exactly the implementation__ found in Python 2.3.x documentation,
38 | with a small change to the `!__init__()` method to allow for pickling
39 | and a default name in the form ``sHH:MM`` (``s`` is the sign.).
40 |
41 | The implementation also caches instances. During creation, if a
42 | FixedOffsetTimezone instance has previously been created with the same
43 | offset and name that instance will be returned. This saves memory and
44 | improves comparability.
45 |
46 | .. __: http://docs.python.org/library/datetime.html#datetime-tzinfo
47 | """
48 | _name = None
49 | _offset = ZERO
50 |
51 | _cache = {}
52 |
53 | def __init__(self, offset=None, name=None):
54 | if offset is not None:
55 | self._offset = datetime.timedelta(minutes = offset)
56 | if name is not None:
57 | self._name = name
58 |
59 | def __new__(cls, offset=None, name=None):
60 | """Return a suitable instance created earlier if it exists
61 | """
62 | key = (offset, name)
63 | try:
64 | return cls._cache[key]
65 | except KeyError:
66 | tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
67 | cls._cache[key] = tz
68 | return tz
69 |
70 | def __repr__(self):
71 | offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
72 | return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \
73 | % (offset_mins, self._name)
74 |
75 | def __getinitargs__(self):
76 | offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
77 | return (offset_mins, self._name)
78 |
79 | def utcoffset(self, dt):
80 | return self._offset
81 |
82 | def tzname(self, dt):
83 | if self._name is not None:
84 | return self._name
85 | else:
86 | seconds = self._offset.seconds + self._offset.days * 86400
87 | hours, seconds = divmod(seconds, 3600)
88 | minutes = seconds/60
89 | if minutes:
90 | return "%+03d:%d" % (hours, minutes)
91 | else:
92 | return "%+03d" % hours
93 |
94 | def dst(self, dt):
95 | return ZERO
96 |
97 |
98 | STDOFFSET = datetime.timedelta(seconds = -time.timezone)
99 | if time.daylight:
100 | DSTOFFSET = datetime.timedelta(seconds = -time.altzone)
101 | else:
102 | DSTOFFSET = STDOFFSET
103 | DSTDIFF = DSTOFFSET - STDOFFSET
104 |
105 | class LocalTimezone(datetime.tzinfo):
106 | """Platform idea of local timezone.
107 |
108 | This is the exact implementation from the Python 2.3 documentation.
109 | """
110 | def utcoffset(self, dt):
111 | if self._isdst(dt):
112 | return DSTOFFSET
113 | else:
114 | return STDOFFSET
115 |
116 | def dst(self, dt):
117 | if self._isdst(dt):
118 | return DSTDIFF
119 | else:
120 | return ZERO
121 |
122 | def tzname(self, dt):
123 | return time.tzname[self._isdst(dt)]
124 |
125 | def _isdst(self, dt):
126 | tt = (dt.year, dt.month, dt.day,
127 | dt.hour, dt.minute, dt.second,
128 | dt.weekday(), 0, -1)
129 | stamp = time.mktime(tt)
130 | tt = time.localtime(stamp)
131 | return tt.tm_isdst > 0
132 |
133 | LOCAL = LocalTimezone()
134 |
135 | # TODO: pre-generate some interesting time zones?
136 |
--------------------------------------------------------------------------------
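A quick sketch (not repository code) of the two tzinfo implementations above: FixedOffsetTimezone takes an offset in minutes east of UTC, and LOCAL mirrors the platform timezone.

    import datetime
    from psycopg2.tz import FixedOffsetTimezone, LOCAL

    tz = FixedOffsetTimezone(offset=120)                  # fixed +02:00
    dt = datetime.datetime(2017, 1, 10, 12, 0, tzinfo=tz)
    print(dt.isoformat())                                 # 2017-01-10T12:00:00+02:00
    print(datetime.datetime.now(LOCAL).tzname())          # platform zone name, e.g. 'CET'
--------------------------------------------------------------------------------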
/simplejson/scanner.py:
--------------------------------------------------------------------------------
1 | """JSON token scanner
2 | """
3 | import re
4 | def _import_c_make_scanner():
5 | try:
6 | from simplejson._speedups import make_scanner
7 | return make_scanner
8 | except ImportError:
9 | return None
10 | c_make_scanner = _import_c_make_scanner()
11 |
12 | __all__ = ['make_scanner', 'JSONDecodeError']
13 |
14 | NUMBER_RE = re.compile(
15 | r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
16 | (re.VERBOSE | re.MULTILINE | re.DOTALL))
17 |
18 | class JSONDecodeError(ValueError):
19 | """Subclass of ValueError with the following additional properties:
20 |
21 | msg: The unformatted error message
22 | doc: The JSON document being parsed
23 | pos: The start index of doc where parsing failed
24 | end: The end index of doc where parsing failed (may be None)
25 | lineno: The line corresponding to pos
26 | colno: The column corresponding to pos
27 | endlineno: The line corresponding to end (may be None)
28 | endcolno: The column corresponding to end (may be None)
29 |
30 | """
31 | # Note that this exception is used from _speedups
32 | def __init__(self, msg, doc, pos, end=None):
33 | ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
34 | self.msg = msg
35 | self.doc = doc
36 | self.pos = pos
37 | self.end = end
38 | self.lineno, self.colno = linecol(doc, pos)
39 | if end is not None:
40 | self.endlineno, self.endcolno = linecol(doc, end)
41 | else:
42 | self.endlineno, self.endcolno = None, None
43 |
44 | def __reduce__(self):
45 | return self.__class__, (self.msg, self.doc, self.pos, self.end)
46 |
47 |
48 | def linecol(doc, pos):
49 | lineno = doc.count('\n', 0, pos) + 1
50 | if lineno == 1:
51 | colno = pos + 1
52 | else:
53 | colno = pos - doc.rindex('\n', 0, pos)
54 | return lineno, colno
55 |
56 |
57 | def errmsg(msg, doc, pos, end=None):
58 | lineno, colno = linecol(doc, pos)
59 | msg = msg.replace('%r', repr(doc[pos:pos + 1]))
60 | if end is None:
61 | fmt = '%s: line %d column %d (char %d)'
62 | return fmt % (msg, lineno, colno, pos)
63 | endlineno, endcolno = linecol(doc, end)
64 | fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
65 | return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
66 |
67 |
68 | def py_make_scanner(context):
69 | parse_object = context.parse_object
70 | parse_array = context.parse_array
71 | parse_string = context.parse_string
72 | match_number = NUMBER_RE.match
73 | encoding = context.encoding
74 | strict = context.strict
75 | parse_float = context.parse_float
76 | parse_int = context.parse_int
77 | parse_constant = context.parse_constant
78 | object_hook = context.object_hook
79 | object_pairs_hook = context.object_pairs_hook
80 | memo = context.memo
81 |
82 | def _scan_once(string, idx):
83 | errmsg = 'Expecting value'
84 | try:
85 | nextchar = string[idx]
86 | except IndexError:
87 | raise JSONDecodeError(errmsg, string, idx)
88 |
89 | if nextchar == '"':
90 | return parse_string(string, idx + 1, encoding, strict)
91 | elif nextchar == '{':
92 | return parse_object((string, idx + 1), encoding, strict,
93 | _scan_once, object_hook, object_pairs_hook, memo)
94 | elif nextchar == '[':
95 | return parse_array((string, idx + 1), _scan_once)
96 | elif nextchar == 'n' and string[idx:idx + 4] == 'null':
97 | return None, idx + 4
98 | elif nextchar == 't' and string[idx:idx + 4] == 'true':
99 | return True, idx + 4
100 | elif nextchar == 'f' and string[idx:idx + 5] == 'false':
101 | return False, idx + 5
102 |
103 | m = match_number(string, idx)
104 | if m is not None:
105 | integer, frac, exp = m.groups()
106 | if frac or exp:
107 | res = parse_float(integer + (frac or '') + (exp or ''))
108 | else:
109 | res = parse_int(integer)
110 | return res, m.end()
111 | elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
112 | return parse_constant('NaN'), idx + 3
113 | elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
114 | return parse_constant('Infinity'), idx + 8
115 | elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
116 | return parse_constant('-Infinity'), idx + 9
117 | else:
118 | raise JSONDecodeError(errmsg, string, idx)
119 |
120 | def scan_once(string, idx):
121 | if idx < 0:
122 | # Ensure the same behavior as the C speedup, otherwise
123 | # this would work for *some* negative string indices due
124 | # to the behavior of __getitem__ for strings. #98
125 | raise JSONDecodeError('Expecting value', string, idx)
126 | try:
127 | return _scan_once(string, idx)
128 | finally:
129 | memo.clear()
130 |
131 | return scan_once
132 |
133 | make_scanner = c_make_scanner or py_make_scanner
134 |
--------------------------------------------------------------------------------
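The attributes documented on JSONDecodeError above pinpoint where decoding failed; a small sketch, not repository code:

    import simplejson as json

    try:
        json.loads('{"a": 1,\n "b": }')
    except json.JSONDecodeError as e:
        # msg is "Expecting value"; lineno/colno/pos locate the bad token (line 2 here)
        print(e.msg, e.lineno, e.colno, e.pos)
--------------------------------------------------------------------------------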
/simplejson/tests/test_dump.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from simplejson.compat import StringIO, long_type, b, binary_type, PY3
3 | import simplejson as json
4 |
5 | def as_text_type(s):
6 | if PY3 and isinstance(s, binary_type):
7 | return s.decode('ascii')
8 | return s
9 |
10 | class TestDump(TestCase):
11 | def test_dump(self):
12 | sio = StringIO()
13 | json.dump({}, sio)
14 | self.assertEqual(sio.getvalue(), '{}')
15 |
16 | def test_constants(self):
17 | for c in [None, True, False]:
18 | self.assertTrue(json.loads(json.dumps(c)) is c)
19 | self.assertTrue(json.loads(json.dumps([c]))[0] is c)
20 | self.assertTrue(json.loads(json.dumps({'a': c}))['a'] is c)
21 |
22 | def test_stringify_key(self):
23 | items = [(b('bytes'), 'bytes'),
24 | (1.0, '1.0'),
25 | (10, '10'),
26 | (True, 'true'),
27 | (False, 'false'),
28 | (None, 'null'),
29 | (long_type(100), '100')]
30 | for k, expect in items:
31 | self.assertEqual(
32 | json.loads(json.dumps({k: expect})),
33 | {expect: expect})
34 | self.assertEqual(
35 | json.loads(json.dumps({k: expect}, sort_keys=True)),
36 | {expect: expect})
37 | self.assertRaises(TypeError, json.dumps, {json: 1})
38 | for v in [{}, {'other': 1}, {b('derp'): 1, 'herp': 2}]:
39 | for sort_keys in [False, True]:
40 | v0 = dict(v)
41 | v0[json] = 1
42 | v1 = dict((as_text_type(key), val) for (key, val) in v.items())
43 | self.assertEqual(
44 | json.loads(json.dumps(v0, skipkeys=True, sort_keys=sort_keys)),
45 | v1)
46 | self.assertEqual(
47 | json.loads(json.dumps({'': v0}, skipkeys=True, sort_keys=sort_keys)),
48 | {'': v1})
49 | self.assertEqual(
50 | json.loads(json.dumps([v0], skipkeys=True, sort_keys=sort_keys)),
51 | [v1])
52 |
53 | def test_dumps(self):
54 | self.assertEqual(json.dumps({}), '{}')
55 |
56 | def test_encode_truefalse(self):
57 | self.assertEqual(json.dumps(
58 | {True: False, False: True}, sort_keys=True),
59 | '{"false": true, "true": false}')
60 | self.assertEqual(
61 | json.dumps(
62 | {2: 3.0,
63 | 4.0: long_type(5),
64 | False: 1,
65 | long_type(6): True,
66 | "7": 0},
67 | sort_keys=True),
68 | '{"2": 3.0, "4.0": 5, "6": true, "7": 0, "false": 1}')
69 |
70 | def test_ordered_dict(self):
71 | # http://bugs.python.org/issue6105
72 | items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
73 | s = json.dumps(json.OrderedDict(items))
74 | self.assertEqual(
75 | s,
76 | '{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}')
77 |
78 | def test_indent_unknown_type_acceptance(self):
79 | """
80 | A test against the regression mentioned at `github issue 29`_.
81 |
82 | The indent parameter should accept any type which pretends to be
83 | an instance of int or long when it comes to being multiplied by
84 | strings, even if it is not actually an int or long, for
85 | backwards compatibility.
86 |
87 | .. _github issue 29:
88 | http://github.com/simplejson/simplejson/issue/29
89 | """
90 |
91 | class AwesomeInt(object):
92 | """An awesome reimplementation of integers"""
93 |
94 | def __init__(self, *args, **kwargs):
95 | if len(args) > 0:
96 | # [construct from literals, objects, etc.]
97 | # ...
98 |
99 | # Finally, if args[0] is an integer, store it
100 | if isinstance(args[0], int):
101 | self._int = args[0]
102 |
103 | # [various methods]
104 |
105 | def __mul__(self, other):
106 | # [various ways to multiply AwesomeInt objects]
107 | # ... finally, if the right-hand operand is not awesome enough,
108 | # try to do a normal integer multiplication
109 | if hasattr(self, '_int'):
110 | return self._int * other
111 | else:
112 | raise NotImplementedError("To do non-awesome things with"
113 | " this object, please construct it from an integer!")
114 |
115 | s = json.dumps([0, 1, 2], indent=AwesomeInt(3))
116 | self.assertEqual(s, '[\n 0,\n 1,\n 2\n]')
117 |
118 | def test_accumulator(self):
119 | # the C API uses an accumulator that collects after 100,000 appends
120 | lst = [0] * 100000
121 | self.assertEqual(json.loads(json.dumps(lst)), lst)
122 |
123 | def test_sort_keys(self):
124 | # https://github.com/simplejson/simplejson/issues/106
125 | for num_keys in range(2, 32):
126 | p = dict((str(x), x) for x in range(num_keys))
127 | sio = StringIO()
128 | json.dump(p, sio, sort_keys=True)
129 | self.assertEqual(sio.getvalue(), json.dumps(p, sort_keys=True))
130 | self.assertEqual(json.loads(sio.getvalue()), p)
131 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Mixpanel Puller 📊
2 |
3 | 
4 | 
5 |
6 | Python scripts that pull data from Mixpanel using the Mixpanel API 📡, extract the relevant data, and insert it into various tables in PostgreSQL.
7 |
8 | It consists of two main runnable scripts:
9 |
10 | 1. **funnels_script.py** - Pulls funnel data from Mixpanel for the previous 7 days (script to run once a week) and inserts it into the funnel transactions (funnel_trans) table.
11 |
12 | 2. **raw_export_script.py** - Pulls raw event data from Mixpanel for the previous day (script to run daily) and inserts it into 4 tables:
13 |
14 | * *event_def*: Event definitions table.
15 | * *event_trans*: Event transactions table.
16 | * *property_def*: Property definitions table.
17 | * *property_trans*: Property transactions table.
18 |
19 | ## Getting started
20 |
21 | ### Files and libraries explained
22 |
23 | #### Description of Python script files
24 |
25 | The two main scripts run from the following Python files:
26 |
27 | 1. Raw export: raw_export_script.py
28 | 2. Funnels: funnels_script.py
29 |
30 | These two scripts will call:
31 |
32 | * **mixpanel_puller.py** - this is called by the scripts in order to pull the relevant data from Mixpanel.com. It contains three methods:
33 |
34 | * *pull_raw_export* - pulls raw event data from Mixpanel.
35 | * *list_funnels* - pulls a list of funnels with funnel_id and funnel_name from Mixpanel.
36 | * *pull_funnels* - pulls funnel data for a given funnel_id, one of the parameters it takes.
37 |
38 | * **to_postgres.py** - this is called by the scripts to connect to the database and create tables (see the usage sketch after this list). It contains three methods:
39 | * *connect_db* - accepts parameters hostname, db (database name), name (username) and pw (password) in order to connect to the Postgres database.
40 | * *create_raw_export_tables* - creates the tables used to store raw event data exported from Mixpanel: event_def, property_def, property_trans and event_trans. Each table is only created if it doesn't already exist.
41 | * *create_funnel_table* - creates the funnel_trans table. Will only create the table if it doesn't already exist.
42 |
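For orientation, here is a minimal sketch of how a runnable script wires these two helper modules together. It mirrors the flow of *raw_export_script.py*; the credential and connection values are placeholders, not real settings.

```
# Minimal usage sketch (mirrors raw_export_script.py); credentials and
# connection values below are placeholders.
from datetime import date, timedelta

import mixpanel_puller
import to_postgres

api_key, api_secret = "YOUR_API_KEY", "YOUR_API_SECRET"

# Pull yesterday's raw events from Mixpanel.
start = end = date.today() - timedelta(days=1)
events = mixpanel_puller.pull_raw_export(start, end, api_key, api_secret)

# Connect to Postgres and make sure the destination tables exist.
con, cur = to_postgres.connect_db("localhost", "mydb", "myuser", "mypassword")
to_postgres.create_raw_export_tables(con, cur)

# ... INSERT the rows from `events` into the event/property tables, then:
con.commit()
con.close()
```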
43 | #### Description of libraries used
44 |
45 | * **mixpanel_api** - contains the Mixpanel API Python library. Inside the *mixpanel_api* folder are the Mixpanel API files: one is used for raw data export (*data_export.py*) and the other (*general.py*) for requesting funnel data. Mixpanel uses a slightly different syntax for requesting raw data, so raw export has its own API script that differs slightly from the general one.
46 | * **simplejson** - JSON module for encoding and decoding.
47 |
48 | ### Dependencies
49 |
50 | **psycopg2:** These scripts use psycopg2, a PostgreSQL database adapter for Python that allows the scripts to run PostgreSQL statements directly from Python.
51 |
52 | #### How to install psycopg2
53 | Visit [this page][install] for instructions on how to install it on Linux, Windows or Mac OS X. On Linux servers or PCs running Debian, Ubuntu or other deb-based distributions you should just need:
54 |
55 | ```
56 | sudo apt-get install python-psycopg2
57 | ```
58 |
59 | Alternatively, install as a Python package using:
60 |
61 | ```
62 | pip install psycopg2
63 |
64 | ```
65 |
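Once psycopg2 is installed, a quick sanity check along the lines below (the connection parameters are placeholders) should print the PostgreSQL server version and confirm that the adapter can reach your database:

```
import psycopg2

# Placeholder connection parameters -- substitute your own.
con = psycopg2.connect(host="localhost", database="mydb",
                       user="myuser", password="mypassword")
cur = con.cursor()
cur.execute("SELECT version();")
print(cur.fetchone()[0])
con.close()
```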
66 | ### What do I need to change before running scripts?
67 |
68 | You will need to change a few variables (specified below) in the .py script files themselves before running any scripts.
69 |
70 | #### Mixpanel API Key and Secret
71 | In order to start pulling data from your Mixpanel account, you will need your API Key and Secret.
72 |
73 | Enter these by simply changing the following variables:
74 |
75 | * *api_key* - Line 11 in *funnels_script.py*, Line 9 in *raw_export_script.py*.
76 | * *api_secret* - Line 12 in *funnels_script.py*, Line 10 in *raw_export_script.py*.
77 |
78 | #### PostgreSQL database parameters
79 | **Note: THIS MUST BE CHANGED BEFORE RUNNING ANY SCRIPT**
80 |
81 | The following database parameter variables in each script will have to be changed in order to connect to the correct database (see the note after this list):
82 |
83 | * *hostname*: This is the hostname such as "localhost" or "xxx.eu-west-1.compute.amazonaws.com". Change this in Line 26 in *funnels_script.py* and Line 32 in *raw_export_script.py*.
84 |
85 | * *db*: This is the database name. Change it in Line 27 in *funnels_script.py* and Line 33 in *raw_export_script.py*.
86 |
87 | * *name*: This is the username used to connect to the database. Change it in Line 28 in *funnels_script.py* and Line 34 in *raw_export_script.py*.
88 |
89 | * *pw*: This is the password to connect to the database. Change it in Line 29 in *funnels_script.py* and Line 35 in *raw_export_script.py*.
90 |
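For reference, the copy of *raw_export_script.py* included in this repository reads the Mixpanel credentials and these database parameters from environment variables rather than from hard-coded literals, so they can also be supplied through the environment:

```
import os

# Variable names as used in raw_export_script.py; the environment variables
# must be set before the script runs.
api_key = os.environ['MIXPANEL_KEY']
api_secret = os.environ['MIXPANEL_SECRET']

hostname = os.environ['POSTGRES_HOST']
db = os.environ['POSTGRES_DB']
user = os.environ['POSTGRES_USER']
pw = os.environ['POSTGRES_PW']
```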
91 | ## Testing
92 |
93 | ### Date ranges
94 | By default **funnels_script.py** pulls funnel data for the previous seven days, while **raw_export_script.py** pulls event data for the previous day. These date ranges can be changed by editing the appropriate date variables so that data for other periods can be pulled as required, as sketched below.
95 |
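For example, in *raw_export_script.py* the export window is controlled by two date variables near the top of the script; widening it from the default single day to the previous seven days would look roughly like this (purely an illustration):

```
from datetime import date, timedelta

# Default in raw_export_script.py: start and end are both yesterday.
# Widening the window to the previous seven full days:
export_start_date = date.today() - timedelta(days=7)
export_end_date = date.today() - timedelta(days=1)
```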
96 | ## Lift off!
97 | Once you have installed the dependencies and changed the relevant variables as described above, go ahead and run the scripts!
98 |
99 | After running *funnels_script.py* and *raw_export_script.py*, you should see five tables in your Postgres database beautifully populated with the relevant columns and Mixpanel data.
100 |
101 | [install]: http://initd.org/psycopg/docs/install.html "How to Install Psycopg2"
102 |
--------------------------------------------------------------------------------
/raw_export_script.py:
--------------------------------------------------------------------------------
1 | # Main script: exports raw event data from Mixpanel and loads it into Postgres
2 | from datetime import date, timedelta, datetime
3 | import mixpanel_puller
4 | import to_postgres
5 | import psycopg2
6 | import sys
7 | import os
8 |
9 | # Mixpanel credentials
10 | # CHECK AUTH METHOD, NOT VALID ANYMORE
11 | api_key = os.environ['MIXPANEL_KEY']
12 | api_secret = os.environ['MIXPANEL_SECRET']
13 |
14 | # To log time taken to export all raw event data for the day
15 | timing_file = open('timing.txt', 'w')
16 |
17 | # More logs
18 | # mixpanel_data_file = open('mixpanel_sample.txt','r')
19 |
20 | # Export raw event data from Mixpanel
21 | print("Exporting raw event data from Mixpanel...")
22 | export_start_date = date.today()-timedelta(days=1) # Yesterday
23 | export_end_date = date.today()-timedelta(days=1) # Yesterday
24 | timing_file.write("Started exporting raw data: " + str(datetime.now())+"\n")
25 | event_dicts = mixpanel_puller.pull_raw_export(export_start_date, export_end_date, api_key, api_secret)
26 | # event_dicts = ast.literal_eval(mixpanel_data_file.read())
27 | number_of_events = len(event_dicts)
28 | timing_file.write("Completed exporting " + str(number_of_events) + " events at: " + str(datetime.now()) + "\n")
29 | print("Raw data export completed successfully!")
30 | print(str(len(event_dicts)) + " event(s) exported.")
31 |
32 | # Database operations
33 | # 1. Connect to Postgres database
34 | hostname = os.environ['POSTGRES_HOST']
35 | db = os.environ['POSTGRES_DB']
36 | user = os.environ['POSTGRES_USER']
37 | pw = os.environ['POSTGRES_PW']
38 | con, cur = to_postgres.connect_db(hostname, db, user, pw)
39 |
40 | # 2. Create required tables if they do not yet exist
41 | to_postgres.create_raw_export_tables(con, cur)
42 |
43 | # 3. Update tables
44 | try:
45 | # Update event definition and event transaction table
46 | print("Updating events tables...")
47 | for each_event in event_dicts:
48 | event_name = each_event["event"]
49 | # Update event def table
50 | cur.execute("SELECT exists(SELECT * FROM event_def where event_name=%s);", (event_name, ))
51 | if not(cur.fetchone()[0]): # If not found in table, add the event.
52 | cur.execute("INSERT INTO event_def(event_name) VALUES(%s);", (event_name, ))
53 | # Update event transaction table
54 | cur.execute("SELECT event_id FROM event_def WHERE event_name=%s;", (event_name, )) # Select event id
55 | event_id = cur.fetchone()[0] # Store event_id
56 | timestamp = each_event["properties"]["time"] # Store timestamp
57 | cur.execute("INSERT INTO event_trans(event_id,timestamp) VALUES(%s,%s);", (event_id, timestamp))
58 | timing_file.write("Completed event table writing and started properties at: " + str(datetime.now()) + "\n")
59 | con.commit()
60 |
61 | event_count = 0
62 | current_progress_percent = 0
63 | print("Updating property tables...")
64 | # Update property definition and property transaction table
65 | print("Percentage completed...")
66 | for each_event in event_dicts:
67 | event_count+=1
68 | cur.execute("SELECT event_id FROM event_def WHERE event_name=%s", (each_event["event"], ))
69 | event_id = cur.fetchone()[0]
70 | properties = each_event["properties"] # Properties is a dictionary of various key-values (properties)
71 | property_trans_list = []
72 | # Loop through each property
73 | for key in properties:
74 | key_formatted = key.lstrip('$').replace(' ', '_').lower() # Remove spaces and $ sign from beginning of keys
75 | # Update property def table
76 | cur.execute(
77 | "SELECT exists(SELECT * FROM property_def where property_name=%s AND event_id=%s);",
78 | (key_formatted, event_id)
79 | )
80 | if not(cur.fetchone()[0]): # If not found in table, add the property name and event id
81 | cur.execute(
82 | "INSERT INTO property_def(property_name, event_id) VALUES(%s,%s);",
83 | (key_formatted, event_id)
84 | )
85 | # Update property transaction table
86 | # Let's try multi row insertion here
87 | cur.execute(
88 | "SELECT property_id FROM property_def WHERE property_name=%s AND event_id=%s;",
89 | (key_formatted, event_id)
90 | ) # Get property id
91 | property_id = cur.fetchone()[0]
92 | # Let's batch them by properties per event,
93 | # so insert all the property rows for each event using one "INSERT"
94 | property_trans_list.append([str(properties[key]), property_id])
95 | property_trans_tuple = tuple(property_trans_list)
96 | # Execute the insertion of all properties for this event
97 | cur.executemany("INSERT INTO property_trans(property_value,property_id) VALUES(%s,%s);", property_trans_tuple)
98 | if ((event_count/1.00)/number_of_events)*100 > 5+current_progress_percent:
99 | current_progress_percent = ((event_count/1.00)/number_of_events)*100
100 | timing_file.write("Progress bar: " + str(current_progress_percent) + "%\n")
101 | print(str(int(current_progress_percent)) + "%...")
102 | timing_file.write("Completed property table writing at: " + str(datetime.now()) + "\n")
103 | con.commit()
104 | timing_file.write("Commited to database at: " + str(datetime.now()) + "\n")
105 |
106 | print("Update complete and changes committed to database.")
107 |
108 | except psycopg2.DatabaseError as e:
109 | # If database error, let's rollback any changes.
110 | if con:
111 | con.rollback()
112 | print('Error %s' % e)
113 | timing_file.write("Error encountered at: " + str(datetime.now()) + "\n")
114 | timing_file.close()
115 | sys.exit(1)
116 |
117 | finally:
118 | if con:
119 | con.close()
120 | print("Connection to database closed.")
121 | timing_file.write("Connection to database closed at: " + str(datetime.now()) + "\n")
122 | timing_file.close()
123 |
124 |
--------------------------------------------------------------------------------
/psycopg2/__init__.py:
--------------------------------------------------------------------------------
1 | """A Python driver for PostgreSQL
2 |
3 | psycopg is a PostgreSQL_ database adapter for the Python_ programming
4 | language. This is version 2, a complete rewrite of the original code to
5 | provide new-style classes for connection and cursor objects and other sweet
6 | candies. Like the original, psycopg 2 was written with the aim of being very
7 | small and fast, and stable as a rock.
8 |
9 | Homepage: http://initd.org/projects/psycopg2
10 |
11 | .. _PostgreSQL: http://www.postgresql.org/
12 | .. _Python: http://www.python.org/
13 |
14 | :Groups:
15 | * `Connections creation`: connect
16 | * `Value objects constructors`: Binary, Date, DateFromTicks, Time,
17 | TimeFromTicks, Timestamp, TimestampFromTicks
18 | """
19 | # psycopg/__init__.py - initialization of the psycopg module
20 | #
21 | # Copyright (C) 2003-2010 Federico Di Gregorio
22 | #
23 | # psycopg2 is free software: you can redistribute it and/or modify it
24 | # under the terms of the GNU Lesser General Public License as published
25 | # by the Free Software Foundation, either version 3 of the License, or
26 | # (at your option) any later version.
27 | #
28 | # In addition, as a special exception, the copyright holders give
29 | # permission to link this program with the OpenSSL library (or with
30 | # modified versions of OpenSSL that use the same license as OpenSSL),
31 | # and distribute linked combinations including the two.
32 | #
33 | # You must obey the GNU Lesser General Public License in all respects for
34 | # all of the code used other than OpenSSL.
35 | #
36 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
37 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
38 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
39 | # License for more details.
40 |
41 | # Import modules needed by _psycopg to allow tools like py2exe to do
42 | # their work without bothering about the module dependencies.
43 |
44 | # Note: the first internal import should be _psycopg, otherwise the real cause
45 | # of a failed loading of the C module may get hidden, see
46 | # http://archives.postgresql.org/psycopg/2011-02/msg00044.php
47 |
48 | # Import the DBAPI-2.0 stuff into top-level module.
49 |
50 | from psycopg2._psycopg import BINARY, NUMBER, STRING, DATETIME, ROWID
51 |
52 | from psycopg2._psycopg import Binary, Date, Time, Timestamp
53 | from psycopg2._psycopg import DateFromTicks, TimeFromTicks, TimestampFromTicks
54 |
55 | from psycopg2._psycopg import Error, Warning, DataError, DatabaseError, ProgrammingError
56 | from psycopg2._psycopg import IntegrityError, InterfaceError, InternalError
57 | from psycopg2._psycopg import NotSupportedError, OperationalError
58 |
59 | from psycopg2._psycopg import _connect, apilevel, threadsafety, paramstyle
60 | from psycopg2._psycopg import __version__
61 |
62 | from psycopg2 import tz
63 |
64 |
65 | # Register default adapters.
66 |
67 | import psycopg2.extensions as _ext
68 | _ext.register_adapter(tuple, _ext.SQL_IN)
69 | _ext.register_adapter(type(None), _ext.NoneAdapter)
70 |
71 | # Register the Decimal adapter here instead of in the C layer.
72 | # This way a new class is registered for each sub-interpreter.
73 | # See ticket #52
74 | try:
75 | from decimal import Decimal
76 | except ImportError:
77 | pass
78 | else:
79 | from psycopg2._psycopg import Decimal as Adapter
80 | _ext.register_adapter(Decimal, Adapter)
81 | del Decimal, Adapter
82 |
83 | import re
84 |
85 | def _param_escape(s,
86 | re_escape=re.compile(r"([\\'])"),
87 | re_space=re.compile(r'\s')):
88 | """
89 | Apply the escaping rule required by PQconnectdb
90 | """
91 | if not s: return "''"
92 |
93 | s = re_escape.sub(r'\\\1', s)
94 | if re_space.search(s):
95 | s = "'" + s + "'"
96 |
97 | return s
98 |
99 | del re
100 |
101 |
102 | def connect(dsn=None,
103 | database=None, user=None, password=None, host=None, port=None,
104 | connection_factory=None, cursor_factory=None, async=False, **kwargs):
105 | """
106 | Create a new database connection.
107 |
108 | The connection parameters can be specified either as a string:
109 |
110 | conn = psycopg2.connect("dbname=test user=postgres password=secret")
111 |
112 | or using a set of keyword arguments:
113 |
114 | conn = psycopg2.connect(database="test", user="postgres", password="secret")
115 |
116 | The basic connection parameters are:
117 |
118 | - *dbname*: the database name (only in dsn string)
119 | - *database*: the database name (only as keyword argument)
120 | - *user*: user name used to authenticate
121 | - *password*: password used to authenticate
122 | - *host*: database host address (defaults to UNIX socket if not provided)
123 | - *port*: connection port number (defaults to 5432 if not provided)
124 |
125 | Using the *connection_factory* parameter a different class or connections
126 | factory can be specified. It should be a callable object taking a dsn
127 | argument.
128 |
129 | Using the *cursor_factory* parameter, a new default cursor factory will be
130 | used by cursor().
131 |
132 | Using *async*=True an asynchronous connection will be created.
133 |
134 | Any other keyword parameter will be passed to the underlying client
135 | library: the list of supported parameters depends on the library version.
136 |
137 | """
138 | items = []
139 | if database is not None:
140 | items.append(('dbname', database))
141 | if user is not None:
142 | items.append(('user', user))
143 | if password is not None:
144 | items.append(('password', password))
145 | if host is not None:
146 | items.append(('host', host))
147 | if port is not None:
148 | items.append(('port', port))
149 |
150 | items.extend([(k, v) for (k, v) in kwargs.iteritems() if v is not None])
151 |
152 | if dsn is not None and items:
153 | raise TypeError(
154 | "'%s' is an invalid keyword argument when the dsn is specified"
155 | % items[0][0])
156 |
157 | if dsn is None:
158 | if not items:
159 | raise TypeError('missing dsn and no parameters')
160 | else:
161 | dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
162 | for (k, v) in items])
163 |
164 | conn = _connect(dsn, connection_factory=connection_factory, async=async)
165 | if cursor_factory is not None:
166 | conn.cursor_factory = cursor_factory
167 |
168 | return conn
169 |
--------------------------------------------------------------------------------
/psycopg2/extensions.py:
--------------------------------------------------------------------------------
1 | """psycopg extensions to the DBAPI-2.0
2 |
3 | This module holds all the extensions to the DBAPI-2.0 provided by psycopg.
4 |
5 | - `connection` -- the new-type inheritable connection class
6 | - `cursor` -- the new-type inheritable cursor class
7 | - `lobject` -- the new-type inheritable large object class
8 | - `adapt()` -- exposes the PEP-246_ compatible adapting mechanism used
9 | by psycopg to adapt Python types to PostgreSQL ones
10 |
11 | .. _PEP-246: http://www.python.org/peps/pep-0246.html
12 | """
13 | # psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg
14 | #
15 | # Copyright (C) 2003-2010 Federico Di Gregorio
16 | #
17 | # psycopg2 is free software: you can redistribute it and/or modify it
18 | # under the terms of the GNU Lesser General Public License as published
19 | # by the Free Software Foundation, either version 3 of the License, or
20 | # (at your option) any later version.
21 | #
22 | # In addition, as a special exception, the copyright holders give
23 | # permission to link this program with the OpenSSL library (or with
24 | # modified versions of OpenSSL that use the same license as OpenSSL),
25 | # and distribute linked combinations including the two.
26 | #
27 | # You must obey the GNU Lesser General Public License in all respects for
28 | # all of the code used other than OpenSSL.
29 | #
30 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
31 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
32 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
33 | # License for more details.
34 |
35 | from psycopg2._psycopg import UNICODE, INTEGER, LONGINTEGER, BOOLEAN, FLOAT
36 | from psycopg2._psycopg import TIME, DATE, INTERVAL, DECIMAL
37 | from psycopg2._psycopg import BINARYARRAY, BOOLEANARRAY, DATEARRAY, DATETIMEARRAY
38 | from psycopg2._psycopg import DECIMALARRAY, FLOATARRAY, INTEGERARRAY, INTERVALARRAY
39 | from psycopg2._psycopg import LONGINTEGERARRAY, ROWIDARRAY, STRINGARRAY, TIMEARRAY
40 | from psycopg2._psycopg import UNICODEARRAY
41 |
42 | from psycopg2._psycopg import Binary, Boolean, Int, Float, QuotedString, AsIs
43 | try:
44 | from psycopg2._psycopg import MXDATE, MXDATETIME, MXINTERVAL, MXTIME
45 | from psycopg2._psycopg import MXDATEARRAY, MXDATETIMEARRAY, MXINTERVALARRAY, MXTIMEARRAY
46 | from psycopg2._psycopg import DateFromMx, TimeFromMx, TimestampFromMx
47 | from psycopg2._psycopg import IntervalFromMx
48 | except ImportError:
49 | pass
50 |
51 | try:
52 | from psycopg2._psycopg import PYDATE, PYDATETIME, PYINTERVAL, PYTIME
53 | from psycopg2._psycopg import PYDATEARRAY, PYDATETIMEARRAY, PYINTERVALARRAY, PYTIMEARRAY
54 | from psycopg2._psycopg import DateFromPy, TimeFromPy, TimestampFromPy
55 | from psycopg2._psycopg import IntervalFromPy
56 | except ImportError:
57 | pass
58 |
59 | from psycopg2._psycopg import adapt, adapters, encodings, connection, cursor, lobject, Xid
60 | from psycopg2._psycopg import string_types, binary_types, new_type, new_array_type, register_type
61 | from psycopg2._psycopg import ISQLQuote, Notify, Diagnostics
62 |
63 | from psycopg2._psycopg import QueryCanceledError, TransactionRollbackError
64 |
65 | try:
66 | from psycopg2._psycopg import set_wait_callback, get_wait_callback
67 | except ImportError:
68 | pass
69 |
70 | """Isolation level values."""
71 | ISOLATION_LEVEL_AUTOCOMMIT = 0
72 | ISOLATION_LEVEL_READ_UNCOMMITTED = 4
73 | ISOLATION_LEVEL_READ_COMMITTED = 1
74 | ISOLATION_LEVEL_REPEATABLE_READ = 2
75 | ISOLATION_LEVEL_SERIALIZABLE = 3
76 |
77 | """psycopg connection status values."""
78 | STATUS_SETUP = 0
79 | STATUS_READY = 1
80 | STATUS_BEGIN = 2
81 | STATUS_SYNC = 3 # currently unused
82 | STATUS_ASYNC = 4 # currently unused
83 | STATUS_PREPARED = 5
84 |
85 | # This is a useful mnemonic to check if the connection is in a transaction
86 | STATUS_IN_TRANSACTION = STATUS_BEGIN
87 |
88 | """psycopg asynchronous connection polling values"""
89 | POLL_OK = 0
90 | POLL_READ = 1
91 | POLL_WRITE = 2
92 | POLL_ERROR = 3
93 |
94 | """Backend transaction status values."""
95 | TRANSACTION_STATUS_IDLE = 0
96 | TRANSACTION_STATUS_ACTIVE = 1
97 | TRANSACTION_STATUS_INTRANS = 2
98 | TRANSACTION_STATUS_INERROR = 3
99 | TRANSACTION_STATUS_UNKNOWN = 4
100 |
101 | import sys as _sys
102 |
103 | # Return bytes from a string
104 | if _sys.version_info[0] < 3:
105 | def b(s):
106 | return s
107 | else:
108 | def b(s):
109 | return s.encode('utf8')
110 |
111 | def register_adapter(typ, callable):
112 | """Register 'callable' as an ISQLQuote adapter for type 'typ'."""
113 | adapters[(typ, ISQLQuote)] = callable
114 |
115 |
116 | # The SQL_IN class is the official adapter for tuples starting from 2.0.6.
117 | class SQL_IN(object):
118 | """Adapt any iterable to an SQL quotable object."""
119 | def __init__(self, seq):
120 | self._seq = seq
121 | self._conn = None
122 |
123 | def prepare(self, conn):
124 | self._conn = conn
125 |
126 | def getquoted(self):
127 | # this is the important line: note how every object in the
128 | # list is adapted and then how getquoted() is called on it
129 | pobjs = [adapt(o) for o in self._seq]
130 | if self._conn is not None:
131 | for obj in pobjs:
132 | if hasattr(obj, 'prepare'):
133 | obj.prepare(self._conn)
134 | qobjs = [o.getquoted() for o in pobjs]
135 | return b('(') + b(', ').join(qobjs) + b(')')
136 |
137 | def __str__(self):
138 | return str(self.getquoted())
139 |
140 |
141 | class NoneAdapter(object):
142 | """Adapt None to NULL.
143 |
144 | This adapter is not used normally as a fast path in mogrify uses NULL,
145 | but it makes easier to adapt composite types.
146 | """
147 | def __init__(self, obj):
148 | pass
149 |
150 | def getquoted(self, _null=b("NULL")):
151 | return _null
152 |
153 |
154 | # Create default json typecasters for PostgreSQL 9.2 oids
155 | from psycopg2._json import register_default_json, register_default_jsonb
156 |
157 | try:
158 | JSON, JSONARRAY = register_default_json()
159 | JSONB, JSONBARRAY = register_default_jsonb()
160 | except ImportError:
161 | pass
162 |
163 | del register_default_json, register_default_jsonb
164 |
165 |
166 | # Create default Range typecasters
167 | from psycopg2. _range import Range
168 | del Range
169 |
170 |
171 | # Add the "cleaned" version of the encodings to the key.
172 | # When the encoding is set its name is cleaned up from - and _ and turned
173 | # uppercase, so an encoding not respecting these rules wouldn't be found in the
174 | # encodings keys and would raise an exception with the unicode typecaster
175 | for k, v in encodings.items():
176 | k = k.replace('_', '').replace('-', '').upper()
177 | encodings[k] = v
178 |
179 | del k, v
180 |
--------------------------------------------------------------------------------
/psycopg2/tests/test_quote.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # test_quote.py - unit test for strings quoting
4 | #
5 | # Copyright (C) 2007-2011 Daniele Varrazzo
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 | import sys
26 | from testutils import unittest, ConnectingTestCase
27 |
28 | import psycopg2
29 | import psycopg2.extensions
30 | from psycopg2.extensions import b
31 |
32 | class QuotingTestCase(ConnectingTestCase):
33 | r"""Checks the correct quoting of strings and binary objects.
34 |
35 | Since ver. 8.1, PostgreSQL is moving towards SQL standard conforming
36 | strings, where the backslash (\) is treated as literal character,
37 | not as escape. To treat the backslash as a C-style escapes, PG supports
38 | the E'' quotes.
39 |
40 | This test case checks that the E'' quotes are used whenever they are
41 | needed. The tests are expected to pass with all PostgreSQL server versions
42 | (currently tested with 7.4 <= PG <= 8.3beta) and with any
43 | 'standard_conforming_strings' server parameter value.
44 | The tests also check that no warning is raised ('escape_string_warning'
45 | should be on).
46 |
47 | http://www.postgresql.org/docs/current/static/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS
48 | http://www.postgresql.org/docs/current/static/runtime-config-compatible.html
49 | """
50 | def test_string(self):
51 | data = """some data with \t chars
52 | to escape into, 'quotes' and \\ a backslash too.
53 | """
54 | data += "".join(map(chr, range(1,127)))
55 |
56 | curs = self.conn.cursor()
57 | curs.execute("SELECT %s;", (data,))
58 | res = curs.fetchone()[0]
59 |
60 | self.assertEqual(res, data)
61 | self.assert_(not self.conn.notices)
62 |
63 | def test_binary(self):
64 | data = b("""some data with \000\013 binary
65 | stuff into, 'quotes' and \\ a backslash too.
66 | """)
67 | if sys.version_info[0] < 3:
68 | data += "".join(map(chr, range(256)))
69 | else:
70 | data += bytes(range(256))
71 |
72 | curs = self.conn.cursor()
73 | curs.execute("SELECT %s::bytea;", (psycopg2.Binary(data),))
74 | if sys.version_info[0] < 3:
75 | res = str(curs.fetchone()[0])
76 | else:
77 | res = curs.fetchone()[0].tobytes()
78 |
79 | if res[0] in (b('x'), ord(b('x'))) and self.conn.server_version >= 90000:
80 | return self.skipTest(
81 | "bytea broken with server >= 9.0, libpq < 9")
82 |
83 | self.assertEqual(res, data)
84 | self.assert_(not self.conn.notices)
85 |
86 | def test_unicode(self):
87 | curs = self.conn.cursor()
88 | curs.execute("SHOW server_encoding")
89 | server_encoding = curs.fetchone()[0]
90 | if server_encoding != "UTF8":
91 | return self.skipTest(
92 | "Unicode test skipped since server encoding is %s"
93 | % server_encoding)
94 |
95 | data = u"""some data with \t chars
96 | to escape into, 'quotes', \u20ac euro sign and \\ a backslash too.
97 | """
98 | data += u"".join(map(unichr, [ u for u in range(1,65536)
99 | if not 0xD800 <= u <= 0xDFFF ])) # surrogate area
100 | self.conn.set_client_encoding('UNICODE')
101 |
102 | psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn)
103 | curs.execute("SELECT %s::text;", (data,))
104 | res = curs.fetchone()[0]
105 |
106 | self.assertEqual(res, data)
107 | self.assert_(not self.conn.notices)
108 |
109 | def test_latin1(self):
110 | self.conn.set_client_encoding('LATIN1')
111 | curs = self.conn.cursor()
112 | if sys.version_info[0] < 3:
113 | data = ''.join(map(chr, range(32, 127) + range(160, 256)))
114 | else:
115 | data = bytes(range(32, 127) + range(160, 256)).decode('latin1')
116 |
117 | # as string
118 | curs.execute("SELECT %s::text;", (data,))
119 | res = curs.fetchone()[0]
120 | self.assertEqual(res, data)
121 | self.assert_(not self.conn.notices)
122 |
123 | # as unicode
124 | if sys.version_info[0] < 3:
125 | psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn)
126 | data = data.decode('latin1')
127 |
128 | curs.execute("SELECT %s::text;", (data,))
129 | res = curs.fetchone()[0]
130 | self.assertEqual(res, data)
131 | self.assert_(not self.conn.notices)
132 |
133 | def test_koi8(self):
134 | self.conn.set_client_encoding('KOI8')
135 | curs = self.conn.cursor()
136 | if sys.version_info[0] < 3:
137 | data = ''.join(map(chr, range(32, 127) + range(128, 256)))
138 | else:
139 | data = bytes(range(32, 127) + range(128, 256)).decode('koi8_r')
140 |
141 | # as string
142 | curs.execute("SELECT %s::text;", (data,))
143 | res = curs.fetchone()[0]
144 | self.assertEqual(res, data)
145 | self.assert_(not self.conn.notices)
146 |
147 | # as unicode
148 | if sys.version_info[0] < 3:
149 | psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn)
150 | data = data.decode('koi8_r')
151 |
152 | curs.execute("SELECT %s::text;", (data,))
153 | res = curs.fetchone()[0]
154 | self.assertEqual(res, data)
155 | self.assert_(not self.conn.notices)
156 |
157 |
158 | class TestQuotedString(ConnectingTestCase):
159 | def test_encoding(self):
160 | q = psycopg2.extensions.QuotedString('hi')
161 | self.assertEqual(q.encoding, 'latin1')
162 |
163 | self.conn.set_client_encoding('utf_8')
164 | q.prepare(self.conn)
165 | self.assertEqual(q.encoding, 'utf_8')
166 |
167 |
168 | def test_suite():
169 | return unittest.TestLoader().loadTestsFromName(__name__)
170 |
171 | if __name__ == "__main__":
172 | unittest.main()
173 |
174 |
--------------------------------------------------------------------------------
/simplejson/tests/test_fail.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from unittest import TestCase
3 |
4 | import simplejson as json
5 |
6 | # 2007-10-05
7 | JSONDOCS = [
8 | # http://json.org/JSON_checker/test/fail1.json
9 | '"A JSON payload should be an object or array, not a string."',
10 | # http://json.org/JSON_checker/test/fail2.json
11 | '["Unclosed array"',
12 | # http://json.org/JSON_checker/test/fail3.json
13 | '{unquoted_key: "keys must be quoted"}',
14 | # http://json.org/JSON_checker/test/fail4.json
15 | '["extra comma",]',
16 | # http://json.org/JSON_checker/test/fail5.json
17 | '["double extra comma",,]',
18 | # http://json.org/JSON_checker/test/fail6.json
19 | '[ , "<-- missing value"]',
20 | # http://json.org/JSON_checker/test/fail7.json
21 | '["Comma after the close"],',
22 | # http://json.org/JSON_checker/test/fail8.json
23 | '["Extra close"]]',
24 | # http://json.org/JSON_checker/test/fail9.json
25 | '{"Extra comma": true,}',
26 | # http://json.org/JSON_checker/test/fail10.json
27 | '{"Extra value after close": true} "misplaced quoted value"',
28 | # http://json.org/JSON_checker/test/fail11.json
29 | '{"Illegal expression": 1 + 2}',
30 | # http://json.org/JSON_checker/test/fail12.json
31 | '{"Illegal invocation": alert()}',
32 | # http://json.org/JSON_checker/test/fail13.json
33 | '{"Numbers cannot have leading zeroes": 013}',
34 | # http://json.org/JSON_checker/test/fail14.json
35 | '{"Numbers cannot be hex": 0x14}',
36 | # http://json.org/JSON_checker/test/fail15.json
37 | '["Illegal backslash escape: \\x15"]',
38 | # http://json.org/JSON_checker/test/fail16.json
39 | '[\\naked]',
40 | # http://json.org/JSON_checker/test/fail17.json
41 | '["Illegal backslash escape: \\017"]',
42 | # http://json.org/JSON_checker/test/fail18.json
43 | '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
44 | # http://json.org/JSON_checker/test/fail19.json
45 | '{"Missing colon" null}',
46 | # http://json.org/JSON_checker/test/fail20.json
47 | '{"Double colon":: null}',
48 | # http://json.org/JSON_checker/test/fail21.json
49 | '{"Comma instead of colon", null}',
50 | # http://json.org/JSON_checker/test/fail22.json
51 | '["Colon instead of comma": false]',
52 | # http://json.org/JSON_checker/test/fail23.json
53 | '["Bad value", truth]',
54 | # http://json.org/JSON_checker/test/fail24.json
55 | "['single quote']",
56 | # http://json.org/JSON_checker/test/fail25.json
57 | '["\ttab\tcharacter\tin\tstring\t"]',
58 | # http://json.org/JSON_checker/test/fail26.json
59 | '["tab\\ character\\ in\\ string\\ "]',
60 | # http://json.org/JSON_checker/test/fail27.json
61 | '["line\nbreak"]',
62 | # http://json.org/JSON_checker/test/fail28.json
63 | '["line\\\nbreak"]',
64 | # http://json.org/JSON_checker/test/fail29.json
65 | '[0e]',
66 | # http://json.org/JSON_checker/test/fail30.json
67 | '[0e+]',
68 | # http://json.org/JSON_checker/test/fail31.json
69 | '[0e+-1]',
70 | # http://json.org/JSON_checker/test/fail32.json
71 | '{"Comma instead if closing brace": true,',
72 | # http://json.org/JSON_checker/test/fail33.json
73 | '["mismatch"}',
74 | # http://code.google.com/p/simplejson/issues/detail?id=3
75 | u'["A\u001FZ control characters in string"]',
76 | # misc based on coverage
77 | '{',
78 | '{]',
79 | '{"foo": "bar"]',
80 | '{"foo": "bar"',
81 | 'nul',
82 | 'nulx',
83 | '-',
84 | '-x',
85 | '-e',
86 | '-e0',
87 | '-Infinite',
88 | '-Inf',
89 | 'Infinit',
90 | 'Infinite',
91 | 'NaM',
92 | 'NuN',
93 | 'falsy',
94 | 'fal',
95 | 'trug',
96 | 'tru',
97 | '1e',
98 | '1ex',
99 | '1e-',
100 | '1e-x',
101 | ]
102 |
103 | SKIPS = {
104 | 1: "why not have a string payload?",
105 | 18: "spec doesn't specify any nesting limitations",
106 | }
107 |
108 | class TestFail(TestCase):
109 | def test_failures(self):
110 | for idx, doc in enumerate(JSONDOCS):
111 | idx = idx + 1
112 | if idx in SKIPS:
113 | json.loads(doc)
114 | continue
115 | try:
116 | json.loads(doc)
117 | except json.JSONDecodeError:
118 | pass
119 | else:
120 | self.fail("Expected failure for fail%d.json: %r" % (idx, doc))
121 |
122 | def test_array_decoder_issue46(self):
123 | # http://code.google.com/p/simplejson/issues/detail?id=46
124 | for doc in [u'[,]', '[,]']:
125 | try:
126 | json.loads(doc)
127 | except json.JSONDecodeError:
128 | e = sys.exc_info()[1]
129 | self.assertEqual(e.pos, 1)
130 | self.assertEqual(e.lineno, 1)
131 | self.assertEqual(e.colno, 2)
132 | except Exception:
133 | e = sys.exc_info()[1]
134 | self.fail("Unexpected exception raised %r %s" % (e, e))
135 | else:
136 | self.fail("Unexpected success parsing '[,]'")
137 |
138 | def test_truncated_input(self):
139 | test_cases = [
140 | ('', 'Expecting value', 0),
141 | ('[', "Expecting value or ']'", 1),
142 | ('[42', "Expecting ',' delimiter", 3),
143 | ('[42,', 'Expecting value', 4),
144 | ('["', 'Unterminated string starting at', 1),
145 | ('["spam', 'Unterminated string starting at', 1),
146 | ('["spam"', "Expecting ',' delimiter", 7),
147 | ('["spam",', 'Expecting value', 8),
148 | ('{', 'Expecting property name enclosed in double quotes', 1),
149 | ('{"', 'Unterminated string starting at', 1),
150 | ('{"spam', 'Unterminated string starting at', 1),
151 | ('{"spam"', "Expecting ':' delimiter", 7),
152 | ('{"spam":', 'Expecting value', 8),
153 | ('{"spam":42', "Expecting ',' delimiter", 10),
154 | ('{"spam":42,', 'Expecting property name enclosed in double quotes',
155 | 11),
156 | ('"', 'Unterminated string starting at', 0),
157 | ('"spam', 'Unterminated string starting at', 0),
158 | ('[,', "Expecting value", 1),
159 | ]
160 | for data, msg, idx in test_cases:
161 | try:
162 | json.loads(data)
163 | except json.JSONDecodeError:
164 | e = sys.exc_info()[1]
165 | self.assertEqual(
166 | e.msg[:len(msg)],
167 | msg,
168 | "%r doesn't start with %r for %r" % (e.msg, msg, data))
169 | self.assertEqual(
170 | e.pos, idx,
171 | "pos %r != %r for %r" % (e.pos, idx, data))
172 | except Exception:
173 | e = sys.exc_info()[1]
174 | self.fail("Unexpected exception raised %r %s" % (e, e))
175 | else:
176 | self.fail("Unexpected success parsing '%r'" % (data,))
177 |
--------------------------------------------------------------------------------
/simplejson/tests/test_unicode.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import codecs
3 | from unittest import TestCase
4 |
5 | import simplejson as json
6 | from simplejson.compat import unichr, text_type, b, u, BytesIO
7 |
8 | class TestUnicode(TestCase):
9 | def test_encoding1(self):
10 | encoder = json.JSONEncoder(encoding='utf-8')
11 | u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
12 | s = u.encode('utf-8')
13 | ju = encoder.encode(u)
14 | js = encoder.encode(s)
15 | self.assertEqual(ju, js)
16 |
17 | def test_encoding2(self):
18 | u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
19 | s = u.encode('utf-8')
20 | ju = json.dumps(u, encoding='utf-8')
21 | js = json.dumps(s, encoding='utf-8')
22 | self.assertEqual(ju, js)
23 |
24 | def test_encoding3(self):
25 | u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
26 | j = json.dumps(u)
27 | self.assertEqual(j, '"\\u03b1\\u03a9"')
28 |
29 | def test_encoding4(self):
30 | u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
31 | j = json.dumps([u])
32 | self.assertEqual(j, '["\\u03b1\\u03a9"]')
33 |
34 | def test_encoding5(self):
35 | u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
36 | j = json.dumps(u, ensure_ascii=False)
37 | self.assertEqual(j, u'"' + u + u'"')
38 |
39 | def test_encoding6(self):
40 | u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
41 | j = json.dumps([u], ensure_ascii=False)
42 | self.assertEqual(j, u'["' + u + u'"]')
43 |
44 | def test_big_unicode_encode(self):
45 | u = u'\U0001d120'
46 | self.assertEqual(json.dumps(u), '"\\ud834\\udd20"')
47 | self.assertEqual(json.dumps(u, ensure_ascii=False), u'"\U0001d120"')
48 |
49 | def test_big_unicode_decode(self):
50 | u = u'z\U0001d120x'
51 | self.assertEqual(json.loads('"' + u + '"'), u)
52 | self.assertEqual(json.loads('"z\\ud834\\udd20x"'), u)
53 |
54 | def test_unicode_decode(self):
55 | for i in range(0, 0xd7ff):
56 | u = unichr(i)
57 | #s = '"\\u{0:04x}"'.format(i)
58 | s = '"\\u%04x"' % (i,)
59 | self.assertEqual(json.loads(s), u)
60 |
61 | def test_object_pairs_hook_with_unicode(self):
62 | s = u'{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
63 | p = [(u"xkd", 1), (u"kcw", 2), (u"art", 3), (u"hxm", 4),
64 | (u"qrt", 5), (u"pad", 6), (u"hoy", 7)]
65 | self.assertEqual(json.loads(s), eval(s))
66 | self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
67 | od = json.loads(s, object_pairs_hook=json.OrderedDict)
68 | self.assertEqual(od, json.OrderedDict(p))
69 | self.assertEqual(type(od), json.OrderedDict)
70 | # the object_pairs_hook takes priority over the object_hook
71 | self.assertEqual(json.loads(s,
72 | object_pairs_hook=json.OrderedDict,
73 | object_hook=lambda x: None),
74 | json.OrderedDict(p))
75 |
76 |
77 | def test_default_encoding(self):
78 | self.assertEqual(json.loads(u'{"a": "\xe9"}'.encode('utf-8')),
79 | {'a': u'\xe9'})
80 |
81 | def test_unicode_preservation(self):
82 | self.assertEqual(type(json.loads(u'""')), text_type)
83 | self.assertEqual(type(json.loads(u'"a"')), text_type)
84 | self.assertEqual(type(json.loads(u'["a"]')[0]), text_type)
85 |
86 | def test_ensure_ascii_false_returns_unicode(self):
87 | # http://code.google.com/p/simplejson/issues/detail?id=48
88 | self.assertEqual(type(json.dumps([], ensure_ascii=False)), text_type)
89 | self.assertEqual(type(json.dumps(0, ensure_ascii=False)), text_type)
90 | self.assertEqual(type(json.dumps({}, ensure_ascii=False)), text_type)
91 | self.assertEqual(type(json.dumps("", ensure_ascii=False)), text_type)
92 |
93 | def test_ensure_ascii_false_bytestring_encoding(self):
94 | # http://code.google.com/p/simplejson/issues/detail?id=48
95 | doc1 = {u'quux': b('Arr\xc3\xaat sur images')}
96 | doc2 = {u'quux': u('Arr\xeat sur images')}
97 | doc_ascii = '{"quux": "Arr\\u00eat sur images"}'
98 | doc_unicode = u'{"quux": "Arr\xeat sur images"}'
99 | self.assertEqual(json.dumps(doc1), doc_ascii)
100 | self.assertEqual(json.dumps(doc2), doc_ascii)
101 | self.assertEqual(json.dumps(doc1, ensure_ascii=False), doc_unicode)
102 | self.assertEqual(json.dumps(doc2, ensure_ascii=False), doc_unicode)
103 |
104 | def test_ensure_ascii_linebreak_encoding(self):
105 | # http://timelessrepo.com/json-isnt-a-javascript-subset
106 | s1 = u'\u2029\u2028'
107 | s2 = s1.encode('utf8')
108 | expect = '"\\u2029\\u2028"'
109 | self.assertEqual(json.dumps(s1), expect)
110 | self.assertEqual(json.dumps(s2), expect)
111 | self.assertEqual(json.dumps(s1, ensure_ascii=False), expect)
112 | self.assertEqual(json.dumps(s2, ensure_ascii=False), expect)
113 |
114 | def test_invalid_escape_sequences(self):
115 | # incomplete escape sequence
116 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\u')
117 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1')
118 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12')
119 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123')
120 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1234')
121 | # invalid escape sequence
122 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123x"')
123 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12x4"')
124 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1x34"')
125 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ux234"')
126 | if sys.maxunicode > 65535:
127 | # invalid escape sequence for low surrogate
128 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u"')
129 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0"')
130 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00"')
131 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000"')
132 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000x"')
133 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00x0"')
134 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0x00"')
135 | self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\ux000"')
136 |
137 | def test_ensure_ascii_still_works(self):
138 | # in the ascii range, ensure that everything is the same
139 | for c in map(unichr, range(0, 127)):
140 | self.assertEqual(
141 | json.dumps(c, ensure_ascii=False),
142 | json.dumps(c))
143 | snowman = u'\N{SNOWMAN}'
144 | self.assertEqual(
145 | json.dumps(c, ensure_ascii=False),
146 | '"' + c + '"')
147 |
148 | def test_strip_bom(self):
149 | content = u"\u3053\u3093\u306b\u3061\u308f"
150 | json_doc = codecs.BOM_UTF8 + b(json.dumps(content))
151 | self.assertEqual(json.load(BytesIO(json_doc)), content)
152 | for doc in json_doc, json_doc.decode('utf8'):
153 | self.assertEqual(json.loads(doc), content)
154 |
--------------------------------------------------------------------------------
/psycopg2/tests/test_with.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # test_ctxman.py - unit test for connection and cursor used as context manager
4 | #
5 | # Copyright (C) 2012 Daniele Varrazzo
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 |
26 | from __future__ import with_statement
27 |
28 | import psycopg2
29 | import psycopg2.extensions as ext
30 |
31 | from testutils import unittest, ConnectingTestCase
32 |
33 | class WithTestCase(ConnectingTestCase):
34 | def setUp(self):
35 | ConnectingTestCase.setUp(self)
36 | curs = self.conn.cursor()
37 | try:
38 | curs.execute("delete from test_with")
39 | self.conn.commit()
40 | except psycopg2.ProgrammingError:
41 | # assume table doesn't exist
42 | self.conn.rollback()
43 | curs.execute("create table test_with (id integer primary key)")
44 | self.conn.commit()
45 |
46 |
47 | class WithConnectionTestCase(WithTestCase):
48 | def test_with_ok(self):
49 | with self.conn as conn:
50 | self.assert_(self.conn is conn)
51 | self.assertEqual(conn.status, ext.STATUS_READY)
52 | curs = conn.cursor()
53 | curs.execute("insert into test_with values (1)")
54 | self.assertEqual(conn.status, ext.STATUS_BEGIN)
55 |
56 | self.assertEqual(self.conn.status, ext.STATUS_READY)
57 | self.assert_(not self.conn.closed)
58 |
59 | curs = self.conn.cursor()
60 | curs.execute("select * from test_with")
61 | self.assertEqual(curs.fetchall(), [(1,)])
62 |
63 | def test_with_connect_idiom(self):
64 | with self.connect() as conn:
65 | self.assertEqual(conn.status, ext.STATUS_READY)
66 | curs = conn.cursor()
67 | curs.execute("insert into test_with values (2)")
68 | self.assertEqual(conn.status, ext.STATUS_BEGIN)
69 |
70 | self.assertEqual(self.conn.status, ext.STATUS_READY)
71 | self.assert_(not self.conn.closed)
72 |
73 | curs = self.conn.cursor()
74 | curs.execute("select * from test_with")
75 | self.assertEqual(curs.fetchall(), [(2,)])
76 |
77 | def test_with_error_db(self):
78 | def f():
79 | with self.conn as conn:
80 | curs = conn.cursor()
81 | curs.execute("insert into test_with values ('a')")
82 |
83 | self.assertRaises(psycopg2.DataError, f)
84 | self.assertEqual(self.conn.status, ext.STATUS_READY)
85 | self.assert_(not self.conn.closed)
86 |
87 | curs = self.conn.cursor()
88 | curs.execute("select * from test_with")
89 | self.assertEqual(curs.fetchall(), [])
90 |
91 | def test_with_error_python(self):
92 | def f():
93 | with self.conn as conn:
94 | curs = conn.cursor()
95 | curs.execute("insert into test_with values (3)")
96 | 1/0
97 |
98 | self.assertRaises(ZeroDivisionError, f)
99 | self.assertEqual(self.conn.status, ext.STATUS_READY)
100 | self.assert_(not self.conn.closed)
101 |
102 | curs = self.conn.cursor()
103 | curs.execute("select * from test_with")
104 | self.assertEqual(curs.fetchall(), [])
105 |
106 | def test_with_closed(self):
107 | def f():
108 | with self.conn:
109 | pass
110 |
111 | self.conn.close()
112 | self.assertRaises(psycopg2.InterfaceError, f)
113 |
114 | def test_subclass_commit(self):
115 | commits = []
116 | class MyConn(ext.connection):
117 | def commit(self):
118 | commits.append(None)
119 | super(MyConn, self).commit()
120 |
121 | with self.connect(connection_factory=MyConn) as conn:
122 | curs = conn.cursor()
123 | curs.execute("insert into test_with values (10)")
124 |
125 | self.assertEqual(conn.status, ext.STATUS_READY)
126 | self.assert_(commits)
127 |
128 | curs = self.conn.cursor()
129 | curs.execute("select * from test_with")
130 | self.assertEqual(curs.fetchall(), [(10,)])
131 |
132 | def test_subclass_rollback(self):
133 | rollbacks = []
134 | class MyConn(ext.connection):
135 | def rollback(self):
136 | rollbacks.append(None)
137 | super(MyConn, self).rollback()
138 |
139 | try:
140 | with self.connect(connection_factory=MyConn) as conn:
141 | curs = conn.cursor()
142 | curs.execute("insert into test_with values (11)")
143 | 1/0
144 | except ZeroDivisionError:
145 | pass
146 | else:
147 | self.assert_("exception not raised")
148 |
149 | self.assertEqual(conn.status, ext.STATUS_READY)
150 | self.assert_(rollbacks)
151 |
152 | curs = conn.cursor()
153 | curs.execute("select * from test_with")
154 | self.assertEqual(curs.fetchall(), [])
155 |
156 |
157 | class WithCursorTestCase(WithTestCase):
158 | def test_with_ok(self):
159 | with self.conn as conn:
160 | with conn.cursor() as curs:
161 | curs.execute("insert into test_with values (4)")
162 | self.assert_(not curs.closed)
163 | self.assertEqual(self.conn.status, ext.STATUS_BEGIN)
164 | self.assert_(curs.closed)
165 |
166 | self.assertEqual(self.conn.status, ext.STATUS_READY)
167 | self.assert_(not self.conn.closed)
168 |
169 | curs = self.conn.cursor()
170 | curs.execute("select * from test_with")
171 | self.assertEqual(curs.fetchall(), [(4,)])
172 |
173 | def test_with_error(self):
174 | try:
175 | with self.conn as conn:
176 | with conn.cursor() as curs:
177 | curs.execute("insert into test_with values (5)")
178 | 1/0
179 | except ZeroDivisionError:
180 | pass
181 |
182 | self.assertEqual(self.conn.status, ext.STATUS_READY)
183 | self.assert_(not self.conn.closed)
184 | self.assert_(curs.closed)
185 |
186 | curs = self.conn.cursor()
187 | curs.execute("select * from test_with")
188 | self.assertEqual(curs.fetchall(), [])
189 |
190 | def test_subclass(self):
191 | closes = []
192 | class MyCurs(ext.cursor):
193 | def close(self):
194 | closes.append(None)
195 | super(MyCurs, self).close()
196 |
197 | with self.conn.cursor(cursor_factory=MyCurs) as curs:
198 | self.assert_(isinstance(curs, MyCurs))
199 |
200 | self.assert_(curs.closed)
201 | self.assert_(closes)
202 |
203 |
204 | def test_suite():
205 | return unittest.TestLoader().loadTestsFromName(__name__)
206 |
207 | if __name__ == "__main__":
208 | unittest.main()
209 |
--------------------------------------------------------------------------------
/psycopg2/tests/test_notify.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # test_notify.py - unit test for async notifications
4 | #
5 | # Copyright (C) 2010-2011 Daniele Varrazzo
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 | from testutils import unittest
26 |
27 | import psycopg2
28 | from psycopg2 import extensions
29 | from testutils import ConnectingTestCase, script_to_py3
30 | from testconfig import dsn
31 |
32 | import sys
33 | import time
34 | import select
35 | from subprocess import Popen, PIPE
36 |
37 |
38 | class NotifiesTests(ConnectingTestCase):
39 |
40 | def autocommit(self, conn):
41 | """Set a connection in autocommit mode."""
42 | conn.set_isolation_level(extensions.ISOLATION_LEVEL_AUTOCOMMIT)
43 |
44 | def listen(self, name):
45 | """Start listening for a name on self.conn."""
46 | curs = self.conn.cursor()
47 | curs.execute("LISTEN " + name)
48 | curs.close()
49 |
50 | def notify(self, name, sec=0, payload=None):
51 | """Send a notification to the database, eventually after some time."""
52 | if payload is None:
53 | payload = ''
54 | else:
55 | payload = ", %r" % payload
56 |
57 | script = ("""\
58 | import time
59 | time.sleep(%(sec)s)
60 | import psycopg2
61 | import psycopg2.extensions
62 | conn = psycopg2.connect(%(dsn)r)
63 | conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
64 | print conn.get_backend_pid()
65 | curs = conn.cursor()
66 | curs.execute("NOTIFY " %(name)r %(payload)r)
67 | curs.close()
68 | conn.close()
69 | """
70 | % { 'dsn': dsn, 'sec': sec, 'name': name, 'payload': payload})
71 |
72 | return Popen([sys.executable, '-c', script_to_py3(script)], stdout=PIPE)
73 |
74 | def test_notifies_received_on_poll(self):
75 | self.autocommit(self.conn)
76 | self.listen('foo')
77 |
78 | proc = self.notify('foo', 1)
79 |
80 | t0 = time.time()
81 | ready = select.select([self.conn], [], [], 5)
82 | t1 = time.time()
83 | self.assert_(0.99 < t1 - t0 < 4, t1 - t0)
84 |
85 | pid = int(proc.communicate()[0])
86 | self.assertEqual(0, len(self.conn.notifies))
87 | self.assertEqual(extensions.POLL_OK, self.conn.poll())
88 | self.assertEqual(1, len(self.conn.notifies))
89 | self.assertEqual(pid, self.conn.notifies[0][0])
90 | self.assertEqual('foo', self.conn.notifies[0][1])
91 |
92 | def test_many_notifies(self):
93 | self.autocommit(self.conn)
94 | for name in ['foo', 'bar', 'baz']:
95 | self.listen(name)
96 |
97 | pids = {}
98 | for name in ['foo', 'bar', 'baz', 'qux']:
99 | pids[name] = int(self.notify(name).communicate()[0])
100 |
101 | self.assertEqual(0, len(self.conn.notifies))
102 | for i in range(10):
103 | self.assertEqual(extensions.POLL_OK, self.conn.poll())
104 | self.assertEqual(3, len(self.conn.notifies))
105 |
106 | names = dict.fromkeys(['foo', 'bar', 'baz'])
107 | for (pid, name) in self.conn.notifies:
108 | self.assertEqual(pids[name], pid)
109 | names.pop(name) # raise if name found twice
110 |
111 | def test_notifies_received_on_execute(self):
112 | self.autocommit(self.conn)
113 | self.listen('foo')
114 | pid = int(self.notify('foo').communicate()[0])
115 | self.assertEqual(0, len(self.conn.notifies))
116 | self.conn.cursor().execute('select 1;')
117 | self.assertEqual(1, len(self.conn.notifies))
118 | self.assertEqual(pid, self.conn.notifies[0][0])
119 | self.assertEqual('foo', self.conn.notifies[0][1])
120 |
121 | def test_notify_object(self):
122 | self.autocommit(self.conn)
123 | self.listen('foo')
124 | self.notify('foo').communicate()
125 | time.sleep(0.5)
126 | self.conn.poll()
127 | notify = self.conn.notifies[0]
128 | self.assert_(isinstance(notify, psycopg2.extensions.Notify))
129 |
130 | def test_notify_attributes(self):
131 | self.autocommit(self.conn)
132 | self.listen('foo')
133 | pid = int(self.notify('foo').communicate()[0])
134 | time.sleep(0.5)
135 | self.conn.poll()
136 | self.assertEqual(1, len(self.conn.notifies))
137 | notify = self.conn.notifies[0]
138 | self.assertEqual(pid, notify.pid)
139 | self.assertEqual('foo', notify.channel)
140 | self.assertEqual('', notify.payload)
141 |
142 | def test_notify_payload(self):
143 | if self.conn.server_version < 90000:
144 | return self.skipTest("server version %s doesn't support notify payload"
145 | % self.conn.server_version)
146 | self.autocommit(self.conn)
147 | self.listen('foo')
148 | pid = int(self.notify('foo', payload="Hello, world!").communicate()[0])
149 | time.sleep(0.5)
150 | self.conn.poll()
151 | self.assertEqual(1, len(self.conn.notifies))
152 | notify = self.conn.notifies[0]
153 | self.assertEqual(pid, notify.pid)
154 | self.assertEqual('foo', notify.channel)
155 | self.assertEqual('Hello, world!', notify.payload)
156 |
157 | def test_notify_init(self):
158 | n = psycopg2.extensions.Notify(10, 'foo')
159 | self.assertEqual(10, n.pid)
160 | self.assertEqual('foo', n.channel)
161 | self.assertEqual('', n.payload)
162 | (pid, channel) = n
163 | self.assertEqual((pid, channel), (10, 'foo'))
164 |
165 | n = psycopg2.extensions.Notify(42, 'bar', 'baz')
166 | self.assertEqual(42, n.pid)
167 | self.assertEqual('bar', n.channel)
168 | self.assertEqual('baz', n.payload)
169 | (pid, channel) = n
170 | self.assertEqual((pid, channel), (42, 'bar'))
171 |
172 | def test_compare(self):
173 | data = [(10, 'foo'), (20, 'foo'), (10, 'foo', 'bar'), (10, 'foo', 'baz')]
174 | for d1 in data:
175 | for d2 in data:
176 | n1 = psycopg2.extensions.Notify(*d1)
177 | n2 = psycopg2.extensions.Notify(*d2)
178 | self.assertEqual((n1 == n2), (d1 == d2))
179 | self.assertEqual((n1 != n2), (d1 != d2))
180 |
181 | def test_compare_tuple(self):
182 | from psycopg2.extensions import Notify
183 | self.assertEqual((10, 'foo'), Notify(10, 'foo'))
184 | self.assertEqual((10, 'foo'), Notify(10, 'foo', 'bar'))
185 | self.assertNotEqual((10, 'foo'), Notify(20, 'foo'))
186 | self.assertNotEqual((10, 'foo'), Notify(10, 'bar'))
187 |
188 | def test_hash(self):
189 | from psycopg2.extensions import Notify
190 | self.assertEqual(hash((10, 'foo')), hash(Notify(10, 'foo')))
191 | self.assertNotEqual(hash(Notify(10, 'foo', 'bar')),
192 | hash(Notify(10, 'foo')))
193 |
194 | def test_suite():
195 | return unittest.TestLoader().loadTestsFromName(__name__)
196 |
197 | if __name__ == "__main__":
198 | unittest.main()
199 |
200 |
--------------------------------------------------------------------------------
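
NotifiesTests above drives the LISTEN/NOTIFY round trip from a helper subprocess. A minimal sketch of the same flow in a single process, assuming a reachable database (the DSN and channel name are hypothetical): select.select() waits until the backend has a pending notification, and conn.poll() moves it into conn.notifies.

import select

import psycopg2
import psycopg2.extensions

conn = psycopg2.connect("dbname=test")          # hypothetical DSN
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

curs = conn.cursor()
curs.execute("LISTEN foo")                      # hypothetical channel

# block for up to 5 seconds waiting for a NOTIFY sent from another session
if select.select([conn], [], [], 5) != ([], [], []):
    conn.poll()
    while conn.notifies:
        notify = conn.notifies.pop()
        print notify.pid, notify.channel, notify.payload
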
/simplejson/tests/test_scanstring.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from unittest import TestCase
3 |
4 | import simplejson as json
5 | import simplejson.decoder
6 | from simplejson.compat import b, PY3
7 |
8 | class TestScanString(TestCase):
9 | # The bytes type is intentionally not used in most of these tests
10 | # under Python 3 because the decoder immediately coerces to str before
11 | # calling scanstring. In Python 2 we are testing the code paths
12 | # for both unicode and str.
13 | #
14 |     # This is because Python 3 would require entirely different
15 |     # code paths for parsing bytes and str.
16 | #
17 | def test_py_scanstring(self):
18 | self._test_scanstring(simplejson.decoder.py_scanstring)
19 |
20 | def test_c_scanstring(self):
21 | if not simplejson.decoder.c_scanstring:
22 | return
23 | self._test_scanstring(simplejson.decoder.c_scanstring)
24 |
25 | def _test_scanstring(self, scanstring):
26 | if sys.maxunicode == 65535:
27 | self.assertEqual(
28 | scanstring(u'"z\U0001d120x"', 1, None, True),
29 | (u'z\U0001d120x', 6))
30 | else:
31 | self.assertEqual(
32 | scanstring(u'"z\U0001d120x"', 1, None, True),
33 | (u'z\U0001d120x', 5))
34 |
35 | self.assertEqual(
36 | scanstring('"\\u007b"', 1, None, True),
37 | (u'{', 8))
38 |
39 | self.assertEqual(
40 | scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True),
41 | (u'A JSON payload should be an object or array, not a string.', 60))
42 |
43 | self.assertEqual(
44 | scanstring('["Unclosed array"', 2, None, True),
45 | (u'Unclosed array', 17))
46 |
47 | self.assertEqual(
48 | scanstring('["extra comma",]', 2, None, True),
49 | (u'extra comma', 14))
50 |
51 | self.assertEqual(
52 | scanstring('["double extra comma",,]', 2, None, True),
53 | (u'double extra comma', 21))
54 |
55 | self.assertEqual(
56 | scanstring('["Comma after the close"],', 2, None, True),
57 | (u'Comma after the close', 24))
58 |
59 | self.assertEqual(
60 | scanstring('["Extra close"]]', 2, None, True),
61 | (u'Extra close', 14))
62 |
63 | self.assertEqual(
64 | scanstring('{"Extra comma": true,}', 2, None, True),
65 | (u'Extra comma', 14))
66 |
67 | self.assertEqual(
68 | scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True),
69 | (u'Extra value after close', 26))
70 |
71 | self.assertEqual(
72 | scanstring('{"Illegal expression": 1 + 2}', 2, None, True),
73 | (u'Illegal expression', 21))
74 |
75 | self.assertEqual(
76 | scanstring('{"Illegal invocation": alert()}', 2, None, True),
77 | (u'Illegal invocation', 21))
78 |
79 | self.assertEqual(
80 | scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True),
81 | (u'Numbers cannot have leading zeroes', 37))
82 |
83 | self.assertEqual(
84 | scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True),
85 | (u'Numbers cannot be hex', 24))
86 |
87 | self.assertEqual(
88 | scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True),
89 | (u'Too deep', 30))
90 |
91 | self.assertEqual(
92 | scanstring('{"Missing colon" null}', 2, None, True),
93 | (u'Missing colon', 16))
94 |
95 | self.assertEqual(
96 | scanstring('{"Double colon":: null}', 2, None, True),
97 | (u'Double colon', 15))
98 |
99 | self.assertEqual(
100 | scanstring('{"Comma instead of colon", null}', 2, None, True),
101 | (u'Comma instead of colon', 25))
102 |
103 | self.assertEqual(
104 | scanstring('["Colon instead of comma": false]', 2, None, True),
105 | (u'Colon instead of comma', 25))
106 |
107 | self.assertEqual(
108 | scanstring('["Bad value", truth]', 2, None, True),
109 | (u'Bad value', 12))
110 |
111 | for c in map(chr, range(0x00, 0x1f)):
112 | self.assertEqual(
113 | scanstring(c + '"', 0, None, False),
114 | (c, 2))
115 | self.assertRaises(
116 | ValueError,
117 | scanstring, c + '"', 0, None, True)
118 |
119 | self.assertRaises(ValueError, scanstring, '', 0, None, True)
120 | self.assertRaises(ValueError, scanstring, 'a', 0, None, True)
121 | self.assertRaises(ValueError, scanstring, '\\', 0, None, True)
122 | self.assertRaises(ValueError, scanstring, '\\u', 0, None, True)
123 | self.assertRaises(ValueError, scanstring, '\\u0', 0, None, True)
124 | self.assertRaises(ValueError, scanstring, '\\u01', 0, None, True)
125 | self.assertRaises(ValueError, scanstring, '\\u012', 0, None, True)
126 | self.assertRaises(ValueError, scanstring, '\\u0123', 0, None, True)
127 | if sys.maxunicode > 65535:
128 | self.assertRaises(ValueError,
129 | scanstring, '\\ud834\\u"', 0, None, True)
130 | self.assertRaises(ValueError,
131 | scanstring, '\\ud834\\x0123"', 0, None, True)
132 |
133 | def test_issue3623(self):
134 | self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1,
135 | "xxx")
136 | self.assertRaises(UnicodeDecodeError,
137 | json.encoder.encode_basestring_ascii, b("xx\xff"))
138 |
139 | def test_overflow(self):
140 | # Python 2.5 does not have maxsize, Python 3 does not have maxint
141 | maxsize = getattr(sys, 'maxsize', getattr(sys, 'maxint', None))
142 | assert maxsize is not None
143 | self.assertRaises(OverflowError, json.decoder.scanstring, "xxx",
144 | maxsize + 1)
145 |
146 | def test_surrogates(self):
147 | scanstring = json.decoder.scanstring
148 |
149 | def assertScan(given, expect, test_utf8=True):
150 | givens = [given]
151 | if not PY3 and test_utf8:
152 | givens.append(given.encode('utf8'))
153 | for given in givens:
154 | (res, count) = scanstring(given, 1, None, True)
155 | self.assertEqual(len(given), count)
156 | self.assertEqual(res, expect)
157 |
158 | assertScan(
159 | u'"z\\ud834\\u0079x"',
160 | u'z\ud834yx')
161 | assertScan(
162 | u'"z\\ud834\\udd20x"',
163 | u'z\U0001d120x')
164 | assertScan(
165 | u'"z\\ud834\\ud834\\udd20x"',
166 | u'z\ud834\U0001d120x')
167 | assertScan(
168 | u'"z\\ud834x"',
169 | u'z\ud834x')
170 | assertScan(
171 | u'"z\\udd20x"',
172 | u'z\udd20x')
173 | assertScan(
174 | u'"z\ud834x"',
175 | u'z\ud834x')
176 | # It may look strange to join strings together, but Python is drunk.
177 | # https://gist.github.com/etrepum/5538443
178 | assertScan(
179 | u'"z\\ud834\udd20x12345"',
180 | u''.join([u'z\ud834', u'\udd20x12345']))
181 | assertScan(
182 | u'"z\ud834\\udd20x"',
183 | u''.join([u'z\ud834', u'\udd20x']))
184 | # these have different behavior given UTF8 input, because the surrogate
185 | # pair may be joined (in maxunicode > 65535 builds)
186 | assertScan(
187 | u''.join([u'"z\ud834', u'\udd20x"']),
188 | u''.join([u'z\ud834', u'\udd20x']),
189 | test_utf8=False)
190 |
191 | self.assertRaises(ValueError,
192 | scanstring, u'"z\\ud83x"', 1, None, True)
193 | self.assertRaises(ValueError,
194 | scanstring, u'"z\\ud834\\udd2x"', 1, None, True)
195 |
--------------------------------------------------------------------------------
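
For reference, the scanstring contract checked above: the function is called with the source text and the index just past the opening quote, and returns the decoded string together with the index one past the closing quote. A small sketch (expected output shown as a comment):

import simplejson.decoder

# arguments: (source, index after the opening quote, encoding, strict)
print simplejson.decoder.scanstring('"hello"', 1, None, True)
# -> (u'hello', 7)
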
/psycopg2/_json.py:
--------------------------------------------------------------------------------
1 | """Implementation of the JSON adaptation objects
2 |
3 | This module exists to avoid a circular import problem: psycopg2.extras depends
4 | on psycopg2.extensions, so I can't create the default JSON typecasters in
5 | extensions by importing register_json from extras.
6 | """
7 |
8 | # psycopg/_json.py - Implementation of the JSON adaptation objects
9 | #
10 | # Copyright (C) 2012 Daniele Varrazzo
11 | #
12 | # psycopg2 is free software: you can redistribute it and/or modify it
13 | # under the terms of the GNU Lesser General Public License as published
14 | # by the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # In addition, as a special exception, the copyright holders give
18 | # permission to link this program with the OpenSSL library (or with
19 | # modified versions of OpenSSL that use the same license as OpenSSL),
20 | # and distribute linked combinations including the two.
21 | #
22 | # You must obey the GNU Lesser General Public License in all respects for
23 | # all of the code used other than OpenSSL.
24 | #
25 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
26 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
27 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
28 | # License for more details.
29 |
30 | import sys
31 |
32 | from psycopg2._psycopg import ISQLQuote, QuotedString
33 | from psycopg2._psycopg import new_type, new_array_type, register_type
34 |
35 |
36 | # import the best json implementation available
37 | if sys.version_info[:2] >= (2,6):
38 | import json
39 | else:
40 | try:
41 | import simplejson as json
42 | except ImportError:
43 | json = None
44 |
45 |
46 | # oids from PostgreSQL 9.2
47 | JSON_OID = 114
48 | JSONARRAY_OID = 199
49 |
50 | # oids from PostgreSQL 9.4
51 | JSONB_OID = 3802
52 | JSONBARRAY_OID = 3807
53 |
54 | class Json(object):
55 | """
56 | An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
57 | :sql:`json` data type.
58 |
59 | `!Json` can be used to wrap any object supported by the provided *dumps*
60 | function. If none is provided, the standard :py:func:`json.dumps()` is
61 | used (`!simplejson` for Python < 2.6;
62 | `~psycopg2.extensions.ISQLQuote.getquoted()` will raise `!ImportError` if
63 | the module is not available).
64 |
65 | """
66 | def __init__(self, adapted, dumps=None):
67 | self.adapted = adapted
68 |
69 | if dumps is not None:
70 | self._dumps = dumps
71 | elif json is not None:
72 | self._dumps = json.dumps
73 | else:
74 | self._dumps = None
75 |
76 | def __conform__(self, proto):
77 | if proto is ISQLQuote:
78 | return self
79 |
80 | def dumps(self, obj):
81 | """Serialize *obj* in JSON format.
82 |
83 | The default is to call `!json.dumps()` or the *dumps* function
84 | provided in the constructor. You can override this method to create a
85 | customized JSON wrapper.
86 | """
87 | dumps = self._dumps
88 | if dumps is not None:
89 | return dumps(obj)
90 | else:
91 | raise ImportError(
92 | "json module not available: "
93 | "you should provide a dumps function")
94 |
95 | def getquoted(self):
96 | s = self.dumps(self.adapted)
97 | return QuotedString(s).getquoted()
98 |
99 |
100 | def register_json(conn_or_curs=None, globally=False, loads=None,
101 | oid=None, array_oid=None, name='json'):
102 | """Create and register typecasters converting :sql:`json` type to Python objects.
103 |
104 | :param conn_or_curs: a connection or cursor used to find the :sql:`json`
105 | and :sql:`json[]` oids; the typecasters are registered in a scope
106 | limited to this object, unless *globally* is set to `!True`. It can be
107 | `!None` if the oids are provided
108 | :param globally: if `!False` register the typecasters only on
109 | *conn_or_curs*, otherwise register them globally
110 | :param loads: the function used to parse the data into a Python object. If
111 | `!None` use `!json.loads()`, where `!json` is the module chosen
112 | according to the Python version (see above)
113 |     :param oid: the OID of the :sql:`json` type if known; if not, it will be
114 | queried on *conn_or_curs*
115 | :param array_oid: the OID of the :sql:`json[]` array type if known;
116 | if not, it will be queried on *conn_or_curs*
117 | :param name: the name of the data type to look for in *conn_or_curs*
118 |
119 | The connection or cursor passed to the function will be used to query the
120 | database and look for the OID of the :sql:`json` type (or an alternative
121 |     type if *name* is provided). No query is performed if *oid* and *array_oid*
122 |     are provided. Raises `~psycopg2.ProgrammingError` if the type is not found.
123 |
124 | """
125 | if oid is None:
126 | oid, array_oid = _get_json_oids(conn_or_curs, name)
127 |
128 | JSON, JSONARRAY = _create_json_typecasters(
129 | oid, array_oid, loads=loads, name=name.upper())
130 |
131 | register_type(JSON, not globally and conn_or_curs or None)
132 |
133 | if JSONARRAY is not None:
134 | register_type(JSONARRAY, not globally and conn_or_curs or None)
135 |
136 | return JSON, JSONARRAY
137 |
138 | def register_default_json(conn_or_curs=None, globally=False, loads=None):
139 | """
140 | Create and register :sql:`json` typecasters for PostgreSQL 9.2 and following.
141 |
142 |     Since PostgreSQL 9.2, :sql:`json` is a builtin type, so its oid is known
143 | and fixed. This function allows specifying a customized *loads* function
144 | for the default :sql:`json` type without querying the database.
145 |     All the parameters have the same meaning as in `register_json()`.
146 | """
147 | return register_json(conn_or_curs=conn_or_curs, globally=globally,
148 | loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID)
149 |
150 | def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
151 | """
152 | Create and register :sql:`jsonb` typecasters for PostgreSQL 9.4 and following.
153 |
154 |     As in `register_default_json()`, the function allows registering a
155 |     customized *loads* function for the :sql:`jsonb` type at its known oid for
156 |     PostgreSQL 9.4 and following versions. All the parameters have the same
157 |     meaning as in `register_json()`.
158 | """
159 | return register_json(conn_or_curs=conn_or_curs, globally=globally,
160 | loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name='jsonb')
161 |
162 | def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
163 | """Create typecasters for json data type."""
164 | if loads is None:
165 | if json is None:
166 | raise ImportError("no json module available")
167 | else:
168 | loads = json.loads
169 |
170 | def typecast_json(s, cur):
171 | if s is None:
172 | return None
173 | return loads(s)
174 |
175 | JSON = new_type((oid, ), name, typecast_json)
176 | if array_oid is not None:
177 | JSONARRAY = new_array_type((array_oid, ), "%sARRAY" % name, JSON)
178 | else:
179 | JSONARRAY = None
180 |
181 | return JSON, JSONARRAY
182 |
183 | def _get_json_oids(conn_or_curs, name='json'):
184 | # lazy imports
185 | from psycopg2.extensions import STATUS_IN_TRANSACTION
186 | from psycopg2.extras import _solve_conn_curs
187 |
188 | conn, curs = _solve_conn_curs(conn_or_curs)
189 |
190 | # Store the transaction status of the connection to revert it after use
191 | conn_status = conn.status
192 |
193 | # column typarray not available before PG 8.3
194 | typarray = conn.server_version >= 80300 and "typarray" or "NULL"
195 |
196 |     # get the oid of the requested json type
197 | curs.execute(
198 | "SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;"
199 | % typarray, (name,))
200 | r = curs.fetchone()
201 |
202 | # revert the status of the connection as before the command
203 | if (conn_status != STATUS_IN_TRANSACTION and not conn.autocommit):
204 | conn.rollback()
205 |
206 | if not r:
207 | raise conn.ProgrammingError("%s data type not found" % name)
208 |
209 | return r
210 |
211 |
212 |
213 |
--------------------------------------------------------------------------------
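
A minimal sketch of how the adapters defined above are typically used, assuming a reachable database and a table events with a json column props (DSN, table and column names are hypothetical). Json wraps a Python object for insertion, and register_default_json() attaches a typecaster so json columns come back as Python objects; both names are re-exported by psycopg2.extras.

import psycopg2
from psycopg2.extras import Json, register_default_json

conn = psycopg2.connect("dbname=test")          # hypothetical DSN
register_default_json(conn)                     # json values are parsed on fetch

curs = conn.cursor()
curs.execute("insert into events (props) values (%s)",
             [Json({'distinct_id': 'abc', 'count': 3})])
conn.commit()

curs.execute("select props from events")
print curs.fetchone()[0]['count']               # -> 3
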
/psycopg2/pool.py:
--------------------------------------------------------------------------------
1 | """Connection pooling for psycopg2
2 |
3 | This module implements thread-safe (and not) connection pools.
4 | """
5 | # psycopg/pool.py - pooling code for psycopg
6 | #
7 | # Copyright (C) 2003-2010 Federico Di Gregorio
8 | #
9 | # psycopg2 is free software: you can redistribute it and/or modify it
10 | # under the terms of the GNU Lesser General Public License as published
11 | # by the Free Software Foundation, either version 3 of the License, or
12 | # (at your option) any later version.
13 | #
14 | # In addition, as a special exception, the copyright holders give
15 | # permission to link this program with the OpenSSL library (or with
16 | # modified versions of OpenSSL that use the same license as OpenSSL),
17 | # and distribute linked combinations including the two.
18 | #
19 | # You must obey the GNU Lesser General Public License in all respects for
20 | # all of the code used other than OpenSSL.
21 | #
22 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
24 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
25 | # License for more details.
26 |
27 | import psycopg2
28 | import psycopg2.extensions as _ext
29 |
30 |
31 | class PoolError(psycopg2.Error):
32 | pass
33 |
34 |
35 | class AbstractConnectionPool(object):
36 | """Generic key-based pooling code."""
37 |
38 | def __init__(self, minconn, maxconn, *args, **kwargs):
39 | """Initialize the connection pool.
40 |
41 |         'minconn' new connections are created immediately, calling psycopg2.connect()
42 |         with the given parameters. The connection pool will support a maximum of
43 |         about 'maxconn' connections.
44 | """
45 | self.minconn = int(minconn)
46 | self.maxconn = int(maxconn)
47 | self.closed = False
48 |
49 | self._args = args
50 | self._kwargs = kwargs
51 |
52 | self._pool = []
53 | self._used = {}
54 | self._rused = {} # id(conn) -> key map
55 | self._keys = 0
56 |
57 | for i in range(self.minconn):
58 | self._connect()
59 |
60 | def _connect(self, key=None):
61 | """Create a new connection and assign it to 'key' if not None."""
62 | conn = psycopg2.connect(*self._args, **self._kwargs)
63 | if key is not None:
64 | self._used[key] = conn
65 | self._rused[id(conn)] = key
66 | else:
67 | self._pool.append(conn)
68 | return conn
69 |
70 | def _getkey(self):
71 | """Return a new unique key."""
72 | self._keys += 1
73 | return self._keys
74 |
75 | def _getconn(self, key=None):
76 | """Get a free connection and assign it to 'key' if not None."""
77 | if self.closed: raise PoolError("connection pool is closed")
78 | if key is None: key = self._getkey()
79 |
80 | if key in self._used:
81 | return self._used[key]
82 |
83 | if self._pool:
84 | self._used[key] = conn = self._pool.pop()
85 | self._rused[id(conn)] = key
86 | return conn
87 | else:
88 | if len(self._used) == self.maxconn:
89 | raise PoolError("connection pool exausted")
90 | return self._connect(key)
91 |
92 | def _putconn(self, conn, key=None, close=False):
93 | """Put away a connection."""
94 | if self.closed: raise PoolError("connection pool is closed")
95 | if key is None: key = self._rused.get(id(conn))
96 |
97 | if not key:
98 | raise PoolError("trying to put unkeyed connection")
99 |
100 | if len(self._pool) < self.minconn and not close:
101 | # Return the connection into a consistent state before putting
102 | # it back into the pool
103 | if not conn.closed:
104 | status = conn.get_transaction_status()
105 | if status == _ext.TRANSACTION_STATUS_UNKNOWN:
106 | # server connection lost
107 | conn.close()
108 | elif status != _ext.TRANSACTION_STATUS_IDLE:
109 | # connection in error or in transaction
110 | conn.rollback()
111 | self._pool.append(conn)
112 | else:
113 | # regular idle connection
114 | self._pool.append(conn)
115 | # If the connection is closed, we just discard it.
116 | else:
117 | conn.close()
118 |
119 | # here we check for the presence of key because it can happen that a
120 | # thread tries to put back a connection after a call to close
121 | if not self.closed or key in self._used:
122 | del self._used[key]
123 | del self._rused[id(conn)]
124 |
125 | def _closeall(self):
126 | """Close all connections.
127 |
128 |         Note that this can cause code to fail badly when it tries to use
129 |         an already closed connection. If you call .closeall(), make sure
130 |         your code can deal with it.
131 | """
132 | if self.closed: raise PoolError("connection pool is closed")
133 | for conn in self._pool + list(self._used.values()):
134 | try:
135 | conn.close()
136 | except:
137 | pass
138 | self.closed = True
139 |
140 |
141 | class SimpleConnectionPool(AbstractConnectionPool):
142 | """A connection pool that can't be shared across different threads."""
143 |
144 | getconn = AbstractConnectionPool._getconn
145 | putconn = AbstractConnectionPool._putconn
146 | closeall = AbstractConnectionPool._closeall
147 |
148 |
149 | class ThreadedConnectionPool(AbstractConnectionPool):
150 | """A connection pool that works with the threading module."""
151 |
152 | def __init__(self, minconn, maxconn, *args, **kwargs):
153 | """Initialize the threading lock."""
154 | import threading
155 | AbstractConnectionPool.__init__(
156 | self, minconn, maxconn, *args, **kwargs)
157 | self._lock = threading.Lock()
158 |
159 | def getconn(self, key=None):
160 | """Get a free connection and assign it to 'key' if not None."""
161 | self._lock.acquire()
162 | try:
163 | return self._getconn(key)
164 | finally:
165 | self._lock.release()
166 |
167 | def putconn(self, conn=None, key=None, close=False):
168 | """Put away an unused connection."""
169 | self._lock.acquire()
170 | try:
171 | self._putconn(conn, key, close)
172 | finally:
173 | self._lock.release()
174 |
175 | def closeall(self):
176 | """Close all connections (even the one currently in use.)"""
177 | self._lock.acquire()
178 | try:
179 | self._closeall()
180 | finally:
181 | self._lock.release()
182 |
183 |
184 | class PersistentConnectionPool(AbstractConnectionPool):
185 | """A pool that assigns persistent connections to different threads.
186 |
187 |     Note that this connection pool generates the required keys by itself,
188 |     using the current thread id. This means that until a thread puts away
189 |     a connection it will always get the same connection object from successive
190 |     `!getconn()` calls. It also means that a thread can't use more than one
191 |     connection from the pool.
192 | """
193 |
194 | def __init__(self, minconn, maxconn, *args, **kwargs):
195 | """Initialize the threading lock."""
196 | import warnings
197 | warnings.warn("deprecated: use ZPsycopgDA.pool implementation",
198 | DeprecationWarning)
199 |
200 | import threading
201 | AbstractConnectionPool.__init__(
202 | self, minconn, maxconn, *args, **kwargs)
203 | self._lock = threading.Lock()
204 |
205 |         # we'll need the thread module to determine thread ids, so we
206 |         # import it here and keep a reference in an instance variable
207 | import thread
208 | self.__thread = thread
209 |
210 | def getconn(self):
211 | """Generate thread id and return a connection."""
212 | key = self.__thread.get_ident()
213 | self._lock.acquire()
214 | try:
215 | return self._getconn(key)
216 | finally:
217 | self._lock.release()
218 |
219 | def putconn(self, conn=None, close=False):
220 | """Put away an unused connection."""
221 | key = self.__thread.get_ident()
222 | self._lock.acquire()
223 | try:
224 | if not conn: conn = self._used[key]
225 | self._putconn(conn, key, close)
226 | finally:
227 | self._lock.release()
228 |
229 | def closeall(self):
230 | """Close all connections (even the one currently in use.)"""
231 | self._lock.acquire()
232 | try:
233 | self._closeall()
234 | finally:
235 | self._lock.release()
236 |
--------------------------------------------------------------------------------
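
A minimal sketch of the pool API defined above, assuming a reachable database (the DSN is hypothetical): getconn() hands out a connection, putconn() returns it to the pool, and closeall() shuts the pool down.

from psycopg2.pool import ThreadedConnectionPool

pool = ThreadedConnectionPool(1, 5, "dbname=test")   # 1 to 5 connections, hypothetical DSN

conn = pool.getconn()
try:
    curs = conn.cursor()
    curs.execute("select 1")
    print curs.fetchone()[0]
    conn.commit()
finally:
    pool.putconn(conn)

pool.closeall()
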
/psycopg2/tests/test_transaction.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # test_transaction - unit test on transaction behaviour
4 | #
5 | # Copyright (C) 2007-2011 Federico Di Gregorio
6 | #
7 | # psycopg2 is free software: you can redistribute it and/or modify it
8 | # under the terms of the GNU Lesser General Public License as published
9 | # by the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # In addition, as a special exception, the copyright holders give
13 | # permission to link this program with the OpenSSL library (or with
14 | # modified versions of OpenSSL that use the same license as OpenSSL),
15 | # and distribute linked combinations including the two.
16 | #
17 | # You must obey the GNU Lesser General Public License in all respects for
18 | # all of the code used other than OpenSSL.
19 | #
20 | # psycopg2 is distributed in the hope that it will be useful, but WITHOUT
21 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 | # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
23 | # License for more details.
24 |
25 | import threading
26 | from testutils import unittest, ConnectingTestCase, skip_before_postgres
27 |
28 | import psycopg2
29 | from psycopg2.extensions import (
30 | ISOLATION_LEVEL_SERIALIZABLE, STATUS_BEGIN, STATUS_READY)
31 |
32 | class TransactionTests(ConnectingTestCase):
33 |
34 | def setUp(self):
35 | ConnectingTestCase.setUp(self)
36 | self.conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
37 | curs = self.conn.cursor()
38 | curs.execute('''
39 | CREATE TEMPORARY TABLE table1 (
40 | id int PRIMARY KEY
41 | )''')
42 |         # The constraint is set to deferrable for the test_failed_commit test
43 | curs.execute('''
44 | CREATE TEMPORARY TABLE table2 (
45 | id int PRIMARY KEY,
46 | table1_id int,
47 | CONSTRAINT table2__table1_id__fk
48 | FOREIGN KEY (table1_id) REFERENCES table1(id) DEFERRABLE)''')
49 | curs.execute('INSERT INTO table1 VALUES (1)')
50 | curs.execute('INSERT INTO table2 VALUES (1, 1)')
51 | self.conn.commit()
52 |
53 | def test_rollback(self):
54 | # Test that rollback undoes changes
55 | curs = self.conn.cursor()
56 | curs.execute('INSERT INTO table2 VALUES (2, 1)')
57 | # Rollback takes us from BEGIN state to READY state
58 | self.assertEqual(self.conn.status, STATUS_BEGIN)
59 | self.conn.rollback()
60 | self.assertEqual(self.conn.status, STATUS_READY)
61 | curs.execute('SELECT id, table1_id FROM table2 WHERE id = 2')
62 | self.assertEqual(curs.fetchall(), [])
63 |
64 | def test_commit(self):
65 | # Test that commit stores changes
66 | curs = self.conn.cursor()
67 | curs.execute('INSERT INTO table2 VALUES (2, 1)')
68 |         # Commit takes us from BEGIN state to READY state
69 | self.assertEqual(self.conn.status, STATUS_BEGIN)
70 | self.conn.commit()
71 | self.assertEqual(self.conn.status, STATUS_READY)
72 | # Now rollback and show that the new record is still there:
73 | self.conn.rollback()
74 | curs.execute('SELECT id, table1_id FROM table2 WHERE id = 2')
75 | self.assertEqual(curs.fetchall(), [(2, 1)])
76 |
77 | def test_failed_commit(self):
78 | # Test that we can recover from a failed commit.
79 | # We use a deferred constraint to cause a failure on commit.
80 | curs = self.conn.cursor()
81 | curs.execute('SET CONSTRAINTS table2__table1_id__fk DEFERRED')
82 | curs.execute('INSERT INTO table2 VALUES (2, 42)')
83 |         # The commit should fail and move the connection back to READY state
84 | self.assertEqual(self.conn.status, STATUS_BEGIN)
85 | self.assertRaises(psycopg2.IntegrityError, self.conn.commit)
86 | self.assertEqual(self.conn.status, STATUS_READY)
87 | # The connection should be ready to use for the next transaction:
88 | curs.execute('SELECT 1')
89 | self.assertEqual(curs.fetchone()[0], 1)
90 |
91 |
92 | class DeadlockSerializationTests(ConnectingTestCase):
93 | """Test deadlock and serialization failure errors."""
94 |
95 | def connect(self):
96 | conn = ConnectingTestCase.connect(self)
97 | conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
98 | return conn
99 |
100 | def setUp(self):
101 | ConnectingTestCase.setUp(self)
102 |
103 | curs = self.conn.cursor()
104 | # Drop table if it already exists
105 | try:
106 | curs.execute("DROP TABLE table1")
107 | self.conn.commit()
108 | except psycopg2.DatabaseError:
109 | self.conn.rollback()
110 | try:
111 | curs.execute("DROP TABLE table2")
112 | self.conn.commit()
113 | except psycopg2.DatabaseError:
114 | self.conn.rollback()
115 | # Create sample data
116 | curs.execute("""
117 | CREATE TABLE table1 (
118 | id int PRIMARY KEY,
119 | name text)
120 | """)
121 | curs.execute("INSERT INTO table1 VALUES (1, 'hello')")
122 | curs.execute("CREATE TABLE table2 (id int PRIMARY KEY)")
123 | self.conn.commit()
124 |
125 | def tearDown(self):
126 | curs = self.conn.cursor()
127 | curs.execute("DROP TABLE table1")
128 | curs.execute("DROP TABLE table2")
129 | self.conn.commit()
130 |
131 | ConnectingTestCase.tearDown(self)
132 |
133 | def test_deadlock(self):
134 | self.thread1_error = self.thread2_error = None
135 | step1 = threading.Event()
136 | step2 = threading.Event()
137 |
138 | def task1():
139 | try:
140 | conn = self.connect()
141 | curs = conn.cursor()
142 | curs.execute("LOCK table1 IN ACCESS EXCLUSIVE MODE")
143 | step1.set()
144 | step2.wait()
145 | curs.execute("LOCK table2 IN ACCESS EXCLUSIVE MODE")
146 | except psycopg2.DatabaseError, exc:
147 | self.thread1_error = exc
148 | step1.set()
149 | conn.close()
150 | def task2():
151 | try:
152 | conn = self.connect()
153 | curs = conn.cursor()
154 | step1.wait()
155 | curs.execute("LOCK table2 IN ACCESS EXCLUSIVE MODE")
156 | step2.set()
157 | curs.execute("LOCK table1 IN ACCESS EXCLUSIVE MODE")
158 | except psycopg2.DatabaseError, exc:
159 | self.thread2_error = exc
160 | step2.set()
161 | conn.close()
162 |
163 | # Run the threads in parallel. The "step1" and "step2" events
164 | # ensure that the two transactions overlap.
165 | thread1 = threading.Thread(target=task1)
166 | thread2 = threading.Thread(target=task2)
167 | thread1.start()
168 | thread2.start()
169 | thread1.join()
170 | thread2.join()
171 |
172 | # Exactly one of the threads should have failed with
173 | # TransactionRollbackError:
174 | self.assertFalse(self.thread1_error and self.thread2_error)
175 | error = self.thread1_error or self.thread2_error
176 | self.assertTrue(isinstance(
177 | error, psycopg2.extensions.TransactionRollbackError))
178 |
179 | def test_serialisation_failure(self):
180 | self.thread1_error = self.thread2_error = None
181 | step1 = threading.Event()
182 | step2 = threading.Event()
183 |
184 | def task1():
185 | try:
186 | conn = self.connect()
187 | curs = conn.cursor()
188 | curs.execute("SELECT name FROM table1 WHERE id = 1")
189 | curs.fetchall()
190 | step1.set()
191 | step2.wait()
192 | curs.execute("UPDATE table1 SET name='task1' WHERE id = 1")
193 | conn.commit()
194 | except psycopg2.DatabaseError, exc:
195 | self.thread1_error = exc
196 | step1.set()
197 | conn.close()
198 | def task2():
199 | try:
200 | conn = self.connect()
201 | curs = conn.cursor()
202 | step1.wait()
203 | curs.execute("UPDATE table1 SET name='task2' WHERE id = 1")
204 | conn.commit()
205 | except psycopg2.DatabaseError, exc:
206 | self.thread2_error = exc
207 | step2.set()
208 | conn.close()
209 |
210 | # Run the threads in parallel. The "step1" and "step2" events
211 | # ensure that the two transactions overlap.
212 | thread1 = threading.Thread(target=task1)
213 | thread2 = threading.Thread(target=task2)
214 | thread1.start()
215 | thread2.start()
216 | thread1.join()
217 | thread2.join()
218 |
219 | # Exactly one of the threads should have failed with
220 | # TransactionRollbackError:
221 | self.assertFalse(self.thread1_error and self.thread2_error)
222 | error = self.thread1_error or self.thread2_error
223 | self.assertTrue(isinstance(
224 | error, psycopg2.extensions.TransactionRollbackError))
225 |
226 |
227 | class QueryCancellationTests(ConnectingTestCase):
228 | """Tests for query cancellation."""
229 |
230 | def setUp(self):
231 | ConnectingTestCase.setUp(self)
232 | self.conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
233 |
234 | @skip_before_postgres(8, 2)
235 | def test_statement_timeout(self):
236 | curs = self.conn.cursor()
237 | # Set a low statement timeout, then sleep for a longer period.
238 | curs.execute('SET statement_timeout TO 10')
239 | self.assertRaises(psycopg2.extensions.QueryCanceledError,
240 | curs.execute, 'SELECT pg_sleep(50)')
241 |
242 |
243 | def test_suite():
244 | return unittest.TestLoader().loadTestsFromName(__name__)
245 |
246 | if __name__ == "__main__":
247 | unittest.main()
248 |
--------------------------------------------------------------------------------
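
A small sketch of the statement-timeout behaviour covered by QueryCancellationTests above, assuming a PostgreSQL 8.2+ server and a hypothetical DSN: a query that runs past statement_timeout raises QueryCanceledError, after which the transaction must be rolled back before the connection is usable again.

import psycopg2
import psycopg2.extensions

conn = psycopg2.connect("dbname=test")          # hypothetical DSN
curs = conn.cursor()
curs.execute("SET statement_timeout TO 10")     # milliseconds
try:
    curs.execute("SELECT pg_sleep(50)")
except psycopg2.extensions.QueryCanceledError:
    conn.rollback()                             # clear the failed transaction

curs.execute("SELECT 1")                        # the connection is usable again
print curs.fetchone()[0]
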