├── test_pg13
│   ├── __init__.py
│   ├── test_treepath.py
│   ├── test_misc.py
│   ├── test_scope.py
│   ├── test_weval.py
│   ├── test_threevl.py
│   ├── test_sqla.py
│   ├── test_dbapi2.py
│   ├── test_sqex.py
│   ├── test_sqparse2.py
│   ├── test_pg.py
│   ├── test_sqparse.py
│   └── test_pgmock.py
├── .envrc
├── MANIFEST.in
├── pg13
│   ├── version.py
│   ├── errors.py
│   ├── __init__.py
│   ├── misc.py
│   ├── pool_psyco.py
│   ├── treepath.py
│   ├── threevl.py
│   ├── sqla.py
│   ├── weval.py
│   ├── scope.py
│   ├── table.py
│   ├── pgmock.py
│   ├── pgmock_dbapi2.py
│   ├── pg.py
│   ├── sqparse2.py
│   └── sqex.py
├── .editorconfig
├── docs
│   ├── pg13.pg.rst
│   ├── pg13.misc.rst
│   ├── pg13.sqex.rst
│   ├── pg13.pgmock.rst
│   ├── pg13.sqparse.rst
│   ├── pg13.threevl.rst
│   ├── design
│   │   └── evaluation.md
│   ├── pg13.rst
│   ├── index.rst
│   ├── Makefile
│   ├── make.bat
│   └── conf.py
├── tox.ini
├── .github
│   └── workflows
│       └── test.yml
├── .pylintrc
├── .gitignore
├── README.rst
├── LICENSE
├── setup.py
└── README.md

/test_pg13/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
1 | layout python3
2 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include tox.ini
2 | 
--------------------------------------------------------------------------------
/pg13/version.py:
--------------------------------------------------------------------------------
1 | "version"
2 | __version__ = '0.2.0'
3 | 
--------------------------------------------------------------------------------
/pg13/errors.py:
--------------------------------------------------------------------------------
1 | "errors"
2 | 
3 | class PgPoolError(Exception):
4 |   pass
5 | 
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 | 
3 | [*.py]
4 | indent_style = space
5 | indent_size = 2
6 | 
--------------------------------------------------------------------------------
/docs/pg13.pg.rst:
--------------------------------------------------------------------------------
1 | pg13.pg module
2 | ==============
3 | 
4 | .. automodule:: pg13.pg
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 | 
--------------------------------------------------------------------------------
/docs/pg13.misc.rst:
--------------------------------------------------------------------------------
1 | pg13.misc module
2 | ================
3 | 
4 | .. automodule:: pg13.misc
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 | 
--------------------------------------------------------------------------------
/docs/pg13.sqex.rst:
--------------------------------------------------------------------------------
1 | pg13.sqex module
2 | ================
3 | 
4 | .. automodule:: pg13.sqex
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 | 
--------------------------------------------------------------------------------
/docs/pg13.pgmock.rst:
--------------------------------------------------------------------------------
1 | pg13.pgmock module
2 | ==================
3 | 
4 | .. automodule:: pg13.pgmock
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 | 
--------------------------------------------------------------------------------
/docs/pg13.sqparse.rst:
--------------------------------------------------------------------------------
1 | pg13.sqparse module
2 | ===================
3 | 
4 | .. automodule:: pg13.sqparse
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 | 
--------------------------------------------------------------------------------
/docs/pg13.threevl.rst:
--------------------------------------------------------------------------------
1 | pg13.threevl module
2 | ===================
3 | 
4 | .. automodule:: pg13.threevl
5 |    :members:
6 |    :undoc-members:
7 |    :show-inheritance:
8 | 
--------------------------------------------------------------------------------
/pg13/__init__.py:
--------------------------------------------------------------------------------
1 | "module"
2 | from . import misc, pg
3 | # don't import pgmock here -- it's only useful in test mode
4 | 
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py36
3 | 
4 | [pytest]
5 | norecursedirs = env build dist docs *.egg-info
6 | markers =
7 |     travis_skip: tests that break in CI
8 | 
9 | [testenv]
10 | commands = pytest test_pg13
11 | 
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: test
2 | on: [push]
3 | jobs:
4 |   build:
5 |     runs-on: ubuntu-latest
6 |     steps:
7 |     - uses: actions/checkout@v2
8 |     - uses: actions/setup-python@v1
9 |       with:
10 |         python-version: '>=3.6'
11 |     - name: deps
12 |       run: |
13 |         pip install -e .[psyco,sqla]
14 |         pip install pytest pylint
15 |     - name: test
16 |       run: pytest
17 |     - name: lint
18 |       run: pylint pg13
--------------------------------------------------------------------------------
/docs/design/evaluation.md:
--------------------------------------------------------------------------------
1 | ## evaluation order
2 | 
3 | design doc for SQL statement evaluation
4 | 
5 | ### precedence model for doing where-filtering
6 | These actions apply to select, update, delete:
7 | 1. CTEs
8 | 1. nested select
9 |    - can this bind variables from its parent scope? check specs, but easier not to
10 | 1. single-table where
11 | 1. multi-table where
12 | 
13 | When the expression is a select, we're also interested in:
14 | 1. order & group
15 | 2. create output rows
16 | 
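17 | As a worked example (an editorial addition, not part of the original note), here's how the classifier behind steps 3 and 4 sees a query borrowed from the test suite; the sketch leans on the repo's own test helpers:
18 | 
19 | ```python
20 | # see test_pg13/test_weval.py, which exercises exactly this path
21 | from pg13 import sqparse2, scope, weval
22 | from test_pg13.test_pgmock import prep
23 | 
24 | tables, run = prep('create table t1 (a int, b text)')
25 | run('create table t2 (a int, b text)')
26 | exp = sqparse2.parse("select * from t1, (select a as alias from t2 where userid=1) as t_sub where a = alias and t1.a = 0")
27 | scope_ = scope.Scope.from_fromx(tables, exp.tables)
28 | single, cartesian = weval.classify_wherex(scope_, exp.tables, exp.where)
29 | # the nested select (step 2) gets evaluated first, with its own single-table where.
30 | # 't1.a = 0' comes back as a single-table cond (step 3: filter t1 on its own);
31 | # 'a = alias' touches both t1 and t_sub, so it's a cartesian cond (step 4)
32 | ```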
--------------------------------------------------------------------------------
/docs/pg13.rst:
--------------------------------------------------------------------------------
1 | pg13 package
2 | ============
3 | 
4 | Submodules
5 | ----------
6 | 
7 | .. toctree::
8 | 
9 |    pg13.misc
10 |    pg13.pg
11 |    pg13.pgmock
12 |    pg13.sqex
13 |    pg13.sqparse
14 |    pg13.threevl
15 | 
16 | Module contents
17 | ---------------
18 | 
19 | .. automodule:: pg13
20 |    :members:
21 |    :undoc-members:
22 |    :show-inheritance:
23 | 
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MESSAGES CONTROL]
2 | 
3 | disable=
4 |   line-too-long,
5 |   multiple-imports,
6 |   fixme,
7 |   missing-function-docstring,
8 |   too-many-arguments,
9 |   # multiple-statements,
10 |   # too-many-locals,
11 |   # trailing-comma-tuple,
12 |   no-else-return,
13 |   missing-class-docstring,
14 |   no-else-raise,
15 |   too-few-public-methods,
16 |   too-many-public-methods,
17 |   # global-statement,
18 |   # too-many-instance-attributes,
19 | 
20 | [FORMAT]
21 | 
22 | indent-string = "  "
23 | indent-after-paren=2
--------------------------------------------------------------------------------
/test_pg13/test_treepath.py:
--------------------------------------------------------------------------------
1 | from pg13 import treepath
2 | 
3 | class PT(treepath.PathTree):
4 |   def __init__(self, a, b): self.a, self.b = a, b
5 | 
6 | def test_get():
7 |   pt = PT(1, [PT(2, 3), PT(4, 5)])
8 |   assert pt[('b',0),'a'] == 2
9 |   assert pt[('b',0),'b'] == 3
10 |   assert pt[('b',1),'a'] == 4
11 |   assert pt[('b',1),'b'] == 5
12 |   assert pt['a',] == 1
13 | 
14 | def test_set():
15 |   pt = PT(1, [PT(2, 3), PT(4, 5)])
16 |   pt['a',] = 6
17 |   pt[('b',1),] = 7
18 |   pt[('b',0),'a'] = 8
19 |   assert pt[('b',0),'a'] == 8
20 |   assert pt[('b',1),] == 7
21 |   assert pt[('a',)] == 6
22 | 
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. pg13 documentation master file, created by
2 |    sphinx-quickstart on Wed Jan 14 17:55:29 2015.
3 |    You can adapt this file completely to your liking, but it should at least
4 |    contain the root `toctree` directive.
5 | 
6 | pg13 -- sql orm with built-in mocking
7 | =====================================
8 | 
9 | To get started with pg13, check out the examples on our github page (https://github.com/abe-winter/pg13-py).
10 | 
11 | pg13 is still alpha software, and these docs are pretty incomplete. A good starting point is :doc:`pg13.pg`, where you should read about:
12 | 
13 | * the Row class (your models should inherit from it)
14 | * how to construct a PgPool that connects to your database
15 | 
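16 | For a quick taste of both before the module docs, here is a minimal sketch (an editorial addition; the field list and connection string are illustrative, not from a real deployment):
17 | 
18 | .. code-block:: python
19 | 
20 |    from pg13 import pg
21 |    from pg13.pool_psyco import PgPoolPsyco
22 | 
23 |    class Message(pg.Row):
24 |      FIELDS = [('userid', 'int'), ('content', 'text')]
25 |      PKEY = 'userid'
26 |      TABLE = 'message'
27 | 
28 |    pool = PgPoolPsyco('dbname=mydb user=me')  # hypothetical DSN
29 |    Message.create_table(pool)
30 |    Message.insert_all(pool, 1, 'hello')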
31 | 
32 | Contents:
33 | 
34 | .. toctree::
35 |    :maxdepth: 4
36 | 
37 |    pg13
38 | 
39 | 
40 | Indices and tables
41 | ==================
42 | 
43 | * :ref:`genindex`
44 | * :ref:`modindex`
45 | * :ref:`search`
46 | 
--------------------------------------------------------------------------------
/test_pg13/test_misc.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from pg13 import misc
3 | 
4 | @pytest.mark.xfail
5 | def test_getterby(): raise NotImplementedError
6 | 
7 | def test_methonce():
8 |   class C:
9 |     @misc.meth_once
10 |     def once(self): return 5
11 |   c = C()
12 |   assert c.once() == 5
13 |   with pytest.raises(misc.CallOnceError): c.once()
14 | 
15 | def test_trace():
16 |   def f2(): raise ValueError('whatever')
17 |   def f1(): f2()
18 |   trace = None
19 |   try: f1()
20 |   except ValueError: trace = misc.trace()
21 |   assert trace is not None
22 |   assert all(x.split(':')[0]=='test_misc.py' for x in trace)
23 |   assert [x.split(':')[1] for x in trace]==['test_trace','f1','f2']
24 | 
25 | def test_multimap():
26 |   # note: this is assuming stable sort (in assuming the values will be sorted); whatever, it's passing
27 |   assert misc.multimap([(1,1),(1,2),(2,2),(3,3),(1,3)]) == {
28 |     1: [1,2,3],
29 |     2: [2],
30 |     3: [3],
31 |   }
32 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | 
5 | # C extensions
6 | *.so
7 | 
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | lib/
17 | lib64/
18 | parts/
19 | sdist/
20 | var/
21 | *.egg-info/
22 | .installed.cfg
23 | *.egg
24 | 
25 | # PyInstaller
26 | # Usually these files are written by a python script from a template
27 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
28 | *.manifest
29 | *.spec
30 | 
31 | # Installer logs
32 | pip-log.txt
33 | pip-delete-this-directory.txt
34 | 
35 | # Unit test / coverage reports
36 | htmlcov/
37 | .tox/
38 | .coverage
39 | .cache
40 | nosetests.xml
41 | coverage.xml
42 | 
43 | # Translations
44 | *.mo
45 | *.pot
46 | 
47 | # Django stuff:
48 | *.log
49 | 
50 | # Sphinx documentation
51 | docs/_build/
52 | 
53 | # PyBuilder
54 | target/
55 | 
56 | .direnv/
57 | 
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ============
2 | pg13
3 | ============
4 | 
5 | .. image:: https://travis-ci.org/abe-winter/pg13-py.svg?branch=master
6 | 
7 | **install** with ``pip install pg13``
8 | 
9 | **docs** at http://pg13.readthedocs.org/en/latest/
10 | 
11 | pg13 is a SQL evaluator for python designed for testing. Normally when you want to test an application with database dependencies, you have three dangerous options:
12 | 
13 | 1. **artisanal mocking**: standard mocking frameworks make you specify the output of every DB call
14 | 2. **local db**: have a running copy of the database
15 | 3. **everything but**: test everything but the DB interaction
16 | 
17 | pg13 takes a different approach:
18 | 
19 | * SQL is simulated in python
20 | * every test can create and populate its own lightweight database
21 | * tests are completely deterministic
22 | * parallelization is safe (because parallel tests have no chance of touching the same data)
23 | * performance: 200+ tests per second on my laptop
24 | 
25 | See the github readme for examples. https://github.com/abe-winter/pg13-py
26 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 | 
3 | Copyright (c) 2015 Abe Winter
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 23 | -------------------------------------------------------------------------------- /test_pg13/test_scope.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pg13 import scope, sqparse2 3 | from .test_pgmock import prep 4 | 5 | EXP = sqparse2.parse("select * from t1, (select a as alias from t2 where userid=1) as t_sub") 6 | 7 | def test_scope_from_fromx(): 8 | tables, run = prep('create table t1 (a int, b text)') 9 | run('create table t2 (a int, b text)') 10 | scope.Scope.from_fromx(tables, EXP.tables) 11 | with pytest.raises(scope.ScopeCollisionError): 12 | scope.Scope.from_fromx(tables, sqparse2.parse('select * from t1 as t2, t2').tables) 13 | 14 | def test_scope_resolve_column(): 15 | from pg13.sqparse2 import NameX, AttrX, AsterX 16 | tables, run = prep('create table t1 (a int, b text)') 17 | run('create table t2 (a int, b text)') 18 | scope_ = scope.Scope.from_fromx(tables, EXP.tables) 19 | assert scope_.resolve_column(NameX('a')) == ('t1', 'a') 20 | assert scope_.resolve_column(AttrX(NameX('t1'), NameX('a'))) == ('t1', 'a') 21 | assert scope_.resolve_column(NameX('alias')) == ('t_sub', 'alias') 22 | assert scope_.resolve_column(AttrX(NameX('t_sub'), NameX('alias'))) == ('t_sub', 'alias') 23 | 24 | @pytest.mark.xfail 25 | def test_scope_resolve_column_asterx(): 26 | raise NotImplementedError('handle AsterX') 27 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import ast,os,setuptools 2 | from pg13 import version 3 | 4 | def get_version(fname): 5 | "grab __version__ variable from fname (assuming fname is a python file). parses without importing." 6 | assign_stmts = [s for s in ast.parse(open(fname).read()).body if isinstance(s,ast.Assign)] 7 | valid_targets = [s for s in assign_stmts if len(s.targets) == 1 and s.targets[0].id == '__version__'] 8 | return valid_targets[-1].value.s # fail if valid_targets empty 9 | 10 | setuptools.setup( 11 | name='pg13', 12 | version=version.__version__, 13 | description='sql evaluator for mocking', 14 | classifiers=[ 15 | 'Development Status :: 4 - Beta', 16 | 'License :: OSI Approved :: MIT License', 17 | 'Programming Language :: Python :: 3.6', 18 | 'Programming Language :: Python :: 3 :: Only', 19 | 'Topic :: Database', 20 | 'Topic :: Software Development :: Interpreters', 21 | 'Topic :: Software Development :: Testing', 22 | ], 23 | keywords=['sql','mocking','orm','database','testing'], 24 | author='Abe Winter', 25 | author_email='awinter.public+pg13@gmail.com', 26 | url='https://github.com/abe-winter/pg13-py', 27 | license='MIT', 28 | packages=setuptools.find_packages(), 29 | install_requires=['ply==3.11'], 30 | extras_require={ 31 | 'psyco':['psycopg2-binary==2.8.4'], 32 | 'sqla':['sqlalchemy==1.3.13'], 33 | }, 34 | ) 35 | -------------------------------------------------------------------------------- /pg13/misc.py: -------------------------------------------------------------------------------- 1 | "misc whatever" 2 | 3 | import time, os, collections, sys, functools, itertools 4 | 5 | def utcnow(): 6 | return int(time.time()) 7 | 8 | def tbframes(traceback): 9 | 'unwind traceback tb_next structure to array' 10 | frames = [traceback.tb_frame] 11 | while traceback.tb_next: 12 | traceback = traceback.tb_next 13 | frames.append(traceback.tb_frame) 14 | return frames 15 | def tbfuncs(frames): 16 | 'this takes the frames array returned by tbframes' 17 | 
return ['%s:%s:%s' % (os.path.split(f.f_code.co_filename)[-1], f.f_code.co_name, f.f_lineno) for f in frames] 18 | def trace(): 19 | return tbfuncs(tbframes(sys.exc_info()[2])) 20 | 21 | def key_from_pair(pair): 22 | "helper for multimap" 23 | return pair[0] 24 | 25 | def multimap(kv_pairs): 26 | # note: sort is on just key, not k + v, because sorting on both would require sortable value type 27 | return { 28 | key: [v for _, v in pairs] 29 | for key, pairs in itertools.groupby(sorted(kv_pairs, key=key_from_pair), key_from_pair) 30 | } 31 | 32 | # warning: EnvBundle is too specific to the applications that spawned pg13. ok to release to OSS, but make a more general way to pass stuff around. 33 | EnvBundle = collections.namedtuple('EnvBundle', 'pool redis sesh userid chanid apicon') # environment bundle; convenient capsule for passing this stuff around 34 | 35 | class CallOnceError(Exception): 36 | pass 37 | 38 | def meth_once(func): 39 | "call once for member function (i.e. takes self as first arg)" 40 | attr = '__meth_once_' + func.__name__ 41 | @functools.wraps(func) 42 | def wrapper(self, *args, **kwargs): 43 | if hasattr(self, attr): 44 | raise CallOnceError(func.__name__) 45 | setattr(self, attr, True) 46 | return func(self, *args, **kwargs) 47 | return wrapper 48 | -------------------------------------------------------------------------------- /test_pg13/test_weval.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pg13 import weval, sqparse2, scope, treepath 3 | from .test_pgmock import prep 4 | 5 | EXP = sqparse2.parse("select * from t1, (select a as alias from t2 where userid=1) as t_sub where a = alias and t1.a = 0") 6 | 7 | def test_flatten_tree(): 8 | # todo: move to test_treepath 9 | from pg13.sqparse2 import BinX, NameX, OpX, Literal 10 | exp = sqparse2.parse('select * from t where a and b = 1 and d').where 11 | def test(exp): 12 | return isinstance(exp, sqparse2.BinX) and exp.op.op == 'and' 13 | def enumerator(exp): 14 | return [exp.left, exp.right] 15 | assert treepath.flatten_tree(test, enumerator, exp) == [ 16 | NameX('a'), 17 | BinX(OpX('='), NameX('b'), Literal(1)), 18 | NameX('d') 19 | ] 20 | 21 | def test_names_from_exp(): 22 | from pg13.sqparse2 import AttrX, NameX 23 | exp = sqparse2.parse('select * from t where a and b = t.c and d').where 24 | assert weval.names_from_exp(exp) == [ 25 | NameX('a'), 26 | NameX('b'), 27 | AttrX(NameX('t'), NameX('c')), 28 | NameX('d') 29 | ] 30 | 31 | def test_classify_wherex(): 32 | # def classify_wherex(scope_, fromx, wherex): 33 | tables, run = prep('create table t1 (a int, b text)') 34 | run('create table t2 (a int, b text)') 35 | scope_ = scope.Scope.from_fromx(tables, EXP.tables) 36 | print(weval.classify_wherex(scope_, EXP.tables, EXP.where)) 37 | (_, single), (cart,) = weval.classify_wherex(scope_, EXP.tables, EXP.where) 38 | assert isinstance(single, weval.SingleTableCond) and single.table == 't1' and isinstance(single.exp, sqparse2.BinX) 39 | assert isinstance(cart, weval.CartesianCond) and isinstance(cart.exp, sqparse2.BinX) 40 | 41 | @pytest.mark.xfail 42 | def test_wherex_to_rowlist(): 43 | tables, run = prep('create table t1 (a int, b text)') 44 | exp = sqparse2.parse('select * from t1') 45 | print(weval.wherex_to_rowlist( 46 | scope.Scope.from_fromx(tables, exp.tables), 47 | exp.tables, 48 | exp.where 49 | )) 50 | raise NotImplementedError 51 | -------------------------------------------------------------------------------- /test_pg13/test_threevl.py: 
--------------------------------------------------------------------------------
1 | import pytest
2 | 
3 | def test_3vl_basics():
4 |   from pg13.threevl import ThreeVL
5 |   assert bool(ThreeVL('t'))
6 |   assert not bool(ThreeVL('f'))
7 |   # with pytest.raises(ValueError): bool(ThreeVL('u')) # I think this needs to be false at the top level
8 |   with pytest.raises(ValueError): ThreeVL('bad value')
9 |   assert ThreeVL('f')!=ThreeVL('t')
10 |   assert ThreeVL('t')==ThreeVL('t')
11 |   assert [True,False,False]==[ThreeVL.test(ThreeVL(c)) for c in 'tfu']
12 |   assert list(map(ThreeVL,'ftu'))==[ThreeVL.nein(ThreeVL(c)) for c in 'tfu']
13 |   assert list(map(ThreeVL,'ft'))==list(map(ThreeVL.nein,(True,False)))
14 | 
15 | def test_3vl_andor():
16 |   from pg13.threevl import ThreeVL
17 |   # test mixing bools and 3VLs
18 |   assert ThreeVL.andor('and',True,ThreeVL('t'))
19 |   assert not ThreeVL.andor('and',False,ThreeVL('t'))
20 |   assert ThreeVL.andor('or',False,ThreeVL('t'))
21 |   # now the truth table
22 |   # https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_logic
23 |   TABLE=[
24 |     ('and','tt','t'),
25 |     ('and','tu','u'),
26 |     ('and','tf','f'),
27 |     ('and','uu','u'),
28 |     ('and','uf','f'),
29 |     ('and','ff','f'),
30 |     ('or','tt','t'),
31 |     ('or','tu','t'),
32 |     ('or','tf','t'),
33 |     ('or','uu','u'),
34 |     ('or','uf','u'), # per the Kleene table above: unknown-or-false is unknown
35 |     ('or','ff','f'),
36 |   ]
37 |   for op,(a,b),res in TABLE:
38 |     assert ThreeVL(res)==ThreeVL.andor(op,ThreeVL(a),ThreeVL(b)), (op,(a,b),res)
39 | 
40 | @pytest.mark.xfail
41 | def test_3vl_implication():
42 |   raise NotImplementedError("figure out what SQL/postgres supports (and what implication means)")
43 | 
44 | def test_3vl_compare():
45 |   from pg13.threevl import ThreeVL
46 |   COMPS=[
47 |     [False,('>',1,2)],
48 |     [True,('>',1,0)],
49 |     [True,('<',1,2)],
50 |     [False,('<',1,0)],
51 |     [False,('<',1,1)],
52 |     [False,('!=',1,1)],
53 |     [True,('!=',1,0)],
54 |     [False,('=',1,0)],
55 |     [True,('=',1,1)],
56 |     [True,('!=',1,'a')],
57 |     [False,('=',1,'a')],
58 |     [ThreeVL('u'),('>',1,None)],
59 |     [ThreeVL('u'),('!=',1,None)],
60 |     [ThreeVL('u'),('!=',None,None)],
61 |   ]
62 |   for result,args in COMPS:
63 |     assert result==ThreeVL.compare(*args),(result,args)
--------------------------------------------------------------------------------
/test_pg13/test_sqla.py:
--------------------------------------------------------------------------------
1 | "test pg13 sqlalchemy dialect"
2 | 
3 | import pytest
4 | try:
5 |   import sqlalchemy, sqlalchemy.ext.declarative, sqlalchemy.orm.session
6 |   import pg13.sqla # does this matter for registry.register? 
7 | from pg13 import pgmock_dbapi2 8 | except ImportError: pass # survivable because sqla is legitimately missing on travis-ci or when -m "not travis_skip" 9 | 10 | # travis should skip this because the sqlalchemy install is expensive 11 | pytestmark = pytest.mark.travis_skip 12 | 13 | @pytest.fixture(scope='session') 14 | def sqla_dialect(): 15 | sqlalchemy.dialects.registry.register("pg13", "pg13.sqla", "PG13DBAPI2Dialect") 16 | sqlalchemy.dialects.registry.register("pg13+dbapi2", "pg13.sqla", "PG13DBAPI2Dialect") 17 | 18 | @pytest.fixture(scope='function') 19 | def engine(sqla_dialect): 20 | db_id = pgmock_dbapi2.add_db() # todo: create database instead of this 21 | engine = sqlalchemy.create_engine("pg13://%i" % db_id) 22 | engine.pgmock_db_id = db_id 23 | return engine 24 | 25 | @pytest.fixture(scope='function') 26 | def Session(engine): 27 | return sqlalchemy.orm.session.sessionmaker(bind=engine) 28 | 29 | @pytest.fixture(scope='function') 30 | def Base(): 31 | return sqlalchemy.ext.declarative.declarative_base() 32 | 33 | @pytest.fixture(scope='function') 34 | def TestTable(Base): 35 | class TestTable(Base): 36 | __tablename__ = 't1' 37 | id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True) 38 | name = sqlalchemy.Column(sqlalchemy.Text) 39 | return TestTable 40 | 41 | def test_connect(engine): 42 | engine.connect() 43 | 44 | def test_create_table(engine, Base, TestTable): 45 | Base.metadata.create_all(engine) 46 | con = pgmock_dbapi2.connect(engine.pgmock_db_id) 47 | assert TestTable.__tablename__ in con.db 48 | 49 | def test_insert(engine, Base, Session, TestTable): 50 | Base.metadata.create_all(engine) 51 | session = Session() 52 | session.add(TestTable(id=0,name='hello')) 53 | session.add(TestTable(id=1, name='goodbye')) 54 | session.commit() 55 | con = pgmock_dbapi2.connect(engine.pgmock_db_id) 56 | assert con.db['t1'].rows == [[0,'hello'],[1,'goodbye']] 57 | 58 | def test_select(engine, Base, Session, TestTable): 59 | Base.metadata.create_all(engine) 60 | session = Session() 61 | session.add(TestTable(id=0,name='hello')) 62 | session.add(TestTable(id=1, name='goodbye')) 63 | assert ['hello','goodbye'] == [x.name for x in session.query(TestTable)] 64 | session.close() # this prevents 'rollback' printout in py.test output 65 | -------------------------------------------------------------------------------- /pg13/pool_psyco.py: -------------------------------------------------------------------------------- 1 | """pool for psycopg2 backend. 2 | this *doesn't* get imported by default because we don't want to have to install a zillion backends (most users only care about one). 3 | """ 4 | 5 | import contextlib 6 | import psycopg2.pool, psycopg2 7 | from . import pg, errors 8 | 9 | class PgCursorPsyco(pg.Cursor): 10 | "this is *only* necessary for error-wrapping" 11 | JSON_WRITE = True 12 | JSON_READ = False 13 | def __init__(self, psyco_cursor): 14 | self.cursor = psyco_cursor 15 | def execute(self, qstring, vals=()): 16 | try: 17 | return self.cursor.execute(qstring, vals) 18 | except psycopg2.IntegrityError as err: 19 | raise errors.PgPoolError(err) 20 | def __iter__(self): 21 | return iter(self.cursor) 22 | def fetchone(self): 23 | return self.cursor.fetchone() 24 | 25 | class PgPoolPsyco(pg.Pool): 26 | "see pg.PgPool class for tutorial" 27 | # JSON_WRITE/JSON_READ are used to configure the psycopg2 JSONB behavior; it requires json when storing, converts to python types when loading. 
28 | JSON_WRITE = True 29 | JSON_READ = False 30 | def __init__(self, dbargs): 31 | # pylint: disable=super-init-not-called 32 | # http://stackoverflow.com/questions/12650048/how-can-i-pool-connections-using-psycopg-and-gevent 33 | self.pool = psycopg2.pool.ThreadedConnectionPool(5, 10, dbargs) # I think that this is safe combined with psycogreen patching 34 | def select(self, qstring, vals=()): 35 | with self.withcur() as cur: 36 | cur.execute(qstring, vals) 37 | for row in cur: 38 | # yield stmt has to be in same function as with block to hijack it. todo: experiment and figure out what that meant. 39 | yield row 40 | def commit(self, qstring, vals=()): 41 | with self.withcur() as cur: 42 | return cur.execute(qstring, vals) 43 | def commitreturn(self, qstring, vals=()): 44 | "commit and return result. This is intended for sql UPDATE ... RETURNING" 45 | with self.withcur() as cur: 46 | cur.execute(qstring, vals) 47 | return cur.fetchone() 48 | def close(self): 49 | self.pool.closeall() 50 | @contextlib.contextmanager 51 | def __call__(self): 52 | con = self.pool.getconn() 53 | # pylint: disable=try-except-raise 54 | try: 55 | yield con 56 | except: 57 | raise 58 | else: 59 | con.commit() 60 | finally: 61 | self.pool.putconn(con) 62 | @contextlib.contextmanager 63 | def withcur(self): 64 | with self() as con, con.cursor() as cur: 65 | yield PgCursorPsyco(cur) 66 | -------------------------------------------------------------------------------- /test_pg13/test_dbapi2.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pg13 import pgmock_dbapi2, sqparse2 3 | 4 | def test_connection(): 5 | with pgmock_dbapi2.connect() as a, a.cursor() as acur: 6 | acur.execute('create table t1 (a int)') 7 | acur.execute('insert into t1 values (1)') 8 | acur.execute('insert into t1 values (3)') 9 | 10 | # test second connction into same DB 11 | with pgmock_dbapi2.connect(a.db_id) as b, b.cursor() as bcur: 12 | bcur.execute('select * from t1') 13 | assert bcur.fetchall() == [[1],[3]] 14 | 15 | # test that new connection *doesn't* share DB 16 | with pgmock_dbapi2.connect() as c, c.cursor() as ccur: 17 | with pytest.raises(KeyError): 18 | ccur.execute('select * from t1') 19 | 20 | def test_auto_rollback(): 21 | with pytest.raises(sqparse2.SQLSyntaxError): 22 | with pgmock_dbapi2.connect() as db, db.cursor() as cur: 23 | cur.execute('create table t1 (a int)') 24 | cur.execute('insert into t1 values (1)') 25 | cur.execute("this one won't parse") 26 | assert 't1' not in db.db 27 | 28 | def test_fetchone(): 29 | with pgmock_dbapi2.connect() as db, db.cursor() as cur: 30 | cur.execute('create table t1 (a int, b int)') 31 | db.db['t1'].rows = [[1,2],[2,3],[3,4]] 32 | cur.execute('select * from t1') 33 | assert cur.fetchone() == [1,2] 34 | assert cur.fetchone() == [2,3] 35 | 36 | def test_exmany(): 37 | "this is also testing subbed literals, I think" 38 | vals = [[1,2],[3,4],[5,6]] 39 | with pgmock_dbapi2.connect() as db, db.cursor() as cur: 40 | cur.execute('create table t1 (a int, b int)') 41 | cur.executemany('insert into t1 (a, b) values (%s, %s)', list(map(tuple, vals))) 42 | assert db.db['t1'].rows == vals 43 | 44 | def test_iter(): 45 | with pgmock_dbapi2.connect() as db, db.cursor() as cur: 46 | cur.execute('create table t1 (a int, b int)') 47 | db.db['t1'].rows = [[1,2],[3,4],[5,6],[7,8]] 48 | # first, test whole iteration 49 | cur.execute('select * from t1') 50 | assert list(cur) == db.db['t1'].rows 51 | # now test iteration from middle 52 | 
cur.execute('select * from t1')
53 |     assert cur.fetchone() == [1,2]
54 |     assert list(cur) == db.db['t1'].rows[1:]
55 | 
56 | @pytest.mark.xfail
57 | def test_count_after_fetch():
58 |   # todo: look at spec; what's supposed to happen here
59 |   raise NotImplementedError
60 | 
61 | @pytest.mark.xfail
62 | def test_cursor_description_select():
63 |   raise NotImplementedError
64 | 
65 | @pytest.mark.xfail
66 | def test_cursor_description_nonselect():
67 |   raise NotImplementedError
68 | 
--------------------------------------------------------------------------------
/pg13/treepath.py:
--------------------------------------------------------------------------------
1 | """treepath.py -- storage for tree-structured parse results.
2 | supports 'paths', tuples that describe how to index the tree.
3 | paths can be used to get and set.
4 | example (see test_treepath.py): pt[('b',1),'a'] reads pt.b[1].a, and assignment works the same way.
5 | todo: this probably exists somewhere else so use a public library instead of roll-your-own.
6 | """
7 | 
8 | class PathTree:
9 |   "'tree path' is implemented here (i.e. square brackets for get-set)"
10 |   def child(self, index):
11 |     "helper for __getitem__/__setitem__"
12 |     if isinstance(index, tuple):
13 |       attr, i = index
14 |       return getattr(self, attr)[i]
15 |     else: return getattr(self, index)
16 | 
17 |   @staticmethod
18 |   def check_i(i):
19 |     "helper"
20 |     if not isinstance(i, tuple):
21 |       raise TypeError('want:tuple', type(i))
22 | 
23 |   def __getitem__(self, i):
24 |     self.check_i(i)
25 |     if len(i) == 0:
26 |       return self
27 |     elif len(i) == 1:
28 |       return self.child(i[0])
29 |     else:
30 |       return self.child(i[0])[i[1:]]
31 | 
32 |   def __setitem__(self, i, item):
33 |     self.check_i(i)
34 |     if len(i) == 0:
35 |       raise ValueError('cant_set_toplevel')
36 |     elif len(i) == 1:
37 |       if isinstance(i[0], tuple):
38 |         attr, ilist = i[0]
39 |         getattr(self, attr)[ilist] = item
40 |       else:
41 |         setattr(self, i[0], item)
42 |     else:
43 |       self.child(i[0])[i[1:]] = item
44 | 
45 | def sub_slots(item, match_fn, path=(), arr=None, match=False, recurse_into_matches=True):
46 |   """given a BaseX in item, explore its ATTRS (doing the right thing for VARLEN).
47 |   return a list of tree-paths (i.e. tuples) for tree children that match match_fn. The root elt won't match.
48 |   """
49 |   # todo: rename match to topmatch for clarity
50 |   # todo: profiling suggests this getattr-heavy recursive process is the next bottleneck
51 |   if arr is None:
52 |     arr = []
53 |   if match and match_fn(item):
54 |     arr.append(path)
55 |     if not recurse_into_matches:
56 |       return arr
57 |   if isinstance(item, PathTree):
58 |     for attr in item.ATTRS:
59 |       val = getattr(item, attr)
60 |       if attr in item.VARLEN:
61 |         for i, elt in enumerate(val or ()):
62 |           nextpath = path + ((attr, i),)
63 |           sub_slots(elt, match_fn, nextpath, arr, True, recurse_into_matches)
64 |       else:
65 |         nextpath = path + (attr,)
66 |         sub_slots(val, match_fn, nextpath, arr, True, recurse_into_matches)
67 |   return arr
68 | 
69 | def flatten_tree(test, enumerator, exp):
70 |   """test is function(exp) >> bool.
71 |   enumerator is function(expression) >> list of subexpressions.
72 |   returns [subexpression, ...].
73 |   """
74 |   return sum((flatten_tree(test, enumerator, subx) for subx in enumerator(exp)), []) if test(exp) else [exp]
75 | 
--------------------------------------------------------------------------------
/pg13/threevl.py:
--------------------------------------------------------------------------------
1 | "3-value logic (i.e. the way that boolean ops on nulls propagate up in the expression tree in SQL). doesn't rhyme with 'evil' but should." 
2 | 
3 | class ThreeVL:
4 |   "Implementation of sql's 3VL. Warning: use == != for comparing python values, not for 3vl comparison. Caveat emptor."
5 |   # todo(awinter): is there any downside to using python True/False/None to make this work?
6 |   def __init__(self, value):
7 |     if value not in ('t', 'f', 'u'):
8 |       raise ValueError(value)
9 |     self.value = value
10 | 
11 |   def __repr__(self):
12 |     return "<3vl %s>" % self.value
13 | 
14 |   def __eq__(self, other):
15 |     if not isinstance(other, (bool, ThreeVL)):
16 |       return False
17 |     return self.value == other.value if isinstance(other, ThreeVL) else {True: 't', False: 'f'}[other] == self.value
18 | 
19 |   def __ne__(self, other):
20 |     return not self == other
21 | 
22 |   def __bool__(self):
23 |     # if self.value=='u': raise ValueError("can't cast 3VL 'unknown' to bool") # I think this is okay at top level
24 |     return self.value == 't'
25 | 
26 |   @staticmethod
27 |   def test(item):
28 |     "this is the top-level output to SQL 'where' tests. At this level, 'u' *is* false"
29 |     if not isinstance(item, (bool, ThreeVL)):
30 |       raise TypeError(type(item)) # todo(awinter): test this on whereclause testing an int
31 |     return item if isinstance(item, bool) else item.value == 't'
32 |   # note below: the 3vl comparisons return a 3vl OR a bool
33 | 
34 |   @staticmethod
35 |   def nein(item):
36 |     "this is 'not' but not is a keyword so it's 'nein'"
37 |     if not isinstance(item, (bool, ThreeVL)):
38 |       raise TypeError(type(item))
39 |     return not item if isinstance(item, bool) else ThreeVL(dict(t='f', f='t', u='u')[item.value])
40 | 
41 |   @staticmethod
42 |   def andor(operator, left, right):
43 |     # todo(awinter): does sql cast values to bools? e.g. nonempty strings, int 0 vs 1
44 |     # this follows Kleene logic: https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_logic
45 |     if operator not in ('and', 'or'):
46 |       raise ValueError('unk_operator', operator)
47 |     vals = left, right
48 |     if not all(isinstance(item, (bool, ThreeVL)) for item in vals):
49 |       raise TypeError(list(map(type, vals)))
50 |     if ThreeVL('u') in vals:
51 |       if operator == 'or':
52 |         return True if True in vals else ThreeVL('u')
53 |       return False if False in vals else ThreeVL('u')
54 |     left, right = list(map(bool, vals))
55 |     return (left and right) if operator == 'and' else (left or right)
56 | 
57 |   @staticmethod
58 |   def compare(operator, left, right):
59 |     "this could be replaced by overloading but I want == to return a bool for 'in' use"
60 |     # todo(awinter): what about nested 3vl like "(a=b)=(c=d)". is that allowed by sql? It will choke here if there's a null involved. 
61 | if left is None or right is None: 62 | return ThreeVL('u') 63 | elif operator == '=': 64 | return left == right 65 | elif operator == '!=': 66 | return left != right 67 | elif operator == '>': 68 | return left > right 69 | elif operator == '<': 70 | return left < right 71 | else: 72 | raise ValueError('unk operator in compare', operator) 73 | -------------------------------------------------------------------------------- /test_pg13/test_sqex.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pg13 import sqex, pgmock, sqparse2, treepath 3 | 4 | def test_sub_arraylit(): 5 | from pg13.sqparse2 import ArrayLit,Literal,SubLit 6 | arlit=ArrayLit([Literal('a'),SubLit,Literal('b')]) 7 | path,=treepath.sub_slots(arlit, lambda x:x is sqparse2.SubLit) 8 | assert path==(('vals',1),) 9 | arlit[path] = Literal('hello') 10 | assert arlit.vals==[Literal('a'),Literal('hello'),Literal('b')] # this is checking that the setter closure didn't capture the end of the loop 11 | # todo: test recursion *into* array 12 | 13 | def test_sub_assignx(): 14 | # todo: test the rest of the SUBSLOT_ATTRS classes 15 | from pg13.sqparse2 import SubLit,AssignX,Literal 16 | asx=AssignX(None,SubLit) 17 | path,=treepath.sub_slots(asx, lambda x:x is sqparse2.SubLit) 18 | assert path==('expr',) 19 | asx[path] = Literal('hello') 20 | assert asx.expr==Literal('hello') 21 | 22 | def test_sub_stmt(): 23 | # warning: a thorough test of this needs to exercise every syntax type. yikes. test_subslot_classes isn't enough. 24 | from pg13.sqparse2 import Literal,CommaX 25 | xsel=sqparse2.parse('select *,z-%s from t1 where x=%s') 26 | p1,p2=treepath.sub_slots(xsel, lambda x:x is sqparse2.SubLit) 27 | xsel[p1] = Literal(9) 28 | xsel[p2] = Literal(10) 29 | assert xsel.cols.children[1].right==Literal(9) and xsel.where.right==Literal(10) 30 | xins=sqparse2.parse('insert into t1 values (%s,%s)') 31 | p1,p2=treepath.sub_slots(xins, lambda x:x is sqparse2.SubLit) 32 | xins[p1] = Literal('a') 33 | xins[p2] = Literal('b') 34 | assert xins.values==[Literal('a'), Literal('b')] 35 | x2 = sqparse2.parse('coalesce(max(col),0)') 36 | assert sqex.contains(x2,sqex.consumes_rows) # checking that sub_slots can descend into CallX.args 37 | 38 | def test_sub_recurse(): 39 | exp = sqparse2.parse('select a + b + c + d from t1') 40 | def matchfn(exp): 41 | return isinstance(exp, sqparse2.BinX) and exp.op.op == '+' 42 | recurse_paths = treepath.sub_slots(exp, matchfn, recurse_into_matches=True) 43 | norecurse_paths = treepath.sub_slots(exp, matchfn, recurse_into_matches=False) 44 | assert len(recurse_paths) == 3 45 | assert len(norecurse_paths) == 1 46 | 47 | def test_decompose_select(): 48 | # basics 49 | nix,where = sqex.decompose_select(sqparse2.parse('select * from t1, t2')) 50 | assert where ==[] and nix.table_order==['t1','t2'] 51 | # where from 'join on' 52 | nix,where = sqex.decompose_select(sqparse2.parse('select * from t1 join t2 on x=y')) 53 | assert nix.table_order==['t1','t2'] and isinstance(where[0],sqparse2.BinX) 54 | 55 | def test_dfs(): 56 | from pg13.sqparse2 import Literal,ArrayLit 57 | with pytest.raises(ValueError): 58 | sqex.depth_first_sub(sqparse2.parse('select * from t1 where x=%s'), (10,[1,2])) 59 | xsel = sqex.depth_first_sub(sqparse2.parse('select a+%s from t1 where x=%s'), (10,[1,2])) 60 | assert xsel.cols.children[0].right==Literal(10) 61 | assert xsel.where.right==ArrayLit((1,2)) 62 | 63 | def test_nix_aonly(): 64 | "NameIndexer support for nested select (i.e. 
alias-only table)" 65 | ex = sqparse2.parse('select * from (select * from t1) as aonly') 66 | nix = sqex.NameIndexer.ctor_fromlist(ex.tables) 67 | assert isinstance(nix.aonly['aonly'],sqparse2.SelectX) 68 | 69 | def test_eliminateseqchildren(): 70 | def get_paths(ex): 71 | return treepath.sub_slots(ex, lambda x:isinstance(x,(sqparse2.AttrX,sqparse2.NameX)), match=True) 72 | def transform(string): 73 | return sqex.eliminate_sequential_children(get_paths(sqparse2.parse(string))) 74 | assert [()]==transform('a.b') 75 | assert [()]==transform('a') 76 | assert [()]==transform('a.*') 77 | assert []==transform('*') 78 | -------------------------------------------------------------------------------- /pg13/sqla.py: -------------------------------------------------------------------------------- 1 | """sqla.py -- sqlalchemy dialect for pg13. 2 | mostly copped from here: https://github.com/zzzeek/sqlalchemy_akiban/blob/master/sqlalchemy_akiban/dialect/base.py 3 | sqlalchemy_akiban doesn't have a LICENSE file but setup.py has MIT license 4 | """ 5 | # pylint: disable=abstract-method 6 | # warning: the next line might indicate a real incompatibility with newer sqlalchemy 7 | # pylint: disable=arguments-differ,signature-differs,method-hidden 8 | 9 | import sqlalchemy 10 | from . import pgmock_dbapi2 11 | from .version import __version__ 12 | 13 | class PG13Dialect(sqlalchemy.engine.default.DefaultDialect): 14 | name = 'pg13' 15 | supports_alter = False 16 | max_identifier_length = 1024 # probably longer 17 | supports_sane_rowcount = True 18 | supports_native_enum = False 19 | supports_native_boolean = True 20 | supports_sequences = False 21 | sequences_optional = True 22 | preexecute_autoincrement_sequences = False 23 | postfetch_lastrowid = True 24 | supports_default_values = True 25 | supports_empty_insert = False 26 | default_paramstyle = pgmock_dbapi2.paramstyle 27 | # ischema_names = ischema_names 28 | # colspecs = colspecs 29 | # statement_compiler = AkibanCompiler 30 | # ddl_compiler = AkibanDDLCompiler 31 | # type_compiler = AkibanTypeCompiler 32 | # preparer = AkibanIdentifierPreparer 33 | # execution_ctx_cls = AkibanExecutionContext 34 | # inspector = AkibanInspector 35 | # isolation_level = None 36 | # dbapi_type_map = {NESTED_CURSOR: NestedResult()} 37 | _backslash_escapes = True 38 | 39 | # @staticmethod 40 | # def dbapi(db=None, paramstyle=None): 41 | # # todo: no idea what this does or why it's necessary. taking out the paramstyle arg seriously alters behavior. 42 | # return pgmock_dbapi2.connect(db) 43 | 44 | def on_connect(self): 45 | pass 46 | 47 | def _get_default_schema_name(self, connection): 48 | return 'default' # hmm; I don't support schemas, do I? 
49 | 
50 |   def has_schema(self, connection, schema):
51 |     raise NotImplementedError("has_schema")
52 | 
53 |   def has_table(self, connection, table_name, schema=None):
54 |     return table_name in connection.connection.connection.db
55 | 
56 |   def has_sequence(self, connection, sequence_name, schema=None):
57 |     raise NotImplementedError("has sequence")
58 | 
59 |   def _get_server_version_info(self, connection):
60 |     return tuple(int(part) for part in __version__.split('.')) # sqlalchemy expects a version tuple, not a string
61 | 
62 |   def get_schema_names(self, connection, **kw):
63 |     raise NotImplementedError("schema names")
64 | 
65 |   def get_table_names(self, connection, schema=None, **kw):
66 |     return list(connection.connection.connection.db.keys())
67 | 
68 |   def get_view_names(self, connection, schema=None, **kw):
69 |     raise NotImplementedError("view names")
70 | 
71 |   def get_view_definition(self, connection, view_name, schema=None, **kw):
72 |     raise NotImplementedError("view definition")
73 | 
74 |   def get_columns(self, connection, table_name, schema=None, **kw):
75 |     return connection.pool.tables[table_name].fields
76 | 
77 |   def _get_column_info(self, name, format_type, default, notnull, schema):
78 |     raise NotImplementedError
79 | 
80 |   def get_pk_constraint(self, connection, table_name, schema=None, **kw):
81 |     return connection.pool.tables[table_name].pkey
82 | 
83 |   def get_foreign_keys(self, connection, table_name, schema=None, **kw):
84 |     raise NotImplementedError
85 | 
86 |   def get_indexes(self, connection, table_name, schema, **kw):
87 |     raise NotImplementedError
88 | 
89 | class PG13DBAPI2Dialect(PG13Dialect):
90 |   use_native_unicode = True
91 |   # execution_ctx_cls = AkibanPsycopg2ExecutionContext
92 |   driver = 'dbapi2'
93 |   supports_native_decimal = False
94 | 
95 |   @classmethod
96 |   def dbapi(cls):
97 |     return pgmock_dbapi2
98 | 
99 |   def on_connect(self):
100 |     pass
101 | 
102 |   def create_connect_args(self, url):
103 |     return (), {'db_id': int(url.host) if url.host else None}
104 | 
--------------------------------------------------------------------------------
/pg13/weval.py:
--------------------------------------------------------------------------------
1 | "weval -- where-clause evaluation"
2 | 
3 | import collections
4 | from . import sqparse2, misc, scope, table, treepath
5 | 
6 | class RowType(list):
7 |   "ctor takes list of (name, type). name is string, type is a sqparse2.ColX."
8 |   def index(self, name):
9 |     # todo: when a name isn't found, this should look in any children that have type=RowType
10 |     return list(zip(*self))[0].index(name)
11 | 
12 | class RowSource:
13 |   "for things like update and delete we need to know where a row came from. this stores that."
14 |   def __init__(self, tab, index):
15 |     "tab is a table.Table or a scope.SyntheticTable"
16 |     self.table, self.index = tab, index
17 | 
18 | class Row:
19 |   def __init__(self, source, type_, vals):
20 |     "source is a RowSource or None if it isn't from a table"
21 |     if len(type_) != len(vals):
22 |       raise ValueError('type/vals length mismatch', len(type_), len(vals))
23 |     self.source, self.type, self.vals = source, type_, vals
24 | 
25 |   def __getitem__(self, name):
26 |     return self.vals[self.type.index(name)]
27 | 
28 | SingleTableCond = collections.namedtuple('SingleTableCond', 'table exp')
29 | CartesianCond = collections.namedtuple('CartesianCond', 'exp')
30 | 
31 | def names_from_exp(exp):
32 |   "Return a list of AttrX and NameX from the expression." 
33 |   def match(exp):
34 |     return isinstance(exp, (sqparse2.NameX, sqparse2.AttrX))
35 |   paths = treepath.sub_slots(exp, match, match=True, recurse_into_matches=False)
36 |   return [exp[path] for path in paths]
37 | 
38 | def classify_wherex(scope_, fromx, wherex):
39 |   "helper for wherex_to_rowlist. returns [SingleTableCond,...], [CartesianCond,...]"
40 |   exprs = []
41 |   for exp in fromx:
42 |     if isinstance(exp, sqparse2.JoinX):
43 |       # todo: probably just add exp.on_stmt as a CartesianCond. don't write this until tests are ready.
44 |       # todo: do join-on clauses get special scoping w.r.t. column names? check spec.
45 |       raise NotImplementedError('join')
46 |     elif isinstance(exp, str):
47 |       exprs.append(exp)
48 |   def test_and(exp):
49 |     return isinstance(exp, sqparse2.BinX) and exp.op.op == 'and'
50 |   def binx_splitter(exp):
51 |     return [exp.left, exp.right]
52 |   exprs += treepath.flatten_tree(test_and, binx_splitter, wherex) if wherex else [] # wherex is None if not given
53 |   single_conds = []
54 |   cartesian_conds = []
55 |   for exp in exprs:
56 |     if isinstance(exp, str):
57 |       # note: bare table names need their own case because they don't work with resolve_column
58 |       single_conds.append(SingleTableCond(exp, exp))
59 |     else:
60 |       tables = list(zip(*map(scope_.resolve_column, names_from_exp(exp))))[0]
61 |       if len(set(tables)) > 1: # set() so a condition that touches one table twice still counts as single-table
62 |         cartesian_conds.append(CartesianCond(exp))
63 |       else:
64 |         single_conds.append(SingleTableCond(tables[0], exp))
65 |   return single_conds, cartesian_conds
66 | 
67 | def table_to_rowlist(table_, conds):
68 |   "helper for wherex_to_rowlist. (table.Table, [exp, ...]) -> [Row, ...]"
69 |   if isinstance(table_, scope.SyntheticTable):
70 |     raise NotImplementedError('todo: synthetic tables to Row[]')
71 |   elif isinstance(table_, table.Table):
72 |     rowtype = RowType([(colx.name, colx) for colx in table_.fields])
73 |     rows = [
74 |       Row(RowSource(table_, i), rowtype, row)
75 |       for i, row in enumerate(table_.rows)
76 |     ]
77 |     raise NotImplementedError # how does filtering work?
78 |   else:
79 |     raise TypeError('bad type for table', type(table), table)
80 | 
81 | def wherex_to_rowlist(scope_, fromx, wherex):
82 |   """return a RowList with the rows included from scope by the fromx and wherex.
83 |   When the scope has more than one name in it, the output will be a list of composite row
84 |   (i.e. a row whose field types are themselves RowType).
85 |   """
86 |   single, multi = classify_wherex(scope_, fromx, wherex)
87 |   single_rowlists = {
88 |     tablename: table_to_rowlist(scope_[tablename], conds)
89 |     for tablename, conds in list(misc.multimap(single).items()) # i.e. {cond.table:[cond.exp, ...]}
90 |   }
91 |   raise NotImplementedError
92 | 
--------------------------------------------------------------------------------
/pg13/scope.py:
--------------------------------------------------------------------------------
1 | "scope -- storage class for managing an expression's tables and aliases"
2 | 
3 | from . import sqparse2, table
4 | 
5 | class ScopeError(Exception):
6 |   "base"
7 | class ScopeCollisionError(ScopeError):
8 |   pass
9 | class ScopeUnkError(ScopeError):
10 |   pass
11 | 
12 | def col2name(col_item):
13 |   "helper for SyntheticTable.columns. 
takes something from SelectX.cols, returns a string column name"
14 |   if isinstance(col_item, sqparse2.NameX):
15 |     return col_item.name
16 |   elif isinstance(col_item, sqparse2.AliasX):
17 |     return col_item.alias
18 |   else:
19 |     raise TypeError(type(col_item), col_item)
20 | 
21 | class SyntheticTable:
22 |   def __init__(self, exp):
23 |     if not isinstance(exp, sqparse2.SelectX):
24 |       raise TypeError('expected SelectX, got', type(exp), exp)
25 |     self.exp = exp
26 | 
27 |   def columns(self, _scope):
28 |     "return list of column names. needs scope for resolving asterisks."
29 |     return list(map(col2name, self.exp.cols.children))
30 | 
31 | class Scope:
32 |   "bundle for all the tables that are going to be used in a query, and their aliases"
33 |   def __init__(self, expression):
34 |     self.expression = expression
35 |     self.names = {}
36 | 
37 |   def __contains__(self, name):
38 |     return name in self.names
39 | 
40 |   def add(self, name, target):
41 |     "target should be a Table or SyntheticTable"
42 |     if not isinstance(target, (table.Table, SyntheticTable)):
43 |       raise TypeError(type(target), target)
44 |     if name in self:
45 |       # note: this is critical for avoiding cycles
46 |       raise ScopeCollisionError('scope already has', name)
47 |     self.names[name] = target
48 | 
49 |   def __getitem__(self, table_name):
50 |     return self.names[table_name]
51 | 
52 |   def resolve_column(self, ref):
53 |     "ref is a NameX or AttrX. return (canonical_table_name, column_name)."
54 |     # pylint: disable=too-many-branches
55 |     if isinstance(ref, sqparse2.AttrX):
56 |       if ref.parent.name not in self:
57 |         raise ScopeUnkError('unk table or table alias', ref.parent.name)
58 |       return ref.parent.name, ref.attr.name
59 |     elif isinstance(ref, sqparse2.NameX):
60 |       matches = set()
61 |       for name, target in list(self.names.items()):
62 |         if isinstance(target, SyntheticTable):
63 |           if ref.name in target.columns(self):
64 |             matches.add(name)
65 |         elif isinstance(target, table.Table):
66 |           try:
67 |             target.get_column(ref.name)
68 |           except KeyError:
69 |             pass
70 |           else:
71 |             matches.add(name)
72 |         else:
73 |           raise TypeError('expected Table or SyntheticTable', type(target), target)
74 |       if not matches:
75 |         raise ScopeUnkError(ref)
76 |       elif len(matches) > 1:
77 |         raise ScopeCollisionError(matches, ref)
78 |       else:
79 |         return list(matches)[0], ref.name
80 |     else:
81 |       raise TypeError('unexpected', type(ref), ref)
82 | 
83 |   @classmethod
84 |   def from_fromx(cls, tables, fromx, ctes=()):
85 |     """Build a Scope given TablesDict, from-expression and optional list of CTEs.
86 |     fromx is a list of expressions (e.g. SelectX.tables). The list elts can be:
87 |     1. string (i.e. tablename)
88 |     2. AliasX(SelectX as name)
89 |     3. 
AliasX(name as name) 90 | """ 91 | if ctes: 92 | raise NotImplementedError # note: I don't think any other part of the program supports CTEs yet either 93 | scope_ = cls(fromx) 94 | for exp in fromx: 95 | if isinstance(exp, str): 96 | scope_.add(exp, tables[exp]) 97 | elif isinstance(exp, sqparse2.AliasX) and isinstance(exp.name, sqparse2.NameX): 98 | scope_.add(exp.alias, tables[exp.name.name]) 99 | elif isinstance(exp, sqparse2.AliasX) and isinstance(exp.name, sqparse2.SelectX): 100 | scope_.add(exp.alias, SyntheticTable(exp.name)) 101 | elif isinstance(exp, sqparse2.JoinX): 102 | raise NotImplementedError('todo: join') 103 | else: 104 | raise TypeError('bad fromx type', type(exp), exp) 105 | return scope_ 106 | -------------------------------------------------------------------------------- /test_pg13/test_sqparse2.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pg13 import sqparse2 3 | import ply.lex 4 | 5 | TOK_ATTRS = ('type','value','lineno','lexpos') 6 | def mktok(tpname,tokval,a,b): 7 | tok = ply.lex.LexToken() 8 | for attr,val in zip(TOK_ATTRS,(tpname,tokval,a,b)): setattr(tok,attr,val) 9 | return tok 10 | def eqtok(self,other): 11 | "here's hoping I don't break something by monkey patching" 12 | return all(getattr(self,attr)==getattr(other,attr) for attr in TOK_ATTRS) 13 | 14 | ply.lex.LexToken.__eq__ = eqtok 15 | 16 | def test_mktok(): 17 | assert mktok('AC','B',1,2)==mktok('AC','B',1,2) 18 | assert mktok('A','B',1,2)!=mktok('AC','B',1,2) 19 | 20 | def shortlex(string): return [(tok.type,tok.value) for tok in sqparse2.lex(string)] 21 | 22 | def test_lex_select(): 23 | assert shortlex('select * from t1 where a=b') == [('kw_select','select'), ('*','*'), ('kw_from','from'), ('NAME','t1'), ('kw_where','where'), ('NAME','a'), ('=','='), ('NAME','b')] 24 | def test_lex_array(): 25 | assert shortlex('array[1,2,3]')==[('kw_array','array'), ('[','['), ('INTLIT','1'), (',',','), ('INTLIT','2'), (',',','), ('INTLIT','3'), (']',']')] 26 | def test_lex_strlit(): 27 | assert shortlex("'abc def \\'ghi'") == [('STRLIT',"'abc def \\'ghi'")] 28 | def test_lex_float(): 29 | assert shortlex('1.2') == [('INTLIT','1'), ('.','.'), ('INTLIT','2')] 30 | def test_lex_long_toks(): 31 | from pg13.sqparse2 import NameX,OpX,BinX 32 | assert shortlex('a is not b')==[('NAME','a'),('kw_is','is'),('kw_not','not'),('NAME','b')] 33 | assert sqparse2.parse('a is not b')==BinX(OpX('is not'),NameX('a'),NameX('b')) 34 | assert shortlex('a != b')[1]==('CMP','!=') 35 | assert shortlex('a = b')[1]==('=','=') 36 | 37 | @pytest.mark.xfail 38 | def test_reentrant_lexing(): 39 | raise NotImplementedError('hmm') 40 | 41 | def test_parse_math(): 42 | from pg13.sqparse2 import Literal,OpX,BinX,UnX 43 | assert sqparse2.parse('1.5')==Literal(1.5) 44 | assert sqparse2.parse('1.5 + 3')==BinX(OpX('+'),Literal(1.5),Literal(3)) 45 | def test_parse_array(): 46 | from pg13.sqparse2 import ArrayLit,Literal 47 | arr = ArrayLit([Literal(1),Literal(2),Literal(3)]) 48 | assert arr==sqparse2.parse('{1,2,3}') 49 | assert arr==sqparse2.parse('array[1,2,3]') 50 | def test_parse_case(): 51 | from pg13.sqparse2 import CaseX,WhenX,Literal,BinX,OpX,NameX 52 | assert sqparse2.parse('case when 1 then 10 else 20 end')==CaseX( 53 | [WhenX(Literal(1),Literal(10))], 54 | Literal(20) 55 | ) 56 | print(sqparse2.parse('case when 1 then 10 when x=5 then 11 else 5 end')) 57 | assert sqparse2.parse('case when 1 then 10 when x=5 then 11 else 5 end')==CaseX( 58 | [WhenX(Literal(1),Literal(10)), 
WhenX(BinX(OpX('='),NameX('x'),Literal(5)),Literal(11))], 59 | Literal(5) 60 | ) 61 | assert sqparse2.parse('case when 1 then 10 end')==CaseX( 62 | [WhenX(Literal(1),Literal(10))], 63 | None 64 | ) 65 | def test_parse_attr(): 66 | from pg13.sqparse2 import NameX,AsterX,AttrX 67 | assert sqparse2.parse('hello.abc')==AttrX(NameX('hello'),NameX('abc')) 68 | assert sqparse2.parse('hello.*')==AttrX(NameX('hello'),AsterX()) 69 | def test_parse_call(): 70 | from pg13.sqparse2 import CallX,Literal,NameX,CommaX 71 | assert sqparse2.parse('call(1,2,3)')==CallX('call',CommaX([Literal(1), Literal(2), Literal(3)])) 72 | def test_parse_select(): 73 | from pg13.sqparse2 import SelectX,CommaX,AsterX,AliasX,BinX,OpX,NameX 74 | assert sqparse2.parse('select * from t1 where a=b group by a')==SelectX( 75 | CommaX([AsterX()]),['t1'], 76 | BinX(OpX('='),NameX('a'),NameX('b')), 77 | NameX('a'), 78 | None,None,None 79 | ) 80 | 81 | @pytest.mark.xfail 82 | def test_operator_order(): 83 | raise NotImplementedError 84 | 85 | def test_select_from_as(): 86 | fromx=sqparse2.parse("select * from (select a as alias from t1 where userid=1) as sub group by tag").tables[0] 87 | assert isinstance(fromx,sqparse2.AliasX) and isinstance(fromx.name,sqparse2.SelectX) and fromx.alias=='sub' 88 | def test_call_as(): 89 | from pg13.sqparse2 import AliasX,CallX,CommaX,NameX 90 | assert sqparse2.parse('select unnest(a) as b from t1').cols.children[0]==AliasX(CallX('unnest',CommaX([NameX('a')])),'b') 91 | 92 | def test_cast(): 93 | from pg13.sqparse2 import CommaX,CastX,Literal,TypeX 94 | assert sqparse2.parse('12345::text') == CastX(Literal(12345),TypeX('text',None)) 95 | # todo: what happens when a more complicated expression is before the cast? read specs and do more tests 96 | assert sqparse2.parse('cast(12345 as text)') == CastX(Literal(12345), TypeX('text',None)) 97 | 98 | def test_parse_transactions(): 99 | from pg13.sqparse2 import StartX,CommitX,RollbackX 100 | assert sqparse2.parse('start transaction') == StartX() 101 | assert sqparse2.parse('commit') == CommitX() 102 | assert sqparse2.parse('rollback') == RollbackX() 103 | 104 | def test_drop(): 105 | from pg13.sqparse2 import DropX 106 | assert DropX(False,'t1',False) == sqparse2.parse('drop table t1') 107 | assert DropX(False,'t1',True) == sqparse2.parse('drop table t1 cascade') 108 | assert DropX(True,'t1',False) == sqparse2.parse('drop table if exists t1') 109 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | > warning: despite recent CI commits, this repo is stale. Use it very cautiously, consider opening an issue with your use case before investing any time. 2 | 3 | # pg13 4 | 5 | - [intro](#intro) 6 | - [examples](#examples) 7 | - [status](#status) 8 | - [sql implementation](#pure-python-implementation-of-sql) 9 | 10 | **install** with `pip install pg13[psyco]` 11 | 12 | **docs** at http://pg13.readthedocs.org/en/latest/ 13 | 14 | ## intro 15 | 16 | pg13 is an in-memory SQL engine for python designed for fast & isolated tests (i.e. each test gets its own database). 17 | 18 | There are three ways to use pg13 in your code: 19 | * normal DBAPI 20 | * pg13 implements a SQLAlchemy dialect. For now, it's incomplete and not well-tested. 
21 | * there's a small built-in ORM in the pg13 library 22 | 23 | You should consider using pg13 in your own software if: 24 | * you spend a lot of time manually specifying the behavior of database mocks 25 | * your test suite interacts with an external DB and that causes problems 26 | * your database layer has no tests because integration environments are too painful to set up 27 | * you need SQL semantics on top of non-sql data (i.e. python lists) (note: sqlite has a foreign data plugin that will also do this) 28 | 29 | `pg13` can also be used to provide a SQL frontend to python datastructures. For example, if your program produces stats and you want users to be able to query those stats with SQL. Or if you have a config API and you want clients to be able to edit the configs with strong typing & SQL locking semantics. 30 | 31 | But beware: this is alpha software. 32 | 33 | Drop me a line if you're using the library. `@gmail: awinter.public` 34 | 35 | ## examples 36 | 37 | Note: everything below is happening in-python and in-memory. Each instance (table dictionary) is completely isolated so your tests can run in parallel or whatever, you don't need a live DB on your system. Interacting with a live database looks exactly the same as below except for creating the pool and the pool.tables lines. 38 | ```python 39 | # create a model 40 | from pg13 import pg, pgmock_dbapi2 41 | class Model(pg.Row): 42 | FIELDS = [('userid','int'),('id2','int'),('content','text')] 43 | PKEY = 'userid,id2' 44 | TABLE = 'model' 45 | ``` 46 | Connection setup. The pool object is passed into all the ORM methods, so it's a one-stop shop for switching between test and prod. 47 | ```python 48 | pool = pgmock_dbapi2.PgPoolMock() 49 | ``` 50 | Create table and do an insert. 51 | ```python 52 | Model.create_table(pool) 53 | Model.insert_all(pool, 1, 2, 'three') 54 | # everything is stored like you'd expect: 55 | assert pool.tables['model'].rows == [[1, 2, 'three']] 56 | ``` 57 | Here's an example of querying the SQL engine directly. This code is running in-process without talking to an external database. 58 | ```python 59 | with pool() as dbcon, dbcon.cursor() as cur: 60 | cur.execute('select userid,id2 from model where userid=2-1') 61 | assert cur.fetchall()==[[1,2]] 62 | ``` 63 | 64 | **todo:** SQLAlchemy example 65 | 66 | ## status 67 | 68 | This is alpha software. That means you **should** use it for these reasons: 69 | 1. you're interested in contributing to the library 70 | 1. your organization can benefit from a tool like this and has enough developers to add features where needed 71 | 1. your existing test suite is so slow that people aren't running it 72 | 73 | But you **shouldn't** use it for these reasons: 74 | 1. don't expect the software to be complete or reliable 75 | 1. don't expect it to accurately mimic your prod SQL server in all cases 76 | 1. don't expect it to 'just work' 77 | 1. you rely on complicated join behavior 78 | 79 | SQL is a standard, and many implementations don't replicate the standard exactly. This one also doesn't. 80 | 81 | Run `pip install . && py.test` in the root dir to see if pg13 will work on your system. 82 | 83 | Supported SQL features: 84 | * commands: select, insert, update, create/drop table, delete (with syntax limitations) 85 | * scalar subqueries (i.e. `select * from t1 where a=(select b from t2 where c=true)`) 86 | * various join syntax (but without a serious query planner, it's not efficient on large tables) 87 | * sub-selects with alias, i.e. 
temporary tables in select commands 88 | * group by seems to work in simple cases, expect bugs 89 | * some array functions (including unnest) and operators 90 | * text search support is limited (limited versions of to_tsvector, to_tsquery, @@) 91 | * serial columns 92 | * :: casting operator (not all types supported) 93 | * transactions exist but are very crude. in theory they're thread-safe but that's not tested. locking is database-level (i.e. no reading from one table while mutating another). pg13 will do a rollback when there's an error. transactions copy the whole DB, so there may be performance issues for large DBs. 94 | * transactional DDL; create/drop statements are isolated and can be rolled back 95 | 96 | Missing SQL features: 97 | * alter table 98 | * common table expressions (`with t0 as (select * from t1 where a=5) select * from t0,t2 where t0.a=t2.a`) 99 | * indexes and constraints (`create index` statements will parse but are a no-op) 100 | * asc and desc keywords in `order by` expressions (asc by default; but you can use a minus sign to simulate desc in some cases) 101 | * type checking (a correct simulation of unicode quirks is particularly lacking) 102 | * lots of functions and operators 103 | * partitioning 104 | * window functions 105 | * anything unique to oracle or mysql 106 | * datetime type & interval math are not supported in syntax. if you pass python datetimes as subbed literals it might work. 107 | * stored procs 108 | * prepared statements 109 | 110 | ## pure-python implementation of SQL 111 | 112 | If you're looking for a pure-python SQL engine (an evaluator, not just a parser), you may be in the right place. pg13's SQL logic weighs in around 1000 lines (600 logic + 350 parser). 113 | 114 | See also: 115 | * http://gadfly.sourceforge.net/gadfly.html 116 | * https://pypi.python.org/pypi/engine 117 | * https://github.com/itissid/pypgTAP 118 | * cautionary note: [don't test with sqlite when you use postgres](http://michael.robellard.com/2015/07/dont-test-with-sqllite-when-you-use.html) 119 | * (todo: link to the rust SQL evaluator) 120 | -------------------------------------------------------------------------------- /pg13/table.py: -------------------------------------------------------------------------------- 1 | "table -- Table class" 2 | 3 | import collections 4 | from . import pg, threevl, sqparse2, sqex 5 | 6 | # errors 7 | class PgExecError(sqparse2.PgMockError): 8 | "base class for errors during table execution" 9 | class BadFieldName(PgExecError): 10 | pass 11 | class IntegrityError(PgExecError): 12 | pass # careful: pgmock_dbapi also defines this 13 | 14 | class Missing: "for distinguishing missing columns vs passed-in null" 15 | 16 | def expand_row(table_fields, fields, values): 17 | "helper for insert. turn (field_names, values) into the full-width, properly-ordered row" 18 | table_fieldnames = [f.name for f in table_fields] 19 | reverse_indexes = {table_fieldnames.index(f): i for i, f in enumerate(fields)} 20 | indexes = [reverse_indexes.get(i) for i in range(len(table_fields))] 21 | return [(Missing if i is None else values[i]) for i in indexes] 22 | 23 | def emergency_cast(colx, value): 24 | """ugly: this is a huge hack. get serious about where this belongs in the architecture. 25 | For now, most types rely on being fed in as SubbedLiteral. 
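Hedged sketch of the one case handled below: for a column whose coltp.type is 'boolean',
emergency_cast(colx, 'true') returns True, emergency_cast(colx, False) returns False, and
an unrecognized name raises KeyError; every non-boolean type passes through unchanged.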
26 | """ 27 | if colx.coltp.type.lower() == 'boolean': 28 | if isinstance(value, sqparse2.NameX): 29 | value = value.name 30 | if isinstance(value, bool): 31 | return value 32 | return dict(true=True, false=False)[value.lower()] # keyerror if other 33 | else: 34 | return value # todo: type check? 35 | 36 | def field_default(colx, table_name, tables_dict): 37 | "takes sqparse2.ColX, Table" 38 | if colx.coltp.type.lower() == 'serial': 39 | next_id = sqparse2.parse('select coalesce(max(%s),-1)+1 from %s' % (colx.name, table_name)) 40 | return sqex.run_select(next_id, tables_dict, Table)[0] 41 | elif colx.not_null: 42 | raise NotImplementedError('todo: not_null error') 43 | else: 44 | return toliteral(colx.default) 45 | 46 | FieldLookup = collections.namedtuple('FieldLookup', 'index type') 47 | def toliteral(probably_literal): 48 | # todo: among the exception cases are Missing, str. go through cases and make this cleaner. the test suite alone has multiple types here. 49 | if probably_literal == sqparse2.NameX('null'): 50 | return None 51 | return probably_literal.toliteral() if hasattr(probably_literal, 'toliteral') else probably_literal 52 | 53 | class Table: 54 | def __init__(self, name, fields, pkey): 55 | "fields is a list of sqparse2.ColX" 56 | self.name, self.fields, self.pkey = name, fields, (pkey or []) 57 | self.rows = [] 58 | self.child_tables = [] # tables that inherit from this one 59 | self.parent_table = None # table this inherits from 60 | 61 | def get_column(self, name): 62 | col = next((f for f in self.fields if f.name == name), None) 63 | if col is None: 64 | raise KeyError(name) 65 | return col 66 | 67 | def pkey_get(self, row): 68 | if len(self.pkey) > 0: 69 | indexes = [i for i, f in enumerate(self.fields) if f.name in self.pkey] 70 | if len(indexes) != len(self.pkey): 71 | raise ValueError('bad pkey') 72 | pkey_vals = list(map(row.__getitem__, indexes)) 73 | return next((r for r in self.rows if pkey_vals == list(map(r.__getitem__, indexes))), None) 74 | else: 75 | # warning: is this right? it's saying that if not given, the pkey is the whole row. test dupe inserts on a real DB. 76 | return row if row in self.rows else None 77 | 78 | def fix_rowtypes(self, row): 79 | if len(row) != len(self.fields): 80 | raise ValueError 81 | return list(map(toliteral, row)) 82 | 83 | def apply_defaults(self, row, tables_dict): 84 | "apply defaults to missing cols for a row that's being inserted" 85 | return [ 86 | emergency_cast(colx, field_default(colx, self.name, tables_dict) if v is Missing else v) 87 | for colx, v in zip(self.fields, row) 88 | ] 89 | 90 | def insert(self, fields, values, returning, tables_dict): 91 | nix = sqex.NameIndexer.ctor_name(self.name) 92 | nix.resolve_aonly(tables_dict, Table) 93 | expanded_row = self.fix_rowtypes(expand_row(self.fields, fields, values) if fields else values) 94 | row = self.apply_defaults(expanded_row, tables_dict) 95 | # todo: check ColX.not_null here. figure out what to do about null pkey field 96 | for i, elt in enumerate(row): 97 | # todo: think about dependency model if one field relies on another. (what do I mean? 'insert into t1 (a,b) values (10,a+5)'? is that valid?) 
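# hedged note on the example above: this loop evaluates cells left to right against the same
# row list it is mutating, so in 'insert into t1 (a,b) values (10,a+5)' column b would see a's
# already-evaluated 10, whereas a reference to a *later* column would see the raw expression node.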
98 | row[i] = sqex.Evaluator(row, nix, tables_dict).eval(elt) 99 | if self.pkey_get(row): 100 | raise pg.DupeInsert(row) 101 | self.rows.append(row) 102 | if returning: 103 | return sqex.Evaluator((row,), nix, tables_dict).eval(returning) 104 | return None 105 | 106 | def match(self, where, tables, nix): 107 | return [r for r in self.rows if not where or threevl.ThreeVL.test(sqex.Evaluator((r,), nix, tables).eval(where))] 108 | 109 | def lookup(self, name): 110 | if isinstance(name, sqparse2.NameX): 111 | name = name.name # this is horrible; be consistent 112 | try: 113 | return FieldLookup(*next((i, f) for i, f in enumerate(self.fields) if f.name == name)) 114 | except StopIteration: 115 | # todo: confirm that next() still raises StopIteration on py3 116 | raise BadFieldName(name) 117 | 118 | def update(self, setx, where, returning, tables_dict): 119 | nix = sqex.NameIndexer.ctor_name(self.name) 120 | nix.resolve_aonly(tables_dict, Table) 121 | if not all(isinstance(x, sqparse2.AssignX) for x in setx): 122 | raise TypeError('not_xassign', list(map(type, setx))) 123 | match_rows = self.match(where, tables_dict, nix) if where else self.rows 124 | for row in match_rows: 125 | for expr in setx: 126 | row[self.lookup(expr.col).index] = sqex.Evaluator((row,), nix, tables_dict).eval(expr.expr) 127 | if returning: 128 | # todo: write a test for the empty case, make sure this doesn't crash. Should I set row to None at the top or is it not that simple? 129 | # pylint: disable=undefined-loop-variable 130 | return sqex.Evaluator((row,), nix, tables_dict).eval(returning) 131 | return None 132 | 133 | def delete(self, where, tables_dict): 134 | # todo: what's the deal with nested selects in delete. does it get evaluated once to a scalar before running the delete? 135 | # todo: this will crash with empty where clause 136 | nix = sqex.NameIndexer.ctor_name(self.name) 137 | nix.resolve_aonly(tables_dict, Table) 138 | # todo: why 'not' below? 139 | self.rows = [r for r in self.rows if not sqex.Evaluator((r,), nix, tables_dict).eval(where)] 140 | -------------------------------------------------------------------------------- /pg13/pgmock.py: -------------------------------------------------------------------------------- 1 | "table class and apply_sql. this is weirdly codependent with sqex.py" 2 | 3 | # todo: type checking of literals based on column. flag-based (i.e. not all DBs do this) cast strings to unicode. 4 | 5 | import contextlib, threading, copy 6 | from . 
import sqparse2, sqex, table
7 |
8 | class TablesDict:
9 | "dictionary wrapper that knows about transactions"
10 | # todo: bite the bullet and rename this Database
11 | def __init__(self):
12 | self.lock = threading.Lock()
13 | self.levels = [{}]
14 | self.transaction = False
15 | self.transaction_owner = None
16 |
17 | def __getitem__(self, k):
18 | return self.levels[-1][k]
19 |
20 | def __setitem__(self, key, val):
21 | self.levels[-1][key] = val
22 |
23 | def __contains__(self, key):
24 | return key in self.levels[-1]
25 |
26 | def __delitem__(self, key):
27 | del self.levels[-1][key]
28 |
29 | def update(self, *args, **kwargs):
30 | self.levels[-1].update(*args, **kwargs)
31 |
32 | def __iter__(self):
33 | return iter(self.levels[-1])
34 |
35 | def keys(self):
36 | return list(self.levels[-1].keys())
37 |
38 | def values(self):
39 | return list(self.levels[-1].values())
40 |
41 | def items(self):
42 | return list(self.levels[-1].items())
43 |
44 | @contextlib.contextmanager
45 | def tempkeys(self):
46 | """Add a new level to make new keys temporary. Used instead of copy in sqex.
47 | This may *seem* similar to a transaction but the tables are not being duplicated, just referenced.
48 | At __exit__, old dict is restored (but changes to Tables remain).
49 | """
50 | self.levels.append(dict(self.levels[-1]))
51 | try:
52 | yield
53 | finally:
54 | self.levels.pop()
55 |
56 | def trans_start(self, lockref):
57 | self.lock.acquire()
58 | if self.transaction:
59 | raise RuntimeError('in transaction after acquiring lock')
60 | self.levels.append(copy.deepcopy(self.levels[0])) # i.e. copy all the tables, too
61 | self.transaction = True
62 | self.transaction_owner = lockref
63 |
64 | def trans_commit(self):
65 | if not self.transaction:
66 | raise RuntimeError('commit not in transaction')
67 | self.levels = [self.levels[1]]
68 | self.transaction = False
69 | self.lock.release()
70 |
71 | def trans_rollback(self):
72 | if not self.transaction:
73 | raise RuntimeError('rollback not in transaction')
74 | self.levels = [self.levels[0]]
75 | self.transaction = False
76 | self.lock.release()
77 |
78 | @contextlib.contextmanager
79 | def lock_db(self, lockref, is_start):
80 | if self.transaction and self.transaction_owner is lockref:
81 | # note: this case is relying on the fact that if the above is true, our thread did it,
82 | # therefore the lock can't be released on our watch.
83 | yield
84 | elif is_start:
85 | yield # apply_sql will call trans_start() on its own, block there if necessary
86 | else:
87 | with self.lock:
88 | yield
89 |
90 | def cascade_delete(self, name):
91 | "this fails under diamond inheritance"
92 | for child in self[name].child_tables:
93 | self.cascade_delete(child.name)
94 | del self[name]
95 |
96 | def create(self, ex):
97 | "helper for apply_sql in CreateX case"
98 | if ex.name in self:
99 | if ex.nexists:
100 | return
101 | raise ValueError('table_exists', ex.name)
102 | if any(c.pkey for c in ex.cols):
103 | if ex.pkey:
104 | raise sqparse2.SQLSyntaxError("don't mix table-level and column-level pkeys", ex)
105 | # todo(spec): is multi pkey permitted when defined per column?
106 | ex.pkey = sqparse2.PKeyX([c.name for c in ex.cols if c.pkey])
107 | if ex.inherits:
108 | # todo: what if child table specifies constraints etc? this needs work.
109 | if len(ex.inherits) > 1:
110 | raise NotImplementedError('todo: multi-table inherit')
111 | parent = self[ex.inherits[0]] = copy.deepcopy(self[ex.inherits[0]]) # copy so rollback works
112 | child = self[ex.name] = table.Table(ex.name, parent.fields, parent.pkey)
113 | parent.child_tables.append(child)
114 | child.parent_table = parent
115 | else:
116 | self[ex.name] = table.Table(ex.name, ex.cols, ex.pkey.fields if ex.pkey else [])
117 |
118 | def drop(self, ex):
119 | "helper for apply_sql in DropX case"
120 | # todo: factor out inheritance logic (for readability)
121 | if ex.name not in self:
122 | if ex.ifexists:
123 | return
124 | raise KeyError(ex.name)
125 | table_ = self[ex.name]
126 | parent = table_.parent_table
127 | if table_.child_tables:
128 | if not ex.cascade:
129 | raise table.IntegrityError('delete_parent_without_cascade', ex.name)
130 | self.cascade_delete(ex.name)
131 | else: del self[ex.name]
132 | if parent:
133 | parent.child_tables.remove(table_)
134 |
135 | # pylint: disable=inconsistent-return-statements
136 | def apply_sql(self, ex, values, lockref):
137 | """call the stmt in tree with values subbed on the tables in this TablesDict.
138 | ex is a parsed statement returned by sqparse2.parse.
139 | values is the tuple of %s replacements.
140 | lockref can be anything as long as it stays the same; it's used for assigning transaction ownership.
141 | (safest is to make it a pgmock_dbapi2.Connection, because that will rollback on close)
142 | """
143 | sqex.depth_first_sub(ex, values)
144 | with self.lock_db(lockref, isinstance(ex, sqparse2.StartX)):
145 | sqex.replace_subqueries(ex, self, table.Table)
146 | if isinstance(ex, sqparse2.SelectX):
147 | return sqex.run_select(ex, self, table.Table)
148 | elif isinstance(ex, sqparse2.InsertX):
149 | return self[ex.table].insert(ex.cols, ex.values, ex.ret, self)
150 | elif isinstance(ex, sqparse2.UpdateX):
151 | if len(ex.tables) != 1:
152 | raise NotImplementedError('multi-table update')
153 | return self[ex.tables[0]].update(ex.assigns, ex.where, ex.ret, self)
154 | elif isinstance(ex, sqparse2.CreateX):
155 | self.create(ex)
156 | elif isinstance(ex, sqparse2.IndexX):
157 | pass
158 | elif isinstance(ex, sqparse2.DeleteX):
159 | return self[ex.table].delete(ex.where, self)
160 | elif isinstance(ex, sqparse2.StartX):
161 | self.trans_start(lockref)
162 | elif isinstance(ex, sqparse2.CommitX):
163 | self.trans_commit()
164 | elif isinstance(ex, sqparse2.RollbackX):
165 | self.trans_rollback()
166 | elif isinstance(ex, sqparse2.DropX):
167 | self.drop(ex)
168 | else:
169 | raise TypeError(type(ex)) # pragma: no cover
170 | -------------------------------------------------------------------------------- /docs/Makefile: --------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 |
22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
23 |
24 | help:
25 | @echo "Please use \`make <target>' where <target> is one of"
26 | @echo " html to make standalone HTML files"
27 | @echo " dirhtml to make HTML files named index.html in directories"
28 | @echo " singlehtml to make a single large HTML file"
29 | @echo " pickle to make pickle files"
30 | @echo " json to make JSON files"
31 | @echo " htmlhelp to make HTML files and a HTML help project"
32 | @echo " qthelp to make HTML files and a qthelp project"
33 | @echo " devhelp to make HTML files and a Devhelp project"
34 | @echo " epub to make an epub"
35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
36 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
38 | @echo " text to make text files"
39 | @echo " man to make manual pages"
40 | @echo " texinfo to make Texinfo files"
41 | @echo " info to make Texinfo files and run them through makeinfo"
42 | @echo " gettext to make PO message catalogs"
43 | @echo " changes to make an overview of all changed/added/deprecated items"
44 | @echo " xml to make Docutils-native XML files"
45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
46 | @echo " linkcheck to check all external links for integrity"
47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
48 |
49 | clean:
50 | rm -rf $(BUILDDIR)/*
51 |
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | dirhtml:
58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
59 | @echo
60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
61 |
62 | singlehtml:
63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
64 | @echo
65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
66 |
67 | pickle:
68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
69 | @echo
70 | @echo "Build finished; now you can process the pickle files."
71 |
72 | json:
73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
74 | @echo
75 | @echo "Build finished; now you can process the JSON files."
76 |
77 | htmlhelp:
78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
79 | @echo
80 | @echo "Build finished; now you can run HTML Help Workshop with the" \
81 | ".hhp project file in $(BUILDDIR)/htmlhelp."
82 |
83 | qthelp:
84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
85 | @echo
86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pg13.qhcp"
89 | @echo "To view the help file:"
90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pg13.qhc"
91 |
92 | devhelp:
93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
94 | @echo
95 | @echo "Build finished."
96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/pg13" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pg13" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
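# Typical invocations of the targets above (a sketch; assumes sphinx-build is on PATH and
# that you run from the docs/ directory):
#   make html       # render the docs into _build/html
#   make linkcheck  # check external links; report lands in _build/linkcheck/output.txt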
178 | -------------------------------------------------------------------------------- /docs/make.bat: --------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
20 | echo.Please use `make ^<target^>` where ^<target^> is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
31 | echo. text to make text files
32 | echo. man to make manual pages
33 | echo. texinfo to make Texinfo files
34 | echo. gettext to make PO message catalogs
35 | echo. changes to make an overview over all changed/added/deprecated items
36 | echo. xml to make Docutils-native XML files
37 | echo. pseudoxml to make pseudoxml-XML files for display purposes
38 | echo. linkcheck to check all external links for integrity
39 | echo. doctest to run all doctests embedded in the documentation if enabled
40 | goto end
41 | )
42 |
43 | if "%1" == "clean" (
44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
45 | del /q /s %BUILDDIR%\*
46 | goto end
47 | )
48 |
49 |
50 | %SPHINXBUILD% 2> nul
51 | if errorlevel 9009 (
52 | echo.
53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
54 | echo.installed, then set the SPHINXBUILD environment variable to point
55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
56 | echo.may add the Sphinx directory to PATH.
57 | echo.
58 | echo.If you don't have Sphinx installed, grab it from
59 | echo.http://sphinx-doc.org/
60 | exit /b 1
61 | )
62 |
63 | if "%1" == "html" (
64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
65 | if errorlevel 1 exit /b 1
66 | echo.
67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
68 | goto end
69 | )
70 |
71 | if "%1" == "dirhtml" (
72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
73 | if errorlevel 1 exit /b 1
74 | echo.
75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
76 | goto end
77 | )
78 |
79 | if "%1" == "singlehtml" (
80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
81 | if errorlevel 1 exit /b 1
82 | echo.
83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
84 | goto end
85 | )
86 |
87 | if "%1" == "pickle" (
88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
89 | if errorlevel 1 exit /b 1
90 | echo.
91 | echo.Build finished; now you can process the pickle files.
92 | goto end
93 | )
94 |
95 | if "%1" == "json" (
96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
97 | if errorlevel 1 exit /b 1
98 | echo.
99 | echo.Build finished; now you can process the JSON files.
100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pg13.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pg13.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 
214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /test_pg13/test_pg.py: -------------------------------------------------------------------------------- 1 | "pg.Row tests using pgmock" 2 | 3 | import pytest,collections,json 4 | from pg13 import pg,pgmock,misc,pgmock_dbapi2 5 | 6 | class Model(pg.Row): 7 | FIELDS = [('userid','int'),('id2','int'),('content','text'),('arr',list)] 8 | PKEY = 'userid,id2' 9 | TABLE = 'model' 10 | @pg.dirty('content') 11 | def content_len(self): return len(self['content']) 12 | 13 | USERID=1 14 | 15 | def prepmock(*schemas): 16 | "helper; create a mock environment" 17 | pool=pgmock_dbapi2.PgPoolMock() 18 | for s in schemas: s.create_table(pool) 19 | return misc.EnvBundle(pool,None,None,USERID,None,None) 20 | 21 | @pytest.mark.xfail 22 | def test_create_indexes(): raise NotImplementedError # ignoring; no support for indexes in pgmock 23 | def test_create_table(): 24 | ebuns=prepmock(Model) 25 | assert 4==len(ebuns.pool.tables['model'].fields) 26 | assert ['userid','id2']==ebuns.pool.tables['model'].pkey 27 | def test_get(): assert 'a'==Model(0,1,'a','[]')['content'] 28 | def test_get_jsonfields(): assert []==Model(0,1,'a',[])['arr'] 29 | def test_index(): assert all(i==Model.index(name) for i,(name,tp) in enumerate(Model.FIELDS)) 30 | 31 | def populate(n=3,nusers=2): 32 | "helper" 33 | ebuns=prepmock(Model) 34 | for i in range(nusers): 35 | for j in range(n): Model.insert_all(ebuns.pool,i,j,'a',[]) 36 | return ebuns 37 | 38 | def test_pkey_get(): 39 | ebuns=populate(3) 40 | Model.pkey_get(ebuns.pool,1,0) 41 | with pytest.raises(pg.Missing): Model.pkey_get(ebuns.pool,3,0) 42 | def test_select(): 43 | ebuns=populate(3) 44 | assert [[0,1,'a',[]],[1,1,'a',[]]]==list(Model.select(ebuns.pool,id2=1)) 45 | def test_row_eq(): 46 | m1=Model(1,0,'a',[]) 47 | assert m1==Model(1,0,'a',[]) 48 | assert m1!=Model(1,2,'a',[]) 49 | assert not m1==0 50 | assert m1!=0 51 | def test_select_models(): 52 | ebuns=populate(3) 53 | assert [Model(0,1,'a',[]),Model(1,1,'a',[])]==list(Model.select_models(ebuns.pool,id2=1)) 54 | assert 6==len(list(Model.select_models(ebuns.pool))) # select all used to not work 55 | def test_selectwhere(): 56 | ebuns=populate(3) 57 | assert [Model(0,2,'a',[])]==list(Model.selectwhere(ebuns.pool,0,'id2>%s',(1,))) 58 | def test_insert(): 59 | ebuns=prepmock(Model) 60 | Model.insert(ebuns.pool,['userid','id2'],[0,1]) 61 | with pytest.raises(pg.DupeInsert): 62 | Model.insert(ebuns.pool,['userid','id2'],[0,1]) 63 | # todo: make sure it handles JSONFIELDS correctly 64 | assert [2]==Model.insert(ebuns.pool,['userid','id2'],[0,2],'id2') # todo: make sure this is the same against live DBs 65 | def test_insert_all(): 66 | ebuns=prepmock(Model) 67 | 
assert []==Model.insert_all(ebuns.pool,0,0,'a',[])['arr'] 68 | assert [[0,0,'a',[]]]==ebuns.pool.tables['model'].rows 69 | assert []==Model.pkey_get(ebuns.pool,0,0)['arr'] 70 | def test_kwinsert(): 71 | ebuns=prepmock(Model) 72 | Model.kwinsert(ebuns.pool,userid=0,id2=1,arr=[]) 73 | assert ebuns.pool.tables['model'].rows==[[0,1,None,[]]] 74 | # todo: test 'returning' feature 75 | @pytest.mark.xfail 76 | def test_checkdb(): raise NotImplementedError # todo: find out what PG supports and implement it for pgmock 77 | def test_insert_mtac(): 78 | ebuns=prepmock(Model) 79 | # todo: edit pg.Row.insert_mtac docs re raw_value -- this works with int but not 'userid':'0' 80 | assert 0==Model.insert_mtac(ebuns.pool,{'userid':0},'id2',('content',),('a',))['id2'] 81 | assert 1==Model.insert_mtac(ebuns.pool,{'userid':0},'id2',('content',),('a',))['id2'] 82 | def test_pkey_update(): 83 | # todo(awinter): raw_keys 84 | # todo(awinter): jsonfields and raw_keys vs escape_keys 85 | ebuns=populate(2) 86 | Model.pkey_update(ebuns.pool,(0,0),{'content':'whatever'}) 87 | assert [0,0,'whatever',[]]==Model.select(ebuns.pool,userid=0,id2=0)[0] 88 | def test_pkey_vals(): assert (10,20)==Model(10,20,'a','[]').pkey_vals() 89 | def test_update(): 90 | # def update(self,pool_or_cursor,escape_keys,raw_keys=None): 91 | ebuns=populate(2) 92 | Model(0,0,'a',[]).update(ebuns.pool,{'content':'whatever'}) 93 | assert [0,0,'whatever',[]]==Model.select(ebuns.pool,userid=0,id2=0)[0] 94 | m=Model(0,0,'a',[]) 95 | m.update(ebuns.pool,{'arr':[1,2]}) 96 | assert [1,2]==m['arr'],"update isn't re-serializing updated values" 97 | # todo(awinter): raw keys, jsonfields 98 | def test_update_rawkeys(): 99 | "there was a query construction bug for rawkeys without escapekeys" 100 | ebuns=populate(1,1) 101 | m=Model(0,0,'a',[]) 102 | m.update(ebuns.pool,{},{'content':"'whatever'"}) 103 | assert m['content']=='whatever' 104 | assert ebuns.pool.tables['model'].rows==[[0,0,'whatever',[]]] 105 | def test_updatewhere(): 106 | ebuns=populate(2) 107 | Model.updatewhere(ebuns.pool,{'userid':1},content='userid_1') 108 | for userid,_,content,_ in ebuns.pool.tables['model'].rows: 109 | assert content==('userid_1' if userid==1 else 'a') 110 | def test_repr(): assert ''==repr(Model(0,0,'a',[])) 111 | 112 | def test_selectxiny(): 113 | ebuns=populate(2) 114 | print(ebuns.pool.tables['model'].rows) 115 | # [, ] 116 | assert [Model(0,0,'a',[]),Model(0,1,'a',[])]==list(Model.select_xiny(ebuns.pool,0,'id2',[0,1])) 117 | assert [Model(0,0,'a',[])]==list(Model.select_xiny(ebuns.pool,0,'id2',[0])) 118 | assert []==list(Model.select_xiny(ebuns.pool,0,'id2',[])) 119 | assert [Model(0,0,'a',[])]==list(Model.select_xiny(ebuns.pool,0,'(id2,content)',[(0,'a')])) 120 | 121 | def test_delete(): 122 | ebuns=populate(2,2) 123 | Model(0,0,'a',[]).delete(ebuns.pool) 124 | assert 3==len(ebuns.pool.tables['model'].rows) 125 | assert not Model.select(ebuns.pool,userid=0,id2=0) 126 | 127 | @pytest.mark.xfail 128 | def test_clientmodel(): 129 | "this probably goes away with sync schemas" 130 | raise NotImplementedError 131 | 132 | class SerDesClass: 133 | "helper for test_specialfield_*" 134 | def __init__(self,x): self.x=x 135 | def ser(self,validate=True): return str(self.x) 136 | @staticmethod 137 | def des(blob,validate=True): return int(blob) 138 | nt=collections.namedtuple('nt','a b') 139 | SF_TESTS=[ 140 | # single pytype 141 | [(dict,),{'hello':'there'}], 142 | # tuple pytype 143 | [((list,nt),),[nt(1,2),nt(3,4)]], 144 | [((dict,nt),),dict(a=nt(1,2),b=nt(2,3))], 145 | 
[((set,nt),),{nt(1,2),nt(3,4)}], 146 | # serdes class 147 | [(SerDesClass,'class'),SerDesClass(5)], 148 | ] 149 | 150 | @pytest.mark.xfail 151 | def test_specialfield_ser(): 152 | from pg13.pg import SpecialField 153 | for sfargs,val in SF_TESTS: 154 | if sfargs[1:2]==('class',): assert val.ser(None)==SpecialField(*sfargs).ser(val) 155 | else: assert json.dumps(val)==SpecialField(*sfargs).ser(val) 156 | 157 | @pytest.mark.xfail 158 | def test_specialfield_des(): 159 | from pg13.pg import SpecialField 160 | # note: namedtuples evaluate equal to tuples, but json ser/deses to list. so if there are tuples in there, this is probably working. 161 | for sfargs,val in SF_TESTS: 162 | if sfargs[1:2]==('class',): assert sfargs[0].des(val.ser(),None)==SpecialField(*sfargs).des(val.ser()) 163 | else: assert val==SpecialField(*sfargs).des(json.dumps(val)) 164 | 165 | def test_dirtycache(): 166 | ebuns=prepmock(Model) 167 | m=Model.insert_all(ebuns.pool,0,0,'a',[]) 168 | assert 1==m.content_len() 169 | # 1. bypassing normal update procedure gets stale value 170 | m.values[m.index('content')]='abc' 171 | assert 1==m.content_len() 172 | # 2. escape_keys 173 | m.update(ebuns.pool,{'content':'abc'}) 174 | assert 3==m.content_len() 175 | # 3. raw_keys 176 | m.update(ebuns.pool,{},{'content':"'abcd'"}) 177 | assert 4==m.content_len() 178 | 179 | @pytest.mark.xfail 180 | def test_specialfield_nullhandling(): 181 | raise NotImplementedError # what's this supposed to do? 182 | 183 | @pytest.mark.xfail 184 | def test_refkeys(): 185 | # todo(PREOPENSOURCE): write this. for now, this is handled by test_syncapi.test_doc_refkeys in the oes codebase. 186 | raise NotImplementedError('todo') 187 | 188 | @pytest.mark.xfail 189 | def test_json_write(): 190 | "make sure JSON_WRITE toggle in cursor/pool works as advertised" 191 | raise NotImplementedError('todo') 192 | 193 | @pytest.mark.xfail 194 | def test_special_serialize(): 195 | "serialization for insert_all and kwinsert_mk is tricky, relies on JSON_READ, JSON_WRITE" 196 | # for insert_all 197 | # for kwinsert_mk 198 | raise NotImplementedError('todo') 199 | -------------------------------------------------------------------------------- /test_pg13/test_sqparse.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pg13 import sqparse2,pgmock 3 | 4 | def test_parse_arraylit(): 5 | v=sqparse2.ArrayLit((sqparse2.Literal(1),sqparse2.Literal(2),sqparse2.Literal("three"))) 6 | assert v==sqparse2.parse("array[1,2,'three']") 7 | assert v==sqparse2.parse("{1,2,'three'}") 8 | 9 | def test_parse_select(): 10 | # this is also testing nesting and various whatever 11 | from pg13.sqparse2 import NameX,CommaX,SelectX,Literal,ArrayLit,BinX,UnX,OpX,CallX,AsterX 12 | selx=sqparse2.parse('select *,coalesce(x+3,0),{4,5},array[1,2],(select i from tbl) from whatever where (z+-1=10) and (y<5.5)') 13 | assert selx.cols==CommaX(( 14 | AsterX(), 15 | CallX('coalesce',CommaX([ 16 | BinX(OpX('+'),NameX('x'),Literal(3)), 17 | Literal(0), 18 | ])), 19 | ArrayLit((Literal(4),Literal(5))), 20 | ArrayLit((Literal(1),Literal(2))), 21 | SelectX(CommaX([NameX('i')]),['tbl'],None,None,None,None,None), 22 | )) 23 | # nested select in comma 24 | assert selx.tables==['whatever'] 25 | assert selx.where==BinX( 26 | OpX('and'), 27 | BinX( 28 | OpX('='), 29 | BinX(OpX('+'),NameX('z'),UnX(OpX('-'),Literal(1))), 30 | Literal(10) 31 | ), 32 | BinX(OpX('<'),NameX('y'),Literal(5.5)) 33 | ) 34 | assert selx.limit is None and selx.order is None and selx.offset 
is None 35 | 36 | def test_parse_create(): 37 | # todo: real tests here instead of repr comparison 38 | from pg13.sqparse2 import Literal,NameX,CreateX,ColX,PKeyX,NullX,CheckX,BinX,Literal,OpX,TypeX 39 | assert sqparse2.parse('create table tbl (a int, b int, c text[])')==CreateX( 40 | False, 'tbl', [ 41 | ColX('a',TypeX('int',None),False,False,None,False), 42 | ColX('b',TypeX('int',None),False,False,None,False), 43 | ColX('c',TypeX('text',None),True,False,None,False), 44 | ], None, [], None 45 | ) 46 | assert sqparse2.parse('create table tbl (a int, b int, primary key (a,b))')==CreateX( 47 | False, 'tbl', [ 48 | ColX('a',TypeX('int',None),False,False,None,False), 49 | ColX('b',TypeX('int',None),False,False,None,False), 50 | ], PKeyX(['a','b']), [], None 51 | ) 52 | ex=sqparse2.parse('create table t1 (a int default 7, b int default null, d int primary key)') 53 | assert ex.cols[0].default==Literal(7) and ex.cols[1].default==NullX() and ex.cols[2].pkey 54 | assert sqparse2.parse('create table t1 (a int not null)').cols[0].not_null 55 | assert sqparse2.parse('create table if not exists t1 (a int not null)').nexists 56 | assert sqparse2.parse('create table t2 (check (a=5)) inherits (t1)') == CreateX( 57 | False,'t2',[],None,[CheckX(BinX(OpX('='),NameX('a'),Literal(5)))],['t1'] 58 | ) 59 | # test duplicate primary key 60 | with pytest.raises(sqparse2.SQLSyntaxError): sqparse2.parse('create table t (primary key (a,b),primary key (c,d))') 61 | # test varchar 62 | assert sqparse2.parse('create table t1 (a varchar(10))').cols[0].coltp == TypeX('varchar',10) 63 | 64 | def test_parse_insert(): 65 | from pg13.sqparse2 import InsertX,NameX,CommaX,Literal,ReturnX,AsterX 66 | assert sqparse2.parse('insert into t1 (a,b) values (1,2)')==InsertX( 67 | 't1', ['a','b'], [Literal(1),Literal(2)], None 68 | ) 69 | assert sqparse2.parse('insert into t1 values (1,2)')==InsertX( 70 | 't1', None, [Literal(1),Literal(2)], None 71 | ) 72 | assert sqparse2.parse('insert into t1 values (1,2) returning *')==InsertX( 73 | 't1', None, [Literal(1),Literal(2)], ReturnX(AsterX()) 74 | ) 75 | assert sqparse2.parse('insert into t1 values (1,2) returning (a,b)')==InsertX( 76 | 't1', None, [Literal(1),Literal(2)], ReturnX(CommaX((NameX('a'),NameX('b')))) 77 | ) 78 | 79 | def test_parse_update(): 80 | from pg13.sqparse2 import NameX,AssignX,BinX,OpX,Literal,ReturnX,CommaX 81 | x=sqparse2.parse('update t1 set a=5,d=x+9 where 35 > 50 returning (a,b+1)') 82 | assert x.tables==['t1'] 83 | assert x.assigns==[ 84 | AssignX('a',Literal(5)), 85 | AssignX('d',BinX(OpX('+'),NameX('x'),Literal(9))), 86 | ] 87 | assert x.where==BinX(OpX('>'),Literal(35),Literal(50)) 88 | assert x.ret==ReturnX(CommaX(( 89 | NameX('a'), 90 | BinX(OpX('+'),NameX('b'),Literal(1)), 91 | ))) 92 | 93 | def test_strlit(): 94 | from pg13.sqparse2 import Literal 95 | x=sqparse2.parse("select 'literal1','literal two','literal \\'three\\'' from t1") 96 | assert x.cols==sqparse2.CommaX((Literal('literal1'),Literal('literal two'),Literal("literal 'three'"))) 97 | 98 | def test_boolx(): 99 | "small-scale test of boolx parsing" 100 | from pg13.sqparse2 import Literal,NameX,OpX,BinX,UnX 101 | assert BinX(OpX('and'),BinX(OpX('<'),NameX('a'),Literal(5)),BinX(OpX('='),NameX('z'),Literal(3)))==sqparse2.parse('a<5 and z=3') 102 | assert BinX(OpX('<'),NameX('a'),UnX(OpX('-'),Literal(5)))==sqparse2.parse('a<-5') 103 | 104 | def is_balanced(binx): 105 | "helper for test_precedence" 106 | def unbalanced(outer, inner): return not is_balanced(inner) or inner.op < outer.op 107 | if 
isinstance(binx.left, sqparse2.BinX) and unbalanced(binx, binx.left): return False 108 | if isinstance(binx.right, sqparse2.BinX) and unbalanced(binx, binx.right): return False 109 | return True 110 | def test_precedence(): 111 | "check order of operations in boolx" 112 | # this isn't an awesome test; the parser might accidentally get it right. better than nothing. 113 | assert is_balanced(sqparse2.parse('a+1<5 and z=3 or z=6')) 114 | def test_unary_precedence(): 115 | assert isinstance(sqparse2.parse('select * from t1 where not a=0').where,sqparse2.UnX) 116 | assert isinstance(sqparse2.parse('select -a+1 from t1').cols.children[0],sqparse2.BinX) # warning: I'm not ensuring this outcome, it just happens to work. 117 | 118 | def test_parse_sub(): 119 | assert sqparse2.parse('select * from t1 where x=%s').where.right is sqparse2.SubLit 120 | 121 | def test_select_emptywhere(): 122 | with pytest.raises(sqparse2.SQLSyntaxError): sqparse2.parse('select * from t1 where') 123 | 124 | def test_multi_stmt(): 125 | "make sure that multi statement strings fail loudly (rather than silently skipping the extras)" 126 | with pytest.raises(sqparse2.SQLSyntaxError): sqparse2.parse('select * from t1; update t2 set a=3') 127 | 128 | def test_case(): 129 | "parse case stmt" 130 | x=sqparse2.parse('select case when x=3 then 10 when x=4 then 20 else 30 end from t1') 131 | assert len(x.cols.children)==1 # i.e. make sure the case isn't getting distributed across columns somehow 132 | casex,=x.cols.children 133 | assert len(casex.cases)==2 134 | assert casex.elsex==sqparse2.Literal(30) 135 | 136 | def test_parse_tuple_in(): 137 | x=sqparse2.parse('select * from t1 where (a,b) in %s') 138 | assert isinstance(x.where.left,sqparse2.CommaX) 139 | 140 | def test_parse_is_not(): assert sqparse2.parse('select * from t1 where a is not null').where.op.op=='is not' 141 | 142 | def test_parse_index(): 143 | stmts=[ 144 | sqparse2.parse('create index on t1 (a,b)'), 145 | sqparse2.parse('create index on t1 (a,b) where a<30'), 146 | sqparse2.parse('create index on t1 using gist (a,b) where x=5'), 147 | ] 148 | assert all(isinstance(x,sqparse2.IndexX) for x in stmts) 149 | 150 | def test_parse_delete(): 151 | from pg13.sqparse2 import NameX,OpX,BinX,Literal 152 | assert sqparse2.parse('delete from t1 where a=3')==sqparse2.DeleteX( 153 | 't1', 154 | BinX(OpX('='), NameX('a'), Literal(3)), 155 | None 156 | ) 157 | 158 | def test_attr(): 159 | from pg13.sqparse2 import AttrX,NameX 160 | assert sqparse2.parse('a.b')==AttrX(NameX('a'),NameX('b')) 161 | assert sqparse2.parse('a.*')==AttrX(NameX('a'),sqparse2.AsterX()) 162 | with pytest.raises(sqparse2.SQLSyntaxError): sqparse2.parse('a.b.c') 163 | 164 | def test_join_syntax(): 165 | from pg13.sqparse2 import JoinX,BinX,OpX,NameX,AttrX,CommaX,AsterX,JoinTypeX 166 | inner_join = JoinTypeX(None,False,None) 167 | ex=sqparse2.parse('select * from t1,t2 where t1.x=t2.y') 168 | assert all(isinstance(x,sqparse2.AttrX) for x in (ex.where.left,ex.where.right)) 169 | assert sqparse2.parse('select * from t1 join t2').tables==[ 170 | JoinX('t1','t2',None,inner_join) 171 | ] 172 | assert sqparse2.parse('select * from t1 join t2 on x=y').tables==[JoinX( 173 | 't1','t2', 174 | BinX(OpX('='), NameX('x'), NameX('y')), 175 | inner_join 176 | )] 177 | x = sqparse2.parse('select t1.* from t1 join t2 on x=y and z=a') 178 | assert x.cols==CommaX([AttrX(NameX('t1'),AsterX())]) 179 | assert x.tables==[JoinX( 180 | 't1','t2', 181 | 
BinX(OpX('and'),BinX(OpX('='),NameX('x'),NameX('y')),BinX(OpX('='),NameX('z'),NameX('a'))), 182 | inner_join 183 | )] 184 | assert sqparse2.parse('select t1.*,t2.* from t1 join t2 on x=y').cols==CommaX([ 185 | AttrX(NameX('t1'),AsterX()), 186 | AttrX(NameX('t2'),AsterX()) 187 | ]) 188 | assert sqparse2.parse('select * from t1 join t2 on t1.x=t2.y').tables==[JoinX( 189 | 't1','t2', 190 | BinX(OpX('='),AttrX(NameX('t1'),NameX('x')),AttrX(NameX('t2'),NameX('y'))), 191 | inner_join 192 | )] 193 | 194 | def test_jointype(): 195 | from pg13.sqparse2 import JoinTypeX 196 | assert JoinTypeX('left',True,None) == sqparse2.parse('select * from t1 left outer join t2').tables[0].jointype 197 | assert JoinTypeX(None,False,None) == sqparse2.parse('select * from t1 inner join t2').tables[0].jointype 198 | assert JoinTypeX('full',True,None) == sqparse2.parse('select * from t1 full join t2').tables[0].jointype 199 | 200 | def test_xgetset(): 201 | "tree-aware getitem/setitem for expressions" 202 | from pg13.sqparse2 import NameX,AttrX 203 | x = sqparse2.parse('select * from t1,t2 where t1.x=t2.y') 204 | # named get 205 | assert x['where','left','attr']==NameX('x') 206 | # named set 207 | x['where','left','attr'] = 'hello' 208 | assert x['where','left']==AttrX(NameX('t1'),'hello') 209 | # index get 210 | assert x[('tables',0),]=='t1' 211 | # index set 212 | x[('tables',0),] = 'hello' 213 | assert x.tables[0] == 'hello' 214 | assert x[('tables',0),] == 'hello' 215 | 216 | def test_mult_vs_aster(): 217 | "make sure that asterisk is handled differently from the multiplication operator" 218 | from pg13.sqparse2 import AsterX,BinX,OpX,NameX 219 | assert sqparse2.parse('select *,a*b from t1').cols.children==[ 220 | AsterX(), 221 | BinX(OpX('*'),NameX('a'),NameX('b')) 222 | ] 223 | -------------------------------------------------------------------------------- /pg13/pgmock_dbapi2.py: -------------------------------------------------------------------------------- 1 | "dbapi2 interface to pgmock" 2 | 3 | import functools, contextlib, collections 4 | from . import pgmock, sqparse2, pg 5 | # pylint: disable=arguments-differ 6 | 7 | # globals 8 | # pylint: disable=invalid-name 9 | apilevel = '2.0' 10 | threadsafety = 1 # 1 means module-level. I think pgmock with transaction locking as-is is fully threadsafe, so write tests and bump this to 3. 
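# hedged usage sketch of the DBAPI surface defined below (connect/Connection and Cursor;
# paramstyle 'format' means %s placeholders):
#   con = connect()  # db_id=None creates a fresh in-memory database
#   with con.cursor() as cur:
#     cur.execute('create table t1 (a int)')
#     cur.execute('insert into t1 values (%s)', (1,))
#     cur.execute('select * from t1')
#     assert cur.fetchall() == [[1]]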
11 | paramstyle = 'format' 12 | 13 | # global dictionary of databases (necessary so different connections can access the same DB) 14 | DATABASES = {} 15 | NEXT_DB_ID = 0 16 | 17 | def add_db(): 18 | # pylint: disable=global-statement 19 | global NEXT_DB_ID 20 | db_id, NEXT_DB_ID = NEXT_DB_ID, NEXT_DB_ID + 1 21 | DATABASES[db_id] = pgmock.TablesDict() 22 | print('created db %i' % db_id) 23 | return db_id 24 | 25 | # todo: catch pgmock errors and raise these 26 | class Error(Exception): 27 | pass 28 | 29 | class InterfaceError(Error): 30 | pass 31 | 32 | class DatabaseError(Error): 33 | pass 34 | 35 | class DataError(DatabaseError): 36 | pass 37 | 38 | class OperationalError(DatabaseError): 39 | pass 40 | 41 | class IntegrityError(DatabaseError): 42 | pass 43 | 44 | class InternalError(DatabaseError): 45 | pass 46 | 47 | class ProgrammingError(DatabaseError): 48 | pass 49 | 50 | class NotSupportedError(DatabaseError): 51 | pass 52 | 53 | # types 54 | class PGMockType: 55 | "base" 56 | 57 | class Date(PGMockType): 58 | def __init__(self, year, month, day): 59 | pass 60 | 61 | class Time(PGMockType): 62 | def __init__(self, hour, minute, second): 63 | pass 64 | 65 | class Timestamp(PGMockType): 66 | def __init__(self, year, month, day, hour, minute, second): 67 | pass 68 | 69 | class DateFromTicks(PGMockType): 70 | def __init__(self, ticks): 71 | pass 72 | 73 | class TimeFromTicks(PGMockType): 74 | def __init__(self, ticks): 75 | pass 76 | 77 | class TimestampFromTicks(PGMockType): 78 | def __init__(self, ticks): 79 | pass 80 | 81 | class Binary(PGMockType): 82 | def __init__(self, string): 83 | pass 84 | 85 | class STRING(PGMockType): 86 | pass 87 | 88 | class BINARY(PGMockType): 89 | pass 90 | 91 | class NUMBER(PGMockType): 92 | pass 93 | 94 | class DATETIME(PGMockType): 95 | pass 96 | 97 | class ROWID(PGMockType): 98 | pass 99 | 100 | def expression_type(con, topx, ex): 101 | "take a BaseX descendant from sqparse2, return a type class from above" 102 | if isinstance(ex, sqparse2.Literal): 103 | if isinstance(ex.val, str): 104 | return STRING 105 | else: 106 | raise NotImplementedError('literal', type(ex.val)) 107 | elif isinstance(ex, sqparse2.AttrX): 108 | if ex.parent.name in con.db: # warning: what if it's not a table? what if it's aliased? 109 | return expression_type(con, topx, con.db[ex.parent.name].get_column(ex.attr.name).coltp) 110 | else: 111 | raise NotImplementedError(ex.parent) 112 | elif isinstance(ex, sqparse2.TypeX): 113 | return dict( 114 | integer=NUMBER, 115 | text=STRING, 116 | )[ex.type.lower()] 117 | else: 118 | raise NotImplementedError('unk type', type(ex)) 119 | 120 | Description = collections.namedtuple('Description', 'name type_code display_size internal_size precision scale null_ok') 121 | def description_from_colx(con, ex, colx): 122 | if isinstance(colx, sqparse2.AliasX): 123 | return Description(colx.alias, expression_type(con, ex, colx.name), *(None,) * 5) 124 | elif isinstance(colx, sqparse2.NameX): 125 | raise NotImplementedError 126 | # return Description(,*(None,)*5) 127 | else: 128 | raise NotImplementedError(ex) # probably math expressions and anonymous fields 129 | 130 | class Cursor(pg.Cursor): 131 | def __init__(self, connection): 132 | self.connection = connection 133 | self.arraysize = 1 134 | self.rows = None 135 | self.rownumber = None 136 | self.lastx = None 137 | # todo: self.lastrowid. SQLAlchemy uses it. 
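# note (hedged): rows/rownumber stay None until execute() runs; per the DBAPI spec, rowcount
# below reports -1 in that state, and description returns None for statements with no result set.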
138 | @property 139 | def rowcount(self): 140 | return -1 if self.rows is None else len(self.rows) 141 | @property 142 | def description(self): 143 | "this is only a property so it can raise; make it an attr once it works" 144 | if self.lastx is None: 145 | return None 146 | if not isinstance(self.lastx, (sqparse2.SelectX, sqparse2.UpdateX, sqparse2.InsertX)): 147 | return None 148 | if isinstance(self.lastx, (sqparse2.UpdateX, sqparse2.InsertX)) and self.lastx.ret is None: 149 | return None 150 | # at this point we know this is an operation that returns rows 151 | if isinstance(self.lastx, (sqparse2.UpdateX, sqparse2.InsertX)): 152 | raise NotImplementedError('todo: Cursor.description for non-select') 153 | else: # select case 154 | return [description_from_colx(self.connection, self.lastx, colx) for colx in self.lastx.cols.children] 155 | def callproc(self, procname, parameters=None): 156 | raise NotImplementedError("pgmock doesn't support stored procs yet") 157 | def __del__(self): 158 | self.close() 159 | def close(self): 160 | pass # for now pgmock doesn't have cursor resources to close 161 | def execute(self, operation, parameters=None): 162 | ex = self.lastx = sqparse2.parse(operation) 163 | if not self.connection.transaction_open and not self.connection.autocommit: 164 | self.connection.begin() 165 | self.rows = self.connection.db.apply_sql(ex, parameters or (), self.connection) 166 | self.rownumber = 0 # always? 167 | def executemany(self, operation, seq_of_parameters): 168 | for param in seq_of_parameters: 169 | self.execute(operation, param) 170 | def fetchone(self): 171 | ret = self.rows[self.rownumber] 172 | self.rownumber += 1 173 | return ret 174 | def fetchmany(self, size=None): 175 | raise NotImplementedError # hoping nobody cares about this one 176 | def fetchall(self): 177 | if self.rows is None: 178 | raise Error('no query to fetch') 179 | ret, self.rows = self.rows, None 180 | return ret 181 | def nextset(self): 182 | raise NotImplementedError('are we supporting multi result sets?') 183 | def setinputsizes(self, sizes): 184 | raise NotImplementedError 185 | def setoutputsize(self, size, column=None): 186 | raise NotImplementedError 187 | def __enter__(self): 188 | return self 189 | def __exit__(self, etype, error, tb): 190 | self.close() 191 | def scroll(self, value, mode='relative'): 192 | raise NotImplementedError 193 | def __iter__(self): 194 | rownum, self.rownumber = self.rownumber, None 195 | return iter(self.rows[rownum:]) 196 | 197 | def open_only(f): 198 | "decorator -- raises an error if the function is called with self.closed == True" 199 | @functools.wraps(f) 200 | def f2(self, *args, **kwargs): 201 | if self.closed: 202 | raise NotSupportedError('connection is closed') 203 | return f(self, *args, **kwargs) 204 | return f2 205 | 206 | class Connection: 207 | # todo: does this need autocommit and begin()? 
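# hedged usage sketch, grounded in __enter__/__exit__ below: a Connection doubles as a context
# manager that begins a transaction on entry, commits on clean exit, and rolls back if the
# block raises:
#   with Connection(db_id) as con:
#     con.cursor().execute('update t1 set a=%s where a=%s', (2, 1))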
208 | def __init__(self, db_id=None): 209 | "pass None as db_id to create a new pgmock database" 210 | self.closed = False 211 | self.db_id = add_db() if db_id is None else db_id 212 | self.db = DATABASES[self.db_id] 213 | print('connected db %i' % self.db_id) 214 | self._autocommit = False 215 | self.transaction_open = False 216 | 217 | @property 218 | def autocommit(self): 219 | return self._autocommit 220 | 221 | @autocommit.setter 222 | def autocommit(self, value): 223 | if value and self.transaction_open: 224 | raise OperationalError("can't set autocommit with open transaction") 225 | self._autocommit = value 226 | 227 | @open_only 228 | def close(self): 229 | if self.transaction_open: 230 | self.rollback() 231 | self.closed = True 232 | self.db_id = None 233 | self.db = None 234 | 235 | def __del__(self): 236 | if not self.closed: 237 | self.close() 238 | 239 | @open_only 240 | def begin(self): 241 | if self.transaction_open: 242 | raise OperationalError("can't begin() with transaction_open") 243 | self.db.trans_start(self) 244 | self.transaction_open = True 245 | 246 | @open_only 247 | def commit(self): 248 | if not self.transaction_open: 249 | raise OperationalError("can't commit without transaction_open") 250 | self.db.trans_commit() 251 | self.transaction_open = False 252 | print('commit') 253 | 254 | @open_only 255 | def rollback(self): 256 | if not self.transaction_open: 257 | raise OperationalError("can't rollback without transaction_open") 258 | self.db.trans_rollback() 259 | self.transaction_open = False 260 | print('rollback') 261 | 262 | @open_only 263 | def cursor(self): 264 | return Cursor(self) 265 | 266 | @open_only 267 | def __enter__(self): 268 | self.begin() 269 | return self 270 | 271 | @open_only 272 | def __exit__(self, etype, error, tb): 273 | (self.commit if etype is None else self.rollback)() 274 | 275 | connect = Connection 276 | 277 | def call_cur(f): 278 | "decorator for opening a connection and passing a cursor to the function" 279 | @functools.wraps(f) 280 | def f2(self, *args, **kwargs): 281 | with self.withcur() as cur: 282 | return f(self, cur, *args, **kwargs) 283 | return f2 284 | 285 | class PgPoolMock(pg.Pool): # only inherits so isinstance tests pass 286 | def __init__(self): 287 | # pylint: disable=super-init-not-called 288 | self.db_id = add_db() 289 | @property 290 | def tables(self): 291 | return DATABASES[self.db_id] 292 | @call_cur 293 | def select(self, cursor, qstring, vals=()): 294 | "careful: don't pass cursor (it's from decorator)" 295 | cursor.execute(qstring, vals) # hmm; do I not want to commit at the end of this? 296 | return cursor.fetchall() 297 | @call_cur 298 | def commit(self, cursor, qstring, vals=()): 299 | "careful: don't pass cursor (it's from decorator)" 300 | cursor.execute(qstring, vals) 301 | @call_cur 302 | def commitreturn(self, cursor, qstring, vals=()): 303 | "careful: don't pass cursor (it's from decorator)" 304 | cursor.execute(qstring, vals) 305 | return cursor.fetchall()[0] 306 | def close(self): 307 | pass # todo: is this closeall? 
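# usage sketch for the three query helpers above (table and values illustrative):
#   pool = PgPoolMock()
#   pool.commit('create table t1 (a int, b int)')
#   pool.commit('insert into t1 values (%s, %s)', (1, 2))
#   pool.select('select * from t1')                                # -> [[1, 2]]
#   pool.commitreturn('insert into t1 values (3, 4) returning a')  # -> [3]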
308 | @contextlib.contextmanager 309 | def __call__(self): 310 | with Connection(self.db_id) as con: 311 | yield con 312 | @contextlib.contextmanager 313 | def withcur(self): 314 | "don't pass cursor" 315 | with self() as con, con.cursor() as cur: 316 | yield cur 317 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # pg13 documentation build configuration file, created by 4 | # sphinx-quickstart on Wed Jan 14 17:55:29 2015. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | #sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | 'sphinx.ext.autodoc', 33 | 'sphinx.ext.viewcode', 34 | ] 35 | 36 | # Add any paths that contain templates here, relative to this directory. 37 | templates_path = ['_templates'] 38 | 39 | # The suffix of source filenames. 40 | source_suffix = '.rst' 41 | 42 | # The encoding of source files. 43 | #source_encoding = 'utf-8-sig' 44 | 45 | # The master toctree document. 46 | master_doc = 'index' 47 | 48 | # General information about the project. 49 | project = 'pg13' 50 | copyright = '2015, Author' 51 | 52 | # The version info for the project you're documenting, acts as replacement for 53 | # |version| and |release|, also used in various other places throughout the 54 | # built documents. 55 | # 56 | # The short X.Y version. 57 | version = '' 58 | # The full version, including alpha/beta/rc tags. 59 | release = '' 60 | 61 | # The language for content autogenerated by Sphinx. Refer to documentation 62 | # for a list of supported languages. 63 | #language = None 64 | 65 | # There are two options for replacing |today|: either, you set today to some 66 | # non-false value, then it is used: 67 | #today = '' 68 | # Else, today_fmt is used as the format for a strftime call. 69 | #today_fmt = '%B %d, %Y' 70 | 71 | # List of patterns, relative to source directory, that match files and 72 | # directories to ignore when looking for source files. 73 | exclude_patterns = ['_build'] 74 | 75 | # The reST default role (used for this markup: `text`) to use for all 76 | # documents. 77 | #default_role = None 78 | 79 | # If true, '()' will be appended to :func: etc. cross-reference text. 80 | #add_function_parentheses = True 81 | 82 | # If true, the current module name will be prepended to all description 83 | # unit titles (such as .. function::). 84 | #add_module_names = True 85 | 86 | # If true, sectionauthor and moduleauthor directives will be shown in the 87 | # output. 
They are ignored by default. 88 | #show_authors = False 89 | 90 | # The name of the Pygments (syntax highlighting) style to use. 91 | pygments_style = 'sphinx' 92 | 93 | # A list of ignored prefixes for module index sorting. 94 | #modindex_common_prefix = [] 95 | 96 | # If true, keep warnings as "system message" paragraphs in the built documents. 97 | #keep_warnings = False 98 | 99 | 100 | # -- Options for HTML output ---------------------------------------------- 101 | 102 | # The theme to use for HTML and HTML Help pages. See the documentation for 103 | # a list of builtin themes. 104 | html_theme = 'default' 105 | 106 | # Theme options are theme-specific and customize the look and feel of a theme 107 | # further. For a list of options available for each theme, see the 108 | # documentation. 109 | #html_theme_options = {} 110 | 111 | # Add any paths that contain custom themes here, relative to this directory. 112 | #html_theme_path = [] 113 | 114 | # The name for this set of Sphinx documents. If None, it defaults to 115 | # " v documentation". 116 | #html_title = None 117 | 118 | # A shorter title for the navigation bar. Default is the same as html_title. 119 | #html_short_title = None 120 | 121 | # The name of an image file (relative to this directory) to place at the top 122 | # of the sidebar. 123 | #html_logo = None 124 | 125 | # The name of an image file (within the static path) to use as favicon of the 126 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 127 | # pixels large. 128 | #html_favicon = None 129 | 130 | # Add any paths that contain custom static files (such as style sheets) here, 131 | # relative to this directory. They are copied after the builtin static files, 132 | # so a file named "default.css" will overwrite the builtin "default.css". 133 | html_static_path = ['_static'] 134 | 135 | # Add any extra paths that contain custom files (such as robots.txt or 136 | # .htaccess) here, relative to this directory. These files are copied 137 | # directly to the root of the documentation. 138 | #html_extra_path = [] 139 | 140 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 141 | # using the given strftime format. 142 | #html_last_updated_fmt = '%b %d, %Y' 143 | 144 | # If true, SmartyPants will be used to convert quotes and dashes to 145 | # typographically correct entities. 146 | #html_use_smartypants = True 147 | 148 | # Custom sidebar templates, maps document names to template names. 149 | #html_sidebars = {} 150 | 151 | # Additional templates that should be rendered to pages, maps page names to 152 | # template names. 153 | #html_additional_pages = {} 154 | 155 | # If false, no module index is generated. 156 | #html_domain_indices = True 157 | 158 | # If false, no index is generated. 159 | #html_use_index = True 160 | 161 | # If true, the index is split into individual pages for each letter. 162 | #html_split_index = False 163 | 164 | # If true, links to the reST sources are added to the pages. 165 | #html_show_sourcelink = True 166 | 167 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 168 | #html_show_sphinx = True 169 | 170 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 171 | #html_show_copyright = True 172 | 173 | # If true, an OpenSearch description file will be output, and all pages will 174 | # contain a tag referring to it. The value of this option must be the 175 | # base URL from which the finished HTML is served. 
176 | #html_use_opensearch = '' 177 | 178 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 179 | #html_file_suffix = None 180 | 181 | # Output file base name for HTML help builder. 182 | htmlhelp_basename = 'pg13doc' 183 | 184 | 185 | # -- Options for LaTeX output --------------------------------------------- 186 | 187 | latex_elements = { 188 | # The paper size ('letterpaper' or 'a4paper'). 189 | #'papersize': 'letterpaper', 190 | 191 | # The font size ('10pt', '11pt' or '12pt'). 192 | #'pointsize': '10pt', 193 | 194 | # Additional stuff for the LaTeX preamble. 195 | #'preamble': '', 196 | } 197 | 198 | # Grouping the document tree into LaTeX files. List of tuples 199 | # (source start file, target name, title, 200 | # author, documentclass [howto, manual, or own class]). 201 | latex_documents = [ 202 | ('index', 'pg13.tex', 'pg13 Documentation', 203 | 'Author', 'manual'), 204 | ] 205 | 206 | # The name of an image file (relative to this directory) to place at the top of 207 | # the title page. 208 | #latex_logo = None 209 | 210 | # For "manual" documents, if this is true, then toplevel headings are parts, 211 | # not chapters. 212 | #latex_use_parts = False 213 | 214 | # If true, show page references after internal links. 215 | #latex_show_pagerefs = False 216 | 217 | # If true, show URL addresses after external links. 218 | #latex_show_urls = False 219 | 220 | # Documents to append as an appendix to all manuals. 221 | #latex_appendices = [] 222 | 223 | # If false, no module index is generated. 224 | #latex_domain_indices = True 225 | 226 | 227 | # -- Options for manual page output --------------------------------------- 228 | 229 | # One entry per manual page. List of tuples 230 | # (source start file, name, description, authors, manual section). 231 | man_pages = [ 232 | ('index', 'pg13', 'pg13 Documentation', 233 | ['Author'], 1) 234 | ] 235 | 236 | # If true, show URL addresses after external links. 237 | #man_show_urls = False 238 | 239 | 240 | # -- Options for Texinfo output ------------------------------------------- 241 | 242 | # Grouping the document tree into Texinfo files. List of tuples 243 | # (source start file, target name, title, author, 244 | # dir menu entry, description, category) 245 | texinfo_documents = [ 246 | ('index', 'pg13', 'pg13 Documentation', 247 | 'Author', 'pg13', 'One line description of project.', 248 | 'Miscellaneous'), 249 | ] 250 | 251 | # Documents to append as an appendix to all manuals. 252 | #texinfo_appendices = [] 253 | 254 | # If false, no module index is generated. 255 | #texinfo_domain_indices = True 256 | 257 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 258 | #texinfo_show_urls = 'footnote' 259 | 260 | # If true, do not generate a @detailmenu in the "Top" node's menu. 261 | #texinfo_no_detailmenu = False 262 | 263 | 264 | # -- Options for Epub output ---------------------------------------------- 265 | 266 | # Bibliographic Dublin Core info. 267 | epub_title = 'pg13' 268 | epub_author = 'Author' 269 | epub_publisher = 'Author' 270 | epub_copyright = '2015, Author' 271 | 272 | # The basename for the epub file. It defaults to the project name. 273 | #epub_basename = u'pg13' 274 | 275 | # The HTML theme for the epub output. Since the default themes are not optimized 276 | # for small screen space, using the same theme for HTML and epub output is 277 | # usually not wise. This defaults to 'epub', a theme designed to save visual 278 | # space. 279 | #epub_theme = 'epub' 280 | 281 | # The language of the text. 
It defaults to the language option 282 | # or en if the language is not set. 283 | #epub_language = '' 284 | 285 | # The scheme of the identifier. Typical schemes are ISBN or URL. 286 | #epub_scheme = '' 287 | 288 | # The unique identifier of the text. This can be an ISBN number 289 | # or the project homepage. 290 | #epub_identifier = '' 291 | 292 | # A unique identification for the text. 293 | #epub_uid = '' 294 | 295 | # A tuple containing the cover image and cover page html template filenames. 296 | #epub_cover = () 297 | 298 | # A sequence of (type, uri, title) tuples for the guide element of content.opf. 299 | #epub_guide = () 300 | 301 | # HTML files that should be inserted before the pages created by sphinx. 302 | # The format is a list of tuples containing the path and title. 303 | #epub_pre_files = [] 304 | 305 | # HTML files that should be inserted after the pages created by sphinx. 306 | # The format is a list of tuples containing the path and title. 307 | #epub_post_files = [] 308 | 309 | # A list of files that should not be packed into the epub file. 310 | epub_exclude_files = ['search.html'] 311 | 312 | # The depth of the table of contents in toc.ncx. 313 | #epub_tocdepth = 3 314 | 315 | # Allow duplicate toc entries. 316 | #epub_tocdup = True 317 | 318 | # Choose between 'default' and 'includehidden'. 319 | #epub_tocscope = 'default' 320 | 321 | # Fix unsupported image types using the PIL. 322 | #epub_fix_images = False 323 | 324 | # Scale large images. 325 | #epub_max_image_width = 0 326 | 327 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 328 | #epub_show_urls = 'inline' 329 | 330 | # If false, no index is generated. 331 | #epub_use_index = True 332 | -------------------------------------------------------------------------------- /pg13/pg.py: -------------------------------------------------------------------------------- 1 | "To use pg13 for ORM, inherit your model classes from Row." 2 | # todo: make everything take a pool_or_cursor instead of just a pool (at least make them all check for it) 3 | # todo: don't allow JSONFIELDS to overlap with primary key? think about it. >> is this obsolete with SpecialField? 4 | # todo: add profiling hooks 5 | # todo: need deserialize for SELECT / RETURNING values according to JSON_WRITE, JSON_READ 6 | 7 | import contextlib, json, functools, collections, re 8 | from . import errors 9 | 10 | class Select1Error(Exception): 11 | "base for select1 error conditions" 12 | class Missing(Select1Error): 13 | pass 14 | class NotUnique(Select1Error): 15 | pass 16 | class SchemaError(Exception): 17 | "base for schema-related errors" 18 | class FieldError(SchemaError): 19 | "no such field in model" 20 | class NullJsonError(Exception): 21 | pass 22 | class DupeInsert(Exception): 23 | pass 24 | 25 | def eqexpr(key, value): 26 | "for automatic x is null vs x = value stmts" 27 | return key+(' is %s' if value is None else ' = %s') 28 | 29 | class Cursor: 30 | "base class for cursor wrappers. necessary for error-wrapping." 31 | # see pool_psyco.py for docs on JSON_WRITE/JSON_READ 32 | JSON_WRITE = None 33 | JSON_READ = None 34 | def execute(self, qstring, vals=()): 35 | raise NotImplementedError 36 | def __iter__(self): 37 | raise NotImplementedError 38 | def fetchone(self): 39 | raise NotImplementedError 40 | 41 | class Pool: 42 | "base class for pool wrappers.
Most of the Row methods expect one of these as the first argument" 43 | # see pool_psyco.py for docs on JSON_WRITE/JSON_READ 44 | JSON_WRITE = None 45 | JSON_READ = None 46 | def __init__(self, dbargs): 47 | raise NotImplementedError 48 | def select(self, qstring, vals=()): 49 | raise NotImplementedError 50 | def commit(self, qstring, vals=()): 51 | raise NotImplementedError 52 | def commitreturn(self, qstring, vals=()): 53 | raise NotImplementedError 54 | def close(self): 55 | raise NotImplementedError 56 | @contextlib.contextmanager 57 | def __call__(self): 58 | raise NotImplementedError 59 | 60 | def is_serdes(instance): 61 | "todo: once there's a SerDes based class, replace all calls with isinstance()" 62 | return hasattr(instance, 'ser') and hasattr(instance, 'des') 63 | 64 | def set_options(pool_or_cursor, row_instance): 65 | "for connection-level options that need to be set on Row instances" 66 | # todo: move around an Options object instead 67 | for option in ('JSON_READ', ): 68 | setattr(row_instance, option, getattr(pool_or_cursor, option, None)) 69 | return row_instance 70 | 71 | def transform_specialfield(jsonify, field, value): 72 | "helper for serialize_row" 73 | raw = field.ser(value) if is_serdes(field) else value 74 | return json.dumps(raw) if not isinstance(field, str) and jsonify else raw 75 | 76 | def dirty(field, ttl=None): 77 | "decorator to cache the result of a function until a field changes" 78 | if ttl is not None: 79 | raise NotImplementedError('pg.dirty ttl feature') 80 | def decorator(func): 81 | @functools.wraps(func) 82 | def wrapper(self, *args, **kwargs): 83 | # warning: not reentrant 84 | dict_ = self.dirty_cache[field] if field in self.dirty_cache else self.dirty_cache.setdefault(field, {}) 85 | return dict_[func.__name__] if func.__name__ in dict_ else dict_.setdefault(func.__name__, func(self, *args, **kwargs)) 86 | return wrapper 87 | return decorator 88 | 89 | def commit_or_execute(pool_or_cursor, qstring, vals=()): 90 | if isinstance(pool_or_cursor, Pool): 91 | pool_or_cursor.commit(qstring, vals) 92 | elif isinstance(pool_or_cursor, Cursor): 93 | pool_or_cursor.execute(qstring, vals) 94 | else: 95 | raise TypeError('bad_pool_or_cursor_type', type(pool_or_cursor)) 96 | 97 | def select_or_execute(pool_or_cursor, qstring, vals=()): 98 | if isinstance(pool_or_cursor, Pool): 99 | return pool_or_cursor.select(qstring, vals) 100 | elif isinstance(pool_or_cursor, Cursor): 101 | pool_or_cursor.execute(qstring, vals) 102 | return pool_or_cursor 103 | else: 104 | raise TypeError('bad_pool_or_cursor_type', type(pool_or_cursor)) 105 | 106 | def commitreturn_or_fetchone(pool_or_cursor, qstring, vals=()): 107 | if isinstance(pool_or_cursor, Pool): 108 | return pool_or_cursor.commitreturn(qstring, vals) 109 | elif isinstance(pool_or_cursor, Cursor): 110 | pool_or_cursor.execute(qstring, vals) 111 | return pool_or_cursor.fetchone() 112 | else: 113 | raise TypeError('bad_pool_or_cursor_type', type(pool_or_cursor)) 114 | 115 | class Row: 116 | "base class for models" 117 | # todo: metaclass stuff to check field names on class creation? forbidden column names: returning 118 | # todo: metaclass for converting fields to a namedtuple 119 | FIELDS = [] 120 | PKEY = '' 121 | INDEXES = [] 122 | TABLE = '' 123 | REFKEYS = {} # this is used by syncschema. see syncschema.py for usage. 
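# minimal model sketch (class and field names invented) showing how a subclass
# typically fills in the attributes declared here:
#   class Person(Row):
#     TABLE = 'person'
#     FIELDS = [('id', 'int'), ('name', 'text')]
#     PKEY = 'id'
#     INDEXES = ['name']
# Person.create_table(pool) would then issue the create table / create index
# statements built below.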
124 | SENDRAW = [] # used by syncschema to send non-syncable fields 'raw' 125 | JSON_READ = None 126 | 127 | @classmethod 128 | def create_indexes(cls, pool_or_cursor): 129 | "called by create_table; you can also call it directly to add indexes declared after the table was created (it assumes they don't already exist)" 130 | for index in cls.INDEXES: 131 | # note: these are specified as either 'field, field, field' or a runnable query. you can put any query you want in there 132 | query = index if 'create index' in index.lower() else 'create index on %s (%s)'%(cls.TABLE, index) 133 | commit_or_execute(pool_or_cursor, query) 134 | 135 | @classmethod 136 | def create_table(cls, pool_or_cursor): 137 | "uses FIELDS, PKEY, INDEXES and TABLE members to create a sql table for the model" 138 | def mkfield(pair): 139 | name, type_ = pair 140 | return name, (type_ if isinstance(type_, str) else 'jsonb') 141 | 142 | fields = ','.join(map(' '.join, list(map(mkfield, cls.FIELDS)))) 143 | base = 'create table if not exists %s (%s' % (cls.TABLE, fields) 144 | if cls.PKEY: 145 | base += ', primary key (%s)' % cls.PKEY 146 | base += ')' 147 | commit_or_execute(pool_or_cursor, base) 148 | cls.create_indexes(pool_or_cursor) 149 | 150 | @classmethod 151 | def names(cls): 152 | "helper; returns list of the FIELDS names" 153 | return [name for name, val in cls.FIELDS] 154 | 155 | def __eq__(self, other): 156 | return isinstance(other, type(self)) and all(self[k] == other[k] for k in self.names()) 157 | 158 | def __ne__(self, other): 159 | return not self == other 160 | 161 | def __init__(self, *cols): 162 | "note: __init__ takes strings for SpecialField fields instead of the deserialized object because it expects to construct from DB rows" 163 | if len(cols) != len(self.FIELDS): 164 | raise ValueError(len(cols), len(self.FIELDS)) 165 | self.values = list(cols) 166 | self.dirty_cache = {} 167 | 168 | def __getitem__(self, name): 169 | "note: supporting nulls here is complicated and I'm not sure it's the right thing. I guess *not* supporting them can break some inserts.\ 170 | Converting nulls to empties in Row.insert() will solve some cases." 171 | if name is Ellipsis: 172 | return self.values 173 | try: 174 | index = self.index(name) 175 | except ValueError: 176 | raise FieldError("%s.%s"%(self.__class__.__name__, name)) 177 | val = self.values[index] 178 | field = self.FIELDS[index][1] 179 | # todo: typecheck val on readback 180 | parsed_val = json.loads(val) if isinstance(field, str) and self.JSON_READ else val 181 | return field.des(parsed_val) if is_serdes(field) else parsed_val 182 | 183 | @classmethod 184 | def index(cls, name): 185 | "helper; returns index of field name in row" 186 | return cls.names().index(name) 187 | 188 | @classmethod 189 | def split_pkey(cls): 190 | "get pkey, split by whitespace-agnostic comma" 191 | return re.split(r',\s*', cls.PKEY) 192 | 193 | @classmethod 194 | def pkey_get(cls, pool_or_cursor, *vals): 195 | "lookup by primary keys in order" 196 | pkey = cls.split_pkey() 197 | if len(vals) != len(pkey): 198 | raise ValueError("%i args != %i-len primary key for %s"%(len(vals), len(pkey), cls.TABLE)) 199 | rows = list(cls.select(pool_or_cursor, **dict(list(zip(pkey, vals))))) 200 | if not rows: 201 | raise Missing 202 | return set_options(pool_or_cursor, cls(*rows[0])) 203 | 204 | @classmethod 205 | def pkey_get_withref(cls, pool_or_cursor, *vals): 206 | "get the row plus the rows referenced by its REFKEYS in a single roundtrip" 207 | # create an array_agg per thing in REFKEYS.
208 | # this requires a DB stored proc (or a really complicated select) to unpack the versioned fields. 209 | raise NotImplementedError('todo') # pragma: no cover 210 | 211 | @classmethod 212 | def select(cls, pool_or_cursor, **kwargs): 213 | "note: This returns a generator, not a list. All your expectations will be violated" 214 | columns = kwargs.pop('columns', '*') 215 | # todo: write a test for whether eqexpr matters; figure out pg behavior and apply to pgmock 216 | query = "select %s from %s"%(columns, cls.TABLE) 217 | if kwargs: 218 | query += ' where %s' % ' and '.join('%s = %%s' % k for k in kwargs) 219 | return select_or_execute(pool_or_cursor, query, tuple(kwargs.values())) 220 | 221 | @classmethod 222 | def select_models(cls, pool_or_cursor, **kwargs): 223 | "returns generator yielding instances of the class" 224 | if 'columns' in kwargs: 225 | raise ValueError("don't pass 'columns' to select_models") 226 | return (set_options(pool_or_cursor, cls(*row)) for row in cls.select(pool_or_cursor, **kwargs)) 227 | 228 | @classmethod 229 | def selectwhere(cls, pool_or_cursor, userid, qtail, vals=(), needs_and=True): 230 | qstring = ('select * from %s where userid = %i '%(cls.TABLE, userid))+('and ' if needs_and else '')+qtail 231 | return (set_options(pool_or_cursor, cls(*row)) for row in select_or_execute(pool_or_cursor, qstring, tuple(vals))) 232 | 233 | @classmethod 234 | def select_xiny(cls, pool_or_cursor, userid, field, values): 235 | return cls.selectwhere(pool_or_cursor, userid, '%s in %%s'%field, (tuple(values), )) if values else [] 236 | 237 | @classmethod 238 | def serialize_row(cls, pool_or_cursor, fieldnames, vals, for_read=False): 239 | fieldtypes = [cls.FIELDS[cls.index(name)][1] for name in fieldnames] 240 | jsonify = pool_or_cursor.JSON_READ if for_read else pool_or_cursor.JSON_WRITE 241 | return tuple(transform_specialfield(jsonify, ft, v) for ft, v in zip(fieldtypes, vals)) 242 | 243 | @classmethod 244 | def insert_all(cls, pool_or_cursor, *vals): 245 | # note: it would be nice to write this on top of cls.insert, but this returns an object and that has a returning feature. tricky semantics. 246 | if len(cls.FIELDS) != len(vals): 247 | raise ValueError('fv_len_mismatch', len(cls.FIELDS), len(vals), cls.TABLE) 248 | serialized_vals = cls.serialize_row(pool_or_cursor, [f[0] for f in cls.FIELDS], vals) 249 | query = "insert into %s values (%s)"%(cls.TABLE, ','.join(['%s']*len(serialized_vals))) 250 | try: 251 | commit_or_execute(pool_or_cursor, query, serialized_vals) 252 | except errors.PgPoolError as err: 253 | # todo: need cross-db, cross-version, cross-driver testing to get this right 254 | raise DupeInsert(cls.TABLE, err) # note: pgmock raises DupeInsert directly, so catching this works in caller. 
(but args are different) 255 | return set_options( 256 | pool_or_cursor, 257 | cls(*cls.serialize_row(pool_or_cursor, cls.names(), vals, for_read=True)) 258 | ) 259 | 260 | @classmethod 261 | def insert(cls, pool_or_cursor, fields, vals, returning=None): 262 | if len(fields) != len(vals): 263 | raise ValueError('fv_len_mismatch', len(fields), len(vals), cls.TABLE) 264 | vals = cls.serialize_row(pool_or_cursor, fields, vals) 265 | query = "insert into %s (%s) values (%s)"%(cls.TABLE, ','.join(fields), ', '.join(['%s']*len(vals))) 266 | if returning: 267 | return commitreturn_or_fetchone(pool_or_cursor, query + ' returning ' + returning, vals) 268 | else: 269 | commit_or_execute(pool_or_cursor, query, vals) 270 | return None 271 | 272 | @classmethod 273 | def kwinsert(cls, pool_or_cursor, **kwargs): 274 | "kwargs version of insert" 275 | returning = kwargs.pop('returning', None) 276 | fields, vals = list(zip(*list(kwargs.items()))) 277 | # note: don't do SpecialField resolution here; cls.insert takes care of it 278 | return cls.insert(pool_or_cursor, fields, vals, returning=returning) 279 | 280 | @classmethod 281 | def kwinsert_mk(cls, pool_or_cursor, **kwargs): 282 | "wrapper for kwinsert that returns a constructed class. use this over kwinsert in most cases" 283 | if 'returning' in kwargs: 284 | raise ValueError("don't call kwinsert_mk with 'returning'") 285 | return set_options( 286 | pool_or_cursor, 287 | cls(*cls.kwinsert(pool_or_cursor, returning='*', **kwargs)) 288 | ) 289 | 290 | @classmethod 291 | def checkdb(cls, pool_or_cursor): 292 | raise NotImplementedError("check that DB table matches our fields") # pragma: no cover 293 | 294 | @classmethod 295 | def insert_mtac(cls, pool_or_cursor, restrict, incfield, fields=(), vals=()): 296 | "todo: doc what mtac stands for" 297 | if not isinstance(cls.FIELDS[cls.index(incfield)][1], str): 298 | raise TypeError('mtac_specialfield_unsupported', 'incfield', incfield) 299 | if any(not isinstance(cls.FIELDS[cls.index(f)][1], str) for f in restrict): 300 | raise TypeError('mtac_specialfield_unsupported', 'restrict') 301 | if len(fields) != len(vals): 302 | raise ValueError("insert_mtac len(fields) != len(vals)") 303 | vals = cls.serialize_row(pool_or_cursor, fields, vals) 304 | where = ' and '.join('%s = %s'%tup for tup in list(restrict.items())) 305 | mtac = '(select coalesce(max(%s), -1)+1 from %s where %s)'%(incfield, cls.TABLE, where) 306 | qcols = ','.join([incfield]+list(restrict.keys())+list(fields)) 307 | # todo: are both vals ever empty? if yes this breaks. 308 | qvals = tuple(restrict.values())+tuple(vals) 309 | valstring = ','.join(['%s']*len(qvals)) 310 | query = 'insert into %s (%s) values (%s, %s) returning *'%(cls.TABLE, qcols, mtac, valstring) 311 | return set_options(pool_or_cursor, cls(*commitreturn_or_fetchone(pool_or_cursor, query, qvals))) 312 | 313 | @classmethod 314 | def pkey_update(cls, pool_or_cursor, pkey_vals, escape_keys, raw_keys=None): 315 | if not cls.PKEY: 316 | raise ValueError("can't update %s, no primary key"%cls.TABLE) 317 | if any(not isinstance(cls.FIELDS[cls.index(f)][1], str) for f in (raw_keys or ())): 318 | raise TypeError('rawkeys_specialfield_unsupported') 319 | escape_keys = dict(list(zip(escape_keys, cls.serialize_row(pool_or_cursor, escape_keys, list(escape_keys.values()))))) 320 | pkey = cls.split_pkey() 321 | raw_keys = raw_keys or {} 322 | if any(k in pkey for k in list(escape_keys)+list(raw_keys)): 323 | raise ValueError("pkey field updates not allowed") # todo: why? 
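# shape of the statement assembled below, for an illustrative model with
# PKEY 'id' updating escape_keys {'name': ...}:
#   update <table> set name = %s where id = %s
# raw_keys entries are spliced into the set clause verbatim (hence the poison
# warning below) and read back via a returning clause.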
324 | if len(pkey_vals) != len(pkey): 325 | raise ValueError("len(pkey_vals) %i != len(pkey) %i"%(len(pkey_vals), len(pkey))) 326 | # todo: if I'm going to allow SpecialField in primary key vals, serialize here 327 | whereclause = ' and '.join('%s = %%s'%k for k in pkey) 328 | setclause = ','.join(['%s = %%s'%k for k in escape_keys]+['%s = %s'%tup for tup in list(raw_keys.items())]) 329 | # note: raw_keys could contain %s as well as a lot of other poison 330 | query = 'update %s set %s where %s'%(cls.TABLE, setclause, whereclause) 331 | vals = tuple(escape_keys.values())+pkey_vals 332 | if raw_keys: 333 | return commitreturn_or_fetchone(pool_or_cursor, query+' returning '+','.join(raw_keys), vals) 334 | else: 335 | commit_or_execute(pool_or_cursor, query, vals) 336 | return None 337 | 338 | def pkey_vals(self): 339 | if not hasattr(self, 'PKEY') or not self.PKEY: 340 | raise KeyError("no primary key on %s"%self.TABLE) 341 | return tuple(map(self.__getitem__, self.split_pkey())) 342 | 343 | def update(self, pool_or_cursor, escape_keys, raw_keys=None): 344 | pkey_vals = self.pkey_vals() 345 | # note: do not serialize SpecialField. pkey_update takes care of it. 346 | rawvals = self.pkey_update(pool_or_cursor, pkey_vals, escape_keys, raw_keys) 347 | # note: Row has no __setitem__ because this is the only time we want to modify our copy of data (after an update to reflect DB) 348 | if raw_keys: 349 | for key, val in zip(raw_keys, rawvals): 350 | self.values[self.index(key)] = val # this is necessary because raw_keys can contain expressions 351 | for key in raw_keys: 352 | if key in self.dirty_cache: 353 | self.dirty_cache.pop(key) 354 | escape_keys = dict(list(zip(escape_keys, self.serialize_row(pool_or_cursor, escape_keys, list(escape_keys.values()), for_read=True)))) # ugly; doing it in pkey_update and this 355 | for key, val in list(escape_keys.items()): 356 | self.values[self.index(key)] = val 357 | for key in escape_keys: 358 | if key in self.dirty_cache: 359 | self.dirty_cache.pop(key) 360 | 361 | @classmethod 362 | def updatewhere(cls, pool_or_cursor, where_keys, **update_keys): 363 | "this doesn't allow raw_keys for now" 364 | # if cls.JSONFIELDS: raise NotImplementedError # todo: do I need to make the same change for SpecialField? 365 | if not where_keys or not update_keys: 366 | raise ValueError 367 | setclause = ','.join(k+' = %s' for k in update_keys) 368 | whereclause = ' and '.join(eqexpr(k, v) for k, v in list(where_keys.items())) 369 | query = 'update %s set %s where %s'%(cls.TABLE, setclause, whereclause) 370 | vals = tuple(list(update_keys.values())+list(where_keys.values())) 371 | commit_or_execute(pool_or_cursor, query, vals) 372 | 373 | def delete(self, pool_or_cursor): 374 | ".. warning:: pgmock doesn't support delete yet, so this isn't tested" 375 | vals = self.pkey_vals() 376 | whereclause = ' and '.join('%s = %%s'%k for k in self.split_pkey()) 377 | query = 'delete from %s where %s'%(self.TABLE, whereclause) 378 | commit_or_execute(pool_or_cursor, query, vals) 379 | 380 | def refkeys(self, fields): 381 | "returns {ModelClass:list_of_pkey_tuples}. see syncschema.RefKey. Don't use this yet." 
382 | # todo doc: better explanation of what refkeys are and how fields plays in 383 | ddlist = collections.defaultdict(list) 384 | if any(f not in self.REFKEYS for f in fields): 385 | raise ValueError(fields, 'not all in', list(self.REFKEYS.keys())) 386 | for field in fields: 387 | refkeys = self.REFKEYS[field] 388 | for model in refkeys.refmodels: 389 | ddlist[model].extend(refkeys.pkeys(self, field)) 390 | return ddlist 391 | 392 | def __repr__(self): 393 | pkey = ' %s'%','.join('%s:%s'%(k, self[k]) for k in self.split_pkey()) if self.PKEY else '' 394 | return '<%s(pg.Row)%s>'%(self.__class__.__name__, pkey) 395 | -------------------------------------------------------------------------------- /test_pg13/test_pgmock.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pg13 import pgmock, sqparse2, pg, sqex, pgmock_dbapi2, table 3 | 4 | def prep(create_stmt): 5 | "helper for table setup" 6 | tables=pgmock.TablesDict() 7 | tables.apply_sql(sqparse2.parse(create_stmt),(),None) 8 | def runsql(stmt,vals=()): return tables.apply_sql(sqparse2.parse(stmt),vals,None) 9 | return tables,runsql 10 | 11 | def test_default_null_vs_notprovided(): 12 | "test some none vs missing cases" 13 | tables,runsql=prep("create table t1 (a int, b int default 7, c int default null)") 14 | # 1. missing with default null 15 | runsql("insert into t1 (a,b) values (1,2)") 16 | # 2. missing triggers default value 17 | runsql("insert into t1 (a,c) values (1,3)") 18 | # 3. passed-in None overrides default 19 | runsql("insert into t1 (a,b) values (1,null)") 20 | assert tables['t1'].rows==[[1,2,None],[1,7,3],[1,None,None]] 21 | 22 | def test_insert(): 23 | tables,runsql=prep("create table t1 (a int, b int, c int)") 24 | runsql("insert into t1 (a,b,c) values (1,2,3)") 25 | assert tables['t1'].rows[0]==[1,2,3] 26 | 27 | def test_insert_endcols(): 28 | "test len(insert fields)<len(table fields)" def test_insert_returning(): """ >>> with pool() as con,con.cursor() as cur: 52 | ... cur.execute('create temp table t1 (a int, b int, c int)') 53 | ... cur.execute('insert into t1 values (1,2,3) returning b') 54 | ... print list(cur) 55 | ... cur.execute('insert into t1 values (2,2,3) returning *') 56 | ... print list(cur) 57 | ...
58 | [(2,)] 59 | [(2, 2, 3)] 60 | """ 61 | tables,runsql=prep("create table t1 (a int, b int, c int)") 62 | assert [[2]]==runsql("insert into t1 values (1,2,3) returning (b)") 63 | assert [[2,2,3]]==runsql('insert into t1 values (2,2,3) returning *') 64 | 65 | def test_select(): 66 | tables,runsql=prep("create table t1 (a int, b int, c int)") 67 | runsql("insert into t1 values (1,2,3)") 68 | runsql("insert into t1 values (2,2,3)") 69 | runsql("insert into t1 values (3,1,3)") 70 | assert [[1,2,3],[2,2,3]]==runsql("select * from t1 where b=2") 71 | 72 | def test_select_some(): 73 | "select columnlist instead of *" 74 | tables,runsql=prep("create table t1 (a int, b int, c int)") 75 | runsql("insert into t1 values (1,2,3)") 76 | assert [[3,1]]==runsql("select c,a from t1") 77 | 78 | def test_select_math(): 79 | "unary and binary math & select literal" 80 | tables,runsql=prep('create table t1(a int)') 81 | runsql('insert into t1 values(1)') 82 | assert [[2,-1,5]]==runsql('select a+1,-a,5 from t1') 83 | 84 | def test_select_max(): 85 | tables,runsql=prep("create table t1 (a int, b int, c int)") 86 | assert [None]==runsql("select max(a) from t1") 87 | runsql("insert into t1 (a,b,c) values (1,2,3)") 88 | runsql("insert into t1 (a,b,c) values (4,5,6)") 89 | assert [4]==runsql("select max(a) from t1") 90 | with pytest.raises(sqparse2.SQLSyntaxError): runsql('select a,max(a) from t1') # todo: spec support 91 | 92 | def test_select_coalesce(): 93 | tables,runsql=prep("create table t1 (a int, b int, c int)") 94 | assert [1]==runsql("select coalesce(max(b),1) from t1") 95 | for i in range(2): runsql("insert into t1 (a,b,c) values (%s,2,3)",(i,)) 96 | assert [[1],[1]]==runsql("select coalesce(null,1) from t1") 97 | 98 | def test_insert_select(): 99 | tables,runsql=prep("create table t1 (a int, b int, c int)") 100 | runsql("insert into t1 (a,b,c) values (1,2,3)") 101 | print(sqparse2.parse('insert into t1 (a,b,c) values (2,3,(select c from t1 where a=1))')) 102 | runsql("insert into t1 (a,b,c) values (2,3,(select c from t1 where a=1))") 103 | assert tables['t1'].rows==[[1,2,3],[2,3,3]] 104 | 105 | @pytest.mark.xfail 106 | def test_insert_missing_pkey(): 107 | raise NotImplementedError # look up when the spec cares about missing pkey. only when not null specified on the column? 
108 | tables,runsql=prep("create table t1 (a int, b int, c int, primary key (a,b))") 109 | runsql("insert into t1 (a) values (1)") 110 | 111 | def test_create_pkey(): 112 | tables,runsql = prep('create table t1 (a int, b int, primary key (a))') 113 | with pytest.raises(sqparse2.SQLSyntaxError): 114 | runsql('create table t2 (a int primary key, b int, primary key (a))') 115 | assert tables['t1'].pkey == ['a'] 116 | runsql('create table t2 (a int primary key, b int)') 117 | assert tables['t2'].pkey == ['a'] 118 | 119 | def test_update(): 120 | tables,runsql=prep("create table t1 (a int, b int, c int)") 121 | runsql("insert into t1 (a,b,c) values (1,2,3)") 122 | runsql("insert into t1 (a,b,c) values (2,2,3)") 123 | runsql("update t1 set b=1,c=2 where a=1") 124 | assert tables['t1'].rows==[[1,1,2],[2,2,3]] 125 | 126 | def test_update_returning(): 127 | tables,runsql=prep('create table t1(a int,b int,c int)') 128 | runsql('insert into t1 values(1,2,3)') 129 | assert [[1,3]]==runsql('update t1 set b=5 where a<5 returning a,c') 130 | assert [[1,3]]==runsql('update t1 set b=5 where a<5 returning (a,c)') 131 | 132 | assert [[3]]==runsql('update t1 set b=5 where a<5 returning c') # todo: make sure list of rows is the right return type 133 | 134 | def test_in_operator(): 135 | tables,runsql=prep("create table t1 (a int, b int, c int)") 136 | runsql("insert into t1 (a,b,c) values (1,2,3)") 137 | runsql("insert into t1 (a,b,c) values (2,2,3)") 138 | runsql("insert into t1 (a,b,c) values (3,2,3)") 139 | assert [[1,2,3],[3,2,3]]==runsql("select * from t1 where a in %s",((1,3),)) 140 | 141 | def test_select_xcomma(): 142 | tables,runsql=prep('create table t1 (a int, b int, c int)') 143 | tables['t1'].rows=[[1,2,3],[2,3,4]] 144 | assert [[2,3,4]]==runsql('select * from t1 where (a,b) in %s',([(2,3)],)) 145 | 146 | def test_not(): 147 | "todo: double-check operator precedence of not vs =" 148 | tables,runsql=prep("create table t1 (a int,b int)") 149 | tables['t1'].rows=[[0,0],[1,1]] 150 | print(sqparse2.parse("select * from t1 where not a=0")) 151 | assert [[1,1]]==runsql("select * from t1 where not a=0") 152 | 153 | def test_null_handling(): 154 | # https://en.wikipedia.org/wiki/Null_(SQL)#Law_of_the_excluded_fourth_.28in_WHERE_clauses.29 155 | tables,runsql=prep("create table t1 (a int,b int)") 156 | tables['t1'].rows=[ 157 | [0,0], 158 | [0,None], 159 | [None,None], 160 | ] 161 | # these two queries are equivalent 162 | assert [[0,0],[0,None]]==runsql("select * from t1 where (a=0) or not (a=0)") 163 | assert [[0,0],[0,None]]==runsql("select * from t1 where a is not null") 164 | 165 | assert [[0,0]]==runsql("select * from t1 where a=b") 166 | assert [[None,None]]==runsql("select * from t1 where a is null") 167 | assert []==runsql("select * from t1 where a=null") # I think null=null eval to false or unk or something 168 | 169 | def test_case(): 170 | tables,runsql=prep('create table t1 (a int,b int)') 171 | tables['t1'].rows=[[0,1],[1,2],[2,3]] 172 | assert [[2],[6],[9]]==runsql('select case when a=0 then 2*b else 3*b end from t1') 173 | print(runsql('select case when a=0 then 2*b end from t1')) 174 | assert [[2],[None],[None]]==runsql('select case when a=0 then 2*b end from t1') # i.e. 
missing else returns null 175 | 176 | def test_array_ops(): 177 | tables,runsql=prep('create table t1 (a int,b int[])') 178 | runsql('insert into t1 values(8,{1,2,3})') 179 | runsql('insert into t1 values(9,%s)',([4,5,6],)) 180 | runsql('insert into t1 values(10,array[1,2,3])') 181 | assert tables['t1'].rows==[[8,[1,2,3]],[9,[4,5,6]],[10,[1,2,3]]] 182 | assert [[True],[False],[True]]==runsql('select b@>array[1] from t1') 183 | assert [[[1,2,3,1]],[[4,5,6,1]],[[1,2,3,1]]]==runsql('select b||array[1] from t1') 184 | 185 | def test_select_order(): 186 | # todo: asc/desc, test more complicated expressions 187 | tables,runsql=prep('create table t1 (a int,b int)') 188 | tables['t1'].rows=[[i,0] for i in range(10,0,-1)] 189 | print(sqparse2.parse('select * from t1 order by a')) 190 | rows=runsql('select * from t1 order by a') 191 | print('tso',rows) 192 | assert rows==sorted(rows) 193 | 194 | def setup_join_test(): 195 | tables,runsql=prep('create table t1 (a int,b int)') 196 | runsql('create table t2 (c int, d int)') 197 | tables['t1'].rows=[[1,2],[3,4]] 198 | tables['t2'].rows=[[1,3],[2,5]] 199 | return tables,runsql 200 | 201 | def test_join_on(): 202 | tables,runsql = setup_join_test() 203 | assert [[1,2,1,3]] == runsql('select * from t1 join t2 on a=c') 204 | 205 | def test_implicit_join(): 206 | tables,runsql = setup_join_test() 207 | # todo: make sure real SQL behaves this way 208 | assert [[1,2,1,3]]==runsql('select * from t1,t2 where a=c') 209 | 210 | def test_table_as(): 211 | tables,runsql = setup_join_test() 212 | assert [[1,2],[3,4]]==runsql('select * from t1 as t') 213 | # todo below: make sure this is what real SQL does. or more generally, run all tests against postgres as well as pgmock. 214 | assert [[1,2,1,3]]==runsql('select * from t1 as t,t2 where t.a=t2.c') 215 | 216 | def test_join_attr(): 217 | tables,runsql = setup_join_test() 218 | with pytest.raises(table.BadFieldName): runsql('select t1.* from t1 join t2 on t1.a=t2.a') # todo: this deserves its own test 219 | assert [[1,2]]==runsql('select t1.* from t1 join t2 on t1.a=t2.c') 220 | 221 | # todo: test_name_indexer should be in test_sqex except for reliance on tables_dict. move Table to its own file. 
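# orientation for the assertions below (inferred from them, not a spec):
# index_tuple resolves a bare or qualified column name to (table_index, column_index)
# within the from-list, and a table.* or alias.* reference to just (table_index,).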
222 | def test_name_indexer(): 223 | from pg13.sqparse2 import NameX,AttrX,AsterX 224 | x = sqparse2.parse('select * from t1, t2 as alias') 225 | ni = sqex.NameIndexer.ctor_fromlist(x.tables) 226 | assert ni.table_order==['t1','t2'] 227 | tables,runsql=prep('create table t1 (a int,b int)') 228 | runsql('create table t2 (a int,c int)') 229 | ni.resolve_aonly(tables,table.Table) 230 | assert (0,1)==ni.index_tuple(tables,'b',False) 231 | assert (0,1)==ni.index_tuple(tables,sqparse2.NameX('b'),False) # make sure NameX handling works 232 | assert (1,1)==ni.index_tuple(tables,'c',False) 233 | with pytest.raises(sqex.ColumnNameError): ni.index_tuple(tables,'a',False) 234 | assert (0, 0) == ni.index_tuple(tables,AttrX(NameX('t1'),'a'),False) 235 | assert (1, 0) == ni.index_tuple(tables,AttrX(NameX('alias'),'a'),False) 236 | assert (1,) == ni.index_tuple(tables,AttrX(NameX('alias'),AsterX()),False) 237 | assert (0,) == ni.index_tuple(tables,AttrX(NameX('t1'),AsterX()),False) 238 | assert (1,) == ni.index_tuple(tables,AttrX(NameX('t2'),AsterX()),False) 239 | with pytest.raises(sqex.TableNameError): ni.index_tuple(tables,sqparse2.AttrX(NameX('bad_alias'),'e'),False) 240 | with pytest.raises(ValueError): ni.index_tuple(tables,sqparse2.AttrX(NameX('t2'),AsterX()),True) 241 | 242 | def test_nested_select(): 243 | "nested select has cardinality issues; add cases as they come up" 244 | tables,runsql=prep('create table t1 (a int, b int)') 245 | runsql('create table t2 (a int, b int)') 246 | tables['t1'].rows=[[0,1],[1,2],[3,4],[8,9]] 247 | tables['t2'].rows=[[0,1],[0,3],[0,5],[6,1]] 248 | assert [[1,2]]==runsql('select * from t1 where a=(select b from t2 where a=6)') 249 | assert []==runsql('select * from t1 where a=(select b from t2 where a=7)') 250 | 251 | def test_alias_only(): 252 | tables,runsql=prep('create table t1 (a int, b int)') 253 | tables['t1'].rows=[[0,0],[1,1],[2,2]] 254 | assert [[0,0],[1,1]]==runsql('select * from (select * from t1 where a < 2) as sub') 255 | assert [[0],[1]]==runsql('select a from (select * from t1 where a < 2) as sub') 256 | assert [[0],[1]]==runsql('select * from (select a from t1 where a < 2) as sub') 257 | assert [[0],[1]]==runsql('select a from (select a from t1 where a < 2) as sub') 258 | with pytest.raises(sqex.ColumnNameError): runsql('select b from (select a from t1 where a < 2) as sub') 259 | with pytest.raises(sqex.ColumnNameError): runsql('select c from (select a from t1 where a < 2) as sub') 260 | with pytest.raises(sqex.ColumnNameError): runsql('select c from (select * from t1 where a < 2) as sub') 261 | runsql('create table t2 (a int, c int)') 262 | tables['t2'].rows=[[0,0]] 263 | with pytest.raises(sqex.ColumnNameError): runsql('select a,c from (select * from t1) as sub1,(select * from t2) as sub2') 264 | assert [[0,0],[1,0],[2,0]]==runsql('select b,c from (select * from t1) as sub1,(select * from t2) as sub2') 265 | 266 | def test_call_as(): 267 | tables,runsql=prep('create table t1 (a int, b int)') 268 | tables['t1'].rows=[[None,1],[1,2]] 269 | assert [[0],[1]]==runsql('select coalesce(a,0) as c from t1') 270 | assert [[0],[1]]==runsql('select c from (select coalesce(a,0) as c from t1) as sub') 271 | 272 | @pytest.mark.xfail # delete without a where clause is broken 273 | def test_delete(): 274 | tables,runsql=prep('create table t1 (a int, b int)') 275 | tables['t1'].rows=[[0,1],[1,1],[2,0],[2,1]] 276 | runsql('delete from t1 where b=1') 277 | assert tables['t1'].rows==[[2,0]] 278 | runsql('delete from t1') 279 | assert tables['t1'].rows==[] 280 | 281 | 
def test_unnest(): 282 | tables,runsql=prep('create table t1 (a int, b int[])') 283 | tables['t1'].rows=[[0,[1,2,3]]] 284 | assert [[1],[2],[3]]==runsql('select unnest(b) from t1') 285 | assert [[0,1],[0,2],[0,3]]==runsql('select a,unnest(b) from t1') 286 | @pytest.mark.xfail 287 | def test_max_unnest(): 288 | "more generally, this is testing produces_rows inside consumes_rows" 289 | tables,runsql=prep('create table t1 (a int, b int[])') 290 | tables['t1'].rows=[[0,[1,2,3]]] 291 | assert [3]==runsql('select max(unnest(b)) from t1') 292 | 293 | def test_groupby(): 294 | tables,runsql=prep('create table t1 (a int, b int)') 295 | tables['t1'].rows=[[0,1],[0,2],[1,3],[1,4]] 296 | assert [[0,2,2],[1,2,4]]==runsql('select a,count(a),max(b) from t1 group by a') 297 | assert [[0,2,1],[1,2,3]]==runsql('select a,count(a),min(b) from t1 group by a') 298 | assert [[0,2],[1,2]]==runsql('select a,count(*) from t1 group by a') 299 | 300 | def test_textsearch(): 301 | tables,runsql=prep('create table t1 (a int, b text)') 302 | tables['t1'].rows=[[0,'one two three okay'],[1,'four five six okay']] 303 | assert []==runsql('select a from t1 where to_tsvector(b) @@ to_tsquery(%s)',('unk_token',)) 304 | assert [[0]]==runsql('select a from t1 where to_tsvector(b) @@ to_tsquery(%s)',('one',)) 305 | assert [[1]]==runsql('select a from t1 where to_tsvector(b) @@ to_tsquery(%s)',('four',)) 306 | assert [[0],[1]]==runsql('select a from t1 where to_tsvector(b) @@ to_tsquery(%s)',('okay',)) 307 | 308 | def test_serial(): 309 | "make sure default does the right thing for serial column type" 310 | tables,runsql=prep('create table t1 (a serial, b int)') 311 | for i in range(3): 312 | runsql('insert into t1 (b) values (%s)',(i,)) 313 | assert tables['t1'].rows == [[0,0],[1,1],[2,2]] 314 | # warning: what's supposed to happen when a value is passed for serial? 315 | 316 | def test_cast(): 317 | tables,runsql=prep('create table t1 (a int, b text)') 318 | # cast existing column 319 | runsql('insert into t1 (a) values (1)') 320 | runsql('update t1 set b=a::text') 321 | assert tables['t1'].rows[0] == [1,'1'] 322 | # cast literal 323 | runsql('insert into t1 values (2, 345::text)') 324 | assert tables['t1'].rows[1] == [2,'345'] 325 | 326 | def test_default(): 327 | tables, runsql = prep('create table t1(a int, b boolean default false)') 328 | runsql('insert into t1 (a) values (0)') 329 | assert tables['t1'].rows == [[0, False]] 330 | 331 | def test_tempkeys(): 332 | td = pgmock.TablesDict() 333 | td['a'] = [1,2,3] 334 | with td.tempkeys(): 335 | td['b'] = td['a'] 336 | td['b'].append(4) 337 | assert td['b'] is td['a'] 338 | assert td['a'] == [1,2,3,4] 339 | with pytest.raises(KeyError): td['b'] 340 | 341 | def test_transaction_basics(): 342 | ppm = pgmock_dbapi2.PgPoolMock() 343 | # 1. test that create table persists past commit 344 | with ppm.withcur() as cursor: 345 | cursor.execute('create table t1 (a int, b int)') 346 | assert list(ppm.tables.keys()) == ['t1'] 347 | # 2. test that insert persists past commit 348 | with ppm.withcur() as cursor: 349 | cursor.execute('insert into t1 values (1,3)') 350 | assert len(ppm.tables['t1'].rows) == 1 351 | class IgnorableError(Exception): pass 352 | # 3. test that create table doesn't survive a rollback 353 | try: 354 | with ppm.withcur() as cursor: 355 | cursor.execute('create table t2 (a int, b int)') 356 | raise IgnorableError 357 | except IgnorableError: pass 358 | assert list(ppm.tables.keys()) == ['t1'] 359 | # 4. 
test that insert doesn't survive a rollback 360 | try: 361 | with ppm.withcur() as cursor: 362 | cursor.execute('insert into t1 values (1,4)') 363 | raise IgnorableError 364 | except IgnorableError: pass 365 | assert len(ppm.tables['t1'].rows) == 1 366 | 367 | def test_create_nexists(): 368 | # 1. create if not exists, table exists 369 | ppm = pgmock_dbapi2.PgPoolMock() 370 | with ppm.withcur() as cursor: 371 | cursor.execute('create table t1 (a int)') 372 | cursor.execute('create table if not exists t1 (a int, b int)') 373 | assert len(ppm.tables['t1'].fields) == 1 374 | # 2. create if not exists, table doesn't exist 375 | ppm = pgmock_dbapi2.PgPoolMock() 376 | with ppm.withcur() as cursor: 377 | cursor.execute('create table if not exists t1 (a int)') 378 | assert 't1' in ppm.tables 379 | # 3. create, table exists 380 | with pgmock_dbapi2.PgPoolMock().withcur() as cursor: 381 | cursor.execute('create table t1 (a int)') 382 | with pytest.raises(ValueError) as e: 383 | cursor.execute('create table t1 (a int, b int)') 384 | assert e.value.args == ('table_exists','t1') 385 | 386 | def test_drop(): 387 | ppm = pgmock_dbapi2.PgPoolMock() 388 | with ppm.withcur() as cursor: 389 | cursor.execute('create table t1 (a int)') 390 | assert 't1' in ppm.tables 391 | cursor.execute('drop table t1') 392 | assert 't1' not in ppm.tables 393 | with pgmock_dbapi2.PgPoolMock().withcur() as cursor: 394 | cursor.execute('drop table if exists t1') 395 | with pytest.raises(KeyError) as e: 396 | cursor.execute('drop table t1') 397 | assert e.value.args == ('t1',) 398 | 399 | def test_create_inherit(): 400 | ppm = pgmock_dbapi2.PgPoolMock() 401 | with ppm.withcur() as cursor: 402 | cursor.execute('create table t1 (a int)') 403 | cursor.execute('create table t1a inherits (t1)') 404 | assert ppm.tables['t1'].child_tables == [ppm.tables['t1a']] 405 | assert ppm.tables['t1a'].parent_table == ppm.tables['t1'] 406 | 407 | def test_drop_inherit(): 408 | ppm = pgmock_dbapi2.PgPoolMock() 409 | with ppm.withcur() as cursor: 410 | cursor.execute('create table t1 (a int)') 411 | cursor.execute('create table t1a inherits (t1)') 412 | with pytest.raises(table.IntegrityError) as e: # drop parent fails without cascade 413 | cursor.execute('drop table t1') 414 | assert 't1a' in ppm.tables and 't1' in ppm.tables 415 | cursor.execute('drop table t1 cascade') # drop succeeds with cascade 416 | assert 't1a' not in ppm.tables and 't1' not in ppm.tables 417 | ppm = pgmock_dbapi2.PgPoolMock() 418 | with ppm.withcur() as cursor: 419 | cursor.execute('create table t1 (a int)') 420 | cursor.execute('create table t1a inherits (t1)') 421 | cursor.execute('drop table t1a') 422 | assert 't1a' not in ppm.tables and 't1' in ppm.tables 423 | # todo: multi-level inherit 424 | 425 | @pytest.mark.xfail 426 | def test_drop_fkey_cascade(): 427 | raise NotImplementedError 428 | 429 | @pytest.mark.xfail 430 | def test_alias_product(): 431 | raise NotImplementedError 432 | """ 433 | create table t1 (a int, b int); 434 | create table t2 (a int, b int); 435 | insert into t1 values (1); 436 | insert into t1 values (2); 437 | insert into t2 values (1,0); 438 | insert into t2 values (0,2); 439 | 440 | # it outputs this: 441 | testjoin=# select t1.a, t2.a as a2, t2.b as b2, t3.a as a3, t3.b as b3 from t1, t2, t2 as t3 where t1.a = t2.a or t1.a = t3.b; 442 | a | a2 | b2 | a3 | b3 443 | ---+----+----+----+---- 444 | 1 | 1 | 0 | 1 | 0 445 | 1 | 1 | 0 | 0 | 2 446 | 2 | 1 | 0 | 0 | 2 447 | 2 | 0 | 2 | 0 | 2 448 | (4 rows) 449 | """ 450 | 
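# end-to-end sketch (not one of the tests) of the DBAPI2 surface the suite
# above drives through PgPoolMock:
#   con = pgmock_dbapi2.connect()
#   with con, con.cursor() as cur:
#     cur.execute('create table t1 (a int)')
#     cur.execute('insert into t1 values (%s)', (1,))
#     cur.execute('select * from t1')
#     assert cur.fetchall() == [[1]]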
-------------------------------------------------------------------------------- /pg13/sqparse2.py: -------------------------------------------------------------------------------- 1 | "parsing for pgmock -- rewritten in PLY" 2 | # pylint: disable=invalid-name,no-self-use,super-init-not-called 3 | 4 | # todo: be more consistent about CommaX vs raw tuple 5 | # todo: factor out the AST-path stuff to its own file 6 | 7 | # differences vs real SQL: 8 | # 1. sql probably allows 'table' as a table name. I think I'm stricter about keywords (and I don't allow quoting columns) 9 | 10 | import itertools 11 | import ply.lex, ply.yacc 12 | from . import treepath 13 | 14 | # errors 15 | class PgMockError(Exception): 16 | pass 17 | 18 | class SQLSyntaxError(PgMockError): 19 | """base class for errors during parsing. 20 | beware: this gets raised during table execution for things we should catch during parsing. 21 | """ 22 | 23 | class BaseX(treepath.PathTree): 24 | "base class for expressions" 25 | ATTRS = () 26 | VARLEN = () 27 | 28 | def __init__(self, *args): 29 | if len(args) != len(self.ATTRS): 30 | raise TypeError('wrong_n_args', len(args), len(self.ATTRS), args) 31 | for attr, arg in zip(self.ATTRS, args): 32 | setattr(self, attr, arg) 33 | 34 | def __eq__(self, other): 35 | return type(self) is type(other) and all(getattr(self, attr) == getattr(other, attr) for attr in self.ATTRS) 36 | 37 | def __repr__(self): 38 | return '%s(%s)' % (self.__class__.__name__, ','.join(map(repr, (getattr(self, attr) for attr in self.ATTRS)))) 39 | 40 | class Literal(BaseX): 41 | ATTRS = ('val',) 42 | def toliteral(self): 43 | return self.val # pylint: disable=no-member 44 | 45 | class ArrayLit(BaseX): # todo: this isn't always a literal. what if there's a select stmt inside? yikes. 46 | ATTRS = ('vals',) 47 | VARLEN = ('vals',) 48 | def __init__(self, vals): 49 | self.vals = list(vals) 50 | 51 | def toliteral(self): 52 | return self.vals # todo: get rid? 53 | 54 | class SubLit: 55 | pass 56 | 57 | class NameX(BaseX): 58 | ATTRS = ('name',) 59 | 60 | class AsterX(BaseX): 61 | pass 62 | 63 | class NullX(BaseX): 64 | pass 65 | 66 | class AliasX(BaseX): 67 | "warning: 'name' attr is actually an expression" 68 | ATTRS = ('name', 'alias') 69 | 70 | class JoinTypeX(BaseX): 71 | ATTRS = ('side', 'outer', 'natural') 72 | @property 73 | def inner(self): 74 | return not self.outer # pylint: disable=no-member 75 | 76 | class JoinX(BaseX): 77 | ATTRS = ('a', 'b', 'on_stmt', 'jointype') 78 | 79 | class OpX(BaseX): 80 | PRIORITY = ('or', 'and', 'not', '>', '<', '@>', '@@', '||', '!=', '=', 'is not', 'is', 'in', '*', '/', '+', '-') 81 | ATTRS = ('op',) 82 | def __init__(self, op): 83 | self.op = op 84 | if op not in self.PRIORITY: 85 | raise SQLSyntaxError('unk_op', op) 86 | def __lt__(self, other): 87 | "this is for order of operations" 88 | if not isinstance(other, OpX): 89 | raise TypeError 90 | return self.PRIORITY.index(self.op) < self.PRIORITY.index(other.op) 91 | 92 | class BinX(BaseX): 93 | ATTRS = ('op', 'left', 'right') 94 | 95 | class UnX(BaseX): 96 | ATTRS = ('op', 'val') 97 | 98 | class CommaX(BaseX): 99 | # todo: implement a real __iter__ for this 100 | ATTRS = ('children',) 101 | VARLEN = ('children',) 102 | 103 | def __init__(self, children): 104 | self.children = list(children) # needs to be a list (i.e.
mutable) for SubLit substitution (in sqex.depth_first_sub via treepath.sub_slots) 105 | 106 | def __iter__(self): 107 | raise NotImplementedError("don't iterate CommaX directly -- loop on x.children") 108 | 109 | class CallX(BaseX): 110 | ATTRS = ('f', 'args') # args is not VARLEN; it's a commax because it's passed to sqex.Evaluator I think 111 | 112 | class WhenX(BaseX): 113 | ATTRS = ('when', 'then') 114 | 115 | class CaseX(BaseX): 116 | ATTRS = ('cases', 'elsex') 117 | VARLEN = ('cases',) 118 | 119 | class AttrX(BaseX): 120 | ATTRS = ('parent', 'attr') 121 | 122 | class TypeX(BaseX): 123 | ATTRS = ('type', 'width') # width is e.g. 20 for VARCHAR(20), null for e.g. TEXT. 124 | 125 | class CastX(BaseX): 126 | ATTRS = ('expr', 'to_type') 127 | 128 | class CommandX(BaseX): 129 | "base class for top-level commands. probably won't ever be used." 130 | 131 | class SelectX(CommandX): 132 | ATTRS = ('cols', 'tables', 'where', 'group', 'order', 'limit', 'offset') 133 | VARLEN = ('tables',) 134 | 135 | class ColX(BaseX): 136 | ATTRS = ('name', 'coltp', 'isarray', 'not_null', 'default', 'pkey') 137 | 138 | class PKeyX(BaseX): 139 | ATTRS = ('fields',) 140 | VARLEN = ('fields',) 141 | 142 | class TableConstraintX(BaseX): 143 | "intermediate base class for table constraints. PKeyX isn't included in this because it has its own slot in CreateX." 144 | 145 | class CheckX(TableConstraintX): 146 | ATTRS = ('expr',) 147 | 148 | class CreateX(CommandX): 149 | "note: technically pkey is a table_constraint but it also comes from cols so it's separate" 150 | ATTRS = ('nexists', 'name', 'cols', 'pkey', 'table_constraints', 'inherits') 151 | VARLEN = ('cols', 'table_constraints') # todo: is pkey varlen or CommaX? 152 | 153 | class DropX(CommandX): 154 | ATTRS = ('ifexists', 'name', 'cascade') 155 | 156 | class ReturnX(BaseX): 157 | ATTRS = ('expr',) 158 | 159 | class InsertX(CommandX): 160 | ATTRS = ('table', 'cols', 'values', 'ret') 161 | VARLEN = ('cols', 'values') 162 | 163 | class AssignX(BaseX): 164 | ATTRS = ('col', 'expr') 165 | 166 | class UpdateX(CommandX): 167 | ATTRS = ('tables', 'assigns', 'where', 'ret') 168 | VARLEN = ('tables', 'assigns') 169 | 170 | class IndexX(CommandX): 171 | ATTRS = ('string',) 172 | 173 | class DeleteX(CommandX): 174 | ATTRS = ('table', 'where', 'returnx') 175 | 176 | class StartX(CommandX): 177 | ATTRS = () 178 | 179 | class CommitX(CommandX): 180 | ATTRS = () 181 | 182 | class RollbackX(CommandX): 183 | ATTRS = () 184 | 185 | def bin_priority(op, left, right): 186 | "I don't know how to handle order of operations in the LR grammar, so here it is" 187 | # note: recursion limits protect this from infinite looping. I'm serious. (i.e.
it will crash rather than hanging) 188 | if isinstance(left, BinX) and left.op < op: 189 | return bin_priority(left.op, left.left, bin_priority(op, left.right, right)) 190 | elif isinstance(left, UnX) and left.op < op: 191 | return un_priority(left.op, BinX(op, left.val, right)) # note: obviously, no need to do this when right is a UnX 192 | elif isinstance(right, BinX) and right.op < op: 193 | return bin_priority(right.op, bin_priority(op, left, right.left), right.right) 194 | else: 195 | return BinX(op, left, right) 196 | 197 | def un_priority(op, val): 198 | "unary expression order-of-operations helper" 199 | if isinstance(val, BinX) and val.op < op: 200 | return bin_priority(val.op, UnX(op, val.left), val.right) 201 | else: 202 | return UnX(op, val) 203 | 204 | KEYWORDS = {w: 'kw_' + w for w in 'array case when then else end as join on from where order by limit offset select is not and or in null default primary key if exists create table insert into values returning update set delete group inherits check constraint start transaction commit rollback left right full inner outer using drop cascade cast'.split()} 205 | class SqlGrammar: 206 | # todo: adhere more closely to the spec. http://www.postgresql.org/docs/9.1/static/sql-syntax-lexical.html 207 | t_STRLIT = "'((?<=\\\\)'|[^'])+'" 208 | t_INTLIT = r'\d+' 209 | t_SUBLIT = '%s' 210 | t_ARITH = r'\|\||\/|\+' 211 | t_CMP = r'\!=|@>|@@|<|>' 212 | t_CAST = '::' 213 | def t_NAME(self, t): 214 | r'[A-Za-z]\w*|\"char\"' 215 | # warning: this allows stuff like SeLeCt with mixed case. who cares. 216 | t.type = KEYWORDS[t.value.lower()] if t.value.lower() in KEYWORDS else 'BOOL' if t.value.lower() in ('is', 'not') else 'NAME' 217 | return t 218 | literals = ('[', ']', '(', ')', '{', '}', ',', '.', '*', '=', '-') 219 | t_ignore = ' \n\t' 220 | 221 | def t_error(self, t): 222 | raise SQLSyntaxError(t) # I think t is LexToken(error,unparsed_tail) 223 | 224 | tokens = ( 225 | # general 226 | 'STRLIT', 'INTLIT', 'NAME', 'SUBLIT', 227 | # operators 228 | 'ARITH', 'CMP', 'CAST', 229 | ) + tuple(KEYWORDS.values()) 230 | 231 | precedence = ( 232 | # ('left','DOT'), 233 | ) 234 | 235 | def p_name(self, t): 236 | "expression : NAME" 237 | t[0] = NameX(t[1]) 238 | def p_float(self, t): 239 | "expression : INTLIT '.' INTLIT" 240 | t[0] = Literal(float('%s.%s' % (t[1], t[3]))) 241 | def p_int(self, t): 242 | "expression : INTLIT" 243 | t[0] = Literal(int(t[1])) 244 | def p_strlit(self, t): 245 | "expression : STRLIT" 246 | t[0] = Literal(t[1][1:-1].replace("\\'", "'")) # warning: this is not safe 247 | def p_asterx(self, t): 248 | "expression : '*'" 249 | t[0] = AsterX() 250 | def p_null(self, t): 251 | "expression : kw_null" 252 | t[0] = NullX() 253 | def p_sublit(self, t): 254 | "expression : SUBLIT" 255 | t[0] = SubLit 256 | def p_unop(self, t): 257 | "unop : '-' \n | kw_not" 258 | t[0] = OpX(t[1]) 259 | def p_isnot(self, t): 260 | "isnot : kw_is kw_not" 261 | t[0] = 'is not' 262 | def p_boolop(self, t): 263 | "boolop : kw_and \n | kw_or \n | kw_in" 264 | t[0] = t[1] 265 | def p_typename(self, t): 266 | "typename : NAME \n | NAME '(' INTLIT ')'" 267 | t[0] = TypeX(t[1], None) if len(t) == 2 else TypeX(t[1], int(t[3])) 268 | def p_castx(self, t): 269 | "expression : expression CAST typename" 270 | t[0] = CastX(t[1], t[3]) 271 | def p_castx2(self, t): 272 | "expression : kw_cast '(' expression kw_as typename ')'" 273 | # the second expression should be some kind of type spec. 
use it in createx and 'x cast y' also 274 | t[0] = CastX(t[3], t[5]) 275 | def p_binop(self, t): 276 | "binop : ARITH \n | CMP \n | boolop \n | isnot \n | '=' \n | '-' \n | '*' \n | kw_is" 277 | t[0] = OpX(t[1]) 278 | def p_x_boolx(self, t): 279 | """expression : unop expression 280 | | expression binop expression 281 | """ 282 | # todo: ply exposes precedence with %prec, use it. 283 | if len(t) == 4: 284 | t[0] = bin_priority(t[2], t[1], t[3]) 285 | elif len(t) == 3: 286 | t[0] = un_priority(t[1], t[2]) 287 | else: 288 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 289 | 290 | def p_x_commalist(self, t): 291 | """commalist : commalist ',' expression 292 | | expression 293 | """ 294 | if len(t) == 2: 295 | t[0] = CommaX([t[1]]) 296 | elif len(t) == 4: 297 | t[0] = CommaX(t[1].children + [t[3]]) 298 | else: 299 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 300 | 301 | def p_array(self, t): 302 | """expression : '{' commalist '}' 303 | | kw_array '[' commalist ']' 304 | """ 305 | if len(t) == 4: 306 | t[0] = ArrayLit(t[2].children) 307 | elif len(t) == 5: 308 | t[0] = ArrayLit(t[3].children) 309 | else: 310 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 311 | 312 | def p_whenlist(self, t): 313 | """whenlist : whenlist kw_when expression kw_then expression 314 | | kw_when expression kw_then expression 315 | """ 316 | if len(t) == 5: 317 | t[0] = [WhenX(t[2], t[4])] 318 | elif len(t) == 6: 319 | t[0] = t[1] + [WhenX(t[3], t[5])] 320 | else: 321 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 322 | 323 | def p_case(self, t): 324 | """expression : kw_case whenlist kw_else expression kw_end 325 | | kw_case whenlist kw_end 326 | """ 327 | if len(t) == 4: 328 | t[0] = CaseX(t[2], None) 329 | elif len(t) == 6: 330 | t[0] = CaseX(t[2], t[4]) 331 | else: 332 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 333 | 334 | def p_call(self, t): 335 | "expression : NAME '(' commalist ')'" 336 | t[0] = CallX(t[1], t[3]) 337 | 338 | def p_attr(self, t): 339 | """attr : NAME '.' NAME 340 | | NAME '.' 
'*' 341 | """ 342 | # careful: sqex.infer_columns relies on AttrX not containing anything but a name 343 | t[0] = AttrX(NameX(t[1]), AsterX() if t[3] == '*' else NameX(t[3])) 344 | 345 | def p_attrx(self, t): 346 | "expression : attr" 347 | t[0] = t[1] 348 | 349 | def p_aliasx(self, t): 350 | "aliasx : expression kw_as NAME" 351 | t[0] = AliasX(t[1], t[3]) 352 | 353 | def p_paren(self, t): 354 | "expression : '(' expression ')' \n | '(' commalist ')'" # todo doc: think about this 355 | t[0] = t[2] 356 | 357 | def p_fromtable(self, t): 358 | """fromtable : NAME 359 | | aliasx 360 | | '(' selectx ')' kw_as NAME 361 | """ 362 | if len(t) == 6: 363 | t[0] = AliasX(t[2], t[5]) 364 | elif len(t) == 2: 365 | t[0] = t[1] 366 | else: 367 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 368 | 369 | def p_outerjoin(self, t): 370 | "outerjoin : kw_left \n | kw_right \n | kw_full" 371 | t[0] = t[1] 372 | 373 | def p_jointype(self, t): 374 | """jointype : kw_join 375 | | kw_inner kw_join 376 | | outerjoin kw_outer kw_join 377 | | outerjoin kw_join 378 | """ 379 | if len(t) <= 2 or t[1] == 'inner': 380 | t[0] = JoinTypeX(None, False, None) 381 | else: 382 | t[0] = JoinTypeX(t[1], True, None) 383 | 384 | def p_joinx(self, t): 385 | # todo: support join types http://www.postgresql.org/docs/9.4/static/queries-table-expressions.html#QUERIES-JOIN 386 | """joinx : fromtable jointype fromtable 387 | | fromtable jointype fromtable kw_on expression 388 | | fromtable jointype fromtable kw_using '(' namelist ')' 389 | """ 390 | if len(t) == 4: 391 | t[0] = JoinX(t[1], t[3], None, t[2]) 392 | elif len(t) == 6: 393 | t[0] = JoinX(t[1], t[3], t[5], t[2]) 394 | else: 395 | raise NotImplementedError('todo: join .. using') 396 | 397 | def p_fromitem(self, t): 398 | "fromitem : fromtable \n | joinx" 399 | t[0] = t[1] 400 | 401 | def p_fromitem_list(self, t): 402 | """fromitem_list : fromitem_list ',' fromitem 403 | | fromitem 404 | """ 405 | if len(t) == 2: 406 | t[0] = [t[1]] 407 | elif len(t) == 4: 408 | t[0] = t[1] + [t[3]] 409 | else: 410 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 411 | 412 | def p_fromlist(self, t): 413 | "fromlist : kw_from fromitem_list \n | " 414 | t[0] = t[2] if len(t) == 3 else [] 415 | def p_wherex(self, t): 416 | "wherex : kw_where expression \n | " 417 | t[0] = t[2] if len(t) == 3 else None 418 | def p_order(self, t): 419 | "order : kw_order kw_by expression \n | " 420 | t[0] = t[3] if len(t) == 4 else None 421 | def p_limit(self, t): 422 | "limit : kw_limit expression \n | " 423 | t[0] = t[2] if len(t) == 3 else None 424 | def p_offset(self, t): 425 | "offset : kw_offset expression \n | " 426 | t[0] = t[2] if len(t) == 3 else None 427 | def p_group(self, t): 428 | "group : kw_group kw_by expression \n | " 429 | t[0] = t[3] if len(t) == 4 else None 430 | def p_selectx(self, t): 431 | "selectx : kw_select commalist fromlist wherex group order limit offset" 432 | t[0] = SelectX(*t[2:]) 433 | def p_extra_x(self, t): 434 | "expression : selectx \n | aliasx" 435 | t[0] = t[1] # expressions that also need to be separately addressable 436 | def p_isarray(self, t): 437 | "is_array : '[' ']' \n | " 438 | t[0] = len(t) > 1 439 | def p_isnotnull(self, t): 440 | "is_notnull : kw_not kw_null \n | " 441 | t[0] = len(t) > 1 442 | def p_default(self, t): 443 | "default : kw_default expression \n | " 444 | t[0] = t[2] if len(t) > 1 else None 445 | def p_ispkey(self, t): 446 | "is_pkey : kw_primary kw_key \n | " 447 | t[0] = len(t) > 1 448 | def p_colspec(self, t): 449 | 
"col_spec : NAME typename is_array is_notnull default is_pkey" 450 | # todo: integrate is_array into typename 451 | t[0] = ColX(*t[1:]) 452 | def p_namelist(self, t): 453 | "namelist : namelist ',' NAME \n | NAME" 454 | if len(t) == 2: 455 | t[0] = [t[1]] 456 | elif len(t) == 4: 457 | t[0] = t[1] + [t[3]] 458 | else: 459 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 460 | def p_pkey(self, t): 461 | "pkey_stmt : kw_primary kw_key '(' namelist ')'" 462 | t[0] = PKeyX(t[4]) 463 | def p_nexists(self, t): 464 | "nexists : kw_if kw_not kw_exists \n | " 465 | t[0] = len(t) > 1 466 | def p_opt_inheritx(self, t): 467 | "opt_inheritx : inheritx \n | " 468 | t[0] = None if len(t) == 1 else t[1] 469 | def p_inheritx(self, t): 470 | "inheritx : kw_inherits '(' namelist ')'" 471 | t[0] = t[3] 472 | def p_constraint_name(self, t): 473 | "opt_constraint_name : kw_constraint NAME \n | " 474 | t[0] = None if len(t) == 1 else t[2] 475 | def p_tconstraint_check(self, t): 476 | "table_constraint : opt_constraint_name kw_check '(' expression ')'" 477 | t[0] = CheckX(t[4]) 478 | def p_tablespec(self, t): 479 | "tablespec : col_spec \n | pkey_stmt \n | table_constraint" 480 | t[0] = t[1] 481 | def p_tablespecs(self, t): 482 | "tablespecs : tablespecs ',' tablespec \n | tablespec" 483 | t[0] = [t[1]] if len(t) == 2 else t[1] + [t[3]] 484 | def p_createx(self, t): 485 | """expression : kw_create kw_table nexists NAME '(' tablespecs ')' opt_inheritx 486 | | kw_create kw_table nexists NAME inheritx 487 | """ 488 | if len(t) == 6: 489 | t[0] = CreateX(t[3], t[4], [], None, [], t[5]) 490 | else: 491 | all_constraints = {k: list(group) for k, group in itertools.groupby(t[6], type)} 492 | pkey = (all_constraints.get(PKeyX) or [None])[0] # todo: get pkey from column constraints as well 493 | if PKeyX in all_constraints and len(all_constraints[PKeyX]) != 1: 494 | raise SQLSyntaxError('too_many_pkeyx', all_constraints[PKeyX]) 495 | # note below: this is a rare case where issubclass is safe 496 | table_constraints = sum([v for k, v in list(all_constraints.items()) if issubclass(k, TableConstraintX)], []) 497 | t[0] = CreateX(t[3], t[4], all_constraints.get(ColX) or [], pkey, table_constraints, t[8]) 498 | def p_ifexists(self, t): 499 | "ifexists : kw_if kw_exists \n | " 500 | t[0] = len(t) > 1 501 | def p_cascade(self, t): 502 | "cascade : kw_cascade \n | " 503 | t[0] = len(t) > 1 504 | def p_dropx(self, t): 505 | "expression : kw_drop kw_table ifexists NAME cascade" 506 | t[0] = DropX(t[3], t[4], t[5]) 507 | def p_returnx(self, t): 508 | "opt_returnx : kw_returning commalist \n | " 509 | # note: this gets weird because '(' commalist ')' is an expression but we need bare commalist to support non-paren returns 510 | t[0] = None if len(t) == 1 else ReturnX(t[2].children[0] if len(t[2].children) == 1 else t[2]) 511 | def p_optparennamelist(self, t): 512 | "opt_paren_namelist : '(' namelist ')' \n | " 513 | t[0] = t[2] if len(t) > 1 else None 514 | def p_insertx(self, t): 515 | "expression : kw_insert kw_into NAME opt_paren_namelist kw_values '(' commalist ')' opt_returnx" 516 | t[0] = InsertX(t[3], t[4], t[7].children, t[9]) 517 | def p_assign(self, t): 518 | "assign : NAME '=' expression \n | attr '=' expression" 519 | t[0] = AssignX(t[1], t[3]) 520 | def p_assignlist(self, t): 521 | "assignlist : assignlist ',' assign \n | assign" 522 | if len(t) == 4: 523 | t[0] = t[1] + [t[3]] 524 | elif len(t) == 2: 525 | t[0] = [t[1]] 526 | else: 527 | raise NotImplementedError('unk_len', len(t)) # pragma: no cover 528 
| def p_updatex(self, t): 529 | "expression : kw_update namelist kw_set assignlist wherex opt_returnx" 530 | t[0] = UpdateX(t[2], t[4], t[5], t[6]) 531 | def p_deletex(self, t): 532 | "expression : kw_delete kw_from NAME wherex opt_returnx" 533 | t[0] = DeleteX(t[3], t[4], t[5]) 534 | 535 | # todo: these aren't really expressions; they can only be used at top-level. sqex will catch it. should the syntax know? 536 | def p_startx(self, t): 537 | "expression : kw_start kw_transaction" 538 | t[0] = StartX() 539 | 540 | def p_commitx(self, t): 541 | "expression : kw_commit" 542 | t[0] = CommitX() 543 | 544 | def p_rollbackx(self, t): 545 | "expression : kw_rollback" 546 | t[0] = RollbackX() 547 | 548 | def p_error(self, t): 549 | raise SQLSyntaxError(t) 550 | 551 | LEXER = ply.lex.lex(module=SqlGrammar()) 552 | def lex(string): 553 | "this is only used by tests" 554 | safe_lexer = LEXER.clone() # reentrant? I can't tell, I hate implicit globals. do a threading test 555 | safe_lexer.input(string) 556 | a = [] 557 | while 1: 558 | t = safe_lexer.token() 559 | if t: 560 | a.append(t) 561 | else: 562 | break 563 | return a 564 | 565 | YACC = ply.yacc.yacc(module=SqlGrammar(), debug=0, write_tables=0) 566 | def parse(string): 567 | "return a BaseX tree for the string" 568 | 569 | if string.strip().lower().startswith('create index'): 570 | return IndexX(string) 571 | return YACC.parse(string, lexer=LEXER.clone()) 572 | -------------------------------------------------------------------------------- /pg13/sqex.py: -------------------------------------------------------------------------------- 1 | "expression evaluation helpers for pgmock. has duck-dependencies on pgmock's Table class, needs redesign." 2 | # todo: most of the heavy lifting happens here. profile and identify candidates for Cython port. 3 | 4 | import itertools, collections 5 | from . import sqparse2, threevl, misc, treepath 6 | 7 | # todo: derive errors below from something pg13-specific 8 | class ColumnNameError(Exception): 9 | "name not in any tables or name matches too many tables" 10 | class TableNameError(Exception): 11 | "expression referencing unk table" 12 | 13 | # todo doc: explain why it's not necessary to do these checks on the whereclause 14 | def consumes_rows(ex): 15 | return isinstance(ex, sqparse2.CallX) and ex.f in ('min', 'max', 'count') 16 | def returns_rows(ex): 17 | return isinstance(ex, sqparse2.CallX) and ex.f in ('unnest', ) 18 | def contains(expr, field): 19 | return bool(treepath.sub_slots(expr, field, match=True)) 20 | 21 | # pylint: disable=too-many-return-statements, too-many-branches 22 | def evalop(oper, left, right): 23 | "this takes evaluated left and right (i.e. values not expressions)" 24 | if oper in ('=', '!=', '>', '<'): 25 | return threevl.ThreeVL.compare(oper, left, right) 26 | elif oper in ('+', '-', '*', '/'): 27 | # todo: does arithmetic require threevl? 28 | if oper == '/': 29 | raise NotImplementedError('todo: spec about int/float division') 30 | return (left + right) if oper == '+' else (left - right) if oper == '-' else (left * right) if oper == '*' else (left/right) 31 | elif oper == 'in': 32 | return (tuple(left) in right) if isinstance(left, list) and isinstance(right[0], tuple) else (left in right) 33 | elif oper in ('and', 'or'): 34 | return threevl.ThreeVL.andor(oper, left, right) 35 | elif oper in ('is not', 'is'): 36 | if right is not None: 37 | raise NotImplementedError('can null be on either side?
what if neither value is null?') 38 | return (left is not None) if (oper == 'is not') else (left is None) 39 | elif oper == '@>': 40 | # todo: support a TextSearchDoc that will overload a lot of these operators 41 | if not all(isinstance(x, list) for x in (left, right)): 42 | raise TypeError('non-array-args', oper, left, right) 43 | return set(left) >= set(right) 44 | elif oper == '||': 45 | if not all(isinstance(x, list) for x in (left, right)): 46 | raise TypeError('non-array-args', oper, left, right) 47 | return left+right 48 | elif oper == '@@': 49 | if not all(isinstance(x, set) for x in (left, right)): 50 | raise TypeError('non_set_args', oper, type(left), type(right)) 51 | return bool(left & right) 52 | else: 53 | raise NotImplementedError(oper, left, right) # pragma: no cover 54 | 55 | def uniqify(list_): 56 | "inefficient on long lists; short lists only. preserves order." 57 | arr = [] 58 | for item in list_: 59 | if item not in arr: 60 | arr.append(item) 61 | return arr 62 | 63 | def eliminate_sequential_children(paths): 64 | "helper for infer_columns. removes paths that are direct children of the n-1 or n-2 path" 65 | return [p for i, p in enumerate(paths) if not ((i > 0 and paths[i-1] == p[:-1]) or (i > 1 and paths[i-2] == p[:-1]))] 66 | 67 | def infer_columns(selectx, tables_dict): 68 | """infer the columns for a subselect that creates an implicit table. 69 | the output of this *can* contain duplicate names, fingers crossed that downstream code uses the first. 70 | (Look up SQL spec on dupe names.) 71 | todo: I think there's common logic here and inside NameIndexer that can be merged. 72 | todo: this is a beast 73 | """ 74 | # todo: support CTEs -- with all this plumbing, might as well 75 | table2fields = {} 76 | table_order = [] 77 | for table in selectx.tables: 78 | if isinstance(table, str): 79 | table2fields[table] = tables_dict[table].fields 80 | table_order.append(table) 81 | elif isinstance(table, sqparse2.AliasX): 82 | if isinstance(table.name, str): 83 | table2fields[table.name] = tables_dict[table.name].fields 84 | table_order.append(table.name) 85 | elif isinstance(table.name, sqparse2.SelectX): 86 | raise NotImplementedError('todo: inner subquery') 87 | else: 88 | raise TypeError('AliasX.name', type(table.name)) 89 | else: 90 | raise TypeError('table', type(table)) 91 | # the forms are: *, x.*, x.y, y.
expressions are anonymous unless they have an 'as' (handled via AliasX below) 92 | table_order = uniqify(table_order) 93 | cols = [] 94 | for col in selectx.cols.children: 95 | if isinstance(col, sqparse2.AsterX): 96 | for table in table_order: 97 | cols.extend(table2fields[table]) 98 | elif isinstance(col, sqparse2.BaseX): 99 | all_paths = treepath.sub_slots(col, lambda x: isinstance(x, (sqparse2.AttrX, sqparse2.NameX, sqparse2.AliasX)), match=True) 100 | paths = eliminate_sequential_children(all_paths) # this eliminates NameX under AttrX 101 | for path in paths: 102 | item = col[path] 103 | if isinstance(item, sqparse2.AttrX): 104 | if not isinstance(item.parent, sqparse2.NameX): 105 | raise TypeError('parent_not_name', type(item.parent)) 106 | if isinstance(item.attr, sqparse2.NameX): 107 | raise NotImplementedError # todo 108 | elif isinstance(item.attr, sqparse2.AsterX): 109 | cols.extend(table2fields[item.parent.name]) 110 | else: 111 | raise TypeError('attr_unk_type', type(item.attr)) 112 | elif isinstance(item, sqparse2.NameX): 113 | matching_fields = [_f for _f in (next((field for field in table2fields[tab] if field.name == item.name), None) for tab in table_order) if _f] 114 | if len(matching_fields) != 1: 115 | raise sqparse2.SQLSyntaxError('missing_or_dupe_field', item, matching_fields) 116 | cols.append(matching_fields[0]) 117 | elif isinstance(item, sqparse2.AliasX): 118 | cols.append(sqparse2.ColX(item.alias, None, None, None, None, None)) 119 | else: 120 | raise TypeError('unk_item_type', type(item)) # pragma: no cover 121 | else: 122 | raise TypeError('unk_col_type', type(col)) # pragma: no cover 123 | return cols 124 | 125 | class NameIndexer: 126 | """helper that takes str, NameX or attrx and returns the right thing (or raises an error on ambiguity). 127 | Note: alias-only tables 'select from (nested select) as alias' currently live here. replace_subqueries might be a better place. 128 | warning: a-only tables probably need to work out their dependency graph 129 | """ 130 | @staticmethod 131 | def update_aliases(aliases, aonly, item): 132 | "helper for ctor.
takes AliasX or string as second arg" 133 | if isinstance(item, str): 134 | aliases[item] = item 135 | elif isinstance(item, sqparse2.AliasX): 136 | if not isinstance(item.alias, str): 137 | raise TypeError('alias not string', type(item.alias)) 138 | if isinstance(item.name, sqparse2.NameX): 139 | aliases.update({item.alias:item.name.name, item.name.name:item.name.name}) 140 | elif isinstance(item.name, sqparse2.SelectX): 141 | aliases.update({item.alias:item.alias}) 142 | aonly[item.alias] = item.name 143 | else: 144 | raise TypeError('aliasx_unk_thing', type(item.name)) # pragma: no cover 145 | else: 146 | raise TypeError(type(item)) # pragma: no cover 147 | 148 | @classmethod 149 | def ctor_fromlist(cls, fromlistx): 150 | aliases = {} 151 | aonly = {} 152 | for from_item in fromlistx: 153 | if isinstance(from_item, str): 154 | cls.update_aliases(aliases, aonly, from_item) 155 | elif isinstance(from_item, sqparse2.AliasX): 156 | cls.update_aliases(aliases, aonly, from_item) 157 | elif isinstance(from_item, sqparse2.JoinX): 158 | cls.update_aliases(aliases, aonly, from_item.a) 159 | cls.update_aliases(aliases, aonly, from_item.b) 160 | else: 161 | raise TypeError(type(from_item)) # pragma: no cover 162 | table_order = sorted(set(aliases.values())) 163 | return cls(aliases, table_order, aonly) 164 | 165 | @classmethod 166 | def ctor_name(cls, name): 167 | return cls({name:name}, [name], {}) 168 | 169 | def __init__(self, aliases, table_order, alias_only_tables): 170 | self.aliases, self.table_order, self.aonly = aliases, table_order, alias_only_tables 171 | self.aonly_resolved = False 172 | @misc.meth_once 173 | def resolve_aonly(self, tables_dict, table_ctor): 174 | "circular depends on pgmock.Table. refactor." 175 | for alias, selectx in list(self.aonly.items()): 176 | table = table_ctor(alias, infer_columns(selectx, tables_dict), None) 177 | table.rows = run_select(selectx, tables_dict, table_ctor) 178 | self.aonly[alias] = table 179 | self.aonly_resolved = True 180 | def index_tuple(self, tables_dict, index, is_set): 181 | "helper for rowget/rowset" 182 | if not self.aonly_resolved: 183 | raise RuntimeError('resolve_aonly() before querying nix') 184 | with tables_dict.tempkeys(): 185 | tables_dict.update(self.aonly) # todo: find spec support for aliases overwriting existing tables. (more likely, it's an error) 186 | index = index.name if isinstance(index, sqparse2.NameX) else index 187 | if isinstance(index, str): 188 | candidates = [t for t in self.table_order if any(f.name == index for f in tables_dict[t].fields)] 189 | if len(candidates) != 1: 190 | raise ColumnNameError(("ambiguous_column" if candidates else "no_such_column"), index) 191 | tname, = candidates 192 | return self.table_order.index(tname), tables_dict[tname].lookup(index).index 193 | elif isinstance(index, sqparse2.AttrX): 194 | if index.parent.name not in self.aliases: 195 | raise TableNameError('table_notin_x', index.parent, self.aliases) 196 | tname = self.aliases[index.parent.name] 197 | tindex = self.table_order.index(tname) 198 | if isinstance(index.attr, sqparse2.AsterX): 199 | if is_set: 200 | raise ValueError('cant_set_asterisk') # todo: better error class 201 | else: 202 | return (tindex, ) 203 | else: 204 | return (tindex, tables_dict[tname].lookup(index.attr).index) 205 | # todo: stronger typing here. make sure both fields of the AttrX are always strings. 
206 | else: 207 | raise TypeError(type(index)) # pragma: no cover 208 | def rowget(self, tables_dict, row_list, index): 209 | "row_list in self.row_order" 210 | tmp = row_list 211 | for i in self.index_tuple(tables_dict, index, False): 212 | tmp = tmp[i] 213 | return tmp 214 | def rowset(self, tables_dict, row_list, index): 215 | raise NotImplementedError # note: shouldn't need this until update uses this 216 | def __repr__(self): 217 | return ''%self.table_order 218 | 219 | def decompose_select(selectx): 220 | "return [(parent, setter) for scalar_subquery], wherex_including_on, NameIndexer. helper for run_select" 221 | nix = NameIndexer.ctor_fromlist(selectx.tables) 222 | where = [] 223 | for fromx in selectx.tables: 224 | if isinstance(fromx, sqparse2.JoinX) and fromx.on_stmt is not None: 225 | # todo: what happens if on_stmt columns are non-ambiguous in the context of the join tables but ambiguous overall? yuck. 226 | where.append(fromx.on_stmt) 227 | if selectx.where: 228 | where.append(selectx.where) 229 | return nix, where 230 | 231 | def eval_where(where_list, composite_row, nix, tables_dict): 232 | "join-friendly whereclause evaluator. composite_row is a list or tuple of row lists. where_list is the thing from decompose_select." 233 | # todo: do I need to use 3vl instead of all() to merge where_list? 234 | evaluator = Evaluator(composite_row, nix, tables_dict) 235 | return all(evaluator.eval(w) for w in where_list) 236 | 237 | def flatten_scalar(whatever): 238 | "warning: there's a systematic way to do this and I'm doing it blindly. In particular, this will screw up arrays." 239 | try: 240 | flat1 = whatever[0] 241 | except IndexError: 242 | return None 243 | try: 244 | return flat1[0] 245 | except TypeError: 246 | return flat1 247 | 248 | def replace_subqueries(ex, tables, table_ctor): 249 | "this mutates passed in ex (any BaseX), replacing nested selects with their (flattened) output" 250 | # http://www.postgresql.org/docs/9.1/static/sql-expressions.html#SQL-SYNTAX-SCALAR-SUBQUERIES 251 | # see here for subquery conditions that *do* use multi-rows. ug. http://www.postgresql.org/docs/9.1/static/functions-subquery.html 252 | if isinstance(ex, sqparse2.SelectX): 253 | old_tables, ex.tables = ex.tables, [] # we *don't* recurse into tables because selects in here get transformed into tables 254 | for path in treepath.sub_slots(ex, lambda x: isinstance(x, sqparse2.SelectX)): 255 | ex[path] = sqparse2.Literal(flatten_scalar(run_select(ex[path], tables, table_ctor))) 256 | if isinstance(ex, sqparse2.SelectX): 257 | ex.tables = old_tables 258 | return ex # but it was modified in place, too 259 | 260 | def unnest_helper(cols, row): 261 | wrapped = [val if contains(col, returns_rows) else [val] for col, val in zip(cols.children, row)] 262 | return list(map(list, itertools.product(*wrapped))) 263 | 264 | def collapse_group_expr(groupx, cols, ret_row): 265 | "collapses columns matching the group expression. I'm sure this is buggy; look at a real DB's imp of this." 266 | for i, col in enumerate(cols.children): 267 | if col == groupx: 268 | ret_row[i] = ret_row[i][0] 269 | return ret_row 270 | 271 | def run_select(ex, tables, table_ctor): 272 | nix, where = decompose_select(ex) 273 | nix.resolve_aonly(tables, table_ctor) 274 | with tables.tempkeys(): 275 | # so aliases are temporary. todo doc: why am I doing this here *and* in index_tuple? 
276 | tables.update(nix.aonly) 277 | composite_rows = [c_row for c_row in itertools.product(*(tables[t].rows for t in nix.table_order)) if eval_where(where, c_row, nix, tables)] 278 | if ex.order: # note: order comes before limit / offset 279 | composite_rows.sort(key=lambda c_row: Evaluator(c_row, nix, tables).eval(ex.order)) 280 | if ex.limit or ex.offset: # pragma: no cover 281 | raise NotImplementedError('notimp: limit, offset', ex.limit, ex.offset) 282 | 283 | if ex.group: 284 | # todo: non-aggregate expressions are allowed if they consume only the group expression 285 | # todo: does the group expression have to be a NameX? for now it can be any expression. check specs. 286 | # todo: this block shares logic with other parts of the function. needs refactor. 287 | badcols = [col for col in ex.cols.children if not col == ex.group and not contains(col, consumes_rows)] 288 | if badcols: 289 | raise ValueError('illegal_cols_in_group', badcols) 290 | if contains(ex.cols, returns_rows): 291 | raise NotImplementedError('todo: unnest with grouping') 292 | groups = collections.OrderedDict() 293 | for row in composite_rows: 294 | k = Evaluator(row, nix, tables).eval(ex.group) 295 | if k not in groups: 296 | groups[k] = [] 297 | groups[k].append(row) 298 | return [collapse_group_expr(ex.group, ex.cols, Evaluator(g_rows, nix, tables).eval(ex.cols)) for g_rows in list(groups.values())] 299 | if contains(ex.cols, consumes_rows): 300 | if not all(contains(col, consumes_rows) for col in ex.cols.children): 301 | # todo: this isn't good enough. what about nesting cases like max(min(whatever)) 302 | raise sqparse2.SQLSyntaxError('not_all_aggregate') # is this the way real PG works? aim at giving PG error codes 303 | return Evaluator(composite_rows, nix, tables).eval(ex.cols) 304 | else: 305 | ret = [Evaluator(r, nix, tables).eval(ex.cols) for r in composite_rows] 306 | return sum((unnest_helper(ex.cols, row) for row in ret), []) if contains(ex.cols, returns_rows) else ret 307 | 308 | def starlike(tok): 309 | "weird things happen to cardinality when working with * in comma-lists. this detects when to do that." 310 | # todo: is '* as name' a thing? 311 | return isinstance(tok, sqparse2.AsterX) or isinstance(tok, sqparse2.AttrX) and isinstance(tok.attr, sqparse2.AsterX) 312 | 313 | class Evaluator: 314 | # todo: use intermediate types: Scalar, Row, RowList, Table. 315 | # Row and Table might be able to bundle into RowList. RowList should know the type and names of its columns. 316 | # This will solve a lot of cardinality confusion. 317 | def __init__(self, c_row, nix, tables): 318 | "c_row is a composite row, i.e.
a list/tuple of rows from all the query's tables, ordered by nix.table_order" 319 | self.c_row, self.nix, self.tables = c_row, nix, tables 320 | 321 | def eval_agg_call(self, exp): 322 | "helper for eval_callx; evaluator for CallX nodes that consume multiple rows" 323 | if not isinstance(self.c_row, list): 324 | raise TypeError('aggregate function expected a list of rows') 325 | if len(exp.args.children) != 1: 326 | raise ValueError('aggregate function expected a single value', exp.args) 327 | arg, = exp.args.children # intentional: error if len != 1 328 | vals = [Evaluator(c_r, self.nix, self.tables).eval(arg) for c_r in self.c_row] 329 | if not vals: 330 | return None 331 | if exp.f == 'min': 332 | return min(vals) 333 | elif exp.f == 'max': 334 | return max(vals) 335 | elif exp.f == 'count': 336 | return len(vals) 337 | else: 338 | raise NotImplementedError('unk_func', exp.f) # pragma: no cover 339 | 340 | def eval_nonagg_call(self, exp): 341 | "helper for eval_callx; evaluator for CallX nodes that consume a single value" 342 | # todo: get more concrete about argument counts 343 | args = self.eval(exp.args) 344 | if exp.f == 'coalesce': 345 | left, right = args # todo: does coalesce take more than 2 args? 346 | return right if left is None else left 347 | elif exp.f == 'unnest': 348 | return args[0] # note: run_select does some work in this case too 349 | elif exp.f in ('to_tsquery', 'to_tsvector'): 350 | return set(self.eval(exp.args.children[0]).split()) 351 | else: 352 | raise NotImplementedError('unk_function', exp.f) # pragma: no cover 353 | 354 | def eval_callx(self, exp): 355 | "dispatch for CallX" 356 | # below: this isn't contains(exp, consumes_rows) -- it's just checking the current expression 357 | return (self.eval_agg_call if consumes_rows(exp) else self.eval_nonagg_call)(exp) 358 | 359 | def eval_unx(self, exp): 360 | "unary expressions" 361 | inner = self.eval(exp.val) 362 | if exp.op.op == '+': 363 | return inner 364 | elif exp.op.op == '-': 365 | return -inner # pylint: disable=invalid-unary-operand-type 366 | elif exp.op.op == 'not': 367 | return threevl.ThreeVL.nein(inner) 368 | else: 369 | raise NotImplementedError('unk_op', exp.op) # pragma: no cover 370 | 371 | def eval(self, exp): 372 | "main dispatch for expression evaluation" 373 | # todo: this needs an AST-assert that all BaseX descendants are being handled 374 | if isinstance(exp, sqparse2.BinX): 375 | return evalop(exp.op.op, *list(map(self.eval, (exp.left, exp.right)))) 376 | elif isinstance(exp, sqparse2.UnX): 377 | return self.eval_unx(exp) 378 | elif isinstance(exp, sqparse2.NameX): 379 | return self.nix.rowget(self.tables, self.c_row, exp) 380 | elif isinstance(exp, sqparse2.AsterX): 381 | return sum(self.c_row, []) # todo doc: how does this get disassembled by caller? 382 | elif isinstance(exp, sqparse2.ArrayLit): 383 | return list(map(self.eval, exp.vals)) 384 | elif isinstance(exp, sqparse2.Literal): 385 | return exp.toliteral() 386 | elif isinstance(exp, sqparse2.CommaX): 387 | # todo: think about getting rid of CommaX everywhere; it complicates syntax tree navigation. 388 | # a lot of things that are CommaX now should become weval.Row.
389 | ret = [] 390 | for child in exp.children: 391 | (ret.extend if starlike(child) else ret.append)(self.eval(child)) 392 | return ret 393 | elif isinstance(exp, sqparse2.CallX): 394 | return self.eval_callx(exp) 395 | elif isinstance(exp, sqparse2.SelectX): 396 | raise NotImplementedError('subqueries should have been evaluated earlier') # todo: specific error class 397 | elif isinstance(exp, sqparse2.AttrX): 398 | return self.nix.rowget(self.tables, self.c_row, exp) 399 | elif isinstance(exp, sqparse2.CaseX): 400 | for case in exp.cases: 401 | if self.eval(case.when): 402 | return self.eval(case.then) 403 | return self.eval(exp.elsex) 404 | elif isinstance(exp, sqparse2.CastX): 405 | if exp.to_type.type.lower() in ('text', 'varchar'): 406 | return str(self.eval(exp.expr)) 407 | else: 408 | raise NotImplementedError('unhandled_cast_type', exp.to_type) 409 | elif isinstance(exp, (int, str, float, type(None))): 410 | return exp # I think Table.insert is creating this in expand_row 411 | # todo: why tuple, list, dict below? throw some asserts in here and see where these are coming from. 412 | elif isinstance(exp, tuple): 413 | return tuple(map(self.eval, exp)) 414 | elif isinstance(exp, list): 415 | return list(map(self.eval, exp)) 416 | elif isinstance(exp, dict): 417 | return exp 418 | elif isinstance(exp, sqparse2.NullX): 419 | return None 420 | elif isinstance(exp, sqparse2.ReturnX): 421 | # todo: I think ReturnX is *always* CommaX now; revisit this 422 | ret = self.eval(exp.expr) 423 | # warning: not sure what I'm doing here with the cardinality tweak on CommaX 424 | return [ret] if isinstance(exp.expr, (sqparse2.CommaX, sqparse2.AsterX)) else [[ret]] # todo: update parser so this is always * or a commalist 425 | elif isinstance(exp, sqparse2.AliasX): 426 | return self.eval(exp.name) # todo: rename AliasX 'name' to 'expr' 427 | else: 428 | raise NotImplementedError(type(exp), exp) # pragma: no cover 429 | 430 | def depth_first_sub(expr, values): 431 | "replace SubLit with literals in expr. (expr is mutated)." 432 | arr = treepath.sub_slots(expr, lambda elt: elt is sqparse2.SubLit) 433 | if len(arr) != len(values): 434 | raise ValueError('len', len(arr), len(values)) 435 | for path, val in zip(arr, values): 436 | # todo: does ArrayLit get us anything? tree traversal? 437 | if isinstance(val, (str, int, float, type(None), dict)): 438 | expr[path] = sqparse2.Literal(val) 439 | elif isinstance(val, (list, tuple)): 440 | expr[path] = sqparse2.ArrayLit(val) 441 | else: 442 | raise TypeError('unk_sub_type', type(val), val) # pragma: no cover 443 | --------------------------------------------------------------------------------
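A minimal end-to-end sketch (editor addition, not from the repo): parse a statement containing a %s placeholder with sqparse2.parse, then substitute a literal in place with sqex.depth_first_sub, which rewrites each SubLit slot via treepath.sub_slots as shown above. This assumes the tree shapes implied by the grammar (SelectX.where holding a BinX whose right side is the SubLit slot).

from pg13 import sqparse2, sqex
ex = sqparse2.parse('select a from t1 where b = %s')
sqex.depth_first_sub(ex, [5])  # mutates ex: the SubLit slot becomes Literal(5)
assert ex.where.right == sqparse2.Literal(5)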