├── .gitignore
├── CHANGELOG.md
├── LICENSE
├── README.md
├── setup.py
└── windyquery
├── __init__.py
├── builder
├── __init__.py
├── _crud_base.py
├── _schema_base.py
├── alter.py
├── create.py
├── delete.py
├── drop.py
├── insert.py
├── raw.py
├── rrule.py
├── select.py
├── update.py
└── with_values.py
├── collector
├── __init__.py
├── _base.py
├── alter.py
├── create.py
├── delete.py
├── drop.py
├── from_table.py
├── group_by.py
├── insert.py
├── join.py
├── limit.py
├── offset.py
├── on_conflict.py
├── order_by.py
├── raw.py
├── returning.py
├── rrule.py
├── schema.py
├── select.py
├── table.py
├── update.py
├── where.py
└── with_values.py
├── combiner
├── __init__.py
├── _base.py
├── alter_stmt.py
├── create_stmt.py
├── delete_stmt.py
├── drop_stmt.py
├── insert_stmt.py
├── parsetab.py
├── raw_stmt.py
├── select_stmt.py
└── update_stmt.py
├── connection.py
├── ctx.py
├── exceptions.py
├── listener.py
├── provider
├── __init__.py
├── _base.py
├── biop.py
├── fieldlist.py
├── glue.py
├── param.py
├── parentheses.py
└── record.py
├── scripts
├── __init__.py
├── migration.py
└── migration_templates.py
├── tests
├── __init__.py
├── conftest.py
├── seed_test_data.sql
├── test_alter_table.py
├── test_create_table.py
├── test_delete.py
├── test_group_by.py
├── test_insert.py
├── test_join.py
├── test_listener.py
├── test_migrations.py
├── test_raw.py
├── test_rrule.py
├── test_scheme.py
├── test_select.py
├── test_select_limit_order_by.py
├── test_update.py
├── test_where.py
└── test_with_values.py
├── utils.py
└── validator
├── __init__.py
├── _base.py
├── alias.py
├── alter.py
├── column.py
├── conflict_action.py
├── conflict_target.py
├── constraint.py
├── create.py
├── empty.py
├── expr.py
├── field.py
├── fullname.py
├── fullname_json.py
├── join.py
├── lextab.py
├── limit.py
├── name.py
├── number.py
├── offset.py
├── operators
├── __init__.py
├── bracket.py
├── comma.py
├── dot.py
├── equal.py
├── minus.py
├── negate.py
├── operator.py
└── paren.py
├── order_by.py
├── parsetab.py
├── schema.py
├── select.py
├── table.py
├── update.py
├── value_list.py
├── values
├── __init__.py
├── default.py
├── false.py
├── holder.py
├── null.py
├── text_val.py
└── true.py
└── where.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Build results
2 | [Dd]ebug/
3 | [Dd]ebugPublic/
4 | [Rr]elease/
5 | [Rr]eleases/
6 | x64/
7 | x86/
8 | bld/
9 | [Bb]in/
10 | [Oo]bj/
11 | [Ll]og/
12 |
13 | # Visual Studio 2015 cache/options directory
14 | .vs/
15 | # Uncomment if you have tasks that create the project's static files in wwwroot
16 | #wwwroot/
17 |
18 | # MSTest test Results
19 | [Tt]est[Rr]esult*/
20 | [Bb]uild[Ll]og.*
21 |
22 | # NUNIT
23 | *.VisualState.xml
24 | TestResult.xml
25 |
26 | # Build Results of an ATL Project
27 | [Dd]ebugPS/
28 | [Rr]eleasePS/
29 | dlldata.c
30 |
31 | # DNX
32 | project.lock.json
33 | project.fragment.lock.json
34 | artifacts/
35 |
36 | *_i.c
37 | *_p.c
38 | *_i.h
39 | *.ilk
40 | *.meta
41 | *.obj
42 | *.pch
43 | *.pdb
44 | *.pgc
45 | *.pgd
46 | *.rsp
47 | *.sbr
48 | *.tlb
49 | *.tli
50 | *.tlh
51 | *.tmp
52 | *.tmp_proj
53 | *.log
54 | *.vspscc
55 | *.vssscc
56 | .builds
57 | *.pidb
58 | *.svclog
59 | *.scc
60 |
61 | # Chutzpah Test files
62 | _Chutzpah*
63 |
64 | # Visual C++ cache files
65 | ipch/
66 | *.aps
67 | *.ncb
68 | *.opendb
69 | *.opensdf
70 | *.sdf
71 | *.cachefile
72 | *.VC.db
73 | *.VC.VC.opendb
74 |
75 | # Visual Studio profiler
76 | *.psess
77 | *.vsp
78 | *.vspx
79 | *.sap
80 |
81 | # TFS 2012 Local Workspace
82 | $tf/
83 |
84 | # Guidance Automation Toolkit
85 | *.gpState
86 |
87 | # ReSharper is a .NET coding add-in
88 | _ReSharper*/
89 | *.[Rr]e[Ss]harper
90 | *.DotSettings.user
91 |
92 | # JustCode is a .NET coding add-in
93 | .JustCode
94 |
95 | # TeamCity is a build add-in
96 | _TeamCity*
97 |
98 | # DotCover is a Code Coverage Tool
99 | *.dotCover
100 |
101 | # NCrunch
102 | _NCrunch_*
103 | .*crunch*.local.xml
104 | nCrunchTemp_*
105 |
106 | # MightyMoose
107 | *.mm.*
108 | AutoTest.Net/
109 |
110 | # Web workbench (sass)
111 | .sass-cache/
112 |
113 | # Installshield output folder
114 | [Ee]xpress/
115 |
116 | # DocProject is a documentation generator add-in
117 | DocProject/buildhelp/
118 | DocProject/Help/*.HxT
119 | DocProject/Help/*.HxC
120 | DocProject/Help/*.hhc
121 | DocProject/Help/*.hhk
122 | DocProject/Help/*.hhp
123 | DocProject/Help/Html2
124 | DocProject/Help/html
125 |
126 | # Click-Once directory
127 | publish/
128 |
129 | # Publish Web Output
130 | *.[Pp]ublish.xml
131 | *.azurePubxml
132 | # TODO: Comment the next line if you want to checkin your web deploy settings
133 | # but database connection strings (with potential passwords) will be unencrypted
134 | #*.pubxml
135 | *.publishproj
136 |
137 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
138 | # checkin your Azure Web App publish settings, but sensitive information contained
139 | # in these scripts will be unencrypted
140 | PublishScripts/
141 |
142 | # NuGet Packages
143 | *.nupkg
144 | # The packages folder can be ignored because of Package Restore
145 | **/packages/*
146 | # except build/, which is used as an MSBuild target.
147 | !**/packages/build/
148 | # Uncomment if necessary however generally it will be regenerated when needed
149 | #!**/packages/repositories.config
150 | # NuGet v3's project.json files produce more ignorable files
151 | *.nuget.props
152 | *.nuget.targets
153 |
154 | # Microsoft Azure Build Output
155 | csx/
156 | *.build.csdef
157 |
158 | # Microsoft Azure Emulator
159 | ecf/
160 | rcf/
161 |
162 | # Windows Store app package directories and files
163 | AppPackages/
164 | BundleArtifacts/
165 | Package.StoreAssociation.xml
166 | _pkginfo.txt
167 |
168 | # Visual Studio cache files
169 | # files ending in .cache can be ignored
170 | *.[Cc]ache
171 | # but keep track of directories ending in .cache
172 | !*.[Cc]ache/
173 |
174 | # Others
175 | ClientBin/
176 | ~$*
177 | *~
178 | *.dbmdl
179 | *.dbproj.schemaview
180 | *.jfm
181 | *.pfx
182 | *.publishsettings
183 | node_modules/
184 | orleans.codegen.cs
185 |
186 | # Since there are multiple workflows, uncomment next line to ignore bower_components
187 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
188 | #bower_components/
189 |
190 | # RIA/Silverlight projects
191 | Generated_Code/
192 |
193 | # Backup & report files from converting an old project file
194 | # to a newer Visual Studio version. Backup files are not needed,
195 | # because we have git ;-)
196 | _UpgradeReport_Files/
197 | Backup*/
198 | UpgradeLog*.XML
199 | UpgradeLog*.htm
200 |
201 | # SQL Server files
202 | *.mdf
203 | *.ldf
204 |
205 | # Business Intelligence projects
206 | *.rdl.data
207 | *.bim.layout
208 | *.bim_*.settings
209 |
210 | # Microsoft Fakes
211 | FakesAssemblies/
212 |
213 | # GhostDoc plugin setting file
214 | *.GhostDoc.xml
215 |
216 | # Node.js Tools for Visual Studio
217 | .ntvs_analysis.dat
218 |
219 | # Visual Studio 6 build log
220 | *.plg
221 |
222 | # Visual Studio 6 workspace options file
223 | *.opt
224 |
225 | # Visual Studio LightSwitch build output
226 | **/*.HTMLClient/GeneratedArtifacts
227 | **/*.DesktopClient/GeneratedArtifacts
228 | **/*.DesktopClient/ModelManifest.xml
229 | **/*.Server/GeneratedArtifacts
230 | **/*.Server/ModelManifest.xml
231 | _Pvt_Extensions
232 |
233 | # Paket dependency manager
234 | .paket/paket.exe
235 | paket-files/
236 |
237 | # FAKE - F# Make
238 | .fake/
239 |
240 | # JetBrains Rider
241 | .idea/
242 | *.sln.iml
243 |
244 | # CodeRush
245 | .cr/
246 |
247 | # Python Tools for Visual Studio (PTVS)
248 | __pycache__/
249 | *.pyc
250 | .pytest_cache/
251 | .env
252 | .vscode/settings.json
253 |
254 | # Setuptools distribution folder.
255 | /dist/
256 | /build/
257 |
258 | # Python egg metadata, regenerated from source files by setuptools.
259 | /*.egg-info
260 |
261 | test_cache/
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 |
2 | # 0.0.33 (2022-01-25)
3 | ### support UUID type so can directly use uuid in WITH clauses
4 | | Commit | Description |
5 | | -- | -- |
6 | | [1c77032](https://github.com/bluerelay/windyquery/commit/1c77032bed6d4ea60b98d1775f5f9f154814911f) | WITH clause can handle UUID datatype |
7 |
8 |
9 | # 0.0.32 (2022-01-08)
10 | ### support UUID type so can directly use uuid in WHERE and INSERT clauses
11 | | Commit | Description |
12 | | -- | -- |
13 | | [ff26940](https://github.com/bluerelay/windyquery/commit/ff269403aa946f393b378d605b4a19e843d1a9de) | able to insert UUID datatype |
14 |
15 |
16 | # 0.0.31 (2022-01-08)
17 | ### CREATE TABLE can set default by using functions
18 | | Commit | Description |
19 | | -- | -- |
20 | | [e9e06a7](https://github.com/bluerelay/windyquery/commit/e9e06a7b503f3adfc40bd524eca4e37a4615c789) | use function to generate default on table create |
21 |
22 |
23 | # 0.0.30 (2022-01-04)
24 | ### able to run multiple queries in raw method
25 | | Commit | Description |
26 | | -- | -- |
27 | | [0ddf803](https://github.com/bluerelay/windyquery/commit/0ddf8035327e982846d2e7b5b9d360749d6fe3e1) | raw method can run multiple queries |
28 |
29 |
30 | # 0.0.29 (2021-12-15)
31 | ### encode connection strs for migration cmd
32 | | Commit | Description |
33 | | -- | -- |
34 | | [dbd8f0e](https://github.com/bluerelay/windyquery/commit/dbd8f0ea4be4fdb0af4008c95501814ee4a6c522) | encode migration db password and other conn strs |
35 |
36 |
37 | # 0.0.28 (2021-08-26)
38 | ### raise exception when the connection is closed during listening on a channel
39 | | Commit | Description |
40 | | -- | -- |
41 | | [830859a](https://github.com/bluerelay/windyquery/commit/830859a0d23207717a75b73737c48b25ddd8e1f9) | raise error if connection is closed during listen |
42 |
43 |
44 | # 0.0.27 (2021-07-21)
45 | ### WITH Clause using the VALUES Lists
46 | | Commit | Description |
47 | | -- | -- |
48 | | [8d2be18](https://github.com/bluerelay/windyquery/commit/8d2be18d2bf0d2d89a36746ae05d1ec2e67e375b) | add the first support of WITH clause but limited to VALUES Lists |
49 |
50 |
51 | # 0.0.26 (2021-07-15)
52 | ### add after, before, and between to rrule
53 | | Commit | Description |
54 | | -- | -- |
55 | | [054547b](https://github.com/bluerelay/windyquery/commit/054547bbf5ac81b96ec152bde8942bed45415187) | add after, before, and between for RRULE CTE |
56 |
57 |
58 | # 0.0.25 (2021-07-12)
59 | ### exrule
60 | | Commit | Description |
61 | | -- | -- |
62 | | [cfc94ba](https://github.com/bluerelay/windyquery/commit/cfc94babd8b4a44f03316680d96e0cd257c81963) | include exrule for RRULE CTE |
63 |
64 |
65 | # 0.0.24 (2021-07-06)
66 | ### migrations
67 | | Commit | Description |
68 | | -- | -- |
69 | | [6eb7bb8](https://github.com/bluerelay/windyquery/commit/6eb7bb8690d9c610d5cae1fe163a3f3aefe67607) | add an initial support for database migrations |
70 |
71 |
72 | # init (2021-02-18)
73 | ### abandoned code
74 | the original code has been abandoned.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018 The Python Packaging Authority
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 |
3 | with open("README.md", "r") as fh:
4 | long_description = fh.read()
5 |
6 | setuptools.setup(
7 | name="windyquery",
8 | version="0.0.33",
9 | author="windymile.it",
10 | author_email="windymile.it@gmail.com",
11 | description="A non-blocking PostgreSQL query builder using Asyncio",
12 | long_description=long_description,
13 | long_description_content_type="text/markdown",
14 | url="https://github.com/bluerelay/windyquery",
15 | packages=setuptools.find_packages(),
16 | install_requires=[
17 | 'asyncpg==0.23.0',
18 | 'ply==3.11',
19 | 'python-dateutil==2.8.1',
20 | 'fire==0.4.0',
21 | ],
22 | classifiers=[
23 | "Programming Language :: Python :: 3.6",
24 | "License :: OSI Approved :: MIT License",
25 | "Operating System :: OS Independent",
26 | ],
27 | entry_points={
28 | 'console_scripts': [
29 | 'wq=windyquery.scripts:main',
30 | ],
31 | },
32 | )
33 |
--------------------------------------------------------------------------------
/windyquery/__init__.py:
--------------------------------------------------------------------------------
1 | from .builder import DB
2 |
--------------------------------------------------------------------------------
/windyquery/builder/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Tuple
2 | import asyncpg
3 |
4 | from windyquery.collector import Collector
5 | from windyquery.combiner import Combiner
6 | from windyquery.connection import Connection
7 | from windyquery.listener import Listener
8 | from .select import Select
9 | from .update import Update
10 | from .insert import Insert
11 | from .delete import Delete
12 | from .create import Create
13 | from .drop import Drop
14 | from .alter import Alter
15 | from .raw import Raw
16 | from .rrule import Rrule
17 | from .with_values import WithValues
18 |
19 |
class DB(Select, Update, Insert, Delete, Create, Drop, Alter, Rrule, Raw, WithValues):
    """Entry point of the query builder.

    Mixes every builder (CRUD, schema, rrule, raw, CTE) with connection-pool
    management.  Instances are awaitable: ``await db`` executes the currently
    collected statement via :meth:`exec`.
    """

    def __init__(self):
        super().__init__()
        # named asyncpg pools, managed by Connection
        self.pool_connection = Connection()
        # name of the pool used when none is selected explicitly
        self.default_connection = None
        # which build path toSql() takes: 'crud', 'schema', or 'raw'
        self.mode = None
        self._reset()

    def _reset(self):
        """Begin a fresh statement: new collector/combiner, no active pool."""
        self.collector = Collector()
        self.combiner = Combiner(self.collector)
        self.pool = None

    def _get_pool(self):
        """Return the active pool, falling back to the default connection."""
        if self.pool:
            return self.pool
        if self.default_connection:
            self.connection(self.default_connection)
            return self.pool
        raise UserWarning('no connection set up for the DB instance')

    def toSql(self) -> Tuple[str, Any]:
        """Render the collected statement; builder state is always reset."""
        try:
            params = []
            if self.mode == 'crud':
                statement, params = self.build_crud()
            elif self.mode == 'schema':
                statement = self.build_schema()
            elif self.mode == 'raw':
                statement, params = self.build_raw()
            else:
                raise UserWarning('the sql build is incomplete')
            return str(statement), params
        finally:
            self._reset()

    async def exec(self):
        """Execute the collected statement on the active pool."""
        pool = self._get_pool()
        sql, args = self.toSql()
        async with pool.acquire() as conn:
            # a parameter-less script containing ';' may hold multiple
            # queries, which asyncpg only accepts through execute()
            if not args and ';' in sql:
                return await conn.execute(sql)
            return await conn.fetch(sql, *args)

    def __await__(self):
        return self.exec().__await__()

    # connection interface
    async def connect(self, connection_name: str, config, default=False, min_size=10, max_size=10, max_queries=50000, max_inactive_connection_lifetime=300.0, setup=None, init=None, loop=None, connection_class=asyncpg.connection.Connection, **connect_kwargs):
        """Create a named pool; the first pool created becomes the default."""
        self.pool = await self.pool_connection.connect(
            connection_name, config, min_size, max_size, max_queries,
            max_inactive_connection_lifetime, setup, init, loop,
            connection_class, **connect_kwargs)
        if default or self.default_connection is None:
            self.default_connection = connection_name
        return self

    async def disconnect(self, connection_name):
        """Close a named pool; clears the default if it was that pool."""
        await self.pool_connection.disconnect(connection_name)
        if connection_name == self.default_connection:
            self.default_connection = None

    async def stop(self):
        """Close every managed pool."""
        await self.pool_connection.stop()

    def connection(self, connection_name):
        """Select a named pool for the next statement."""
        self.pool = self.pool_connection.connection(connection_name)
        return self

    def listen(self, channel: str):
        """Return a Listener bound to the active pool for *channel*."""
        pool = self._get_pool()
        return Listener(pool, channel)
--------------------------------------------------------------------------------
/windyquery/builder/_crud_base.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | from windyquery.collector import Collector
4 | from windyquery.combiner import Combiner
5 |
6 |
class CrudBase:
    """Shared plumbing for the CRUD builder mixins (select/update/insert/delete)."""
    collector: Collector
    combiner: Combiner

    def table(self, name: str):
        """Target a table and switch the builder into CRUD mode."""
        self.mode = 'crud'
        self.collector.table(name)
        return self

    def where(self, sql: str, *items: Any):
        """Collect a WHERE condition with optional bound parameters."""
        self.collector.where(sql, *items)
        return self

    def returning(self, *items: str):
        """Collect a RETURNING clause."""
        self.collector.returning(*items)
        return self

    # The build_* hooks below are overridden by the concrete mixins; the
    # defaults only keep build_crud's attribute lookups valid.
    def build_select(self, _):
        return None, None

    def build_update(self, _):
        return None, None

    def build_insert(self, _):
        return None, None

    def build_delete(self, _):
        return None, None

    def build_rrule(self, _):
        return None, None

    def build_with_values(self, _):
        return None, None

    def build_crud(self):
        """Run the combiner and dispatch to the matching build_* hook."""
        result = self.combiner.run()
        kind = result['_id']
        if kind == 'error':
            raise UserWarning(result['message'])
        dispatch = {
            'select': self.build_select,
            'update': self.build_update,
            'insert': self.build_insert,
            'delete': self.build_delete,
        }
        if kind not in dispatch:
            raise Exception(f"not implemented: {kind!r}")
        sql, args = dispatch[kind](result)

        # optional WITH clauses are prepended to the main statement
        cte_parts = []
        if 'RRULE' in result:
            cte_parts.append(self.build_rrule(result['RRULE']))
        if 'WITH_VALUES' in result:
            cte_parts.append(self.build_with_values(result['WITH_VALUES']))
        if cte_parts:
            sql = 'WITH ' + ', '.join(cte_parts) + ' ' + sql

        return sql, args
--------------------------------------------------------------------------------
/windyquery/builder/_schema_base.py:
--------------------------------------------------------------------------------
1 | from windyquery.collector import Collector
2 | from windyquery.combiner import Combiner
3 |
4 |
class SchemaBase:
    """Shared plumbing for the schema builder mixins (create/drop/alter)."""
    collector: Collector
    combiner: Combiner

    def schema(self, s: str):
        """Target a schema object and switch the builder into schema mode."""
        self.mode = 'schema'
        self.collector.schema(s)
        return self

    # Overridden by the concrete mixins; defaults keep dispatch valid.
    def build_create(self, _):
        return None, None

    def build_drop(self, _):
        return None, None

    def build_alter(self, _):
        return None, None

    def build_schema(self):
        """Run the combiner and dispatch to the matching build_* hook."""
        result = self.combiner.run()
        kind = result['_id']
        if kind == 'error':
            raise UserWarning(result['message'])
        dispatch = {
            'create': self.build_create,
            'drop': self.build_drop,
            'alter': self.build_alter,
        }
        if kind not in dispatch:
            raise Exception(f"not implemented: {kind!r}")
        return dispatch[kind](result)
--------------------------------------------------------------------------------
/windyquery/builder/alter.py:
--------------------------------------------------------------------------------
1 | from ._schema_base import SchemaBase
2 |
3 |
class Alter(SchemaBase):
    """Builder mixin for ALTER statements."""

    def alter(self, *items: str):
        """Collect one or more ALTER actions."""
        self.collector.alter(*items)
        return self

    def build_alter(self, data):
        """Render the collected ALTER actions into a SQL string."""
        actions = ', '.join(data['ALTER'])
        return f'ALTER {data["SCHEMA"]} {actions}'
16 |
--------------------------------------------------------------------------------
/windyquery/builder/create.py:
--------------------------------------------------------------------------------
1 | from ._schema_base import SchemaBase
2 |
3 |
class Create(SchemaBase):
    """Builder mixin for CREATE statements."""

    def create(self, *items: str):
        """Collect one or more column/constraint definitions."""
        self.collector.create(*items)
        return self

    def build_create(self, data):
        """Render the collected definitions (plus optional WHERE) into SQL."""
        sql = f'CREATE {data["SCHEMA"]}'
        sql += ' (' + ', '.join(data['CREATE']) + ')'
        # WHERE (e.g. for partial indexes)
        if 'WHERE' in data:
            conditions = [w['sql'] for w in data['WHERE']]
            if conditions:
                sql += ' WHERE ' + ' AND '.join(conditions)
        return sql
21 |
--------------------------------------------------------------------------------
/windyquery/builder/delete.py:
--------------------------------------------------------------------------------
1 | from ._crud_base import CrudBase
2 |
3 |
class Delete(CrudBase):
    """Builder mixin for DELETE statements."""

    def delete(self):
        """Mark the current statement as a DELETE."""
        self.collector.delete()
        return self

    def build_delete(self, data):
        """Render DELETE FROM with optional WHERE and RETURNING clauses."""
        sql = f'DELETE FROM {data["TABLE"]}'
        # WHERE
        if 'WHERE' in data:
            conditions = [w['sql'] for w in data['WHERE']]
            if conditions:
                sql += ' WHERE ' + ' AND '.join(conditions)
        # RETURNING (defaults to * when no columns were given)
        if 'RETURNING' in data:
            returned = data['RETURNING'] or ['*']
            sql += ' RETURNING ' + ', '.join(returned)
        return sql, data['_params']
27 |
--------------------------------------------------------------------------------
/windyquery/builder/drop.py:
--------------------------------------------------------------------------------
1 | from ._schema_base import SchemaBase
2 |
3 |
class Drop(SchemaBase):
    """Builder mixin for DROP statements."""

    def drop(self, *items: str):
        """Collect optional DROP modifiers (e.g. CASCADE)."""
        self.collector.drop(*items)
        return self

    def build_drop(self, data):
        """Render DROP with any collected modifier appended."""
        sql = f'DROP {data["SCHEMA"]}'
        drop_target = data.get('DROP')
        if drop_target is not None:
            sql += f' {drop_target}'
        return sql
15 |
--------------------------------------------------------------------------------
/windyquery/builder/insert.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 |
3 | from ._crud_base import CrudBase
4 |
5 |
class Insert(CrudBase):
    """Builder mixin for INSERT statements."""

    def insert(self, *items: Dict):
        """Collect rows to insert.

        The first dict fixes the column list; rows missing a column get the
        literal 'DEFAULT' placeholder.
        """
        if not items:
            raise UserWarning('inserts cannot be empty')
        columns = list(items[0].keys())
        if not columns:
            raise UserWarning('insert cannot be empty dict')
        rows = [[record.get(col, 'DEFAULT') for col in columns]
                for record in items]
        self.collector.insert(columns, rows)
        return self

    def on_conflict(self, *items):
        """Collect an ON CONFLICT target and action."""
        if len(items) < 2:
            raise UserWarning(
                'on_conflict requires at least 2 inputs: target and action')
        self.collector.on_conflict(*items)
        return self

    def build_insert(self, data):
        """Render INSERT INTO with VALUES, ON CONFLICT, and RETURNING."""
        sql = f'INSERT INTO {data["TABLE"]}'
        # COLUMNS come from the first collected insert
        columns = data['INSERT'][0]['columns']
        sql += f' {columns} VALUES'
        # every collected insert must share the same column signature
        key = data['INSERT'][0]['key']
        rows = []
        for entry in data['INSERT']:
            if entry['key'] != key:
                raise UserWarning(
                    f'different inserts found: {key} and {entry["key"]}')
            rows.append(entry['values'])
        sql += ' ' + ', '.join(rows)
        # ON CONFLICT
        if 'ON_CONFLICT' in data:
            conflict = data['ON_CONFLICT']
            sql += f' ON CONFLICT {conflict["target"]} {conflict["action"]}'
        # RETURNING (defaults to * when no columns were given)
        if 'RETURNING' in data:
            returned = data['RETURNING'] or ['*']
            sql += ' RETURNING ' + ', '.join(returned)
        return sql, data['_params']
57 |
--------------------------------------------------------------------------------
/windyquery/builder/raw.py:
--------------------------------------------------------------------------------
class Raw:
    """Builder mixin for raw SQL queries."""

    def raw(self, s: str, *args):
        """Collect a raw SQL string with optional bound parameters."""
        self.mode = 'raw'
        self.collector.raw(s, *args)
        return self

    def build_raw(self):
        """Render the raw SQL, prepending any collected WITH clauses."""
        result = self.combiner.run()
        if result['_id'] == 'error':
            raise UserWarning(result['message'])

        sql = result['RAW']['sql']

        # optional WITH clauses are prepended to the raw statement
        cte_parts = []
        if 'RRULE' in result:
            cte_parts.append(self.build_rrule(result['RRULE']))
        if 'WITH_VALUES' in result:
            cte_parts.append(self.build_with_values(result['WITH_VALUES']))
        if cte_parts:
            sql = 'WITH ' + ', '.join(cte_parts) + ' ' + sql

        return sql, result['_params']
28 |
--------------------------------------------------------------------------------
/windyquery/builder/rrule.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from typing import Dict
3 | from dateutil import rrule
4 | from dateutil import parser
5 | from windyquery.exceptions import RruleNoResults
6 |
7 | from ._crud_base import CrudBase
8 |
9 |
10 | class Rrule(CrudBase):
11 |
12 | def rrule(self, name: str, *items: Dict):
13 | if len(items) == 0:
14 | raise UserWarning('rrule cannot be empty')
15 | columns = list(items[0].keys())
16 | if len(columns) == 0:
17 | raise UserWarning('rrule cannot be empty dict')
18 | # keep only custom fields
19 | if 'rrule' in columns:
20 | columns.remove('rrule')
21 | if 'exrule' in columns:
22 | columns.remove('exrule')
23 | if 'rdate' in columns:
24 | columns.remove('rdate')
25 | if 'exdate' in columns:
26 | columns.remove('exdate')
27 | if 'rrule_slice' in columns:
28 | columns.remove('rrule_slice')
29 | if 'rrule_after' in columns:
30 | columns.remove('rrule_after')
31 | if 'rrule_before' in columns:
32 | columns.remove('rrule_before')
33 | if 'rrule_between' in columns:
34 | columns.remove('rrule_between')
35 | # faltten the Dict's into List's and extract rruleset
36 | values = []
37 | for item in items:
38 | val = []
39 | # get rruleset
40 | rrset = rrule.rruleset()
41 | rruleExist = False
42 | if item.get('rrule', False):
43 | rruleRawVal = item.get('rrule')
44 | rruleVals = []
45 | if isinstance(rruleRawVal, list) or isinstance(rruleRawVal, tuple):
46 | rruleVals = list(rruleRawVal)
47 | elif isinstance(rruleRawVal, str):
48 | rruleVals = [rruleRawVal]
49 | else:
50 | raise UserWarning(f'invalid rrule input {rruleRawVal}')
51 | if len(rruleVals) > 0:
52 | rruleExist = True
53 | for rruleVal in rruleVals:
54 | try:
55 | rrset.rrule(rrule.rrulestr(rruleVal))
56 | except:
57 | raise UserWarning(
58 | f'invalid rrule: {rruleVal}') from None
59 | if item.get('exrule', False):
60 | exruleRawVal = item.get('exrule')
61 | exruleVals = []
62 | if isinstance(exruleRawVal, list) or isinstance(exruleRawVal, tuple):
63 | exruleVals = list(exruleRawVal)
64 | elif isinstance(exruleRawVal, str):
65 | exruleVals = [exruleRawVal]
66 | else:
67 | raise UserWarning(f'invalid exrule input {exruleRawVal}')
68 | for exruleVal in exruleVals:
69 | try:
70 | rrset.exrule(rrule.rrulestr(exruleVal))
71 | except:
72 | raise UserWarning(
73 | f'invalid exrule: {exruleVal}') from None
74 | if item.get('rdate', False):
75 | rdateRawVal = item.get('rdate')
76 | rdateVals = []
77 | if isinstance(rdateRawVal, list) or isinstance(rdateRawVal, tuple):
78 | rdateVals = list(rdateRawVal)
79 | elif isinstance(rdateRawVal, str):
80 | rdateVals = [rdateRawVal]
81 | else:
82 | raise UserWarning(f'invalid rdate input {rdateRawVal}')
83 | if len(rdateVals) > 0:
84 | rruleExist = True
85 | for rdateVal in rdateVals:
86 | try:
87 | rrset.rdate(parser.parse(rdateVal))
88 | except:
89 | raise UserWarning(
90 | f'invalid rdate: {rdateVal}') from None
91 | if item.get('exdate', False):
92 | exdateRawVal = item.get('exdate')
93 | exdateVals = []
94 | if isinstance(exdateRawVal, list) or isinstance(exdateRawVal, tuple):
95 | exdateVals = list(exdateRawVal)
96 | elif isinstance(exdateRawVal, str):
97 | exdateVals = [exdateRawVal]
98 | else:
99 | raise UserWarning(f'invalid exdate input {exdateRawVal}')
100 | for exdateVal in exdateVals:
101 | try:
102 | rrset.exdate(parser.parse(exdateVal))
103 | except:
104 | raise UserWarning(
105 | f'invalid exdate: {exdateVal}') from None
106 | if not rruleExist:
107 | raise UserWarning(
108 | f'the input dict {item} must contain a "rrule" or "rdate" field')
109 | val.append(rrset)
110 | # get rrule_slice
111 | sliceVal = item.get('rrule_slice', None)
112 | if sliceVal is not None and not isinstance(sliceVal, slice):
113 | raise UserWarning(f'invalid slice: {sliceVal}') from None
114 | val.append(sliceVal)
115 | # get rrule_after
116 | afterVal = item.get('rrule_after', None)
117 | if afterVal is not None:
118 | dt = None
119 | inc = False
120 | if (isinstance(afterVal, list) or isinstance(afterVal, tuple)) and len(afterVal) >= 1:
121 | if isinstance(afterVal[0], datetime):
122 | dt = afterVal[0]
123 | elif isinstance(afterVal[0], str):
124 | dt = parser.parse(afterVal[0])
125 | if len(afterVal) == 2:
126 | inc = afterVal[1]
127 | elif isinstance(afterVal, dict):
128 | if 'dt' in afterVal:
129 | if isinstance(afterVal['dt'], datetime):
130 | dt = afterVal['dt']
131 | elif isinstance(afterVal['dt'], str):
132 | dt = parser.parse(afterVal['dt'])
133 | if 'inc' in afterVal:
134 | inc = afterVal['inc']
135 | else:
136 | raise UserWarning(
137 | f'invalid rrule_after: {afterVal}') from None
138 | if dt is None:
139 | raise UserWarning(
140 | f'a datetime.datetime parameter dt is required for rrule_after: {afterVal}') from None
141 | if not isinstance(inc, bool):
142 | raise UserWarning(
143 | f'the parameter inc needs to be boolean: {afterVal}') from None
144 | afterVal = (dt, inc)
145 | val.append(afterVal)
146 | # get rrule_before
147 | beforeVal = item.get('rrule_before', None)
148 | if beforeVal is not None:
149 | dt = None
150 | inc = False
151 | if (isinstance(beforeVal, list) or isinstance(beforeVal, tuple)) and len(beforeVal) >= 1:
152 | if isinstance(beforeVal[0], datetime):
153 | dt = beforeVal[0]
154 | elif isinstance(beforeVal[0], str):
155 | dt = parser.parse(beforeVal[0])
156 | if len(beforeVal) == 2:
157 | inc = beforeVal[1]
158 | elif isinstance(beforeVal, dict):
159 | if 'dt' in beforeVal:
160 | if isinstance(beforeVal['dt'], datetime):
161 | dt = beforeVal['dt']
162 | elif isinstance(beforeVal['dt'], str):
163 | dt = parser.parse(beforeVal['dt'])
164 | if 'inc' in beforeVal:
165 | inc = beforeVal['inc']
166 | else:
167 | raise UserWarning(
168 | f'invalid rrule_before: {beforeVal}') from None
169 | if dt is None:
170 | raise UserWarning(
171 | f'a datetime.datetime parameter dt is required for rrule_before: {beforeVal}') from None
172 | if not isinstance(inc, bool):
173 | raise UserWarning(
174 | f'the parameter inc needs to be boolean: {beforeVal}') from None
175 | beforeVal = (dt, inc)
176 | val.append(beforeVal)
177 | # get rrule_between
178 | betweenVal = item.get('rrule_between', None)
179 | if betweenVal is not None:
180 | after = None
181 | before = None
182 | inc = False
183 | count = 1
184 | if (isinstance(betweenVal, list) or isinstance(betweenVal, tuple)) and len(betweenVal) >= 2:
185 | if isinstance(betweenVal[0], datetime):
186 | after = betweenVal[0]
187 | elif isinstance(betweenVal[0], str):
188 | after = parser.parse(betweenVal[0])
189 | if isinstance(betweenVal[1], datetime):
190 | before = betweenVal[1]
191 | elif isinstance(betweenVal[1], str):
192 | before = parser.parse(betweenVal[1])
193 | if len(betweenVal) == 3:
194 | inc = betweenVal[2]
195 | if len(betweenVal) == 4:
196 | count = betweenVal[3]
197 | elif isinstance(betweenVal, dict):
198 | if 'after' in betweenVal:
199 | if isinstance(betweenVal['after'], datetime):
200 | after = betweenVal['after']
201 | elif isinstance(betweenVal['after'], str):
202 | after = parser.parse(betweenVal['after'])
203 | if 'before' in betweenVal:
204 | if isinstance(betweenVal['before'], datetime):
205 | before = betweenVal['before']
206 | elif isinstance(betweenVal['before'], str):
207 | before = parser.parse(betweenVal['before'])
208 | if 'inc' in betweenVal:
209 | inc = betweenVal['inc']
210 | if 'count' in betweenVal:
211 | count = betweenVal['count']
212 | else:
213 | raise UserWarning(
214 | f'invalid rrule_between: {betweenVal}') from None
215 | if after is None:
216 | raise UserWarning(
217 | f'the parameter after needs to be datetime.datetime for rrule_between: {betweenVal}') from None
218 | if before is None:
219 | raise UserWarning(
220 | f'the parameter before needs to be datetime.datetime for rrule_between: {betweenVal}') from None
221 | if not isinstance(inc, bool):
222 | raise UserWarning(
223 | f'the parameter inc needs to be bool for rrule_between: {betweenVal}') from None
224 | if not isinstance(count, int):
225 | raise UserWarning(
226 | f'the parameter count needs to be int for rrule_between: {betweenVal}') from None
227 | betweenVal = (after, before, inc, count)
228 | val.append(betweenVal)
229 | # get the rest custom fields
230 | for col in columns:
231 | val.append(item.get(col, 'NULL'))
232 | values.append(val)
233 | self.collector.rrule(name, columns, values)
234 | return self
235 |
236 | def build_rrule(self, items) -> str:
237 | parsedItems = []
238 | for item in items:
239 | name = item['name']
240 | columns = item['columns']
241 | values = item['values']
242 | if not values:
243 | raise RruleNoResults(
244 | f'the rrule for {name} returns no results')
245 | parsedItem = f'{name} {columns} AS (VALUES {values})'
246 | parsedItems.append(parsedItem)
247 | sql = ', '.join(parsedItems)
248 | return sql
249 |
--------------------------------------------------------------------------------
/windyquery/builder/select.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | from ._crud_base import CrudBase
4 |
5 |
class Select(CrudBase):
    """Fluent query-builder mixin for SELECT statements.

    The chainable methods simply forward to the collector; build_select
    assembles the final SQL string from the collected clause data.
    """

    def select(self, *items: str):
        self.collector.select(*items)
        return self

    def limit(self, size: int):
        self.collector.limit(size)
        return self

    def offset(self, size: int):
        self.collector.offset(size)
        return self

    def order_by(self, *items: str):
        self.collector.order_by(*items)
        return self

    def group_by(self, *items: str):
        self.collector.group_by(*items)
        return self

    def join(self, tbl: str, *cond: Any):
        self.collector.join(tbl, *cond)
        return self

    def build_select(self, data):
        """Assemble the SELECT statement; returns (sql, params)."""
        cols = data['SELECT']
        sql = 'SELECT ' + (', '.join(cols) if cols else '*')
        sql += ' FROM ' + data['TABLE']
        # JOIN clauses, in collection order
        if 'JOIN' in data:
            joins = [jn['sql'] for jn in data['JOIN']]
            if joins:
                sql += ' ' + ' '.join(joins)
        # multiple where() calls are AND-ed together
        if 'WHERE' in data:
            conds = [w['sql'] for w in data['WHERE']]
            if conds:
                sql += ' WHERE ' + ' AND '.join(conds)
        # each GROUP_BY entry is itself a list of expressions
        if 'GROUP_BY' in data:
            groups = []
            for grp in data['GROUP_BY']:
                groups += grp
            if groups:
                sql += ' GROUP BY ' + ', '.join(groups)
        if 'ORDER_BY' in data:
            orders = []
            for od in data['ORDER_BY']:
                orders += od
            if orders:
                sql += ' ORDER BY ' + ', '.join(orders)
        if data.get('LIMIT'):
            sql += ' ' + data['LIMIT']['sql']
        if data.get('OFFSET'):
            sql += ' ' + data['OFFSET']['sql']
        return sql, data['_params']
79 |
--------------------------------------------------------------------------------
/windyquery/builder/update.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | from windyquery.utils import process_value
4 | from ._crud_base import CrudBase
5 |
6 |
class Update(CrudBase):
    """Fluent query-builder mixin for UPDATE statements."""

    def update(self, *items: Any):
        """Accept either a single dict of column updates, or a raw SQL
        fragment followed by its positional parameters.

        Raises UserWarning when called with no arguments.
        """
        if len(items) == 1 and isinstance(items[0], Dict):
            fragments = []
            params = []
            for column, value in items[0].items():
                value, param = process_value(value)
                if param is not None:
                    params.append(param)
                fragments.append(f'{column} = {value}')
            sql = ', '.join(fragments)
        elif len(items) > 0:
            sql, params = items[0], items[1:]
        else:
            raise UserWarning(f'not valid updates: {items}')
        self.collector.update(sql, *params)
        return self

    def from_table(self, name: str):
        self.collector.from_table(name)
        return self

    def build_update(self, data):
        """Assemble the UPDATE statement; returns (sql, params)."""
        assignments = [item['sql'] for item in data['UPDATE']]
        sql = f'UPDATE {data["TABLE"]}' + ' SET ' + ', '.join(assignments)
        if 'FROM_TABLE' in data:
            sql += f' FROM {data["FROM_TABLE"]}'
        if 'JOIN' in data:
            joins = [jn['sql'] for jn in data['JOIN']]
            if joins:
                sql += ' ' + ' '.join(joins)
        # multiple where() calls are AND-ed together
        if 'WHERE' in data:
            conds = [w['sql'] for w in data['WHERE']]
            if conds:
                sql += ' WHERE ' + ' AND '.join(conds)
        # bare returning() means RETURNING *
        if 'RETURNING' in data:
            cols = data['RETURNING'] or ['*']
            sql += ' RETURNING ' + ', '.join(cols)
        return sql, data['_params']
61 |
--------------------------------------------------------------------------------
/windyquery/builder/with_values.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 |
3 | from ._crud_base import CrudBase
4 |
5 |
class WithValues(CrudBase):
    """Fluent query-builder mixin for WITH ... (VALUES ...) CTEs."""

    def with_values(self, name: str, *values: Dict):
        """Register a named VALUES CTE from one or more row dicts.

        Column order is taken from the first row's keys; keys missing in
        later rows become the literal 'NULL'.
        """
        if not values:
            raise UserWarning('with_values cannot be empty')
        columns = list(values[0].keys())
        if not columns:
            raise UserWarning('with_values cannot be empty dict')
        # flatten the dicts into positional rows
        rows = [[row.get(col, 'NULL') for col in columns] for row in values]
        self.collector.with_values(name, columns, rows)
        return self

    def build_with_values(self, items) -> str:
        """Render collected CTEs as `name (cols) AS (VALUES ...)`."""
        rendered = []
        for item in items:
            rendered.append(
                f"{item['name']} {item['columns']} AS (VALUES {item['values']})")
        return ', '.join(rendered)
34 |
--------------------------------------------------------------------------------
/windyquery/collector/__init__.py:
--------------------------------------------------------------------------------
1 | from .table import Table
2 | from .select import Select
3 | from .where import Where
4 | from .limit import Limit
5 | from .offset import Offset
6 | from .group_by import GroupBy
7 | from .order_by import OrderBy
8 | from .join import Join
9 | from .update import Update
10 | from .from_table import FromTable
11 | from .insert import Insert
12 | from .returning import Returning
13 | from .delete import Delete
14 | from .schema import Schema
15 | from .create import Create
16 | from .drop import Drop
17 | from .alter import Alter
18 | from .on_conflict import OnConflict
19 | from .raw import Raw
20 | from .rrule import Rrule
21 | from .with_values import WithValues
22 |
23 |
# Token names recognized by the combiner grammar.  ply requires this
# tuple to be named `tokens` at module level.
tokens = (
    'START_SELECT', 'START_UPDATE', 'START_INSERT', 'START_DELETE',
    'START_CREATE', 'START_DROP', 'START_ALTER', 'START_RAW',
    'TABLE', 'SELECT', 'WHERE', 'LIMIT', 'OFFSET', 'GROUP_BY',
    'ORDER_BY', 'JOIN', 'UPDATE', 'FROM_TABLE', 'INSERT', 'RETURNING',
    'DELETE', 'SCHEMA', 'CREATE', 'DROP', 'ALTER', 'ON_CONFLICT',
    'RRULE', 'RAW', 'WITH_VALUES',
)
30 |
31 |
class Collector(Table, Select, Where, Limit, Offset, GroupBy, OrderBy, Join, Update, FromTable, Insert,
                Returning, Delete, Schema, Create, Drop, Alter, OnConflict, Raw, Rrule, WithValues):
    """Collect user input from the builder methods as lexer tokens."""
36 |
--------------------------------------------------------------------------------
/windyquery/collector/_base.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import Validator
4 |
5 |
class StartSelectToken(LexToken):
    """Synthetic token marking the start of a SELECT statement."""

    def __init__(self):
        self.type = self.value = 'START_SELECT'
        self.lineno = self.lexpos = 0
12 |
13 |
class StartUpdateToken(LexToken):
    """Synthetic token marking the start of an UPDATE statement."""

    def __init__(self):
        self.type = self.value = 'START_UPDATE'
        self.lineno = self.lexpos = 0
20 |
21 |
class StartInsertToken(LexToken):
    """Synthetic token marking the start of an INSERT statement."""

    def __init__(self):
        self.type = self.value = 'START_INSERT'
        self.lineno = self.lexpos = 0
28 |
29 |
class StartDeleteToken(LexToken):
    """Synthetic token marking the start of a DELETE statement."""

    def __init__(self):
        self.type = self.value = 'START_DELETE'
        self.lineno = self.lexpos = 0
36 |
37 |
class StartCreateToken(LexToken):
    """Synthetic token marking the start of a CREATE TABLE statement."""

    def __init__(self):
        self.type = self.value = 'START_CREATE'
        self.lineno = self.lexpos = 0
44 |
45 |
class StartDropToken(LexToken):
    """Synthetic token marking the start of a DROP TABLE statement."""

    def __init__(self):
        self.type = self.value = 'START_DROP'
        self.lineno = self.lexpos = 0
52 |
53 |
class StartAlterToken(LexToken):
    """Synthetic token marking the start of an ALTER TABLE statement."""

    def __init__(self):
        self.type = self.value = 'START_ALTER'
        self.lineno = self.lexpos = 0
60 |
61 |
class StartRawToken(LexToken):
    """Synthetic token marking the start of a raw SQL statement."""

    def __init__(self):
        self.type = self.value = 'START_RAW'
        self.lineno = self.lexpos = 0
68 |
69 |
class Base:
    """Shared state and token-list plumbing for all collector mixins."""

    def __init__(self):
        self.validator = Validator()
        self.idx = 0           # read cursor used by token()
        self.tokens = []       # collected LexTokens, in statement order
        self.tokenpos = 0      # next lexpos value to stamp
        self.paramOffset = 1   # next positional parameter number ($1-based)
        self.startAdded = False

    def token(self):
        """Return the next collected token, or None when exhausted."""
        if self.idx >= len(self.tokens):
            return None
        tok = self.tokens[self.idx]
        self.idx += 1
        return tok

    def append(self, t):
        """Add a token at the end, stamping its lexpos."""
        t.lexpos = self.tokenpos
        self.tokenpos += 1
        self.tokens.append(t)

    def prepend(self, t):
        """Add a token at the front, stamping its lexpos."""
        t.lexpos = self.tokenpos
        self.tokenpos += 1
        self.tokens.insert(0, t)

    def add_start(self, t):
        """Insert the statement-start marker once; later calls are no-ops."""
        if self.startAdded:
            return
        t.lexpos = self.tokenpos
        self.tokenpos += 1
        self.tokens.insert(0, t)
        self.startAdded = True
104 |
--------------------------------------------------------------------------------
/windyquery/collector/alter.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import ValidationError
4 | from ._base import Base, StartAlterToken
5 |
6 |
TOKEN = 'ALTER'


class AlterToken(LexToken):
    """Lexer token carrying validated ALTER items."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
16 |
17 |
class Alter(Base):
    """Collects validated ALTER clauses and marks the statement start."""

    def alter(self, *items: str):
        validated = []
        for item in items:
            try:
                validated.append(self.validator.validate_alter(item))
            except ValidationError as err:
                raise UserWarning(f'invalid ALTER: {err}') from None
        self.append(AlterToken(validated))
        self.add_start(StartAlterToken())
26 |
--------------------------------------------------------------------------------
/windyquery/collector/create.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import ValidationError
4 | from ._base import Base, StartCreateToken
5 |
6 |
TOKEN = 'CREATE'


class CreateToken(LexToken):
    """Lexer token carrying validated CREATE items."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
16 |
17 |
class Create(Base):
    """Collects validated CREATE clauses and marks the statement start."""

    def create(self, *items: str):
        validated = []
        for item in items:
            try:
                validated.append(self.validator.validate_create(item))
            except ValidationError as err:
                raise UserWarning(f'invalid CREATE: {err}') from None
        self.append(CreateToken(validated))
        self.add_start(StartCreateToken())
26 |
--------------------------------------------------------------------------------
/windyquery/collector/delete.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from ._base import Base, StartDeleteToken
4 |
TOKEN = 'DELETE'


class DeleteToken(LexToken):
    """Marker token for a DELETE statement; carries no payload."""

    def __init__(self):
        self.type, self.value = TOKEN, None
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
14 |
15 |
class Delete(Base):
    """Collects a DELETE marker and marks the statement start."""

    def delete(self):
        self.append(DeleteToken())
        self.add_start(StartDeleteToken())
20 |
--------------------------------------------------------------------------------
/windyquery/collector/drop.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from ._base import Base, StartDropToken
4 |
5 |
TOKEN = 'DROP'


class DropToken(LexToken):
    """Lexer token carrying an optional DROP behavior (CASCADE/RESTRICT)."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
15 |
16 |
class Drop(Base):
    """Collects an optional DROP behavior and marks the statement start."""

    def drop(self, *items: str):
        if len(items) > 1:
            raise UserWarning(f'Invalid DROP: {items}')
        behavior = None
        if items:
            # only CASCADE / RESTRICT are valid drop behaviors
            behavior = items[0].upper()
            if behavior not in ('CASCADE', 'RESTRICT'):
                raise UserWarning(f'Invalid DROP: {items[0]}')
        self.append(DropToken(behavior))
        self.add_start(StartDropToken())
28 |
--------------------------------------------------------------------------------
/windyquery/collector/from_table.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import ValidationError
4 | from ._base import Base
5 |
TOKEN = 'FROM_TABLE'


class FromTableToken(LexToken):
    """Lexer token carrying a validated FROM table name."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
15 |
16 |
class FromTable(Base):
    """Collects the FROM table used by UPDATE ... FROM."""

    def from_table(self, name: str):
        try:
            validated = self.validator.validate_tablename(name)
        except ValidationError as err:
            raise UserWarning(f'invalid table name: {err}') from None
        self.append(FromTableToken(validated))
24 |
--------------------------------------------------------------------------------
/windyquery/collector/group_by.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import ValidationError
4 | from ._base import Base
5 |
TOKEN = 'GROUP_BY'


class GroupByToken(LexToken):
    """Lexer token carrying validated GROUP BY expressions."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
15 |
16 |
class GroupBy(Base):
    """Collects GROUP BY expressions as a single token."""

    def group_by(self, *items: str):
        """Validate each GROUP BY item and append a GroupByToken.

        Raises UserWarning when any item fails validation.
        """
        try:
            _items = [self.validator.validate_group_by(item) for item in items]
        except ValidationError as err:
            # Fixed copy-paste bug: the message previously said
            # "invalid ORDER BY" for a GROUP BY failure.
            raise UserWarning(f'invalid GROUP BY: {err}') from None

        self.append(GroupByToken(_items))
25 |
--------------------------------------------------------------------------------
/windyquery/collector/insert.py:
--------------------------------------------------------------------------------
1 | from typing import List, Any
2 | from ply.lex import LexToken
3 |
4 | from windyquery.ctx import Ctx
5 | from windyquery.validator import ValidationError
6 | from ._base import Base, StartInsertToken
7 |
8 |
TOKEN = 'INSERT'


class InsertToken(LexToken):
    """Lexer token carrying validated INSERT columns/values/params."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
18 |
19 |
class Insert(Base):
    """Collects INSERT columns and value rows as a single token."""

    def insert(self, columns: List[str], values: List[Any]):
        """Validate columns and each value row, tracking bound parameters.

        Raises UserWarning when validation fails.
        """
        try:
            sqlColumns = self.validator.validate_insert_columns(columns)
            sqlValues = []
            args = []
            for row in values:
                ctx = Ctx(self.paramOffset, [])
                sqlValues.append(
                    self.validator.validate_insert_values(row, ctx))
                # advance the $n placeholder offset by what this row bound
                self.paramOffset += len(ctx.args)
                args += ctx.args
        except ValidationError as err:
            raise UserWarning(f'invalid INSERT: {err}') from None

        # The key canonicalizes column order so compatible inserts can be
        # grouped; use sorted() instead of columns.sort() so the caller's
        # list is not mutated as a side effect.
        key = ','.join(sorted(columns))
        self.append(InsertToken(
            {'columns': sqlColumns, 'values': ', '.join(sqlValues), 'params': args, 'key': key}))
        self.add_start(StartInsertToken())
40 |
--------------------------------------------------------------------------------
/windyquery/collector/join.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from ply.lex import LexToken
3 |
4 | from windyquery.ctx import Ctx
5 | from windyquery.validator import ValidationError
6 | from ._base import Base
7 |
TOKEN = 'JOIN'


class JoinToken(LexToken):
    """Lexer token carrying a validated JOIN clause and its params."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
17 |
18 |
class Join(Base):
    """Collects a JOIN clause token, validating the join condition."""

    def join(self, tbl: str, *cond: Any):
        if not cond:
            raise UserWarning('JOIN cannot be empty')

        if len(cond) == 1 or '?' in cond[0]:
            # raw condition string, possibly with ?-placeholders
            condition, params = cond[0], cond[1:]
        elif len(cond) == 3:
            # (lhs, op, rhs) triple
            condition = f'{cond[0]} {cond[1]} {cond[2]}'
            params = []
        else:
            raise UserWarning(f"Invalid JOIN: {tbl} {cond}")

        try:
            ctx = Ctx(self.paramOffset, params)
            sql = self.validator.validate_join(tbl, condition, ctx)
        except ValidationError as err:
            raise UserWarning(f'invalid JOIN: {err}') from None

        token_value = {'sql': sql, 'params': ctx.args}
        self.paramOffset += len(token_value['params'])
        self.append(JoinToken(token_value))
42 |
--------------------------------------------------------------------------------
/windyquery/collector/limit.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.ctx import Ctx
4 | from windyquery.validator import ValidationError
5 | from ._base import Base
6 |
TOKEN = 'LIMIT'


class LimitToken(LexToken):
    """Lexer token carrying a validated LIMIT clause and its params."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
16 |
17 |
class Limit(Base):
    """Collects a LIMIT clause, binding the size as one parameter."""

    def limit(self, size: int):
        try:
            ctx = Ctx(self.paramOffset, [size])
            rendered = self.validator.validate_limit(str(size), ctx)
        except ValidationError as err:
            raise UserWarning(f'invalid LIMIT: {err}') from None

        # exactly one bound parameter (the size)
        self.paramOffset += 1
        self.append(LimitToken({'sql': rendered, 'params': ctx.args}))
28 |
--------------------------------------------------------------------------------
/windyquery/collector/offset.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.ctx import Ctx
4 | from windyquery.validator import ValidationError
5 | from ._base import Base
6 |
TOKEN = 'OFFSET'


class OffsetToken(LexToken):
    """Lexer token carrying a validated OFFSET clause and its params."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
16 |
17 |
class Offset(Base):
    """Collects an OFFSET clause, binding the size as one parameter."""

    def offset(self, size: int):
        try:
            ctx = Ctx(self.paramOffset, [size])
            rendered = self.validator.validate_offset(str(size), ctx)
        except ValidationError as err:
            raise UserWarning(f'invalid OFFSET: {err}') from None

        # exactly one bound parameter (the size)
        self.paramOffset += 1
        self.append(OffsetToken({'sql': rendered, 'params': ctx.args}))
28 |
--------------------------------------------------------------------------------
/windyquery/collector/on_conflict.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.ctx import Ctx
4 | from windyquery.validator import ValidationError
5 | from ._base import Base
6 |
7 |
TOKEN = 'ON_CONFLICT'


class OnConflictToken(LexToken):
    """Lexer token carrying a validated ON CONFLICT target/action."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
17 |
18 |
class OnConflict(Base):
    """Collects an ON CONFLICT clause: target, action, and any params."""

    def on_conflict(self, *items):
        """Validate (target, action, *params) and append a token.

        Raises UserWarning on bad input.
        """
        # Robustness: report a clear error instead of an IndexError when
        # fewer than (target, action) arguments are supplied.
        if len(items) < 2:
            raise UserWarning(
                f'ON CONFLICT requires a target and an action: {items}')
        try:
            # arguments beyond target/action are bound parameters
            ctx = Ctx(self.paramOffset, items[2:])
            target = self.validator.validate_conflict_target(items[0])
            action = self.validator.validate_conflict_action(items[1], ctx)
            self.paramOffset += len(ctx.args)
            params = ctx.args
        except ValidationError as err:
            raise UserWarning(f'invalid ON CONFLICT: {err}') from None

        self.append(OnConflictToken(
            {'target': target, 'action': action, 'params': params}))
32 |
--------------------------------------------------------------------------------
/windyquery/collector/order_by.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from ply.lex import LexToken
3 |
4 | from windyquery.validator import ValidationError
5 | from ._base import Base
6 |
TOKEN = 'ORDER_BY'


class OrderByToken(LexToken):
    """Lexer token carrying validated ORDER BY expressions."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
16 |
17 |
class OrderBy(Base):
    """Collects ORDER BY expressions as a single token."""

    def order_by(self, *items: str):
        validated = []
        for item in items:
            try:
                validated.append(self.validator.validate_order_by(item))
            except ValidationError as err:
                raise UserWarning(f'invalid ORDER BY: {err}') from None
        self.append(OrderByToken(validated))
26 |
--------------------------------------------------------------------------------
/windyquery/collector/raw.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from ply.lex import LexToken
3 |
4 | from ._base import Base, StartRawToken
5 |
6 |
TOKEN = 'RAW'


class RawToken(LexToken):
    """Lexer token carrying a raw SQL fragment and its params."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
16 |
17 |
class Raw(Base):
    """Collects a raw SQL fragment (no validation) with its parameters."""

    def raw(self, sql: str, *params: Any):
        args = list(params)
        # every parameter consumes a $n placeholder slot
        self.paramOffset += len(args)
        self.append(RawToken({'sql': sql, 'params': args}))
        self.add_start(StartRawToken())
25 |
--------------------------------------------------------------------------------
/windyquery/collector/returning.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import ValidationError
4 | from ._base import Base
5 |
6 |
TOKEN = 'RETURNING'


class ReturningToken(LexToken):
    """Lexer token carrying validated RETURNING columns."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
16 |
17 |
class Returning(Base):
    """Collects RETURNING columns, validated with the SELECT rules."""

    def returning(self, *items: str):
        validated = []
        for item in items:
            try:
                validated.append(self.validator.validate_select(item))
            except ValidationError as err:
                raise UserWarning(f'invalid RETURNING: {err}') from None
        self.append(ReturningToken(validated))
26 |
--------------------------------------------------------------------------------
/windyquery/collector/rrule.py:
--------------------------------------------------------------------------------
1 | from typing import List, Any
2 | from ply.lex import LexToken
3 |
4 | from windyquery.ctx import Ctx
5 | from windyquery.validator import ValidationError
6 | from ._base import Base
7 |
8 |
TOKEN = 'RRULE'


class RruleToken(LexToken):
    """Lexer token carrying a rendered rrule CTE definition."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
18 |
19 |
class Rrule(Base):
    """Collects rrule CTE rows, expanding occurrences into VALUES rows."""

    def rrule(self, name: str, columns: List[str], values: List[Any]):
        try:
            name = self.validator.validate_identifier(name)
            sqlColumns = self.validator.validate_rrule_columns(columns)
            sqlValues = []
            args = []
            for row in values:
                ctx = Ctx(self.paramOffset, [])
                # The first five entries of each row are the rrule control
                # values; anything after them is custom column data.
                rrulesetVal, sliceVal, afterVal, beforeVal, betweenVal = row[:5]
                # default cap on occurrences when no slice was requested
                occurrences = sliceVal if sliceVal is not None else slice(100000)
                del row[:5]
                sqlVal = self.validator.validate_rrule_values(
                    ctx, row, rrulesetVal, occurrences, afterVal, beforeVal, betweenVal)
                if sqlVal:
                    sqlValues.append(sqlVal)
                self.paramOffset += len(ctx.args)
                args += ctx.args
        except ValidationError as err:
            raise UserWarning(f'invalid RRULE: {err}') from None

        self.append(RruleToken(
            {'name': name, 'columns': sqlColumns, 'values': ', '.join(sqlValues), 'params': args}))
53 |
--------------------------------------------------------------------------------
/windyquery/collector/schema.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import ValidationError
4 | from ._base import Base
5 |
TOKEN = 'SCHEMA'


class SchemaToken(LexToken):
    """Lexer token carrying a validated schema definition string."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
15 |
16 |
class Schema(Base):
    """Collects a schema definition string after validation."""

    def schema(self, s: str):
        try:
            validated = self.validator.validate_schema(s)
        except ValidationError as err:
            raise UserWarning(f'invalid schema: {err}') from None
        self.append(SchemaToken(validated))
24 |
--------------------------------------------------------------------------------
/windyquery/collector/select.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import ValidationError
4 | from ._base import Base, StartSelectToken
5 |
6 |
TOKEN = 'SELECT'


class SelectToken(LexToken):
    """Lexer token carrying validated SELECT expressions."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
16 |
17 |
class Select(Base):
    """Collects SELECT expressions and marks the statement start."""

    def select(self, *items: str):
        validated = []
        for item in items:
            try:
                validated.append(self.validator.validate_select(item))
            except ValidationError as err:
                raise UserWarning(f'invalid SELECT: {err}') from None
        self.append(SelectToken(validated))
        self.add_start(StartSelectToken())
27 |
--------------------------------------------------------------------------------
/windyquery/collector/table.py:
--------------------------------------------------------------------------------
1 | from ply.lex import LexToken
2 |
3 | from windyquery.validator import ValidationError
4 | from ._base import Base
5 |
TOKEN = 'TABLE'


class TableToken(LexToken):
    """Lexer token carrying a validated table name."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
15 |
16 |
class Table(Base):
    """Collects the primary table name after validation."""

    def table(self, name: str):
        try:
            validated = self.validator.validate_tablename(name)
        except ValidationError as err:
            raise UserWarning(f'invalid table name: {err}') from None
        self.append(TableToken(validated))
24 |
--------------------------------------------------------------------------------
/windyquery/collector/update.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from ply.lex import LexToken
3 |
4 | from windyquery.ctx import Ctx
5 | from windyquery.validator import ValidationError
6 | from ._base import Base, StartUpdateToken
7 |
TOKEN = 'UPDATE'


class UpdateToken(LexToken):
    """Lexer token carrying a validated SET clause and its params."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
17 |
18 |
class Update(Base):
    """Collects a SET clause and marks the UPDATE statement start."""

    def update(self, sql: str, *params: Any):
        try:
            ctx = Ctx(self.paramOffset, params)
            sql = self.validator.validate_update(sql, ctx)
            self.paramOffset += len(ctx.args)
        except ValidationError as err:
            raise UserWarning(f'invalid UPDATE: {err}') from None

        self.append(UpdateToken({'sql': sql, 'params': ctx.args}))
        self.add_start(StartUpdateToken())
31 |
--------------------------------------------------------------------------------
/windyquery/collector/where.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from ply.lex import LexToken
3 |
4 | from windyquery.ctx import Ctx
5 | from windyquery.validator import ValidationError
6 | from windyquery.utils import process_value
7 | from ._base import Base
8 |
TOKEN = 'WHERE'


class WhereToken(LexToken):
    """Lexer token carrying a validated WHERE condition and its params."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
18 |
19 |
class Where(Base):
    def where(self, _where: str, *items: Any):
        # Three call forms are supported:
        #   1) raw template:  where('a = ? AND b = ?', v1, v2)  or  where('a IS NULL')
        #   2) column+value:  where('a', v)   -> operator inferred (=, IN, IS)
        #   3) column+op+val: where('a', '>', v)
        if len(items) == 0 or '?' in _where:
            # Form 1: _where is a complete condition; items fill its ?-placeholders.
            try:
                ctx = Ctx(self.paramOffset, items)
                sql = self.validator.validate_where(_where, ctx)
            except ValidationError as err:
                raise UserWarning(f'invalid WHERE: {err}') from None
            value = {'sql': sql, 'params': ctx.args}
        elif len(items) <= 2:
            if len(items) == 1:
                # Form 2: infer the operator from the value.
                if isinstance(items[0], list):
                    operator = 'IN'
                else:
                    operator = '='
                val = items[0]
                if val is None:
                    # None always compares with IS NULL
                    operator = 'IS'
            else:
                # Form 3: explicit operator supplied by the caller.
                operator = items[0]
                val = items[1]

            _where += f' {operator}'
            if val is None:
                _where += ' NULL'
                params = []
            else:
                # Values behind a jsonb accessor ('->') are always bound as
                # parameters rather than inlined — presumably so the driver
                # encodes them correctly; confirm against the validator.
                jsonbTest = '->' in _where
                if operator == 'IN' or operator == 'NOT IN':
                    # Expand the list into a parenthesized value list;
                    # inline safe literals, bind everything else.
                    params = []
                    vs = []
                    for v in val:
                        processed, _ = process_value(v)
                        if processed == '?' or jsonbTest:
                            params.append(v)
                            vs.append('?')
                        else:
                            vs.append(str(processed))
                    _where += ' (' + ', '.join(vs) + ')'
                else:
                    params = []
                    processed, _ = process_value(val)
                    if processed == '?' or jsonbTest:
                        params.append(val)
                        _where += ' ?'
                    else:
                        _where += f' {processed}'

            # Re-validate the fully assembled condition.
            try:
                ctx = Ctx(self.paramOffset, params)
                sql = self.validator.validate_where(_where, ctx)
            except ValidationError as err:
                raise UserWarning(f'invalid WHERE: {err}') from None
            value = {'sql': sql, 'params': ctx.args}
        else:
            raise UserWarning(f"Invalid WHERE: {_where} {items}")

        self.paramOffset += len(value['params'])
        self.append(WhereToken(value))
79 |
--------------------------------------------------------------------------------
/windyquery/collector/with_values.py:
--------------------------------------------------------------------------------
1 | from typing import List, Any
2 | from ply.lex import LexToken
3 |
4 | from windyquery.ctx import Ctx
5 | from windyquery.validator import ValidationError
6 | from ._base import Base
7 |
8 |
TOKEN = 'WITH_VALUES'


class WithValuesToken(LexToken):
    """Lexer token carrying a rendered WITH ... VALUES definition."""

    def __init__(self, value):
        self.type, self.value = TOKEN, value
        # positions are stamped later by Base.append
        self.lineno = self.lexpos = 0
18 |
19 |
class WithValues(Base):
    """Collector mixin that validates and records a WITH ... VALUES clause."""

    def with_values(self, name: str, columns: List[str], values: List[Any]):
        """Validate the CTE name, column list and value rows, then emit a
        WithValuesToken. Raises UserWarning on any validation failure."""
        try:
            name = self.validator.validate_identifier(name)
            sqlColumns = self.validator.validate_with_columns(columns)
            sqlValues = []
            args = []
            for row in values:
                # each row gets its own ctx so placeholder numbering
                # continues from the current offset
                ctx = Ctx(self.paramOffset, [])
                rowSql = self.validator.validate_with_values(ctx, row)
                if rowSql:
                    sqlValues.append(rowSql)
                self.paramOffset += len(ctx.args)
                args.extend(ctx.args)
        except ValidationError as err:
            raise UserWarning(f'invalid WITH VALUES: {err}') from None

        tokenValue = {
            'name': name,
            'columns': sqlColumns,
            'values': ', '.join(sqlValues),
            'params': args,
        }
        self.append(WithValuesToken(tokenValue))
39 |
--------------------------------------------------------------------------------
/windyquery/combiner/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Any, MutableMapping
2 |
3 | from ._base import Base
4 | from .select_stmt import SelectStmt
5 | from .update_stmt import UpdateStmt
6 | from .insert_stmt import InsertStmt
7 | from .delete_stmt import DeleteStmt
8 | from .create_stmt import CreateStmt
9 | from .drop_stmt import DropStmt
10 | from .alter_stmt import AlterStmt
11 | from .raw_stmt import RawStmt
12 |
13 |
class RuleParser(Base, SelectStmt, UpdateStmt, InsertStmt, DeleteStmt, CreateStmt, DropStmt, AlterStmt, RawStmt):
    """Aggregates the shared start rule (Base) with every per-statement
    rule mixin; PLY reads the grammar from their p_* docstrings."""
    pass


# Built once at import time (constructing it runs yacc table generation);
# every Combiner.run() call reuses this single parser instance.
_parser = RuleParser()
19 |
20 |
class Combiner:
    """Combine tokens produced by the collector into one result mapping.

    The combiner doubles as the "lexer" handed to the PLY parser: the
    grammar actions call append()/prepend() on it, and run() returns the
    assembled result dict ('_id' names the statement kind, '_params'
    accumulates bind values in order).
    """
    result: MutableMapping[str, Any]

    def __init__(self, collector):
        self.collector = collector
        self.result = {'_id': '', '_params': []}
        # occurrence count per token type, consulted by required()
        self.occurred = {}

    def set_id(self, _id: str):
        """Record which kind of statement this result represents."""
        self.result['_id'] = _id

    def required(self, *items: str):
        """Raise UserWarning if any of the given token types never occurred."""
        for item in items:
            if item not in self.occurred:
                raise UserWarning(f'{item} is required')

    def _add(self, typ, val, limit, at_front):
        """Shared implementation behind append()/prepend().

        limit == 1 stores the value as a scalar (second store raises);
        otherwise values collect into a list, optionally bounded by
        `limit`. Any 'params' carried by a dict value flow into
        '_params' at the matching end.
        """
        if limit == 1:
            if typ in self.result:
                raise UserWarning(
                    f'cannot have more than {limit}: {typ} - {val}')
            self.result[typ] = val
        else:
            bucket = self.result.setdefault(typ, [])
            if at_front:
                bucket.insert(0, val)
            else:
                bucket.append(val)
            if limit is not None and len(bucket) > limit:
                raise UserWarning(
                    f'cannot have more than {limit}: {typ} - {val}')
        self.occurred[typ] = self.occurred.get(typ, 0) + 1
        if isinstance(val, dict) and 'params' in val:
            if at_front:
                self.result['_params'] = val['params'] + self.result['_params']
            else:
                self.result['_params'] += val['params']
        return self.result

    def append(self, typ, val, limit=None):
        """Add a token value at the end; see _add for limit semantics."""
        return self._add(typ, val, limit, at_front=False)

    def prepend(self, typ, val, limit=None):
        """Add a token value at the front; see _add for limit semantics."""
        return self._add(typ, val, limit, at_front=True)

    def token(self):
        """PLY lexer interface: forward to the collector's token stream."""
        return self.collector.token()

    def run(self):
        """Parse the collected tokens and return the assembled result."""
        _parser.parse(self)
        return self.result
82 |
--------------------------------------------------------------------------------
/windyquery/combiner/_base.py:
--------------------------------------------------------------------------------
1 | import ply.yacc as yacc
2 |
3 |
4 | def _rule(r):
5 | def decorate(func):
6 | func.__doc__ = r
7 | return func
8 | return decorate
9 |
10 |
class Base:
    """Grammar entry point shared by all statement combiners.

    PLY's yacc() introspects this object: the `tokens` list and the
    docstrings of the p_* methods (installed by _rule) define the grammar.
    """
    # PLY requires the token-name list to be reachable as an attribute.
    from windyquery.collector import tokens

    @_rule('''sql : START_SELECT select_stmts
              | START_UPDATE update_stmts
              | START_INSERT insert_stmts
              | START_DELETE delete_stmts
              | START_CREATE create_stmts
              | START_DROP drop_stmts
              | START_ALTER alter_stmts
              | START_RAW raw_stmts''')
    def p_sql(self, p):
        # Tag the result with the statement kind and enforce the token
        # types that statement cannot be built without.
        if p[1] == 'START_SELECT':
            p.lexer.set_id('select')
            p.lexer.required('SELECT', 'TABLE')
        elif p[1] == 'START_UPDATE':
            p.lexer.set_id('update')
            p.lexer.required('UPDATE', 'TABLE')
        elif p[1] == 'START_INSERT':
            p.lexer.set_id('insert')
            p.lexer.required('INSERT', 'TABLE')
        elif p[1] == 'START_DELETE':
            p.lexer.set_id('delete')
            p.lexer.required('DELETE', 'TABLE')
        elif p[1] == 'START_CREATE':
            p.lexer.set_id('create')
            p.lexer.required('SCHEMA', 'CREATE')
        elif p[1] == 'START_DROP':
            p.lexer.set_id('drop')
            p.lexer.required('SCHEMA', 'DROP')
        elif p[1] == 'START_ALTER':
            p.lexer.set_id('alter')
            p.lexer.required('SCHEMA', 'ALTER')
        elif p[1] == 'START_RAW':
            p.lexer.set_id('raw')
            p.lexer.required('RAW')
        else:
            raise UserWarning(f'not implemented: {p[1]}')

    def p_error(self, p):
        # PLY error hook: with a bad token, record an 'error' result;
        # at end-of-input (p is None) the statement was incomplete.
        if p:
            msg = f"invalid using {p.type!r} at pos {p.lexpos!r}"
            p.lexer.result = {'_id': 'error', 'message': msg}
        else:
            raise UserWarning('sql is not complete')

    def __init__(self):
        # debug=False suppresses parser.out; tables are cached in parsetab.py
        self._parser = yacc.yacc(module=self, start='sql', debug=False)

    def parse(self, combiner):
        # the combiner acts as the lexer (it exposes token())
        self._parser.parse(lexer=combiner)
62 |
--------------------------------------------------------------------------------
/windyquery/combiner/alter_stmt.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class AlterStmt:
    """Grammar rules for ALTER statements; rule text lives in the _rule
    docstrings and is consumed by PLY."""

    @_rule('alter_stmt : SCHEMA')
    def p_alter_schema(self, p):
        # at most one schema clause (limit=1)
        p.lexer.append('SCHEMA', p[1], 1)

    @_rule('alter_stmt : ALTER')
    def p_alter_alter(self, p):
        # at most one alter clause (limit=1)
        p.lexer.append('ALTER', p[1], 1)

    @_rule('''alter_stmts : alter_stmt alter_stmts
                          | alter_stmt''')
    def p_alter_stmts(self, p):
        # clauses were already recorded as each alter_stmt reduced
        pass
17 |
--------------------------------------------------------------------------------
/windyquery/combiner/create_stmt.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class CreateStmt:
    """Grammar rules for CREATE statements; rule text lives in the _rule
    docstrings and is consumed by PLY."""

    @_rule('create_stmt : SCHEMA')
    def p_create_schema(self, p):
        # at most one schema clause (limit=1)
        p.lexer.append('SCHEMA', p[1], 1)

    @_rule('create_stmt : CREATE')
    def p_create_create(self, p):
        # at most one create clause (limit=1)
        p.lexer.append('CREATE', p[1], 1)

    @_rule('create_stmt : WHERE')
    def p_create_where(self, p):
        # WHERE may repeat; values are collected into a list
        p.lexer.append('WHERE', p[1])

    @_rule('''create_stmts : create_stmt create_stmts
                           | create_stmt''')
    def p_create_stmts(self, p):
        pass
21 |
--------------------------------------------------------------------------------
/windyquery/combiner/delete_stmt.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class DeleteStmt:
    """Grammar rules for DELETE statements; rule text lives in the _rule
    docstrings and is consumed by PLY."""

    @_rule('delete_stmt : TABLE')
    def p_delete_table(self, p):
        # at most one target table (limit=1)
        p.lexer.append('TABLE', p[1], 1)

    @_rule('delete_stmt : DELETE')
    def p_delete_delete(self, p):
        p.lexer.append('DELETE', p[1], 1)

    @_rule('delete_stmt : WHERE')
    def p_delete_where(self, p):
        # WHERE may repeat; values are collected into a list
        p.lexer.append('WHERE', p[1])

    @_rule('delete_stmt : RETURNING')
    def p_delete_returning(self, p):
        p.lexer.append('RETURNING', p[1], 1)

    @_rule('''delete_stmts : delete_stmt delete_stmts
                           | delete_stmt''')
    def p_delete_stmts(self, p):
        pass
25 |
--------------------------------------------------------------------------------
/windyquery/combiner/drop_stmt.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class DropStmt:
    """Grammar rules for DROP statements; rule text lives in the _rule
    docstrings and is consumed by PLY."""

    @_rule('drop_stmt : SCHEMA')
    def p_drop_schema(self, p):
        # at most one schema clause (limit=1)
        p.lexer.append('SCHEMA', p[1], 1)

    @_rule('drop_stmt : DROP')
    def p_drop_drop(self, p):
        p.lexer.append('DROP', p[1], 1)

    @_rule('''drop_stmts : drop_stmt drop_stmts
                         | drop_stmt''')
    def p_drop_stmts(self, p):
        pass
17 |
--------------------------------------------------------------------------------
/windyquery/combiner/insert_stmt.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class InsertStmt:
    """Grammar rules for INSERT statements; rule text lives in the _rule
    docstrings and is consumed by PLY."""

    @_rule('insert_stmt : TABLE')
    def p_insert_table(self, p):
        # at most one target table (limit=1)
        p.lexer.append('TABLE', p[1], 1)

    @_rule('insert_stmt : INSERT')
    def p_insert_insert(self, p):
        # no limit: INSERT clauses may repeat and are collected as a list
        p.lexer.append('INSERT', p[1])

    @_rule('insert_stmt : RETURNING')
    def p_insert_returning(self, p):
        p.lexer.append('RETURNING', p[1], 1)

    @_rule('insert_stmt : ON_CONFLICT')
    def p_insert_on_conflict(self, p):
        p.lexer.append('ON_CONFLICT', p[1], 1)

    @_rule('''insert_stmts : insert_stmt insert_stmts
                           | insert_stmt''')
    def p_insert_stmts(self, p):
        pass
25 |
--------------------------------------------------------------------------------
/windyquery/combiner/parsetab.py:
--------------------------------------------------------------------------------
1 |
# parsetab.py
# This file is automatically generated by PLY (yacc) from the grammar in
# the combiner's p_* rule docstrings. Do not edit by hand; delete it to
# force regeneration after changing any grammar rule.
# pylint: disable=W,C,R
5 | _tabversion = '3.10'
6 |
7 | _lr_method = 'LALR'
8 |
9 | _lr_signature = 'sqlALTER CREATE DELETE DROP FROM_TABLE GROUP_BY INSERT JOIN LIMIT OFFSET ON_CONFLICT ORDER_BY RAW RETURNING RRULE SCHEMA SELECT START_ALTER START_CREATE START_DELETE START_DROP START_INSERT START_RAW START_SELECT START_UPDATE TABLE UPDATE WHERE WITH_VALUESalter_stmt : SCHEMAcreate_stmt : SCHEMAdelete_stmt : TABLEdrop_stmt : SCHEMAinsert_stmt : TABLEraw_stmt : RAWselect_stmt : WHEREupdate_stmt : TABLEalter_stmt : ALTERcreate_stmt : CREATEdelete_stmt : DELETEdrop_stmt : DROPinsert_stmt : INSERTraw_stmt : RRULEselect_stmt : ORDER_BYupdate_stmt : UPDATEalter_stmts : alter_stmt alter_stmts\n | alter_stmtcreate_stmt : WHEREdelete_stmt : WHEREdrop_stmts : drop_stmt drop_stmts\n | drop_stmtinsert_stmt : RETURNINGraw_stmt : WITH_VALUESselect_stmt : GROUP_BYupdate_stmt : FROM_TABLEsql : START_SELECT select_stmts\n | START_UPDATE update_stmts\n | START_INSERT insert_stmts\n | START_DELETE delete_stmts\n | START_CREATE create_stmts\n | START_DROP drop_stmts\n | START_ALTER alter_stmts\n | START_RAW raw_stmtscreate_stmts : create_stmt create_stmts\n | create_stmtdelete_stmt : RETURNINGinsert_stmt : ON_CONFLICTraw_stmts : raw_stmt\n | raw_stmt raw_stmtsselect_stmt : JOINupdate_stmt : JOINdelete_stmts : delete_stmt delete_stmts\n | delete_stmtinsert_stmts : insert_stmt insert_stmts\n | insert_stmtselect_stmt : LIMITupdate_stmt : WHEREselect_stmt : OFFSETupdate_stmt : RRULEselect_stmt : SELECTupdate_stmt : RETURNINGselect_stmt : TABLEupdate_stmt : WITH_VALUESselect_stmt : RRULEupdate_stmts : update_stmt update_stmts\n | update_stmtselect_stmt : WITH_VALUESselect_stmts : select_stmt\n | select_stmt select_stmts'
10 |
11 | _lr_action_items = {'START_SELECT':([0,],[2,]),'START_UPDATE':([0,],[3,]),'START_INSERT':([0,],[4,]),'START_DELETE':([0,],[5,]),'START_CREATE':([0,],[6,]),'START_DROP':([0,],[7,]),'START_ALTER':([0,],[8,]),'START_RAW':([0,],[9,]),'$end':([1,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,],[0,-27,-59,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,-28,-57,-8,-16,-26,-42,-48,-50,-52,-54,-29,-46,-5,-13,-23,-38,-30,-44,-3,-11,-20,-37,-31,-36,-2,-10,-19,-32,-22,-4,-12,-33,-18,-1,-9,-34,-39,-6,-14,-24,-60,-56,-45,-43,-35,-21,-17,-40,]),'WHERE':([2,3,5,6,11,12,13,14,15,16,17,18,19,20,21,23,24,25,26,27,28,29,30,31,39,40,41,42,43,45,46,47,48,],[12,28,42,48,12,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,28,-8,-16,-26,-42,-48,-50,-52,-54,42,-3,-11,-20,-37,48,-2,-10,-19,]),'ORDER_BY':([2,11,12,13,14,15,16,17,18,19,20,21,],[13,13,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,]),'GROUP_BY':([2,11,12,13,14,15,16,17,18,19,20,21,],[14,14,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,]),'JOIN':([2,3,11,12,13,14,15,16,17,18,19,20,21,23,24,25,26,27,28,29,30,31,],[15,27,15,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,27,-8,-16,-26,-42,-48,-50,-52,-54,]),'LIMIT':([2,11,12,13,14,15,16,17,18,19,20,21,],[16,16,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,]),'OFFSET':([2,11,12,13,14,15,16,17,18,19,20,21,],[17,17,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,]),'SELECT':([2,11,12,13,14,15,16,17,18,19,20,21,],[18,18,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,]),'TABLE':([2,3,4,5,11,12,13,14,15,16,17,18,19,20,21,23,24,25,26,27,28,29,30,31,33,34,35,36,37,39,40,41,42,43,],[19,24,34,40,19,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,24,-8,-16,-26,-42,-48,-50,-52,-54,34,-5,-13,-23,-38,40,-3,-11,-20,-37,]),'RRULE':([2,3,9,11,12,13,14,15,16,17,18,19,20,21,23,24,25,26,27,28,29,30,31,58,59,60,61,],[20,29,60,20,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,29,-8,-16,-26,-42,-48,-50,-52,-54,60,-6,-14,-24,]),'WITH_VALUES':([2,3,9,11
,12,13,14,15,16,17,18,19,20,21,23,24,25,26,27,28,29,30,31,58,59,60,61,],[21,31,61,21,-7,-15,-25,-41,-47,-49,-51,-53,-55,-58,31,-8,-16,-26,-42,-48,-50,-52,-54,61,-6,-14,-24,]),'UPDATE':([3,23,24,25,26,27,28,29,30,31,],[25,25,-8,-16,-26,-42,-48,-50,-52,-54,]),'FROM_TABLE':([3,23,24,25,26,27,28,29,30,31,],[26,26,-8,-16,-26,-42,-48,-50,-52,-54,]),'RETURNING':([3,4,5,23,24,25,26,27,28,29,30,31,33,34,35,36,37,39,40,41,42,43,],[30,36,43,30,-8,-16,-26,-42,-48,-50,-52,-54,36,-5,-13,-23,-38,43,-3,-11,-20,-37,]),'INSERT':([4,33,34,35,36,37,],[35,35,-5,-13,-23,-38,]),'ON_CONFLICT':([4,33,34,35,36,37,],[37,37,-5,-13,-23,-38,]),'DELETE':([5,39,40,41,42,43,],[41,41,-3,-11,-20,-37,]),'SCHEMA':([6,7,8,45,46,47,48,50,51,52,54,55,56,],[46,51,55,46,-2,-10,-19,51,-4,-12,55,-1,-9,]),'CREATE':([6,45,46,47,48,],[47,47,-2,-10,-19,]),'DROP':([7,50,51,52,],[52,52,-4,-12,]),'ALTER':([8,54,55,56,],[56,56,-1,-9,]),'RAW':([9,58,59,60,61,],[59,59,-6,-14,-24,]),}
12 |
13 | _lr_action = {}
14 | for _k, _v in _lr_action_items.items():
15 | for _x,_y in zip(_v[0],_v[1]):
16 | if not _x in _lr_action: _lr_action[_x] = {}
17 | _lr_action[_x][_k] = _y
18 | del _lr_action_items
19 |
20 | _lr_goto_items = {'sql':([0,],[1,]),'select_stmts':([2,11,],[10,62,]),'select_stmt':([2,11,],[11,11,]),'update_stmts':([3,23,],[22,63,]),'update_stmt':([3,23,],[23,23,]),'insert_stmts':([4,33,],[32,64,]),'insert_stmt':([4,33,],[33,33,]),'delete_stmts':([5,39,],[38,65,]),'delete_stmt':([5,39,],[39,39,]),'create_stmts':([6,45,],[44,66,]),'create_stmt':([6,45,],[45,45,]),'drop_stmts':([7,50,],[49,67,]),'drop_stmt':([7,50,],[50,50,]),'alter_stmts':([8,54,],[53,68,]),'alter_stmt':([8,54,],[54,54,]),'raw_stmts':([9,58,],[57,69,]),'raw_stmt':([9,58,],[58,58,]),}
21 |
22 | _lr_goto = {}
23 | for _k, _v in _lr_goto_items.items():
24 | for _x, _y in zip(_v[0], _v[1]):
25 | if not _x in _lr_goto: _lr_goto[_x] = {}
26 | _lr_goto[_x][_k] = _y
27 | del _lr_goto_items
28 | _lr_productions = [
29 | ("S' -> sql","S'",1,None,None,None),
30 | ('alter_stmt -> SCHEMA','alter_stmt',1,'p_alter_schema','alter_stmt.py',6),
31 | ('create_stmt -> SCHEMA','create_stmt',1,'p_create_schema','create_stmt.py',6),
32 | ('delete_stmt -> TABLE','delete_stmt',1,'p_delete_table','delete_stmt.py',6),
33 | ('drop_stmt -> SCHEMA','drop_stmt',1,'p_drop_schema','drop_stmt.py',6),
34 | ('insert_stmt -> TABLE','insert_stmt',1,'p_insert_table','insert_stmt.py',6),
35 | ('raw_stmt -> RAW','raw_stmt',1,'p_raw_raw','raw_stmt.py',6),
36 | ('select_stmt -> WHERE','select_stmt',1,'p_select_where','select_stmt.py',6),
37 | ('update_stmt -> TABLE','update_stmt',1,'p_update_table','update_stmt.py',6),
38 | ('alter_stmt -> ALTER','alter_stmt',1,'p_alter_alter','alter_stmt.py',10),
39 | ('create_stmt -> CREATE','create_stmt',1,'p_create_create','create_stmt.py',10),
40 | ('delete_stmt -> DELETE','delete_stmt',1,'p_delete_delete','delete_stmt.py',10),
41 | ('drop_stmt -> DROP','drop_stmt',1,'p_drop_drop','drop_stmt.py',10),
42 | ('insert_stmt -> INSERT','insert_stmt',1,'p_insert_insert','insert_stmt.py',10),
43 | ('raw_stmt -> RRULE','raw_stmt',1,'p_raw_rrule','raw_stmt.py',10),
44 | ('select_stmt -> ORDER_BY','select_stmt',1,'p_select_order_by','select_stmt.py',10),
45 | ('update_stmt -> UPDATE','update_stmt',1,'p_update_update','update_stmt.py',10),
46 | ('alter_stmts -> alter_stmt alter_stmts','alter_stmts',2,'p_alter_stmts','alter_stmt.py',14),
47 | ('alter_stmts -> alter_stmt','alter_stmts',1,'p_alter_stmts','alter_stmt.py',15),
48 | ('create_stmt -> WHERE','create_stmt',1,'p_create_where','create_stmt.py',14),
49 | ('delete_stmt -> WHERE','delete_stmt',1,'p_delete_where','delete_stmt.py',14),
50 | ('drop_stmts -> drop_stmt drop_stmts','drop_stmts',2,'p_drop_stmts','drop_stmt.py',14),
51 | ('drop_stmts -> drop_stmt','drop_stmts',1,'p_drop_stmts','drop_stmt.py',15),
52 | ('insert_stmt -> RETURNING','insert_stmt',1,'p_insert_returning','insert_stmt.py',14),
53 | ('raw_stmt -> WITH_VALUES','raw_stmt',1,'p_raw_with_values','raw_stmt.py',14),
54 | ('select_stmt -> GROUP_BY','select_stmt',1,'p_select_group_by','select_stmt.py',14),
55 | ('update_stmt -> FROM_TABLE','update_stmt',1,'p_update_from','update_stmt.py',14),
56 | ('sql -> START_SELECT select_stmts','sql',2,'p_sql','_base.py',15),
57 | ('sql -> START_UPDATE update_stmts','sql',2,'p_sql','_base.py',16),
58 | ('sql -> START_INSERT insert_stmts','sql',2,'p_sql','_base.py',17),
59 | ('sql -> START_DELETE delete_stmts','sql',2,'p_sql','_base.py',18),
60 | ('sql -> START_CREATE create_stmts','sql',2,'p_sql','_base.py',19),
61 | ('sql -> START_DROP drop_stmts','sql',2,'p_sql','_base.py',20),
62 | ('sql -> START_ALTER alter_stmts','sql',2,'p_sql','_base.py',21),
63 | ('sql -> START_RAW raw_stmts','sql',2,'p_sql','_base.py',22),
64 | ('create_stmts -> create_stmt create_stmts','create_stmts',2,'p_create_stmts','create_stmt.py',18),
65 | ('create_stmts -> create_stmt','create_stmts',1,'p_create_stmts','create_stmt.py',19),
66 | ('delete_stmt -> RETURNING','delete_stmt',1,'p_delete_returning','delete_stmt.py',18),
67 | ('insert_stmt -> ON_CONFLICT','insert_stmt',1,'p_insert_on_conflict','insert_stmt.py',18),
68 | ('raw_stmts -> raw_stmt','raw_stmts',1,'p_raw_stmts','raw_stmt.py',18),
69 | ('raw_stmts -> raw_stmt raw_stmts','raw_stmts',2,'p_raw_stmts','raw_stmt.py',19),
70 | ('select_stmt -> JOIN','select_stmt',1,'p_select_join','select_stmt.py',18),
71 | ('update_stmt -> JOIN','update_stmt',1,'p_update_join','update_stmt.py',18),
72 | ('delete_stmts -> delete_stmt delete_stmts','delete_stmts',2,'p_delete_stmts','delete_stmt.py',22),
73 | ('delete_stmts -> delete_stmt','delete_stmts',1,'p_delete_stmts','delete_stmt.py',23),
74 | ('insert_stmts -> insert_stmt insert_stmts','insert_stmts',2,'p_insert_stmts','insert_stmt.py',22),
75 | ('insert_stmts -> insert_stmt','insert_stmts',1,'p_insert_stmts','insert_stmt.py',23),
76 | ('select_stmt -> LIMIT','select_stmt',1,'p_select_limit','select_stmt.py',22),
77 | ('update_stmt -> WHERE','update_stmt',1,'p_update_where','update_stmt.py',22),
78 | ('select_stmt -> OFFSET','select_stmt',1,'p_select_offset','select_stmt.py',26),
79 | ('update_stmt -> RRULE','update_stmt',1,'p_update_rrule','update_stmt.py',26),
80 | ('select_stmt -> SELECT','select_stmt',1,'p_select_select','select_stmt.py',30),
81 | ('update_stmt -> RETURNING','update_stmt',1,'p_update_returning','update_stmt.py',30),
82 | ('select_stmt -> TABLE','select_stmt',1,'p_select_table','select_stmt.py',34),
83 | ('update_stmt -> WITH_VALUES','update_stmt',1,'p_update_with_values','update_stmt.py',34),
84 | ('select_stmt -> RRULE','select_stmt',1,'p_select_rrule','select_stmt.py',38),
85 | ('update_stmts -> update_stmt update_stmts','update_stmts',2,'p_update_stmts','update_stmt.py',38),
86 | ('update_stmts -> update_stmt','update_stmts',1,'p_update_stmts','update_stmt.py',39),
87 | ('select_stmt -> WITH_VALUES','select_stmt',1,'p_select_with_values','select_stmt.py',42),
88 | ('select_stmts -> select_stmt','select_stmts',1,'p_select_stmts','select_stmt.py',46),
89 | ('select_stmts -> select_stmt select_stmts','select_stmts',2,'p_select_stmts','select_stmt.py',47),
90 | ]
91 |
--------------------------------------------------------------------------------
/windyquery/combiner/raw_stmt.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class RawStmt:
    """Grammar rules for raw SQL statements; rule text lives in the _rule
    docstrings and is consumed by PLY."""

    @_rule('raw_stmt : RAW')
    def p_raw_raw(self, p):
        # exactly one raw SQL body (limit=1)
        p.lexer.append('RAW', p[1], 1)

    @_rule('raw_stmt : RRULE')
    def p_raw_rrule(self, p):
        # rrule clauses may repeat; collected into a list
        p.lexer.append('RRULE', p[1])

    @_rule('raw_stmt : WITH_VALUES')
    def p_raw_with_values(self, p):
        p.lexer.append('WITH_VALUES', p[1])

    @_rule('''raw_stmts : raw_stmt
                        | raw_stmt raw_stmts''')
    def p_raw_stmts(self, p):
        pass
21 |
--------------------------------------------------------------------------------
/windyquery/combiner/select_stmt.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class SelectStmt:
    """Grammar rules for SELECT statements; rule text lives in the _rule
    docstrings and is consumed by PLY. A third argument of 1 to append()
    marks clauses that may appear at most once."""

    @_rule('select_stmt : WHERE')
    def p_select_where(self, p):
        # repeatable clause: collected into a list
        p.lexer.append('WHERE', p[1])

    @_rule('select_stmt : ORDER_BY')
    def p_select_order_by(self, p):
        p.lexer.append('ORDER_BY', p[1])

    @_rule('select_stmt : GROUP_BY')
    def p_select_group_by(self, p):
        p.lexer.append('GROUP_BY', p[1])

    @_rule('select_stmt : JOIN')
    def p_select_join(self, p):
        p.lexer.append('JOIN', p[1])

    @_rule('select_stmt : LIMIT')
    def p_select_limit(self, p):
        # single-occurrence clause (limit=1)
        p.lexer.append('LIMIT', p[1], 1)

    @_rule('select_stmt : OFFSET')
    def p_select_offset(self, p):
        p.lexer.append('OFFSET', p[1], 1)

    @_rule('select_stmt : SELECT')
    def p_select_select(self, p):
        p.lexer.append('SELECT', p[1], 1)

    @_rule('select_stmt : TABLE')
    def p_select_table(self, p):
        p.lexer.append('TABLE', p[1], 1)

    @_rule('select_stmt : RRULE')
    def p_select_rrule(self, p):
        p.lexer.append('RRULE', p[1])

    @_rule('select_stmt : WITH_VALUES')
    def p_select_with_values(self, p):
        p.lexer.append('WITH_VALUES', p[1])

    @_rule('''select_stmts : select_stmt
                           | select_stmt select_stmts''')
    def p_select_stmts(self, p):
        pass
49 |
--------------------------------------------------------------------------------
/windyquery/combiner/update_stmt.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class UpdateStmt:
    """Grammar rules for UPDATE statements; rule text lives in the _rule
    docstrings and is consumed by PLY. A third argument of 1 to append()
    marks clauses that may appear at most once."""

    @_rule('update_stmt : TABLE')
    def p_update_table(self, p):
        # at most one target table (limit=1)
        p.lexer.append('TABLE', p[1], 1)

    @_rule('update_stmt : UPDATE')
    def p_update_update(self, p):
        # repeatable clause: collected into a list
        p.lexer.append('UPDATE', p[1])

    @_rule('update_stmt : FROM_TABLE')
    def p_update_from(self, p):
        p.lexer.append('FROM_TABLE', p[1], 1)

    @_rule('update_stmt : JOIN')
    def p_update_join(self, p):
        p.lexer.append('JOIN', p[1])

    @_rule('update_stmt : WHERE')
    def p_update_where(self, p):
        p.lexer.append('WHERE', p[1])

    @_rule('update_stmt : RRULE')
    def p_update_rrule(self, p):
        p.lexer.append('RRULE', p[1])

    @_rule('update_stmt : RETURNING')
    def p_update_returning(self, p):
        p.lexer.append('RETURNING', p[1], 1)

    @_rule('update_stmt : WITH_VALUES')
    def p_update_with_values(self, p):
        p.lexer.append('WITH_VALUES', p[1])

    @_rule('''update_stmts : update_stmt update_stmts
                           | update_stmt''')
    def p_update_stmts(self, p):
        pass
41 |
--------------------------------------------------------------------------------
/windyquery/connection.py:
--------------------------------------------------------------------------------
1 | import asyncpg
2 |
3 |
class Connection:
    """The base class for DB connections: manages named asyncpg pools."""

    def __init__(self):
        # connection name -> asyncpg pool
        self.conn_pools = {}

    async def connect(self, connection_name, config, min_size=10, max_size=10, max_queries=50000, max_inactive_connection_lifetime=300.0, setup=None, init=None, loop=None, connection_class=asyncpg.connection.Connection, **connect_kwargs):
        """Create and register a pool under `connection_name`.

        `config` must provide 'username', 'password', 'host', 'port' and
        'database'. Raises UserWarning when the name is already in use.
        Returns the created pool.
        """
        from urllib.parse import quote  # stdlib; local import keeps module surface unchanged
        if connection_name in self.conn_pools:
            raise UserWarning(
                "connection: {} already exists".format(connection_name))

        # percent-encode credentials so usernames/passwords containing
        # URI-reserved characters (e.g. '@', ':', '/') still form a
        # valid DSN; plain alphanumerics are unaffected
        dsn = "postgresql://{}:{}@{}:{}/{}".format(
            quote(str(config['username']), safe=''),
            quote(str(config['password']), safe=''),
            config['host'],
            config['port'],
            config['database']
        )
        self.conn_pools[connection_name] = await asyncpg.create_pool(
            dsn=dsn,
            min_size=min_size,
            max_size=max_size,
            max_queries=max_queries,
            max_inactive_connection_lifetime=max_inactive_connection_lifetime,
            setup=setup,
            init=init,
            loop=loop,
            connection_class=connection_class,
            **connect_kwargs)
        return self.conn_pools[connection_name]

    async def disconnect(self, connection_name):
        """Close, terminate and forget the named pool.

        Raises UserWarning when the name is unknown.
        """
        if connection_name not in self.conn_pools:
            raise UserWarning(
                "connection: {} does not exist".format(connection_name))
        pool = self.conn_pools[connection_name]
        await pool.close()
        pool.terminate()
        del self.conn_pools[connection_name]

    async def stop(self):
        """Close and terminate every registered pool."""
        for _, pool in self.conn_pools.items():
            await pool.close()
            pool.terminate()
        # NOTE(review): 'default' is not initialized in __init__;
        # presumably a subclass tracks a default connection name here —
        # confirm before removing.
        self.default = ''
        self.conn_pools = {}

    def connection(self, connection_name):
        """Return the named pool; raises UserWarning when unknown."""
        if connection_name not in self.conn_pools:
            # fixed message grammar: was "does not exists", now matches disconnect()
            raise UserWarning(
                "connection: {} does not exist".format(connection_name))
        return self.conn_pools[connection_name]
56 |
--------------------------------------------------------------------------------
/windyquery/ctx.py:
--------------------------------------------------------------------------------
1 | from typing import List, Any
2 |
3 |
class Ctx:
    """Shared context for rendering one SQL statement.

    param_offset: 1-based index of the first placeholder ($N) this
        context will emit.
    args: bind values collected so far. When callers pass a list they
        keep a reference to it (the aliasing is relied upon elsewhere),
        so the argument is stored as-is when given.
    """
    param_offset: int = 1
    args: List[Any]

    def __init__(self, param_offset=1, args=None):
        self.param_offset = param_offset
        # Fresh list per instance: the previous mutable default ([])
        # was shared by every Ctx() constructed without an args list.
        self.args = [] if args is None else args
11 |
--------------------------------------------------------------------------------
/windyquery/exceptions.py:
--------------------------------------------------------------------------------
class RruleNoResults(Exception):
    """Raised when an rrule query yields no results."""
3 |
4 |
class ListenConnectionClosed(Exception):
    """Raised when the connection closes while listening for notifications."""
7 |
--------------------------------------------------------------------------------
/windyquery/listener.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from windyquery.exceptions import ListenConnectionClosed
4 |
5 |
class Listener:
    """Listens for Postgres NOTIFY messages on a single channel.

    Notifications are buffered as a chain of futures: the last entry of
    `results` is always a pending future that the next notification
    resolves; next() consumes from the front.
    """
    # upper bound on buffered, unconsumed notifications
    MAX_BUFFER_SIZE = 1000000

    def __init__(self, pool, channel):
        self.pool = pool
        self.channel = channel
        self.connection = None  # held only while actively listening
        self.results = []       # future chain (see class docstring)
        self.error = None       # deferred error re-raised by next()

    async def start(self):
        """Acquire a dedicated connection and subscribe to the channel.

        Raises UserWarning when already listening.
        """
        if self.connection is not None:
            raise UserWarning(f"already listening on channel: {self.channel}")
        self.connection = await self.pool.acquire()
        # seed the chain with one pending future for the first notification
        self.results = [asyncio.Future()]
        self.connection.add_termination_listener(self.handle_connection_closed)
        await self.connection.add_listener(self.channel, self.handle_notifications)

    async def stop(self):
        """Cancel pending futures, unsubscribe and release the connection."""
        try:
            for f in self.results:
                f.cancel()
            await self.connection.remove_listener(self.channel, self.handle_notifications)
        except:
            # the connection could be released, so ignore any errors
            pass
        finally:
            await self.pool.release(self.connection)
            self.connection = None
            self.results = []

    def handle_notifications(self, conn, pid, channel, payload):
        """asyncpg listener callback: resolve the tail future with the
        notification and append a fresh pending future."""
        if(len(self.results) >= self.MAX_BUFFER_SIZE):
            raise UserWarning(
                "too many unprocessed notifications: {}".format(self.channel))
        f = self.results[-1]
        self.results.append(asyncio.Future())
        f.set_result({
            'listener_pid': conn.get_server_pid(),
            'notifier_pid': pid,
            'channel': channel,
            'payload': payload
        })

    def handle_connection_closed(self, conn):
        """Termination callback: record the error and cancel all futures
        so any awaiting next() call wakes up and re-raises it."""
        self.error = ListenConnectionClosed(
            f'connection is closed when listening on {self.channel}.')
        for f in self.results:
            f.cancel()

    async def next(self):
        """Await and return the oldest buffered notification.

        Starts listening lazily on first use; returns None when there is
        nothing to wait on; re-raises a recorded connection-closed error
        in place of the CancelledError it caused.
        """
        if self.connection is None:
            await self.start()
        if(len(self.results) == 0):
            return None
        try:
            result = await self.results[0]
            self.results.pop(0)
            return result
        except:
            err = self.error
            self.error = None
            if err:
                raise err from None
            raise

    def __await__(self):
        # allow `await listener` as shorthand for `await listener.next()`
        return self.next().__await__()

    async def __aenter__(self):
        # async context manager: start on entry ...
        await self.start()
        return self

    async def __aexit__(self, exc_type, exc, tb):
        # ... and always stop on exit
        await self.stop()
82 |
--------------------------------------------------------------------------------
/windyquery/provider/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from windyquery.ctx import Ctx
4 | from .param import Param
5 | from .biop import Biop
6 | from .record import Record
7 | from .fieldlist import Fieldlist
8 | from .parentheses import Parentheses
9 | from .glue import Glue
10 |
11 |
class ValidationError(Exception):
    """Raised when a SQL fragment fails validation."""
14 |
15 |
class Provider:
    """Factory for SQL fragment objects that share one Ctx.

    Param placeholders created via new_param() are tracked so process()
    can assign them positional numbers once the expression is complete.
    """
    ctx: Ctx
    # annotation only: the previous class-level `= []` default was a
    # shared mutable; __init__ always sets a per-instance list
    params: List[Param]

    def __init__(self, ctx: Ctx):
        self.ctx = ctx
        self.params = []

    def process(self):
        """Number the collected params by position.

        Raises ValidationError when the placeholder count does not match
        the number of supplied values in ctx.args.
        """
        if len(self.ctx.args) != len(self.params):
            # message typo fixed: was "dose not match"
            raise ValidationError(
                f"Number of params does not match number of values: {self.params} {self.ctx.args}")
        for pos, i in enumerate(self.params):
            i.set_pos(pos)

    def new_param(self, prepend=False) -> Param:
        """Create a Param and register it (at the front when prepend=True)."""
        i = Param(self.ctx)
        if prepend:
            self.params.insert(0, i)
        else:
            self.params.append(i)
        return i

    def new_parentheses(self, wrapped) -> Parentheses:
        return Parentheses(self.ctx, wrapped)

    def new_record(self, value, kind=None) -> Record:
        return Record(self.ctx, value, kind)

    def new_biop(self, op, l, r) -> Biop:
        return Biop(self.ctx, op, l, r)

    def new_fieldlist(self, *items) -> Fieldlist:
        return Fieldlist(self.ctx, items)

    def new_glue(self, *items) -> Glue:
        return Glue(self.ctx, items)
53 |
--------------------------------------------------------------------------------
/windyquery/provider/_base.py:
--------------------------------------------------------------------------------
1 | from windyquery.ctx import Ctx
2 |
3 |
# Kind tags providers use to negotiate how a value is rendered/encoded:
PARAM = 'PARAM'          # a positional query placeholder ($N)
JSONB = 'JSONB'          # value is json-encoded when matched (see Param.match)
FIELDLIST = 'FIELDLIST'  # a parenthesized, comma-separated field list
7 |
8 |
class Base:
    """Root of the provider hierarchy: holds the shared Ctx plus a `kind`
    tag that subclasses overwrite (PARAM/JSONB/FIELDLIST)."""

    def __init__(self, ctx: Ctx):
        self.ctx = ctx
        self.kind = None

    def __str__(self):
        # the base provider renders to nothing
        return ''
16 |
--------------------------------------------------------------------------------
/windyquery/provider/biop.py:
--------------------------------------------------------------------------------
1 | from windyquery.ctx import Ctx
2 |
3 | from ._base import Base, PARAM, FIELDLIST
4 |
5 |
class Biop(Base):
    """A binary operation `l op r` over two provider operands."""

    def __init__(self, ctx: Ctx, op, l, r):
        super().__init__(ctx)
        self.op = op
        self.l = l
        self.r = r

    def __str__(self):
        left, right = self.l, self.r
        # let a param/fieldlist operand adapt its encoding to the other side
        if left.kind in (PARAM, FIELDLIST):
            left.match(right.kind)
        elif right.kind in (PARAM, FIELDLIST):
            right.match(left.kind)
        return f'{left} {self.op} {right}'
19 |
--------------------------------------------------------------------------------
/windyquery/provider/fieldlist.py:
--------------------------------------------------------------------------------
1 | from windyquery.ctx import Ctx
2 |
3 | from ._base import Base, PARAM, FIELDLIST
4 |
5 |
class Fieldlist(Base):
    """A parenthesized, comma-separated list of field providers."""

    def __init__(self, ctx: Ctx, items):
        super().__init__(ctx)
        self.kind = FIELDLIST
        self.fields = [*items]

    def __str__(self):
        rendered = ', '.join(str(field) for field in self.fields)
        return f'({rendered})'

    def append(self, i):
        self.fields += [i]
        return self

    def match(self, kind):
        # propagate the kind only to param members
        for field in self.fields:
            if field.kind == PARAM:
                field.match(kind)
        return self
24 |
--------------------------------------------------------------------------------
/windyquery/provider/glue.py:
--------------------------------------------------------------------------------
1 | from windyquery.ctx import Ctx
2 |
3 | from ._base import Base
4 |
5 |
class Glue(Base):
    """Joins child providers with a configurable separator (default space)."""

    def __init__(self, ctx: Ctx, items):
        super().__init__(ctx)
        # drop falsy entries up front
        self.items = [i for i in items if i]
        self._sep = ' '

    def __str__(self):
        return self._sep.join(str(i) for i in self.items)

    def append(self, i):
        """Add one item, skipping None; returns self for chaining."""
        if i is not None:
            self.items.append(i)
        return self

    def separator(self, s: str):
        """Use *s* instead of a space when joining; returns self."""
        self._sep = s
        return self
23 |
--------------------------------------------------------------------------------
/windyquery/provider/param.py:
--------------------------------------------------------------------------------
1 | import json
2 | from windyquery.ctx import Ctx
3 |
4 | from ._base import Base, PARAM, JSONB
5 |
6 |
class Param(Base):
    """A positional query parameter, rendered as ``$N``."""

    def __init__(self, ctx: Ctx):
        super().__init__(ctx)
        self.kind = PARAM
        self.pos = 0

    def __str__(self):
        # param_offset shifts placeholders when queries are combined
        return '$' + str(self.pos + self.ctx.param_offset)

    def set_pos(self, pos):
        """Record this parameter's position; returns self for chaining."""
        self.pos = pos
        return self

    def match(self, kind):
        """Adapt the bound argument to *kind*; returns self.

        JSONB targets need the python value serialized to a JSON string.
        """
        if kind == JSONB:
            self.ctx.args[self.pos] = json.dumps(self.ctx.args[self.pos])
        return self
25 |
--------------------------------------------------------------------------------
/windyquery/provider/parentheses.py:
--------------------------------------------------------------------------------
1 | from windyquery.ctx import Ctx
2 |
3 | from ._base import Base
4 |
5 |
class Parentheses(Base):
    """Renders its wrapped provider inside a pair of parentheses."""

    def __init__(self, ctx: Ctx, wrapped):
        super().__init__(ctx)
        self.wrapped = wrapped

    def __str__(self):
        return '(' + str(self.wrapped) + ')'
13 |
--------------------------------------------------------------------------------
/windyquery/provider/record.py:
--------------------------------------------------------------------------------
1 | from windyquery.ctx import Ctx
2 |
3 | from ._base import Base
4 |
5 |
class Record(Base):
    """A literal value carrying an explicit kind tag."""

    def __init__(self, ctx: Ctx, value, kind):
        super().__init__(ctx)
        self.kind = kind
        self.value = value

    def __str__(self):
        return str(self.value)
14 |
--------------------------------------------------------------------------------
/windyquery/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import asyncio
3 | from urllib.parse import quote_plus
4 | import fire
5 |
6 | from .migration import make_migration
7 | from .migration import init_db
8 | from .migration import ensure_migrations_table
9 | from .migration import migrate
10 |
11 |
class Scripts:
    """the command line interface included with windyquery"""

    default_migrations_dir = ['database', 'migrations']

    def get_migrations_dir(self, migrations_dir):
        """Split a user-supplied '/'-separated path into components,
        falling back to the default migrations directory."""
        if not migrations_dir:
            return self.default_migrations_dir
        return migrations_dir.split('/')

    def example(self):
        """show usage examples"""
        usages = [
            ('make an empty migration file',
             'wq make_migration --name=create_my_table'),
            ('make a migration file with templates for "create table"',
             'wq make_migration --name=create_my_table --template="create table"'),
            ('make a migration file with all available templates',
             'wq make_migration --name=create_my_table --template=all'),
            ('run outstanding migrations',
             'wq migrate --host=localhost --port=5432 --database=my-db --username=my-name --password=my-pass'),
            ('run outstanding migrations (DB configured by environment variables)',
             'DB_HOST=localhost DB_PORT=5432 DB_DATABASE=my-db DB_USERNAME=my-name DB_PASSWORD=my-pass wq migrate'),
            ('make a migration file in a custom directory',
             'wq make_migration --name=create_my_table --migrations_dir="my_db_work/migrations"'),
            ('run outstanding migrations in a custom directory',
             'wq migrate --host=localhost --port=5432 --database=my-db --username=my-name --password=my-pass --migrations_dir="my_db_work/migrations"'),
            ('run outstanding migrations and store finished ones in a custom migrations table',
             'wq migrate --host=localhost --port=5432 --database=my-db --username=my-name --password=my-pass --migrations_table=my_migrations'),
        ]
        for idx, (title, cmd) in enumerate(usages):
            # every section after the first is preceded by a blank line
            prefix = '' if idx == 0 else '\n'
            print(f'{prefix}---------- {title} ----------')
            print(cmd)

    def make_migration(self, name, template=None, migrations_dir=None):
        """generate a database migration file"""
        target_dir = self.get_migrations_dir(migrations_dir)
        return make_migration(name, template, target_dir)

    def migrate(self, host=None, port=None, database=None, username=None, password=None, migrations_dir=None, migrations_table=None, loop=None):
        """run all of the outstanding migrations"""
        # CLI flags win; fall back to the DB_* environment variables
        host = host or os.getenv('DB_HOST')
        port = port or os.getenv('DB_PORT')
        database = database or os.getenv('DB_DATABASE')
        username = username or os.getenv('DB_USERNAME')
        password = password or os.getenv('DB_PASSWORD')
        migrations_table = migrations_table or 'migrations'

        async def run():
            db = await init_db(quote_plus(host), port, quote_plus(database),
                               quote_plus(username), quote_plus(password))
            await ensure_migrations_table(db, migrations_table)
            result = await migrate(
                db, self.get_migrations_dir(migrations_dir), migrations_table)
            await db.stop()
            return result

        if loop:
            return loop.run_until_complete(run())
        return asyncio.run(run())
66 |
67 |
def main():
    """Console-script entry point: expose the Scripts CLI via python-fire."""
    fire.Fire(Scripts)
70 |
--------------------------------------------------------------------------------
/windyquery/scripts/migration.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import re
4 | import glob
5 | import os
6 | import warnings
7 | from pathlib import Path
8 |
9 | from windyquery import DB
10 | from .migration_templates import templates
11 |
12 |
def make_migration(name, template_search, migrations_dir):
    """Generate a timestamped migration file under *migrations_dir*.

    name: base name for the migration (a trailing '.py' is ignored).
    template_search: falsy for an empty migration, 'all' for every
        template, or a substring used to select matching templates.
    migrations_dir: list of path components of the target directory
        (created if it does not exist).

    Returns -1 when *name* is missing, otherwise None.
    """
    if not name:
        print("name is required - usage: wq make_migration --name=create_my_table")
        return -1

    # search the templates that match the user search str
    _templates = {}
    if template_search:
        if template_search == 'all':
            _templates = templates
        else:
            _templates = dict(
                filter(lambda item: template_search in item[0], templates.items()))
    # create the migration dir if it does not exist
    Path(os.path.join(*migrations_dir)).mkdir(parents=True, exist_ok=True)
    prefix = time.strftime("%Y%m%d%H%M%S")
    # strip only a trailing '.py' — str.replace would also mangle a '.py'
    # occurring in the middle of the name
    if name.endswith(".py"):
        name = name[:-3]
    name = prefix + "_" + name + ".py"
    full_name = os.path.join(*migrations_dir, name)
    if len(_templates) > 0:
        templateStr = '\n'.join(
            f'    # ------ {desc} ------{code}' for desc, code in _templates.items())
    else:
        templateStr = '    pass'
    with open(full_name, 'w', encoding='utf-8') as fp:
        fp.write(f'async def run(db):\n{templateStr}')
    print("[created] %s" % (full_name))
39 |
40 |
async def init_db(host, port, database, username, password):
    """Create a DB instance connected to the given postgres server."""
    config = {
        'host': host,
        'port': port,
        'database': database,
        'username': username,
        'password': password,
    }
    db = DB()
    # a single-connection pool is enough for running migrations
    await db.connect(database, config, default=True, min_size=1, max_size=1)
    return db
51 |
52 |
async def ensure_migrations_table(db, migrations_table):
    """Create the bookkeeping table for applied migrations if missing."""
    schema = db.schema(f'TABLE IF NOT EXISTS {migrations_table}')
    return await schema.create(
        'id bigserial PRIMARY KEY',
        'name text not null',
    )
58 |
59 |
async def migrate(db, migrations_dir, migrations_table):
    """Run every migration file newer than the last applied one.

    Migration files are named ``YYYYMMDDHHMMSS_<name>.py`` and must define
    an ``async def run(db)`` coroutine.  Each successfully applied file is
    recorded in *migrations_table* with its timestamp as the id.
    """
    # find the last applied migration id if it exists
    results = await db.table(migrations_table).select('*').order_by('id DESC').limit(1)
    latest_migration = results[0] if len(results) > 0 else None
    latest_tm = None
    if latest_migration and len(latest_migration) > 0:
        latest_tm = latest_migration['id']

    # find all file names under the migrations_dir
    files = glob.glob(os.path.join(
        *migrations_dir, '20[1-9][0-9][0-1][0-9][0-3][0-9][0-2][0-9][0-5][0-9][0-5][0-9]_*.py'))

    # collect the ones that have not been applied yet
    mm_re = re.compile(r'.*(\d{14}).*')
    pending_migrations = {}
    for f in files:
        match = mm_re.match(f)
        if match:
            tm = match.group(1)
            if latest_tm is None or int(tm) > latest_tm:
                pos = f.find(tm)
                mod_name = "." + f[pos:].replace('.py', '')
                pending_migrations[mod_name] = int(tm)
        else:
            warnings.warn('unrecognized migration file {}'.format(f))

    # apply pending migrations in timestamp order
    for mod_name in sorted(pending_migrations):
        src = f'{mod_name[1:]}.py'
        # Execute the migration file in its own namespace.  The previous
        # exec(fp.read()) + locals()['run'] relied on exec() writing into
        # the function's frame locals, which is CPython-specific and no
        # longer works under PEP 667 (Python 3.13+).
        namespace = {}
        with open(os.path.join(*migrations_dir, src), 'r', encoding='utf-8') as fp:
            exec(fp.read(), namespace)
        await namespace['run'](db)
        await db.table(migrations_table).insert(
            {
                'id': pending_migrations[mod_name],
                'name': mod_name[16:]  # strip the leading '.YYYYMMDDHHMMSS_'
            })
        print("[finished] %s" % (mod_name[1:]))

    return "migration finished successfully."
100 |
--------------------------------------------------------------------------------
/windyquery/scripts/migration_templates.py:
--------------------------------------------------------------------------------
# Mapping of human-readable template descriptions to Python snippets that
# make_migration() pastes into the body of a generated ``async def run(db)``.
# Keys are matched by substring against the --template CLI argument.
# NOTE(review): the 'drop trigger funtion' key is misspelled ('function');
# renaming it would change which --template searches match, so it is only
# flagged here — confirm before fixing.
templates = {
'create table': '''
await db.schema('TABLE IF NOT EXISTS users').create(
'id serial PRIMARY KEY',
'group_id integer references groups (id) ON DELETE CASCADE',
'team_id integer DEFAULT 0',
'created_at timestamp not null DEFAULT NOW()',
'deleted_at timestamptz DEFAULT NOW()',
'email text not null unique',
'is_admin boolean not null default false',
'address jsonb',
'num_of_pets integer CHECK (num_of_pets >= 0) DEFAULT 0',
'payday integer not null',
'salary smallint CONSTRAINT salarynullable NULL CONSTRAINT salarypositive CHECK (salary > 0)',
'department_id integer references departments (id) ON DELETE CASCADE',
'FOREIGN KEY (team_id) REFERENCES teams (id) ON DELETE SET DEFAULT',
'CONSTRAINT unique_email UNIQUE(group_id, email)',
'UNIQUE(group_id, email)',
'check(payday > 0 and payday < 8)',
)
''',
'create table like another table': '''
await db.schema('TABLE accounts').create(
'like users'
)
''',
'alter table': '''
await db.schema('TABLE users').alter(
'alter id TYPE bigint',
'alter name SET DEFAULT \\'no_name\\'',
'alter COLUMN address DROP DEFAULT',
'alter "user info" SET NOT NULL',
'add CONSTRAINT check(payday > 1 and payday < 6)',
'add UNIQUE(name, email) WITH (fillfactor=70)',
'add FOREIGN KEY (group_id) REFERENCES groups (id) ON DELETE SET NULL',
'drop CONSTRAINT IF EXISTS idx_email CASCADE',
)
''',
'add column': '''
await db.schema('TABLE users').alter('ADD COLUMN address text')
''',
'drop column': '''
await db.schema('TABLE users').alter('DROP address')
''',
'create index ON table (column1, column2)': '''
await db.schema('INDEX idx_email ON users').create('name', 'email')
''',
'create unique index ON table (column1) WHERE condition': '''
await db.schema('UNIQUE INDEX unique_name ON users').create('name',).where('soft_deleted', False)
''',
'drop index': '''
await db.schema('INDEX idx_email').drop('CASCADE')
''',
'rename table': '''
await db.schema('TABLE users').alter('RENAME TO accounts')
''',
'rename column': '''
await db.schema('TABLE users').alter('RENAME email TO email_address')
''',
'rename constraint': '''
await db.schema('TABLE users').alter('RENAME CONSTRAINT idx_name TO index_name')
''',
'drop table': '''
await db.schema('TABLE users').drop()
''',
'create table with raw': '''
await db.raw("""
CREATE TABLE users(
id INT NOT NULL,
created_at DATE NOT NULL,
first_name VARCHAR(100) NOT NULL,
last_name VARCHAR(100) NOT NULL,
birthday_mmddyyyy CHAR(10) NOT NULL,
)
""")
''',
'create trigger function': '''
await db.raw(r"""CREATE OR REPLACE FUNCTION users_changed() RETURNS trigger LANGUAGE 'plpgsql' AS
$define$
BEGIN
PERFORM pg_notify('users', 'changed');
RETURN NULL;
END;
$define$""")
''',
'create trigger': '''
await db.raw("""
CREATE TRIGGER users_changed_trigger
AFTER INSERT OR UPDATE OF name, email, address ON users
FOR EACH STATEMENT
EXECUTE PROCEDURE users_changed();
""")
''',
'drop trigger': '''
await db.raw("DROP TRIGGER users_changed_trigger ON users")
''',
'drop trigger funtion': '''
await db.raw("DROP FUNCTION users_changed")
''',
'test_create table': '''
await db.schema('TABLE test_tmp_users').create(
'id serial PRIMARY KEY',
'name text not null unique',
)
''',
'test_drop table': '''
await db.schema('TABLE test_tmp_users').drop()
''',
}
110 |
--------------------------------------------------------------------------------
/windyquery/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/windyquery/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import pytest
3 |
4 | from windyquery import DB
5 |
6 |
class Config:
    """Connection settings for the windyquery test database.

    The conftest fixtures read these class attributes directly; no
    instance is ever created.
    """
    DB_HOST = "localhost"
    DB_PORT = "5432"
    DB_USER = "windyquery-test"
    DB_PASS = "windyquery-test"
    DB_TEST = "windyquery-test"
13 |
14 |
@pytest.fixture(scope="module")
def config():
    """Module-scoped fixture exposing the test DB settings class."""
    yield Config
18 |
19 |
@pytest.fixture(scope="module")
def db(config):
    """Module-scoped DB connected to the test database.

    The pool is opened before a module's tests run and stopped again
    afterwards.  NOTE(review): relies on asyncio.get_event_loop()
    creating/reusing a loop outside a running loop, which is deprecated
    since Python 3.10 — confirm the supported Python versions.
    """
    app_db = DB()

    async def init_db():
        # register the pool under the name 'db_test' and make it the default
        return await app_db.connect('db_test', {
            'host': config.DB_HOST,
            'port': config.DB_PORT,
            'database': config.DB_TEST,
            'username': config.DB_USER,
            'password': config.DB_PASS
        }, default=True)
    asyncio.get_event_loop().run_until_complete(init_db())
    yield app_db
    asyncio.get_event_loop().run_until_complete(app_db.stop())
35 |
--------------------------------------------------------------------------------
/windyquery/tests/seed_test_data.sql:
--------------------------------------------------------------------------------
-- Seed data for the windyquery test suite: every table/row below is
-- assumed by fixtures and assertions in windyquery/tests/.

CREATE TABLE test ("id" serial PRIMARY KEY, "name" VARCHAR(255));
INSERT INTO test ("name") VALUES ('test');

-- jsonb playground used by the select/where/json tests
CREATE TABLE cards (id serial PRIMARY KEY, board_id INTEGER, "data" JSONB);
INSERT INTO cards ("board_id", "data") VALUES
(2, '{ "name": "Wash dishes", "tags":["Clean", "Kitchen"], "finished":false }'::jsonb),
(7, '{ "name": "Cook lunch", "tags":["Cook", "Kitchen", "Tacos"] }'::jsonb),
(1, '{}'::jsonb),
(2, '{ "name": "Hang paintings", "tags":["Improvements", "Office"], "finished":true }'::jsonb),
(3, '{}'::jsonb),
(9, '{ "address": { "city": "Chicago" } }'::jsonb),
(5, '{ "skill": { "java": "good" } }'::jsonb),
(11, '[{"a":"foo"},{"b":"bar"},{"c":"baz"}]'::jsonb),
(21, '{}'::jsonb);


CREATE TABLE users ("id" serial PRIMARY KEY, "email" VARCHAR(255), "password" VARCHAR(255), "admin" BOOLEAN);
INSERT INTO users ("email", "password", "admin") VALUES
('test@example.com', 'mypass', 'true'),
('test2@example.com', 'mypass2', 'false'),
('test3@example.com', 'mypass3', 'false'),
('secret@example.com', 'secret', 'false'),
('secret@example.com', 'secret', 'false');


CREATE TABLE boards ("id" serial PRIMARY KEY, "user_id" INTEGER, "location" VARCHAR(255));
INSERT INTO boards ("user_id", "location") VALUES
(1, 'southwest'),
(2, 'dining room'),
(3, 'south door');


CREATE TABLE country ("numeric_code" INTEGER PRIMARY KEY, "name" VARCHAR(255), "alpha2" CHAR(2));

-- cards_copy starts without a primary key: test_alter_primary_key adds one
CREATE TABLE cards_copy (id INTEGER, board_id INTEGER);

-- trigger + function pair exercised by the listener tests
CREATE OR REPLACE FUNCTION cards_after_insert() RETURNS trigger LANGUAGE 'plpgsql' AS
$$
BEGIN
PERFORM (SELECT pg_notify('cards', 'after insert'));
RETURN NEW;
END;
$$;

CREATE TRIGGER cards_trigger AFTER INSERT ON cards FOR EACH ROW EXECUTE PROCEDURE cards_after_insert();

CREATE TABLE students ("id" serial PRIMARY KEY, "firstname" TEXT, "lastname" TEXT);

CREATE TABLE tasks ("id" serial PRIMARY KEY, "name" TEXT);
INSERT INTO tasks ("id", "name") VALUES
(1, 'tax return'),
(2, 'pick up kids'),
(3, 'Tom task'),
(4, 'Jerry task');

CREATE TABLE task_results ("id" serial PRIMARY KEY, "created_at" timestamp with time zone, "task_id" INTEGER, "result" TEXT);
INSERT INTO task_results ("id", "task_id", "result") VALUES
(1, 1, ''),
(2, 2, '');

CREATE TABLE tasks_uuid_pkey ("id" uuid PRIMARY KEY DEFAULT gen_random_uuid(), "name" TEXT);

CREATE SCHEMA test1;
64 |
--------------------------------------------------------------------------------
/windyquery/tests/test_alter_table.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import pytest
3 | import asyncpg
4 |
5 | from windyquery import DB
6 |
7 |
# Shared event loop for driving the async DB calls from synchronous tests.
# NOTE(review): asyncio.get_event_loop() outside a running loop is
# deprecated since Python 3.10.
loop = asyncio.get_event_loop()
9 |
10 |
def test_add_column(db: DB):
    """ADD COLUMN with a default backfills new rows; DROP COLUMN removes it."""
    async def alter_users(stmt):
        return await db.schema('TABLE users').alter(stmt)

    loop.run_until_complete(alter_users(
        "ADD COLUMN test_col varchar(50) NOT NULL DEFAULT 'test_col_default'"))
    rows = loop.run_until_complete(db.table('users').insert({
        'email': 'test@test.com',
        'password': 'my precious'
    }).returning())
    assert rows[0]['test_col'] == 'test_col_default'

    loop.run_until_complete(alter_users('drop COLUMN test_col'))
    rows = loop.run_until_complete(db.table('users').select())
    assert not hasattr(rows[0], 'test_col')
32 |
33 |
def test_alter_unique_index(db: DB):
    """A unique index rejects duplicate (user_id, location) rows."""
    indexname = 'boards_user_id_location_idx'

    loop.run_until_complete(
        db.schema(f'UNIQUE INDEX {indexname} ON boards').create(
            'user_id', 'location'))
    row = {'user_id': 1399, 'location': 'northwest'}
    loop.run_until_complete(db.table('boards').insert(row))

    # inserting the same row again must violate the unique constraint
    with pytest.raises(asyncpg.exceptions.UniqueViolationError) as excinfo:
        loop.run_until_complete(db.table('boards').insert(row))
    assert type(excinfo.value) is asyncpg.exceptions.UniqueViolationError

    # clean up: remove the inserted row, then the index
    loop.run_until_complete(db.table('boards').where('user_id', 1399).delete())
    loop.run_until_complete(db.schema(f'INDEX {indexname}').drop())
55 |
56 |
def test_alter_index(db: DB):
    """Creating an index makes it visible in pg_indexes; drop removes it."""
    indexname = 'boards_user_id_location_idx'

    loop.run_until_complete(
        db.schema(f'UNIQUE INDEX {indexname} ON boards').create(
            'user_id', 'location'))
    rows = loop.run_until_complete(
        db.table('pg_indexes').select().where('indexname', indexname))
    assert len(rows) == 1
    loop.run_until_complete(db.schema(f'INDEX {indexname}').drop())
73 |
74 |
def test_alter_primary_key(db: DB):
    """ADD/DROP CONSTRAINT manages a composite primary key on cards_copy."""
    pkname = 'id_board_id_pky'

    def indexes():
        return loop.run_until_complete(
            db.table('pg_indexes').select().where('tablename', 'cards_copy'))

    # add composite pkey (id, board_id) and verify it shows in pg_indexes
    loop.run_until_complete(db.schema('TABLE cards_copy').alter(
        f'add CONSTRAINT {pkname} PRIMARY KEY (id, board_id)',
    ))
    rows = indexes()
    assert rows[0]['indexdef'].find('(id, board_id)') != -1

    # drop it again and verify it is gone
    loop.run_until_complete(db.schema('TABLE cards_copy').alter(
        f'DROP CONSTRAINT {pkname}'
    ))
    rows = indexes()
    assert len(rows) == 0
99 |
--------------------------------------------------------------------------------
/windyquery/tests/test_create_table.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from windyquery import DB
4 |
# Shared event loop for driving the async DB calls from synchronous tests.
# NOTE(review): asyncio.get_event_loop() outside a running loop is
# deprecated since Python 3.10.
loop = asyncio.get_event_loop()
6 |
7 |
def test_create_user_tmp(db: DB):
    """CREATE TABLE yields columns with the requested types and defaults."""
    loop.run_until_complete(db.schema('TABLE users_tmp').create(
        'id serial PRIMARY KEY',
        'email text not null unique',
        'password text not null',
        'registered_on timestamp not null DEFAULT NOW()',
        'admin boolean not null default false',
    ))
    rows = loop.run_until_complete(db.table('information_schema.columns').select(
        'column_name', 'column_default', 'is_nullable', 'data_type'
    ).where('table_schema', 'public').where('table_name', 'users_tmp'))
    loop.run_until_complete(db.schema('TABLE users_tmp').drop())

    # verify each column: (name, default, nullable, type);
    # the serial id gets a nextval(...) default, checked separately
    assert len(rows) == 5
    assert rows[0]['column_default'].find('nextval') != -1
    expected = [
        ('id', None, 'NO', 'integer'),
        ('email', None, 'NO', 'text'),
        ('password', None, 'NO', 'text'),
        ('registered_on', 'now()', 'NO', 'timestamp without time zone'),
        ('admin', 'false', 'NO', 'boolean'),
    ]
    for row, (name, default, nullable, dtype) in zip(rows, expected):
        assert row['column_name'] == name
        if name != 'id':
            assert row['column_default'] == default
        assert row['is_nullable'] == nullable
        assert row['data_type'] == dtype
48 | assert rows[4]['data_type'] == 'boolean'
49 |
50 |
def test_create_unique_index(db: DB):
    """A named CONSTRAINT ... UNIQUE(...) produces a unique index."""
    uniqueIdx = 'users_tmp_user_id_name_key'

    loop.run_until_complete(db.schema('TABLE users_tmp').create(
        'id serial PRIMARY KEY',
        'name text not null',
        'user_id integer not null',
        f'CONSTRAINT {uniqueIdx} UNIQUE(user_id, name)',
        'created_at timestamp not null DEFAULT NOW()',
        'updated_at timestamp not null DEFAULT NOW()',
        'deleted_at timestamp null',
    ))
    rows = loop.run_until_complete(db.table('pg_indexes').select().where(
        'indexname', uniqueIdx))
    loop.run_until_complete(db.schema('TABLE users_tmp').drop())

    indexdef = rows[0]['indexdef']
    assert indexdef.find('UNIQUE INDEX') != -1
    assert indexdef.find('(user_id, name)') != -1
73 | assert rows[0]['indexdef'].find('(user_id, name)') != -1
74 |
75 |
def test_create_primary_key(db: DB):
    """PRIMARY KEY(...) as a table constraint creates a composite pkey."""
    loop.run_until_complete(db.schema('TABLE users_tmp').create(
        'name text not null',
        'email text not null',
        'PRIMARY KEY(name, email)',
        'password text not null',
        'registered_on timestamp not null DEFAULT NOW()',
        'admin boolean not null default false',
    ))
    rows = loop.run_until_complete(db.table('pg_indexes').select().where(
        'indexname', 'users_tmp_pkey'))
    loop.run_until_complete(db.schema('TABLE users_tmp').drop())
    assert rows[0]['indexdef'].find('(name, email)') != -1
95 |
96 |
def test_create_index_key(db: DB):
    """A named unique constraint shows up under its given index name."""
    indexName = 'users_tmp_name_email_idx'

    loop.run_until_complete(db.schema('TABLE users_tmp').create(
        'name text not null',
        'email text not null',
        f'CONSTRAINT {indexName} UNIQUE(name, email)',
        'password text not null',
        'registered_on timestamp not null DEFAULT NOW()',
        'admin boolean not null default false',
    ))
    rows = loop.run_until_complete(db.table('pg_indexes').select().where(
        'indexname', indexName))
    loop.run_until_complete(db.schema('TABLE users_tmp').drop())
    assert rows[0]['indexdef'].find('(name, email)') != -1
118 |
119 |
def test_drop_nonexists(db: DB):
    """DROP TABLE IF EXISTS removes the table without raising."""
    def users_tmp_columns():
        return loop.run_until_complete(db.table('information_schema.columns').select(
            'column_name').where('table_schema', 'public').where('table_name', 'users_tmp'))

    # create a simple table and test DROP on it
    loop.run_until_complete(
        db.schema('TABLE users_tmp').create('name text not null'))
    rows_before = users_tmp_columns()
    loop.run_until_complete(db.schema('TABLE IF EXISTS users_tmp').drop())
    rows_after = users_tmp_columns()
    assert len(rows_before) == 1
    assert len(rows_after) == 0
131 |
132 |
def test_create_jsonb(db: DB):
    """A jsonb column is reported with data_type 'jsonb'."""
    loop.run_until_complete(db.schema('TABLE cards_tmp').create(
        'id integer not null',
        'board_id integer not null',
        'data jsonb',
    ))
    rows = loop.run_until_complete(db.table('information_schema.columns').select(
        'data_type').where('table_schema', 'public').where('table_name', 'cards_tmp'))
    loop.run_until_complete(db.schema('TABLE cards_tmp').drop())
    assert len(rows) == 3
    assert rows[2]['data_type'] == 'jsonb'
150 |
151 |
def test_create_default_function(db: DB):
    """DEFAULT gen_random_uuid() survives as the column default."""
    loop.run_until_complete(db.schema('TABLE users_uuid_pkey').create(
        'id uuid PRIMARY KEY DEFAULT gen_random_uuid()',
    ))
    rows = loop.run_until_complete(db.table('information_schema.columns').select(
        'column_name', 'column_default', 'is_nullable', 'data_type'
    ).where('table_schema', 'public').where('table_name', 'users_uuid_pkey'))
    loop.run_until_complete(db.schema('TABLE users_uuid_pkey').drop())

    # a single uuid column carrying the function default
    assert len(rows) == 1
    assert rows[0]['column_name'] == 'id'
    assert rows[0]['data_type'] == 'uuid'
    assert rows[0]['column_default'] == 'gen_random_uuid()'
172 |
--------------------------------------------------------------------------------
/windyquery/tests/test_delete.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from windyquery import DB
4 |
# Shared event loop for driving the async DB calls from synchronous tests.
# NOTE(review): asyncio.get_event_loop() outside a running loop is
# deprecated since Python 3.10.
loop = asyncio.get_event_loop()
6 |
7 |
def test_delete(db: DB):
    """Insert a user, delete it by id, and verify it is gone."""
    inserted = loop.run_until_complete(db.table('users').insert(
        {'email': 'test@test.com', 'password': 'test pass'}).returning())
    assert inserted[0]['email'] == 'test@test.com'
    user_id = inserted[0]['id']
    loop.run_until_complete(db.table('users').where('id', user_id).delete())
    remaining = loop.run_until_complete(
        db.table('users').select().where('id', user_id))
    assert len(remaining) == 0
17 |
--------------------------------------------------------------------------------
/windyquery/tests/test_group_by.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from windyquery import DB
4 |
# Shared event loop for driving the async DB calls from synchronous tests.
# NOTE(review): asyncio.get_event_loop() outside a running loop is
# deprecated since Python 3.10.
loop = asyncio.get_event_loop()
6 |
7 |
def test_group_by(db: DB):
    """GROUP BY collapses duplicate board_id rows into one row each."""
    loop.run_until_complete(db.schema('TABLE cards_tmp').create(
        'id serial PRIMARY KEY',
        'board_id integer not null',
    ))
    loop.run_until_complete(db.table('cards_tmp').insert(
        {'board_id': 1},
        {'board_id': 1},
        {'board_id': 2},
    ))
    rows = loop.run_until_complete(
        db.table('cards_tmp').select('board_id').group_by('board_id'))
    loop.run_until_complete(db.schema('TABLE cards_tmp').drop())

    assert len(rows) == 2
    rows.sort(key=lambda r: r['board_id'])
    assert [r['board_id'] for r in rows] == [1, 2]
31 |
--------------------------------------------------------------------------------
/windyquery/tests/test_insert.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import string
3 | import random
4 | import json
5 | import datetime
6 | import asyncpg
7 |
8 | from windyquery import DB
9 |
# Shared event loop for driving the async DB calls from synchronous tests.
# NOTE(review): asyncio.get_event_loop() outside a running loop is
# deprecated since Python 3.10.
loop = asyncio.get_event_loop()
11 |
12 |
def test_insert_user(db: DB):
    """Two rows inserted in one statement are both retrievable by email."""
    def random_email():
        return ''.join(random.choice(string.ascii_letters) for _ in range(6))

    email1, email2 = random_email(), random_email()
    loop.run_until_complete(db.table('users').insert(
        {'email': email1, 'password': 'my precious'},
        {'email': email2, 'password': 'my precious'}
    ))
    rows1 = loop.run_until_complete(
        db.table('users').select().where('email', email1))
    rows2 = loop.run_until_complete(
        db.table('users').select().where('email', email2))
    # clean up both rows before asserting
    for email in (email1, email2):
        loop.run_until_complete(db.table('users').where('email', email).delete())
    assert rows1[0]['email'] == email1
    assert rows2[0]['email'] == email2
32 |
33 |
def test_insert_jsonb(db: DB):
    """A dict value is stored as jsonb and queryable via data->>name."""
    test_id = random.randint(10000, 90000)
    loop.run_until_complete(db.table('cards').insert({
        'id': test_id,
        'board_id': random.randint(1, 100),
        'data': {'name': f'I am {test_id}', 'address': {'city': 'Chicago', 'state': 'IL'}}
    }))
    rows = loop.run_until_complete(
        db.table('cards').select('data->>name AS name').where('id', test_id))
    loop.run_until_complete(db.table('cards').where('id', test_id).delete())
    assert rows[0]['name'] == f'I am {test_id}'
50 |
51 |
def test_insert1(db: DB):
    """Chained insert() calls accumulate rows; returning() aliases work."""
    rows = loop.run_until_complete(db.table('users').insert({
        'email': 'new_insert@gmail.com',
        'password': 'pwdxxxxx',
        'admin': None
    }, {
        'email': 'new_insert2@gmail.com',
        'password': 'pwdxxxxx2',
        'admin': 'DEFAULT'
    }).insert({
        'email': 'new_insert3@gmail.com',
        'password': 'pwdxxx3',
        'admin': 'DEFAULT'
    }).returning('id', 'email e'))
    assert len(rows) == 3
    assert rows[0]['e'] == 'new_insert@gmail.com'
    assert rows[1]['e'] == 'new_insert2@gmail.com'
    assert rows[2]['e'] == 'new_insert3@gmail.com'
    # clean up all three inserted rows
    for row in rows:
        loop.run_until_complete(
            db.table('users').where('id', row['id']).delete())
79 |
80 |
def test_insert2(db: DB):
    """Insert a nested dict into a JSONB column; a bare returning()
    returns every column of the new row."""
    async def insert_fn():
        result = await db.table('cards').insert({
            'board_id': 2,
            'data': {
                'address': {
                    'city': 'insert Chicago'
                }
            }
        }).returning()
        return result
    rows = loop.run_until_complete(insert_fn())
    assert len(rows) == 1
    assert rows[0]['board_id'] == 2
    # the JSONB column comes back as a JSON string; decode to inspect it
    data = json.loads(rows[0]['data'])
    assert data['address']['city'] == 'insert Chicago'
    cardId = rows[0]['id']
    # cleanup; delete().returning() reports the removed row
    rows = loop.run_until_complete(
        db.table('cards').where('id', rows[0]['id']).delete().returning())
    assert len(rows) == 1
    assert rows[0]['id'] == cardId
102 |
103 |
def test_insert3(db: DB):
    """Exercise ON CONFLICT handling: DO NOTHING skips the duplicate row,
    DO UPDATE rewrites it (using EXCLUDED and a table alias)."""
    insertId = 1000
    email = 'email1000@test.com'

    # first insert a record
    async def insert_fn():
        results = await db.table('users').insert({
            'id': insertId,
            'email': email,
            'password': 'pwd',
            'admin': None
        }).returning('id', 'email')
        return results
    rows = loop.run_until_complete(insert_fn())
    assert len(rows) == 1
    assert rows[0]['email'] == email

    # insert the same record with ON CONFLICT DO NOTHING
    async def insert_fn2():
        results = await db.table('users').insert({
            'id': insertId,
            'email': f'{email} x2',
            'password': 'pwd',
            'admin': None
        }).on_conflict('(id)', 'DO NOTHING').returning('id', 'email')
        return results
    rows = loop.run_until_complete(insert_fn2())
    # nothing inserted, so nothing returned
    assert len(rows) == 0

    # insert the same record with ON CONFLICT DO UPDATE
    async def insert_fn3():
        results = await db.table('users AS u').insert({
            'id': insertId,
            'email': f'{email} x3',
            'password': 'pwd',
            'admin': None
        }).on_conflict('ON CONSTRAINT users_pkey', "DO UPDATE SET email = EXCLUDED.email || ' (formerly ' || u.email || ')'").\
            returning('id', 'email')
        return results
    rows = loop.run_until_complete(insert_fn3())
    assert len(rows) == 1
    assert rows[0]['email'] == f'{email} x3 (formerly {email})'

    # cleanup
    rows = loop.run_until_complete(
        db.table('users').where('id', insertId).delete().returning())
    assert len(rows) == 1
    assert rows[0]['id'] == insertId
151 |
152 |
def test_insert_datetime(db: DB):
    """A timezone-aware datetime survives an insert/select round trip intact."""
    createdAt = datetime.datetime(
        2021, 3, 8, 23, 50, tzinfo=datetime.timezone.utc)

    async def insert_fn():
        results = await db.table('task_results').insert({
            'task_id': 100,
            'created_at': createdAt,
        }).returning('id')
        return results

    rows = loop.run_until_complete(insert_fn())
    assert len(rows) == 1
    insertedId = rows[0]['id']
    rows = loop.run_until_complete(
        db.table('task_results').where('id', rows[0]['id']).select())
    assert len(rows) == 1
    # the stored value compares equal to the aware datetime that was written
    assert rows[0]['created_at'] == createdAt
    # cleanup
    rows = loop.run_until_complete(
        db.table('task_results').where('id', rows[0]['id']).delete().returning())
    assert len(rows) == 1
    assert rows[0]['id'] == insertedId
175 |
176 |
def test_insert_newline(db: DB):
    """Embedded newlines in a text value must survive an insert round trip."""
    createdAt = datetime.datetime(
        2021, 3, 8, 23, 50, tzinfo=datetime.timezone.utc)

    async def _insert():
        lines = ['started', 'finished']
        return await db.table('task_results').insert({
            'task_id': 100,
            'created_at': createdAt,
            'result': '\n'.join(lines)
        }).returning('id')

    rows = loop.run_until_complete(_insert())
    assert len(rows) == 1
    new_id = rows[0]['id']
    rows = loop.run_until_complete(
        db.table('task_results').where('id', new_id).select())
    assert len(rows) == 1
    assert '\n' in rows[0]['result']
    rows = loop.run_until_complete(
        db.table('task_results').where('id', new_id).delete().returning())
    assert len(rows) == 1
    assert rows[0]['id'] == new_id
201 |
202 |
def test_insert_uuid(db: DB):
    """A uuid primary key is generated when omitted, and an explicit uuid
    value can be re-inserted after the original row is deleted."""
    async def insert_fn():
        results = await db.table('tasks_uuid_pkey').insert({
            'name': 'test'
        }).returning('id')
        return results

    async def insert_fn2(rid):
        results = await db.table('tasks_uuid_pkey').insert({
            'id': rid,
            'name': 'test'
        }).returning('id')
        return results

    rows = loop.run_until_complete(insert_fn())
    assert len(rows) == 1
    insertedId = rows[0]['id']
    # asyncpg returns uuid columns as its own UUID type
    assert isinstance(insertedId, asyncpg.pgproto.pgproto.UUID)

    rows = loop.run_until_complete(
        db.table('tasks_uuid_pkey').where('id', insertedId).delete().returning())
    assert len(rows) == 1

    # re-insert using the previously generated id
    rows = loop.run_until_complete(insert_fn2(insertedId))
    assert len(rows) == 1
    insertedId2 = rows[0]['id']
    assert isinstance(insertedId2, asyncpg.pgproto.pgproto.UUID)
    assert str(insertedId2) == str(insertedId)

    # cleanup
    rows = loop.run_until_complete(
        db.table('tasks_uuid_pkey').where('id', insertedId2).delete().returning())
    assert len(rows) == 1
235 |
--------------------------------------------------------------------------------
/windyquery/tests/test_join.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from windyquery import DB
4 |
5 | loop = asyncio.get_event_loop()
6 |
7 |
def test_simple_join(db: DB):
    """Chain two joins and filter on columns of the joined tables
    (uses the seeded test data)."""
    async def run_query():
        query = db.table('cards').join(
            'boards', 'cards.board_id', '=', 'boards.id')
        query = query.join('users', 'boards.user_id', '=', 'users.id')
        return await query.select('users.email', 'boards.*').\
            where("users.id", 1).where('users.admin', '=', True)

    rows = loop.run_until_complete(run_query())
    first = rows[0]
    assert first['email'] == 'test@example.com'
    assert first['location'] == 'southwest'
    assert first['id'] == 1
    assert first['user_id'] == 1
25 |
26 |
def test_join1(db: DB):
    """join() also accepts a raw condition string with ? bound parameters."""
    async def join_fn():
        result = await db.table('cards').join(
            'boards', 'cards.board_id', '=', 'boards.id'
        ).join(
            'users', 'boards.user_id = ?', 1
        ).select(
            'users.email', 'boards.*'
        ).where("users.id", 1).where('users.admin', '=', True)
        return result
    rows = loop.run_until_complete(join_fn())
    assert len(rows) > 0
    row = rows[0]
    assert row['email'] == 'test@example.com'
    assert row['location'] == 'southwest'
42 |
--------------------------------------------------------------------------------
/windyquery/tests/test_listener.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from windyquery import DB
4 |
5 | loop = asyncio.get_event_loop()
6 |
7 |
def test_listen_insert(db: DB):
    """A LISTEN subscription receives the NOTIFY fired after an insert
    (explicit start()/next()/stop() API)."""

    async def cards_after_insert():
        test_id = 8000
        listener = db.listen('cards')
        await listener.start()
        await db.table('cards').insert({
            'id': test_id,
            'board_id': test_id * 10,
            'data': {'name': 'test'}
        })
        # waits for the next notification on the 'cards' channel
        result = await listener.next()
        await db.table('cards').where('id', test_id).delete()
        await listener.stop()
        return result

    result = loop.run_until_complete(cards_after_insert())
    assert result['channel'] == 'cards'
    assert result['payload'] == 'after insert'
    assert result['listener_pid'] > 0
    assert result['notifier_pid'] > 0
29 |
30 |
def test_listen_insert_with_stmt(db: DB):
    """Same as test_listen_insert, but using the async context-manager form
    and awaiting the listener directly for the next notification."""

    async def cards_after_insert():
        test_id = 8000
        result = None
        async with db.listen('cards') as listener:
            await db.table('cards').insert({
                'id': test_id,
                'board_id': test_id * 10,
                'data': {'name': 'test'}
            })
            result = await listener
            await db.table('cards').where('id', test_id).delete()
        return result

    result = loop.run_until_complete(cards_after_insert())
    assert result['channel'] == 'cards'
    assert result['payload'] == 'after insert'
    assert result['listener_pid'] > 0
    assert result['notifier_pid'] > 0
51 |
--------------------------------------------------------------------------------
/windyquery/tests/test_migrations.py:
--------------------------------------------------------------------------------
1 | import os
2 | import glob
3 | import asyncio
4 | import time
5 |
6 | from windyquery import DB
7 | from .conftest import Config
8 | from windyquery.scripts import Scripts
9 |
10 | loop = asyncio.get_event_loop()
11 | s = Scripts()
12 |
13 |
def test_make_migration():
    """make_migration creates exactly one timestamped file in the target dir."""
    s.make_migration('create_my_table', migrations_dir='test_tmp/migrations')
    pattern = os.path.join(
        'test_tmp', 'migrations',
        '20[1-9][0-9][0-1][0-9][0-3][0-9][0-2][0-9][0-5][0-9][0-5][0-9]_*.py')
    files = glob.glob(pattern)
    assert len(files) == 1
    created = files[0]
    assert 'test_tmp' in created
    assert 'migrations' in created
    assert 'create_my_table' in created
    # cleanup
    os.remove(created)
    os.rmdir(os.path.join('test_tmp', 'migrations'))
    os.rmdir('test_tmp')
26 |
27 |
def test_migrate(db: DB, config: Config):
    """Generate and run two migrations back to back, then clean up the
    generated files, directories, and the migrations bookkeeping table."""
    s.make_migration('create_test_table', template='test_create table',
                     migrations_dir='test_tmp/migrations')
    files = glob.glob(os.path.join(
        'test_tmp', 'migrations', '20[1-9][0-9][0-1][0-9][0-3][0-9][0-2][0-9][0-5][0-9][0-5][0-9]_*.py'))
    assert len(files) == 1
    s.migrate(config.DB_HOST, config.DB_PORT, config.DB_TEST, config.DB_USER, config.DB_PASS,
              migrations_dir='test_tmp/migrations', migrations_table='test_tmp_migrations', loop=loop)
    # wait at least 1 sec for a different migration timestamp
    time.sleep(1)
    s.make_migration('drop_test_table', template='test_drop table',
                     migrations_dir='test_tmp/migrations')
    s.migrate(config.DB_HOST, config.DB_PORT, config.DB_TEST, config.DB_USER, config.DB_PASS,
              migrations_dir='test_tmp/migrations', migrations_table='test_tmp_migrations', loop=loop)
    files = glob.glob(os.path.join(
        'test_tmp', 'migrations', '20[1-9][0-9][0-1][0-9][0-3][0-9][0-2][0-9][0-5][0-9][0-5][0-9]_*.py'))
    assert len(files) == 2
    # cleanup
    os.remove(files[0])
    os.remove(files[1])
    os.rmdir(os.path.join('test_tmp', 'migrations'))
    os.rmdir('test_tmp')

    # drop the test migrations table
    async def drop_table():
        await db.schema('TABLE test_tmp_migrations').drop()
    loop.run_until_complete(drop_table())
55 |
--------------------------------------------------------------------------------
/windyquery/tests/test_raw.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import random
3 | import string
4 |
5 | from windyquery import DB
6 |
7 | loop = asyncio.get_event_loop()
8 |
9 |
def test_raw_select(db: DB):
    """Run a parameterized raw SELECT ($1-style placeholders)."""
    async def run():
        return await db.raw('SELECT * FROM cards WHERE board_id = $1', 5)
    result = loop.run_until_complete(run())
    assert result[0]['id'] == 7
15 |
16 |
def test_select_raw(db: DB):
    """Raw SELECT with aggregate expressions and several positional params."""
    from decimal import Decimal

    async def run():
        return await db.raw(
            'SELECT ROUND(AVG(board_id),1) AS avg_id, COUNT(1) AS copies FROM cards WHERE id in ($1, $2, $3)',
            4, 5, 6)

    first = loop.run_until_complete(run())[0]
    # NUMERIC columns decode to Decimal
    assert first['avg_id'] == Decimal('4.7')
    assert first['copies'] == 3
25 |
26 |
def test_insert_raw(db: DB):
    """An INSERT ... SELECT ... WHERE NOT EXISTS guard issued via raw() is
    idempotent: running it twice inserts only one row."""
    user_id = 29998
    location = ''.join(random.choice(string.ascii_letters) for i in range(6))

    # test insert via raw() with a rather complex query
    sql = 'INSERT INTO boards ("user_id", "location") SELECT $1, $2 WHERE NOT EXISTS (SELECT "user_id" FROM boards WHERE "user_id" = $1)'

    loop.run_until_complete(db.raw(sql, user_id, location))

    # insert it again; the NOT EXISTS guard means no new row gets inserted
    loop.run_until_complete(db.raw(sql, user_id, location))

    # verify that only 1 row was inserted
    rows = loop.run_until_complete(
        db.table('boards').select().where('user_id', user_id))
    loop.run_until_complete(
        db.table('boards').where('user_id', user_id).delete())
    assert len(rows) == 1
    assert rows[0]['user_id'] == user_id
    assert rows[0]['location'] == location
47 |
48 |
def test_raw_multi_queries(db: DB):
    """Multiple semicolon-separated statements in one raw() call must not raise."""
    statements = 'SELECT 1 as one; SELECT 2 as two; SELECT 3 as three'
    raised = None
    try:
        loop.run_until_complete(db.raw(statements))
    except Exception as exc:
        raised = exc
    assert raised is None, f"'test_raw_multi_queries' raised an exception {raised}"
56 |
--------------------------------------------------------------------------------
/windyquery/tests/test_select.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from windyquery import DB
4 |
5 | loop = asyncio.get_event_loop()
6 |
7 |
def test_select_toSql(db: DB):
    """toSql() renders the statement text without executing it."""
    rendered, _params = db.table('test').select().toSql()
    assert rendered == 'SELECT * FROM "test"'
11 |
12 |
def test_simple_select(db: DB):
    """A bare select() returns the seeded row."""
    result = loop.run_until_complete(db.table('test').select())
    assert result[0]['name'] == 'test'
16 |
17 |
def test_select_with_alias(db: DB):
    """Column aliases declared with AS become the result-row keys."""
    async def run():
        return await db.table('test').select('test.id AS name1', 'test.name')

    first = loop.run_until_complete(run())[0]
    assert first['name1'] == 1
    assert first['name'] == 'test'
25 |
26 |
def test_select_with_jsonb(db: DB):
    """-> keeps JSON form, ->> extracts text; an un-aliased JSONB expression
    surfaces under Postgres's default '?column?' result key."""
    async def select_with_jsonb():
        return await db.table('cards').select('data->name AS name', 'data->>name AS name_text', 'data->tags AS tags', 'data->finished').where('id', 1)
    rows = loop.run_until_complete(select_with_jsonb())
    row = rows[0]
    assert row['name'] == '"Wash dishes"'
    assert row['name_text'] == 'Wash dishes'
    assert row['tags'] == '["Clean", "Kitchen"]'
    assert row['?column?'] == 'false'
36 |
37 |
def test_select_nested_jsonb(db: DB):
    """Chained -> / ->> operators reach into nested JSONB objects."""
    async def run():
        return await db.table('cards').select('data->address->>city AS city').where('id', 6)

    result = loop.run_until_complete(run())
    assert result[0]['city'] == 'Chicago'
43 |
44 |
def test_select1(db: DB):
    """order_by()/group_by() accept repeated calls and JSONB paths."""
    async def select_fn():
        result = await db.table('cards').\
            select('*').\
            where('1 = 1').\
            order_by('id DESC').\
            order_by('board_id ASC', 'cards.data->address->city').\
            group_by('cards.id', 'board_id', 'cards.data')
        return result
    rows = loop.run_until_complete(select_fn())
    assert len(rows) > 2
    # 'id DESC' is the leading sort key
    assert int(rows[0]['id']) > int(rows[1]['id'])
57 |
58 |
def test_select2(db: DB):
    """Smoke-test the full where() surface: raw conditions with ? params,
    NOT IN, IS NOT NULL, LIKE/ILIKE, and every comparison operator,
    plus limit()/offset()."""
    async def select_fn():
        result = await db.table('cards').\
            limit(100).\
            select('cards.data->address->>city as b', ' cards.data->address->city c').\
            where('id = ? AND 1 = ?', 6, 1).\
            where('data->address->city', 'NOT IN', ['Chicago1', 'Denvor']).\
            where('data->address->city', 'IS NOT', None).\
            where('data->address->>city', 'LIKE', 'C%').\
            where('data->address->>city', 'ilike', 'c%').\
            where('(1 = 1) and (2 > 3 or 3 > 2)').\
            where('1', '=', 1).\
            where('2', '!=', 3).\
            where('2', '<>', 3).\
            where('1', '<', 2).\
            where('1', '<=', 2).\
            where('2', '>', 1).\
            where('2', '>=', 1).\
            offset(0)
        return result
    rows = loop.run_until_complete(select_fn())
    assert len(rows) > 0
    # ->> yields text, -> yields JSON (hence the surrounding quotes)
    assert rows[0]['b'] == 'Chicago'
    assert rows[0]['c'] == '"Chicago"'
83 |
84 |
def test_select3(db: DB):
    """JSONB array indexing supports positive and negative indexes; the
    seeded tags array resolves both 1 and -1 to the same element."""
    async def fetch(idx):
        result = await db.table('cards').select(
            f'data->tags->{idx} as b').where('id = ? AND 1 = ?', 4, 1)
        return result[0]

    row = loop.run_until_complete(fetch(1))
    assert row['b'] == '"Office"'
    row = loop.run_until_complete(fetch(-1))
    assert row['b'] == '"Office"'
97 |
--------------------------------------------------------------------------------
/windyquery/tests/test_select_limit_order_by.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from windyquery import DB
4 |
5 | loop = asyncio.get_event_loop()
6 |
7 |
def test_order_by(db: DB):
    """Multiple sort keys with explicit ASC/DESC directions."""
    async def run():
        return await db.table('users').select().order_by(
            'id ASC', 'email ASC', 'password DESC')

    first = loop.run_until_complete(run())[0]
    assert first['email'] == 'test@example.com'
    assert first['id'] == 1
15 |
16 |
def test_order_by_with_table(db: DB):
    """Sort keys may be table-qualified."""
    async def run():
        return await db.table('users').select().order_by(
            'users.id ASC', 'users.email ASC', 'password DESC')

    first = loop.run_until_complete(run())[0]
    assert first['email'] == 'test@example.com'
    assert first['id'] == 1
24 |
25 |
def test_order_by_with_jsonb(db: DB):
    """A JSONB path can be mixed with plain columns in order_by()."""
    async def run():
        return await db.table('cards').select('data->>name AS name').order_by(
            'cards.data->name', 'id DESC')

    first = loop.run_until_complete(run())[0]
    assert first['name'] == 'Cook lunch'
32 |
33 |
def test_limit(db: DB):
    """limit(3) caps the result set at three rows."""
    result = loop.run_until_complete(db.table('cards').select().limit(3))
    assert len(result) == 3
37 |
--------------------------------------------------------------------------------
/windyquery/tests/test_where.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import asyncpg
3 |
4 | from windyquery import DB
5 |
6 | loop = asyncio.get_event_loop()
7 |
8 |
def test_single_where(db: DB):
    """Two-argument where() defaults to an equality comparison."""
    result = loop.run_until_complete(
        db.table('users').select().where('email', 'test@example.com'))
    assert result[0]['email'] == 'test@example.com'
13 |
14 |
def test_josnb_where(db: DB):  # NOTE(review): 'josnb' looks like a typo for 'jsonb'
    """where() on a JSONB -> path compares against the given value."""
    async def jsonb_where():
        return await db.table('cards').select('id', 'data->tags').where("data->name", 'Cook lunch')
    rows = loop.run_until_complete(jsonb_where())
    row = rows[0]
    assert row['id'] == 2
    # un-aliased expression falls back to Postgres's default '?column?' key
    assert row['?column?'] == '["Cook", "Kitchen", "Tacos"]'
22 |
23 |
def test_josnb_text_where(db: DB):  # NOTE(review): 'josnb' looks like a typo for 'jsonb'
    """Same as test_josnb_where but selecting via the ->> text operator."""
    async def jsonb_where():
        return await db.table('cards').select('id', 'data->>tags').where("data->name", 'Cook lunch')
    rows = loop.run_until_complete(jsonb_where())
    row = rows[0]
    assert row['id'] == 2
    assert row['?column?'] == '["Cook", "Kitchen", "Tacos"]'
31 |
32 |
def test_multi_where(db: DB):
    """Chained where() calls are ANDed together."""
    async def run():
        query = db.table('cards').select('id', 'data->>tags')
        query = query.where("data->>name", 'Cook lunch')
        return await query.where('board_id', '=', 7)

    result = loop.run_until_complete(run())
    assert result[0]['?column?'] == '["Cook", "Kitchen", "Tacos"]'
38 |
39 |
def test_where_in(db: DB):
    """An explicit IN operator with a list value."""
    matched = loop.run_until_complete(
        db.table('cards').select().where("id", 'IN', [5, 3]))
    assert len(matched) == 2
44 |
45 |
def test_where_in_implicit(db: DB):
    """Passing a list as the value implies an IN clause."""
    matched = loop.run_until_complete(
        db.table('cards').select().where("id", [5, 3]))
    assert len(matched) == 2
50 |
51 |
def test_where_in_by_params(db: DB):
    """IN written as a raw condition with ? placeholders."""
    matched = loop.run_until_complete(
        db.table('cards').select().where("id IN (?, ?)", 5, 3))
    assert len(matched) == 2
56 |
57 |
def test_where_with_uuid(db: DB):
    """uuid values can be used both as implicit-equality where() values and
    inside raw ? conditions."""
    async def insert_fn():
        results = await db.table('tasks_uuid_pkey').insert({
            'name': 'test'
        }).returning('id')
        return results

    rows = loop.run_until_complete(insert_fn())
    assert len(rows) == 1
    insertedId1 = rows[0]['id']
    # asyncpg returns uuid columns as its own UUID type
    assert isinstance(insertedId1, asyncpg.pgproto.pgproto.UUID)

    rows = loop.run_until_complete(insert_fn())
    assert len(rows) == 1
    insertedId2 = rows[0]['id']
    assert isinstance(insertedId2, asyncpg.pgproto.pgproto.UUID)

    # uuid as an implicit-equality where() value
    rows = loop.run_until_complete(
        db.table('tasks_uuid_pkey').select().where("id", insertedId1))
    assert len(rows) == 1

    # uuids bound through ? placeholders in a raw condition
    rows = loop.run_until_complete(
        db.table('tasks_uuid_pkey').select().where("id = ? OR id = ?", insertedId1, insertedId2))
    assert len(rows) == 2

    # cleanup
    rows = loop.run_until_complete(
        db.table('tasks_uuid_pkey').where('id', insertedId1).delete().returning())
    assert len(rows) == 1

    rows = loop.run_until_complete(
        db.table('tasks_uuid_pkey').where('id', insertedId2).delete().returning())
    assert len(rows) == 1
90 |
--------------------------------------------------------------------------------
/windyquery/tests/test_with_values.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from dateutil.parser import parse
3 | import datetime
4 |
5 | import asyncpg
6 |
7 | from windyquery import DB
8 |
9 | loop = asyncio.get_event_loop()
10 |
11 |
def test_with_values_select(db: DB):
    """with_values() builds a VALUES CTE; each Python type maps to the
    expected SQL value (both the string 'null' and None become NULL)."""
    async def test_fn():
        result = await db.with_values('my_values', {
            'text_col': 'Tom',
            'bool_col': True,
            'num_col': 2,
            'dict_col': {'id': 1},
            'datetime_col': parse('20210720T100000Z'),
            'null_col': 'null',
            'null_col2': None
        }).table('my_values').select()
        return result
    rows = loop.run_until_complete(test_fn())
    assert len(rows) == 1
    assert rows[0]['text_col'] == 'Tom'
    assert rows[0]['bool_col'] == True
    assert rows[0]['num_col'] == 2
    # dict columns surface as JSON text
    assert rows[0]['dict_col'] == '{"id": 1}'
    assert rows[0]['datetime_col'] == datetime.datetime(
        2021, 7, 20, 10, 0, 0, 0, tzinfo=datetime.timezone.utc)
    assert rows[0]['null_col'] is None
    assert rows[0]['null_col2'] is None
34 |
35 |
def test_with_values_join(db: DB):
    """A with_values() CTE can be joined against a real table."""
    async def run():
        workers = db.with_values(
            'workers',
            {'task_id': 1, 'name': 'Tom'},
            {'task_id': 2, 'name': 'Jerry'},
        )
        return await workers.table('workers').\
            select('workers.name AS worker_name', 'tasks.name AS task_name').\
            join('tasks', 'workers.task_id = tasks.id').\
            order_by('tasks.id')

    rows = loop.run_until_complete(run())
    assert len(rows) == 2
    assert rows[0]['worker_name'] == 'Tom'
    assert rows[1]['worker_name'] == 'Jerry'
51 |
52 |
def test_with_values_multi_with(db: DB):
    """Two with_values() CTEs can coexist and both be joined in one query."""
    async def test_fn():
        result = await db.with_values('workers1', {
            'task_id': 1,
            'name': 'Tom'
        }, {
            'task_id': 2,
            'name': 'Jerry'
        }).with_values('workers2', {
            'task_id': 1,
            'name': 'Topsy'
        }, {
            'task_id': 2,
            'name': 'Nibbles'
        }).table('tasks').select(
            'workers1.name AS primary_worker_name',
            'workers2.name AS secondary_worker_name',
            'tasks.name AS task_name'
        ).join('workers1', 'workers1.task_id = tasks.id').\
            join('workers2', 'workers2.task_id = tasks.id').\
            order_by('tasks.id')
        return result
    rows = loop.run_until_complete(test_fn())
    assert len(rows) == 2
    assert rows[0]['primary_worker_name'] == 'Tom'
    assert rows[0]['secondary_worker_name'] == 'Topsy'
    assert rows[1]['primary_worker_name'] == 'Jerry'
    assert rows[1]['secondary_worker_name'] == 'Nibbles'
81 |
82 |
def test_with_values_update(db: DB):
    """UPDATE ... FROM a with_values() CTE; the second update rewrites the
    names back so the seeded data is left as it was."""
    async def test_fn():
        result = await db.with_values('workers', {
            'task_id': 3,
            'name': 'Tom'
        }, {
            'task_id': 4,
            'name': 'Jerry'
        }).table('tasks').\
            update("name = tasks.name || ' (worked by ' || workers.name || ')'").\
            from_table('workers').\
            where('workers.task_id = tasks.id').\
            returning('workers.name AS worker_name', 'tasks.name AS task_name')
        return result
    rows = loop.run_until_complete(test_fn())
    assert len(rows) == 2
    assert rows[0]['worker_name'] == 'Tom'
    assert rows[0]['task_name'] == 'Tom task (worked by Tom)'
    assert rows[1]['worker_name'] == 'Jerry'
    assert rows[1]['task_name'] == 'Jerry task (worked by Jerry)'

    # second update restores the original task names
    async def test_fn2():
        result = await db.with_values('workers', {
            'task_id': 3,
            'name': 'Tom'
        }, {
            'task_id': 4,
            'name': 'Jerry'
        }).table('tasks').\
            update("name = workers.name || ' task'").\
            from_table('workers').\
            where('workers.task_id = tasks.id').\
            returning('workers.name AS worker_name', 'tasks.name AS task_name')
        return result
    rows = loop.run_until_complete(test_fn2())
    assert len(rows) == 2
    assert rows[0]['worker_name'] == 'Tom'
    assert rows[0]['task_name'] == 'Tom task'
    assert rows[1]['worker_name'] == 'Jerry'
    assert rows[1]['task_name'] == 'Jerry task'
123 |
124 |
def test_with_values_raw(db: DB):
    """A with_values() CTE is visible inside a subsequent raw() query."""
    async def test_fn():
        result = await db.with_values('workers', {
            'task_id': 1,
            'name': 'Tom'
        }, {
            'task_id': 2,
            'name': 'Jerry'
        }).raw("""
            SELECT * FROM tasks
            WHERE EXISTS(
                SELECT 1 FROM workers
                JOIN task_results ON workers.task_id = task_results.task_id
                where workers.task_id = tasks.id
            )
        """)
        return result
    rows = loop.run_until_complete(test_fn())
    assert len(rows) == 2
    assert rows[0]['name'] == 'tax return'
    assert rows[1]['name'] == 'pick up kids'
146 |
147 |
def test_with_values_rrule(db: DB):
    """rrule() and with_values() CTEs can be combined and joined in one
    builder query; rrule_after narrows the occurrences (weekends excluded
    by the exrule)."""
    async def test_fn():
        rruleStr = "DTSTART:20210715T100000Z\nRRULE:FREQ=DAILY;COUNT=5"
        exruleStr = "DTSTART:20210715T100000Z\nRRULE:FREQ=DAILY;BYWEEKDAY=SA,SU"
        result = await db.rrule('my_rrules', {
            'rrule': rruleStr,
            'exrule': exruleStr,
            'rrule_after': ['20210716T100000Z'],
            'task_id': 1
        }).with_values('workers', {
            'task_id': 1,
            'name': 'Tom'
        }, {
            'task_id': 2,
            'name': 'Jerry'
        }).table('tasks').\
            join('workers', 'workers.task_id = tasks.id').\
            join('my_rrules', 'my_rrules.task_id = tasks.id').\
            select('workers.name AS worker_name',
                   'rrule AS worked_at', 'tasks.name AS task_name')
        return result
    rows = loop.run_until_complete(test_fn())
    assert len(rows) == 1
    assert rows[0]['worker_name'] == 'Tom'
    # 2021-07-17/18 were a weekend, so the first match after the 16th is the 19th
    assert rows[0]['worked_at'] == datetime.datetime(
        2021, 7, 19, 10, 0, tzinfo=datetime.timezone.utc)
    assert rows[0]['task_name'] == 'tax return'
175 |
176 |
def test_with_values_rrule_using_raw(db: DB):
    """Same scenario as test_with_values_rrule, but the final query is a
    raw() statement that references both CTEs."""
    async def test_fn():
        rruleStr = "DTSTART:20210715T100000Z\nRRULE:FREQ=DAILY;COUNT=5"
        exruleStr = "DTSTART:20210715T100000Z\nRRULE:FREQ=DAILY;BYWEEKDAY=SA,SU"
        result = await db.rrule('my_rrules', {
            'rrule': rruleStr,
            'exrule': exruleStr,
            'rrule_after': ['20210716T100000Z'],
            'task_id': 1
        }).with_values('workers', {
            'task_id': 1,
            'name': 'Tom'
        }, {
            'task_id': 2,
            'name': 'Jerry'
        }).raw("""
            SELECT workers.name AS worker_name,
                   rrule AS worked_at,
                   tasks.name AS task_name
            FROM tasks
            JOIN workers ON workers.task_id = tasks.id
            JOIN my_rrules ON my_rrules.task_id = tasks.id
        """)
        return result
    rows = loop.run_until_complete(test_fn())
    assert len(rows) == 1
    assert rows[0]['worker_name'] == 'Tom'
    assert rows[0]['worked_at'] == datetime.datetime(
        2021, 7, 19, 10, 0, tzinfo=datetime.timezone.utc)
    assert rows[0]['task_name'] == 'tax return'
207 |
208 |
def test_with_values_uuid(db: DB):
    """Rows returned by insert().returning() (containing a uuid) can be fed
    straight back into with_values() and joined on the uuid column."""
    async def insert_fn():
        results = await db.table('tasks_uuid_pkey').insert({
            'name': 'test123'
        }).returning('id')
        # reuse the returned record(s) as the VALUES rows of a CTE
        results = await db.with_values('uuid_vals', *results).\
            table('tasks_uuid_pkey').\
            select('tasks_uuid_pkey.name', 'tasks_uuid_pkey.id').\
            join('uuid_vals', 'uuid_vals.id = tasks_uuid_pkey.id')
        return results

    rows = loop.run_until_complete(insert_fn())
    assert len(rows) == 1
    insertedId = rows[0]['id']
    assert isinstance(insertedId, asyncpg.pgproto.pgproto.UUID)

    # cleanup
    rows = loop.run_until_complete(
        db.table('tasks_uuid_pkey').where('id', insertedId).delete().returning())
    assert len(rows) == 1
228 |
--------------------------------------------------------------------------------
/windyquery/utils.py:
--------------------------------------------------------------------------------
1 | import json
2 | import datetime
3 | from typing import Any, Tuple
4 | import asyncpg
5 |
6 |
# add quotes to identifier
def quote_identifier(name: str) -> str:
    """Double-quote an SQL identifier, doubling any embedded double quotes."""
    escaped = name.replace('"', '""')
    return f'"{escaped}"'
11 |
12 |
# add quotes to literal
def quote_literal(name: str) -> str:
    """Single-quote an SQL string literal.

    Doubles embedded single quotes and escapes newline, tab, and
    vertical-tab characters so the literal stays on one line (callers
    can then pass the result through prefix_E to get an E'...' literal).
    """
    name = name.replace("'", "''")
    # str.replace is a no-op when the substring is absent, so the previous
    # "if '\n' in name:" style guards were redundant
    name = name.replace('\n', '\\n').replace('\t', '\\t').replace('\v', '\\v')
    return "'" + name + "'"
24 |
25 |
# prefix E if it contains escape chars
def prefix_E(name: str) -> str:
    """Prepend ' E' to a quoted literal that contains backslash escapes,
    producing a Postgres E'...' escape-string literal."""
    needs_e = name.startswith("'") and '\\' in name
    return " E" + name if needs_e else name
31 |
32 |
# remove quotes from literal
def unquote_literal(name: str) -> str:
    """Strip the surrounding single quotes (if present) and un-double
    embedded quotes — the inverse of quote_literal.

    The outer quotes are removed BEFORE un-doubling; the previous order
    (un-double first) mangled literals made up entirely of quote
    characters, e.g. "''''" (the quoted form of "'") collapsed to ""
    instead of "'".
    """
    if name.startswith("'") and name.endswith("'"):
        name = name[1:-1]
    return name.replace("''", "'")
39 |
40 |
def process_value(val: Any) -> Tuple[str, Any]:
    """Convert a Python value into an inline SQL fragment plus an optional
    bind parameter.

    Returns (sql_text, param): param is non-None only when the value must
    be bound through a '?' placeholder instead of being inlined.
    """
    param = None
    if val is None or val in ('NULL', 'null'):
        val = 'NULL'
    elif val == 'DEFAULT':
        pass  # the DEFAULT keyword passes through unchanged
    elif isinstance(val, (dict, list)):
        # JSON-encode containers and bind them as a parameter
        param = json.dumps(val)
        val = '?'
    elif isinstance(val, (datetime.datetime, datetime.date)):
        param = val
        val = '?'
    elif isinstance(val, bool):
        val = 'TRUE' if val else 'FALSE'
    elif isinstance(val, asyncpg.pgproto.pgproto.UUID):
        val = quote_literal(str(val))
    elif isinstance(val, str):
        val = quote_literal(val)
    return val, param
63 |
--------------------------------------------------------------------------------
/windyquery/validator/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import List, Any
2 | from dateutil.tz import UTC
3 | from dateutil import rrule
4 | import datetime
5 |
6 | import asyncpg
7 |
8 | from windyquery.ctx import Ctx
9 | from windyquery.utils import process_value, quote_literal
10 |
11 | from .field import Field
12 | from .table import Table
13 | from .select import Select
14 | from .where import Where
15 | from .limit import Limit
16 | from .offset import Offset
17 | from .order_by import OrderBy
18 | from .join import Join
19 | from .update import Update
20 | from .value_list import ValueList
21 | from .schema import Schema
22 | from .create import Create
23 | from .alter import Alter
24 | from .conflict_target import ConflictTarget
25 | from .conflict_action import ConflictAction
26 |
27 | from ._base import ValidationError
28 |
29 |
# Shared module-level parser instances — one per grammar. They are reused
# by every Validator method below rather than being constructed per call.
_field = Field()
_table = Table()
_select = Select()
_where = Where()
_limit = Limit()
_offset = Offset()
_order_by = OrderBy()
_join = Join()
_update = Update()
_value_list = ValueList()
_schema = Schema()
_create = Create()
_alter = Alter()
_conflict_target = ConflictTarget()
_conflict_action = ConflictAction()
45 |
46 |
class Validator:
    """validate the input

    Facade over the module-level grammar singletons defined above.  Each
    ``validate_*`` method parses one SQL clause and returns the sanitized
    SQL text.  Methods that take a ``Ctx`` may append bound parameters to
    ``ctx.args`` as a side effect; methods that create a fresh ``Ctx()``
    produce clauses with no bound parameters.
    """

    def validate_tablename(self, s: str) -> str:
        """Validate and quote a table name."""
        return _table.parse(s, Ctx())

    def validate_select(self, s: str) -> str:
        """Validate a SELECT list item."""
        return _select.parse(s, Ctx())

    def validate_order_by(self, s: str) -> str:
        """Validate an ORDER BY item."""
        return _order_by.parse(s, Ctx())

    def validate_group_by(self, s: str) -> str:
        # GROUP BY terms are validated with the generic field grammar.
        return _field.parse(s, Ctx())

    def validate_limit(self, s: str, ctx: Ctx) -> str:
        """Validate a LIMIT clause; emits a numbered placeholder."""
        return _limit.parse(s, ctx)

    def validate_offset(self, s: str, ctx: Ctx) -> str:
        """Validate an OFFSET clause; emits a numbered placeholder."""
        return _offset.parse(s, ctx)

    def validate_where(self, s: str, ctx: Ctx) -> str:
        """Validate a WHERE condition."""
        return _where.parse(s, ctx)

    def validate_join(self, tbl: str, s: str, ctx: Ctx) -> str:
        """Build a JOIN clause: quote *tbl* and validate the ON condition *s*."""
        tbl = _field.sanitize_identifier(tbl)
        return f'JOIN {tbl} ON ' + _join.parse(s, ctx)

    def validate_update(self, s: str, ctx: Ctx) -> str:
        """Validate an UPDATE SET expression."""
        return _update.parse(s, ctx)

    def validate_insert_columns(self, columns: List[str]) -> str:
        """Quote each column name and render an INSERT column list."""
        cols = [_field.sanitize_identifier(col) for col in columns]
        return '(' + ', '.join(cols) + ')'

    def validate_insert_values(self, values: List[Any], ctx: Ctx) -> str:
        """Render one parenthesized VALUES tuple.

        Values that ``process_value`` turns into ``?`` placeholders
        contribute a bound argument appended to ``ctx.args``.
        """
        results = []
        for val in values:
            val, p = process_value(val)
            if p is not None:
                ctx.args.append(p)
            results.append(str(val))
        return '(' + _value_list.parse(','.join(results), ctx) + ')'

    def validate_identifier(self, s: str) -> str:
        """Quote a single identifier."""
        return _field.sanitize_identifier(s)

    def validate_schema(self, s: str) -> str:
        """Validate a schema clause."""
        return _schema.parse(s, Ctx())

    def validate_create(self, s: str) -> str:
        """Validate one item of a CREATE TABLE body."""
        return _create.parse(s, Ctx())

    def validate_alter(self, s: str) -> str:
        """Validate the body of an ALTER TABLE statement."""
        return _alter.parse(s, Ctx())

    def validate_conflict_target(self, s: str) -> str:
        """Validate an ON CONFLICT target."""
        return _conflict_target.parse(s, Ctx())

    def validate_conflict_action(self, s: str, ctx: Ctx) -> str:
        """Validate an ON CONFLICT action (DO NOTHING / DO UPDATE SET)."""
        return _conflict_action.parse(s, ctx)

    def validate_rrule_columns(self, columns: List[str]) -> str:
        """Like validate_insert_columns, but prepends the 'rrule' column."""
        cols = [_field.sanitize_identifier(col) for col in columns]
        # the 1st is rrule timestamp; refer to validate_rrule_value
        cols.insert(0, 'rrule')
        return '(' + ', '.join(cols) + ')'

    def validate_rrule_values(self, ctx: Ctx, values: List[Any], rrulesetVal: rrule.rruleset, occurrences: slice, afterVal: tuple, beforeVal: tuple, betweenVal: tuple) -> str:
        """Expand an rruleset into VALUES rows, one per occurrence.

        Each row is *values* with the occurrence timestamp prepended.
        *afterVal*/*beforeVal*/*betweenVal* are argument tuples for
        ``rruleset.after``/``before``/``between`` and are tried in that
        order; the first that yields occurrences wins.  Otherwise the
        whole rruleset is used.  *occurrences* is a slice that caps the
        number of rows produced.
        """
        tms = []
        # try rrule_after, rrule_before, and rrule_between
        if afterVal is not None or beforeVal is not None or betweenVal is not None:
            if afterVal is not None:
                tm = rrulesetVal.after(*afterVal)
                if tm:
                    tms.append(tm)
            if len(tms) == 0 and beforeVal is not None:
                tm = rrulesetVal.before(*beforeVal)
                if tm:
                    tms.append(tm)
            if len(tms) == 0 and betweenVal is not None:
                tms = rrulesetVal.between(*betweenVal)
        else:
            tms = rrulesetVal

        # set a limit in case the rrule is unbound
        results = []
        row = [None]  # slot 0 reserved for the occurrence timestamp
        for val in values:
            row.append(val)
        for tm in tms[occurrences]:
            # row is reused across iterations; only the timestamp changes
            row[0] = tm
            result = '(' + self.validate_value_list(ctx, row) + ')'
            results.append(result)
        return ', '.join(results)

    def validate_value_list(self, ctx: Ctx, values: List[Any]) -> str:
        """Render a comma-separated SQL value list from Python values.

        datetimes are converted to UTC literals cast to ``::timestamptz``
        and UUIDs to literals cast to ``::uuid``, both via post-parse
        string replacement; other values go through ``process_value`` and
        may add placeholders whose arguments end up in ``ctx.args``.

        NOTE(review): the first branch also matches plain ``datetime.date``
        objects, which have no ``astimezone`` — a bare date here would
        raise AttributeError; confirm callers only pass datetimes.
        """
        replaces = {}
        transformedValues = []
        args = []
        for val in values:
            if isinstance(val, datetime.datetime) or isinstance(val, datetime.date):
                val, _ = process_value(str(val.astimezone(UTC)))
                replaces[val] = f'{val}::timestamptz'
            elif isinstance(val, asyncpg.pgproto.pgproto.UUID):
                val = quote_literal(str(val))
                replaces[val] = f'{val}::uuid'
            else:
                val, p = process_value(val)
                if p is not None:
                    args.append(p)
                val = str(val)
            transformedValues.append(val)

        # parse with a nested ctx so placeholder numbering continues from
        # the outer ctx's current position
        nestedCtx = Ctx(ctx.param_offset + len(ctx.args), args)
        result = _value_list.parse(','.join(transformedValues), nestedCtx)
        # apply the ::timestamptz / ::uuid casts recorded above
        for rep in replaces:
            result = result.replace(rep, replaces[rep])
        ctx.args.extend(args)
        return result
175 |
--------------------------------------------------------------------------------
/windyquery/validator/_base.py:
--------------------------------------------------------------------------------
1 | import ply.yacc as yacc
2 | import ply.lex as lex
3 | from asyncpg import utils
4 |
5 | from windyquery.ctx import Ctx
6 | from windyquery.provider import Provider
7 |
8 |
9 | def _rule(r):
10 | def decorate(func):
11 | func.__doc__ = r
12 | return func
13 | return decorate
14 |
15 |
class ValidationError(Exception):
    """Raised when input fails lexing or parsing in a validator."""
18 |
19 |
class Base:
    """Shared PLY lexer/parser machinery for all validator grammars.

    PLY discovers token and grammar rules reflectively from the class's
    ``t_*``/``p_*`` attributes, so subclasses contribute rules simply by
    defining them (and by extending ``reserved``/``tokens``).
    """
    provider: Provider  # fresh Provider per parse() call
    reserved = {}
    tokens = ()

    # Ignored characters
    t_ignore = " \t\n"

    def t_error(self, t):
        # PLY lexer error hook: any unrecognized character aborts validation.
        raise ValidationError(f"Illegal character {t.value[0]!r}")

    # rules
    _start = ''  # subclasses set this to their grammar's start symbol

    def p_error(self, p):
        # PLY parser error hook; p is None when input ends unexpectedly.
        if p:
            val = p.value
        else:
            val = 'Unknown'
        raise ValidationError(f"error at {val!r}")

    def __init__(self):
        # optimize=1 lets lex reuse the pre-generated lextab module.
        self._lexer = lex.lex(module=self, optimize=1)
        self._parser = yacc.yacc(module=self, start=self._start, debug=False)

    def parse(self, s: str, ctx: Ctx):
        """Parse input *s* and return the generated SQL fragment.

        Placeholders produced during parsing are numbered relative to
        ``ctx.param_offset``; the offset is advanced past the arguments
        accumulated in ``ctx.args`` before returning.
        """
        self.provider = Provider(ctx)
        # clone so nested parses don't disturb the shared lexer's state
        l = self._lexer.clone()
        l.input(s)
        root = self._parser.parse(lexer=l)
        self.provider.process()
        sql = str(root)
        ctx.param_offset += len(ctx.args)
        return sql

    def sanitize_identifier(self, item):
        # do not escape information_schema as identifier
        if item == 'information_schema':
            return item
        elif item == 'EXCLUDED':
            return item
        else:
            # NOTE(review): relies on asyncpg's private _quote_ident helper
            return utils._quote_ident(item)

    def sanitize_literal(self, item):
        # quote as a SQL string literal (asyncpg private helper)
        return utils._quote_literal(item)
66 |
--------------------------------------------------------------------------------
/windyquery/validator/alias.py:
--------------------------------------------------------------------------------
class Alias:
    """Token mixin: the AS keyword used in alias clauses."""
    reserved = {'AS': 'AS'}
    tokens = ('AS',)

    # Tokens
    t_AS = r'AS'
7 |
--------------------------------------------------------------------------------
/windyquery/validator/alter.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .constraint import Constraint
3 |
4 |
class Alter(Constraint):
    """Grammar for the body of an ALTER TABLE statement.

    Accepts renames (table/column/constraint), SET SCHEMA, or a list of
    ADD/DROP/ALTER actions joined with ', '.
    """
    reserved = {
        **Constraint.reserved,
        'RENAME': 'RENAME',
        'COLUMN': 'COLUMN',
        'SCHEMA': 'SCHEMA',
        'ADD': 'ADD',
        'DROP': 'DROP',
        'ALTER': 'ALTER',
        'IF': 'IF',
        'EXISTS': 'EXISTS',
        'TYPE': 'TYPE',
        'DATA': 'DATA',
        'RESTART': 'RESTART',
        'STATISTICS': 'STATISTICS',
        'STORAGE': 'STORAGE',
        'PLAIN': 'PLAIN',
        'EXTERNAL': 'EXTERNAL',
        'EXTENDED': 'EXTENDED',
        'MAIN': 'MAIN',
    }

    tokens = Constraint.tokens + \
        ('RENAME', 'COLUMN', 'SCHEMA', 'ADD', 'DROP', 'ALTER', 'IF', 'EXISTS', 'TYPE',
         'DATA', 'DOUBLE_COLON', 'RESTART', 'STATISTICS', 'PLAIN', 'EXTERNAL',
         'EXTENDED', 'MAIN', 'STORAGE',)

    precedence = Constraint.precedence

    # Tokens
    t_DOUBLE_COLON = r'::'  # cast operator used in the USING clause

    # rules
    _start = 'alter'

    @_rule('alter : RENAME TO fullname')
    def p_alter_renametable(self, p):
        p[0] = self.provider.new_glue('RENAME TO', p[3])

    @_rule('alter : RENAME optcolumn fullname TO fullname')
    def p_alter_renamecolumn(self, p):
        p[0] = self.provider.new_glue('RENAME', p[2], p[3], 'TO', p[5])

    @_rule('alter : RENAME CONSTRAINT fullname TO fullname')
    def p_alter_renameconstraint(self, p):
        p[0] = self.provider.new_glue('RENAME CONSTRAINT', p[3], 'TO', p[5])

    @_rule('alter : SET SCHEMA fullname')
    def p_alter_setschema(self, p):
        p[0] = self.provider.new_glue('SET SCHEMA', p[3])

    @_rule('alter : actions')
    def p_alter_actions(self, p):
        p[0] = p[1]

    @_rule('actions : action')
    def p_actions_action(self, p):
        # multiple actions render comma-separated, per ALTER TABLE syntax
        p[0] = self.provider.new_glue(p[1]).separator(', ')

    @_rule('actions : actions action')
    def p_actions_actions(self, p):
        p[0] = p[1].append(p[2])

    @_rule('action : ADD constraintname tableconstraint')
    def p_action_addtableconstraint(self, p):
        p[0] = self.provider.new_glue('ADD', p[2], p[3])

    @_rule('action : ADD optcolumn optifnotexists column')
    def p_action_addcolumn(self, p):
        p[0] = self.provider.new_glue('ADD', p[2], p[3], p[4])

    @_rule('action : DROP optcolumn optifexists fullname dropcond')
    def p_action_dropcolumn(self, p):
        p[0] = self.provider.new_glue('DROP', p[2], p[3], p[4], p[5])

    @_rule('action : DROP CONSTRAINT optifexists unitname dropcond')
    def p_action_dropconstraint(self, p):
        p[0] = self.provider.new_glue('DROP CONSTRAINT', p[3], p[4], p[5])

    @_rule('action : ALTER optcolumn unitname colaction')
    def p_action_colaction(self, p):
        p[0] = self.provider.new_glue('ALTER', p[2], p[3], p[4])

    @_rule('colaction : typekey datatype optusing')
    def p_colaction_altertype(self, p):
        p[0] = self.provider.new_glue(p[1], p[2], p[3])

    @_rule('colaction : SET datadefault')
    def p_colaction_alterdefault(self, p):
        p[0] = self.provider.new_glue('SET', p[2])

    @_rule('colaction : DROP DEFAULT')
    def p_colaction_alterdedropfault(self, p):
        p[0] = 'DROP DEFAULT'

    @_rule('colaction : SET NOT NULL')
    def p_colaction_altersetnotnull(self, p):
        p[0] = 'SET NOT NULL'

    @_rule('colaction : DROP NOT NULL')
    def p_colaction_alterdropnotnull(self, p):
        p[0] = 'DROP NOT NULL'

    @_rule('colaction : ADD GENERATED generatedoption AS IDENTITY sequenceoption')
    def p_colaction_addgenerated(self, p):
        p[0] = self.provider.new_glue(
            'ADD GENERATED', p[3], 'AS IDENTITY', p[6])

    @_rule('colaction : generateditems')
    def p_colaction_setgenerated(self, p):
        p[0] = p[1]

    @_rule('colaction : DROP IDENTITY optifexists')
    def p_colaction_dropidentity(self, p):
        p[0] = self.provider.new_glue('DROP IDENTITY', p[3])

    @_rule('colaction : SET STATISTICS NUMBER')
    def p_colaction_setstatistics(self, p):
        p[0] = self.provider.new_glue('SET STATISTICS', p[3])

    @_rule('colaction : SET STATISTICS MINUS NUMBER')
    def p_colaction_unsetstatistics(self, p):
        # negative statistics target; glue with no separator: '-<n>'
        p[0] = self.provider.new_glue('SET STATISTICS -', p[4]).separator('')

    @_rule('''colaction : SET STORAGE PLAIN
                        | SET STORAGE EXTERNAL
                        | SET STORAGE EXTENDED
                        | SET STORAGE MAIN''')
    def p_colaction_setstorage(self, p):
        p[0] = f'SET STORAGE {p[3]}'

    @_rule('generateditems : generateditem')
    def p_action_setgenerated_generateditems_item(self, p):
        p[0] = self.provider.new_glue(p[1])

    @_rule('generateditems : generateditems generateditem')
    def p_action_setgenerated_generateditems_items(self, p):
        p[0] = p[1].append(p[2])

    @_rule('generateditem : SET GENERATED generatedoption')
    def p_generateditem1(self, p):
        p[0] = self.provider.new_glue('SET GENERATED', p[3])

    @_rule('generateditem : SET startwith incrementby')
    def p_generateditem2(self, p):
        p[0] = self.provider.new_glue('SET', p[2], p[3])

    @_rule('generateditem : RESTART WITH NUMBER')
    def p_generateditem3(self, p):
        p[0] = self.provider.new_glue('RESTART WITH', p[3])

    @_rule('generateditem : RESTART NUMBER')
    def p_generateditem4(self, p):
        p[0] = self.provider.new_glue('RESTART', p[2])

    @_rule('typekey : TYPE')
    def p_typekey_type(self, p):
        p[0] = 'TYPE'

    @_rule('typekey : SET DATA TYPE')
    def p_typekey_setdatatype(self, p):
        p[0] = 'SET DATA TYPE'

    @_rule('optusing : empty')
    def p_optusing_emtpy(self, p):
        p[0] = None

    @_rule('optusing : USING unitname DOUBLE_COLON datatype')
    def p_optusing(self, p):
        # renders 'USING <col>::<type>' with no spaces around '::'
        conv = self.provider.new_glue(p[2], '::', p[4]).separator('')
        p[0] = f'USING {conv}'

    @_rule('dropcond : empty')
    def p_dropcond_empty(self, p):
        p[0] = None

    @_rule('''dropcond : RESTRICT
                       | CASCADE''')
    def p_dropcond(self, p):
        p[0] = p[1].upper()

    @_rule('optcolumn : empty')
    def p_optcolumn_emtpy(self, p):
        p[0] = None

    @_rule('optcolumn : COLUMN')
    def p_optcolumn(self, p):
        p[0] = 'COLUMN'

    @_rule('optifexists : empty')
    def p_optifexists_emtpy(self, p):
        p[0] = None

    @_rule('optifexists : IF EXISTS')
    def p_optifexists(self, p):
        p[0] = 'IF EXISTS'

    @_rule('optifnotexists : empty')
    def p_optifnotexists_emtpy(self, p):
        p[0] = None

    @_rule('optifnotexists : IF NOT EXISTS')
    def p_optifnotexists(self, p):
        p[0] = 'IF NOT EXISTS'
209 |
--------------------------------------------------------------------------------
/windyquery/validator/conflict_action.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .update import Update
3 |
4 |
class ConflictAction(Update):
    """Grammar for the action of ON CONFLICT: DO NOTHING or DO UPDATE SET."""
    reserved = {**Update.reserved, 'DO': 'DO',
                'NOTHING': 'NOTHING', 'UPDATE': 'UPDATE', 'SET': 'SET'}
    tokens = Update.tokens + ('DO', 'NOTHING', 'UPDATE', 'SET',)

    # rules
    _start = 'conflictaction'

    @_rule('conflictaction : DO NOTHING')
    def p_conflictaction_do_nothing(self, p):
        p[0] = 'DO NOTHING'

    @_rule('conflictaction : DO UPDATE SET updates')
    def p_conflictaction_do_update(self, p):
        # p[4] is the 'updates' nonterminal inherited from the Update grammar
        p[0] = self.provider.new_glue('DO UPDATE SET', p[4])
20 |
--------------------------------------------------------------------------------
/windyquery/validator/conflict_target.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .operators.paren import Paren
3 | from .fullname import Fullname
4 |
5 |
class ConflictTarget(Fullname, Paren):
    """Grammar for an ON CONFLICT target: a named constraint or a
    parenthesized name."""
    reserved = {
        **Fullname.reserved, **Paren.reserved, 'ON': 'ON', 'CONSTRAINT': 'CONSTRAINT',
    }

    tokens = Fullname.tokens + Paren.tokens + ('ON', 'CONSTRAINT',)

    precedence = Fullname.precedence

    # rules
    _start = 'conflicttarget'

    @_rule('conflicttarget : ON CONSTRAINT fullname')
    def p_conflicttarget_on_constraint(self, p):
        p[0] = f'ON CONSTRAINT {p[3]}'

    @_rule('conflicttarget : LPAREN fullname RPAREN')
    def p_tableconstraint_index_name(self, p):
        p[0] = f'({p[2]})'
25 |
--------------------------------------------------------------------------------
/windyquery/validator/constraint.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .column import Column
3 |
4 |
class Constraint(Column):
    """Grammar for table-level constraints (CHECK, FOREIGN KEY, UNIQUE,
    PRIMARY KEY) with optional index parameters."""
    reserved = {
        **Column.reserved,
        'FILLFACTOR': 'FILLFACTOR',
        'INCLUDE': 'INCLUDE',
        'USING': 'USING',
        'INDEX': 'INDEX',
        'TABLESPACE': 'TABLESPACE',
        'FOREIGN': 'FOREIGN',
    }

    tokens = Column.tokens + \
        ('FILLFACTOR', 'INCLUDE', 'USING', 'INDEX', 'TABLESPACE', 'FOREIGN',)

    precedence = Column.precedence

    # rules
    _start = 'tableconstraint'

    @_rule('tableconstraint : CHECK LPAREN checkexpr RPAREN')
    def p_tableconstraint_check(self, p):
        p[0] = f'CHECK({p[3]})'

    @_rule('tableconstraint : FOREIGN KEY LPAREN namelist RPAREN REFERENCES fullname LPAREN namelist RPAREN referenceaction')
    def p_tableconstraint_references(self, p):
        p[0] = f'FOREIGN KEY({p[4]}) REFERENCES {p[7]} ({p[9]})'
        # referenceaction (defined in a parent grammar) may be None
        if p[11]:
            p[0] += f' {p[11]}'

    @_rule('tableconstraint : UNIQUE LPAREN namelist RPAREN indexparameters')
    def p_tableconstraint_unique(self, p):
        p[0] = f'UNIQUE({p[3]})'
        if p[5]:
            p[0] += f' {p[5]}'

    @_rule('tableconstraint : PRIMARY KEY LPAREN namelist RPAREN indexparameters')
    def p_tableconstraint_primary_key(self, p):
        p[0] = f'PRIMARY KEY({p[4]})'
        if p[6]:
            p[0] += f' {p[6]}'

    @_rule('indexparameters : empty')
    def p_index_parameters_empty(self, p):
        p[0] = None

    @_rule('indexparameters : WITH LPAREN FILLFACTOR EQ NUMBER RPAREN')
    def p_index_parameters_with(self, p):
        p[0] = f'WITH (fillfactor={p[5]})'

    @_rule('indexparameters : INCLUDE LPAREN namelist RPAREN')
    def p_index_parameters_include(self, p):
        p[0] = f'INCLUDE ({p[3]})'

    @_rule('indexparameters : USING INDEX TABLESPACE fullname')
    def p_index_parameters_using(self, p):
        p[0] = f'USING INDEX TABLESPACE {p[4]}'
61 |
--------------------------------------------------------------------------------
/windyquery/validator/create.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .constraint import Constraint
3 |
4 |
class Create(Constraint):
    """Grammar for one item of a CREATE TABLE body: a column definition,
    a (possibly named) table constraint, a LIKE clause, or an index
    expression."""
    reserved = {
        **Constraint.reserved,
    }

    tokens = Constraint.tokens

    precedence = Constraint.precedence

    # rules
    _start = 'create'

    @_rule('create : column')
    def p_create_column(self, p):
        p[0] = p[1]

    @_rule('create : constraintname tableconstraint')
    def p_create_tableconstraint(self, p):
        p[0] = self.provider.new_glue(p[1], p[2])

    @_rule('create : LIKE fullname')
    def p_create_like(self, p):
        p[0] = f'LIKE {p[2]}'

    @_rule('create : fullname_json')
    def p_create_index(self, p):
        p[0] = p[1]

    @_rule('create : LPAREN fullname_json RPAREN')
    def p_create_index_paren(self, p):
        # wrap without extra spaces: '(<expr>)'
        p[0] = self.provider.new_glue('(', p[2], ')').separator('')
36 |
--------------------------------------------------------------------------------
/windyquery/validator/empty.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class Empty:
    """Mixin providing the 'empty' production (matches nothing)."""
    reserved = {}
    tokens = ()

    @_rule('empty :')
    def p_empty(self, p):
        # deliberately yields None so optional clauses can test truthiness
        pass
11 |
--------------------------------------------------------------------------------
/windyquery/validator/expr.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .field import Field
3 | from .operators.operator import Operator
4 |
5 |
class Expr(Field, Operator):
    """Grammar for expressions: binary operators, IS [NOT] [DISTINCT FROM],
    and [NOT] IN lists over field terms."""
    reserved = {**Field.reserved, **Operator.reserved}
    tokens = Field.tokens + Operator.tokens
    precedence = Field.precedence + Operator.precedence

    # rules
    _start = 'expr'

    @_rule('''expr : expr EQ expr
                   | expr NE expr
                   | expr NN expr
                   | expr LE expr
                   | expr LS expr
                   | expr GE expr
                   | expr GT expr
                   | expr IS expr
                   | expr LIKE expr
                   | expr ILIKE expr
                   | expr DPIPE expr
                   | expr MINUS expr
                   | expr PLUS expr
                   | expr MULTI expr
                   | expr DIVIDE expr
                   | expr MODULAR expr''')
    def p_expr(self, p):
        # upper-case the operator text so keyword operators (is, like,
        # ilike) render uniformly
        p[0] = self.provider.new_biop(p[2].upper(), p[1], p[3])

    @_rule('expr : expr IS NOT expr')
    def p_expr2(self, p):
        p[0] = self.provider.new_biop('IS NOT', p[1], p[4])

    @_rule('expr : expr IS DISTINCT FROM expr')
    def p_expr3(self, p):
        p[0] = self.provider.new_biop('IS DISTINCT FROM', p[1], p[5])

    @_rule('expr : expr IS NOT DISTINCT FROM expr')
    def p_expr4(self, p):
        p[0] = self.provider.new_biop('IS NOT DISTINCT FROM', p[1], p[6])

    @_rule('expr : expr IN LPAREN exprs RPAREN')
    def p_expr5(self, p):
        p[0] = self.provider.new_biop('IN', p[1], p[4])

    @_rule('expr : expr NOT IN LPAREN exprs RPAREN')
    def p_expr6(self, p):
        p[0] = self.provider.new_biop('NOT IN', p[1], p[5])

    @_rule('exprs : expr')
    def p_exprs_expr(self, p):
        p[0] = self.provider.new_fieldlist(p[1])

    @_rule('exprs : exprs COMMA expr')
    def p_exprs_comma_expr(self, p):
        p[0] = p[1].append(p[3])

    @_rule('expr : field')
    def p_expr_field(self, p):
        p[0] = p[1]
64 |
--------------------------------------------------------------------------------
/windyquery/validator/field.py:
--------------------------------------------------------------------------------
1 | from ._base import Base, _rule
2 | from .fullname_json import FullnameJson
3 | from .values.text_val import TextVal
4 | from .values.null import NULL
5 | from .values.holder import Holder
6 | from .values.true import TRUE
7 | from .values.false import FALSE
8 |
9 |
class Field(FullnameJson, TextVal, NULL, Holder, TRUE, FALSE):
    """Grammar for a single field term: literals (*, number, text, NULL,
    TRUE/FALSE), a '?' placeholder, or a (possibly JSON-pathed) name."""
    reserved = {**Base.reserved, **TextVal.reserved,
                **FullnameJson.reserved, **NULL.reserved, **Holder.reserved, **TRUE.reserved, **FALSE.reserved}
    tokens = Base.tokens + TextVal.tokens + \
        FullnameJson.tokens + NULL.tokens + Holder.tokens + TRUE.tokens + FALSE.tokens
    precedence = FullnameJson.precedence

    # Tokens

    # rules
    _start = 'field'

    @_rule('''field : STAR
                    | NUMBER
                    | TEXTVAL
                    | NULL
                    | TRUE
                    | FALSE''')
    def p_field_items(self, p):
        p[0] = self.provider.new_record(p[1])

    @_rule('field : HOLDER')
    def p_field_param(self, p):
        # a '?' becomes a numbered parameter placeholder
        p[0] = self.provider.new_param()

    @_rule('field : fullname_json')
    def p_field_name(self, p):
        p[0] = p[1]
--------------------------------------------------------------------------------
/windyquery/validator/fullname.py:
--------------------------------------------------------------------------------
1 | from ._base import Base, _rule
2 | from .name import Name
3 | from .empty import Empty
4 | from .operators.dot import Dot
5 |
6 |
class Fullname(Base, Name, Empty, Dot):
    """Grammar for qualified names: NAME(.NAME)* with optional trailing '.*'.

    Bare identifiers are quoted/escaped; names the caller already
    double-quoted pass through verbatim.
    """
    reserved = {**Base.reserved, **Name.reserved,
                **Empty.reserved, **Dot.reserved}
    tokens = Base.tokens + Name.tokens + \
        Empty.tokens + Dot.tokens + ('QUOTED_NAME', 'STAR',)

    # Tokens
    t_QUOTED_NAME = r'"(?:.(?!"))*.?"'
    t_STAR = r'\*'

    # rules
    _start = 'fullname'

    @_rule('fullname : unitname dotname')
    def p_fullname(self, p):
        # join the head name and its dotted tail with no separator
        p[0] = self.provider.new_glue(p[1], p[2]).separator('')

    @_rule('unitname : NAME')
    def p_unitname_name(self, p):
        # bare identifiers get quoted/escaped
        p[0] = self.sanitize_identifier(p[1])

    @_rule('unitname : QUOTED_NAME')
    def p_unitname_quoted_name(self, p):
        # already double-quoted by the caller; pass through verbatim
        p[0] = p[1]

    @_rule('dotname : DOT unitname dotname')
    def p_dotname_dot(self, p):
        p[0] = self.provider.new_glue('.', p[2], p[3]).separator('')

    @_rule('dotname : DOT STAR')
    def p_dotname_star(self, p):
        # plain literal (was a placeholder-free f-string)
        p[0] = '.*'

    @_rule('dotname : empty')
    def p_dotname_empty(self, p):
        p[0] = None
43 |
--------------------------------------------------------------------------------
/windyquery/validator/fullname_json.py:
--------------------------------------------------------------------------------
1 | from windyquery.provider._base import JSONB
2 | from ._base import _rule
3 | from .fullname import Fullname
4 | from .number import Number
5 | from .operators.minus import Minus
6 |
7 |
8 | class FullnameJson(Fullname, Number, Minus):
9 | reserved = {**Fullname.reserved, **Number.reserved, **Minus.reserved}
10 | tokens = Fullname.tokens + Number.tokens + \
11 | Minus.tokens + ('ARROW', 'DARROW',)
12 | precedence = Fullname.precedence + Minus.precedence
13 |
14 | # Tokens
15 | t_ARROW = r'->'
16 | t_DARROW = r'->>'
17 |
18 | # rules
19 | _start = 'fullname_json'
20 |
21 | @_rule('fullname_json : fullname attribute')
22 | def p_fullname_json(self, p):
23 | p[0] = self.provider.new_record(f'{p[1]}{p[2]}', p[2].kind)
24 | p[0].path = [p[1]] + p[2].path
25 |
26 | @_rule('attribute : ARROW NAME attribute')
27 | def p_attribute(self, p):
28 | p2 = self.sanitize_literal(p[2])
29 | if p[3].value:
30 | kind = p[3].kind
31 | path = [p[2]] + p[3].path
32 | else:
33 | kind = JSONB
34 | path = [p[2]]
35 | p[0] = self.provider.new_record(f'->{p2}{p[3]}', kind)
36 | p[0].path = path
37 |
38 | @_rule('attribute : ARROW NUMBER attribute')
39 | def p_attribute_num(self, p):
40 | if p[3].value:
41 | kind = p[3].kind
42 | path = [f'{p[2]}'] + p[3].path
43 | else:
44 | kind = JSONB
45 | path = [f'{p[2]}']
46 | p[0] = self.provider.new_record(f'->{p[2]}{p[3]}', kind)
47 | p[0].path = path
48 |
49 | @_rule('attribute : ARROW MINUS NUMBER attribute')
50 | def p_attribute_minus_num(self, p):
51 | if p[4].value:
52 | kind = p[4].kind
53 | path = [f'-{p[3]}'] + p[4].path
54 | else:
55 | kind = JSONB
56 | path = [f'-{p[3]}']
57 | p[0] = self.provider.new_record(f'->-{p[3]}{p[4]}', kind)
58 | p[0].path = path
59 |
60 | @_rule('attribute : DARROW NAME')
61 | def p_attribute_darrow(self, p):
62 | p2 = self.sanitize_literal(p[2])
63 | p[0] = self.provider.new_record(f'->>{p2}')
64 | p[0].path = [p[2]]
65 |
66 | @_rule('attribute : DARROW NUMBER')
67 | def p_attribute_darrow_num(self, p):
68 | p[0] = self.provider.new_record(f'->>{p[2]}')
69 | p[0].path = [f'{p[2]}']
70 |
71 | @_rule('attribute : DARROW MINUS NUMBER')
72 | def p_attribute_darrow_minus_num(self, p):
73 | p[0] = self.provider.new_record(f'->>-{p[3]}')
74 | p[0].path = [f'-{p[3]}']
75 |
76 | @ _rule('attribute : empty')
77 | def p_attribute_empty(self, p):
78 | p[0] = self.provider.new_record('')
79 | p[0].path = []
80 |
--------------------------------------------------------------------------------
/windyquery/validator/join.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .expr import Expr
3 |
4 |
class Join(Expr):
    """Grammar for JOIN ... ON conditions: expressions combined with
    AND/OR and parentheses."""
    reserved = {**Expr.reserved,
                'AND': 'AND',
                'OR': 'OR'}
    tokens = Expr.tokens + ('AND', 'OR')

    # Tokens
    t_AND = r'AND'
    t_OR = r'OR'

    # rules
    _start = 'join'

    # OR binds looser than AND, matching SQL
    precedence = Expr.precedence + (
        ('left', 'OR'),
        ('left', 'AND'),
    )

    @_rule('''join : join AND join
                   | join OR join''')
    def p_join_and(self, p):
        p[0] = self.provider.new_glue(p[1], f'{p[2].upper()}', p[3])

    @_rule('join : LPAREN join RPAREN')
    def p_join_group(self, p):
        p[0] = self.provider.new_parentheses(p[2])

    @_rule('join : expr')
    def p_join_op(self, p):
        p[0] = p[1]
35 |
--------------------------------------------------------------------------------
/windyquery/validator/lextab.py:
--------------------------------------------------------------------------------
1 | # lextab.py. This file automatically created by PLY (version 3.11). Don't edit!
2 | _tabversion = '3.10'
3 | _lextokens = set(('ARROW', 'COMMA', 'DARROW', 'DISTINCT', 'DIVIDE', 'DO', 'DOT', 'DPIPE', 'EQ', 'FALSE', 'FROM', 'GE', 'GT', 'HOLDER', 'ILIKE', 'IN', 'IS', 'LE', 'LIKE', 'LPAREN', 'LS', 'MINUS', 'MODULAR', 'MULTI', 'NAME', 'NE', 'NN', 'NOT', 'NOTHING', 'NULL', 'NUMBER', 'PLUS', 'QUOTED_NAME', 'RPAREN', 'SET', 'STAR', 'TEXTVAL', 'TRUE', 'UPDATE'))
4 | _lexreflags = 64
5 | _lexliterals = ''
6 | _lexstateinfo = {'INITIAL': 'inclusive'}
7 | _lexstatere = {'INITIAL': [('(?P[a-zA-Z_][a-zA-Z0-9_]*)|(?P\\d+)|(?P\'\'|(\'|E\')(?:.(?!(?"(?:.(?!"))*.?")|(?PDISTINCT)|(?PILIKE)|(?P\\|\\|)|(?PFROM)|(?PLIKE)|(?PNULL)|(?PTRUE)|(?P->>)|(?P\\!=)|(?PNOT)|(?P->)|(?P\\.)|(?P>=)|(?P\\?)|(?PIN)|(?PIS)|(?P<=)|(?P\\()|(?P\\*)|(?P<>)|(?P\\+)|(?P\\))|(?P\\*)|(?P,)|(?P/)|(?P=)|(?P>)|(?P<)|(?P-)|(?P%)', [None, ('t_NAME', 'NAME'), ('t_NUMBER', 'NUMBER'), ('t_TEXTVAL', 'TEXTVAL'), None, (None, 'QUOTED_NAME'), (None, 'DISTINCT'), (None, 'ILIKE'), (None, 'DPIPE'), (None, 'FROM'), (None, 'LIKE'), (None, 'NULL'), (None, 'TRUE'), (None, 'DARROW'), (None, 'NE'), (None, 'NOT'), (None, 'ARROW'), (None, 'DOT'), (None, 'GE'), (None, 'HOLDER'), (None, 'IN'), (None, 'IS'), (None, 'LE'), (None, 'LPAREN'), (None, 'MULTI'), (None, 'NN'), (None, 'PLUS'), (None, 'RPAREN'), (None, 'STAR'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'EQ'), (None, 'GT'), (None, 'LS'), (None, 'MINUS'), (None, 'MODULAR')])]}
8 | _lexstateignore = {'INITIAL': ' \t\n'}
9 | _lexstateerrorf = {'INITIAL': 't_error'}
10 | _lexstateeoff = {}
11 |
--------------------------------------------------------------------------------
/windyquery/validator/limit.py:
--------------------------------------------------------------------------------
1 | from ._base import Base, _rule
2 | from .number import Number
3 |
4 |
class Limit(Base, Number):
    """Grammar for a LIMIT clause: a single integer literal.

    NOTE(review): the parsed NUMBER value is discarded here and a numbered
    placeholder is emitted instead — presumably the caller has already
    placed the value into ``ctx.args``; verify against the collector.
    """
    reserved = {**Base.reserved, **Number.reserved}
    tokens = Base.tokens + Number.tokens

    # rules
    _start = 'limit'

    @_rule('limit : NUMBER')
    def p_limit(self, p):
        param = self.provider.new_param()
        p[0] = self.provider.new_glue('LIMIT', param)
16 |
--------------------------------------------------------------------------------
/windyquery/validator/name.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class Name:
    """Token mixin: the NAME token for bare identifiers."""
    reserved = {}
    tokens = ('NAME',)

    # Tokens
    # (the regex is installed as __doc__ by @_rule, which PLY reads)
    @_rule(r'[a-zA-Z_][a-zA-Z0-9_]*')
    def t_NAME(self, t):
        # promote to a keyword token when the upper-cased word is reserved
        t.type = self.reserved.get(t.value.upper(), 'NAME')
        return t
13 |
--------------------------------------------------------------------------------
/windyquery/validator/number.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 |
3 |
class Number:
    """Token mixin: the NUMBER token for unsigned integer literals."""
    reserved = {}
    tokens = ('NUMBER',)

    # Tokens
    @_rule(r'\d+')
    def t_NUMBER(self, t):
        # token value becomes an int for downstream rules
        t.value = int(t.value)
        return t
13 |
--------------------------------------------------------------------------------
/windyquery/validator/offset.py:
--------------------------------------------------------------------------------
1 | from ._base import Base, _rule
2 | from .number import Number
3 |
4 |
class Offset(Base, Number):
    """Grammar for an OFFSET clause: a single integer literal.

    As with Limit, the literal is replaced by a numbered placeholder;
    the bound value is expected to be supplied via the Ctx.
    """
    reserved = {**Base.reserved, **Number.reserved}
    tokens = Base.tokens + Number.tokens

    # rules
    _start = 'offset'

    @_rule('offset : NUMBER')
    def p_offset(self, p):
        param = self.provider.new_param()
        p[0] = self.provider.new_glue('OFFSET', param)
16 |
--------------------------------------------------------------------------------
/windyquery/validator/operators/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluerelay/windyquery/44881b3f58a1913442074352d117beb434e27443/windyquery/validator/operators/__init__.py
--------------------------------------------------------------------------------
/windyquery/validator/operators/bracket.py:
--------------------------------------------------------------------------------
class Bracket:
    """Token mixin: square brackets."""
    reserved = {}
    tokens = ('LBRACKET', 'RBRACKET',)

    # Tokens
    t_LBRACKET = r'\['
    t_RBRACKET = r'\]'
8 |
--------------------------------------------------------------------------------
/windyquery/validator/operators/comma.py:
--------------------------------------------------------------------------------
class Comma:
    """Token mixin: comma separator (left-associative for list rules)."""
    reserved = {}
    tokens = ('COMMA',)

    # Tokens
    t_COMMA = r','

    precedence = (
        ('left', 'COMMA'),
    )
11 |
--------------------------------------------------------------------------------
/windyquery/validator/operators/dot.py:
--------------------------------------------------------------------------------
class Dot:
    """Lexer mixin for the '.' token (qualified-name separator)."""

    reserved = {}
    tokens = ('DOT',)

    # Token regexes
    t_DOT = r'\.'

    # DOT needs no precedence entries of its own.
    precedence = ()
9 |
--------------------------------------------------------------------------------
/windyquery/validator/operators/equal.py:
--------------------------------------------------------------------------------
class Equal:
    """Lexer mixin for the '=' token; right-associative like assignment."""

    reserved = {}
    tokens = ('EQ',)

    # Token regexes
    t_EQ = r'='

    precedence = (
        ('right', 'EQ'),
    )
11 |
--------------------------------------------------------------------------------
/windyquery/validator/operators/minus.py:
--------------------------------------------------------------------------------
class Minus:
    """Lexer mixin for the binary '-' operator token."""

    reserved = {}
    tokens = ('MINUS',)

    # Token regexes
    t_MINUS = r'-'

    # Left-associative, as subtraction is in SQL.
    precedence = (
        ('left', 'MINUS'),
    )
11 |
--------------------------------------------------------------------------------
/windyquery/validator/operators/negate.py:
--------------------------------------------------------------------------------
class Negate:
    """Lexer mixin for the NOT keyword token."""

    # NOT is a reserved word so identifier lexing maps it to its token.
    reserved = {
        'NOT': 'NOT',
    }
    tokens = ('NOT',)

    # Token regexes
    t_NOT = r'NOT'

    # Right-associative, matching SQL's prefix negation.
    precedence = (
        ('right', 'NOT'),
    )
13 |
--------------------------------------------------------------------------------
/windyquery/validator/operators/operator.py:
--------------------------------------------------------------------------------
1 | from .comma import Comma
2 | from .equal import Equal
3 | from .paren import Paren
4 | from .negate import Negate
5 |
6 |
class Operator(Comma, Equal, Paren, Negate):
    """Aggregate lexer mixin for WHERE/expression operators.

    Combines the comma/equal/paren/negate mixins and adds SQL keyword
    operators (IN, IS, LIKE, ...), comparison operators, string
    concatenation and arithmetic.
    """

    reserved = {
        **Comma.reserved,
        **Equal.reserved,
        **Paren.reserved,
        **Negate.reserved,
        'IN': 'IN',
        'IS': 'IS',
        'LIKE': 'LIKE',
        'ILIKE': 'ILIKE',
        'DISTINCT': 'DISTINCT',
        'FROM': 'FROM',
    }

    tokens = (
        Comma.tokens
        + Equal.tokens
        + Paren.tokens
        + Negate.tokens
        + ('IN', 'IS', 'LIKE', 'ILIKE', 'DISTINCT', 'FROM',
           'LE', 'LS', 'GE', 'GT', 'NE', 'NN', 'DPIPE',
           'PLUS', 'MULTI', 'DIVIDE', 'MODULAR',)
    )

    # Token regexes (PLY orders simple string rules by regex length,
    # so e.g. '<=' is tried before '<').
    t_IN = r'IN'
    t_IS = r'IS'
    t_LIKE = r'LIKE'
    t_ILIKE = r'ILIKE'
    t_DISTINCT = r'DISTINCT'
    t_FROM = r'FROM'
    t_LE = r'<='
    t_LS = r'<'
    t_GE = r'>='
    t_GT = r'>'
    t_NE = r'\!='
    t_NN = r'<>'
    t_DPIPE = r'\|\|'
    t_PLUS = r'\+'
    t_MULTI = r'\*'
    t_DIVIDE = r'/'
    t_MODULAR = r'%'

    # All operators added here are left-associative; EQ and NOT keep the
    # associativity declared by their own mixins.
    precedence = (
        Comma.precedence
        + Equal.precedence
        + Negate.precedence
        + (
            ('left', 'IN'),
            ('left', 'IS'),
            ('left', 'LIKE'),
            ('left', 'ILIKE'),
            ('left', 'LE'),
            ('left', 'LS'),
            ('left', 'GE'),
            ('left', 'GT'),
            ('left', 'NE'),
            ('left', 'NN'),
            ('left', 'DPIPE'),
            ('left', 'DISTINCT'),
            ('left', 'FROM'),
            ('left', 'PLUS'),
            ('left', 'MULTI'),
            ('left', 'DIVIDE'),
            ('left', 'MODULAR'),
        )
    )
66 |
--------------------------------------------------------------------------------
/windyquery/validator/operators/paren.py:
--------------------------------------------------------------------------------
class Paren:
    """Lexer mixin for the '(' and ')' grouping tokens."""

    reserved = {}
    tokens = (
        'LPAREN',
        'RPAREN',
    )

    # Token regexes
    t_LPAREN = r'\('
    t_RPAREN = r'\)'
8 |
--------------------------------------------------------------------------------
/windyquery/validator/order_by.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .field import Field
3 |
4 |
class OrderBy(Field):
    """Validator for a single ORDER BY item: a field optionally followed
    by an ASC or DESC direction keyword.
    """
    reserved = {**Field.reserved, 'ASC': 'ASC', 'DESC': 'DESC'}
    tokens = Field.tokens + ('ASC', 'DESC',)

    # Tokens
    t_ASC = r'ASC'
    t_DESC = r'DESC'

    # rules
    _start = 'order_by'

    @_rule('order_by : field')
    def p_order_by(self, p):
        # Bare field: no direction keyword is emitted, so the database's
        # default ordering applies.
        p[0] = p[1]

    @_rule('''order_by : field ASC
                | field DESC''')
    def p_order_by_dir(self, p):
        # Render "<field> ASC" or "<field> DESC" as a single record.
        p[0] = self.provider.new_record(f'{p[1]} {p[2]}')
24 |
--------------------------------------------------------------------------------
/windyquery/validator/schema.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .fullname import Fullname
3 | from .empty import Empty
4 | from .operators.comma import Comma
5 | from .operators.negate import Negate
6 |
7 |
class Schema(Fullname, Empty, Comma, Negate):
    """Validator for schema-statement targets.

    Recognizes two shapes:
      * ``TABLE [IF [NOT] EXISTS] [ONLY] name[, name ...]``
      * ``[UNIQUE] INDEX [CONCURRENTLY] [IF [NOT] EXISTS] [names]
        [ON [ONLY] fullname] [USING method]`` -- the variant without
        ``ON ...`` is used when dropping an index.
    """
    reserved = {**Fullname.reserved, **Empty.reserved, **Comma.reserved, **Negate.reserved,
                'TABLE': 'TABLE',
                'IF': 'IF',
                'EXISTS': 'EXISTS',
                'ONLY': 'ONLY',
                'ON': 'ON',
                'INDEX': 'INDEX',
                'UNIQUE': 'UNIQUE',
                'CONCURRENTLY': 'CONCURRENTLY',
                'USING': 'USING',
                'BTREE': 'BTREE',
                'HASH': 'HASH',
                'GIST': 'GIST',
                'SPGIST': 'SPGIST',
                'GIN': 'GIN',
                'BRIN': 'BRIN',
                }
    tokens = Fullname.tokens + Empty.tokens + Comma.tokens + Negate.tokens + \
        ('TABLE', 'IF', 'EXISTS', 'ONLY', 'ON', 'INDEX', 'UNIQUE', 'CONCURRENTLY', 'USING',
         'BTREE', 'HASH', 'GIST', 'SPGIST', 'GIN', 'BRIN',)

    precedence = Comma.precedence

    # '.' separates schema-qualified names (e.g. public.users).
    t_DOT = r'\.'

    # rules
    _start = 'schema'

    @_rule('schema : TABLE exists onlys names')
    def p_schema_table(self, p):
        # TABLE [IF [NOT] EXISTS] [ONLY] name[, ...]
        p[0] = self.provider.new_glue('TABLE', p[2], p[3], p[4])

    @_rule('schema : optunique INDEX optconcurrently exists optnames ON onlys fullname optusing')
    def p_schema_index(self, p):
        # Index-creation form: index names before ON, indexed table after.
        p[0] = self.provider.new_glue(
            p[1], 'INDEX', p[3], p[4], p[5], 'ON', p[7], p[8], p[9])

    @_rule('schema : optunique INDEX optconcurrently exists optnames')
    def p_schema_index_for_drop(self, p):
        # Index form without ON ... -- used by DROP INDEX.
        p[0] = self.provider.new_glue(
            p[1], 'INDEX', p[3], p[4], p[5])

    @_rule('optunique : empty')
    def p_optunique_empty(self, p):
        p[0] = None

    @_rule('optunique : UNIQUE')
    def p_optunique(self, p):
        p[0] = 'UNIQUE'

    @_rule('optconcurrently : empty')
    def p_optconcurrently_empty(self, p):
        p[0] = None

    @_rule('optconcurrently : CONCURRENTLY')
    def p_optconcurrently(self, p):
        p[0] = 'CONCURRENTLY'

    @_rule('optnames : empty')
    def p_optname_empty(self, p):
        p[0] = None

    @_rule('optnames : names')
    def p_optname(self, p):
        p[0] = p[1]

    @_rule('optusing : empty')
    def p_optusing_empty(self, p):
        p[0] = None

    @_rule('''optusing : USING BTREE
                | USING HASH
                | USING GIST
                | USING SPGIST
                | USING GIN
                | USING BRIN''')
    def p_optusing(self, p):
        # Only the index access methods PostgreSQL supports are accepted.
        p[0] = f'USING {p[2]}'

    @_rule('exists : empty')
    def p_exists_empty(self, p):
        p[0] = None

    @_rule('exists : IF EXISTS')
    def p_exists_exists(self, p):
        p[0] = 'IF EXISTS'

    @_rule('exists : IF NOT EXISTS')
    def p_exists_not_exists(self, p):
        p[0] = 'IF NOT EXISTS'

    @_rule('onlys : empty')
    def p_onlys_empty(self, p):
        p[0] = None

    @_rule('onlys : ONLY')
    def p_onlys_only(self, p):
        p[0] = 'ONLY'

    @_rule('names : fullname')
    def p_names_name(self, p):
        # Start a ", "-separated list of names.
        p[0] = self.provider.new_glue(p[1]).separator(', ')

    @_rule('names : names COMMA fullname')
    def p_names_comma_name(self, p):
        p[0] = p[1].append(p[3])
115 |
--------------------------------------------------------------------------------
/windyquery/validator/select.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .alias import Alias
3 | from .field import Field
4 |
5 |
class Select(Field, Alias):
    """Validator for a SELECT-list item: a field, optionally aliased
    implicitly ("field name") or explicitly ("field AS name").
    """

    reserved = {**Field.reserved, **Alias.reserved}
    tokens = Field.tokens + Alias.tokens

    # rules
    _start = 'select'

    @_rule('select : field')
    def p_select_field(self, p):
        # A bare field passes through unchanged.
        p[0] = p[1]

    @_rule('select : field NAME')
    def p_select_field_as_name(self, p):
        # Implicit alias form (no AS keyword).
        alias = self.sanitize_identifier(p[2])
        p[0] = self.provider.new_glue(p[1], alias)

    @_rule('select : field AS NAME')
    def p_select_field_as_name2(self, p):
        # Explicit alias form with the AS keyword.
        p[0] = self.provider.new_glue(p[1], 'AS', self.sanitize_identifier(p[3]))
26 |
--------------------------------------------------------------------------------
/windyquery/validator/table.py:
--------------------------------------------------------------------------------
1 | from ._base import _rule
2 | from .fullname import Fullname
3 | from .alias import Alias
4 |
5 |
class Table(Fullname, Alias):
    """Validator for a table reference: a (possibly schema-qualified)
    name with an optional implicit or explicit alias.
    """

    reserved = {**Fullname.reserved, **Alias.reserved}
    tokens = Fullname.tokens + Alias.tokens

    # rules
    _start = 'table'

    @_rule('table : fullname')
    def p_table_name(self, p):
        # Plain table name, no alias.
        p[0] = self.provider.new_record(p[1])

    @_rule('table : fullname NAME')
    def p_table_name_as(self, p):
        # Implicit alias form (no AS keyword).
        alias = self.sanitize_identifier(p[2])
        p[0] = self.provider.new_glue(p[1], alias)

    @_rule('table : fullname AS NAME')
    def p_table_name_as2(self, p):
        # Explicit alias form with the AS keyword.
        p[0] = self.provider.new_glue(p[1], 'AS', self.sanitize_identifier(p[3]))
26 |
--------------------------------------------------------------------------------
/windyquery/validator/update.py:
--------------------------------------------------------------------------------
1 | import json
2 | from ._base import _rule
3 | from .expr import Expr
4 | from windyquery.provider._base import JSONB
5 | from windyquery.utils import unquote_literal
6 |
7 |
class Update(Expr):
    """Validator for UPDATE SET items ("col = expr").

    A plain assignment is emitted as "col <op> expr".  An assignment whose
    left side is a JSONB path (e.g. data->'a'->>'b') is rewritten into a
    jsonb_set(...) call on the root column.
    """
    reserved = {**Expr.reserved, }
    tokens = Expr.tokens
    precedence = Expr.precedence

    # rules
    _start = 'updates'

    @_rule('updates : update')
    def p_updates_update(self, p):
        p[0] = p[1]

    @_rule('updates : updates COMMA update')
    def p_updates_comma_update(self, p):
        # Multiple SET items are joined with ", ".
        p[0] = self.provider.new_glue(p[1], p[3]).separator(', ')

    @_rule('''update : field EQ expr''')
    def p_update(self, p):
        if p[2] == '=' and p[1].kind == JSONB:
            # Rewrite "col->path = val" into
            # "col = jsonb_set(col, '{path}', 'val')".
            jsonbCol = p[1].path[0]
            # NOTE(review): path elements are assumed to be sanitized
            # identifiers upstream -- verify they cannot contain quotes.
            jsonbPath = '{' + ', '.join(p[1].path[1:]) + '}'
            jsonbVal = p[3].value
            if isinstance(jsonbVal, str):
                jsonbVal = unquote_literal(jsonbVal)
            jsonbVal = json.dumps(jsonbVal)
            # Bug fix: escape embedded single quotes so the JSON text stays
            # a valid SQL string literal (a value such as "it's" previously
            # produced broken SQL inside the '...' literal below).
            jsonbVal = jsonbVal.replace("'", "''")
            p[0] = self.provider.new_glue(
                jsonbCol, p[2], f'jsonb_set({jsonbCol}, \'{jsonbPath}\', \'{jsonbVal}\')')
        else:
            # Normal column assignment; keyword operators are upper-cased.
            p[0] = self.provider.new_glue(p[1], p[2].upper(), p[3])
37 |
--------------------------------------------------------------------------------
/windyquery/validator/value_list.py:
--------------------------------------------------------------------------------
1 | from ._base import Base, _rule
2 | from .number import Number
3 | from .values.text_val import TextVal
4 | from .values.null import NULL
5 | from .values.default import Default
6 | from .values.holder import Holder
7 | from .values.true import TRUE
8 | from .values.false import FALSE
9 | from .operators.comma import Comma
10 |
11 |
class ValueList(Base, Number, TextVal, NULL, Default, Holder, TRUE, FALSE, Comma):
    """Validator for a comma-separated VALUES list.

    Each item is a literal (NUMBER, TEXTVAL, TRUE/FALSE, NULL, DEFAULT) or
    a '?' placeholder that becomes a bound parameter.
    """
    reserved = {**Base.reserved, **Number.reserved, **TextVal.reserved,
                **NULL.reserved, **Default.reserved, **Holder.reserved,
                **TRUE.reserved, **FALSE.reserved, **Comma.reserved}
    tokens = Base.tokens + Number.tokens + TextVal.tokens + \
        NULL.tokens + Default.tokens + Holder.tokens + Comma.tokens + \
        TRUE.tokens + FALSE.tokens

    # Tokens

    # rules
    _start = 'values'

    @_rule('''value : DEFAULT
                | NUMBER
                | TEXTVAL
                | TRUE
                | FALSE
                | NULL''')
    def p_value_items(self, p):
        # Literal values are emitted verbatim as a record.
        p[0] = self.provider.new_record(p[1])

    @_rule('value : HOLDER')
    def p_value_holder(self, p):
        # '?' becomes a positional bound parameter.
        p[0] = self.provider.new_param()

    @_rule('values : value')
    def p_values_value(self, p):
        # Begin a ", "-separated list with the first value.
        p[0] = self.provider.new_glue(p[1]).separator(', ')

    @_rule('values : values COMMA value')
    def p_values_comma(self, p):
        p[0] = p[1].append(p[3])
45 |
--------------------------------------------------------------------------------
/windyquery/validator/values/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluerelay/windyquery/44881b3f58a1913442074352d117beb434e27443/windyquery/validator/values/__init__.py
--------------------------------------------------------------------------------
/windyquery/validator/values/default.py:
--------------------------------------------------------------------------------
class Default:
    """Lexer mixin for the DEFAULT keyword used in value lists."""

    # Reserved-word mapping so identifier lexing yields the DEFAULT token.
    reserved = {'DEFAULT': 'DEFAULT'}
    tokens = ('DEFAULT',)

    # Token regexes
    t_DEFAULT = r'DEFAULT'
7 |
--------------------------------------------------------------------------------
/windyquery/validator/values/false.py:
--------------------------------------------------------------------------------
class FALSE:
    """Lexer mixin for the FALSE boolean literal.

    Bug fix: the token rule attribute was previously named ``t_TRUE``
    (with pattern r'FALSE'), leaving the declared FALSE token without a
    matching lexer rule and colliding with the TRUE mixin's ``t_TRUE``
    when the mixins are combined.
    """
    reserved = {'FALSE': 'FALSE'}
    tokens = ('FALSE',)

    # Tokens
    t_FALSE = r'FALSE'
7 |
--------------------------------------------------------------------------------
/windyquery/validator/values/holder.py:
--------------------------------------------------------------------------------
class Holder:
    """Lexer mixin for the '?' placeholder token (bound parameter)."""

    reserved = {}
    tokens = ('HOLDER',)

    # Token regexes
    t_HOLDER = r'\?'
7 |
--------------------------------------------------------------------------------
/windyquery/validator/values/null.py:
--------------------------------------------------------------------------------
class NULL:
    """Lexer mixin for the NULL literal keyword."""

    # Reserved-word mapping so identifier lexing yields the NULL token.
    reserved = {'NULL': 'NULL'}
    tokens = ('NULL',)

    # Token regexes
    t_NULL = r'NULL'
7 |
--------------------------------------------------------------------------------
/windyquery/validator/values/text_val.py:
--------------------------------------------------------------------------------
1 | from .._base import _rule
2 | from windyquery.utils import prefix_E
3 |
4 |
5 | class TextVal:
6 | reserved = {}
7 | tokens = ('TEXTVAL',)
8 |
9 | @_rule(r"''|('|E')(?:.(?!(?