├── .gitignore ├── LICENSE ├── README ├── Readme.md ├── benchmarks ├── benchmark.py ├── profile_norm.py └── shuttle_data_example.py ├── norm ├── __init__.py ├── cached_property.py ├── connection.py ├── norm.py ├── norm_mssql.py ├── norm_myssql_connector.py ├── norm_psycopg2.py ├── norm_pymssql.py ├── norm_sqlalchemy.py ├── norm_sqlite3.py └── rows.py ├── poetry.lock ├── pyproject.toml ├── pytest.ini ├── tests ├── test_connection.py ├── test_norm.py ├── test_pymssql.py └── test_rows.py └── tox.ini /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.pyc 3 | *.pyo 4 | \#*\# 5 | *.DS_Store 6 | .coverage 7 | htmlcov/ 8 | .cache/ 9 | norm.egg-info 10 | dist 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011 Justin Van Winkle (justin.vanwinkle@gmail.com) 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions 6 | are met: 7 | 1. Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | 2. Redistributions in binary form must reproduce the above copyright 10 | notice, this list of conditions and the following disclaimer in the 11 | documentation and/or other materials provided with the distribution. 12 | 3. The name of the author may not be used to endorse or promote products 13 | derived from this software without specific prior written permission. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 16 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 17 | OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
18 | IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 19 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 20 | NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 21 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 22 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 24 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /README: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/justinvanwinkle/Norm/dd8c1d8272913d7bee7e1e02bb85f2013b8c28a6/README -------------------------------------------------------------------------------- /Readme.md: -------------------------------------------------------------------------------- 1 | # norm is like sql 2 | 3 | 4 | ## Query Generation 5 | The primary purpose of Norm is to make it easier to generate SQL. 6 | 7 | ### The SELECT class 8 | #### Basic queries 9 | 10 | ```python 11 | In [1]: from norm.norm_sqlite3 import SQLI_SELECT as SELECT 12 | 13 | In [2]: s = (SELECT('val') 14 | ...: .FROM('foos')) 15 | 16 | In [3]: print(s.query) 17 | SELECT val 18 | FROM foos; 19 | 20 | In [4]: s2 = s.WHERE(val = 5) 21 | 22 | In [5]: s2 = s2.SELECT('foos_id') 23 | 24 | In [6]: print(s2.query) 25 | SELECT val, 26 | foos_id 27 | FROM foos 28 | WHERE val = :val_bind_0; 29 | 30 | In [7]: print(s.query) 31 | SELECT val 32 | FROM foos; 33 | 34 | ``` 35 | 36 | Bind parameters can be automatically handled, for example in `.WHERE(val=5)` 37 | 38 | ```python 39 | In [8]: print(s2.binds) 40 | {'val_bind_0': 5} 41 | 42 | ``` 43 | 44 | Using .query and .binds separately lets you use norm wherever you can execute SQL. 
For example, with a SQLAlchemy Session object: 45 | 46 | ```python 47 | res = Session.execute(s.query, s.binds) 48 | ``` 49 | 50 | #### More powerful query generation 51 | In addition to the simple, static queries above, it is possible to add query clauses. 52 | 53 | ```python 54 | In [9]: print(s.query) 55 | SELECT val 56 | FROM foos; 57 | 58 | In [10]: s = s.WHERE('val * 2 = 4') 59 | 60 | In [11]: print(s.query) 61 | SELECT val 62 | FROM foos 63 | WHERE val * 2 = 4; 64 | 65 | In [12]: s = s.JOIN('bars', ON='foos.val = bars.bar_id') 66 | 67 | In [13]: print(s.query) 68 | SELECT val 69 | FROM foos 70 | JOIN bars 71 | ON foos.val = bars.bar_id 72 | WHERE val * 2 = 4; 73 | ``` 74 | 75 | Of course you can put it all together: 76 | ```python 77 | In [14]: s = (SELECT('val') 78 | ...: .FROM('foos') 79 | ...: .JOIN('bars', ON='foos.val = bars.bar_id') 80 | ...: .WHERE(val=5) 81 | ...: .WHERE('val * 2 = 4')) 82 | 83 | In [15]: print(s.query) 84 | SELECT val 85 | FROM foos 86 | JOIN bars 87 | ON foos.val = bars.bar_id 88 | WHERE val = :val_bind_0 AND 89 | val * 2 = 4; 90 | ``` 91 | 92 | Or you can evolve queries dynamically: 93 | 94 | ```python 95 | def get_users(cursor, user_ids, only_girls=False, minimum_age=0): 96 | s = (SELECT('first_name', 'age') 97 | .FROM('people') 98 | .WHERE('user_ids IN :user_ids') 99 | .bind(user_ids=user_ids)) 100 | if only_girls: 101 | s = s.WHERE(gender='f') 102 | if minimum_age: 103 | s = (s.WHERE('age >= :minimum_age') 104 | .bind(minimum_age=minimum_age)) 105 | return cursor.run_query(s) 106 | ``` 107 | 108 | You can also add `SELECT` statements dynamically. 
109 | 110 | ```python 111 | def get_users(cursor, user_ids, has_dog=False): 112 | s = (SELECT('p.first_name', 'p.age') 113 | .FROM('p.people') 114 | .WHERE('user_ids IN :user_ids') 115 | .bind(user_ids=user_ids)) 116 | 117 | if has_dog: 118 | s = (s.SELECT('d.dog_name', 'd.breed') 119 | .JOIN('dogs as d', ON='p.person_id = d.owner_id')) 120 | 121 | return cursor.run_query(s) 122 | ``` 123 | 124 | Calling methods on a query object does not change the object, it returns a new query object. 125 | 126 | In other words, query objects are immutable. This means it is always safe to create a base query and add clauses without modifying it. 127 | 128 | ```python 129 | 130 | _user_query = (SELECT('first_name', 'last_name') 131 | .FROM('people')) 132 | 133 | 134 | def get_old_people(conn, min_age=65): 135 | old_people_query = _user_query.WHERE('age > :min_age').bind(min_age=min_age) 136 | return conn.run_query(old_people_query) 137 | 138 | 139 | def get_karls(conn): 140 | karl_query = _user_query.WHERE(first_name='Karl') 141 | return conn.run_query(karl_query) 142 | ``` 143 | 144 | 145 | ### UPDATE, DELETE 146 | 147 | UPDATE and DELETE work basically the same as SELECT 148 | 149 | ```python 150 | In [1]: from norm import UPDATE 151 | 152 | In [2]: from norm import SELECT 153 | 154 | In [3]: fix_karls = (UPDATE('people') 155 | ...: .SET(first_name='Karl') 156 | ...: .WHERE(first_name='karl')) 157 | 158 | In [4]: print(fix_karls.query) 159 | UPDATE people 160 | SET first_name = %(first_name_bind)s 161 | WHERE first_name = %(first_name_bind_1)s; 162 | 163 | In [5]: print(fix_karls.binds) 164 | {'first_name_bind': 'Karl', 'first_name_bind_1': 'karl'} 165 | 166 | ``` 167 | 168 | ```python 169 | In [8]: from norm import DELETE 170 | 171 | In [9]: remove_karls = (DELETE('people') 172 | ...: .WHERE(first_name='Karl')) 173 | 174 | In [10]: print(remove_karls.query) 175 | DELETE FROM people 176 | WHERE first_name = %(first_name_bind_0)s; 177 | 178 | In [11]: print(remove_karls.binds) 179 | 
{'first_name_bind_0': 'Karl'} 180 | ``` 181 | 182 | ### INSERT 183 | Inserts just take dictionaries and treat them like rows. 184 | 185 | All the rows are inserted as one large INSERT statement with many bind parameters. This means that if your database or library doesn't support large numbers of bind parameters, you may have to break the rows you wish to insert into several batches. 186 | 187 | ```python 188 | rows = [dict(first_name='bob', last_name='dobs', age=132), 189 | dict(first_name='bill', last_name='gates', age=67), 190 | dict(first_name='steve', last_name='jobs', age=60), 191 | dict(first_name='bob', last_name='jones'), 192 | dict(first_name='mike', last_name='jones', age=15)] 193 | i = INSERT('people', rows) 194 | conn.execute(i) 195 | ``` 196 | 197 | The behavior for missing keys depends on the database/library norm backend you are using. For psycopg2/postgres it will fill in missing keys with DEFAULT. For most databases which do not provide an AsIs DBAPI wrapper, the default default is None (NULL). This can be overridden: 198 | 199 | ```python 200 | i = INSERT('people', default=AsIs('DEFAULT')) 201 | ``` 202 | 203 | This should not be used with a value like `5` or something, it is meant to be a way to specify the DEFAULT keyword for the library/database you are using. For psycopg2/postgresql, it will automatically fill in DEFAULT, using http://initd.org/psycopg/docs/extensions.html#psycopg2.extensions.AsIs For inferior databases there may not be a defined way to do this safely. To allow literal SQL to be included as part of an insert, there is `norm.NormAsIs`. 204 | 205 | 206 | ### WITH (Common Table Expressions) 207 | 208 | For `WITH`, a `WITH` object can be used to wrap other queries into CTE tables. The final query in the CTE is provided by calling the WITH instance. 
209 | 210 | ```python 211 | In [1]: from norm import WITH 212 | 213 | In [2]: from norm import SELECT 214 | 215 | In [3]: all_active_players = (SELECT('player_id') 216 | ...: .FROM('players') 217 | ...: .WHERE(status='ACTIVE')) 218 | 219 | In [4]: career_runs_scored = (SELECT('player_id', 220 | ...: 'SUM(runs_scored) AS total_runs') 221 | ...: .FROM('games') 222 | ...: .GROUP_BY('player_id')) 223 | 224 | In [5]: w = WITH(all_active_players=all_active_players, 225 | ...: career_runs_scored=career_runs_scored) 226 | 227 | In [6]: active_players_total_runs = (SELECT('crs.player_id AS player_id', 228 | ...: 'crs.total_runs AS total_runs') 229 | ...: .FROM('all_active_players aap') 230 | ...: .JOIN('career_runs_scored crs', 231 | ...: ON='crs.player_id = aap.player_id')) 232 | 233 | In [7]: w = w(active_players_total_runs) 234 | 235 | In [8]: print(w.query) 236 | WITH all_active_players AS 237 | (SELECT player_id 238 | FROM players 239 | WHERE status = %(status_bind_0)s), 240 | career_runs_scored AS 241 | (SELECT player_id, 242 | SUM(runs_scored) AS total_runs 243 | FROM games 244 | GROUP BY player_id) 245 | 246 | SELECT crs.player_id AS player_id, 247 | crs.total_runs AS total_runs 248 | FROM all_active_players aap 249 | JOIN career_runs_scored crs 250 | ON crs.player_id = aap.player_id; 251 | 252 | In [9]: outer_w = WITH(active_players_total_runs=w) 253 | 254 | In [10]: outer_w = outer_w(SELECT('aptr.player_id') 255 | ...: .FROM('active_players_total_runs aptr') 256 | ...: .WHERE('aptr.total_runs > 500')) 257 | 258 | In [11]: print(outer_w.query) 259 | WITH active_players_total_runs AS 260 | (WITH all_active_players AS 261 | (SELECT player_id 262 | FROM players 263 | WHERE status = %(status_bind_0)s), 264 | career_runs_scored AS 265 | (SELECT player_id, 266 | SUM(runs_scored) AS total_runs 267 | FROM games 268 | GROUP BY player_id) 269 | 270 | SELECT crs.player_id AS player_id, 271 | crs.total_runs AS total_runs 272 | FROM all_active_players aap 273 | JOIN 
career_runs_scored crs 274 | ON crs.player_id = aap.player_id) 275 | 276 | SELECT aptr.player_id 277 | FROM active_players_total_runs aptr 278 | WHERE aptr.total_runs > 500; 279 | 280 | # This example is a little contrived, there are obviously 281 | # better ways to do this query 282 | 283 | ``` 284 | 285 | ### LIMIT / OFFSET 286 | ```python 287 | In [1]: from norm import SELECT 288 | 289 | In [2]: s = (SELECT('FirstName as first_name', 290 | ...: 'LastName as last_name') 291 | ...: .FROM('people') 292 | ...: .LIMIT(1)) 293 | 294 | In [3]: print(s.query) 295 | SELECT FirstName as first_name, 296 | LastName as last_name 297 | FROM people 298 | LIMIT 1; 299 | 300 | In [5]: s = s.bind(my_limit=5) 301 | 302 | In [6]: print(s.query) 303 | SELECT FirstName as first_name, 304 | LastName as last_name 305 | FROM people 306 | LIMIT %(my_limit)s; 307 | 308 | In [7]: print(s.binds) 309 | {'my_limit': 5} 310 | 311 | ``` 312 | LIMIT and OFFSET can only appear in a given SQL statement one time. Rather than being built up like the SELECT columns, WHERE clauses, etc, the Norm query will take the final value or expression provided. 313 | 314 | ```python 315 | In [2]: s = (SELECT('FirstName as first_name', 316 | ...: 'LastName as last_name') 317 | ...: .FROM('people') 318 | ...: .LIMIT('%(my_limit)s')) 319 | 320 | In [3]: s = s.LIMIT(250) 321 | 322 | In [4]: print(s.query) 323 | SELECT FirstName as first_name, 324 | LastName as last_name 325 | FROM people 326 | LIMIT 250; 327 | 328 | In [5]: s = s.OFFSET(10) 329 | 330 | In [6]: print(s.query) 331 | SELECT FirstName as first_name, 332 | LastName as last_name 333 | FROM people 334 | LIMIT 250 335 | OFFSET 10; 336 | 337 | In [7]: s = s.OFFSET(99) 338 | 339 | In [8]: print(s.query) 340 | SELECT FirstName as first_name, 341 | LastName as last_name 342 | FROM people 343 | LIMIT 250 344 | OFFSET 99; 345 | ``` 346 | 347 | #### LIMIT vs. TOP 348 | 349 | While many SQL flavors prefer `LIMIT`, MS SQL Server favors `TOP`. 
350 | 351 | ```python 352 | In [1]: from norm.norm_pymssql import PYMSSQL_SELECT as SELECT 353 | 354 | In [2]: s = (SELECT('FirstName as first_name', 355 | ...: 'LastName as last_name') 356 | ...: .FROM('people') 357 | ...: .TOP(1)) 358 | 359 | In [3]: print(s.query) 360 | SELECT TOP 1 361 | FirstName as first_name, 362 | LastName as last_name 363 | FROM people; 364 | ``` 365 | 366 | ### GROUP BY, ORDER BY, HAVING, RETURNING 367 | These methods work much like WHERE in that they can be stacked. 368 | 369 | ```python 370 | In [4]: s = SELECT('u.user_id', 'u.first_name').FROM('users u') 371 | 372 | In [5]: print(s.query) 373 | SELECT u.user_id, 374 | u.first_name 375 | FROM users u; 376 | 377 | In [6]: s = SELECT('u.first_name').FROM('users u') 378 | 379 | In [7]: print(s.query) 380 | SELECT u.first_name 381 | FROM users u; 382 | 383 | In [8]: s = s.GROUP_BY('u.first_name') 384 | 385 | In [9]: s = s.SELECT('COUNT(*) AS cnt') 386 | 387 | In [10]: print(s.query) 388 | SELECT u.first_name, 389 | COUNT(*) AS cnt 390 | FROM users u 391 | GROUP BY u.first_name; 392 | 393 | In [11]: s = s.HAVING('COUNT(*) > 3') 394 | 395 | In [12]: print(s.query) 396 | SELECT u.first_name, 397 | COUNT(*) AS cnt 398 | FROM users u 399 | GROUP BY u.first_name 400 | HAVING COUNT(*) > 3; 401 | 402 | In [13]: s = s.ORDER_BY('COUNT(*)') 403 | 404 | In [14]: print(s.query) 405 | SELECT u.first_name, 406 | COUNT(*) AS cnt 407 | FROM users u 408 | GROUP BY u.first_name 409 | HAVING COUNT(*) > 3 410 | ORDER BY COUNT(*); 411 | 412 | In [15]: s = s.ORDER_BY('u.first_name') 413 | 414 | In [16]: print(s.query) 415 | SELECT u.first_name, 416 | COUNT(*) AS cnt 417 | FROM users u 418 | GROUP BY u.first_name 419 | HAVING COUNT(*) > 3 420 | ORDER BY COUNT(*), 421 | u.first_name; 422 | 423 | 424 | In [17]: u = UPDATE('users').SET(first_name='Bob').WHERE(first_name='Robert') 425 | 426 | In [18]: print(u.query) 427 | UPDATE users 428 | SET first_name = %(first_name_bind)s 429 | WHERE first_name = 
%(first_name_bind_1)s; 430 | 431 | In [19]: print(u.binds) 432 | {'first_name_bind': 'Bob', 'first_name_bind_1': 'Robert'} 433 | 434 | In [20]: u = u.RETURNING('u.user_id') 435 | 436 | In [21]: print(u.query) 437 | UPDATE users 438 | SET first_name = %(first_name_bind)s 439 | WHERE first_name = %(first_name_bind_1)s 440 | RETURNING u.user_id; 441 | 442 | ``` 443 | 444 | 445 | ### Connection Factory 446 | The norm connection factory is a helper to produce norm.connection.ConnectionProxy wrapped DB-API connection objects. 447 | 448 | #### Connection Factory Example: 449 | ```python 450 | import sqlite3 451 | 452 | # notice that there is a specific class for each type of database 453 | from norm.norm_sqlite3 import SQLI_ConnectionFactory as ConnectionFactory 454 | 455 | def _make_connection(): 456 | conn = sqlite3.connect(':memory:') 457 | return conn 458 | 459 | my_connection_factory = ConnectionFactory(_make_connection) 460 | 461 | 462 | # now we can get connections and use them 463 | conn = my_connection_factory() 464 | 465 | row = conn.run_queryone('SELECT 1 AS test_column') 466 | # row == {'test_column': 1} 467 | 468 | ``` 469 | 470 | #### Connection Proxy 471 | A norm connection factory will return connection objects that look a lot like whatever dbapi connection object you are used to from the library you use to create connections (psycopg2, pymssql, sqlite3, etc) but with some important exceptions. While it passes on any method call to the actual connection object, it intercepts .cursor. Additionally, it adds .run_query, .run_queryone and .execute. 472 | 473 | 474 | ##### .cursor 475 | the .cursor(...) method passes all arguments to the .cursor method of the actual connection object. However, it wraps the cursor which is returned inside a CursorProxy object. 476 | 477 | ##### .execute 478 | Calling .execute on a ConnectionProxy creates a cursor, executes the sql provided, and then closes the cursor. 
It is meant as a convenience to avoid creating a cursor for queries where you do not care about any data returned. 479 | 480 | ##### .run_query 481 | Calling this returns a generator which produces rows in the form of dictionaries. 482 | 483 | ```python 484 | import sqlite3 485 | from norm.norm_sqlite3 import SQLI_ConnectionFactory as ConnectionFactory 486 | 487 | def conn_maker(): 488 | conn = sqlite3.connect(':memory:') 489 | conn.execute( 490 | '''CREATE TABLE users ( 491 | user_id INTEGER PRIMARY KEY AUTOINCREMENT, 492 | first_name VARCHAR(64) 493 | )''') 494 | conn.commit() 495 | return conn 496 | 497 | 498 | cf = ConnectionFactory(conn_maker) 499 | 500 | conn = cf() 501 | 502 | conn.execute( 503 | '''CREATE TABLE foos ( 504 | val VARCHAR(64) 505 | )''') 506 | 507 | for val in range(10): 508 | conn.execute('INSERT INTO foos VALUES (:value)', 509 | dict(value=val)) 510 | 511 | rows = conn.run_query('SELECT val FROM foos') 512 | print(rows) 513 | # prints: 514 | 515 | print(list(rows)) 516 | # prints: [{'val': '0'}, {'val': '1'}, {'val': '2'}, ...] 517 | 518 | ``` 519 | 520 | 521 | #### Example: Efficiently move and join data between databases. 522 | see `benchmarks/shuttle_data_example.py` 523 | 524 | A run on my desktop produced: 525 | ``` 526 | Moved 1000000 rows in 20.09 seconds 527 | Streaming memory usage peak was 0.05MB 528 | Moved 1000000 rows in 16.58 seconds 529 | Fetchall memory usage peak was 212.13MB 530 | ``` 531 | 532 | #### Example: Faster INSERTs by reducing the number of queries executed 533 | see `benchmarks/benchmark.py` 534 | 535 | When you are inserting many rows, most Python DB-API libraries will produce 1 INSERT statement for each row. There is a cursor.executemany method that many libraries provide, but this usually still produces a separate INSERT statement per row inserted, and simply does these in a for loop. 536 | 537 | Let's say you are inserting 1000 rows into a table. 
With Norm you can batch these into a single INSERT object and execute it, and a single large SQL statement is produced. Back and forth trips to the database == 1. 538 | 539 | Lets say you use SQLAlchemy or even directly use one of the sql libraries. Even if you use executemany, this will produce 1000 back and forth trips to the database. 540 | 541 | In a datacenter environment, ping times to the db will be short, lets estimate the time to send a query to a database, plus the time to parse a short INSERT statement to be 1ms. For 1000 rows, just the round trips to the database will add 1 second to the execution time. 542 | 543 | A run on my desktop, writing to a local postgresql database (latency is effectively 0), showed the INSERT was still completed in 1/4th the time. Adding network latency will make the improvement more dramatic. 544 | ``` 545 | *** Begin sqlalchemy_insert_bench 546 | Elapsed Time: 3.4474 547 | Faster than SQLA factor: 1.0000 548 | *** Begin norm_insert_bench 549 | Elapsed Time: 0.7697 550 | Faster than SQLA factor: 4.4790 551 | ``` 552 | -------------------------------------------------------------------------------- /benchmarks/benchmark.py: -------------------------------------------------------------------------------- 1 | from time import monotonic 2 | import psycopg2 3 | 4 | from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey 5 | from sqlalchemy.sql import select 6 | from sqlalchemy import create_engine 7 | 8 | from norm.norm_psycopg2 import PG_ConnectionFactory 9 | from norm.norm_psycopg2 import PG_SELECT as SELECT 10 | from norm.norm_psycopg2 import PG_INSERT as INSERT 11 | 12 | _loops = 50000 13 | 14 | metadata = MetaData() 15 | users = Table( 16 | 'users', metadata, 17 | Column('user_id', Integer, primary_key=True), 18 | Column('name', String), 19 | Column('fullname', String)) 20 | 21 | addresses = Table( 22 | 'addresses', metadata, 23 | Column('address_id', Integer, primary_key=True), 24 | Column('user_id', 
None, ForeignKey('users.id')), 25 | Column('email_address', String, nullable=False)) 26 | 27 | 28 | fake_users = [{'user_id': id, 'name': 'Bob', 'fullname': 'Bob Loblaw'} 29 | for id in range(5000)] 30 | 31 | fake_addresses = [{'user_id': user_id, 'email_address': 'bob@loblaw.com'} 32 | for user_id in range(5000)] * 8 33 | 34 | setup_ddl = '''\ 35 | DROP TABLE IF EXISTS addresses; 36 | DROP TABLE IF EXISTS users; 37 | 38 | CREATE TABLE users ( 39 | user_id INTEGER PRIMARY KEY, 40 | name VARCHAR(255), 41 | fullname VARCHAR(255)); 42 | 43 | CREATE TABLE addresses ( 44 | address_id SERIAL PRIMARY KEY, 45 | user_id INTEGER NOT NULL REFERENCES users (user_id), 46 | email_address VARCHAR(255) NOT NULL);''' 47 | 48 | 49 | def make_db_conn(): 50 | return psycopg2.connect(dbname='scratch') 51 | 52 | 53 | def setup_db(): 54 | conn = make_db_conn() 55 | conn.cursor().execute(setup_ddl) 56 | conn.commit() 57 | 58 | 59 | def make_sqla_conn(): 60 | engine = create_engine('postgresql:///scratch') 61 | return engine.connect() 62 | 63 | 64 | def norm_conn(): 65 | return PG_ConnectionFactory(make_db_conn)() 66 | 67 | 68 | def sqlalchemy_bench(): 69 | s = (select([users.c.name, users.c.fullname, 70 | addresses.c.email_address], 71 | users.c.user_id == addresses.c.user_id) 72 | .where(users.c.user_id > 1) 73 | .where(users.c.name.startswith('Justin'))) 74 | return str(s) 75 | 76 | 77 | def sqlalchemy_insert_bench(): 78 | conn = make_sqla_conn() 79 | # conn.connection.connection.set_trace_callback(print) 80 | with conn.begin() as trans: 81 | conn.execute(users.insert(), fake_users) 82 | conn.execute(addresses.insert(), fake_addresses) 83 | trans.commit() 84 | user_count = conn.execute('SELECT COUNT(*) from users').fetchone() 85 | print('** user count', user_count, len(fake_users)) 86 | 87 | 88 | def norm_insert_bench(): 89 | conn = norm_conn() 90 | # conn.set_trace_callback(print) 91 | conn.execute(INSERT('users', fake_users)) 92 | conn.execute(INSERT('addresses', fake_addresses)) 93 | 
conn.commit() 94 | 95 | 96 | def norm_bench(): 97 | s = (SELECT('users.name', 98 | 'users.fullname', 99 | 'addresses.email_address') 100 | .FROM('users') 101 | .JOIN('addresses', ON='users.id = addresses.user_id') 102 | .WHERE('users.id > %(user_id)s').bind(user_id=1) 103 | .WHERE("users.name LIKE %(name)s") 104 | .bind(name='Justin%')) 105 | 106 | return s.query 107 | 108 | 109 | def raw_bench(): 110 | s = """SELECT users.name, 111 | users.fullname, 112 | addresses.email_address 113 | FROM users 114 | JOIN addresses 115 | ON users.user_id = addresses.user_id 116 | WHERE users.id > %(user_id)s AND 117 | users.name LIKE %(name)s;""" 118 | return s 119 | 120 | 121 | def time_it(f, last=None): 122 | start = monotonic() 123 | for x in range(_loops): 124 | query = f() 125 | 126 | elapsed = monotonic() - start 127 | faster = 1 128 | if last is not None: 129 | faster = last / elapsed 130 | print(f'*** Begin {f.__name__}') 131 | print(f'Elapsed Time: {elapsed:.4f}') 132 | print(f'Faster than SQLA factor: {faster:.4f}') 133 | print(query) 134 | return elapsed 135 | 136 | 137 | def time_insert(f, last=None): 138 | start = monotonic() 139 | f() 140 | elapsed = monotonic() - start 141 | faster = 1 142 | if last is not None: 143 | faster = last / elapsed 144 | print(f'*** Begin {f.__name__}') 145 | print(f'Elapsed Time: {elapsed:.4f}') 146 | print(f'Faster than SQLA factor: {faster:.4f}') 147 | return elapsed 148 | 149 | 150 | def run_benchmark(): 151 | print('*' * 70) 152 | print('*' * 70) 153 | sqla_time = time_it(sqlalchemy_bench) 154 | norm_time = time_it(norm_bench, sqla_time) 155 | time_it(raw_bench, norm_time) 156 | print('*' * 70) 157 | print('*' * 70) 158 | setup_db() 159 | sqla_time = time_insert(sqlalchemy_insert_bench) 160 | setup_db() 161 | time_insert(norm_insert_bench, sqla_time) 162 | 163 | 164 | if __name__ == '__main__': 165 | run_benchmark() 166 | -------------------------------------------------------------------------------- /benchmarks/profile_norm.py: 
-------------------------------------------------------------------------------- 1 | from norm.norm_psycopg2 import PG_SELECT as SELECT 2 | 3 | 4 | def norm_bench(): 5 | s = (SELECT('users.name', 6 | 'users.fullname', 7 | 'addresses.email_address') 8 | .FROM('users') 9 | .JOIN('addresses', ON='users.id = addresses.user_id') 10 | .WHERE('users.id > %(user_id)s').bind(user_id=1) 11 | .WHERE("users.name LIKE %(name)s") 12 | .bind(name='Justin%')) 13 | 14 | return s.query, s.binds 15 | 16 | 17 | def main(): 18 | for _ in range(100000): 19 | norm_bench() 20 | 21 | 22 | if __name__ == '__main__': 23 | main() 24 | -------------------------------------------------------------------------------- /benchmarks/shuttle_data_example.py: -------------------------------------------------------------------------------- 1 | ''' 2 | This is an example of efficiently combining data between two databases 3 | using Norm's fast INSERT queries. 4 | ''' 5 | from time import monotonic 6 | from tempfile import mktemp 7 | import tracemalloc 8 | 9 | import sqlite3 10 | import itertools 11 | 12 | from norm.norm_sqlite3 import SQLI_INSERT 13 | from norm.norm_sqlite3 import SQLI_SELECT 14 | from norm.norm_sqlite3 import SQLI_ConnectionFactory 15 | 16 | # To try this example you need two databases, which you can set up by 17 | # modifying the connection functions and the imports above. 
18 | 19 | _sqlite_source_db_fn = mktemp() 20 | _sqlite_dest_db_fn = mktemp() 21 | _chunk_size = 50 22 | 23 | 24 | def make_source_db_conn(): 25 | # assumes db exists and can be accessed from a local socket 26 | return sqlite3.connect(_sqlite_source_db_fn) 27 | 28 | 29 | def make_dest_db_conn(): 30 | return sqlite3.connect(_sqlite_dest_db_fn) 31 | 32 | 33 | # setup connection factories 34 | 35 | source_db_cf = SQLI_ConnectionFactory(make_source_db_conn) 36 | dest_db_cf = SQLI_ConnectionFactory(make_dest_db_conn) 37 | 38 | 39 | def chunk_up(iterable, size): 40 | it = iter(iterable) 41 | while True: 42 | chunk = tuple(itertools.islice(it, size)) 43 | if not chunk: 44 | break 45 | yield chunk 46 | 47 | 48 | def shuttle_rows(source_cur, dest_cur, table_name, chunk_size=_chunk_size): 49 | for chunk in chunk_up(source_cur, chunk_size): 50 | dest_cur.execute(SQLI_INSERT(table_name, chunk)) 51 | 52 | 53 | clean_up_source_table = '''\ 54 | DROP TABLE IF EXISTS norm_shuttle_example_source;''' 55 | 56 | create_source_table = '''\ 57 | CREATE TABLE norm_shuttle_example_source ( 58 | example_id INTEGER PRIMARY KEY AUTOINCREMENT, 59 | some_data_column TEXT);''' 60 | 61 | create_sqlite_table = '''\ 62 | CREATE TABLE norm_shuttle_example_dest ( 63 | example_id integer, 64 | some_data_column TEXT, 65 | method TEXT);''' 66 | 67 | 68 | def setup_test_tables(): 69 | # lets set up some test data 70 | source_conn = source_db_cf() 71 | source_conn.execute(clean_up_source_table) 72 | source_conn.execute(create_source_table) 73 | source_conn.commit() 74 | 75 | dest_conn = dest_db_cf() 76 | dest_conn.execute(create_sqlite_table) 77 | dest_conn.commit() 78 | 79 | test_data_insert = SQLI_INSERT( 80 | 'norm_shuttle_example_source', 81 | [{'some_data_column': str(n)} for n in range(1000)]) 82 | # lets insert it 1000 times so we have 1000 * 1000 == 1M rows 83 | for batch in range(1, 1001): 84 | if batch % 100 == 0: 85 | print(f'Inserting batch {batch}') 86 | source_conn.execute(test_data_insert) 
87 | source_conn.commit() 88 | 89 | 90 | def run_example_streaming(): 91 | # lets set up some test data 92 | source_conn = source_db_cf() 93 | dest_conn = dest_db_cf() 94 | 95 | # move the data 96 | source_cur = source_conn.cursor() 97 | dest_cur = dest_conn.cursor() 98 | 99 | start = monotonic() 100 | # execute the select so the source_cursor can be read from 101 | source_cur.execute(SQLI_SELECT('example_id', 102 | 'some_data_column', 103 | "'example_streaming' AS method") 104 | .FROM('norm_shuttle_example_source')) 105 | 106 | shuttle_rows(source_cur, dest_cur, 'norm_shuttle_example_dest') 107 | dest_conn.commit() 108 | end = monotonic() 109 | 110 | row_count = dest_conn.run_queryone(SQLI_SELECT('COUNT(*) AS rows_moved') 111 | .FROM('norm_shuttle_example_dest') 112 | .WHERE(method='example_streaming')) 113 | print(f'Moved {row_count["rows_moved"]} rows in {end-start:.2f} seconds') 114 | 115 | 116 | def run_example_all(): 117 | # lets set up some test data 118 | source_conn = source_db_cf() 119 | dest_conn = dest_db_cf() 120 | 121 | # move the data 122 | source_cur = source_conn.cursor() 123 | dest_cur = dest_conn.cursor() 124 | 125 | start = monotonic() 126 | # execute the select so the source_cursor can be read from 127 | source_cur.execute(SQLI_SELECT('example_id', 128 | 'some_data_column', 129 | "'example_all' AS method") 130 | .FROM('norm_shuttle_example_source')) 131 | 132 | all_rows = source_cur.fetchall() 133 | 134 | # we still have to insert in chunks due to sqlite limits 135 | for rows in chunk_up(all_rows, _chunk_size): 136 | i = SQLI_INSERT('norm_shuttle_example_dest', rows) 137 | dest_cur.execute(i) 138 | dest_conn.commit() 139 | end = monotonic() 140 | 141 | row_count = dest_conn.run_queryone(SQLI_SELECT('COUNT(*) AS rows_moved') 142 | .FROM('norm_shuttle_example_dest') 143 | .WHERE(method='example_all')) 144 | print(f'Moved {row_count["rows_moved"]} rows in {end-start:.2f} seconds') 145 | 146 | 147 | if __name__ == '__main__': 148 | 
setup_test_tables() 149 | 150 | tracemalloc.start() 151 | run_example_streaming() 152 | current, peak = tracemalloc.get_traced_memory() 153 | tracemalloc.stop() 154 | print(f"Streaming memory usage peak was {peak / 10**6:.2f}MB") 155 | 156 | tracemalloc.start() 157 | run_example_all() 158 | current, peak = tracemalloc.get_traced_memory() 159 | tracemalloc.stop() 160 | print(f"Fetchall memory usage peak was {peak / 10**6:.2f}MB") 161 | -------------------------------------------------------------------------------- /norm/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from .norm import SELECT 4 | from .norm import UPDATE 5 | from .norm import INSERT 6 | from .norm import DELETE 7 | from .norm import UNION 8 | from .norm import UNION_ALL 9 | from .norm import WITH 10 | from .norm import EXISTS 11 | from .norm import NOT_EXISTS 12 | from norm.rows import RowsProxy 13 | from norm.connection import ConnectionProxy 14 | from norm.connection import ConnectionFactory 15 | from norm.connection import CursorProxy 16 | 17 | enable_logging = False 18 | max_query_log_length = 5000 19 | 20 | if int(os.getenv('NORM_LOG_QUERIES', 0)): 21 | enable_logging = True 22 | 23 | 24 | __all__ = [SELECT, 25 | UPDATE, 26 | DELETE, 27 | INSERT, 28 | WITH, 29 | UNION, 30 | UNION_ALL, 31 | EXISTS, 32 | NOT_EXISTS, 33 | RowsProxy, 34 | ConnectionProxy, 35 | ConnectionFactory, 36 | CursorProxy] 37 | -------------------------------------------------------------------------------- /norm/cached_property.py: -------------------------------------------------------------------------------- 1 | class cached_property: 2 | def __init__(self, func): 3 | self.__doc__ = getattr(func, "__doc__") 4 | self.func = func 5 | 6 | def __get__(self, obj, cls): 7 | if obj is None: 8 | return self 9 | 10 | value = obj.__dict__[self.func.__name__] = self.func(obj) 11 | return value 12 | 
-------------------------------------------------------------------------------- /norm/connection.py: -------------------------------------------------------------------------------- 1 | from time import monotonic 2 | import sys 3 | 4 | import norm 5 | from norm.rows import RowsProxy 6 | 7 | 8 | def _to_query_binds(q, params): 9 | if params is None: 10 | params = {} 11 | if not isinstance(q, str): 12 | return q.query, q.binds 13 | else: 14 | return q, params 15 | 16 | 17 | class CursorProxy(object): 18 | def __init__(self, cursor): 19 | self.cursor = cursor 20 | 21 | def __getattr__(self, name): 22 | return getattr(self.cursor, name) 23 | 24 | @property 25 | def column_names(self): 26 | if self.description is None: 27 | return 28 | return [d[0] for d in self.description] 29 | 30 | def execute(self, query, params=None): 31 | sql_query, sql_binds = _to_query_binds(query, params) 32 | start = monotonic() 33 | if sql_binds: 34 | res = self.cursor.execute(sql_query, sql_binds) 35 | else: 36 | res = self.cursor.execute(sql_query) 37 | end = monotonic() 38 | 39 | if norm.enable_logging: 40 | try: 41 | loggable_query = self._query_to_log( 42 | query, sql_query, sql_binds) 43 | loggable_query = loggable_query[:norm.max_query_log_length] 44 | print( 45 | f'\nQuery took {end-start:.2f} seconds:\n' 46 | f'{loggable_query}\n\n', 47 | file=sys.stderr) 48 | except Exception: 49 | pass 50 | return res 51 | 52 | def _query_to_log(self, query, sql_query, params): 53 | if hasattr(query, '_loggable_query'): 54 | loggable_query = query._loggable_query 55 | else: 56 | loggable_query = sql_query 57 | return loggable_query 58 | 59 | def run_query(self, query, params=None): 60 | self.execute(query, params) 61 | return self.fetchall() 62 | 63 | def run_queryone(self, query, params=None): 64 | self.execute(query, params) 65 | result = self.fetchone() 66 | return result 67 | 68 | def fetchall(self): 69 | return RowsProxy(self.cursor.fetchall(), self.column_names) 70 | 71 | def fetchmany(self, 
class ConnectionProxy(object):
    """Wraps a DB-API connection so cursors come back wrapped as well.

    Unknown attributes fall through to the underlying connection, so this
    proxy stays drop-in compatible with the driver's connection object.
    """

    cursor_proxy = CursorProxy

    def __init__(self, conn):
        self.conn = conn

    def __getattr__(self, name):
        # delegate anything we don't define to the real connection
        return getattr(self.conn, name)

    def cursor(self, *args, **kw):
        return self.cursor_proxy(self.conn.cursor(*args, **kw))

    def _run(self, method_name, query, params):
        # open a cursor, invoke one cursor-proxy method, always close it
        cur = self.cursor()
        try:
            return getattr(cur, method_name)(query, params)
        finally:
            cur.close()

    def execute(self, query, params=None):
        self._run('execute', query, params)

    def run_query(self, query, params=None):
        return self._run('run_query', query, params)

    def run_queryone(self, query, params=None):
        return self._run('run_queryone', query, params)
def indent_string(s, indent=0, skip_first=False):
    """Indent every line of ``s`` by ``indent`` spaces.

    With ``skip_first`` the first line is left at its original column
    (useful when the text is appended after an existing prefix).
    Returns ``s`` unchanged when ``indent`` is falsy.
    """
    if not indent:
        return s
    pad = ' ' * indent
    lines = s.splitlines()
    if skip_first:
        first, rest = lines[0], lines[1:]
        if not rest:
            return first
        return first + '\n' + '\n'.join(pad + line for line in rest)
    return '\n'.join(pad + line for line in lines)
distinct_on.append(option) 101 | elif op == COLUMN: 102 | columns.append(option) 103 | elif op == WHERE: 104 | where.append(option) 105 | elif op == FROM: 106 | expr, join, op, criteria = option 107 | if not join: 108 | if from_: 109 | from_[-1] += ',' 110 | from_.append(expr) 111 | else: 112 | from_[-1] += '\n ' + join + ' ' + expr 113 | if op is not None: 114 | from_[-1] += SEP + op + ' ' + criteria 115 | elif op == TABLE: 116 | table = option 117 | elif op == SET: 118 | set_.append(option) 119 | elif op == GROUP_BY: 120 | group_by.append(option) 121 | elif op == ORDER_BY: 122 | order_by.append(option) 123 | elif op == TOP: 124 | top = option 125 | elif op == LIMIT: 126 | limit = option 127 | elif op == OFFSET: 128 | offset = option 129 | elif op == HAVING: 130 | having.append(option) 131 | elif op == EXTRA: 132 | extra.append(option) 133 | elif op == RETURNING: 134 | returning.append(option) 135 | else: 136 | raise BogusQuery('There was a fatal error compiling query.') 137 | 138 | query = '' 139 | if query_type == SELECT_QT: 140 | query += 'SELECT ' 141 | if top is not None: 142 | query += 'TOP ' + top + SEP 143 | if distinct_on: 144 | query += 'DISTINCT ON (' + ', '.join(distinct_on) + ')' 145 | query += SEP 146 | 147 | query += COLUMN_SEP.join(columns) 148 | 149 | if from_: 150 | query += '\n FROM ' + SEP.join(from_) 151 | if where: 152 | query += '\n WHERE ' + indent_string(' AND\n'.join(where), 7, True) 153 | if group_by: 154 | query += '\nGROUP BY ' + GROUP_BY_SEP.join(group_by) 155 | if having: 156 | query += '\nHAVING ' + HAVING_SEP.join(having) 157 | if order_by: 158 | query += '\nORDER BY ' + ORDER_BY_SEP.join(order_by) 159 | if limit is not None: 160 | query += '\n LIMIT ' + limit 161 | if offset is not None: 162 | query += '\nOFFSET ' + offset 163 | if extra: 164 | query += '\n'.join(extra) 165 | elif query_type == UPDATE_QT: 166 | query += 'UPDATE ' + table 167 | if set_: 168 | query += '\n SET ' + (',' + SEP).join(set_) 169 | if from_: 170 | query 
+= '\n FROM ' + SEP.join(from_) 171 | if where: 172 | query += '\n WHERE ' + WHERE_SEP.join(where) 173 | if extra: 174 | query += '\n'.join(extra) 175 | if returning: 176 | query += '\nRETURNING ' + ', '.join(returning) 177 | elif query_type == DELETE_QT: 178 | query += 'DELETE FROM ' + table 179 | if from_: 180 | query += '\n FROM ' + SEP.join(from_) 181 | if where: 182 | query += '\n WHERE ' + WHERE_SEP.join(where) 183 | if returning: 184 | query += '\nRETURNING ' + ', '.join(returning) 185 | 186 | query += ';' 187 | return query 188 | 189 | 190 | class Query: 191 | query_type = None 192 | bind_prefix = '%(' 193 | bind_postfix = ')s' 194 | 195 | def __init__(self): 196 | self.parent = None 197 | self.chain = [] 198 | self._binds = [] 199 | self._query = None 200 | 201 | @classmethod 202 | def clean_bind_name(cls, s): 203 | s = s.replace('.', '___') 204 | return s 205 | 206 | @classmethod 207 | def bnd(cls, s): 208 | s = cls.clean_bind_name(s) 209 | return "%s%s%s" % (cls.bind_prefix, s, cls.bind_postfix) 210 | 211 | @property 212 | def bind_len(self): 213 | total = 0 214 | if self.parent is not None: 215 | total += self.parent.bind_len 216 | total += len(self._binds) 217 | return total 218 | 219 | @property 220 | def bind_items(self): 221 | if self.parent is not None: 222 | yield from self.parent.bind_items 223 | if self._binds: 224 | yield from self._binds 225 | 226 | @property 227 | def binds(self): 228 | isasis = NormAsIs.isasis 229 | operative_binds = {} 230 | for key, value in self.bind_items: 231 | if isasis(value): 232 | operative_binds.pop(key, None) 233 | continue 234 | operative_binds[key] = value 235 | return operative_binds 236 | 237 | def bind(self, **binds): 238 | s = self.child() 239 | final_binds = [] 240 | for name, value in binds.items(): 241 | name = self.clean_bind_name(name) 242 | final_binds.append((name, value)) 243 | s._binds = final_binds 244 | return s 245 | 246 | def child(self): 247 | s = self.__class__() 248 | s.parent = self 249 | 
return s 250 | 251 | def build_chain(self): 252 | if self.parent is not None: 253 | chain = self.parent.build_chain() 254 | else: 255 | chain = [] 256 | return chain + self.chain 257 | 258 | @property 259 | def query(self): 260 | isasis = NormAsIs.isasis 261 | 262 | if self._query is None: 263 | self._query = compile(self.build_chain(), self.query_type) 264 | query = self._query 265 | 266 | final_binds = {} 267 | for key, value in self.bind_items: 268 | final_binds[key] = value 269 | for key in sorted(final_binds.keys(), key=len, reverse=True): 270 | value = final_binds[key] 271 | if isasis(value): 272 | query = query.replace(self.bnd(key), value.value) 273 | return query 274 | 275 | @property 276 | def _loggable_query(self): 277 | query = self.query 278 | for key, value in self.binds.items(): 279 | query = query.replace(self.bnd(key), repr(value)) 280 | return query 281 | 282 | def _merge_subquery(self, subquery, indent=0): 283 | if isinstance(subquery, str): 284 | return subquery 285 | 286 | try: 287 | query = subquery.query.rstrip(';') 288 | for pair in subquery.bind_items: 289 | self._binds.append(pair) 290 | return indent_string(query, indent) 291 | 292 | except AttributeError: 293 | raise BogusQuery("don't know how to handle this") 294 | 295 | 296 | class _SELECT_UPDATE(Query): 297 | def WHERE(self, *args, **kw): 298 | # TODO: handle OR 299 | s = self.child() 300 | for stmt in args: 301 | clause = s._merge_subquery(stmt) 302 | s.chain.append((WHERE, clause)) 303 | for column_name, value in kw.items(): 304 | bind_val_name = '%s_bind_%s' % ( 305 | self.clean_bind_name(column_name), s.bind_len) 306 | s._binds.append((bind_val_name, value)) 307 | expr = column_name + ' = ' + self.bnd(bind_val_name) 308 | s.chain.append((WHERE, expr)) 309 | return s 310 | 311 | def FROM(self, *args): 312 | s = self.child() 313 | for stmt in args: 314 | s.chain.append((FROM, (stmt, False, None, None))) 315 | return s 316 | 317 | def JOIN(self, 318 | stmt, 319 | ON=None, 320 | 
USING=None, 321 | outer=_default, 322 | join_type=INNER_JOIN): 323 | 324 | if outer is not _default: 325 | import warnings 326 | warnings.warn( 327 | 'JOIN will stop accepting outer as an argument soon, ' 328 | 'use the named methods (such as .LEFTJOIN) instead') 329 | if outer: 330 | join_type = LEFT_JOIN 331 | else: 332 | join_type = INNER_JOIN 333 | 334 | if join_type == INNER_JOIN: 335 | keyword = 'JOIN' 336 | elif join_type == LEFT_JOIN: 337 | keyword = 'LEFT JOIN' 338 | elif join_type == RIGHT_JOIN: 339 | keyword = 'RIGHT JOIN' 340 | elif join_type == FULL_JOIN: 341 | keyword = 'FULL JOIN' 342 | else: 343 | raise BogusQuery(f"Unknown join type {join_type!r}") 344 | if ON is not None and USING is not None: 345 | raise BogusQuery("You can't specify both ON and USING.") 346 | elif ON is not None: 347 | op = 'ON' 348 | criteria = ON 349 | elif USING is not None: 350 | op = 'USING' 351 | if isinstance(USING, str): 352 | criteria = f'({USING})' 353 | else: 354 | criteria = '(' + ', '.join(USING) + ')' 355 | else: 356 | raise BogusQuery('No join criteria specified.') 357 | 358 | s = self.child() 359 | s.chain.append((FROM, (stmt, keyword, op, criteria))) 360 | return s 361 | 362 | def LEFTJOIN(self, *args, **kw): 363 | return self.JOIN(*args, join_type=LEFT_JOIN, **kw) 364 | 365 | def RIGHTJOIN(self, *args, **kw): 366 | return self.JOIN(*args, join_type=RIGHT_JOIN, **kw) 367 | 368 | def FULLJOIN(self, *args, **kw): 369 | return self.JOIN(*args, join_type=FULL_JOIN, **kw) 370 | 371 | def RETURNING(self, *args): 372 | s = self.child() 373 | for arg in args: 374 | self.chain.append((RETURNING, arg)) 375 | return s 376 | 377 | 378 | class SELECT(_SELECT_UPDATE): 379 | query_type = SELECT_QT 380 | 381 | def __init__(self, *args): 382 | _SELECT_UPDATE.__init__(self) 383 | 384 | for stmt in args: 385 | try: 386 | stmt = str(int(stmt)) 387 | except ValueError: 388 | pass 389 | 390 | self.chain.append((COLUMN, stmt)) 391 | 392 | def SELECT(self, *args): 393 | s = 
class UPDATE(_SELECT_UPDATE):
    """UPDATE statement builder.

    Fixes relative to the previous version:
      * ``SET(*args)`` appended positional clauses to ``self.chain``,
        mutating the parent query instead of the returned child.
      * ``EXTRA()`` was a silent no-op that returned None, breaking
        method chaining; it now records EXTRA clauses (the compiler
        already emits them after the WHERE clause) and returns a child.
    """

    query_type = UPDATE_QT

    def __init__(self, table=None):
        super().__init__()

        if table is not None:
            self.chain.append((TABLE, table))

    def SET(self, *args, **kw):
        """Add SET clauses; kwargs become ``col = :col_bind`` with a bind."""
        s = self.child()
        for stmt in args:
            # bugfix: append to the child, not to self
            s.chain.append((SET, stmt))

        for column_name, value in kw.items():
            clean_column_name = self.clean_bind_name(column_name)
            bind_name = clean_column_name + '_bind'
            s._binds.append((bind_name, value))
            expr = str(column_name) + ' = ' + self.bnd(bind_name)
            s.chain.append((SET, expr))
        return s

    def EXTRA(self, *args):
        """Append free-form SQL emitted after the WHERE clause."""
        s = self.child()
        for arg in args:
            s.chain.append((EXTRA, arg))
        return s
list(sorted([key for key in self.data])) 562 | else: 563 | columns = set() 564 | for d in self.data: 565 | columns |= set(d) 566 | self._columns = list(sorted(columns)) 567 | return self._columns 568 | 569 | @property 570 | def query(self): 571 | if self.multi_data: 572 | return self._query(self.data) 573 | else: 574 | return self._query([self.data]) 575 | 576 | def _bind_param_name(self, col_name, index): 577 | return f'{self.bind_prefix}{col_name}_{index}{self.bind_postfix}' 578 | 579 | def _query(self, data): 580 | isasis = NormAsIs.isasis 581 | q = 'INSERT INTO %s ' % self.table 582 | 583 | if self.columns: 584 | q += '(' 585 | q += ', '.join(col_name for col_name in self.columns) 586 | q += ')' 587 | 588 | if self.statement: 589 | q += '\n' + indent_string(self.statement.query[:-1], 2) 590 | elif self.data is None: 591 | q += 'DEFAULT VALUES' 592 | else: 593 | q += '\n VALUES\n' 594 | for index, d in enumerate(data): 595 | if index > 0: 596 | q += ',\n' 597 | 598 | q += '(' 599 | last_col_ix = len(self.columns) - 1 600 | for ix, col_name in enumerate(self.columns): 601 | col_val = d.get(col_name, self.default) 602 | if isasis(col_val): 603 | q += col_val.value 604 | else: 605 | q += self._bind_param_name(col_name, index) 606 | if last_col_ix != ix: 607 | q += ', ' 608 | 609 | q += ')' 610 | if self.on_conflict: 611 | q += f'\nON CONFLICT {self.on_conflict}' 612 | 613 | if self.returning: 614 | if isinstance(self.returning, str): 615 | returning = [self.returning] 616 | else: 617 | returning = self.returning 618 | q += '\nRETURNING ' + ', '.join(returning) 619 | 620 | q += ';' 621 | 622 | return q 623 | 624 | 625 | class WITH(Query): 626 | def __init__(self, **kw): 627 | Query.__init__(self) 628 | self.tables = kw 629 | self.primary = None 630 | 631 | def __call__(self, primary): 632 | self.primary = primary 633 | return self 634 | 635 | @property 636 | def query(self): 637 | parts = [] 638 | for name, query in self.tables.items(): 639 | query_part = 
class UNION(Query):
    """Combines member queries with the UNION set operator."""

    def __init__(self, *args):
        super().__init__()

        self.op = 'UNION'
        self.queries = list(args)

    def append(self, query):
        """Add another member query after construction."""
        return self.queries.append(query)

    @property
    def query(self):
        # strip each member's trailing ';' before joining  # TODO: HACK!
        parts = [q.query[:-1] for q in self.queries]
        return f'\n{self.op}\n'.join(parts) + ';'

    @property
    def binds(self):
        merged = dict()
        for q in self.queries:
            merged.update(q.binds)
        return merged


class UNION_ALL(UNION):
    """UNION ALL variant (keeps duplicate rows)."""

    def __init__(self, *args):
        super().__init__(*args)
        self.op = 'UNION ALL'


# Bugfix: __all__ must contain *names* (strings); listing the objects
# themselves makes `from norm.norm import *` raise TypeError.
__all__ = ['Query',
           'SELECT',
           'UPDATE',
           'DELETE',
           'INSERT',
           'WITH',
           'UNION',
           'UNION_ALL',
           'BogusQuery']
from .norm import SELECT
from .norm import INSERT
from .norm import UPDATE
from .norm import DELETE
from .connection import ConnectionFactory
from .connection import ConnectionProxy
from .connection import CursorProxy


# mysql-connector uses the default %(name)s paramstyle, so the base
# classes need no bind-prefix overrides.
class MY_CON_INSERT(INSERT):
    pass


class MY_CON_SELECT(SELECT):
    pass


class MY_CON_UPDATE(UPDATE):
    pass


class MY_CON_DELETE(DELETE):
    pass


class MY_CON_CursorProxy(CursorProxy):
    pass


class MY_CON_ConnectionProxy(ConnectionProxy):
    cursor_proxy = MY_CON_CursorProxy


class MY_CON_ConnectionFactory(ConnectionFactory):
    connection_proxy = MY_CON_ConnectionProxy


# Bugfix: __all__ must contain *names* (strings); listing the objects
# themselves makes star-imports raise TypeError.
__all__ = ['MY_CON_INSERT',
           'MY_CON_SELECT',
           'MY_CON_UPDATE',
           'MY_CON_DELETE',
           'MY_CON_CursorProxy',
           'MY_CON_ConnectionProxy',
           'MY_CON_ConnectionFactory']
.norm import UPDATE 7 | from .norm import DELETE 8 | from .connection import ConnectionFactory 9 | from .connection import ConnectionProxy 10 | from .connection import CursorProxy 11 | 12 | DEFAULT = AsIs('DEFAULT') 13 | 14 | PG_NormAsIs = AsIs 15 | 16 | 17 | class PG_INSERT(INSERT): 18 | defaultdefault = DEFAULT 19 | 20 | 21 | class PG_SELECT(SELECT): 22 | pass 23 | 24 | 25 | class PG_UPDATE(UPDATE): 26 | pass 27 | 28 | 29 | class PG_DELETE(DELETE): 30 | pass 31 | 32 | 33 | class PG_CursorProxy(CursorProxy): 34 | def _query_to_log(self, query, sql_query, params): 35 | return self.mogrify(sql_query, params).decode(self.connection.encoding) 36 | 37 | 38 | class PG_ConnectionProxy(ConnectionProxy): 39 | cursor_proxy = PG_CursorProxy 40 | 41 | 42 | class PG_ConnectionFactory(ConnectionFactory): 43 | connection_proxy = PG_ConnectionProxy 44 | 45 | 46 | __all__ = [DEFAULT, 47 | PG_INSERT, 48 | PG_SELECT, 49 | PG_UPDATE, 50 | PG_DELETE, 51 | PG_CursorProxy, 52 | PG_ConnectionProxy, 53 | PG_ConnectionFactory] 54 | -------------------------------------------------------------------------------- /norm/norm_pymssql.py: -------------------------------------------------------------------------------- 1 | 2 | from norm.norm import SELECT 3 | from norm.norm import INSERT 4 | from norm.norm import UPDATE 5 | from norm.norm import DELETE 6 | from norm.norm import _default 7 | from norm.norm import NormAsIs 8 | from norm.connection import ConnectionFactory 9 | from norm.connection import ConnectionProxy 10 | from norm.connection import CursorProxy 11 | 12 | 13 | PYMSSQL_AsIs = NormAsIs 14 | PYMSSQL_DEFAULT = NormAsIs('DEFAULT') 15 | 16 | _encrypt_statement = ( 17 | "EncryptByKey(Key_GUID('{key_name}'), CAST({bind} AS VARCHAR(4000)))") 18 | 19 | 20 | class PymssqlLoggingMixin: 21 | @property 22 | def _loggable_query(self): 23 | from _mssql import quote_data 24 | query = self.query 25 | for key, value in self.binds.items(): 26 | quoted_data = quote_data(value) 27 | if 
class PYMSSQL_INSERT(INSERT):
    """INSERT builder with optional SQL Server column encryption.

    Columns listed in ``encrypted_columns`` have their bind parameters
    wrapped in ``EncryptByKey(Key_GUID('<key>'), CAST(... AS VARCHAR(4000)))``
    using ``encryption_key`` as the key name.
    """

    defaultdefault = PYMSSQL_DEFAULT

    def __init__(self,
                 table,
                 data=None,
                 columns=None,
                 statement=None,
                 default=_default,
                 on_conflict=None,
                 returning=None,
                 encrypted_columns=None,
                 encryption_key=None):
        super().__init__(table,
                         data=data,
                         columns=columns,
                         statement=statement,
                         default=default,
                         on_conflict=on_conflict,
                         returning=returning)

        if encrypted_columns is None:
            encrypted_columns = ()
        self.encrypted_columns = set(encrypted_columns)
        self.encryption_key = encryption_key

        if self.encrypted_columns and not self.encryption_key:
            raise RuntimeError('You must supply an encryption key name when'
                               ' using encrypted columns')

    def _bind_param_name(self, col_name, index):
        bind = f'{self.bind_prefix}{col_name}_{index}{self.bind_postfix}'
        if col_name not in self.encrypted_columns:
            return bind
        # wrap the bind in the server-side encryption call
        return _encrypt_statement.format(key_name=self.encryption_key,
                                         bind=bind)
-------------------------------------------------------------------------------- /norm/norm_sqlalchemy.py: -------------------------------------------------------------------------------- 1 | 2 | from norm.norm import SELECT 3 | from norm.norm import INSERT 4 | from norm.norm import UPDATE 5 | from norm.norm import DELETE 6 | from norm.connection import ConnectionFactory 7 | from norm.connection import ConnectionProxy 8 | from norm.connection import CursorProxy 9 | 10 | 11 | class SQLA_INSERT(INSERT): 12 | bind_prefix = ':' 13 | bind_postfix = '' 14 | 15 | 16 | class SQLA_SELECT(SELECT): 17 | bind_prefix = ':' 18 | bind_postfix = '' 19 | 20 | 21 | class SQLA_UPDATE(UPDATE): 22 | bind_prefix = ':' 23 | bind_postfix = '' 24 | 25 | 26 | class SQLA_DELETE(DELETE): 27 | bind_prefix = ':' 28 | bind_postfix = '' 29 | 30 | 31 | class SQLA_CursorProxy(CursorProxy): 32 | pass 33 | 34 | 35 | class SQLA_ConnectionProxy(ConnectionProxy): 36 | cursor_proxy = SQLA_CursorProxy 37 | 38 | 39 | class SQLA_ConnectionFactory(ConnectionFactory): 40 | connection_proxy = SQLA_ConnectionProxy 41 | 42 | 43 | __all__ = [SQLA_INSERT, 44 | SQLA_SELECT, 45 | SQLA_UPDATE, 46 | SQLA_DELETE, 47 | SQLA_CursorProxy, 48 | SQLA_ConnectionProxy, 49 | SQLA_ConnectionFactory] 50 | -------------------------------------------------------------------------------- /norm/norm_sqlite3.py: -------------------------------------------------------------------------------- 1 | 2 | from norm.norm import SELECT 3 | from norm.norm import INSERT 4 | from norm.norm import UPDATE 5 | from norm.norm import DELETE 6 | from norm.connection import ConnectionFactory 7 | from norm.connection import ConnectionProxy 8 | from norm.connection import CursorProxy 9 | 10 | 11 | class SQLI_INSERT(INSERT): 12 | bind_prefix = ':' 13 | bind_postfix = '' 14 | 15 | 16 | class SQLI_SELECT(SELECT): 17 | bind_prefix = ':' 18 | bind_postfix = '' 19 | 20 | 21 | class SQLI_UPDATE(UPDATE): 22 | bind_prefix = ':' 23 | bind_postfix = '' 24 | 25 | 
class RowsProxy(object):
    """Lazily adapts raw DB rows (sequences) into dicts keyed by column name.

    Calling the proxy groups rows by one or more key columns.  Grouping
    uses itertools.groupby semantics: only *consecutive* rows with equal
    keys are grouped, so the input order matters.
    """

    def __init__(self, rows, column_names=None):
        self.rows = rows
        self.column_names = column_names

    def __len__(self):
        return len(self.rows)

    def __call__(self, *args):
        if len(args) == 1:
            sole = args[0]

            def key_func(row):
                return row.get(sole)
        else:
            def key_func(row):
                return tuple(row.get(name) for name in args)

        for key, grouped in groupby(self, key=key_func):
            yield key, RowsProxy(grouped)

    def __iter__(self):
        names = self.column_names
        for raw in self.rows:
            if names is None or hasattr(raw, 'get'):
                # already a mapping (or no column metadata): pass through
                yield raw
            else:
                yield dict(zip(names, raw))
13 | category = "dev" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 16 | 17 | [[package]] 18 | name = "attrs" 19 | version = "21.4.0" 20 | description = "Classes Without Boilerplate" 21 | category = "dev" 22 | optional = false 23 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 24 | 25 | [package.extras] 26 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] 27 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 28 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] 29 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] 30 | 31 | [[package]] 32 | name = "backcall" 33 | version = "0.2.0" 34 | description = "Specifications for callback functions passed in to an API" 35 | category = "dev" 36 | optional = false 37 | python-versions = "*" 38 | 39 | [[package]] 40 | name = "colorama" 41 | version = "0.4.4" 42 | description = "Cross-platform colored terminal text." 43 | category = "dev" 44 | optional = false 45 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 46 | 47 | [[package]] 48 | name = "decorator" 49 | version = "5.1.1" 50 | description = "Decorators for Humans" 51 | category = "dev" 52 | optional = false 53 | python-versions = ">=3.5" 54 | 55 | [[package]] 56 | name = "distlib" 57 | version = "0.3.4" 58 | description = "Distribution utilities" 59 | category = "dev" 60 | optional = false 61 | python-versions = "*" 62 | 63 | [[package]] 64 | name = "filelock" 65 | version = "3.4.2" 66 | description = "A platform independent file lock." 
67 | category = "dev" 68 | optional = false 69 | python-versions = ">=3.7" 70 | 71 | [package.extras] 72 | docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] 73 | testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] 74 | 75 | [[package]] 76 | name = "flake8" 77 | version = "4.0.1" 78 | description = "the modular source code checker: pep8 pyflakes and co" 79 | category = "dev" 80 | optional = false 81 | python-versions = ">=3.6" 82 | 83 | [package.dependencies] 84 | importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} 85 | mccabe = ">=0.6.0,<0.7.0" 86 | pycodestyle = ">=2.8.0,<2.9.0" 87 | pyflakes = ">=2.4.0,<2.5.0" 88 | 89 | [[package]] 90 | name = "greenlet" 91 | version = "1.1.2" 92 | description = "Lightweight in-process concurrent programming" 93 | category = "dev" 94 | optional = false 95 | python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" 96 | 97 | [package.extras] 98 | docs = ["sphinx"] 99 | 100 | [[package]] 101 | name = "importlib-metadata" 102 | version = "4.2.0" 103 | description = "Read metadata from Python packages" 104 | category = "dev" 105 | optional = false 106 | python-versions = ">=3.6" 107 | 108 | [package.dependencies] 109 | typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} 110 | zipp = ">=0.5" 111 | 112 | [package.extras] 113 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 114 | testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] 115 | 116 | [[package]] 117 | name = "iniconfig" 118 | version = "1.1.1" 119 | description = "iniconfig: brain-dead simple config-ini parsing" 120 | category = "dev" 121 | optional = false 122 | python-versions = "*" 123 | 124 | [[package]] 125 | 
name = "ipython" 126 | version = "7.31.1" 127 | description = "IPython: Productive Interactive Computing" 128 | category = "dev" 129 | optional = false 130 | python-versions = ">=3.7" 131 | 132 | [package.dependencies] 133 | appnope = {version = "*", markers = "sys_platform == \"darwin\""} 134 | backcall = "*" 135 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 136 | decorator = "*" 137 | jedi = ">=0.16" 138 | matplotlib-inline = "*" 139 | pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} 140 | pickleshare = "*" 141 | prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" 142 | pygments = "*" 143 | traitlets = ">=4.2" 144 | 145 | [package.extras] 146 | all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"] 147 | doc = ["Sphinx (>=1.3)"] 148 | kernel = ["ipykernel"] 149 | nbconvert = ["nbconvert"] 150 | nbformat = ["nbformat"] 151 | notebook = ["notebook", "ipywidgets"] 152 | parallel = ["ipyparallel"] 153 | qtconsole = ["qtconsole"] 154 | test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"] 155 | 156 | [[package]] 157 | name = "jedi" 158 | version = "0.18.1" 159 | description = "An autocompletion tool for Python that can be used for text editors." 
160 | category = "dev" 161 | optional = false 162 | python-versions = ">=3.6" 163 | 164 | [package.dependencies] 165 | parso = ">=0.8.0,<0.9.0" 166 | 167 | [package.extras] 168 | qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] 169 | testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] 170 | 171 | [[package]] 172 | name = "matplotlib-inline" 173 | version = "0.1.3" 174 | description = "Inline Matplotlib backend for Jupyter" 175 | category = "dev" 176 | optional = false 177 | python-versions = ">=3.5" 178 | 179 | [package.dependencies] 180 | traitlets = "*" 181 | 182 | [[package]] 183 | name = "mccabe" 184 | version = "0.6.1" 185 | description = "McCabe checker, plugin for flake8" 186 | category = "dev" 187 | optional = false 188 | python-versions = "*" 189 | 190 | [[package]] 191 | name = "packaging" 192 | version = "21.3" 193 | description = "Core utilities for Python packages" 194 | category = "dev" 195 | optional = false 196 | python-versions = ">=3.6" 197 | 198 | [package.dependencies] 199 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" 200 | 201 | [[package]] 202 | name = "parso" 203 | version = "0.8.3" 204 | description = "A Python Parser" 205 | category = "dev" 206 | optional = false 207 | python-versions = ">=3.6" 208 | 209 | [package.extras] 210 | qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] 211 | testing = ["docopt", "pytest (<6.0.0)"] 212 | 213 | [[package]] 214 | name = "pexpect" 215 | version = "4.8.0" 216 | description = "Pexpect allows easy control of interactive console applications." 
217 | category = "dev" 218 | optional = false 219 | python-versions = "*" 220 | 221 | [package.dependencies] 222 | ptyprocess = ">=0.5" 223 | 224 | [[package]] 225 | name = "pickleshare" 226 | version = "0.7.5" 227 | description = "Tiny 'shelve'-like database with concurrency support" 228 | category = "dev" 229 | optional = false 230 | python-versions = "*" 231 | 232 | [[package]] 233 | name = "platformdirs" 234 | version = "2.4.1" 235 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 236 | category = "dev" 237 | optional = false 238 | python-versions = ">=3.7" 239 | 240 | [package.extras] 241 | docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] 242 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 243 | 244 | [[package]] 245 | name = "pluggy" 246 | version = "1.0.0" 247 | description = "plugin and hook calling mechanisms for python" 248 | category = "dev" 249 | optional = false 250 | python-versions = ">=3.6" 251 | 252 | [package.dependencies] 253 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 254 | 255 | [package.extras] 256 | dev = ["pre-commit", "tox"] 257 | testing = ["pytest", "pytest-benchmark"] 258 | 259 | [[package]] 260 | name = "prompt-toolkit" 261 | version = "3.0.24" 262 | description = "Library for building powerful interactive command lines in Python" 263 | category = "dev" 264 | optional = false 265 | python-versions = ">=3.6.2" 266 | 267 | [package.dependencies] 268 | wcwidth = "*" 269 | 270 | [[package]] 271 | name = "psycopg2-binary" 272 | version = "2.9.3" 273 | description = "psycopg2 - Python-PostgreSQL Database Adapter" 274 | category = "dev" 275 | optional = false 276 | python-versions = ">=3.6" 277 | 278 | [[package]] 279 | name = "ptyprocess" 280 | version = "0.7.0" 281 | description = "Run a subprocess in a pseudo terminal" 282 | category 
= "dev" 283 | optional = false 284 | python-versions = "*" 285 | 286 | [[package]] 287 | name = "py" 288 | version = "1.11.0" 289 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 290 | category = "dev" 291 | optional = false 292 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 293 | 294 | [[package]] 295 | name = "pycodestyle" 296 | version = "2.8.0" 297 | description = "Python style guide checker" 298 | category = "dev" 299 | optional = false 300 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 301 | 302 | [[package]] 303 | name = "pyflakes" 304 | version = "2.4.0" 305 | description = "passive checker of Python programs" 306 | category = "dev" 307 | optional = false 308 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 309 | 310 | [[package]] 311 | name = "pygments" 312 | version = "2.11.2" 313 | description = "Pygments is a syntax highlighting package written in Python." 314 | category = "dev" 315 | optional = false 316 | python-versions = ">=3.5" 317 | 318 | [[package]] 319 | name = "pyparsing" 320 | version = "3.0.6" 321 | description = "Python parsing module" 322 | category = "dev" 323 | optional = false 324 | python-versions = ">=3.6" 325 | 326 | [package.extras] 327 | diagrams = ["jinja2", "railroad-diagrams"] 328 | 329 | [[package]] 330 | name = "pytest" 331 | version = "6.2.5" 332 | description = "pytest: simple powerful testing with Python" 333 | category = "dev" 334 | optional = false 335 | python-versions = ">=3.6" 336 | 337 | [package.dependencies] 338 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 339 | attrs = ">=19.2.0" 340 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 341 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 342 | iniconfig = "*" 343 | packaging = "*" 344 | pluggy = ">=0.12,<2.0" 345 | py = ">=1.8.2" 346 | toml = "*" 347 | 348 | [package.extras] 349 | 
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 350 | 351 | [[package]] 352 | name = "pytest-flake8-v2" 353 | version = "1.1.0" 354 | description = "pytest plugin to check FLAKE8 requirements" 355 | category = "dev" 356 | optional = false 357 | python-versions = ">=3.4" 358 | 359 | [package.dependencies] 360 | flake8 = ">=3.5" 361 | pytest = ">=3.5" 362 | 363 | [[package]] 364 | name = "six" 365 | version = "1.16.0" 366 | description = "Python 2 and 3 compatibility utilities" 367 | category = "dev" 368 | optional = false 369 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 370 | 371 | [[package]] 372 | name = "sqlalchemy" 373 | version = "1.4.29" 374 | description = "Database Abstraction Library" 375 | category = "dev" 376 | optional = false 377 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" 378 | 379 | [package.dependencies] 380 | greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} 381 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 382 | 383 | [package.extras] 384 | aiomysql = ["greenlet (!=0.4.17)", "aiomysql"] 385 | aiosqlite = ["typing_extensions (!=3.10.0.1)", "greenlet (!=0.4.17)", "aiosqlite"] 386 | asyncio = ["greenlet (!=0.4.17)"] 387 | asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3)"] 388 | mariadb_connector = ["mariadb (>=1.0.1)"] 389 | mssql = ["pyodbc"] 390 | mssql_pymssql = ["pymssql"] 391 | mssql_pyodbc = ["pyodbc"] 392 | mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"] 393 | mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"] 394 | mysql_connector = ["mysql-connector-python"] 395 | oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"] 396 | postgresql = ["psycopg2 (>=2.7)"] 397 | 
postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"] 398 | postgresql_pg8000 = ["pg8000 (>=1.16.6)"] 399 | postgresql_psycopg2binary = ["psycopg2-binary"] 400 | postgresql_psycopg2cffi = ["psycopg2cffi"] 401 | pymysql = ["pymysql (<1)", "pymysql"] 402 | sqlcipher = ["sqlcipher3-binary"] 403 | 404 | [[package]] 405 | name = "toml" 406 | version = "0.10.2" 407 | description = "Python Library for Tom's Obvious, Minimal Language" 408 | category = "dev" 409 | optional = false 410 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 411 | 412 | [[package]] 413 | name = "tox" 414 | version = "3.24.5" 415 | description = "tox is a generic virtualenv management and test command line tool" 416 | category = "dev" 417 | optional = false 418 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 419 | 420 | [package.dependencies] 421 | colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} 422 | filelock = ">=3.0.0" 423 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 424 | packaging = ">=14" 425 | pluggy = ">=0.12.0" 426 | py = ">=1.4.17" 427 | six = ">=1.14.0" 428 | toml = ">=0.9.4" 429 | virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" 430 | 431 | [package.extras] 432 | docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] 433 | testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"] 434 | 435 | [[package]] 436 | name = "traitlets" 437 | version = "5.1.1" 438 | description = "Traitlets Python configuration system" 439 | category = "dev" 440 | optional = false 441 | python-versions = ">=3.7" 442 | 443 | [package.extras] 444 | test = ["pytest"] 445 | 446 | [[package]] 447 
| name = "typing-extensions" 448 | version = "4.0.1" 449 | description = "Backported and Experimental Type Hints for Python 3.6+" 450 | category = "dev" 451 | optional = false 452 | python-versions = ">=3.6" 453 | 454 | [[package]] 455 | name = "virtualenv" 456 | version = "20.13.0" 457 | description = "Virtual Python Environment builder" 458 | category = "dev" 459 | optional = false 460 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 461 | 462 | [package.dependencies] 463 | distlib = ">=0.3.1,<1" 464 | filelock = ">=3.2,<4" 465 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 466 | platformdirs = ">=2,<3" 467 | six = ">=1.9.0,<2" 468 | 469 | [package.extras] 470 | docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] 471 | testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] 472 | 473 | [[package]] 474 | name = "wcwidth" 475 | version = "0.2.5" 476 | description = "Measures the displayed width of unicode strings in a terminal" 477 | category = "dev" 478 | optional = false 479 | python-versions = "*" 480 | 481 | [[package]] 482 | name = "zipp" 483 | version = "3.7.0" 484 | description = "Backport of pathlib-compatible object wrapper for zip files" 485 | category = "dev" 486 | optional = false 487 | python-versions = ">=3.7" 488 | 489 | [package.extras] 490 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 491 | testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] 492 | 493 | [metadata] 494 | lock-version = "1.1" 495 | python-versions = "^3.7" 496 | content-hash = 
"fb55e7bf1606ee2b9588e2181ed1d9ce443ace52ed4f6a42460ccf3dc9bb2e6f" 497 | 498 | [metadata.files] 499 | appnope = [ 500 | {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, 501 | {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, 502 | ] 503 | atomicwrites = [ 504 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 505 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 506 | ] 507 | attrs = [ 508 | {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, 509 | {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, 510 | ] 511 | backcall = [ 512 | {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, 513 | {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, 514 | ] 515 | colorama = [ 516 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 517 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 518 | ] 519 | decorator = [ 520 | {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, 521 | {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, 522 | ] 523 | distlib = [ 524 | {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, 525 | {file = "distlib-0.3.4.zip", hash 
= "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, 526 | ] 527 | filelock = [ 528 | {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, 529 | {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, 530 | ] 531 | flake8 = [ 532 | {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, 533 | {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, 534 | ] 535 | greenlet = [ 536 | {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, 537 | {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, 538 | {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, 539 | {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, 540 | {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, 541 | {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, 542 | {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, 543 | {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, 544 | {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, 545 | {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, 546 | {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, 547 | {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, 548 | {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, 549 | {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, 550 | {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, 551 | {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, 552 | {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, 553 | {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, 554 | {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, 555 | {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, 556 | {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, 557 | {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, 558 | {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, 559 | {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, 560 | {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, 561 | {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, 562 | {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, 563 | {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, 564 | {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, 565 | {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, 566 | {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, 567 | {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, 568 | {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, 569 | {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, 570 | {file = 
"greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, 571 | {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, 572 | {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, 573 | {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, 574 | {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, 575 | {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, 576 | {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, 577 | {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, 578 | {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, 579 | {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, 580 | {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, 581 | {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, 582 | {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, 583 | {file = 
"greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, 584 | {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, 585 | {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, 586 | {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, 587 | {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, 588 | {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, 589 | {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, 590 | {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, 591 | ] 592 | importlib-metadata = [ 593 | {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, 594 | {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, 595 | ] 596 | iniconfig = [ 597 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 598 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 599 | ] 600 | ipython = [ 601 | {file = "ipython-7.31.1-py3-none-any.whl", hash = "sha256:55df3e0bd0f94e715abd968bedd89d4e8a7bce4bf498fb123fed4f5398fea874"}, 602 | {file = "ipython-7.31.1.tar.gz", 
hash = "sha256:b5548ec5329a4bcf054a5deed5099b0f9622eb9ea51aaa7104d215fece201d8c"}, 603 | ] 604 | jedi = [ 605 | {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, 606 | {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, 607 | ] 608 | matplotlib-inline = [ 609 | {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, 610 | {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, 611 | ] 612 | mccabe = [ 613 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 614 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 615 | ] 616 | packaging = [ 617 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, 618 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, 619 | ] 620 | parso = [ 621 | {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, 622 | {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, 623 | ] 624 | pexpect = [ 625 | {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, 626 | {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, 627 | ] 628 | pickleshare = [ 629 | {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, 630 | {file = "pickleshare-0.7.5.tar.gz", 
hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, 631 | ] 632 | platformdirs = [ 633 | {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, 634 | {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"}, 635 | ] 636 | pluggy = [ 637 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, 638 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, 639 | ] 640 | prompt-toolkit = [ 641 | {file = "prompt_toolkit-3.0.24-py3-none-any.whl", hash = "sha256:e56f2ff799bacecd3e88165b1e2f5ebf9bcd59e80e06d395fa0cc4b8bd7bb506"}, 642 | {file = "prompt_toolkit-3.0.24.tar.gz", hash = "sha256:1bb05628c7d87b645974a1bad3f17612be0c29fa39af9f7688030163f680bad6"}, 643 | ] 644 | psycopg2-binary = [ 645 | {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, 646 | {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478"}, 647 | {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65"}, 648 | {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092"}, 649 | {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76"}, 650 | {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = 
"sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9"}, 651 | {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4"}, 652 | {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa"}, 653 | {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e"}, 654 | {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42"}, 655 | {file = "psycopg2_binary-2.9.3-cp310-cp310-win32.whl", hash = "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029"}, 656 | {file = "psycopg2_binary-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53"}, 657 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e"}, 658 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1"}, 659 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460"}, 660 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281"}, 661 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f"}, 662 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca"}, 663 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af"}, 664 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57"}, 665 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d"}, 666 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b"}, 667 | {file = "psycopg2_binary-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2"}, 668 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1"}, 669 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33"}, 670 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae"}, 671 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094"}, 672 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063"}, 673 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef"}, 674 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb"}, 675 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232"}, 676 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554"}, 677 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba"}, 678 | {file = "psycopg2_binary-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71"}, 679 | {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667"}, 680 | {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272"}, 681 | {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834"}, 682 | {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24"}, 683 | {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c"}, 684 | {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"}, 685 | {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004"}, 686 | {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1"}, 687 | {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307"}, 688 | {file = "psycopg2_binary-2.9.3-cp38-cp38-win32.whl", hash = "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce"}, 689 | {file = "psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e"}, 690 | {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9"}, 691 | {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42"}, 692 | {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39"}, 693 | {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c"}, 694 | {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4"}, 695 | {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35"}, 696 | {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb"}, 697 | {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7"}, 698 | {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8"}, 699 | {file = "psycopg2_binary-2.9.3-cp39-cp39-win32.whl", hash = "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d"}, 700 | {file = "psycopg2_binary-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f"}, 701 | ] 702 | ptyprocess = [ 703 | {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, 704 | {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, 705 | ] 706 | py = [ 707 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 708 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 709 | ] 710 | pycodestyle = [ 711 | {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, 712 | {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, 713 | ] 714 | pyflakes = [ 715 | {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, 716 | {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, 717 | ] 718 | pygments = [ 719 | {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, 720 | {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, 721 | ] 722 | pyparsing = [ 723 | {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, 724 | {file = "pyparsing-3.0.6.tar.gz", hash = 
"sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, 725 | ] 726 | pytest = [ 727 | {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, 728 | {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, 729 | ] 730 | pytest-flake8-v2 = [ 731 | {file = "pytest-flake8-v2-1.1.0.tar.gz", hash = "sha256:363e053de8bf9db15e584141df13a79a241864f7c1d9b2ab007100ade021c683"}, 732 | {file = "pytest_flake8_v2-1.1.0-py3-none-any.whl", hash = "sha256:f2cfeb92f770390aeaf1477206a5feaebceabd942e6279ffc728d326aa6478f1"}, 733 | ] 734 | six = [ 735 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 736 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 737 | ] 738 | sqlalchemy = [ 739 | {file = "SQLAlchemy-1.4.29-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:da64423c05256f4ab8c0058b90202053b201cbe3a081f3a43eb590cd554395ab"}, 740 | {file = "SQLAlchemy-1.4.29-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0fc4eec2f46b40bdd42112b3be3fbbf88e194bcf02950fbb88bcdc1b32f07dc7"}, 741 | {file = "SQLAlchemy-1.4.29-cp27-cp27m-win32.whl", hash = "sha256:101d2e100ba9182c9039699588e0b2d833c54b3bad46c67c192159876c9f27ea"}, 742 | {file = "SQLAlchemy-1.4.29-cp27-cp27m-win_amd64.whl", hash = "sha256:ceac84dd9abbbe115e8be0c817bed85d9fa639b4d294e7817f9e61162d5f766c"}, 743 | {file = "SQLAlchemy-1.4.29-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:15b65887b6c324cad638c7671cb95985817b733242a7eb69edd7cdf6953be1e0"}, 744 | {file = "SQLAlchemy-1.4.29-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:78abc507d17753ed434b6cc0c0693126279723d5656d9775bfcac966a99a899b"}, 745 | {file = "SQLAlchemy-1.4.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:eb8c993706e86178ce15a6b86a335a2064f52254b640e7f53365e716423d33f4"}, 746 | {file = "SQLAlchemy-1.4.29-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:804e22d5b6165a4f3f019dd9c94bec5687de985a9c54286b93ded9f7846b8c82"}, 747 | {file = "SQLAlchemy-1.4.29-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56d9d62021946263d4478c9ca012fbd1805f10994cb615c88e7bfd1ae14604d8"}, 748 | {file = "SQLAlchemy-1.4.29-cp310-cp310-win32.whl", hash = "sha256:027f356c727db24f3c75828c7feb426f87ce1241242d08958e454bd025810660"}, 749 | {file = "SQLAlchemy-1.4.29-cp310-cp310-win_amd64.whl", hash = "sha256:debaf09a823061f88a8dee04949814cf7e82fb394c5bca22c780cb03172ca23b"}, 750 | {file = "SQLAlchemy-1.4.29-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:dc27dcc6c72eb38be7f144e9c2c4372d35a3684d3a6dd43bd98c1238358ee17c"}, 751 | {file = "SQLAlchemy-1.4.29-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4ddd4f2e247128c58bb3dd4489922874afce157d2cff0b2295d67fcd0f22494"}, 752 | {file = "SQLAlchemy-1.4.29-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9ce960a1dc60524136cf6f75621588e2508a117e04a6e3eedb0968bd13b8c824"}, 753 | {file = "SQLAlchemy-1.4.29-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5919e647e1d4805867ea556ed4967c68b4d8b266059fa35020dbaed8ffdd60f3"}, 754 | {file = "SQLAlchemy-1.4.29-cp36-cp36m-win32.whl", hash = "sha256:886359f734b95ad1ef443b13bb4518bcade4db4f9553c9ce33d6d04ebda8d44e"}, 755 | {file = "SQLAlchemy-1.4.29-cp36-cp36m-win_amd64.whl", hash = "sha256:e9cc6d844e24c307c3272677982a9b33816aeb45e4977791c3bdd47637a8d810"}, 756 | {file = "SQLAlchemy-1.4.29-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:5e9cd33459afa69c88fa648e803d1f1245e3caa60bfe8b80a9595e5edd3bda9c"}, 757 | {file 
= "SQLAlchemy-1.4.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeaebceb24b46e884c4ad3c04f37feb178b81f6ce720af19bfa2592ca32fdef7"}, 758 | {file = "SQLAlchemy-1.4.29-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e89347d3bd2ef873832b47e85f4bbd810a5e626c5e749d90a07638da100eb1c8"}, 759 | {file = "SQLAlchemy-1.4.29-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a717c2e70fd1bb477161c4cc85258e41d978584fbe5522613618195f7e87d9b"}, 760 | {file = "SQLAlchemy-1.4.29-cp37-cp37m-win32.whl", hash = "sha256:f74d6c05d2d163464adbdfbc1ab85048cc15462ff7d134b8aed22bd521e1faa5"}, 761 | {file = "SQLAlchemy-1.4.29-cp37-cp37m-win_amd64.whl", hash = "sha256:621854dbb4d2413c759a5571564170de45ef37299df52e78e62b42e2880192e1"}, 762 | {file = "SQLAlchemy-1.4.29-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f3909194751bb6cb7c5511dd18bcf77e6e3f0b31604ed4004dffa9461f71e737"}, 763 | {file = "SQLAlchemy-1.4.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd49d21d1f03c81fbec9080ecdc4486d5ddda67e7fbb75ebf48294465c022cdc"}, 764 | {file = "SQLAlchemy-1.4.29-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e5f6959466a42b6569774c257e55f9cd85200d5b0ba09f0f5d8b5845349c5822"}, 765 | {file = "SQLAlchemy-1.4.29-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0072f9887aabe66db23f818bbe950cfa1b6127c5cb769b00bcc07935b3adb0ad"}, 766 | {file = "SQLAlchemy-1.4.29-cp38-cp38-win32.whl", hash = "sha256:ad618d687d26d4cbfa9c6fa6141d59e05bcdfc60cb6e1f1d3baa18d8c62fef5f"}, 767 | {file = "SQLAlchemy-1.4.29-cp38-cp38-win_amd64.whl", hash = "sha256:878daecb6405e786b07f97e1c77a9cfbbbec17432e8a90c487967e32cfdecb33"}, 768 | {file = "SQLAlchemy-1.4.29-cp39-cp39-macosx_10_14_x86_64.whl", hash = 
"sha256:e027bdf0a4cf6bd0a3ad3b998643ea374d7991bd117b90bf9982e41ceb742941"}, 769 | {file = "SQLAlchemy-1.4.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5de7adfb91d351f44062b8dedf29f49d4af7cb765be65816e79223a4e31062b"}, 770 | {file = "SQLAlchemy-1.4.29-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fbc6e63e481fa323036f305ada96a3362e1d60dd2bfa026cac10c3553e6880e9"}, 771 | {file = "SQLAlchemy-1.4.29-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dd0502cb091660ad0d89c5e95a29825f37cde2a5249957838e975871fbffaad"}, 772 | {file = "SQLAlchemy-1.4.29-cp39-cp39-win32.whl", hash = "sha256:37b46bfc4af3dc226acb6fa28ecd2e1fd223433dc5e15a2bad62bf0a0cbb4e8b"}, 773 | {file = "SQLAlchemy-1.4.29-cp39-cp39-win_amd64.whl", hash = "sha256:08cfd35eecaba79be930c9bfd2e1f0c67a7e1314355d83a378f9a512b1cf7587"}, 774 | {file = "SQLAlchemy-1.4.29.tar.gz", hash = "sha256:fa2bad14e1474ba649cfc969c1d2ec915dd3e79677f346bbfe08e93ef9020b39"}, 775 | ] 776 | toml = [ 777 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 778 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 779 | ] 780 | tox = [ 781 | {file = "tox-3.24.5-py2.py3-none-any.whl", hash = "sha256:be3362472a33094bce26727f5f771ca0facf6dafa217f65875314e9a6600c95c"}, 782 | {file = "tox-3.24.5.tar.gz", hash = "sha256:67e0e32c90e278251fea45b696d0fef3879089ccbe979b0c556d35d5a70e2993"}, 783 | ] 784 | traitlets = [ 785 | {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, 786 | {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, 787 | ] 788 | typing-extensions = [ 789 | {file = 
"typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, 790 | {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, 791 | ] 792 | virtualenv = [ 793 | {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"}, 794 | {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"}, 795 | ] 796 | wcwidth = [ 797 | {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, 798 | {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, 799 | ] 800 | zipp = [ 801 | {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, 802 | {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, 803 | ] 804 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "norm" 3 | version = "1.6.1" 4 | description = "Easy peasy SQL generation" 5 | authors = ["Justin Van Winkle "] 6 | license = "MIT" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.7" 10 | 11 | [tool.poetry.dev-dependencies] 12 | psycopg2-binary = "^2.8.6" 13 | ipython = "*" 14 | pytest = "*" 15 | sqlalchemy = "*" 16 | tox = "^3.23.0" 17 | pytest-flake8-v2 = "*" 18 | 19 | [build-system] 20 | requires = ["poetry>=0.12"] 21 | build-backend = "poetry.masonry.api" 22 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts 
= --flake8 --doctest-modules 3 | -------------------------------------------------------------------------------- /tests/test_connection.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import sqlite3 4 | 5 | from norm.norm_sqlite3 import SQLI_ConnectionFactory as ConnectionFactory 6 | from norm.norm_sqlite3 import SQLI_SELECT as SELECT 7 | from norm.norm_sqlite3 import SQLI_INSERT as INSERT 8 | 9 | 10 | def conn_maker(): 11 | conn = sqlite3.connect(':memory:') 12 | conn.execute( 13 | '''CREATE TABLE users ( 14 | user_id INTEGER PRIMARY KEY AUTOINCREMENT, 15 | first_name VARCHAR(64) 16 | )''') 17 | conn.commit() 18 | return conn 19 | 20 | 21 | def test_connection_factory_and_connection_proxy_cursor(): 22 | cf = ConnectionFactory(conn_maker) 23 | conn = cf() 24 | # make sure we get a 'connection' 25 | conn.cursor() 26 | 27 | 28 | def test_run_query(): 29 | cf = ConnectionFactory(conn_maker) 30 | conn = cf() 31 | 32 | s = SELECT('user_id').FROM('users') 33 | user_ids = conn.run_query(s) 34 | 35 | assert list(user_ids) == [] 36 | 37 | i = INSERT('users', data={'first_name': 'Justin'}) 38 | conn.run_query(i) 39 | conn.commit() 40 | 41 | user_ids = conn.run_query(s) 42 | assert list(user_ids) == [{'user_id': 1}] 43 | 44 | user_ids = conn.run_query('SELECT user_id FROM users WHERE user_id = 1') 45 | assert list(user_ids) == [{'user_id': 1}] 46 | 47 | 48 | def test_runqueryone(): 49 | cf = ConnectionFactory(conn_maker) 50 | conn = cf() 51 | 52 | s = SELECT('user_id').FROM('users') 53 | row = conn.run_queryone(s) 54 | assert row is None 55 | 56 | i = INSERT('users', data={'first_name': 'Justin'}) 57 | conn.run_query(i) 58 | conn.commit() 59 | 60 | s = SELECT('user_id').FROM('users') 61 | row = conn.run_queryone(s) 62 | assert row == {'user_id': 1} 63 | -------------------------------------------------------------------------------- /tests/test_norm.py: 
-------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | from norm import SELECT 4 | from norm import UPDATE 5 | from norm import DELETE 6 | from norm import INSERT 7 | from norm import UNION 8 | from norm import UNION_ALL 9 | from norm import WITH 10 | from norm import EXISTS 11 | from norm import NOT_EXISTS 12 | from norm.norm import NormAsIs 13 | 14 | 15 | simple_select_query = """\ 16 | SELECT tbl1.column1 AS col1 17 | FROM table1 AS tbl1 18 | WHERE tbl1.col2 = 'testval';""" 19 | 20 | 21 | def test_simple_select(): 22 | s = (SELECT("tbl1.column1 AS col1") 23 | .FROM("table1 AS tbl1") 24 | .WHERE("tbl1.col2 = 'testval'")) 25 | 26 | assert s.query == simple_select_query 27 | assert s.binds == {} 28 | 29 | 30 | simple_asis_query = """\ 31 | SELECT tbl1.column1 AS col1 32 | FROM table1 AS tbl1 33 | WHERE tbl1.col2 = FOO AND 34 | x = CURRENT_TIMESTAMP;""" 35 | 36 | 37 | def test_simple_asis(): 38 | literal2 = NormAsIs('CURRENT_TIMESTAMP') 39 | s = (SELECT("tbl1.column1 AS col1") 40 | .FROM("table1 AS tbl1") 41 | .WHERE("tbl1.col2 = %(literal1)s", 42 | x=literal2) 43 | .bind(literal1=NormAsIs('FOO'))) 44 | 45 | assert s.query == simple_asis_query 46 | assert s.binds == {} 47 | 48 | 49 | kw_alias_query = """\ 50 | SELECT tbl1.column1 AS col1, 51 | tbl2.column2 AS col2, 52 | tbl2.column3 AS col3 53 | FROM table1 AS tbl1 54 | JOIN table2 AS tbl2 55 | ON tbl1.tid = tbl2.tid 56 | WHERE tbl1.col2 = %(tbl1___col2_bind_0)s;""" 57 | 58 | 59 | def test_kw_aliases(): 60 | s = (SELECT("tbl1.column1 AS col1") 61 | .FROM("table1 AS tbl1") 62 | .JOIN("table2 AS tbl2", ON='tbl1.tid = tbl2.tid') 63 | .SELECT("tbl2.column2 AS col2", 64 | "tbl2.column3 AS col3") 65 | .WHERE(**{"tbl1.col2": 'testval'})) 66 | 67 | assert s.query == kw_alias_query 68 | assert s.binds == {'tbl1___col2_bind_0': 'testval'} 69 | 70 | 71 | simple_inner_join_select_expected = """\ 72 | SELECT tbl1.column1 AS col1, 73 | tbl2.column2 AS col2, 74 | 
tbl2.column3 AS col3 75 | FROM table1 AS tbl1 76 | JOIN table2 AS tbl2 77 | ON tbl1.tid = tbl2.tid 78 | WHERE tbl1.col2 = 'testval';""" 79 | 80 | 81 | def test_simple_inner_join_select(): 82 | s = (SELECT("tbl1.column1 AS col1") 83 | .FROM("table1 AS tbl1") 84 | .JOIN("table2 AS tbl2", ON='tbl1.tid = tbl2.tid') 85 | .SELECT("tbl2.column2 AS col2", 86 | "tbl2.column3 AS col3") 87 | .WHERE("tbl1.col2 = 'testval'")) 88 | 89 | assert s.query == simple_inner_join_select_expected 90 | assert s.binds == {} 91 | 92 | 93 | simple_inner_using_join_select_expected = """\ 94 | SELECT tbl1.column1 AS col1, 95 | tbl2.column2 AS col2, 96 | tbl2.column3 AS col3 97 | FROM table1 AS tbl1 98 | JOIN table2 AS tbl2 99 | USING (tubs) 100 | WHERE tbl1.col2 = 'testval';""" 101 | 102 | 103 | def test_simple_inner_using_join_select(): 104 | s = (SELECT("tbl1.column1 AS col1") 105 | .FROM("table1 AS tbl1") 106 | .JOIN("table2 AS tbl2", USING='tubs') 107 | .SELECT("tbl2.column2 AS col2", 108 | "tbl2.column3 AS col3") 109 | .WHERE("tbl1.col2 = 'testval'")) 110 | 111 | assert s.query == simple_inner_using_join_select_expected 112 | assert s.binds == {} 113 | 114 | 115 | simple_inner_using_multi_join_select_expected = """\ 116 | SELECT tbl1.column1 AS col1, 117 | tbl2.column2 AS col2, 118 | tbl2.column3 AS col3 119 | FROM table1 AS tbl1 120 | JOIN table2 AS tbl2 121 | USING (tubs, bubs) 122 | WHERE tbl1.col2 = 'testval';""" 123 | 124 | 125 | def test_simple_inner_using_multi_join_select(): 126 | s = (SELECT("tbl1.column1 AS col1") 127 | .FROM("table1 AS tbl1") 128 | .JOIN("table2 AS tbl2", USING=('tubs', 'bubs')) 129 | .SELECT("tbl2.column2 AS col2", 130 | "tbl2.column3 AS col3") 131 | .WHERE("tbl1.col2 = 'testval'")) 132 | 133 | assert s.query == simple_inner_using_multi_join_select_expected 134 | assert s.binds == {} 135 | 136 | 137 | simple_left_join_expected = """\ 138 | SELECT tbl1.column1 AS col1, 139 | tbl2.column2 AS col2, 140 | tbl2.column3 AS col3 141 | FROM table1 AS tbl1 142 | LEFT 
JOIN table2 AS tbl2 143 | ON tbl1.tid = tbl2.tid 144 | WHERE tbl1.col2 = 'testval';""" 145 | 146 | 147 | def test_simple_left_join_select(): 148 | s = (SELECT("tbl1.column1 AS col1") 149 | .FROM("table1 AS tbl1") 150 | .LEFTJOIN("table2 AS tbl2", ON='tbl1.tid = tbl2.tid') 151 | .SELECT("tbl2.column2 AS col2", 152 | "tbl2.column3 AS col3") 153 | .WHERE("tbl1.col2 = 'testval'")) 154 | 155 | assert s.query == simple_left_join_expected 156 | assert s.binds == {} 157 | 158 | 159 | simple_right_join_expected = """\ 160 | SELECT tbl1.column1 AS col1, 161 | tbl2.column2 AS col2, 162 | tbl2.column3 AS col3 163 | FROM table1 AS tbl1 164 | RIGHT JOIN table2 AS tbl2 165 | ON tbl1.tid = tbl2.tid 166 | WHERE tbl1.col2 = 'testval';""" 167 | 168 | 169 | def test_simple_right_join_select(): 170 | s = (SELECT("tbl1.column1 AS col1") 171 | .FROM("table1 AS tbl1") 172 | .RIGHTJOIN("table2 AS tbl2", ON='tbl1.tid = tbl2.tid') 173 | .SELECT("tbl2.column2 AS col2", 174 | "tbl2.column3 AS col3") 175 | .WHERE("tbl1.col2 = 'testval'")) 176 | 177 | assert s.query == simple_right_join_expected 178 | assert s.binds == {} 179 | 180 | 181 | simple_full_join_expected = """\ 182 | SELECT tbl1.column1 AS col1, 183 | tbl2.column2 AS col2, 184 | tbl2.column3 AS col3 185 | FROM table1 AS tbl1 186 | FULL JOIN table2 AS tbl2 187 | ON tbl1.tid = tbl2.tid 188 | WHERE tbl1.col2 = 'testval';""" 189 | 190 | 191 | def test_simple_full_join_select(): 192 | s = (SELECT("tbl1.column1 AS col1") 193 | .FROM("table1 AS tbl1") 194 | .FULLJOIN("table2 AS tbl2", ON='tbl1.tid = tbl2.tid') 195 | .SELECT("tbl2.column2 AS col2", 196 | "tbl2.column3 AS col3") 197 | .WHERE("tbl1.col2 = 'testval'")) 198 | 199 | assert s.query == simple_full_join_expected 200 | assert s.binds == {} 201 | 202 | 203 | distinct_on_expected = """\ 204 | SELECT DISTINCT ON (tbl1.column1) 205 | tbl1.column1 AS col1, 206 | tbl2.column2 AS col2, 207 | tbl2.column3 AS col3 208 | FROM table1 AS tbl1 209 | LEFT JOIN table2 AS tbl2 210 | ON tbl1.tid = 
tbl2.tid 211 | WHERE tbl1.col2 = 'testval';""" 212 | 213 | 214 | def test_distinct_on(): 215 | s = (SELECT("tbl1.column1 AS col1") 216 | .FROM("table1 AS tbl1") 217 | .LEFTJOIN("table2 AS tbl2", ON='tbl1.tid = tbl2.tid') 218 | .SELECT("tbl2.column2 AS col2", 219 | "tbl2.column3 AS col3") 220 | .WHERE("tbl1.col2 = 'testval'") 221 | .DISTINCT_ON('tbl1.column1')) 222 | 223 | assert s.query == distinct_on_expected 224 | assert s.binds == {} 225 | 226 | 227 | exists_subquery_expected = """\ 228 | SELECT tbl1.column1 AS col1, 229 | tbl2.column2 AS col2, 230 | tbl2.column3 AS col3 231 | FROM table1 AS tbl1 232 | LEFT JOIN table2 AS tbl2 233 | ON tbl1.tid = tbl2.tid 234 | WHERE tbl1.col2 = 'testval' AND 235 | EXISTS ( 236 | SELECT 1 237 | FROM foo 238 | WHERE foobar = tbl1.column1 AND 239 | bugs = %(bugs)s);""" 240 | 241 | 242 | def test_exists_subquery(): 243 | sub = (EXISTS(1) 244 | .FROM('foo') 245 | .WHERE('foobar = tbl1.column1', 246 | 'bugs = %(bugs)s') 247 | .bind(bugs='spiders')) 248 | 249 | s = (SELECT("tbl1.column1 AS col1") 250 | .FROM("table1 AS tbl1") 251 | .LEFTJOIN("table2 AS tbl2", ON='tbl1.tid = tbl2.tid') 252 | .SELECT("tbl2.column2 AS col2", 253 | "tbl2.column3 AS col3") 254 | .WHERE("tbl1.col2 = 'testval'", 255 | sub)) 256 | 257 | assert s.query == exists_subquery_expected 258 | assert s.binds == {'bugs': 'spiders'} 259 | 260 | 261 | not_exists_subquery_expected = """\ 262 | SELECT tbl1.column1 AS col1, 263 | tbl2.column2 AS col2, 264 | tbl2.column3 AS col3 265 | FROM table1 AS tbl1 266 | LEFT JOIN table2 AS tbl2 267 | ON tbl1.tid = tbl2.tid 268 | WHERE tbl1.col2 = 'testval' AND 269 | NOT EXISTS ( 270 | SELECT 1 271 | FROM foo 272 | WHERE foobar = tbl1.column1 AND 273 | bugs = %(bugs)s);""" 274 | 275 | 276 | def test_not_exists_subquery(): 277 | sub = (NOT_EXISTS(1) 278 | .FROM('foo') 279 | .WHERE('foobar = tbl1.column1', 280 | 'bugs = %(bugs)s')) 281 | 282 | s = (SELECT("tbl1.column1 AS col1") 283 | .FROM("table1 AS tbl1") 284 | .LEFTJOIN("table2 AS 
tbl2", ON='tbl1.tid = tbl2.tid') 285 | .SELECT("tbl2.column2 AS col2", 286 | "tbl2.column3 AS col3") 287 | .WHERE("tbl1.col2 = 'testval'", 288 | sub) 289 | .bind(bugs='spiders')) 290 | 291 | assert s.query == not_exists_subquery_expected 292 | assert s.binds == {'bugs': 'spiders'} 293 | 294 | 295 | multiple_where_expected = """\ 296 | SELECT tbl1.column1 AS col1 297 | FROM table1 AS tbl1 298 | WHERE tbl1.col2 = 'testval' AND 299 | tbl1.col3 = 'otherval';""" 300 | 301 | 302 | def test_multiple_where(): 303 | s = (SELECT("tbl1.column1 AS col1") 304 | .FROM("table1 AS tbl1") 305 | .WHERE("tbl1.col2 = 'testval'") 306 | .WHERE("tbl1.col3 = 'otherval'")) 307 | 308 | assert s.query == multiple_where_expected 309 | assert s.binds == {} 310 | 311 | 312 | all_select_methods_expected = """\ 313 | SELECT tbl1.column1 AS col1, 314 | table2.blah 315 | FROM table1 AS tbl1 316 | JOIN table2 317 | ON table2.blah = tbl1.col2 318 | WHERE tbl1.col2 = 'testval' 319 | GROUP BY table2.blah, 320 | col1 321 | HAVING count(*) > 5 AND 322 | count(*) > 6 323 | ORDER BY count(*) 324 | LIMIT 5 325 | OFFSET 3;""" 326 | 327 | 328 | def test_all_select_methods(): 329 | s = (SELECT("tbl1.column1 AS col1") 330 | .FROM("table1 AS tbl1") 331 | .WHERE("tbl1.col2 = 'testval'") 332 | .JOIN("table2", ON="table2.blah = tbl1.col2") 333 | .SELECT("table2.blah") 334 | .HAVING("count(*) > 5", 335 | "count(*) > 6") 336 | .GROUP_BY("table2.blah", "col1") 337 | .ORDER_BY("count(*)") 338 | .LIMIT(5) 339 | .OFFSET(3)) 340 | 341 | assert s.query == all_select_methods_expected 342 | assert s.binds == {} 343 | 344 | 345 | top_select_expected = """\ 346 | SELECT TOP 5 347 | tbl1.column1 AS col1, 348 | table2.blah 349 | FROM table1 AS tbl1 350 | JOIN table2 351 | ON table2.blah = tbl1.col2 352 | WHERE tbl1.col2 = 'testval' 353 | GROUP BY table2.blah, 354 | col1 355 | HAVING count(*) > 5 AND 356 | count(*) > 6 357 | ORDER BY count(*);""" 358 | 359 | 360 | def test_top_select(): 361 | s = (SELECT("tbl1.column1 AS col1") 
362 | .FROM("table1 AS tbl1") 363 | .WHERE("tbl1.col2 = 'testval'") 364 | .JOIN("table2", ON="table2.blah = tbl1.col2") 365 | .SELECT("table2.blah") 366 | .HAVING("count(*) > 5", 367 | "count(*) > 6") 368 | .GROUP_BY("table2.blah", "col1") 369 | .ORDER_BY("count(*)") 370 | .TOP(5)) 371 | 372 | assert s.query == top_select_expected 373 | assert s.binds == {} 374 | 375 | 376 | overwriting_select_methods_expected = """\ 377 | SELECT TOP 2 378 | tbl1.column1 AS col1, 379 | table2.blah 380 | FROM table1 AS tbl1 381 | JOIN table2 382 | ON table2.blah = tbl1.col2 383 | WHERE tbl1.col2 = 'testval' 384 | GROUP BY table1.column1, 385 | table2.blah 386 | HAVING count(*) < 10 AND 387 | count(*) > 5 388 | ORDER BY table1.column1, 389 | count(*) 390 | LIMIT 5 391 | OFFSET 3;""" 392 | 393 | 394 | def test_overwriting_select_methods(): 395 | s = (SELECT("tbl1.column1 AS col1") 396 | .FROM("table1 AS tbl1") 397 | .WHERE("tbl1.col2 = 'testval'") 398 | .JOIN("table2", ON="table2.blah = tbl1.col2") 399 | .SELECT("table2.blah") 400 | .HAVING("count(*) < 10") 401 | .HAVING("count(*) > 5") 402 | .GROUP_BY("table1.column1") 403 | .GROUP_BY("table2.blah") 404 | .ORDER_BY("table1.column1") 405 | .ORDER_BY("count(*)") 406 | .TOP('no way') 407 | .TOP(2) 408 | .LIMIT('no way') 409 | .LIMIT(5) 410 | .OFFSET('should not see this') 411 | .OFFSET(3)) 412 | 413 | assert s.query == overwriting_select_methods_expected 414 | assert s.binds == {} 415 | 416 | 417 | binds_expected = """\ 418 | SELECT tbl1.column1 AS col1 419 | FROM table1 AS tbl1 420 | WHERE tbl1.col2 = 'testval' AND 421 | tbl1.col3 = %(bind1)s;""" 422 | 423 | 424 | def test_binds(): 425 | s1 = (SELECT("tbl1.column1 AS col1") 426 | .FROM("table1 AS tbl1") 427 | .WHERE("tbl1.col2 = 'testval'") 428 | .WHERE("tbl1.col3 = %(bind1)s") 429 | .bind(bind1='bind1value')) 430 | 431 | assert s1.query == binds_expected 432 | assert s1.binds == {'bind1': 'bind1value'} 433 | 434 | 435 | generate_binds_expected = """\ 436 | SELECT tbl1.column1 AS col1 
437 | FROM table1 AS tbl1 438 | WHERE id = %(id_bind_0)s AND 439 | name = %(name_bind_1)s AND 440 | occupation = %(occupation_bind_2)s AND 441 | salary = %(salary_bind_3)s AND 442 | tbl1.col3 = %(bind1)s;""" 443 | 444 | 445 | def test_generate_binds(): 446 | s1 = (SELECT("tbl1.column1 AS col1") 447 | .FROM("table1 AS tbl1") 448 | .WHERE(id=1) 449 | .WHERE(name='bossanova') 450 | .WHERE(occupation='rascal', salary=None) 451 | .WHERE("tbl1.col3 = %(bind1)s") 452 | .bind(bind1='bind1value')) 453 | 454 | assert s1.query == generate_binds_expected 455 | assert s1.binds == {'bind1': 'bind1value', 456 | 'id_bind_0': 1, 457 | 'name_bind_1': 'bossanova', 458 | 'occupation_bind_2': 'rascal', 459 | 'salary_bind_3': None} 460 | 461 | 462 | def test_generative_query(): 463 | s1 = (SELECT("tbl1.column1 AS col1") 464 | .FROM("table1 AS tbl1") 465 | .WHERE("tbl1.col2 = 'testval'") 466 | .WHERE("tbl1.col3 = 'otherval'")) 467 | 468 | s2 = s1.WHERE("tbl1.col4 = 'otherother'") 469 | 470 | s3 = s2.JOIN("table2 AS tbl2", USING="somecol").bind(val='whatevs') 471 | s4 = s3.JOIN("table3 AS tbl3", ON="tbl3.colx = tbl2.coly") 472 | s5 = s4.SELECT("tbl3.whatever AS whatever").bind(test='test2', val='nope') 473 | 474 | expected1 = '\n'.join([ 475 | "SELECT tbl1.column1 AS col1", 476 | " FROM table1 AS tbl1", 477 | " WHERE tbl1.col2 = 'testval' AND", 478 | " tbl1.col3 = 'otherval';"]) 479 | 480 | expected2 = '\n'.join([ 481 | "SELECT tbl1.column1 AS col1", 482 | " FROM table1 AS tbl1", 483 | " WHERE tbl1.col2 = 'testval' AND", 484 | " tbl1.col3 = 'otherval' AND", 485 | " tbl1.col4 = 'otherother';"]) 486 | 487 | expected3 = '\n'.join([ 488 | "SELECT tbl1.column1 AS col1", 489 | " FROM table1 AS tbl1", 490 | " JOIN table2 AS tbl2", 491 | " USING (somecol)", 492 | " WHERE tbl1.col2 = 'testval' AND", 493 | " tbl1.col3 = 'otherval' AND", 494 | " tbl1.col4 = 'otherother';"]) 495 | 496 | expected4 = '\n'.join([ 497 | "SELECT tbl1.column1 AS col1", 498 | " FROM table1 AS tbl1", 499 | " JOIN table2 AS 
tbl2", 500 | " USING (somecol)", 501 | " JOIN table3 AS tbl3", 502 | " ON tbl3.colx = tbl2.coly", 503 | " WHERE tbl1.col2 = 'testval' AND", 504 | " tbl1.col3 = 'otherval' AND", 505 | " tbl1.col4 = 'otherother';"]) 506 | 507 | expected5 = '\n'.join([ 508 | "SELECT tbl1.column1 AS col1,", 509 | " tbl3.whatever AS whatever", 510 | " FROM table1 AS tbl1", 511 | " JOIN table2 AS tbl2", 512 | " USING (somecol)", 513 | " JOIN table3 AS tbl3", 514 | " ON tbl3.colx = tbl2.coly", 515 | " WHERE tbl1.col2 = 'testval' AND", 516 | " tbl1.col3 = 'otherval' AND", 517 | " tbl1.col4 = 'otherother';"]) 518 | 519 | assert s5.query == expected5 520 | assert s5.binds == {'test': 'test2', 'val': 'nope'} 521 | assert s4.query == expected4 522 | assert s4.binds == {'val': 'whatevs'} 523 | assert s3.query == expected3 524 | assert s3.binds == {'val': 'whatevs'} 525 | assert s2.query == expected2 526 | assert s2.binds == {} 527 | assert s1.query == expected1 528 | assert s1.binds == {} 529 | 530 | 531 | simple_update_expected = """\ 532 | UPDATE table1 533 | SET col1 = 'test', 534 | col2 = 'test2';""" 535 | 536 | 537 | def test_simple_update(): 538 | u = (UPDATE("table1") 539 | .SET("col1 = 'test'") 540 | .SET("col2 = 'test2'")) 541 | 542 | assert u.query == simple_update_expected 543 | assert u.binds == {} 544 | 545 | 546 | simple_update_star_kw_query = """\ 547 | UPDATE table1 548 | SET col1 = %(col1_bind)s, 549 | col2 = %(col2_bind)s;""" 550 | 551 | 552 | def test_simple_update_star_kw(): 553 | u = (UPDATE("table1") 554 | .SET(**{'col1': 'test', 555 | 'col2': 'test2'})) 556 | 557 | assert u.query == simple_update_star_kw_query 558 | assert u.binds == dict(col1_bind='test', 559 | col2_bind='test2') 560 | 561 | 562 | update_one_row_query = """\ 563 | UPDATE table1 564 | SET col1 = 'test', 565 | col2 = 'test2' 566 | WHERE id = %(id_bind_0)s;""" 567 | 568 | 569 | def test_update_one_row(): 570 | u = (UPDATE("table1") 571 | .SET("col1 = 'test'") 572 | .SET("col2 = 'test2'") 573 | .WHERE(id=5)) 
574 | 575 | assert u.query == update_one_row_query 576 | assert u.binds == {'id_bind_0': 5} 577 | 578 | 579 | named_arg_update_query = """\ 580 | UPDATE table1 581 | SET col1 = %(col1_bind)s, 582 | col2 = 'test2' 583 | WHERE id = %(id_bind_1)s;""" 584 | 585 | 586 | def test_named_arg_update(): 587 | u = (UPDATE("table1") 588 | .SET(col1='test') 589 | .SET("col2 = 'test2'") 590 | .WHERE(id=5)) 591 | 592 | assert u.query == named_arg_update_query 593 | assert u.binds == {'col1_bind': 'test', 594 | 'id_bind_1': 5} 595 | 596 | 597 | update_returning_query = """\ 598 | UPDATE table1 599 | SET col1 = %(col1_bind)s 600 | RETURNING test, test1;""" 601 | 602 | 603 | def test_update_returning(): 604 | u = (UPDATE("table1") 605 | .SET(col1='test') 606 | .RETURNING('test', 'test1')) 607 | 608 | assert u.query == update_returning_query 609 | assert u.binds == {'col1_bind': 'test'} 610 | 611 | 612 | def test_simple_delete(): 613 | d = DELETE('table1') 614 | 615 | assert d.query == 'DELETE FROM table1;' 616 | assert d.binds == {} 617 | 618 | 619 | delete_where_query = """\ 620 | DELETE FROM table2 621 | WHERE x > 5;""" 622 | 623 | 624 | def test_delete_where(): 625 | d = (DELETE('table2') 626 | .WHERE('x > 5')) 627 | 628 | assert d.query == delete_where_query 629 | assert d.binds == {} 630 | 631 | 632 | def test_delete_where_autobind(): 633 | d = (DELETE('table3') 634 | .WHERE(x=25)) 635 | 636 | assert d.query == '\n'.join([ 637 | "DELETE FROM table3", 638 | " WHERE x = %(x_bind_0)s;"]) 639 | assert d.binds == {'x_bind_0': 25} 640 | 641 | 642 | def test_delete_returning(): 643 | d = (DELETE('table3') 644 | .WHERE(x=25) 645 | .RETURNING('this', 'that')) 646 | 647 | assert d.query == '\n'.join([ 648 | "DELETE FROM table3", 649 | " WHERE x = %(x_bind_0)s", 650 | "RETURNING this, that;"]) 651 | assert d.binds == {'x_bind_0': 25} 652 | 653 | 654 | row1 = {'name': 'justin', 'zipcode': 23344} 655 | row2 = {'name': 'nintendo', 'phone': '1112223333'} 656 | 657 | 658 | def 
test_basic_insert(): 659 | i = INSERT('table1', data=row1) 660 | 661 | assert i.binds == {'name_0': 'justin', 'zipcode_0': 23344} 662 | assert i.query == ('INSERT INTO table1 (name, zipcode)\n' 663 | ' VALUES\n' 664 | '(%(name_0)s, %(zipcode_0)s);') 665 | 666 | 667 | def test_multi_insert(): 668 | i = INSERT('table1', data=[row1, row2]) 669 | assert i.binds == {'name_0': 'justin', 670 | 'phone_0': None, 671 | 'zipcode_0': 23344, 672 | 'name_1': 'nintendo', 673 | 'phone_1': '1112223333', 674 | 'zipcode_1': None} 675 | 676 | assert i.query == ('INSERT INTO table1 (name, phone, zipcode)\n' 677 | ' VALUES\n' 678 | '(%(name_0)s, %(phone_0)s, %(zipcode_0)s),\n' 679 | '(%(name_1)s, %(phone_1)s, %(zipcode_1)s);') 680 | 681 | 682 | def test_setting_default(): 683 | i = INSERT('table1', data=[row1, row2], default=2) 684 | assert i.binds == {'name_0': 'justin', 685 | 'phone_0': 2, 686 | 'zipcode_0': 23344, 687 | 'name_1': 'nintendo', 688 | 'phone_1': '1112223333', 689 | 'zipcode_1': 2} 690 | 691 | 692 | def test_setting_columns(): 693 | i = INSERT('table1', data=row1, columns=['name', 'address']) 694 | assert i.binds == {'name_0': 'justin', 'address_0': None} 695 | assert i.query == ('INSERT INTO table1 (name, address)\n' 696 | ' VALUES\n' 697 | '(%(name_0)s, %(address_0)s);') 698 | 699 | 700 | def test_setting_columns_default(): 701 | i = INSERT('table1', data=[row1, row2], columns=['phone'], default='blah') 702 | assert i.binds == {'phone_0': 'blah', 703 | 'phone_1': '1112223333'} 704 | 705 | assert i.query == ('INSERT INTO table1 (phone)\n' 706 | ' VALUES\n' 707 | '(%(phone_0)s),\n' 708 | '(%(phone_1)s);') 709 | 710 | 711 | def test_insert_no_columns(): 712 | i = INSERT('table1', data=[row1, row2], columns=['phone'], default='blah') 713 | assert i.binds == {'phone_0': 'blah', 714 | 'phone_1': '1112223333'} 715 | 716 | assert i.query == ('INSERT INTO table1 (phone)\n' 717 | ' VALUES\n' 718 | '(%(phone_0)s),\n' 719 | '(%(phone_1)s);') 720 | 721 | 722 | def 
test_insert_default_values(): 723 | i = INSERT('table1') 724 | 725 | assert i.binds == {} 726 | assert i.query == 'INSERT INTO table1 DEFAULT VALUES;' 727 | 728 | 729 | def test_insert_on_conflict(): 730 | i = INSERT('table1', 731 | data={'col1': 'val1', 'col2': 'val2'}, 732 | on_conflict='(col1) DO NOTHING') 733 | 734 | assert i.binds == dict(col1_0='val1', col2_0='val2') 735 | assert i.query == ('INSERT INTO table1 (col1, col2)\n' 736 | ' VALUES\n' 737 | '(%(col1_0)s, %(col2_0)s)' 738 | '\nON CONFLICT (col1) DO NOTHING;') 739 | 740 | 741 | def test_asis_insert(): 742 | row = {'name': NormAsIs('now()'), 'zipcode': 23344} 743 | 744 | i = INSERT('table1', data=[row]) 745 | assert i.query == ('INSERT INTO table1 (name, zipcode)\n' 746 | ' VALUES\n' 747 | '(now(), %(zipcode_0)s);') 748 | assert i.binds == {'zipcode_0': 23344} 749 | 750 | 751 | test_with_query = """\ 752 | WITH my_fake_table AS 753 | (UPDATE sometable 754 | SET foo = %(foo_bind)s 755 | RETURNING foo, bar, whatever) 756 | 757 | INSERT INTO my_table (foo, bar, whatever) 758 | SELECT foo, 759 | bub, 760 | derp 761 | FROM my_fake_table;""" 762 | 763 | 764 | def test_with(): 765 | u = (UPDATE('sometable') 766 | .SET(foo='123') 767 | .RETURNING('foo', 'bar', 'whatever')) 768 | w = WITH(my_fake_table=u) 769 | w = w(INSERT('my_table', 770 | columns=('foo', 'bar', 'whatever'), 771 | statement=(SELECT('foo', 'bub', 'derp') 772 | .FROM('my_fake_table')))) 773 | 774 | assert w.query == test_with_query 775 | assert w.binds == {'foo_bind': '123'} 776 | 777 | 778 | with_multiple_query = """\ 779 | WITH cte_table_1 AS 780 | (SELECT mt.row1, 781 | mt.row2 AS row2 782 | FROM mytable AS mt 783 | JOIN othertable AS ot 784 | ON ot.bub = mt.foo 785 | WHERE 1 = 1), 786 | cte_table_2 AS 787 | (SELECT mt.row1, 788 | mt.row2 AS row2 789 | FROM mytable AS mt) 790 | 791 | SELECT ct1.row1 792 | FROM cte_table_1 ct1;""" 793 | 794 | 795 | def test_with_multiple(): 796 | s1 = (SELECT('mt.row1', 'mt.row2 AS row2') 797 | .FROM('mytable 
AS mt') 798 | .JOIN('othertable AS ot', 799 | ON='ot.bub = mt.foo') 800 | .WHERE('1 = 1')) 801 | s2 = (SELECT('mt.row1', 'mt.row2 AS row2') 802 | .FROM('mytable AS mt')) 803 | 804 | w = WITH(cte_table_1=s1, cte_table_2=s2) 805 | w = w(SELECT('ct1.row1').FROM('cte_table_1 ct1')) 806 | 807 | assert w.query == with_multiple_query 808 | assert w.binds == {} 809 | 810 | 811 | loggable_query_test = """\ 812 | UPDATE table1 813 | SET col1 = 'test', 814 | col2 = datetime.datetime(1997, 11, 15, 1, 1) 815 | RETURNING test, test1;""" 816 | 817 | 818 | def test_loggable_query(): 819 | u = (UPDATE("table1") 820 | .SET(col1='test', 821 | col2=datetime(1997, 11, 15, 1, 1)) 822 | .RETURNING('test', 'test1')) 823 | 824 | assert u._loggable_query == loggable_query_test 825 | 826 | 827 | union_query = """\ 828 | SELECT foo, 829 | bub, 830 | derp 831 | FROM my_fake_table 832 | WHERE foo = %(foo_bind_0)s;""" 833 | 834 | 835 | def test_union(): 836 | s = (SELECT('foo', 'bub', 'derp') 837 | .FROM('my_fake_table') 838 | .WHERE(foo=1)) 839 | u = UNION() 840 | u.append(s) 841 | 842 | assert u.query == union_query 843 | assert u.binds == {'foo_bind_0': 1} 844 | 845 | 846 | union_multi_query = """\ 847 | SELECT foo, 848 | bub, 849 | derp 850 | FROM my_fake_table 851 | WHERE foo = %(foo_bind_0)s 852 | UNION 853 | SELECT foo, 854 | bub, 855 | derp 856 | FROM my_fake_table 857 | WHERE foo = %(foo_bind_0)s;""" 858 | 859 | 860 | def test_union_multi(): 861 | s1 = (SELECT('foo', 'bub', 'derp') 862 | .FROM('my_fake_table') 863 | .WHERE(foo=1)) 864 | s2 = (SELECT('foo', 'bub', 'derp') 865 | .FROM('my_fake_table') 866 | .WHERE(foo=2)) 867 | u = UNION() 868 | u.append(s1) 869 | u.append(s2) 870 | 871 | assert u.query == union_multi_query 872 | assert u.binds == {'foo_bind_0': 2} 873 | 874 | 875 | union_all_multi_query = """\ 876 | SELECT foo, 877 | bub, 878 | derp 879 | FROM my_fake_table 880 | WHERE foo = %(foo_bind_0)s 881 | UNION ALL 882 | SELECT foo, 883 | bub, 884 | derp 885 | FROM my_fake_table 
886 | WHERE foo = %(foo_bind_0)s;""" 887 | 888 | 889 | def test_union_all_multi(): 890 | s1 = (SELECT('foo', 'bub', 'derp') 891 | .FROM('my_fake_table') 892 | .WHERE(foo=1)) 893 | s2 = (SELECT('foo', 'bub', 'derp') 894 | .FROM('my_fake_table') 895 | .WHERE(foo=2)) 896 | u = UNION_ALL() 897 | u.append(s1) 898 | u.append(s2) 899 | 900 | assert u.query == union_all_multi_query 901 | assert u.binds == {'foo_bind_0': 2} 902 | -------------------------------------------------------------------------------- /tests/test_pymssql.py: -------------------------------------------------------------------------------- 1 | from pytest import raises 2 | 3 | from norm.norm_pymssql import PYMSSQL_INSERT as INSERT 4 | 5 | rows = [{'test': 'good', 'bub': 5}] 6 | 7 | 8 | def test_insert(): 9 | i = INSERT('my_table', rows) 10 | 11 | assert i.query == ( 12 | 'INSERT INTO my_table (bub, test)\n VALUES\n(%(bub_0)s, %(test_0)s);') 13 | 14 | 15 | def test_encrypted_insert(): 16 | i = INSERT('my_table', 17 | rows, 18 | encrypted_columns=['bub'], 19 | encryption_key='fookey') 20 | assert i.query == ( 21 | 'INSERT INTO my_table (bub, test)\n' 22 | " VALUES\n" 23 | "(EncryptByKey(Key_GUID('fookey'), CAST(%(bub_0)s AS VARCHAR(4000))), " 24 | '%(test_0)s);') 25 | 26 | 27 | def test_error_on_no_key(): 28 | with raises(RuntimeError): 29 | INSERT('my_table', rows, encrypted_columns=['bub']) 30 | -------------------------------------------------------------------------------- /tests/test_rows.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | from norm import RowsProxy 4 | 5 | column_names = ('user_id', 'opponent_name', 'game_id', 'score') 6 | 7 | 8 | def make_rows(): 9 | return [ 10 | (5, 'John', 55, 34.14), 11 | (5, 'John', 57, 35.14), 12 | (5, 'Dirk', 59, 37.14), 13 | (5, 'Dirk', 60, 38.14), 14 | (6, 'Gabe', 95, 32.14), 15 | (6, 'Gabe', 31, 31.14), 16 | (6, 'Ted', 5, 4.14), 17 | (7, 'Jim', 27, 8.14)] 18 | 19 | 20 | 
def test_grouping(): 21 | empty_rp = RowsProxy([], column_names) 22 | assert len(empty_rp) == 0 23 | assert not bool(empty_rp) 24 | 25 | rp = RowsProxy(make_rows(), column_names) 26 | 27 | assert len(rp) == 8 28 | assert bool(rp) 29 | 30 | id_score_map = {} 31 | for user_id, rows in rp('user_id'): 32 | scores = [] 33 | for row in rows: 34 | scores.append(row.get('score')) 35 | id_score_map[user_id] = scores 36 | 37 | assert id_score_map == { 38 | 5: [34.14, 35.14, 37.14, 38.14], 39 | 6: [32.14, 31.14, 4.14], 40 | 7: [8.14]} 41 | 42 | unique_games = [] 43 | for (user_id, opponent_name), rows in rp('user_id', 'opponent_name'): 44 | unique_games.append((user_id, opponent_name)) 45 | assert unique_games == [ 46 | (5, 'John'), 47 | (5, 'Dirk'), 48 | (6, 'Gabe'), 49 | (6, 'Ted'), 50 | (7, 'Jim')] 51 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | skipsdist = true 3 | envlist = py37, py38, py39 4 | 5 | [testenv] 6 | whitelist_externals = poetry 7 | commands = 8 | poetry install -q 9 | pytest 10 | --------------------------------------------------------------------------------