├── config ├── .keep └── config.tf ├── lambda ├── pymysql │ ├── constants │ │ ├── __init__.py │ │ ├── CR.pyc │ │ ├── ER.pyc │ │ ├── FLAG.pyc │ │ ├── CLIENT.pyc │ │ ├── COMMAND.pyc │ │ ├── __init__.pyc │ │ ├── FIELD_TYPE.pyc │ │ ├── SERVER_STATUS.pyc │ │ ├── FLAG.py │ │ ├── SERVER_STATUS.py │ │ ├── FIELD_TYPE.py │ │ ├── COMMAND.py │ │ ├── CLIENT.py │ │ ├── CR.py │ │ └── ER.py │ ├── err.pyc │ ├── util.pyc │ ├── _compat.pyc │ ├── charset.pyc │ ├── cursors.pyc │ ├── times.pyc │ ├── __init__.pyc │ ├── _socketio.pyc │ ├── connections.pyc │ ├── converters.pyc │ ├── optionfile.pyc │ ├── tests │ │ ├── base.pyc │ │ ├── __init__.pyc │ │ ├── test_err.pyc │ │ ├── test_basic.pyc │ │ ├── test_cursor.pyc │ │ ├── test_issues.pyc │ │ ├── test_SSCursor.pyc │ │ ├── test_nextset.pyc │ │ ├── test_DictCursor.pyc │ │ ├── test_connection.pyc │ │ ├── test_converters.pyc │ │ ├── test_load_local.pyc │ │ ├── test_optionfile.pyc │ │ ├── thirdparty │ │ │ ├── __init__.pyc │ │ │ ├── test_MySQLdb │ │ │ │ ├── dbapi20.pyc │ │ │ │ ├── __init__.pyc │ │ │ │ ├── capabilities.pyc │ │ │ │ ├── test_MySQLdb_dbapi20.pyc │ │ │ │ ├── test_MySQLdb_nonstandard.pyc │ │ │ │ ├── test_MySQLdb_capabilities.pyc │ │ │ │ ├── __init__.py │ │ │ │ ├── test_MySQLdb_nonstandard.py │ │ │ │ ├── test_MySQLdb_capabilities.py │ │ │ │ ├── test_MySQLdb_dbapi20.py │ │ │ │ └── capabilities.py │ │ │ └── __init__.py │ │ ├── __init__.py │ │ ├── test_err.py │ │ ├── test_optionfile.py │ │ ├── test_nextset.py │ │ ├── test_converters.py │ │ ├── base.py │ │ ├── test_load_local.py │ │ ├── test_SSCursor.py │ │ ├── test_cursor.py │ │ ├── test_DictCursor.py │ │ ├── test_basic.py │ │ └── test_issues.py │ ├── util.py │ ├── times.py │ ├── _compat.py │ ├── optionfile.py │ ├── err.py │ ├── _socketio.py │ ├── __init__.py │ ├── converters.py │ ├── charset.py │ └── cursors.py ├── lambda-demo.zip └── lambda-demo.py ├── .gitignore ├── screens ├── pic1.jpg ├── pic2.jpg └── pic3.jpg ├── tf ├── variables.tf ├── aurora.tf ├── vpc.tf └── lambda.tf ├── params 
├── README.md └── manage /config/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lambda/pymysql/constants/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .terraform 2 | *tfstate* 3 | .tfbin 4 | rds_config.py 5 | -------------------------------------------------------------------------------- /screens/pic1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/screens/pic1.jpg -------------------------------------------------------------------------------- /screens/pic2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/screens/pic2.jpg -------------------------------------------------------------------------------- /screens/pic3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/screens/pic3.jpg -------------------------------------------------------------------------------- /lambda/lambda-demo.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/lambda-demo.zip -------------------------------------------------------------------------------- /lambda/pymysql/err.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/err.pyc 
-------------------------------------------------------------------------------- /lambda/pymysql/util.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/util.pyc -------------------------------------------------------------------------------- /lambda/pymysql/_compat.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/_compat.pyc -------------------------------------------------------------------------------- /lambda/pymysql/charset.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/charset.pyc -------------------------------------------------------------------------------- /lambda/pymysql/cursors.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/cursors.pyc -------------------------------------------------------------------------------- /lambda/pymysql/times.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/times.pyc -------------------------------------------------------------------------------- /lambda/pymysql/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/__init__.pyc -------------------------------------------------------------------------------- /lambda/pymysql/_socketio.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/_socketio.pyc -------------------------------------------------------------------------------- /lambda/pymysql/connections.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/connections.pyc -------------------------------------------------------------------------------- /lambda/pymysql/constants/CR.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/constants/CR.pyc -------------------------------------------------------------------------------- /lambda/pymysql/constants/ER.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/constants/ER.pyc -------------------------------------------------------------------------------- /lambda/pymysql/converters.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/converters.pyc -------------------------------------------------------------------------------- /lambda/pymysql/optionfile.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/optionfile.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/base.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/base.pyc -------------------------------------------------------------------------------- /lambda/pymysql/constants/FLAG.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/constants/FLAG.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/__init__.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_err.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_err.pyc -------------------------------------------------------------------------------- /lambda/pymysql/constants/CLIENT.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/constants/CLIENT.pyc -------------------------------------------------------------------------------- /lambda/pymysql/constants/COMMAND.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/constants/COMMAND.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_basic.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_basic.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_cursor.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_cursor.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_issues.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_issues.pyc -------------------------------------------------------------------------------- /lambda/pymysql/constants/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/constants/__init__.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_SSCursor.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_SSCursor.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_nextset.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_nextset.pyc -------------------------------------------------------------------------------- /lambda/pymysql/constants/FIELD_TYPE.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/constants/FIELD_TYPE.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_DictCursor.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_DictCursor.pyc 
-------------------------------------------------------------------------------- /lambda/pymysql/tests/test_connection.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_connection.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_converters.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_converters.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_load_local.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_load_local.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_optionfile.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/test_optionfile.pyc -------------------------------------------------------------------------------- /lambda/pymysql/constants/SERVER_STATUS.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/constants/SERVER_STATUS.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/thirdparty/__init__.pyc -------------------------------------------------------------------------------- 
/lambda/pymysql/tests/thirdparty/test_MySQLdb/dbapi20.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/thirdparty/test_MySQLdb/dbapi20.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/test_MySQLdb/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/thirdparty/test_MySQLdb/__init__.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/test_MySQLdb/capabilities.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/thirdparty/test_MySQLdb/capabilities.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_dbapi20.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_dbapi20.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_nonstandard.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_nonstandard.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_capabilities.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cicdteam/lambda-aurora-example/HEAD/lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_capabilities.pyc -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/__init__.py: -------------------------------------------------------------------------------- 1 | from .test_MySQLdb import * 2 | 3 | if __name__ == "__main__": 4 | try: 5 | import unittest2 as unittest 6 | except ImportError: 7 | import unittest 8 | unittest.main() 9 | -------------------------------------------------------------------------------- /config/config.tf: -------------------------------------------------------------------------------- 1 | variable "region" {} 2 | 3 | provider "aws" { 4 | region = "${var.region}" 5 | max_retries = "10" 6 | } 7 | 8 | module "main" { 9 | source = "../tf" 10 | region = "${var.region}" 11 | } 12 | -------------------------------------------------------------------------------- /lambda/pymysql/constants/FLAG.py: -------------------------------------------------------------------------------- 1 | NOT_NULL = 1 2 | PRI_KEY = 2 3 | UNIQUE_KEY = 4 4 | MULTIPLE_KEY = 8 5 | BLOB = 16 6 | UNSIGNED = 32 7 | ZEROFILL = 64 8 | BINARY = 128 9 | ENUM = 256 10 | AUTO_INCREMENT = 512 11 | TIMESTAMP = 1024 12 | SET = 2048 13 | PART_KEY = 16384 14 | GROUP = 32767 15 | UNIQUE = 65536 16 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/test_MySQLdb/__init__.py: -------------------------------------------------------------------------------- 1 | from .test_MySQLdb_capabilities import test_MySQLdb as test_capabilities 2 | from .test_MySQLdb_nonstandard import * 3 | from .test_MySQLdb_dbapi20 import test_MySQLdb as test_dbapi2 4 | 5 | if __name__ == "__main__": 6 | import unittest 7 | unittest.main() 8 | -------------------------------------------------------------------------------- /lambda/pymysql/constants/SERVER_STATUS.py: 
-------------------------------------------------------------------------------- 1 | 2 | SERVER_STATUS_IN_TRANS = 1 3 | SERVER_STATUS_AUTOCOMMIT = 2 4 | SERVER_MORE_RESULTS_EXISTS = 8 5 | SERVER_QUERY_NO_GOOD_INDEX_USED = 16 6 | SERVER_QUERY_NO_INDEX_USED = 32 7 | SERVER_STATUS_CURSOR_EXISTS = 64 8 | SERVER_STATUS_LAST_ROW_SENT = 128 9 | SERVER_STATUS_DB_DROPPED = 256 10 | SERVER_STATUS_NO_BACKSLASH_ESCAPES = 512 11 | SERVER_STATUS_METADATA_CHANGED = 1024 12 | -------------------------------------------------------------------------------- /lambda/pymysql/util.py: -------------------------------------------------------------------------------- 1 | import struct 2 | 3 | 4 | def byte2int(b): 5 | if isinstance(b, int): 6 | return b 7 | else: 8 | return struct.unpack("!B", b)[0] 9 | 10 | 11 | def int2byte(i): 12 | return struct.pack("!B", i) 13 | 14 | 15 | def join_bytes(bs): 16 | if len(bs) == 0: 17 | return "" 18 | else: 19 | rv = bs[0] 20 | for b in bs[1:]: 21 | rv += b 22 | return rv 23 | -------------------------------------------------------------------------------- /lambda/pymysql/times.py: -------------------------------------------------------------------------------- 1 | from time import localtime 2 | from datetime import date, datetime, time, timedelta 3 | 4 | 5 | Date = date 6 | Time = time 7 | TimeDelta = timedelta 8 | Timestamp = datetime 9 | 10 | 11 | def DateFromTicks(ticks): 12 | return date(*localtime(ticks)[:3]) 13 | 14 | 15 | def TimeFromTicks(ticks): 16 | return time(*localtime(ticks)[3:6]) 17 | 18 | 19 | def TimestampFromTicks(ticks): 20 | return datetime(*localtime(ticks)[:6]) 21 | -------------------------------------------------------------------------------- /tf/variables.tf: -------------------------------------------------------------------------------- 1 | ################# 2 | # External vars # 3 | ################# 4 | 5 | variable "region" {} 6 | 7 | ################# 8 | # Internal vars # 9 | ################# 10 | 11 | # DB credentials 
12 | 13 | variable "db_username" { 14 | default = "demouser" 15 | } 16 | variable "db_password" { 17 | default = "password" 18 | } 19 | 20 | # Networking... 21 | # 22 | 23 | variable "vpc_cidr" { 24 | description = "CIDR for the whole VPC" 25 | default = "10.99.0.0/16" 26 | } 27 | -------------------------------------------------------------------------------- /lambda/pymysql/constants/FIELD_TYPE.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | DECIMAL = 0 4 | TINY = 1 5 | SHORT = 2 6 | LONG = 3 7 | FLOAT = 4 8 | DOUBLE = 5 9 | NULL = 6 10 | TIMESTAMP = 7 11 | LONGLONG = 8 12 | INT24 = 9 13 | DATE = 10 14 | TIME = 11 15 | DATETIME = 12 16 | YEAR = 13 17 | NEWDATE = 14 18 | VARCHAR = 15 19 | BIT = 16 20 | JSON = 245 21 | NEWDECIMAL = 246 22 | ENUM = 247 23 | SET = 248 24 | TINY_BLOB = 249 25 | MEDIUM_BLOB = 250 26 | LONG_BLOB = 251 27 | BLOB = 252 28 | VAR_STRING = 253 29 | STRING = 254 30 | GEOMETRY = 255 31 | 32 | CHAR = TINY 33 | INTERVAL = ENUM 34 | -------------------------------------------------------------------------------- /lambda/pymysql/_compat.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | PY2 = sys.version_info[0] == 2 4 | PYPY = hasattr(sys, 'pypy_translation_info') 5 | JYTHON = sys.platform.startswith('java') 6 | IRONPYTHON = sys.platform == 'cli' 7 | CPYTHON = not PYPY and not JYTHON and not IRONPYTHON 8 | 9 | if PY2: 10 | import __builtin__ 11 | range_type = xrange 12 | text_type = unicode 13 | long_type = long 14 | str_type = basestring 15 | unichr = __builtin__.unichr 16 | else: 17 | range_type = range 18 | text_type = str 19 | long_type = int 20 | str_type = str 21 | unichr = chr 22 | -------------------------------------------------------------------------------- /params: -------------------------------------------------------------------------------- 1 | ################################ 2 | # Project details and defaults # 3 | 
################################ 4 | 5 | # AWS region where to create AWS resources 6 | region=us-east-1 7 | 8 | ################### 9 | # AWS credentials # 10 | ################### 11 | 12 | # You could set credentials here but better use environment variables exported in shell 13 | aws_access_key_id=$AWS_ACCESS_KEY_ID 14 | aws_secret_access_key=$AWS_SECRET_ACCESS_KEY 15 | 16 | ##################### 17 | # Terrafrom details # 18 | ##################### 19 | 20 | # Terrafrom version to use 21 | terraform_ver=0.8.5 22 | -------------------------------------------------------------------------------- /lambda/pymysql/optionfile.py: -------------------------------------------------------------------------------- 1 | from ._compat import PY2 2 | 3 | if PY2: 4 | import ConfigParser as configparser 5 | else: 6 | import configparser 7 | 8 | 9 | class Parser(configparser.RawConfigParser): 10 | 11 | def __remove_quotes(self, value): 12 | quotes = ["'", "\""] 13 | for quote in quotes: 14 | if len(value) >= 2 and value[0] == value[-1] == quote: 15 | return value[1:-1] 16 | return value 17 | 18 | def get(self, section, option): 19 | value = configparser.RawConfigParser.get(self, section, option) 20 | return self.__remove_quotes(value) 21 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Sorted by alphabetical order 2 | from pymysql.tests.test_DictCursor import * 3 | from pymysql.tests.test_SSCursor import * 4 | from pymysql.tests.test_basic import * 5 | from pymysql.tests.test_connection import * 6 | from pymysql.tests.test_converters import * 7 | from pymysql.tests.test_cursor import * 8 | from pymysql.tests.test_err import * 9 | from pymysql.tests.test_issues import * 10 | from pymysql.tests.test_load_local import * 11 | from pymysql.tests.test_nextset import * 12 | from pymysql.tests.test_optionfile import * 13 | 14 | from 
pymysql.tests.thirdparty import * 15 | 16 | if __name__ == "__main__": 17 | import unittest2 18 | unittest2.main() 19 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_err.py: -------------------------------------------------------------------------------- 1 | import unittest2 2 | 3 | from pymysql import err 4 | 5 | 6 | __all__ = ["TestRaiseException"] 7 | 8 | 9 | class TestRaiseException(unittest2.TestCase): 10 | 11 | def test_raise_mysql_exception(self): 12 | data = b"\xff\x15\x04Access denied" 13 | with self.assertRaises(err.OperationalError) as cm: 14 | err.raise_mysql_exception(data) 15 | self.assertEqual(cm.exception.args, (1045, 'Access denied')) 16 | 17 | def test_raise_mysql_exception_client_protocol_41(self): 18 | data = b"\xff\x15\x04#28000Access denied" 19 | with self.assertRaises(err.OperationalError) as cm: 20 | err.raise_mysql_exception(data) 21 | self.assertEqual(cm.exception.args, (1045, 'Access denied')) 22 | -------------------------------------------------------------------------------- /lambda/pymysql/constants/COMMAND.py: -------------------------------------------------------------------------------- 1 | 2 | COM_SLEEP = 0x00 3 | COM_QUIT = 0x01 4 | COM_INIT_DB = 0x02 5 | COM_QUERY = 0x03 6 | COM_FIELD_LIST = 0x04 7 | COM_CREATE_DB = 0x05 8 | COM_DROP_DB = 0x06 9 | COM_REFRESH = 0x07 10 | COM_SHUTDOWN = 0x08 11 | COM_STATISTICS = 0x09 12 | COM_PROCESS_INFO = 0x0a 13 | COM_CONNECT = 0x0b 14 | COM_PROCESS_KILL = 0x0c 15 | COM_DEBUG = 0x0d 16 | COM_PING = 0x0e 17 | COM_TIME = 0x0f 18 | COM_DELAYED_INSERT = 0x10 19 | COM_CHANGE_USER = 0x11 20 | COM_BINLOG_DUMP = 0x12 21 | COM_TABLE_DUMP = 0x13 22 | COM_CONNECT_OUT = 0x14 23 | COM_REGISTER_SLAVE = 0x15 24 | COM_STMT_PREPARE = 0x16 25 | COM_STMT_EXECUTE = 0x17 26 | COM_STMT_SEND_LONG_DATA = 0x18 27 | COM_STMT_CLOSE = 0x19 28 | COM_STMT_RESET = 0x1a 29 | COM_SET_OPTION = 0x1b 30 | COM_STMT_FETCH = 0x1c 31 | COM_DAEMON = 0x1d 32 | 
COM_BINLOG_DUMP_GTID = 0x1e 33 | COM_END = 0x1f 34 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_optionfile.py: -------------------------------------------------------------------------------- 1 | from pymysql.optionfile import Parser 2 | from unittest import TestCase 3 | from pymysql._compat import PY2 4 | 5 | try: 6 | from cStringIO import StringIO 7 | except ImportError: 8 | from io import StringIO 9 | 10 | 11 | __all__ = ['TestParser'] 12 | 13 | 14 | _cfg_file = (r""" 15 | [default] 16 | string = foo 17 | quoted = "bar" 18 | single_quoted = 'foobar' 19 | """) 20 | 21 | 22 | class TestParser(TestCase): 23 | 24 | def test_string(self): 25 | parser = Parser() 26 | if PY2: 27 | parser.readfp(StringIO(_cfg_file)) 28 | else: 29 | parser.read_file(StringIO(_cfg_file)) 30 | self.assertEqual(parser.get("default", "string"), "foo") 31 | self.assertEqual(parser.get("default", "quoted"), "bar") 32 | self.assertEqual(parser.get("default", "single_quoted"), "foobar") 33 | -------------------------------------------------------------------------------- /lambda/pymysql/constants/CLIENT.py: -------------------------------------------------------------------------------- 1 | # https://dev.mysql.com/doc/internals/en/capability-flags.html#packet-Protocol::CapabilityFlags 2 | LONG_PASSWORD = 1 3 | FOUND_ROWS = 1 << 1 4 | LONG_FLAG = 1 << 2 5 | CONNECT_WITH_DB = 1 << 3 6 | NO_SCHEMA = 1 << 4 7 | COMPRESS = 1 << 5 8 | ODBC = 1 << 6 9 | LOCAL_FILES = 1 << 7 10 | IGNORE_SPACE = 1 << 8 11 | PROTOCOL_41 = 1 << 9 12 | INTERACTIVE = 1 << 10 13 | SSL = 1 << 11 14 | IGNORE_SIGPIPE = 1 << 12 15 | TRANSACTIONS = 1 << 13 16 | SECURE_CONNECTION = 1 << 15 17 | MULTI_STATEMENTS = 1 << 16 18 | MULTI_RESULTS = 1 << 17 19 | PS_MULTI_RESULTS = 1 << 18 20 | PLUGIN_AUTH = 1 << 19 21 | PLUGIN_AUTH_LENENC_CLIENT_DATA = 1 << 21 22 | CAPABILITIES = ( 23 | LONG_PASSWORD | LONG_FLAG | PROTOCOL_41 | TRANSACTIONS 24 | | SECURE_CONNECTION | 
MULTI_STATEMENTS | MULTI_RESULTS 25 | | PLUGIN_AUTH | PLUGIN_AUTH_LENENC_CLIENT_DATA) 26 | 27 | # Not done yet 28 | CONNECT_ATTRS = 1 << 20 29 | HANDLE_EXPIRED_PASSWORDS = 1 << 22 30 | SESSION_TRACK = 1 << 23 31 | DEPRECATE_EOF = 1 << 24 32 | -------------------------------------------------------------------------------- /lambda/lambda-demo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | import sys 3 | import logging 4 | import rds_config 5 | import pymysql 6 | 7 | rds_host = rds_config.db_endpoint 8 | name = rds_config.db_username 9 | password = rds_config.db_password 10 | db_name = rds_config.db_name 11 | port = 3306 12 | 13 | logger = logging.getLogger() 14 | logger.setLevel(logging.INFO) 15 | 16 | try: 17 | conn = pymysql.connect(rds_host, user=name, 18 | passwd=password, db=db_name, connect_timeout=5) 19 | except: 20 | logger.error("ERROR: Unexpected error: Could not connect to Aurora instance.") 21 | sys.exit() 22 | 23 | logger.info("SUCCESS: Connection to RDS Aurora instance succeeded") 24 | def handler(event, context): 25 | """ 26 | This function inserts content into Aurora RDS instance 27 | """ 28 | item_count = 0 29 | 30 | with conn.cursor() as cur: 31 | cur.execute("drop table if exists Employee3") 32 | cur.execute("create table Employee3 (EmpID int NOT NULL, Name varchar(255) NOT NULL, PRIMARY KEY (EmpID))") 33 | cur.execute('insert into Employee3 (EmpID, Name) values(1, "Joe")') 34 | cur.execute('insert into Employee3 (EmpID, Name) values(2, "Bob")') 35 | cur.execute('insert into Employee3 (EmpID, Name) values(3, "Mary")') 36 | conn.commit() 37 | cur.execute("select * from Employee3") 38 | for row in cur: 39 | item_count += 1 40 | logger.info(row) 41 | return "Added %d items to RDS Aurora table" %(item_count) 42 | -------------------------------------------------------------------------------- /tf/aurora.tf: -------------------------------------------------------------------------------- 1 
| resource "aws_db_subnet_group" "demo" { 2 | name = "lambda-demo" 3 | subnet_ids = ["${aws_subnet.demo.*.id}"] 4 | tags { 5 | Name = "DB subnet group for lambda-demo" 6 | } 7 | } 8 | 9 | resource "aws_rds_cluster" "demo" { 10 | cluster_identifier = "lambda-demo" 11 | database_name = "lambdademo" 12 | master_username = "${var.db_username}" 13 | master_password = "${var.db_password}" 14 | db_subnet_group_name = "${aws_db_subnet_group.demo.id}" 15 | vpc_security_group_ids = ["${aws_security_group.demo.id}"] 16 | } 17 | 18 | resource "aws_rds_cluster_instance" "demo" { 19 | count = 2 20 | identifier = "lambda-demo-${count.index}" 21 | cluster_identifier = "${aws_rds_cluster.demo.id}" 22 | instance_class = "db.t2.medium" 23 | db_subnet_group_name = "${aws_db_subnet_group.demo.id}" 24 | } 25 | 26 | 27 | # Create rds_config.py file and zip all lambda python files to archive 28 | # 29 | resource "null_resource" "demo" { 30 | triggers { 31 | aurora = "${aws_rds_cluster.demo.endpoint}" 32 | db_user = "${var.db_username}" 33 | db_pass = "${var.db_password}" 34 | code = "${file("${path.module}/../lambda/lambda-demo.py")}" 35 | } 36 | 37 | provisioner "local-exec" { 38 | command = <rds_config.py; 41 | echo '#config file containing credentials for rds aurora instance' >>rds_config.py; 42 | echo 'db_username = "${var.db_username}"' >>rds_config.py; 43 | echo 'db_password = "${var.db_password}"' >>rds_config.py; 44 | echo 'db_name = "lambdademo"' >>rds_config.py; 45 | echo 'db_endpoint = "${aws_rds_cluster.demo.endpoint}"' >>rds_config.py; 46 | chmod 755 lambda-demo.py rds_config.py; 47 | zip -q -r lambda-demo.zip pymysql lambda-demo.py rds_config.py) 48 | EOF 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /tf/vpc.tf: -------------------------------------------------------------------------------- 1 | # Define AWS Virtual Private Cloud 2 | # 3 | resource "aws_vpc" "demo" { 4 | cidr_block = "${var.vpc_cidr}" 5 | 
enable_dns_support = true 6 | enable_dns_hostnames = true 7 | tags { 8 | Name = "lambda-demo-vpc" 9 | } 10 | } 11 | 12 | # Define AWS Subnet 13 | # 14 | data "aws_availability_zones" "available" { 15 | state = "available" 16 | } 17 | 18 | resource "aws_subnet" "demo" { 19 | count = 2 20 | vpc_id = "${aws_vpc.demo.id}" 21 | availability_zone = "${data.aws_availability_zones.available.names[count.index]}" 22 | cidr_block = "${cidrsubnet(aws_vpc.demo.cidr_block, 8, count.index)}" 23 | tags { 24 | Name = "lambda-demo-subnet-${count.index}" 25 | } 26 | map_public_ip_on_launch = true 27 | } 28 | 29 | # Define Internet Gateway 30 | # 31 | resource "aws_internet_gateway" "demo" { 32 | vpc_id = "${aws_vpc.demo.id}" 33 | tags { 34 | Name = "lambda-demo-gw" 35 | } 36 | } 37 | 38 | # Define route tables 39 | # 40 | resource "aws_route_table" "demo" { 41 | vpc_id = "${aws_vpc.demo.id}" 42 | 43 | route { 44 | cidr_block = "0.0.0.0/0" 45 | gateway_id = "${aws_internet_gateway.demo.id}" 46 | } 47 | tags { 48 | Name = "lambda-demo-routing" 49 | } 50 | } 51 | 52 | resource "aws_route_table_association" "demo" { 53 | count = 2 54 | subnet_id = "${element(aws_subnet.demo.*.id, count.index)}" 55 | route_table_id = "${aws_route_table.demo.id}" 56 | } 57 | 58 | # Define internal security group for VPC 59 | # 60 | resource "aws_security_group" "demo" { 61 | name = "lambda-demo-sg" 62 | vpc_id = "${aws_vpc.demo.id}" 63 | description = "Security group for lambda-demo" 64 | tags { 65 | Name = "lambda-demo-sg" 66 | } 67 | ingress { 68 | protocol = -1 # no limit inside VPC 69 | from_port = 0 70 | to_port = 0 71 | cidr_blocks = ["${var.vpc_cidr}"] 72 | } 73 | egress { 74 | protocol = -1 75 | from_port = 0 76 | to_port = 0 77 | cidr_blocks = ["0.0.0.0/0"] 78 | } 79 | lifecycle { 80 | create_before_destroy = true 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /tf/lambda.tf: 
-------------------------------------------------------------------------------- 1 | resource "aws_iam_role" "demo" { 2 | name = "iam_for_lambda_demo" 3 | assume_role_policy = <= version_tuple 42 | 43 | def setUp(self): 44 | self.connections = [] 45 | for params in self.databases: 46 | self.connections.append(pymysql.connect(**params)) 47 | self.addCleanup(self._teardown_connections) 48 | 49 | def _teardown_connections(self): 50 | for connection in self.connections: 51 | connection.close() 52 | 53 | def safe_create_table(self, connection, tablename, ddl, cleanup=True): 54 | """create a table. 55 | 56 | Ensures any existing version of that table is first dropped. 57 | 58 | Also adds a cleanup rule to drop the table after the test 59 | completes. 60 | """ 61 | cursor = connection.cursor() 62 | 63 | with warnings.catch_warnings(): 64 | warnings.simplefilter("ignore") 65 | cursor.execute("drop table if exists `%s`" % (tablename,)) 66 | cursor.execute(ddl) 67 | cursor.close() 68 | if cleanup: 69 | self.addCleanup(self.drop_table, connection, tablename) 70 | 71 | def drop_table(self, connection, tablename): 72 | cursor = connection.cursor() 73 | with warnings.catch_warnings(): 74 | warnings.simplefilter("ignore") 75 | cursor.execute("drop table if exists `%s`" % (tablename,)) 76 | cursor.close() 77 | 78 | def safe_gc_collect(self): 79 | """Ensure cycles are collected via gc. 80 | 81 | Runs additional times on non-CPython platforms. 82 | 83 | """ 84 | gc.collect() 85 | if not CPYTHON: 86 | gc.collect() 87 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AWS Lambda function example 2 | 3 | This repo contains example of AWS lambda function interacting with AWS RDS Aurora cluster in VPC. 4 | To create AWS resources used [Terrafom](http://terraform.io). 5 | 6 | ### TL;DR 7 | 8 | ``` 9 | 1. Inspect 'params' file 10 | 2. 
Set environment variables with AWS credentials 11 | 2.1. export AWS_ACCESS_KEY_ID="YOUAWSACCESSKEY" 12 | 2.2. export AWS_SECRET_ACCESS_KEY="YoUseCretAcCessKeyHeRe" 13 | 3. Run 'manage' script to check what AWS resources should be created 14 | 3.1. ./manage plan 15 | 4. Run 'manage' script to deploy resources over AWS 16 | 4.1. ./manage deploy 17 | 5. Test Lambda function 'lambda-demo' in AWS console 18 | 6. Run 'manage' script to destroy resources 19 | 6.1. ./manage destroy 20 | ``` 21 | 22 | ### AWS resources 23 | 24 | There is a [manage](manage) Bash script used to create/destroy all necessary demo resources over AWS: 25 | 26 | - AWS VPC in specified region (configurable in [params](params) file) 27 | - Two Subnets in VPC 28 | - Internet gateway for subnets 29 | - Route table with linked subnets pointed to Internet gateway as default route 30 | - Security group with ingress access only within VPC 31 | - AWS RDS Aurora cluster 32 | - DB subnet group in VPC 33 | - Two RDS Aurora instances (`db.t2.medium`) joined in cluster 34 | - AWS Lambda function `lambda-demo` 35 | - IAM role for Lambda function 36 | - IAM Role Policy for Lambda function (used to allow Lambda to have access to resources in VPC) 37 | 38 | Run the `manage` script without arguments to see usage info: 39 | 40 | ``` 41 | $ ./manage 42 | 43 | Usage: manage 44 | 45 | Commands: 46 | 47 | plan check what should be deployed 48 | deploy create AWS resources or deploy changes 49 | destroy destroy AWS resources totally (be careful, no roll back) 50 | 51 | ``` 52 | 53 | ### Lambda function source 54 | 55 | **[Lambda-demo](lambda/lambda-demo.py)** is a simple Python application that does the following: 56 | 57 | - connect to Aurora RDS cluster 58 | - drop table `Emploee3` if it exists 59 | - create table `Emploee3` 60 | - insert 3 rows in table (names `Joe`,`Bob`,`Mary`) 61 | 62 | Source code placed in [lambda](lambda) directory (Python lib `pymysql` used) 63 | 64 | For connecting to Aurora RDS cluster
[rds_config.py](lambda/rds_config.py) file is used. It is created dynamically when the `manage` script finishes. 65 | This file contains credentials and endpoint for the Aurora cluster created in AWS. 66 | 67 | ### Important note 68 | 69 | Aurora RDS resides in a VPC. Lambda must have the ability to run functions (create temporary instances) in the VPC too. It is managed 70 | by configuring the corresponding IAM Policy for lambda. Terraform code creates and destroys all necessary resources automatically. 71 | You can inspect IAM policies in the **[lambda.tf](https://github.com/pureclouds/lambda-aurora-example/blob/master/tf/lambda.tf#L26-L55)** file. 72 | 73 | ### Testing Lambda function 74 | 75 | You can test Lambda in the AWS console: 76 | 77 | #### Choose **Lambda** in console, select `lambda-demo` function and press 'Test function' in 'Actions' menu 78 | 79 | ![pic1](screens/pic1.jpg) 80 | 81 | #### Input blank test event and press 'Save and test' button 82 | 83 | ![pic2](screens/pic2.jpg) 84 | 85 | #### Inspect results and logs (in CloudWatch logs) 86 | 87 | ![pic3](screens/pic3.jpg) 88 | 89 | ### Destroy demo resources 90 | 91 | Do not forget to destroy all demo resources by `./manage destroy` 92 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_nonstandard.py: -------------------------------------------------------------------------------- 1 | import sys 2 | try: 3 | import unittest2 as unittest 4 | except ImportError: 5 | import unittest 6 | 7 | import pymysql 8 | _mysql = pymysql 9 | from pymysql.constants import FIELD_TYPE 10 | from pymysql.tests import base 11 | from pymysql._compat import PY2, long_type 12 | 13 | if not PY2: 14 | basestring = str 15 | 16 | 17 | class TestDBAPISet(unittest.TestCase): 18 | def test_set_equality(self): 19 | self.assertTrue(pymysql.STRING == pymysql.STRING) 20 | 21 | def test_set_inequality(self): 22 | self.assertTrue(pymysql.STRING != pymysql.NUMBER) 23 | 24 | def
test_set_equality_membership(self): 25 | self.assertTrue(FIELD_TYPE.VAR_STRING == pymysql.STRING) 26 | 27 | def test_set_inequality_membership(self): 28 | self.assertTrue(FIELD_TYPE.DATE != pymysql.STRING) 29 | 30 | 31 | class CoreModule(unittest.TestCase): 32 | """Core _mysql module features.""" 33 | 34 | def test_NULL(self): 35 | """Should have a NULL constant.""" 36 | self.assertEqual(_mysql.NULL, 'NULL') 37 | 38 | def test_version(self): 39 | """Version information sanity.""" 40 | self.assertTrue(isinstance(_mysql.__version__, basestring)) 41 | 42 | self.assertTrue(isinstance(_mysql.version_info, tuple)) 43 | self.assertEqual(len(_mysql.version_info), 5) 44 | 45 | def test_client_info(self): 46 | self.assertTrue(isinstance(_mysql.get_client_info(), basestring)) 47 | 48 | def test_thread_safe(self): 49 | self.assertTrue(isinstance(_mysql.thread_safe(), int)) 50 | 51 | 52 | class CoreAPI(unittest.TestCase): 53 | """Test _mysql interaction internals.""" 54 | 55 | def setUp(self): 56 | kwargs = base.PyMySQLTestCase.databases[0].copy() 57 | kwargs["read_default_file"] = "~/.my.cnf" 58 | self.conn = _mysql.connect(**kwargs) 59 | 60 | def tearDown(self): 61 | self.conn.close() 62 | 63 | def test_thread_id(self): 64 | tid = self.conn.thread_id() 65 | self.assertTrue(isinstance(tid, (int, long_type)), 66 | "thread_id didn't return an integral value.") 67 | 68 | self.assertRaises(TypeError, self.conn.thread_id, ('evil',), 69 | "thread_id shouldn't accept arguments.") 70 | 71 | def test_affected_rows(self): 72 | self.assertEqual(self.conn.affected_rows(), 0, 73 | "Should return 0 before we do anything.") 74 | 75 | 76 | #def test_debug(self): 77 | ## FIXME Only actually tests if you lack SUPER 78 | #self.assertRaises(pymysql.OperationalError, 79 | #self.conn.dump_debug_info) 80 | 81 | def test_charset_name(self): 82 | self.assertTrue(isinstance(self.conn.character_set_name(), basestring), 83 | "Should return a string.") 84 | 85 | def test_host_info(self): 86 | assert 
isinstance(self.conn.get_host_info(), basestring), "should return a string" 87 | 88 | def test_proto_info(self): 89 | self.assertTrue(isinstance(self.conn.get_proto_info(), int), 90 | "Should return an int.") 91 | 92 | def test_server_info(self): 93 | if sys.version_info[0] == 2: 94 | self.assertTrue(isinstance(self.conn.get_server_info(), basestring), 95 | "Should return an str.") 96 | else: 97 | self.assertTrue(isinstance(self.conn.get_server_info(), basestring), 98 | "Should return an str.") 99 | 100 | if __name__ == "__main__": 101 | unittest.main() 102 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_capabilities.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from . import capabilities 3 | try: 4 | import unittest2 as unittest 5 | except ImportError: 6 | import unittest 7 | import pymysql 8 | from pymysql.tests import base 9 | import warnings 10 | 11 | warnings.filterwarnings('error') 12 | 13 | class test_MySQLdb(capabilities.DatabaseTest): 14 | 15 | db_module = pymysql 16 | connect_args = () 17 | connect_kwargs = base.PyMySQLTestCase.databases[0].copy() 18 | connect_kwargs.update(dict(read_default_file='~/.my.cnf', 19 | use_unicode=True, 20 | charset='utf8', sql_mode="ANSI,STRICT_TRANS_TABLES,TRADITIONAL")) 21 | 22 | create_table_extra = "ENGINE=INNODB CHARACTER SET UTF8" 23 | leak_test = False 24 | 25 | def quote_identifier(self, ident): 26 | return "`%s`" % ident 27 | 28 | def test_TIME(self): 29 | from datetime import timedelta 30 | def generator(row,col): 31 | return timedelta(0, row*8000) 32 | self.check_data_integrity( 33 | ('col1 TIME',), 34 | generator) 35 | 36 | def test_TINYINT(self): 37 | # Number data 38 | def generator(row,col): 39 | v = (row*row) % 256 40 | if v > 127: 41 | v = v-256 42 | return v 43 | self.check_data_integrity( 44 | ('col1 TINYINT',), 45 | generator) 46 | 47 | def 
test_stored_procedures(self): 48 | db = self.connection 49 | c = self.cursor 50 | try: 51 | self.create_table(('pos INT', 'tree CHAR(20)')) 52 | c.executemany("INSERT INTO %s (pos,tree) VALUES (%%s,%%s)" % self.table, 53 | list(enumerate('ash birch cedar larch pine'.split()))) 54 | db.commit() 55 | 56 | c.execute(""" 57 | CREATE PROCEDURE test_sp(IN t VARCHAR(255)) 58 | BEGIN 59 | SELECT pos FROM %s WHERE tree = t; 60 | END 61 | """ % self.table) 62 | db.commit() 63 | 64 | c.callproc('test_sp', ('larch',)) 65 | rows = c.fetchall() 66 | self.assertEqual(len(rows), 1) 67 | self.assertEqual(rows[0][0], 3) 68 | c.nextset() 69 | finally: 70 | c.execute("DROP PROCEDURE IF EXISTS test_sp") 71 | c.execute('drop table %s' % (self.table)) 72 | 73 | def test_small_CHAR(self): 74 | # Character data 75 | def generator(row,col): 76 | i = ((row+1)*(col+1)+62)%256 77 | if i == 62: return '' 78 | if i == 63: return None 79 | return chr(i) 80 | self.check_data_integrity( 81 | ('col1 char(1)','col2 char(1)'), 82 | generator) 83 | 84 | def test_bug_2671682(self): 85 | from pymysql.constants import ER 86 | try: 87 | self.cursor.execute("describe some_non_existent_table"); 88 | except self.connection.ProgrammingError as msg: 89 | self.assertEqual(msg.args[0], ER.NO_SUCH_TABLE) 90 | 91 | def test_ping(self): 92 | self.connection.ping() 93 | 94 | def test_literal_int(self): 95 | self.assertTrue("2" == self.connection.literal(2)) 96 | 97 | def test_literal_float(self): 98 | self.assertTrue("3.1415" == self.connection.literal(3.1415)) 99 | 100 | def test_literal_string(self): 101 | self.assertTrue("'foo'" == self.connection.literal("foo")) 102 | 103 | 104 | if __name__ == '__main__': 105 | if test_MySQLdb.leak_test: 106 | import gc 107 | gc.enable() 108 | gc.set_debug(gc.DEBUG_LEAK) 109 | unittest.main() 110 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_load_local.py: 
-------------------------------------------------------------------------------- 1 | from pymysql import cursors, OperationalError, Warning 2 | from pymysql.tests import base 3 | 4 | import os 5 | import warnings 6 | 7 | __all__ = ["TestLoadLocal"] 8 | 9 | 10 | class TestLoadLocal(base.PyMySQLTestCase): 11 | def test_no_file(self): 12 | """Test load local infile when the file does not exist""" 13 | conn = self.connections[0] 14 | c = conn.cursor() 15 | c.execute("CREATE TABLE test_load_local (a INTEGER, b INTEGER)") 16 | try: 17 | self.assertRaises( 18 | OperationalError, 19 | c.execute, 20 | ("LOAD DATA LOCAL INFILE 'no_data.txt' INTO TABLE " 21 | "test_load_local fields terminated by ','") 22 | ) 23 | finally: 24 | c.execute("DROP TABLE test_load_local") 25 | c.close() 26 | 27 | def test_load_file(self): 28 | """Test load local infile with a valid file""" 29 | conn = self.connections[0] 30 | c = conn.cursor() 31 | c.execute("CREATE TABLE test_load_local (a INTEGER, b INTEGER)") 32 | filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 33 | 'data', 34 | 'load_local_data.txt') 35 | try: 36 | c.execute( 37 | ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + 38 | "test_load_local FIELDS TERMINATED BY ','").format(filename) 39 | ) 40 | c.execute("SELECT COUNT(*) FROM test_load_local") 41 | self.assertEqual(22749, c.fetchone()[0]) 42 | finally: 43 | c.execute("DROP TABLE test_load_local") 44 | 45 | def test_unbuffered_load_file(self): 46 | """Test unbuffered load local infile with a valid file""" 47 | conn = self.connections[0] 48 | c = conn.cursor(cursors.SSCursor) 49 | c.execute("CREATE TABLE test_load_local (a INTEGER, b INTEGER)") 50 | filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 51 | 'data', 52 | 'load_local_data.txt') 53 | try: 54 | c.execute( 55 | ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + 56 | "test_load_local FIELDS TERMINATED BY ','").format(filename) 57 | ) 58 | c.execute("SELECT COUNT(*) FROM test_load_local") 59 | 
self.assertEqual(22749, c.fetchone()[0]) 60 | finally: 61 | c.close() 62 | conn.close() 63 | conn.connect() 64 | c = conn.cursor() 65 | c.execute("DROP TABLE test_load_local") 66 | 67 | def test_load_warnings(self): 68 | """Test load local infile produces the appropriate warnings""" 69 | conn = self.connections[0] 70 | c = conn.cursor() 71 | c.execute("CREATE TABLE test_load_local (a INTEGER, b INTEGER)") 72 | filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 73 | 'data', 74 | 'load_local_warn_data.txt') 75 | try: 76 | with warnings.catch_warnings(record=True) as w: 77 | warnings.simplefilter('always') 78 | c.execute( 79 | ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + 80 | "test_load_local FIELDS TERMINATED BY ','").format(filename) 81 | ) 82 | self.assertEqual(w[0].category, Warning) 83 | expected_message = "Incorrect integer value" 84 | if expected_message not in str(w[-1].message): 85 | self.fail("%r not in %r" % (expected_message, w[-1].message)) 86 | finally: 87 | c.execute("DROP TABLE test_load_local") 88 | c.close() 89 | 90 | 91 | if __name__ == "__main__": 92 | import unittest 93 | unittest.main() 94 | -------------------------------------------------------------------------------- /manage: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | exitcode=0 4 | curdir=$(pwd) 5 | curpath=$(dirname $0) 6 | terraform_bin='.tfbin' 7 | 8 | [ -f $(dirname $0)/params ] && source $(dirname $0)/params 9 | 10 | die () { 11 | echo; echo -e "ERROR: $1"; echo; cd $curdir; exit 1 12 | } 13 | 14 | usage () { 15 | echo 16 | echo "Usage: $(basename $0) " 17 | cat <<-EOT 18 | 19 | Commands: 20 | 21 | plan check what should be deployed 22 | deploy create AWS resources or deploy changes 23 | destroy destroy AWS resources totally (be carefull, no roll back) 24 | 25 | EOT 26 | } 27 | 28 | check_params () { 29 | param_list=(region aws_access_key_id aws_secret_access_key terraform_ver) 30 | local errors=0 31 | for 
i in "${param_list[@]}"; do 32 | if [ -z "${!i}" ]; then echo "Please set parametr \"$i\" in params file"; errors=1; fi 33 | done 34 | if [ $errors -ne 0 ]; then die "Check your params file"; fi 35 | export AWS_ACCESS_KEY_ID=$aws_access_key_id 36 | export AWS_SECRET_ACCESS_KEY=$aws_secret_access_key 37 | } 38 | 39 | check_utils () { 40 | [ $(uname) != 'Linux' ] && die "Sorry, you must use Linux to run this srcipt" 41 | [ -z $(which zip) ] && die "Zip not found! Please install it by \033[1;37msudo apt-get install zip\033[0m" 42 | 43 | if [ ! -f $curpath/$terraform_bin/terraform ]; then 44 | echo 'Install terraform locally' 45 | mkdir -p $curpath/$terraform_bin 46 | curl -sSL https://releases.hashicorp.com/terraform/${terraform_ver}/terraform_${terraform_ver}_linux_amd64.zip -o $curpath/$terraform_bin/tf.zip 47 | unzip -qq $curpath/$terraform_bin/tf.zip -d $curpath/$terraform_bin 48 | rm -f $curpath/$terraform_bin/tf.zip 49 | fi 50 | 51 | local tf_cur_ver 52 | tf_cur_ver=`$curpath/$terraform_bin/terraform version | grep -o 'Terraform v\([0-9]*\.\)\{2\}[0-9]*' | grep -o '\([0-9]*\.\)\{2\}[0-9]*'` 53 | 54 | if [ "$tf_cur_ver" != "$terraform_ver" ]; then 55 | echo 'Update terraform locally' 56 | curl -sSL https://releases.hashicorp.com/terraform/${terraform_ver}/terraform_${terraform_ver}_linux_amd64.zip -o $curpath/$terraform_bin/tf.zip 57 | unzip -qq -o $curpath/$terraform_bin/tf.zip -d $curpath/$terraform_bin 58 | rm -f $curpath/$terraform_bin/tf.zip 59 | fi 60 | } 61 | 62 | # Plan resources 63 | # 64 | plan () { 65 | cd $curpath/config/ 66 | $(pwd)/../$terraform_bin/terraform get || exitcode=$? 67 | $(pwd)/../$terraform_bin/terraform plan \ 68 | -var region=$region \ 69 | || exitcode=$? 70 | if [ $exitcode -ne 0 ]; then die "terraform got error while plan resources"; fi 71 | cd $curdir 72 | } 73 | 74 | # Deploy resources 75 | # 76 | deploy () { 77 | cd $curpath/config/ 78 | $(pwd)/../$terraform_bin/terraform get || exitcode=$? 
79 | $(pwd)/../$terraform_bin/terraform apply \ 80 | -var region=$region \ 81 | || exitcode=$? 82 | if [ $exitcode -ne 0 ]; then die "terraform got error while deploy resources"; fi 83 | cd $curdir 84 | } 85 | 86 | # Destroy resources 87 | # 88 | destroy () { 89 | cd $curpath/config/ 90 | $(pwd)/../$terraform_bin/terraform get || exitcode=$? 91 | $(pwd)/../$terraform_bin/terraform destroy -force \ 92 | -var region=$region \ 93 | || exitcode=$? 94 | if [ $exitcode -ne 0 ]; then die "terraform got error while destroy resources"; fi 95 | cd $curdir 96 | } 97 | 98 | initiate () { 99 | echo 'Ckeck params'; check_params 100 | echo 'Check utils installed'; check_utils 101 | } 102 | 103 | # Main 104 | # 105 | case "$1" in 106 | plan) 107 | initiate 108 | plan 109 | ;; 110 | deploy) 111 | initiate 112 | deploy 113 | ;; 114 | destroy) 115 | initiate 116 | destroy 117 | ;; 118 | *) 119 | usage 120 | ;; 121 | esac 122 | -------------------------------------------------------------------------------- /lambda/pymysql/err.py: -------------------------------------------------------------------------------- 1 | import struct 2 | 3 | from .constants import ER 4 | 5 | 6 | class MySQLError(Exception): 7 | """Exception related to operation with MySQL.""" 8 | 9 | 10 | class Warning(Warning, MySQLError): 11 | """Exception raised for important warnings like data truncations 12 | while inserting, etc.""" 13 | 14 | 15 | class Error(MySQLError): 16 | """Exception that is the base class of all other error exceptions 17 | (not Warning).""" 18 | 19 | 20 | class InterfaceError(Error): 21 | """Exception raised for errors that are related to the database 22 | interface rather than the database itself.""" 23 | 24 | 25 | class DatabaseError(Error): 26 | """Exception raised for errors that are related to the 27 | database.""" 28 | 29 | 30 | class DataError(DatabaseError): 31 | """Exception raised for errors that are due to problems with the 32 | processed data like division by zero, numeric value out 
of range, 33 | etc.""" 34 | 35 | 36 | class OperationalError(DatabaseError): 37 | """Exception raised for errors that are related to the database's 38 | operation and not necessarily under the control of the programmer, 39 | e.g. an unexpected disconnect occurs, the data source name is not 40 | found, a transaction could not be processed, a memory allocation 41 | error occurred during processing, etc.""" 42 | 43 | 44 | class IntegrityError(DatabaseError): 45 | """Exception raised when the relational integrity of the database 46 | is affected, e.g. a foreign key check fails, duplicate key, 47 | etc.""" 48 | 49 | 50 | class InternalError(DatabaseError): 51 | """Exception raised when the database encounters an internal 52 | error, e.g. the cursor is not valid anymore, the transaction is 53 | out of sync, etc.""" 54 | 55 | 56 | class ProgrammingError(DatabaseError): 57 | """Exception raised for programming errors, e.g. table not found 58 | or already exists, syntax error in the SQL statement, wrong number 59 | of parameters specified, etc.""" 60 | 61 | 62 | class NotSupportedError(DatabaseError): 63 | """Exception raised in case a method or database API was used 64 | which is not supported by the database, e.g. 
requesting a 65 | .rollback() on a connection that does not support transaction or 66 | has transactions turned off.""" 67 | 68 | 69 | error_map = {} 70 | 71 | 72 | def _map_error(exc, *errors): 73 | for error in errors: 74 | error_map[error] = exc 75 | 76 | 77 | _map_error(ProgrammingError, ER.DB_CREATE_EXISTS, ER.SYNTAX_ERROR, 78 | ER.PARSE_ERROR, ER.NO_SUCH_TABLE, ER.WRONG_DB_NAME, 79 | ER.WRONG_TABLE_NAME, ER.FIELD_SPECIFIED_TWICE, 80 | ER.INVALID_GROUP_FUNC_USE, ER.UNSUPPORTED_EXTENSION, 81 | ER.TABLE_MUST_HAVE_COLUMNS, ER.CANT_DO_THIS_DURING_AN_TRANSACTION) 82 | _map_error(DataError, ER.WARN_DATA_TRUNCATED, ER.WARN_NULL_TO_NOTNULL, 83 | ER.WARN_DATA_OUT_OF_RANGE, ER.NO_DEFAULT, ER.PRIMARY_CANT_HAVE_NULL, 84 | ER.DATA_TOO_LONG, ER.DATETIME_FUNCTION_OVERFLOW) 85 | _map_error(IntegrityError, ER.DUP_ENTRY, ER.NO_REFERENCED_ROW, 86 | ER.NO_REFERENCED_ROW_2, ER.ROW_IS_REFERENCED, ER.ROW_IS_REFERENCED_2, 87 | ER.CANNOT_ADD_FOREIGN, ER.BAD_NULL_ERROR) 88 | _map_error(NotSupportedError, ER.WARNING_NOT_COMPLETE_ROLLBACK, 89 | ER.NOT_SUPPORTED_YET, ER.FEATURE_DISABLED, ER.UNKNOWN_STORAGE_ENGINE) 90 | _map_error(OperationalError, ER.DBACCESS_DENIED_ERROR, ER.ACCESS_DENIED_ERROR, 91 | ER.CON_COUNT_ERROR, ER.TABLEACCESS_DENIED_ERROR, 92 | ER.COLUMNACCESS_DENIED_ERROR) 93 | 94 | 95 | del _map_error, ER 96 | 97 | 98 | def raise_mysql_exception(data): 99 | errno = struct.unpack(' 10: 30 | break 31 | 32 | del cursor 33 | self.safe_gc_collect() 34 | 35 | c2 = conn.cursor() 36 | 37 | c2.execute("select 1") 38 | self.assertEqual(c2.fetchone(), (1,)) 39 | self.assertIsNone(c2.fetchone()) 40 | 41 | def test_cleanup_rows_buffered(self): 42 | conn = self.test_connection 43 | cursor = conn.cursor(pymysql.cursors.Cursor) 44 | 45 | cursor.execute("select * from test as t1, test as t2") 46 | for counter, row in enumerate(cursor): 47 | if counter > 10: 48 | break 49 | 50 | del cursor 51 | self.safe_gc_collect() 52 | 53 | c2 = conn.cursor() 54 | 55 | c2.execute("select 1") 56 | 57 | 
self.assertEqual( 58 | c2.fetchone(), (1,) 59 | ) 60 | self.assertIsNone(c2.fetchone()) 61 | 62 | def test_executemany(self): 63 | conn = self.test_connection 64 | cursor = conn.cursor(pymysql.cursors.Cursor) 65 | 66 | m = pymysql.cursors.RE_INSERT_VALUES.match("INSERT INTO TEST (ID, NAME) VALUES (%s, %s)") 67 | self.assertIsNotNone(m, 'error parse %s') 68 | self.assertEqual(m.group(3), '', 'group 3 not blank, bug in RE_INSERT_VALUES?') 69 | 70 | m = pymysql.cursors.RE_INSERT_VALUES.match("INSERT INTO TEST (ID, NAME) VALUES (%(id)s, %(name)s)") 71 | self.assertIsNotNone(m, 'error parse %(name)s') 72 | self.assertEqual(m.group(3), '', 'group 3 not blank, bug in RE_INSERT_VALUES?') 73 | 74 | m = pymysql.cursors.RE_INSERT_VALUES.match("INSERT INTO TEST (ID, NAME) VALUES (%(id_name)s, %(name)s)") 75 | self.assertIsNotNone(m, 'error parse %(id_name)s') 76 | self.assertEqual(m.group(3), '', 'group 3 not blank, bug in RE_INSERT_VALUES?') 77 | 78 | m = pymysql.cursors.RE_INSERT_VALUES.match("INSERT INTO TEST (ID, NAME) VALUES (%(id_name)s, %(name)s) ON duplicate update") 79 | self.assertIsNotNone(m, 'error parse %(id_name)s') 80 | self.assertEqual(m.group(3), ' ON duplicate update', 'group 3 not ON duplicate update, bug in RE_INSERT_VALUES?') 81 | 82 | # cursor._executed must bee "insert into test (data) values (0),(1),(2),(3),(4),(5),(6),(7),(8),(9)" 83 | # list args 84 | data = range(10) 85 | cursor.executemany("insert into test (data) values (%s)", data) 86 | self.assertTrue(cursor._executed.endswith(b",(7),(8),(9)"), 'execute many with %s not in one query') 87 | 88 | # dict args 89 | data_dict = [{'data': i} for i in range(10)] 90 | cursor.executemany("insert into test (data) values (%(data)s)", data_dict) 91 | self.assertTrue(cursor._executed.endswith(b",(7),(8),(9)"), 'execute many with %(data)s not in one query') 92 | 93 | # %% in column set 94 | cursor.execute("""\ 95 | CREATE TABLE percent_test ( 96 | `A%` INTEGER, 97 | `B%` INTEGER)""") 98 | try: 99 | q = "INSERT 
INTO percent_test (`A%%`, `B%%`) VALUES (%s, %s)" 100 | self.assertIsNotNone(pymysql.cursors.RE_INSERT_VALUES.match(q)) 101 | cursor.executemany(q, [(3, 4), (5, 6)]) 102 | self.assertTrue(cursor._executed.endswith(b"(3, 4),(5, 6)"), "executemany with %% not in one query") 103 | finally: 104 | cursor.execute("DROP TABLE IF EXISTS percent_test") 105 | -------------------------------------------------------------------------------- /lambda/pymysql/_socketio.py: -------------------------------------------------------------------------------- 1 | """ 2 | SocketIO imported from socket module in Python 3. 3 | 4 | Copyright (c) 2001-2013 Python Software Foundation; All Rights Reserved. 5 | """ 6 | 7 | from socket import * 8 | import io 9 | import errno 10 | 11 | __all__ = ['SocketIO'] 12 | 13 | EINTR = errno.EINTR 14 | _blocking_errnos = (errno.EAGAIN, errno.EWOULDBLOCK) 15 | 16 | class SocketIO(io.RawIOBase): 17 | 18 | """Raw I/O implementation for stream sockets. 19 | 20 | This class supports the makefile() method on sockets. It provides 21 | the raw I/O interface on top of a socket object. 22 | """ 23 | 24 | # One might wonder why not let FileIO do the job instead. 
There are two 25 | # main reasons why FileIO is not adapted: 26 | # - it wouldn't work under Windows (where you can't used read() and 27 | # write() on a socket handle) 28 | # - it wouldn't work with socket timeouts (FileIO would ignore the 29 | # timeout and consider the socket non-blocking) 30 | 31 | # XXX More docs 32 | 33 | def __init__(self, sock, mode): 34 | if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): 35 | raise ValueError("invalid mode: %r" % mode) 36 | io.RawIOBase.__init__(self) 37 | self._sock = sock 38 | if "b" not in mode: 39 | mode += "b" 40 | self._mode = mode 41 | self._reading = "r" in mode 42 | self._writing = "w" in mode 43 | self._timeout_occurred = False 44 | 45 | def readinto(self, b): 46 | """Read up to len(b) bytes into the writable buffer *b* and return 47 | the number of bytes read. If the socket is non-blocking and no bytes 48 | are available, None is returned. 49 | 50 | If *b* is non-empty, a 0 return value indicates that the connection 51 | was shutdown at the other end. 52 | """ 53 | self._checkClosed() 54 | self._checkReadable() 55 | if self._timeout_occurred: 56 | raise IOError("cannot read from timed out object") 57 | while True: 58 | try: 59 | return self._sock.recv_into(b) 60 | except timeout: 61 | self._timeout_occurred = True 62 | raise 63 | except error as e: 64 | n = e.args[0] 65 | if n == EINTR: 66 | continue 67 | if n in _blocking_errnos: 68 | return None 69 | raise 70 | 71 | def write(self, b): 72 | """Write the given bytes or bytearray object *b* to the socket 73 | and return the number of bytes written. This can be less than 74 | len(b) if not all data could be written. If the socket is 75 | non-blocking and no bytes could be written None is returned. 76 | """ 77 | self._checkClosed() 78 | self._checkWritable() 79 | try: 80 | return self._sock.send(b) 81 | except error as e: 82 | # XXX what about EINTR? 
83 | if e.args[0] in _blocking_errnos: 84 | return None 85 | raise 86 | 87 | def readable(self): 88 | """True if the SocketIO is open for reading. 89 | """ 90 | if self.closed: 91 | raise ValueError("I/O operation on closed socket.") 92 | return self._reading 93 | 94 | def writable(self): 95 | """True if the SocketIO is open for writing. 96 | """ 97 | if self.closed: 98 | raise ValueError("I/O operation on closed socket.") 99 | return self._writing 100 | 101 | def seekable(self): 102 | """True if the SocketIO is open for seeking. 103 | """ 104 | if self.closed: 105 | raise ValueError("I/O operation on closed socket.") 106 | return super().seekable() 107 | 108 | def fileno(self): 109 | """Return the file descriptor of the underlying socket. 110 | """ 111 | self._checkClosed() 112 | return self._sock.fileno() 113 | 114 | @property 115 | def name(self): 116 | if not self.closed: 117 | return self.fileno() 118 | else: 119 | return -1 120 | 121 | @property 122 | def mode(self): 123 | return self._mode 124 | 125 | def close(self): 126 | """Close the SocketIO object. This doesn't close the underlying 127 | socket, except if all references to it have disappeared. 128 | """ 129 | if self.closed: 130 | return 131 | io.RawIOBase.close(self) 132 | self._sock._decref_socketios() 133 | self._sock = None 134 | 135 | -------------------------------------------------------------------------------- /lambda/pymysql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | PyMySQL: A pure-Python MySQL client library. 
3 | 4 | Copyright (c) 2010-2016 PyMySQL contributors 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. 
23 | """ 24 | import sys 25 | 26 | from ._compat import PY2 27 | from .constants import FIELD_TYPE 28 | from .converters import escape_dict, escape_sequence, escape_string 29 | from .err import ( 30 | Warning, Error, InterfaceError, DataError, 31 | DatabaseError, OperationalError, IntegrityError, InternalError, 32 | NotSupportedError, ProgrammingError, MySQLError) 33 | from .times import ( 34 | Date, Time, Timestamp, 35 | DateFromTicks, TimeFromTicks, TimestampFromTicks) 36 | 37 | 38 | VERSION = (0, 7, 10, None) 39 | threadsafety = 1 40 | apilevel = "2.0" 41 | paramstyle = "pyformat" 42 | 43 | 44 | class DBAPISet(frozenset): 45 | 46 | def __ne__(self, other): 47 | if isinstance(other, set): 48 | return frozenset.__ne__(self, other) 49 | else: 50 | return other not in self 51 | 52 | def __eq__(self, other): 53 | if isinstance(other, frozenset): 54 | return frozenset.__eq__(self, other) 55 | else: 56 | return other in self 57 | 58 | def __hash__(self): 59 | return frozenset.__hash__(self) 60 | 61 | 62 | STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, 63 | FIELD_TYPE.VAR_STRING]) 64 | BINARY = DBAPISet([FIELD_TYPE.BLOB, FIELD_TYPE.LONG_BLOB, 65 | FIELD_TYPE.MEDIUM_BLOB, FIELD_TYPE.TINY_BLOB]) 66 | NUMBER = DBAPISet([FIELD_TYPE.DECIMAL, FIELD_TYPE.DOUBLE, FIELD_TYPE.FLOAT, 67 | FIELD_TYPE.INT24, FIELD_TYPE.LONG, FIELD_TYPE.LONGLONG, 68 | FIELD_TYPE.TINY, FIELD_TYPE.YEAR]) 69 | DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE]) 70 | TIME = DBAPISet([FIELD_TYPE.TIME]) 71 | TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME]) 72 | DATETIME = TIMESTAMP 73 | ROWID = DBAPISet() 74 | 75 | 76 | def Binary(x): 77 | """Return x as a binary type.""" 78 | if PY2: 79 | return bytearray(x) 80 | else: 81 | return bytes(x) 82 | 83 | 84 | def Connect(*args, **kwargs): 85 | """ 86 | Connect to the database; see connections.Connection.__init__() for 87 | more information. 
88 | """ 89 | from .connections import Connection 90 | return Connection(*args, **kwargs) 91 | 92 | from . import connections as _orig_conn 93 | if _orig_conn.Connection.__init__.__doc__ is not None: 94 | Connect.__doc__ = _orig_conn.Connection.__init__.__doc__ 95 | del _orig_conn 96 | 97 | 98 | def get_client_info(): # for MySQLdb compatibility 99 | return '.'.join(map(str, VERSION)) 100 | 101 | connect = Connection = Connect 102 | 103 | # we include a doctored version_info here for MySQLdb compatibility 104 | version_info = (1,2,6,"final",0) 105 | 106 | NULL = "NULL" 107 | 108 | __version__ = get_client_info() 109 | 110 | def thread_safe(): 111 | return True # match MySQLdb.thread_safe() 112 | 113 | def install_as_MySQLdb(): 114 | """ 115 | After this function is called, any application that imports MySQLdb or 116 | _mysql will unwittingly actually use 117 | """ 118 | sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"] 119 | 120 | 121 | __all__ = [ 122 | 'BINARY', 'Binary', 'Connect', 'Connection', 'DATE', 'Date', 123 | 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks', 124 | 'DataError', 'DatabaseError', 'Error', 'FIELD_TYPE', 'IntegrityError', 125 | 'InterfaceError', 'InternalError', 'MySQLError', 'NULL', 'NUMBER', 126 | 'NotSupportedError', 'DBAPISet', 'OperationalError', 'ProgrammingError', 127 | 'ROWID', 'STRING', 'TIME', 'TIMESTAMP', 'Warning', 'apilevel', 'connect', 128 | 'connections', 'constants', 'converters', 'cursors', 129 | 'escape_dict', 'escape_sequence', 'escape_string', 'get_client_info', 130 | 'paramstyle', 'threadsafety', 'version_info', 131 | 132 | "install_as_MySQLdb", 133 | "NULL", "__version__", 134 | ] 135 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_DictCursor.py: -------------------------------------------------------------------------------- 1 | from pymysql.tests import base 2 | import pymysql.cursors 3 | 4 | import datetime 5 
import warnings


class TestDictCursor(base.PyMySQLTestCase):
    # Expected row dicts; DOB values mirror the string literals inserted in setUp.
    bob = {'name': 'bob', 'age': 21, 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56)}
    jim = {'name': 'jim', 'age': 56, 'DOB': datetime.datetime(1955, 5, 9, 13, 12, 45)}
    fred = {'name': 'fred', 'age': 100, 'DOB': datetime.datetime(1911, 9, 12, 1, 1, 1)}

    # Overridden by TestSSDictCursor below to re-run the suite unbuffered.
    cursor_type = pymysql.cursors.DictCursor

    def setUp(self):
        """Create the ``dictcursor`` table and seed it with three rows."""
        super(TestDictCursor, self).setUp()
        self.conn = conn = self.connections[0]
        c = conn.cursor(self.cursor_type)

        # create a table and some data to query
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore")
            c.execute("drop table if exists dictcursor")
            # include in filterwarnings since for unbuffered dict cursor warning for lack of table
            # will only be propagated at start of next execute() call
        c.execute("""CREATE TABLE dictcursor (name char(20), age int , DOB datetime)""")
        data = [("bob", 21, "1990-02-06 23:04:56"),
                ("jim", 56, "1955-05-09 13:12:45"),
                ("fred", 100, "1911-09-12 01:01:01")]
        c.executemany("insert into dictcursor values (%s,%s,%s)", data)

    def tearDown(self):
        c = self.conn.cursor()
        c.execute("drop table dictcursor")
        super(TestDictCursor, self).tearDown()

    def _ensure_cursor_expired(self, cursor):
        # No-op for buffered cursors; TestSSDictCursor overrides this to drain
        # the unbuffered result set before the next execute().
        pass

    def test_DictCursor(self):
        bob, jim, fred = self.bob.copy(), self.jim.copy(), self.fred.copy()
        # all assert tests compare to the structure as would come out from MySQLdb
        conn = self.conn
        c = conn.cursor(self.cursor_type)

        # try an update which should return no rows
        c.execute("update dictcursor set age=20 where name='bob'")
        bob['age'] = 20
        # pull back the single row dict for bob and check
        c.execute("SELECT * from dictcursor where name='bob'")
        r = c.fetchone()
        self.assertEqual(bob, r, "fetchone via DictCursor failed")
        self._ensure_cursor_expired(c)

        # same again, but via fetchall => list of dicts
        c.execute("SELECT * from dictcursor where name='bob'")
        r = c.fetchall()
        self.assertEqual([bob], r, "fetch a 1 row result via fetchall failed via DictCursor")
        # same test again but iterate over the cursor
        c.execute("SELECT * from dictcursor where name='bob'")
        for r in c:
            self.assertEqual(bob, r, "fetch a 1 row result via iteration failed via DictCursor")
        # get all 3 rows via fetchall
        c.execute("SELECT * from dictcursor")
        r = c.fetchall()
        self.assertEqual([bob, jim, fred], r, "fetchall failed via DictCursor")
        # same test again but do a list comprehension
        c.execute("SELECT * from dictcursor")
        r = list(c)
        self.assertEqual([bob, jim, fred], r, "DictCursor should be iterable")
        # get 2 rows via fetchmany
        c.execute("SELECT * from dictcursor")
        r = c.fetchmany(2)
        self.assertEqual([bob, jim], r, "fetchmany failed via DictCursor")
        self._ensure_cursor_expired(c)

    def test_custom_dict(self):
        """A cursor subclass with a custom ``dict_type`` must return that type."""
        class MyDict(dict): pass

        class MyDictCursor(self.cursor_type):
            dict_type = MyDict

        keys = ['name', 'age', 'DOB']
        bob = MyDict([(k, self.bob[k]) for k in keys])
        jim = MyDict([(k, self.jim[k]) for k in keys])
        fred = MyDict([(k, self.fred[k]) for k in keys])

        cur = self.conn.cursor(MyDictCursor)
        cur.execute("SELECT * FROM dictcursor WHERE name='bob'")
        r = cur.fetchone()
        self.assertEqual(bob, r, "fetchone() returns MyDictCursor")
        self._ensure_cursor_expired(cur)

        cur.execute("SELECT * FROM dictcursor")
        r = cur.fetchall()
        self.assertEqual([bob, jim, fred], r,
                         "fetchall failed via MyDictCursor")

        cur.execute("SELECT * FROM dictcursor")
        r = list(cur)
        self.assertEqual([bob, jim, fred], r,
                         "list failed via MyDictCursor")

        cur.execute("SELECT * FROM dictcursor")
        r = cur.fetchmany(2)
        # NOTE(review): message below is a copy-paste of the previous
        # assertion; it should probably say "fetchmany" — kept as-is.
        self.assertEqual([bob, jim], r,
                         "list failed via MyDictCursor")
        self._ensure_cursor_expired(cur)


class TestSSDictCursor(TestDictCursor):
    # Re-run every TestDictCursor test with the unbuffered (server-side) cursor.
    cursor_type = pymysql.cursors.SSDictCursor

    def _ensure_cursor_expired(self, cursor):
        # Drain the unbuffered result set so the next execute() is legal.
        list(cursor.fetchall_unbuffered())

if __name__ == "__main__":
    import unittest
    unittest.main()
--------------------------------------------------------------------------------
/lambda/pymysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_dbapi20.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
from . import dbapi20
import pymysql
from pymysql.tests import base

try:
    import unittest2 as unittest
except ImportError:
    import unittest


class test_MySQLdb(dbapi20.DatabaseAPI20Test):
    # Run the generic DB-API 2.0 compliance suite against pymysql.
    driver = pymysql
    connect_args = ()
    connect_kw_args = base.PyMySQLTestCase.databases[0].copy()
    connect_kw_args.update(dict(read_default_file='~/.my.cnf',
                                charset='utf8',
                                sql_mode="ANSI,STRICT_TRANS_TABLES,TRADITIONAL"))

    # Disable base-class tests that do not apply to MySQL.
    def test_setoutputsize(self): pass
    def test_setoutputsize_basic(self): pass
    def test_nextset(self): pass  # NOTE: shadowed by the real test_nextset later in this class

    """The tests on fetchone and fetchall and rowcount bogusly
    test for an exception if the statement cannot return a
    result set. MySQL always returns a result set; it's just that
    some things return empty result sets."""
    """The tests on fetchone and fetchall and rowcount bogusly
    test for an exception if the statement cannot return a
    result set. MySQL always returns a result set; it's just that
    some things return empty result sets."""

    def test_fetchall(self):
        """Override of the dbapi20 test: MySQL never raises on no-result DDL."""
        con = self._connect()
        try:
            cur = con.cursor()
            # cursor.fetchall should raise an Error if called
            # without executing a query that may return rows (such
            # as a select)
            self.assertRaises(self.driver.Error, cur.fetchall)

            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            # cursor.fetchall should raise an Error if called
            # after executing a statement that cannot return rows
            ## self.assertRaises(self.driver.Error,cur.fetchall)

            cur.execute('select name from %sbooze' % self.table_prefix)
            rows = cur.fetchall()
            self.assertTrue(cur.rowcount in (-1,len(self.samples)))
            self.assertEqual(len(rows),len(self.samples),
                'cursor.fetchall did not retrieve all rows'
                )
            rows = [r[0] for r in rows]
            rows.sort()
            for i in range(0,len(self.samples)):
                self.assertEqual(rows[i],self.samples[i],
                'cursor.fetchall retrieved incorrect rows'
                )
            rows = cur.fetchall()
            self.assertEqual(
                len(rows),0,
                'cursor.fetchall should return an empty list if called '
                'after the whole result set has been fetched'
                )
            self.assertTrue(cur.rowcount in (-1,len(self.samples)))

            self.executeDDL2(cur)
            cur.execute('select name from %sbarflys' % self.table_prefix)
            rows = cur.fetchall()
            self.assertTrue(cur.rowcount in (-1,0))
            self.assertEqual(len(rows),0,
                'cursor.fetchall should return an empty list if '
                'a select query returns no rows'
                )

        finally:
            con.close()

    def test_fetchone(self):
        """Override of the dbapi20 test; the 'cannot return rows' asserts are disabled."""
        con = self._connect()
        try:
            cur = con.cursor()

            # cursor.fetchone should raise an Error if called before
            # executing a select-type query
            self.assertRaises(self.driver.Error,cur.fetchone)

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannot return rows
            self.executeDDL1(cur)
            ## self.assertRaises(self.driver.Error,cur.fetchone)

            cur.execute('select name from %sbooze' % self.table_prefix)
            self.assertEqual(cur.fetchone(),None,
                'cursor.fetchone should return None if a query retrieves '
                'no rows'
                )
            self.assertTrue(cur.rowcount in (-1,0))

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannot return rows
            cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
                self.table_prefix
                ))
            ## self.assertRaises(self.driver.Error,cur.fetchone)

            cur.execute('select name from %sbooze' % self.table_prefix)
            r = cur.fetchone()
            self.assertEqual(len(r),1,
                'cursor.fetchone should have retrieved a single row'
                )
            self.assertEqual(r[0],'Victoria Bitter',
                'cursor.fetchone retrieved incorrect data'
                )
            ## self.assertEqual(cur.fetchone(),None,
            ##     'cursor.fetchone should return None if no more rows available'
            ##     )
            self.assertTrue(cur.rowcount in (-1,1))
        finally:
            con.close()

    # Same complaint as for fetchall and fetchone
    def test_rowcount(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            ## self.assertEqual(cur.rowcount,-1,
            ##     'cursor.rowcount should be -1 after executing no-result '
            ##     'statements'
            ##     )
            cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
                self.table_prefix
                ))
            ## self.assertTrue(cur.rowcount in (-1,1),
            ##     'cursor.rowcount should == number or rows inserted, or '
            ##     'set to -1 after executing an insert statement'
            ##     )
            cur.execute("select name from %sbooze" % self.table_prefix)
            self.assertTrue(cur.rowcount in (-1,1),
                'cursor.rowcount should == number of rows returned, or '
                'set to -1 after executing a select statement'
                )
            self.executeDDL2(cur)
            ## self.assertEqual(cur.rowcount,-1,
            ##     'cursor.rowcount not being reset to -1 after executing '
            ##     'no-result statements'
            ##     )
        finally:
            con.close()

    def test_callproc(self):
        pass # performed in test_MySQL_capabilities

    def help_nextset_setUp(self,cur):
        ''' Should create a procedure called deleteme
            that returns two result sets, first the
            number of rows in booze then "name from booze"
        '''
        sql="""
           create procedure deleteme()
           begin
               select count(*) from %(tp)sbooze;
               select name from %(tp)sbooze;
           end
        """ % dict(tp=self.table_prefix)
        cur.execute(sql)

    def help_nextset_tearDown(self,cur):
        'If cleaning up is needed after nextSetTest'
        cur.execute("drop procedure deleteme")

    def test_nextset(self):
        """Real nextset test; replaces the pass-stub defined earlier in the class."""
        from warnings import warn
        con = self._connect()
        try:
            cur = con.cursor()
            if not hasattr(cur,'nextset'):
                return

            try:
                self.executeDDL1(cur)
                # NOTE(review): this assignment is dead — the loop below rebinds sql.
                sql=self._populate()
                for sql in self._populate():
                    cur.execute(sql)

                self.help_nextset_setUp(cur)

                cur.callproc('deleteme')
                numberofrows=cur.fetchone()
                assert numberofrows[0]== len(self.samples)
                assert cur.nextset()
                names=cur.fetchall()
                assert len(names) == len(self.samples)
                s=cur.nextset()
                if s:
                    empty = cur.fetchall()
                    self.assertEqual(len(empty), 0,
                                     "non-empty result set after other result sets")
                    #warn("Incompatibility: MySQL returns an empty result set for the CALL itself",
                    #     Warning)
                #assert s == None,'No more return sets, should return None'
            finally:
                self.help_nextset_tearDown(cur)

        finally:
            con.close()


if __name__ == '__main__':
    unittest.main()
--------------------------------------------------------------------------------
/lambda/pymysql/tests/thirdparty/test_MySQLdb/capabilities.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python -O
""" Script to test database capabilities and the DB-API interface
    for functionality and memory leaks.

    Adapted from a script by M-A Lemburg.

"""
import sys
from time import time
try:
    import unittest2 as unittest
except ImportError:
    import unittest

PY2 = sys.version_info[0] == 2

class DatabaseTest(unittest.TestCase):
    # Generic DB capability harness; subclasses set db_module/connect args.

    db_module = None
    connect_args = ()
    connect_kwargs = dict(use_unicode=True, charset="utf8")
    create_table_extra = "ENGINE=INNODB CHARACTER SET UTF8"
    rows = 10
    debug = False

    def setUp(self):
        """Open a connection and prepare large text/unicode/binary fixtures."""
        db = self.db_module.connect(*self.connect_args, **self.connect_kwargs)
        self.connection = db
        self.cursor = db.cursor()
        self.BLOBText = ''.join([chr(i) for i in range(256)] * 100);
        # NOTE(review): 16834 looks like a typo for 16384 — kept as-is.
        if PY2:
            self.BLOBUText = unicode().join(unichr(i) for i in range(16834))
        else:
            self.BLOBUText = "".join(chr(i) for i in range(16834))
        data = bytearray(range(256)) * 16
        self.BLOBBinary = self.db_module.Binary(data)

    leak_test = True

    def tearDown(self):
        # With leak_test on, assert that dropping cursor/connection leaves no
        # garbage cycles behind.
        if self.leak_test:
            import gc
            del self.cursor
            orphans = gc.collect()
            self.assertFalse(orphans, "%d orphaned objects found after deleting cursor" % orphans)

            del self.connection
            orphans = gc.collect()
            self.assertFalse(orphans, "%d orphaned objects found after deleting connection" % orphans)

    def table_exists(self, name):
        # Probe with a no-row select; any exception is taken as "missing".
        try:
            self.cursor.execute('select * from %s where 1=0' % name)
        except Exception:
            return False
        else:
            return True

    def quote_identifier(self, ident):
        return '"%s"' % ident

    def new_table_name(self):
        # Derive a unique-ish name from the cursor id, probing until free.
        i = id(self.cursor)
        while True:
            name = self.quote_identifier('tb%08x' % i)
            if not self.table_exists(name):
                return name
            i = i + 1

    def create_table(self, columndefs):

        """ Create a table using a list of column definitions given in
            columndefs.

            generator must be a function taking arguments (row_number,
            col_number) returning a suitable data object for insertion
            into the table.

        """
        self.table = self.new_table_name()
        self.cursor.execute('CREATE TABLE %s (%s) %s' %
                            (self.table,
                             ',\n'.join(columndefs),
                             self.create_table_extra))

    def check_data_integrity(self, columndefs, generator):
        """Insert generator-produced rows, read them back, compare cell by cell."""
        # insert
        self.create_table(columndefs)
        insert_statement = ('INSERT INTO %s VALUES (%s)' %
                            (self.table,
                             ','.join(['%s'] * len(columndefs))))
        data = [ [ generator(i,j) for j in range(len(columndefs)) ]
                 for i in range(self.rows) ]
        if self.debug:
            print(data)
        self.cursor.executemany(insert_statement, data)
        self.connection.commit()
        # verify
        self.cursor.execute('select * from %s' % self.table)
        l = self.cursor.fetchall()
        if self.debug:
            print(l)
        self.assertEqual(len(l), self.rows)
        try:
            for i in range(self.rows):
                for j in range(len(columndefs)):
                    self.assertEqual(l[i][j], generator(i,j))
        finally:
            if not self.debug:
                self.cursor.execute('drop table %s' % (self.table))

    def test_transactions(self):
        """DELETE then ROLLBACK must restore the deleted row."""
        columndefs = ( 'col1 INT', 'col2 VARCHAR(255)')
        def generator(row, col):
            if col == 0: return row
            else: return ('%i' % (row%10))*255
        self.create_table(columndefs)
        insert_statement = ('INSERT INTO %s VALUES (%s)' %
                            (self.table,
                             ','.join(['%s'] * len(columndefs))))
        data = [ [ generator(i,j) for j in range(len(columndefs)) ]
                 for i in range(self.rows) ]
        self.cursor.executemany(insert_statement, data)
        # verify
        self.connection.commit()
        self.cursor.execute('select * from %s' % self.table)
        l = self.cursor.fetchall()
        self.assertEqual(len(l), self.rows)
        for i in range(self.rows):
            for j in range(len(columndefs)):
                self.assertEqual(l[i][j], generator(i,j))
        delete_statement = 'delete from %s where col1=%%s' % self.table
        self.cursor.execute(delete_statement, (0,))
        self.cursor.execute('select col1 from %s where col1=%s' % \
                            (self.table, 0))
        l = self.cursor.fetchall()
        self.assertFalse(l, "DELETE didn't work")
        self.connection.rollback()
        self.cursor.execute('select col1 from %s where col1=%s' % \
                            (self.table, 0))
        l = self.cursor.fetchall()
        self.assertTrue(len(l) == 1, "ROLLBACK didn't work")
        self.cursor.execute('drop table %s' % (self.table))

    def test_truncation(self):
        """Over-long values must produce a warning or DataError, never silence."""
        columndefs = ( 'col1 INT', 'col2 VARCHAR(255)')
        def generator(row, col):
            if col == 0: return row
            else: return ('%i' % (row%10))*((255-self.rows//2)+row)
        self.create_table(columndefs)
        insert_statement = ('INSERT INTO %s VALUES (%s)' %
                            (self.table,
                             ','.join(['%s'] * len(columndefs))))

        try:
            self.cursor.execute(insert_statement, (0, '0'*256))
        except Warning:
            if self.debug: print(self.cursor.messages)
        except self.connection.DataError:
            pass
        else:
            self.fail("Over-long column did not generate warnings/exception with single insert")

        self.connection.rollback()

        try:
            for i in range(self.rows):
                data = []
                for j in range(len(columndefs)):
                    data.append(generator(i,j))
                self.cursor.execute(insert_statement,tuple(data))
        except Warning:
            if self.debug: print(self.cursor.messages)
        except self.connection.DataError:
            pass
        else:
            self.fail("Over-long columns did not generate warnings/exception with execute()")

        self.connection.rollback()

        try:
            data = [ [ generator(i,j) for j in range(len(columndefs)) ]
                     for i in range(self.rows) ]
            self.cursor.executemany(insert_statement, data)
        except Warning:
            if self.debug: print(self.cursor.messages)
        except self.connection.DataError:
            pass
        else:
            self.fail("Over-long columns did not generate warnings/exception with executemany()")

        self.connection.rollback()
        self.cursor.execute('drop table %s' % (self.table))

    def test_CHAR(self):
        # Character data
        def generator(row,col):
            return ('%i' % ((row+col) % 10)) * 255
        self.check_data_integrity(
            ('col1 char(255)','col2 char(255)'),
            generator)

    def test_INT(self):
        # Number data
        def generator(row,col):
            return row*row
        self.check_data_integrity(
            ('col1 INT',),
            generator)

    def test_DECIMAL(self):
        # DECIMAL
        def generator(row,col):
            from decimal import Decimal
            return Decimal("%d.%02d" % (row, col))
        self.check_data_integrity(
            ('col1 DECIMAL(5,2)',),
            generator)

    def test_DATE(self):
        ticks = time()
        def generator(row,col):
            return self.db_module.DateFromTicks(ticks+row*86400-col*1313)
        self.check_data_integrity(
            ('col1 DATE',),
            generator)

    def test_TIME(self):
        ticks = time()
        def generator(row,col):
            return self.db_module.TimeFromTicks(ticks+row*86400-col*1313)
        self.check_data_integrity(
            ('col1 TIME',),
            generator)

    def test_DATETIME(self):
        ticks = time()
        def generator(row,col):
            return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313)
        self.check_data_integrity(
            ('col1 DATETIME',),
            generator)

    def test_TIMESTAMP(self):
        ticks = time()
        def generator(row,col):
            return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313)
        self.check_data_integrity(
            ('col1 TIMESTAMP',),
            generator)

    def test_fractional_TIMESTAMP(self):
        ticks = time()
        def generator(row,col):
            return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313+row*0.7*col/3.0)
        self.check_data_integrity(
            ('col1 TIMESTAMP',),
            generator)

    def test_LONG(self):
        def generator(row,col):
            if col == 0:
                return row
            else:
                return self.BLOBUText # 'BLOB Text ' * 1024
        self.check_data_integrity(
            ('col1 INT', 'col2 LONG'),
            generator)

    def test_TEXT(self):
        def generator(row,col):
            if col == 0:
                return row
            else:
                return self.BLOBUText[:5192] # 'BLOB Text ' * 1024
        self.check_data_integrity(
            ('col1 INT', 'col2 TEXT'),
            generator)

    def test_LONG_BYTE(self):
        def generator(row,col):
            if col == 0:
                return row
            else:
                return self.BLOBBinary # 'BLOB\000Binary ' * 1024
        self.check_data_integrity(
            ('col1 INT','col2 LONG BYTE'),
            generator)

    def test_BLOB(self):
        def generator(row,col):
            if col == 0:
                return row
            else:
                return self.BLOBBinary # 'BLOB\000Binary ' * 1024
        self.check_data_integrity(
            ('col1 INT','col2 BLOB'),
            generator)
--------------------------------------------------------------------------------
/lambda/pymysql/converters.py:
--------------------------------------------------------------------------------
from ._compat import PY2, text_type, long_type, JYTHON, IRONPYTHON, unichr

import datetime
from decimal import Decimal
import re
import time

from .constants import FIELD_TYPE, FLAG
from .charset import charset_by_id, charset_to_encoding


def escape_item(val, charset, mapping=None):
    # Dispatch on the exact type of *val* to the matching encoder; container
    # encoders additionally need the connection charset for their elements.
    if mapping is None:
        mapping = encoders
    encoder = mapping.get(type(val))

    # Fallback to default when no encoder found
    if not encoder:
        try:
            encoder = mapping[text_type]
        except KeyError:
            raise TypeError("no default type converter defined")

    if encoder in (escape_dict, escape_sequence):
        val = encoder(val, charset, mapping)
    else:
        val = encoder(val, mapping)
    return val
= mapping.get(type(val)) 16 | 17 | # Fallback to default when no encoder found 18 | if not encoder: 19 | try: 20 | encoder = mapping[text_type] 21 | except KeyError: 22 | raise TypeError("no default type converter defined") 23 | 24 | if encoder in (escape_dict, escape_sequence): 25 | val = encoder(val, charset, mapping) 26 | else: 27 | val = encoder(val, mapping) 28 | return val 29 | 30 | def escape_dict(val, charset, mapping=None): 31 | n = {} 32 | for k, v in val.items(): 33 | quoted = escape_item(v, charset, mapping) 34 | n[k] = quoted 35 | return n 36 | 37 | def escape_sequence(val, charset, mapping=None): 38 | n = [] 39 | for item in val: 40 | quoted = escape_item(item, charset, mapping) 41 | n.append(quoted) 42 | return "(" + ",".join(n) + ")" 43 | 44 | def escape_set(val, charset, mapping=None): 45 | return ','.join([escape_item(x, charset, mapping) for x in val]) 46 | 47 | def escape_bool(value, mapping=None): 48 | return str(int(value)) 49 | 50 | def escape_object(value, mapping=None): 51 | return str(value) 52 | 53 | def escape_int(value, mapping=None): 54 | return str(value) 55 | 56 | def escape_float(value, mapping=None): 57 | return ('%.15g' % value) 58 | 59 | _escape_table = [unichr(x) for x in range(128)] 60 | _escape_table[0] = u'\\0' 61 | _escape_table[ord('\\')] = u'\\\\' 62 | _escape_table[ord('\n')] = u'\\n' 63 | _escape_table[ord('\r')] = u'\\r' 64 | _escape_table[ord('\032')] = u'\\Z' 65 | _escape_table[ord('"')] = u'\\"' 66 | _escape_table[ord("'")] = u"\\'" 67 | 68 | def _escape_unicode(value, mapping=None): 69 | """escapes *value* without adding quote. 70 | 71 | Value should be unicode 72 | """ 73 | return value.translate(_escape_table) 74 | 75 | if PY2: 76 | def escape_string(value, mapping=None): 77 | """escape_string escapes *value* but not surround it with quotes. 78 | 79 | Value should be bytes or unicode. 
80 | """ 81 | if isinstance(value, unicode): 82 | return _escape_unicode(value) 83 | assert isinstance(value, (bytes, bytearray)) 84 | value = value.replace('\\', '\\\\') 85 | value = value.replace('\0', '\\0') 86 | value = value.replace('\n', '\\n') 87 | value = value.replace('\r', '\\r') 88 | value = value.replace('\032', '\\Z') 89 | value = value.replace("'", "\\'") 90 | value = value.replace('"', '\\"') 91 | return value 92 | 93 | def escape_bytes(value, mapping=None): 94 | assert isinstance(value, (bytes, bytearray)) 95 | return b"_binary'%s'" % escape_string(value) 96 | else: 97 | escape_string = _escape_unicode 98 | 99 | # On Python ~3.5, str.decode('ascii', 'surrogateescape') is slow. 100 | # (fixed in Python 3.6, http://bugs.python.org/issue24870) 101 | # Workaround is str.decode('latin1') then translate 0x80-0xff into 0udc80-0udcff. 102 | # We can escape special chars and surrogateescape at once. 103 | _escape_bytes_table = _escape_table + [chr(i) for i in range(0xdc80, 0xdd00)] 104 | 105 | def escape_bytes(value, mapping=None): 106 | return "_binary'%s'" % value.decode('latin1').translate(_escape_bytes_table) 107 | 108 | 109 | def escape_unicode(value, mapping=None): 110 | return u"'%s'" % _escape_unicode(value) 111 | 112 | def escape_str(value, mapping=None): 113 | return "'%s'" % escape_string(str(value), mapping) 114 | 115 | def escape_None(value, mapping=None): 116 | return 'NULL' 117 | 118 | def escape_timedelta(obj, mapping=None): 119 | seconds = int(obj.seconds) % 60 120 | minutes = int(obj.seconds // 60) % 60 121 | hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24 122 | if obj.microseconds: 123 | fmt = "'{0:02d}:{1:02d}:{2:02d}.{3:06d}'" 124 | else: 125 | fmt = "'{0:02d}:{1:02d}:{2:02d}'" 126 | return fmt.format(hours, minutes, seconds, obj.microseconds) 127 | 128 | def escape_time(obj, mapping=None): 129 | if obj.microsecond: 130 | fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'" 131 | else: 132 | fmt = 
"'{0.hour:02}:{0.minute:02}:{0.second:02}'" 133 | return fmt.format(obj) 134 | 135 | def escape_datetime(obj, mapping=None): 136 | if obj.microsecond: 137 | fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'" 138 | else: 139 | fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}'" 140 | return fmt.format(obj) 141 | 142 | def escape_date(obj, mapping=None): 143 | fmt = "'{0.year:04}-{0.month:02}-{0.day:02}'" 144 | return fmt.format(obj) 145 | 146 | def escape_struct_time(obj, mapping=None): 147 | return escape_datetime(datetime.datetime(*obj[:6])) 148 | 149 | def _convert_second_fraction(s): 150 | if not s: 151 | return 0 152 | # Pad zeros to ensure the fraction length in microseconds 153 | s = s.ljust(6, '0') 154 | return int(s[:6]) 155 | 156 | DATETIME_RE = re.compile(r"(\d{1,4})-(\d{1,2})-(\d{1,2})[T ](\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?") 157 | 158 | 159 | def convert_datetime(obj): 160 | """Returns a DATETIME or TIMESTAMP column value as a datetime object: 161 | 162 | >>> datetime_or_None('2007-02-25 23:06:20') 163 | datetime.datetime(2007, 2, 25, 23, 6, 20) 164 | >>> datetime_or_None('2007-02-25T23:06:20') 165 | datetime.datetime(2007, 2, 25, 23, 6, 20) 166 | 167 | Illegal values are returned as None: 168 | 169 | >>> datetime_or_None('2007-02-31T23:06:20') is None 170 | True 171 | >>> datetime_or_None('0000-00-00 00:00:00') is None 172 | True 173 | 174 | """ 175 | if not PY2 and isinstance(obj, (bytes, bytearray)): 176 | obj = obj.decode('ascii') 177 | 178 | m = DATETIME_RE.match(obj) 179 | if not m: 180 | return convert_date(obj) 181 | 182 | try: 183 | groups = list(m.groups()) 184 | groups[-1] = _convert_second_fraction(groups[-1]) 185 | return datetime.datetime(*[ int(x) for x in groups ]) 186 | except ValueError: 187 | return convert_date(obj) 188 | 189 | TIMEDELTA_RE = re.compile(r"(-)?(\d{1,3}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?") 190 | 191 | 192 | def 
def convert_timedelta(obj):
    """Returns a TIME column as a timedelta object:

    >>> convert_timedelta('25:06:17')
    datetime.timedelta(1, 3977)
    >>> convert_timedelta('-25:06:17')
    datetime.timedelta(-2, 83177)

    Illegal values are returned as None:

    >>> convert_timedelta('random crap') is None
    True

    Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
    can accept values as (+|-)DD HH:MM:SS. The latter format will not
    be parsed correctly by this function.
    """
    if not PY2 and isinstance(obj, (bytes, bytearray)):
        obj = obj.decode('ascii')

    m = TIMEDELTA_RE.match(obj)
    if not m:
        return None

    try:
        groups = list(m.groups())
        # Last group is the fractional-seconds text (or None when absent).
        groups[-1] = _convert_second_fraction(groups[-1])
        # Group 0 captures the optional leading '-' sign.
        negate = -1 if groups[0] else 1
        hours, minutes, seconds, microseconds = groups[1:]

        tdelta = datetime.timedelta(
            hours=int(hours),
            minutes=int(minutes),
            seconds=int(seconds),
            microseconds=int(microseconds)
        ) * negate
        return tdelta
    except ValueError:
        return None


# HH:MM:SS with an optional fractional-seconds part.
# BUGFIX: the fraction separator was an unescaped '.' (which matches ANY
# character); it must be a literal dot.
TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})(?:\.(\d{1,6}))?")


def convert_time(obj):
    """Returns a TIME column as a time object:

    >>> convert_time('15:06:17')
    datetime.time(15, 6, 17)

    Illegal values are returned as None:

    >>> convert_time('-25:06:17') is None
    True
    >>> convert_time('random crap') is None
    True

    Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
    can accept values as (+|-)DD HH:MM:SS. The latter format will not
    be parsed correctly by this function.

    Also note that MySQL's TIME column corresponds more closely to
    Python's timedelta and not time. However if you want TIME columns
    to be treated as time-of-day and not a time offset, then you can
    set this function as the converter for FIELD_TYPE.TIME.
    """
    if not PY2 and isinstance(obj, (bytes, bytearray)):
        obj = obj.decode('ascii')

    m = TIME_RE.match(obj)
    if not m:
        return None

    try:
        groups = list(m.groups())
        groups[-1] = _convert_second_fraction(groups[-1])
        hours, minutes, seconds, microseconds = groups
        return datetime.time(hour=int(hours), minute=int(minutes),
                             second=int(seconds),
                             microsecond=int(microseconds))
    except ValueError:
        return None


def convert_date(obj):
    """Returns a DATE column as a date object:

    >>> convert_date('2007-02-26')
    datetime.date(2007, 2, 26)

    Illegal values are returned as None:

    >>> convert_date('2007-02-31') is None
    True
    >>> convert_date('0000-00-00') is None
    True

    """
    if not PY2 and isinstance(obj, (bytes, bytearray)):
        obj = obj.decode('ascii')
    try:
        return datetime.date(*[int(x) for x in obj.split('-', 2)])
    except ValueError:
        return None


def convert_mysql_timestamp(timestamp):
    """Convert a MySQL TIMESTAMP to a Timestamp object.

    MySQL >= 4.1 returns TIMESTAMP in the same format as DATETIME:

    >>> convert_mysql_timestamp('2007-02-25 22:32:17')
    datetime.datetime(2007, 2, 25, 22, 32, 17)

    MySQL < 4.1 uses a big string of numbers:

    >>> convert_mysql_timestamp('20070225223217')
    datetime.datetime(2007, 2, 25, 22, 32, 17)

    Illegal values are returned as None:

    >>> convert_mysql_timestamp('2007-02-31 22:32:17') is None
    True
    >>> convert_mysql_timestamp('00000000000000') is None
    True

    """
    if not PY2 and isinstance(timestamp, (bytes, bytearray)):
        timestamp = timestamp.decode('ascii')
    # A '-' at index 4 means the modern 'YYYY-MM-DD ...' DATETIME format.
    # NOTE(review): inputs shorter than 5 chars raise IndexError here rather
    # than returning None — preserved as-is; confirm before changing.
    if timestamp[4] == '-':
        return convert_datetime(timestamp)
    timestamp += "0" * (14 - len(timestamp))  # pad old-style value to 14 digits
    year, month, day, hour, minute, second = \
        int(timestamp[:4]), int(timestamp[4:6]), int(timestamp[6:8]), \
        int(timestamp[8:10]), int(timestamp[10:12]), int(timestamp[12:14])
    try:
        return datetime.datetime(year, month, day, hour, minute, second)
    except ValueError:
        return None


def convert_set(s):
    # SET columns arrive as a comma-separated list; split on the matching
    # (bytes vs str) separator.
    if isinstance(s, (bytes, bytearray)):
        return set(s.split(b","))
    return set(s.split(","))


def through(x):
    """Identity converter: pass the raw value through unchanged."""
    return x


#def convert_bit(b):
#    b = "\x00" * (8 - len(b)) + b # pad w/ zeroes
#    return struct.unpack(">Q", b)[0]
#
# the snippet above is right, but MySQLdb doesn't process bits,
# so we shouldn't either
convert_bit = through


def convert_characters(connection, field, data):
    """Decode a character-typed column according to its field charset.

    SET columns are decoded then split; BINARY-flagged data is returned
    raw.  Otherwise the bytes are decoded to unicode, and — when the
    connection is not in use_unicode mode and its charset differs from the
    field's — re-encoded to the connection's encoding.
    """
    field_charset = charset_by_id(field.charsetnr).name
    encoding = charset_to_encoding(field_charset)
    if field.flags & FLAG.SET:
        return convert_set(data.decode(encoding))
    if field.flags & FLAG.BINARY:
        return data

    if connection.use_unicode:
        data = data.decode(encoding)
    elif connection.charset != field_charset:
        data = data.decode(encoding)
        data = data.encode(connection.encoding)
    return data


# Python type -> escape function used when sending query parameters.
encoders = {
    bool: escape_bool,
    int: escape_int,
    long_type: escape_int,
    float: escape_float,
    str: escape_str,
    text_type: escape_unicode,
    tuple: escape_sequence,
    list: escape_sequence,
    set: escape_sequence,
    frozenset: escape_sequence,
    dict: escape_dict,
    bytearray: escape_bytes,
    type(None): escape_None,
    datetime.date: escape_date,
    datetime.datetime: escape_datetime,
    datetime.timedelta: escape_timedelta,
    datetime.time: escape_time,
    time.struct_time: escape_struct_time,
    Decimal: escape_object,
}

# On Python 3 (and Jython/IronPython) bytes is distinct from str and needs
# its own escaper.
if not PY2 or JYTHON or IRONPYTHON:
    encoders[bytes] = escape_bytes

# MySQL field type -> converter applied to values read from the server.
decoders = {
    FIELD_TYPE.BIT: convert_bit,
    FIELD_TYPE.TINY: int,
    FIELD_TYPE.SHORT: int,
    FIELD_TYPE.LONG: int,
    FIELD_TYPE.FLOAT: float,
    FIELD_TYPE.DOUBLE: float,
    FIELD_TYPE.LONGLONG: int,
    FIELD_TYPE.INT24: int,
    FIELD_TYPE.YEAR: int,
    FIELD_TYPE.TIMESTAMP: convert_mysql_timestamp,
    FIELD_TYPE.DATETIME: convert_datetime,
    FIELD_TYPE.TIME: convert_timedelta,
    FIELD_TYPE.DATE: convert_date,
    FIELD_TYPE.SET: convert_set,
    FIELD_TYPE.BLOB: through,
    FIELD_TYPE.TINY_BLOB: through,
    FIELD_TYPE.MEDIUM_BLOB: through,
    FIELD_TYPE.LONG_BLOB: through,
    FIELD_TYPE.STRING: through,
    FIELD_TYPE.VAR_STRING: through,
    FIELD_TYPE.VARCHAR: through,
    FIELD_TYPE.DECIMAL: Decimal,
    FIELD_TYPE.NEWDECIMAL: Decimal,
}


# for MySQLdb compatibility
conversions = encoders.copy()
conversions.update(decoders)
Thing2Literal = escape_str


# --------------------------------------------------------------------------
# /lambda/pymysql/constants/ER.py:
# --------------------------------------------------------------------------
# MySQL server error codes (numeric values of the server's ER_* symbols,
# with the ER_ prefix dropped).  ERROR_FIRST/ERROR_LAST bound the range.
# NOTE(review): misspellings such as KEY_COLUMN_DOES_NOT_EXITS appear to
# mirror the server's own symbol names — confirm against mysqld_error.h
# before "fixing" them.

ERROR_FIRST = 1000
HASHCHK = 1000
NISAMCHK = 1001
NO = 1002
YES = 1003
CANT_CREATE_FILE = 1004
CANT_CREATE_TABLE = 1005
CANT_CREATE_DB = 1006
DB_CREATE_EXISTS = 1007
DB_DROP_EXISTS = 1008
DB_DROP_DELETE = 1009
DB_DROP_RMDIR = 1010
CANT_DELETE_FILE = 1011
CANT_FIND_SYSTEM_REC = 1012
CANT_GET_STAT = 1013
CANT_GET_WD = 1014
CANT_LOCK = 1015
CANT_OPEN_FILE = 1016
FILE_NOT_FOUND = 1017
CANT_READ_DIR = 1018
CANT_SET_WD = 1019
CHECKREAD = 1020
DISK_FULL = 1021
DUP_KEY = 1022
ERROR_ON_CLOSE = 1023
ERROR_ON_READ = 1024
ERROR_ON_RENAME = 1025
ERROR_ON_WRITE = 1026
FILE_USED = 1027
FILSORT_ABORT = 1028
FORM_NOT_FOUND = 1029
GET_ERRNO = 1030
ILLEGAL_HA = 1031
KEY_NOT_FOUND = 1032
NOT_FORM_FILE = 1033
NOT_KEYFILE = 1034
OLD_KEYFILE = 1035
OPEN_AS_READONLY = 1036
OUTOFMEMORY = 1037
OUT_OF_SORTMEMORY = 1038
UNEXPECTED_EOF = 1039
CON_COUNT_ERROR = 1040
OUT_OF_RESOURCES = 1041
BAD_HOST_ERROR = 1042
HANDSHAKE_ERROR = 1043
DBACCESS_DENIED_ERROR = 1044
ACCESS_DENIED_ERROR = 1045
NO_DB_ERROR = 1046
UNKNOWN_COM_ERROR = 1047
BAD_NULL_ERROR = 1048
BAD_DB_ERROR = 1049
TABLE_EXISTS_ERROR = 1050
BAD_TABLE_ERROR = 1051
NON_UNIQ_ERROR = 1052
SERVER_SHUTDOWN = 1053
BAD_FIELD_ERROR = 1054
WRONG_FIELD_WITH_GROUP = 1055
WRONG_GROUP_FIELD = 1056
WRONG_SUM_SELECT = 1057
WRONG_VALUE_COUNT = 1058
TOO_LONG_IDENT = 1059
DUP_FIELDNAME = 1060
DUP_KEYNAME = 1061
DUP_ENTRY = 1062
WRONG_FIELD_SPEC = 1063
PARSE_ERROR = 1064
EMPTY_QUERY = 1065
NONUNIQ_TABLE = 1066
INVALID_DEFAULT = 1067
MULTIPLE_PRI_KEY = 1068
TOO_MANY_KEYS = 1069
TOO_MANY_KEY_PARTS = 1070
TOO_LONG_KEY = 1071
KEY_COLUMN_DOES_NOT_EXITS = 1072
BLOB_USED_AS_KEY = 1073
TOO_BIG_FIELDLENGTH = 1074
WRONG_AUTO_KEY = 1075
READY = 1076
NORMAL_SHUTDOWN = 1077
GOT_SIGNAL = 1078
SHUTDOWN_COMPLETE = 1079
FORCING_CLOSE = 1080
IPSOCK_ERROR = 1081
NO_SUCH_INDEX = 1082
WRONG_FIELD_TERMINATORS = 1083
BLOBS_AND_NO_TERMINATED = 1084
TEXTFILE_NOT_READABLE = 1085
FILE_EXISTS_ERROR = 1086
LOAD_INFO = 1087
ALTER_INFO = 1088
WRONG_SUB_KEY = 1089
CANT_REMOVE_ALL_FIELDS = 1090
CANT_DROP_FIELD_OR_KEY = 1091
INSERT_INFO = 1092
UPDATE_TABLE_USED = 1093
NO_SUCH_THREAD = 1094
KILL_DENIED_ERROR = 1095
NO_TABLES_USED = 1096
TOO_BIG_SET = 1097
NO_UNIQUE_LOGFILE = 1098
TABLE_NOT_LOCKED_FOR_WRITE = 1099
TABLE_NOT_LOCKED = 1100
BLOB_CANT_HAVE_DEFAULT = 1101
WRONG_DB_NAME = 1102
WRONG_TABLE_NAME = 1103
TOO_BIG_SELECT = 1104
UNKNOWN_ERROR = 1105
UNKNOWN_PROCEDURE = 1106
WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
WRONG_PARAMETERS_TO_PROCEDURE = 1108
UNKNOWN_TABLE = 1109
FIELD_SPECIFIED_TWICE = 1110
INVALID_GROUP_FUNC_USE = 1111
UNSUPPORTED_EXTENSION = 1112
TABLE_MUST_HAVE_COLUMNS = 1113
RECORD_FILE_FULL = 1114
UNKNOWN_CHARACTER_SET = 1115
TOO_MANY_TABLES = 1116
TOO_MANY_FIELDS = 1117
TOO_BIG_ROWSIZE = 1118
STACK_OVERRUN = 1119
WRONG_OUTER_JOIN = 1120
NULL_COLUMN_IN_INDEX = 1121
CANT_FIND_UDF = 1122
CANT_INITIALIZE_UDF = 1123
UDF_NO_PATHS = 1124
UDF_EXISTS = 1125
CANT_OPEN_LIBRARY = 1126
CANT_FIND_DL_ENTRY = 1127
FUNCTION_NOT_DEFINED = 1128
HOST_IS_BLOCKED = 1129
HOST_NOT_PRIVILEGED = 1130
PASSWORD_ANONYMOUS_USER = 1131
PASSWORD_NOT_ALLOWED = 1132
PASSWORD_NO_MATCH = 1133
UPDATE_INFO = 1134
CANT_CREATE_THREAD = 1135
WRONG_VALUE_COUNT_ON_ROW = 1136
CANT_REOPEN_TABLE = 1137
INVALID_USE_OF_NULL = 1138
REGEXP_ERROR = 1139
MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
NONEXISTING_GRANT = 1141
TABLEACCESS_DENIED_ERROR = 1142
COLUMNACCESS_DENIED_ERROR = 1143
ILLEGAL_GRANT_FOR_TABLE = 1144
GRANT_WRONG_HOST_OR_USER = 1145
NO_SUCH_TABLE = 1146
NONEXISTING_TABLE_GRANT = 1147
NOT_ALLOWED_COMMAND = 1148
SYNTAX_ERROR = 1149
DELAYED_CANT_CHANGE_LOCK = 1150
TOO_MANY_DELAYED_THREADS = 1151
ABORTING_CONNECTION = 1152
NET_PACKET_TOO_LARGE = 1153
NET_READ_ERROR_FROM_PIPE = 1154
NET_FCNTL_ERROR = 1155
NET_PACKETS_OUT_OF_ORDER = 1156
NET_UNCOMPRESS_ERROR = 1157
NET_READ_ERROR = 1158
NET_READ_INTERRUPTED = 1159
NET_ERROR_ON_WRITE = 1160
NET_WRITE_INTERRUPTED = 1161
TOO_LONG_STRING = 1162
TABLE_CANT_HANDLE_BLOB = 1163
TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
DELAYED_INSERT_TABLE_LOCKED = 1165
WRONG_COLUMN_NAME = 1166
WRONG_KEY_COLUMN = 1167
WRONG_MRG_TABLE = 1168
DUP_UNIQUE = 1169
BLOB_KEY_WITHOUT_LENGTH = 1170
PRIMARY_CANT_HAVE_NULL = 1171
TOO_MANY_ROWS = 1172
REQUIRES_PRIMARY_KEY = 1173
NO_RAID_COMPILED = 1174
UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
KEY_DOES_NOT_EXITS = 1176
CHECK_NO_SUCH_TABLE = 1177
CHECK_NOT_IMPLEMENTED = 1178
CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
ERROR_DURING_COMMIT = 1180
ERROR_DURING_ROLLBACK = 1181
ERROR_DURING_FLUSH_LOGS = 1182
ERROR_DURING_CHECKPOINT = 1183
NEW_ABORTING_CONNECTION = 1184
DUMP_NOT_IMPLEMENTED = 1185
FLUSH_MASTER_BINLOG_CLOSED = 1186
INDEX_REBUILD = 1187
MASTER = 1188
MASTER_NET_READ = 1189
MASTER_NET_WRITE = 1190
FT_MATCHING_KEY_NOT_FOUND = 1191
LOCK_OR_ACTIVE_TRANSACTION = 1192
UNKNOWN_SYSTEM_VARIABLE = 1193
CRASHED_ON_USAGE = 1194
CRASHED_ON_REPAIR = 1195
WARNING_NOT_COMPLETE_ROLLBACK = 1196
TRANS_CACHE_FULL = 1197
SLAVE_MUST_STOP = 1198
SLAVE_NOT_RUNNING = 1199
BAD_SLAVE = 1200
MASTER_INFO = 1201
SLAVE_THREAD = 1202
TOO_MANY_USER_CONNECTIONS = 1203
SET_CONSTANTS_ONLY = 1204
LOCK_WAIT_TIMEOUT = 1205
LOCK_TABLE_FULL = 1206
READ_ONLY_TRANSACTION = 1207
DROP_DB_WITH_READ_LOCK = 1208
CREATE_DB_WITH_READ_LOCK = 1209
WRONG_ARGUMENTS = 1210
NO_PERMISSION_TO_CREATE_USER = 1211
UNION_TABLES_IN_DIFFERENT_DIR = 1212
LOCK_DEADLOCK = 1213
TABLE_CANT_HANDLE_FT = 1214
CANNOT_ADD_FOREIGN = 1215
NO_REFERENCED_ROW = 1216
ROW_IS_REFERENCED = 1217
CONNECT_TO_MASTER = 1218
QUERY_ON_MASTER = 1219
ERROR_WHEN_EXECUTING_COMMAND = 1220
WRONG_USAGE = 1221
WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
CANT_UPDATE_WITH_READLOCK = 1223
MIXING_NOT_ALLOWED = 1224
DUP_ARGUMENT = 1225
USER_LIMIT_REACHED = 1226
SPECIFIC_ACCESS_DENIED_ERROR = 1227
LOCAL_VARIABLE = 1228
GLOBAL_VARIABLE = 1229
NO_DEFAULT = 1230
WRONG_VALUE_FOR_VAR = 1231
WRONG_TYPE_FOR_VAR = 1232
VAR_CANT_BE_READ = 1233
CANT_USE_OPTION_HERE = 1234
NOT_SUPPORTED_YET = 1235
MASTER_FATAL_ERROR_READING_BINLOG = 1236
SLAVE_IGNORED_TABLE = 1237
INCORRECT_GLOBAL_LOCAL_VAR = 1238
WRONG_FK_DEF = 1239
KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
OPERAND_COLUMNS = 1241
SUBQUERY_NO_1_ROW = 1242
UNKNOWN_STMT_HANDLER = 1243
CORRUPT_HELP_DB = 1244
CYCLIC_REFERENCE = 1245
AUTO_CONVERT = 1246
ILLEGAL_REFERENCE = 1247
DERIVED_MUST_HAVE_ALIAS = 1248
SELECT_REDUCED = 1249
TABLENAME_NOT_ALLOWED_HERE = 1250
NOT_SUPPORTED_AUTH_MODE = 1251
SPATIAL_CANT_HAVE_NULL = 1252
COLLATION_CHARSET_MISMATCH = 1253
SLAVE_WAS_RUNNING = 1254
SLAVE_WAS_NOT_RUNNING = 1255
TOO_BIG_FOR_UNCOMPRESS = 1256
ZLIB_Z_MEM_ERROR = 1257
ZLIB_Z_BUF_ERROR = 1258
ZLIB_Z_DATA_ERROR = 1259
CUT_VALUE_GROUP_CONCAT = 1260
WARN_TOO_FEW_RECORDS = 1261
WARN_TOO_MANY_RECORDS = 1262
WARN_NULL_TO_NOTNULL = 1263
WARN_DATA_OUT_OF_RANGE = 1264
WARN_DATA_TRUNCATED = 1265
WARN_USING_OTHER_HANDLER = 1266
CANT_AGGREGATE_2COLLATIONS = 1267
DROP_USER = 1268
REVOKE_GRANTS = 1269
CANT_AGGREGATE_3COLLATIONS = 1270
CANT_AGGREGATE_NCOLLATIONS = 1271
VARIABLE_IS_NOT_STRUCT = 1272
UNKNOWN_COLLATION = 1273
SLAVE_IGNORED_SSL_PARAMS = 1274
SERVER_IS_IN_SECURE_AUTH_MODE = 1275
WARN_FIELD_RESOLVED = 1276
BAD_SLAVE_UNTIL_COND = 1277
MISSING_SKIP_SLAVE = 1278
UNTIL_COND_IGNORED = 1279
WRONG_NAME_FOR_INDEX = 1280
WRONG_NAME_FOR_CATALOG = 1281
WARN_QC_RESIZE = 1282
BAD_FT_COLUMN = 1283
UNKNOWN_KEY_CACHE = 1284
WARN_HOSTNAME_WONT_WORK = 1285
UNKNOWN_STORAGE_ENGINE = 1286
WARN_DEPRECATED_SYNTAX = 1287
NON_UPDATABLE_TABLE = 1288
FEATURE_DISABLED = 1289
OPTION_PREVENTS_STATEMENT = 1290
DUPLICATED_VALUE_IN_TYPE = 1291
TRUNCATED_WRONG_VALUE = 1292
TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
INVALID_ON_UPDATE = 1294
UNSUPPORTED_PS = 1295
GET_ERRMSG = 1296
GET_TEMPORARY_ERRMSG = 1297
UNKNOWN_TIME_ZONE = 1298
WARN_INVALID_TIMESTAMP = 1299
INVALID_CHARACTER_STRING = 1300
WARN_ALLOWED_PACKET_OVERFLOWED = 1301
CONFLICTING_DECLARATIONS = 1302
SP_NO_RECURSIVE_CREATE = 1303
SP_ALREADY_EXISTS = 1304
SP_DOES_NOT_EXIST = 1305
SP_DROP_FAILED = 1306
SP_STORE_FAILED = 1307
SP_LILABEL_MISMATCH = 1308
SP_LABEL_REDEFINE = 1309
SP_LABEL_MISMATCH = 1310
SP_UNINIT_VAR = 1311
SP_BADSELECT = 1312
SP_BADRETURN = 1313
SP_BADSTATEMENT = 1314
UPDATE_LOG_DEPRECATED_IGNORED = 1315
UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
QUERY_INTERRUPTED = 1317
SP_WRONG_NO_OF_ARGS = 1318
SP_COND_MISMATCH = 1319
SP_NORETURN = 1320
SP_NORETURNEND = 1321
SP_BAD_CURSOR_QUERY = 1322
SP_BAD_CURSOR_SELECT = 1323
SP_CURSOR_MISMATCH = 1324
SP_CURSOR_ALREADY_OPEN = 1325
SP_CURSOR_NOT_OPEN = 1326
SP_UNDECLARED_VAR = 1327
SP_WRONG_NO_OF_FETCH_ARGS = 1328
SP_FETCH_NO_DATA = 1329
SP_DUP_PARAM = 1330
SP_DUP_VAR = 1331
SP_DUP_COND = 1332
SP_DUP_CURS = 1333
SP_CANT_ALTER = 1334
SP_SUBSELECT_NYI = 1335
STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
SP_VARCOND_AFTER_CURSHNDLR = 1337
SP_CURSOR_AFTER_HANDLER = 1338
SP_CASE_NOT_FOUND = 1339
FPARSER_TOO_BIG_FILE = 1340
FPARSER_BAD_HEADER = 1341
FPARSER_EOF_IN_COMMENT = 1342
FPARSER_ERROR_IN_PARAMETER = 1343
FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
VIEW_NO_EXPLAIN = 1345
FRM_UNKNOWN_TYPE = 1346
WRONG_OBJECT = 1347
NONUPDATEABLE_COLUMN = 1348
VIEW_SELECT_DERIVED = 1349
VIEW_SELECT_CLAUSE = 1350
VIEW_SELECT_VARIABLE = 1351
VIEW_SELECT_TMPTABLE = 1352
VIEW_WRONG_LIST = 1353
WARN_VIEW_MERGE = 1354
WARN_VIEW_WITHOUT_KEY = 1355
VIEW_INVALID = 1356
SP_NO_DROP_SP = 1357
SP_GOTO_IN_HNDLR = 1358
TRG_ALREADY_EXISTS = 1359
TRG_DOES_NOT_EXIST = 1360
TRG_ON_VIEW_OR_TEMP_TABLE = 1361
TRG_CANT_CHANGE_ROW = 1362
TRG_NO_SUCH_ROW_IN_TRG = 1363
NO_DEFAULT_FOR_FIELD = 1364
DIVISION_BY_ZERO = 1365
TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
ILLEGAL_VALUE_FOR_TYPE = 1367
VIEW_NONUPD_CHECK = 1368
VIEW_CHECK_FAILED = 1369
PROCACCESS_DENIED_ERROR = 1370
RELAY_LOG_FAIL = 1371
PASSWD_LENGTH = 1372
UNKNOWN_TARGET_BINLOG = 1373
IO_ERR_LOG_INDEX_READ = 1374
BINLOG_PURGE_PROHIBITED = 1375
FSEEK_FAIL = 1376
BINLOG_PURGE_FATAL_ERR = 1377
LOG_IN_USE = 1378
LOG_PURGE_UNKNOWN_ERR = 1379
RELAY_LOG_INIT = 1380
NO_BINARY_LOGGING = 1381
RESERVED_SYNTAX = 1382
WSAS_FAILED = 1383
DIFF_GROUPS_PROC = 1384
NO_GROUP_FOR_PROC = 1385
ORDER_WITH_PROC = 1386
LOGGING_PROHIBIT_CHANGING_OF = 1387
NO_FILE_MAPPING = 1388
WRONG_MAGIC = 1389
PS_MANY_PARAM = 1390
KEY_PART_0 = 1391
VIEW_CHECKSUM = 1392
VIEW_MULTIUPDATE = 1393
VIEW_NO_INSERT_FIELD_LIST = 1394
VIEW_DELETE_MERGE_VIEW = 1395
CANNOT_USER = 1396
XAER_NOTA = 1397
XAER_INVAL = 1398
XAER_RMFAIL = 1399
XAER_OUTSIDE = 1400
XAER_RMERR = 1401
XA_RBROLLBACK = 1402
NONEXISTING_PROC_GRANT = 1403
PROC_AUTO_GRANT_FAIL = 1404
PROC_AUTO_REVOKE_FAIL = 1405
DATA_TOO_LONG = 1406
SP_BAD_SQLSTATE = 1407
STARTUP = 1408
LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
CANT_CREATE_USER_WITH_GRANT = 1410
WRONG_VALUE_FOR_TYPE = 1411
TABLE_DEF_CHANGED = 1412
SP_DUP_HANDLER = 1413
SP_NOT_VAR_ARG = 1414
SP_NO_RETSET = 1415
CANT_CREATE_GEOMETRY_OBJECT = 1416
FAILED_ROUTINE_BREAK_BINLOG = 1417
BINLOG_UNSAFE_ROUTINE = 1418
BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
EXEC_STMT_WITH_OPEN_CURSOR = 1420
STMT_HAS_NO_OPEN_CURSOR = 1421
COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
NO_DEFAULT_FOR_VIEW_FIELD = 1423
SP_NO_RECURSION = 1424
TOO_BIG_SCALE = 1425
TOO_BIG_PRECISION = 1426
M_BIGGER_THAN_D = 1427
WRONG_LOCK_OF_SYSTEM_TABLE = 1428
CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
QUERY_ON_FOREIGN_DATA_SOURCE = 1430
FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
FOREIGN_DATA_STRING_INVALID = 1433
CANT_CREATE_FEDERATED_TABLE = 1434
TRG_IN_WRONG_SCHEMA = 1435
STACK_OVERRUN_NEED_MORE = 1436
TOO_LONG_BODY = 1437
WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
TOO_BIG_DISPLAYWIDTH = 1439
XAER_DUPID = 1440
DATETIME_FUNCTION_OVERFLOW = 1441
CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
VIEW_PREVENT_UPDATE = 1443
PS_NO_RECURSION = 1444
SP_CANT_SET_AUTOCOMMIT = 1445
MALFORMED_DEFINER = 1446
VIEW_FRM_NO_USER = 1447
VIEW_OTHER_USER = 1448
NO_SUCH_USER = 1449
FORBID_SCHEMA_CHANGE = 1450
ROW_IS_REFERENCED_2 = 1451
NO_REFERENCED_ROW_2 = 1452
SP_BAD_VAR_SHADOW = 1453
TRG_NO_DEFINER = 1454
OLD_FILE_FORMAT = 1455
SP_RECURSION_LIMIT = 1456
SP_PROC_TABLE_CORRUPT = 1457
SP_WRONG_NAME = 1458
TABLE_NEEDS_UPGRADE = 1459
SP_NO_AGGREGATE = 1460
MAX_PREPARED_STMT_COUNT_REACHED = 1461
VIEW_RECURSIVE = 1462
NON_GROUPING_FIELD_USED = 1463
TABLE_CANT_HANDLE_SPKEYS = 1464
NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
USERNAME = 1466
HOSTNAME = 1467
WRONG_STRING_LENGTH = 1468
ERROR_LAST = 1468

# --------------------------------------------------------------------------
# /lambda/pymysql/charset.py:
# --------------------------------------------------------------------------

# Bytes-per-character for the multi-byte charset ids listed here
# (8=latin1?, 33=utf8, 88=sjis_bin, 91=ujis_bin per the table below).
MBLENGTH = {
    8:1,
    33:3,
    88:2,
    91:2
}


class Charset(object):
    # One row of the MySQL collation table: numeric id, charset name,
    # collation name, and whether it is the charset's default collation.
    def __init__(self, id, name, collation, is_default):
        self.id, self.name, self.collation = id, name, collation
        self.is_default = is_default == 'Yes'

    def __repr__(self):
        return "Charset(id=%s, name=%r, collation=%r)" % (
            self.id, self.name, self.collation)

    @property
    def encoding(self):
        # Python's codec registry knows 'utf8', not MySQL's 'utf8mb4'.
        name = self.name
        if name == 'utf8mb4':
            return 'utf8'
        return name

    @property
    def is_binary(self):
        # Charset id 63 is MySQL's 'binary' pseudo-charset (see table below).
        return self.id == 63


class Charsets:
    # Registry of Charset rows, indexed by numeric collation id.
    def __init__(self):
        self._by_id = {}

    def add(self, c):
        self._by_id[c.id] = c

    def by_id(self, id):
        return self._by_id[id]

    def by_name(self, name):
        # Return the DEFAULT collation's Charset for this charset name,
        # or None (implicitly) when the name is unknown.
        name = name.lower()
        for c in self._by_id.values():
            if c.name == name and c.is_default:
                return c

_charsets = Charsets()
"""
Generated with:

mysql -N -s -e "select id, character_set_name, collation_name, is_default
from information_schema.collations order by id;" | python -c "import sys
for l in sys.stdin.readlines():
    id, name, collation, is_default  = l.split(chr(9))
    print '_charsets.add(Charset(%s, \'%s\', \'%s\', \'%s\'))' \
        % (id, name, collation, is_default.strip())
"

"""
# Collation table generated from information_schema.collations (see the
# note above).  Order and values are data — do not hand-edit.
_charsets.add(Charset(1, 'big5', 'big5_chinese_ci', 'Yes'))
_charsets.add(Charset(2, 'latin2', 'latin2_czech_cs', ''))
_charsets.add(Charset(3, 'dec8', 'dec8_swedish_ci', 'Yes'))
_charsets.add(Charset(4, 'cp850', 'cp850_general_ci', 'Yes'))
_charsets.add(Charset(5, 'latin1', 'latin1_german1_ci', ''))
_charsets.add(Charset(6, 'hp8', 'hp8_english_ci', 'Yes'))
_charsets.add(Charset(7, 'koi8r', 'koi8r_general_ci', 'Yes'))
_charsets.add(Charset(8, 'latin1', 'latin1_swedish_ci', 'Yes'))
_charsets.add(Charset(9, 'latin2', 'latin2_general_ci', 'Yes'))
_charsets.add(Charset(10, 'swe7', 'swe7_swedish_ci', 'Yes'))
_charsets.add(Charset(11, 'ascii', 'ascii_general_ci', 'Yes'))
_charsets.add(Charset(12, 'ujis', 'ujis_japanese_ci', 'Yes'))
_charsets.add(Charset(13, 'sjis', 'sjis_japanese_ci', 'Yes'))
_charsets.add(Charset(14, 'cp1251', 'cp1251_bulgarian_ci', ''))
_charsets.add(Charset(15, 'latin1', 'latin1_danish_ci', ''))
_charsets.add(Charset(16, 'hebrew', 'hebrew_general_ci', 'Yes'))
_charsets.add(Charset(18, 'tis620', 'tis620_thai_ci', 'Yes'))
_charsets.add(Charset(19, 'euckr', 'euckr_korean_ci', 'Yes'))
_charsets.add(Charset(20, 'latin7', 'latin7_estonian_cs', ''))
_charsets.add(Charset(21, 'latin2', 'latin2_hungarian_ci', ''))
_charsets.add(Charset(22, 'koi8u', 'koi8u_general_ci', 'Yes'))
_charsets.add(Charset(23, 'cp1251', 'cp1251_ukrainian_ci', ''))
_charsets.add(Charset(24, 'gb2312', 'gb2312_chinese_ci', 'Yes'))
_charsets.add(Charset(25, 'greek', 'greek_general_ci', 'Yes'))
_charsets.add(Charset(26, 'cp1250', 'cp1250_general_ci', 'Yes'))
_charsets.add(Charset(27, 'latin2', 'latin2_croatian_ci', ''))
_charsets.add(Charset(28, 'gbk', 'gbk_chinese_ci', 'Yes'))
_charsets.add(Charset(29, 'cp1257', 'cp1257_lithuanian_ci', ''))
_charsets.add(Charset(30, 'latin5', 'latin5_turkish_ci', 'Yes'))
_charsets.add(Charset(31, 'latin1', 'latin1_german2_ci', ''))
_charsets.add(Charset(32, 'armscii8', 'armscii8_general_ci', 'Yes'))
_charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes'))
_charsets.add(Charset(34, 'cp1250', 'cp1250_czech_cs', ''))
_charsets.add(Charset(35, 'ucs2', 'ucs2_general_ci', 'Yes'))
_charsets.add(Charset(36, 'cp866', 'cp866_general_ci', 'Yes'))
_charsets.add(Charset(37, 'keybcs2', 'keybcs2_general_ci', 'Yes'))
_charsets.add(Charset(38, 'macce', 'macce_general_ci', 'Yes'))
_charsets.add(Charset(39, 'macroman', 'macroman_general_ci', 'Yes'))
_charsets.add(Charset(40, 'cp852', 'cp852_general_ci', 'Yes'))
_charsets.add(Charset(41, 'latin7', 'latin7_general_ci', 'Yes'))
_charsets.add(Charset(42, 'latin7', 'latin7_general_cs', ''))
_charsets.add(Charset(43, 'macce', 'macce_bin', ''))
_charsets.add(Charset(44, 'cp1250', 'cp1250_croatian_ci', ''))
_charsets.add(Charset(45, 'utf8mb4', 'utf8mb4_general_ci', 'Yes'))
_charsets.add(Charset(46, 'utf8mb4', 'utf8mb4_bin', ''))
_charsets.add(Charset(47, 'latin1', 'latin1_bin', ''))
_charsets.add(Charset(48, 'latin1', 'latin1_general_ci', ''))
_charsets.add(Charset(49, 'latin1', 'latin1_general_cs', ''))
_charsets.add(Charset(50, 'cp1251', 'cp1251_bin', ''))
_charsets.add(Charset(51, 'cp1251', 'cp1251_general_ci', 'Yes'))
_charsets.add(Charset(52, 'cp1251', 'cp1251_general_cs', ''))
_charsets.add(Charset(53, 'macroman', 'macroman_bin', ''))
_charsets.add(Charset(54, 'utf16', 'utf16_general_ci', 'Yes'))
_charsets.add(Charset(55, 'utf16', 'utf16_bin', ''))
_charsets.add(Charset(57, 'cp1256', 'cp1256_general_ci', 'Yes'))
_charsets.add(Charset(58, 'cp1257', 'cp1257_bin', ''))
_charsets.add(Charset(59, 'cp1257', 'cp1257_general_ci', 'Yes'))
_charsets.add(Charset(60, 'utf32', 'utf32_general_ci', 'Yes'))
_charsets.add(Charset(61, 'utf32', 'utf32_bin', ''))
_charsets.add(Charset(63, 'binary', 'binary', 'Yes'))
_charsets.add(Charset(64, 'armscii8', 'armscii8_bin', ''))
_charsets.add(Charset(65, 'ascii', 'ascii_bin', ''))
_charsets.add(Charset(66, 'cp1250', 'cp1250_bin', ''))
_charsets.add(Charset(67, 'cp1256', 'cp1256_bin', ''))
_charsets.add(Charset(68, 'cp866', 'cp866_bin', ''))
_charsets.add(Charset(69, 'dec8', 'dec8_bin', ''))
_charsets.add(Charset(70, 'greek', 'greek_bin', ''))
_charsets.add(Charset(71, 'hebrew', 'hebrew_bin', ''))
_charsets.add(Charset(72, 'hp8', 'hp8_bin', ''))
_charsets.add(Charset(73, 'keybcs2', 'keybcs2_bin', ''))
_charsets.add(Charset(74, 'koi8r', 'koi8r_bin', ''))
_charsets.add(Charset(75, 'koi8u', 'koi8u_bin', ''))
_charsets.add(Charset(77, 'latin2', 'latin2_bin', ''))
_charsets.add(Charset(78, 'latin5', 'latin5_bin', ''))
_charsets.add(Charset(79, 'latin7', 'latin7_bin', ''))
_charsets.add(Charset(80, 'cp850', 'cp850_bin', ''))
_charsets.add(Charset(81, 'cp852', 'cp852_bin', ''))
_charsets.add(Charset(82, 'swe7', 'swe7_bin', ''))
_charsets.add(Charset(83, 'utf8', 'utf8_bin', ''))
_charsets.add(Charset(84, 'big5', 'big5_bin', ''))
_charsets.add(Charset(85, 'euckr', 'euckr_bin', ''))
_charsets.add(Charset(86, 'gb2312', 'gb2312_bin', ''))
_charsets.add(Charset(87, 'gbk', 'gbk_bin', ''))
_charsets.add(Charset(88, 'sjis', 'sjis_bin', ''))
_charsets.add(Charset(89, 'tis620', 'tis620_bin', ''))
_charsets.add(Charset(90, 'ucs2', 'ucs2_bin', ''))
_charsets.add(Charset(91, 'ujis', 'ujis_bin', ''))
_charsets.add(Charset(92, 'geostd8', 'geostd8_general_ci', 'Yes'))
_charsets.add(Charset(93, 'geostd8', 'geostd8_bin', ''))
_charsets.add(Charset(94, 'latin1', 'latin1_spanish_ci', ''))
_charsets.add(Charset(95, 'cp932', 'cp932_japanese_ci', 'Yes'))
_charsets.add(Charset(96, 'cp932', 'cp932_bin', ''))
_charsets.add(Charset(97, 'eucjpms', 'eucjpms_japanese_ci', 'Yes'))
_charsets.add(Charset(98, 'eucjpms', 'eucjpms_bin', ''))
_charsets.add(Charset(99, 'cp1250', 'cp1250_polish_ci', ''))
_charsets.add(Charset(101, 'utf16', 'utf16_unicode_ci', ''))
_charsets.add(Charset(102, 'utf16', 'utf16_icelandic_ci', ''))
_charsets.add(Charset(103, 'utf16', 'utf16_latvian_ci', ''))
_charsets.add(Charset(104, 'utf16', 'utf16_romanian_ci', ''))
_charsets.add(Charset(105, 'utf16', 'utf16_slovenian_ci', ''))
_charsets.add(Charset(106, 'utf16', 'utf16_polish_ci', ''))
_charsets.add(Charset(107, 'utf16', 'utf16_estonian_ci', ''))
_charsets.add(Charset(108, 'utf16', 'utf16_spanish_ci', ''))
_charsets.add(Charset(109, 'utf16', 'utf16_swedish_ci', ''))
_charsets.add(Charset(110, 'utf16', 'utf16_turkish_ci', ''))
_charsets.add(Charset(111, 'utf16', 'utf16_czech_ci', ''))
_charsets.add(Charset(112, 'utf16', 'utf16_danish_ci', ''))
_charsets.add(Charset(113, 'utf16', 'utf16_lithuanian_ci', ''))
_charsets.add(Charset(114, 'utf16', 'utf16_slovak_ci', ''))
_charsets.add(Charset(115, 'utf16', 'utf16_spanish2_ci', ''))
_charsets.add(Charset(116, 'utf16', 'utf16_roman_ci', ''))
_charsets.add(Charset(117, 'utf16', 'utf16_persian_ci', ''))
_charsets.add(Charset(118, 'utf16', 'utf16_esperanto_ci', ''))
_charsets.add(Charset(119, 'utf16', 'utf16_hungarian_ci', ''))
_charsets.add(Charset(120, 'utf16', 'utf16_sinhala_ci', ''))
_charsets.add(Charset(128, 'ucs2', 'ucs2_unicode_ci', ''))
_charsets.add(Charset(129, 'ucs2', 'ucs2_icelandic_ci', ''))
_charsets.add(Charset(130, 'ucs2', 'ucs2_latvian_ci', ''))
_charsets.add(Charset(131, 'ucs2', 'ucs2_romanian_ci', ''))
_charsets.add(Charset(132, 'ucs2', 'ucs2_slovenian_ci', ''))
_charsets.add(Charset(133, 'ucs2', 'ucs2_polish_ci', ''))
_charsets.add(Charset(134, 'ucs2', 'ucs2_estonian_ci', ''))
_charsets.add(Charset(135, 'ucs2', 'ucs2_spanish_ci', ''))
_charsets.add(Charset(136, 'ucs2', 'ucs2_swedish_ci', ''))
_charsets.add(Charset(137, 'ucs2', 'ucs2_turkish_ci', ''))
_charsets.add(Charset(138, 'ucs2', 'ucs2_czech_ci', ''))
_charsets.add(Charset(139, 'ucs2', 'ucs2_danish_ci', ''))
_charsets.add(Charset(140, 'ucs2', 'ucs2_lithuanian_ci', ''))
_charsets.add(Charset(141, 'ucs2', 'ucs2_slovak_ci', ''))
_charsets.add(Charset(142, 'ucs2', 'ucs2_spanish2_ci', ''))
_charsets.add(Charset(143, 'ucs2', 'ucs2_roman_ci', ''))
_charsets.add(Charset(144, 'ucs2', 'ucs2_persian_ci', ''))
_charsets.add(Charset(145, 'ucs2', 'ucs2_esperanto_ci', ''))
_charsets.add(Charset(146, 'ucs2', 'ucs2_hungarian_ci', ''))
_charsets.add(Charset(147, 'ucs2', 'ucs2_sinhala_ci', ''))
_charsets.add(Charset(159, 'ucs2', 'ucs2_general_mysql500_ci', ''))
_charsets.add(Charset(160, 'utf32', 'utf32_unicode_ci', ''))
_charsets.add(Charset(161, 'utf32', 'utf32_icelandic_ci', ''))
_charsets.add(Charset(162, 'utf32', 'utf32_latvian_ci', ''))
_charsets.add(Charset(163, 'utf32', 'utf32_romanian_ci', ''))
_charsets.add(Charset(164, 'utf32', 'utf32_slovenian_ci', ''))
_charsets.add(Charset(165, 'utf32', 'utf32_polish_ci', ''))
_charsets.add(Charset(166, 'utf32', 'utf32_estonian_ci', ''))
_charsets.add(Charset(167, 'utf32', 'utf32_spanish_ci', ''))
_charsets.add(Charset(168, 'utf32', 'utf32_swedish_ci', ''))
_charsets.add(Charset(169, 'utf32', 'utf32_turkish_ci', ''))
_charsets.add(Charset(170, 'utf32', 'utf32_czech_ci', ''))
_charsets.add(Charset(171, 'utf32', 'utf32_danish_ci', ''))
_charsets.add(Charset(172, 'utf32', 'utf32_lithuanian_ci', ''))
_charsets.add(Charset(173, 'utf32', 'utf32_slovak_ci', ''))
_charsets.add(Charset(174, 'utf32', 'utf32_spanish2_ci', ''))
_charsets.add(Charset(175, 'utf32', 'utf32_roman_ci', ''))
_charsets.add(Charset(176, 'utf32', 'utf32_persian_ci', ''))
_charsets.add(Charset(177, 'utf32', 'utf32_esperanto_ci', ''))
_charsets.add(Charset(178, 'utf32', 'utf32_hungarian_ci', ''))
_charsets.add(Charset(179, 'utf32', 'utf32_sinhala_ci', ''))
_charsets.add(Charset(192, 'utf8', 'utf8_unicode_ci', ''))
_charsets.add(Charset(193, 'utf8', 'utf8_icelandic_ci', ''))
_charsets.add(Charset(194, 'utf8', 'utf8_latvian_ci', ''))
_charsets.add(Charset(195, 'utf8', 'utf8_romanian_ci', ''))
_charsets.add(Charset(196, 'utf8', 'utf8_slovenian_ci', ''))
_charsets.add(Charset(197, 'utf8', 'utf8_polish_ci', ''))
_charsets.add(Charset(198, 'utf8', 'utf8_estonian_ci', ''))
_charsets.add(Charset(199, 'utf8', 'utf8_spanish_ci', ''))
_charsets.add(Charset(200, 'utf8', 'utf8_swedish_ci', ''))
_charsets.add(Charset(201, 'utf8', 'utf8_turkish_ci', ''))
_charsets.add(Charset(202, 'utf8', 'utf8_czech_ci', ''))
_charsets.add(Charset(203, 'utf8', 'utf8_danish_ci', ''))
_charsets.add(Charset(204, 'utf8', 'utf8_lithuanian_ci', ''))
_charsets.add(Charset(205, 'utf8', 'utf8_slovak_ci', ''))
_charsets.add(Charset(206, 'utf8', 'utf8_spanish2_ci', ''))
_charsets.add(Charset(207, 'utf8', 'utf8_roman_ci', ''))
_charsets.add(Charset(208, 'utf8', 'utf8_persian_ci', ''))
_charsets.add(Charset(209, 'utf8', 'utf8_esperanto_ci', ''))
_charsets.add(Charset(210, 'utf8', 'utf8_hungarian_ci', ''))
_charsets.add(Charset(211, 'utf8', 'utf8_sinhala_ci', ''))
_charsets.add(Charset(223, 'utf8', 'utf8_general_mysql500_ci', ''))
_charsets.add(Charset(224, 'utf8mb4', 'utf8mb4_unicode_ci', ''))
_charsets.add(Charset(225, 'utf8mb4', 'utf8mb4_icelandic_ci', ''))
_charsets.add(Charset(226, 'utf8mb4', 'utf8mb4_latvian_ci', ''))
_charsets.add(Charset(227, 'utf8mb4', 'utf8mb4_romanian_ci', ''))
_charsets.add(Charset(228, 'utf8mb4', 'utf8mb4_slovenian_ci', ''))
_charsets.add(Charset(229, 'utf8mb4', 'utf8mb4_polish_ci', ''))
_charsets.add(Charset(230, 'utf8mb4', 'utf8mb4_estonian_ci', ''))
_charsets.add(Charset(231, 'utf8mb4', 'utf8mb4_spanish_ci', ''))
_charsets.add(Charset(232, 'utf8mb4', 'utf8mb4_swedish_ci', ''))
_charsets.add(Charset(233, 'utf8mb4', 'utf8mb4_turkish_ci', ''))
_charsets.add(Charset(234, 'utf8mb4', 'utf8mb4_czech_ci', ''))
_charsets.add(Charset(235, 'utf8mb4', 'utf8mb4_danish_ci', ''))
_charsets.add(Charset(236, 'utf8mb4', 'utf8mb4_lithuanian_ci', ''))
_charsets.add(Charset(237, 'utf8mb4', 'utf8mb4_slovak_ci', ''))
_charsets.add(Charset(238, 'utf8mb4', 'utf8mb4_spanish2_ci', ''))
_charsets.add(Charset(239, 'utf8mb4', 'utf8mb4_roman_ci', ''))
_charsets.add(Charset(240, 'utf8mb4', 'utf8mb4_persian_ci', ''))
_charsets.add(Charset(241, 'utf8mb4', 'utf8mb4_esperanto_ci', ''))
_charsets.add(Charset(242, 'utf8mb4', 'utf8mb4_hungarian_ci', ''))
_charsets.add(Charset(243, 'utf8mb4', 'utf8mb4_sinhala_ci', ''))
_charsets.add(Charset(244, 'utf8mb4', 'utf8mb4_german2_ci', ''))
_charsets.add(Charset(245, 'utf8mb4', 'utf8mb4_croatian_ci', ''))
_charsets.add(Charset(246, 'utf8mb4', 'utf8mb4_unicode_520_ci', ''))
_charsets.add(Charset(247, 'utf8mb4',
def charset_to_encoding(name):
    """Convert MySQL's charset name to Python's codec name.

    MySQL's ``utf8mb4`` is plain UTF-8 from Python's point of view; every
    other MySQL charset name doubles as the Python codec name unchanged.
    """
    return 'utf8' if name == 'utf8mb4' else name
test_datatypes") 37 | 38 | # check nulls 39 | c.execute("insert into test_datatypes (b,i,l,f,s,u,bb,d,dt,td,t,st) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", [None] * 12) 40 | c.execute("select b,i,l,f,s,u,bb,d,dt,td,t,st from test_datatypes") 41 | r = c.fetchone() 42 | self.assertEqual(tuple([None] * 12), r) 43 | 44 | c.execute("delete from test_datatypes") 45 | 46 | # check sequences type 47 | for seq_type in (tuple, list, set, frozenset): 48 | c.execute("insert into test_datatypes (i, l) values (2,4), (6,8), (10,12)") 49 | seq = seq_type([2,6]) 50 | c.execute("select l from test_datatypes where i in %s order by i", (seq,)) 51 | r = c.fetchall() 52 | self.assertEqual(((4,),(8,)), r) 53 | c.execute("delete from test_datatypes") 54 | 55 | finally: 56 | c.execute("drop table test_datatypes") 57 | 58 | def test_dict(self): 59 | """ test dict escaping """ 60 | conn = self.connections[0] 61 | c = conn.cursor() 62 | c.execute("create table test_dict (a integer, b integer, c integer)") 63 | try: 64 | c.execute("insert into test_dict (a,b,c) values (%(a)s, %(b)s, %(c)s)", {"a":1,"b":2,"c":3}) 65 | c.execute("select a,b,c from test_dict") 66 | self.assertEqual((1,2,3), c.fetchone()) 67 | finally: 68 | c.execute("drop table test_dict") 69 | 70 | def test_string(self): 71 | conn = self.connections[0] 72 | c = conn.cursor() 73 | c.execute("create table test_dict (a text)") 74 | test_value = "I am a test string" 75 | try: 76 | c.execute("insert into test_dict (a) values (%s)", test_value) 77 | c.execute("select a from test_dict") 78 | self.assertEqual((test_value,), c.fetchone()) 79 | finally: 80 | c.execute("drop table test_dict") 81 | 82 | def test_integer(self): 83 | conn = self.connections[0] 84 | c = conn.cursor() 85 | c.execute("create table test_dict (a integer)") 86 | test_value = 12345 87 | try: 88 | c.execute("insert into test_dict (a) values (%s)", test_value) 89 | c.execute("select a from test_dict") 90 | self.assertEqual((test_value,), c.fetchone()) 91 | 
finally: 92 | c.execute("drop table test_dict") 93 | 94 | def test_blob(self): 95 | """test binary data""" 96 | data = bytes(bytearray(range(256)) * 4) 97 | conn = self.connections[0] 98 | self.safe_create_table( 99 | conn, "test_blob", "create table test_blob (b blob)") 100 | 101 | with conn.cursor() as c: 102 | c.execute("insert into test_blob (b) values (%s)", (data,)) 103 | c.execute("select b from test_blob") 104 | self.assertEqual(data, c.fetchone()[0]) 105 | 106 | def test_untyped(self): 107 | """ test conversion of null, empty string """ 108 | conn = self.connections[0] 109 | c = conn.cursor() 110 | c.execute("select null,''") 111 | self.assertEqual((None,u''), c.fetchone()) 112 | c.execute("select '',null") 113 | self.assertEqual((u'',None), c.fetchone()) 114 | 115 | def test_timedelta(self): 116 | """ test timedelta conversion """ 117 | conn = self.connections[0] 118 | c = conn.cursor() 119 | c.execute("select time('12:30'), time('23:12:59'), time('23:12:59.05100'), time('-12:30'), time('-23:12:59'), time('-23:12:59.05100'), time('-00:30')") 120 | self.assertEqual((datetime.timedelta(0, 45000), 121 | datetime.timedelta(0, 83579), 122 | datetime.timedelta(0, 83579, 51000), 123 | -datetime.timedelta(0, 45000), 124 | -datetime.timedelta(0, 83579), 125 | -datetime.timedelta(0, 83579, 51000), 126 | -datetime.timedelta(0, 1800)), 127 | c.fetchone()) 128 | 129 | def test_datetime_microseconds(self): 130 | """ test datetime conversion w microseconds""" 131 | 132 | conn = self.connections[0] 133 | if not self.mysql_server_is(conn, (5, 6, 4)): 134 | raise SkipTest("target backend does not support microseconds") 135 | c = conn.cursor() 136 | dt = datetime.datetime(2013, 11, 12, 9, 9, 9, 123450) 137 | c.execute("create table test_datetime (id int, ts datetime(6))") 138 | try: 139 | c.execute( 140 | "insert into test_datetime values (%s, %s)", 141 | (1, dt) 142 | ) 143 | c.execute("select ts from test_datetime") 144 | self.assertEqual((dt,), c.fetchone()) 145 | 
finally: 146 | c.execute("drop table test_datetime") 147 | 148 | 149 | class TestCursor(base.PyMySQLTestCase): 150 | # this test case does not work quite right yet, however, 151 | # we substitute in None for the erroneous field which is 152 | # compatible with the DB-API 2.0 spec and has not broken 153 | # any unit tests for anything we've tried. 154 | 155 | #def test_description(self): 156 | # """ test description attribute """ 157 | # # result is from MySQLdb module 158 | # r = (('Host', 254, 11, 60, 60, 0, 0), 159 | # ('User', 254, 16, 16, 16, 0, 0), 160 | # ('Password', 254, 41, 41, 41, 0, 0), 161 | # ('Select_priv', 254, 1, 1, 1, 0, 0), 162 | # ('Insert_priv', 254, 1, 1, 1, 0, 0), 163 | # ('Update_priv', 254, 1, 1, 1, 0, 0), 164 | # ('Delete_priv', 254, 1, 1, 1, 0, 0), 165 | # ('Create_priv', 254, 1, 1, 1, 0, 0), 166 | # ('Drop_priv', 254, 1, 1, 1, 0, 0), 167 | # ('Reload_priv', 254, 1, 1, 1, 0, 0), 168 | # ('Shutdown_priv', 254, 1, 1, 1, 0, 0), 169 | # ('Process_priv', 254, 1, 1, 1, 0, 0), 170 | # ('File_priv', 254, 1, 1, 1, 0, 0), 171 | # ('Grant_priv', 254, 1, 1, 1, 0, 0), 172 | # ('References_priv', 254, 1, 1, 1, 0, 0), 173 | # ('Index_priv', 254, 1, 1, 1, 0, 0), 174 | # ('Alter_priv', 254, 1, 1, 1, 0, 0), 175 | # ('Show_db_priv', 254, 1, 1, 1, 0, 0), 176 | # ('Super_priv', 254, 1, 1, 1, 0, 0), 177 | # ('Create_tmp_table_priv', 254, 1, 1, 1, 0, 0), 178 | # ('Lock_tables_priv', 254, 1, 1, 1, 0, 0), 179 | # ('Execute_priv', 254, 1, 1, 1, 0, 0), 180 | # ('Repl_slave_priv', 254, 1, 1, 1, 0, 0), 181 | # ('Repl_client_priv', 254, 1, 1, 1, 0, 0), 182 | # ('Create_view_priv', 254, 1, 1, 1, 0, 0), 183 | # ('Show_view_priv', 254, 1, 1, 1, 0, 0), 184 | # ('Create_routine_priv', 254, 1, 1, 1, 0, 0), 185 | # ('Alter_routine_priv', 254, 1, 1, 1, 0, 0), 186 | # ('Create_user_priv', 254, 1, 1, 1, 0, 0), 187 | # ('Event_priv', 254, 1, 1, 1, 0, 0), 188 | # ('Trigger_priv', 254, 1, 1, 1, 0, 0), 189 | # ('ssl_type', 254, 0, 9, 9, 0, 0), 190 | # ('ssl_cipher', 252, 0, 65535, 
65535, 0, 0), 191 | # ('x509_issuer', 252, 0, 65535, 65535, 0, 0), 192 | # ('x509_subject', 252, 0, 65535, 65535, 0, 0), 193 | # ('max_questions', 3, 1, 11, 11, 0, 0), 194 | # ('max_updates', 3, 1, 11, 11, 0, 0), 195 | # ('max_connections', 3, 1, 11, 11, 0, 0), 196 | # ('max_user_connections', 3, 1, 11, 11, 0, 0)) 197 | # conn = self.connections[0] 198 | # c = conn.cursor() 199 | # c.execute("select * from mysql.user") 200 | # 201 | # self.assertEqual(r, c.description) 202 | 203 | def test_fetch_no_result(self): 204 | """ test a fetchone() with no rows """ 205 | conn = self.connections[0] 206 | c = conn.cursor() 207 | c.execute("create table test_nr (b varchar(32))") 208 | try: 209 | data = "pymysql" 210 | c.execute("insert into test_nr (b) values (%s)", (data,)) 211 | self.assertEqual(None, c.fetchone()) 212 | finally: 213 | c.execute("drop table test_nr") 214 | 215 | def test_aggregates(self): 216 | """ test aggregate functions """ 217 | conn = self.connections[0] 218 | c = conn.cursor() 219 | try: 220 | c.execute('create table test_aggregates (i integer)') 221 | for i in range(0, 10): 222 | c.execute('insert into test_aggregates (i) values (%s)', (i,)) 223 | c.execute('select sum(i) from test_aggregates') 224 | r, = c.fetchone() 225 | self.assertEqual(sum(range(0,10)), r) 226 | finally: 227 | c.execute('drop table test_aggregates') 228 | 229 | def test_single_tuple(self): 230 | """ test a single tuple """ 231 | conn = self.connections[0] 232 | c = conn.cursor() 233 | self.safe_create_table( 234 | conn, 'mystuff', 235 | "create table mystuff (id integer primary key)") 236 | c.execute("insert into mystuff (id) values (1)") 237 | c.execute("insert into mystuff (id) values (2)") 238 | c.execute("select id from mystuff where id in %s", ((1,),)) 239 | self.assertEqual([(1,)], list(c.fetchall())) 240 | c.close() 241 | 242 | def test_json(self): 243 | args = self.databases[0].copy() 244 | args["charset"] = "utf8mb4" 245 | conn = pymysql.connect(**args) 246 | if not 
self.mysql_server_is(conn, (5, 7, 0)): 247 | raise SkipTest("JSON type is not supported on MySQL <= 5.6") 248 | 249 | self.safe_create_table(conn, "test_json", """\ 250 | create table test_json ( 251 | id int not null, 252 | json JSON not null, 253 | primary key (id) 254 | );""") 255 | cur = conn.cursor() 256 | 257 | json_str = u'{"hello": "こんにちは"}' 258 | cur.execute("INSERT INTO test_json (id, `json`) values (42, %s)", (json_str,)) 259 | cur.execute("SELECT `json` from `test_json` WHERE `id`=42") 260 | res = cur.fetchone()[0] 261 | self.assertEqual(json.loads(res), json.loads(json_str)) 262 | 263 | cur.execute("SELECT CAST(%s AS JSON) AS x", (json_str,)) 264 | res = cur.fetchone()[0] 265 | self.assertEqual(json.loads(res), json.loads(json_str)) 266 | 267 | 268 | class TestBulkInserts(base.PyMySQLTestCase): 269 | 270 | cursor_type = pymysql.cursors.DictCursor 271 | 272 | def setUp(self): 273 | super(TestBulkInserts, self).setUp() 274 | self.conn = conn = self.connections[0] 275 | c = conn.cursor(self.cursor_type) 276 | 277 | # create a table ane some data to query 278 | self.safe_create_table(conn, 'bulkinsert', """\ 279 | CREATE TABLE bulkinsert 280 | ( 281 | id int(11), 282 | name char(20), 283 | age int, 284 | height int, 285 | PRIMARY KEY (id) 286 | ) 287 | """) 288 | 289 | def _verify_records(self, data): 290 | conn = self.connections[0] 291 | cursor = conn.cursor() 292 | cursor.execute("SELECT id, name, age, height from bulkinsert") 293 | result = cursor.fetchall() 294 | self.assertEqual(sorted(data), sorted(result)) 295 | 296 | def test_bulk_insert(self): 297 | conn = self.connections[0] 298 | cursor = conn.cursor() 299 | 300 | data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] 301 | cursor.executemany("insert into bulkinsert (id, name, age, height) " 302 | "values (%s,%s,%s,%s)", data) 303 | self.assertEqual( 304 | cursor._last_executed, bytearray( 305 | b"insert into bulkinsert (id, name, age, height) values " 306 | 
b"(0,'bob',21,123),(1,'jim',56,45),(2,'fred',100,180)")) 307 | cursor.execute('commit') 308 | self._verify_records(data) 309 | 310 | def test_bulk_insert_multiline_statement(self): 311 | conn = self.connections[0] 312 | cursor = conn.cursor() 313 | data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] 314 | cursor.executemany("""insert 315 | into bulkinsert (id, name, 316 | age, height) 317 | values (%s, 318 | %s , %s, 319 | %s ) 320 | """, data) 321 | self.assertEqual(cursor._last_executed.strip(), bytearray(b"""insert 322 | into bulkinsert (id, name, 323 | age, height) 324 | values (0, 325 | 'bob' , 21, 326 | 123 ),(1, 327 | 'jim' , 56, 328 | 45 ),(2, 329 | 'fred' , 100, 330 | 180 )""")) 331 | cursor.execute('commit') 332 | self._verify_records(data) 333 | 334 | def test_bulk_insert_single_record(self): 335 | conn = self.connections[0] 336 | cursor = conn.cursor() 337 | data = [(0, "bob", 21, 123)] 338 | cursor.executemany("insert into bulkinsert (id, name, age, height) " 339 | "values (%s,%s,%s,%s)", data) 340 | cursor.execute('commit') 341 | self._verify_records(data) 342 | 343 | def test_issue_288(self): 344 | """executemany should work with "insert ... 
on update" """ 345 | conn = self.connections[0] 346 | cursor = conn.cursor() 347 | data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] 348 | cursor.executemany("""insert 349 | into bulkinsert (id, name, 350 | age, height) 351 | values (%s, 352 | %s , %s, 353 | %s ) on duplicate key update 354 | age = values(age) 355 | """, data) 356 | self.assertEqual(cursor._last_executed.strip(), bytearray(b"""insert 357 | into bulkinsert (id, name, 358 | age, height) 359 | values (0, 360 | 'bob' , 21, 361 | 123 ),(1, 362 | 'jim' , 56, 363 | 45 ),(2, 364 | 'fred' , 100, 365 | 180 ) on duplicate key update 366 | age = values(age)""")) 367 | cursor.execute('commit') 368 | self._verify_records(data) 369 | 370 | def test_warnings(self): 371 | con = self.connections[0] 372 | cur = con.cursor() 373 | with warnings.catch_warnings(record=True) as ws: 374 | warnings.simplefilter("always") 375 | cur.execute("drop table if exists no_exists_table") 376 | self.assertEqual(len(ws), 1) 377 | self.assertEqual(ws[0].category, pymysql.Warning) 378 | if u"no_exists_table" not in str(ws[0].message): 379 | self.fail("'no_exists_table' not in %s" % (str(ws[0].message),)) 380 | -------------------------------------------------------------------------------- /lambda/pymysql/cursors.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import print_function, absolute_import 3 | from functools import partial 4 | import re 5 | import warnings 6 | 7 | from ._compat import range_type, text_type, PY2 8 | from . import err 9 | 10 | 11 | #: Regular expression for :meth:`Cursor.executemany`. 12 | #: executemany only suports simple bulk insert. 13 | #: You can use it to load large dataset. 
#: Regular expression used by :meth:`Cursor.executemany` to recognise a
#: simple bulk INSERT/REPLACE statement.  Match groups:
#:   1 - statement head up to and including VALUES
#:   2 - the parenthesised placeholder tuple (%s or %(name)s style)
#:   3 - optional trailing "ON DUPLICATE ..." clause
RE_INSERT_VALUES = re.compile(
    "".join((
        r"\s*((?:INSERT|REPLACE)\s.+\sVALUES?\s+)",
        r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))",
        r"(\s*(?:ON DUPLICATE.*)?)\Z",
    )),
    re.IGNORECASE | re.DOTALL)
52 | """ 53 | conn = self.connection 54 | if conn is None: 55 | return 56 | try: 57 | while self.nextset(): 58 | pass 59 | finally: 60 | self.connection = None 61 | 62 | def __enter__(self): 63 | return self 64 | 65 | def __exit__(self, *exc_info): 66 | del exc_info 67 | self.close() 68 | 69 | def _get_db(self): 70 | if not self.connection: 71 | raise err.ProgrammingError("Cursor closed") 72 | return self.connection 73 | 74 | def _check_executed(self): 75 | if not self._executed: 76 | raise err.ProgrammingError("execute() first") 77 | 78 | def _conv_row(self, row): 79 | return row 80 | 81 | def setinputsizes(self, *args): 82 | """Does nothing, required by DB API.""" 83 | 84 | def setoutputsizes(self, *args): 85 | """Does nothing, required by DB API.""" 86 | 87 | def _nextset(self, unbuffered=False): 88 | """Get the next query set""" 89 | conn = self._get_db() 90 | current_result = self._result 91 | # for unbuffered queries warnings are only available once whole result has been read 92 | if unbuffered: 93 | self._show_warnings() 94 | if current_result is None or current_result is not conn._result: 95 | return None 96 | if not current_result.has_next: 97 | return None 98 | conn.next_result(unbuffered=unbuffered) 99 | self._do_get_result() 100 | return True 101 | 102 | def nextset(self): 103 | return self._nextset(False) 104 | 105 | def _ensure_bytes(self, x, encoding=None): 106 | if isinstance(x, text_type): 107 | x = x.encode(encoding) 108 | elif isinstance(x, (tuple, list)): 109 | x = type(x)(self._ensure_bytes(v, encoding=encoding) for v in x) 110 | return x 111 | 112 | def _escape_args(self, args, conn): 113 | ensure_bytes = partial(self._ensure_bytes, encoding=conn.encoding) 114 | 115 | if isinstance(args, (tuple, list)): 116 | if PY2: 117 | args = tuple(map(ensure_bytes, args)) 118 | return tuple(conn.literal(arg) for arg in args) 119 | elif isinstance(args, dict): 120 | if PY2: 121 | args = dict((ensure_bytes(key), ensure_bytes(val)) for 122 | (key, val) in 
args.items()) 123 | return dict((key, conn.literal(val)) for (key, val) in args.items()) 124 | else: 125 | # If it's not a dictionary let's try escaping it anyways. 126 | # Worst case it will throw a Value error 127 | if PY2: 128 | args = ensure_bytes(args) 129 | return conn.escape(args) 130 | 131 | def mogrify(self, query, args=None): 132 | """ 133 | Returns the exact string that is sent to the database by calling the 134 | execute() method. 135 | 136 | This method follows the extension to the DB API 2.0 followed by Psycopg. 137 | """ 138 | conn = self._get_db() 139 | if PY2: # Use bytes on Python 2 always 140 | query = self._ensure_bytes(query, encoding=conn.encoding) 141 | 142 | if args is not None: 143 | query = query % self._escape_args(args, conn) 144 | 145 | return query 146 | 147 | def execute(self, query, args=None): 148 | """Execute a query 149 | 150 | :param str query: Query to execute. 151 | 152 | :param args: parameters used with query. (optional) 153 | :type args: tuple, list or dict 154 | 155 | :return: Number of affected rows 156 | :rtype: int 157 | 158 | If args is a list or tuple, %s can be used as a placeholder in the query. 159 | If args is a dict, %(name)s can be used as a placeholder in the query. 160 | """ 161 | while self.nextset(): 162 | pass 163 | 164 | query = self.mogrify(query, args) 165 | 166 | result = self._query(query) 167 | self._executed = query 168 | return result 169 | 170 | def executemany(self, query, args): 171 | # type: (str, list) -> int 172 | """Run several data against one query 173 | 174 | :param query: query to execute on server 175 | :param args: Sequence of sequences or mappings. It is used as parameter. 176 | :return: Number of rows affected, if any. 177 | 178 | This method improves performance on multiple-row INSERT and 179 | REPLACE. Otherwise it is equivalent to looping over args with 180 | execute(). 
181 | """ 182 | if not args: 183 | return 184 | 185 | m = RE_INSERT_VALUES.match(query) 186 | if m: 187 | q_prefix = m.group(1) % () 188 | q_values = m.group(2).rstrip() 189 | q_postfix = m.group(3) or '' 190 | assert q_values[0] == '(' and q_values[-1] == ')' 191 | return self._do_execute_many(q_prefix, q_values, q_postfix, args, 192 | self.max_stmt_length, 193 | self._get_db().encoding) 194 | 195 | self.rowcount = sum(self.execute(query, arg) for arg in args) 196 | return self.rowcount 197 | 198 | def _do_execute_many(self, prefix, values, postfix, args, max_stmt_length, encoding): 199 | conn = self._get_db() 200 | escape = self._escape_args 201 | if isinstance(prefix, text_type): 202 | prefix = prefix.encode(encoding) 203 | if PY2 and isinstance(values, text_type): 204 | values = values.encode(encoding) 205 | if isinstance(postfix, text_type): 206 | postfix = postfix.encode(encoding) 207 | sql = bytearray(prefix) 208 | args = iter(args) 209 | v = values % escape(next(args), conn) 210 | if isinstance(v, text_type): 211 | if PY2: 212 | v = v.encode(encoding) 213 | else: 214 | v = v.encode(encoding, 'surrogateescape') 215 | sql += v 216 | rows = 0 217 | for arg in args: 218 | v = values % escape(arg, conn) 219 | if isinstance(v, text_type): 220 | if PY2: 221 | v = v.encode(encoding) 222 | else: 223 | v = v.encode(encoding, 'surrogateescape') 224 | if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length: 225 | rows += self.execute(sql + postfix) 226 | sql = bytearray(prefix) 227 | else: 228 | sql += b',' 229 | sql += v 230 | rows += self.execute(sql + postfix) 231 | self.rowcount = rows 232 | return rows 233 | 234 | def callproc(self, procname, args=()): 235 | """Execute stored procedure procname with args 236 | 237 | procname -- string, name of procedure to execute on server 238 | 239 | args -- Sequence of parameters to use with procedure 240 | 241 | Returns the original args. 
242 | 243 | Compatibility warning: PEP-249 specifies that any modified 244 | parameters must be returned. This is currently impossible 245 | as they are only available by storing them in a server 246 | variable and then retrieved by a query. Since stored 247 | procedures return zero or more result sets, there is no 248 | reliable way to get at OUT or INOUT parameters via callproc. 249 | The server variables are named @_procname_n, where procname 250 | is the parameter above and n is the position of the parameter 251 | (from zero). Once all result sets generated by the procedure 252 | have been fetched, you can issue a SELECT @_procname_0, ... 253 | query using .execute() to get any OUT or INOUT values. 254 | 255 | Compatibility warning: The act of calling a stored procedure 256 | itself creates an empty result set. This appears after any 257 | result sets generated by the procedure. This is non-standard 258 | behavior with respect to the DB-API. Be sure to use nextset() 259 | to advance through all result sets; otherwise you may get 260 | disconnected. 
261 | """ 262 | conn = self._get_db() 263 | for index, arg in enumerate(args): 264 | q = "SET @_%s_%d=%s" % (procname, index, conn.escape(arg)) 265 | self._query(q) 266 | self.nextset() 267 | 268 | q = "CALL %s(%s)" % (procname, 269 | ','.join(['@_%s_%d' % (procname, i) 270 | for i in range_type(len(args))])) 271 | self._query(q) 272 | self._executed = q 273 | return args 274 | 275 | def fetchone(self): 276 | """Fetch the next row""" 277 | self._check_executed() 278 | if self._rows is None or self.rownumber >= len(self._rows): 279 | return None 280 | result = self._rows[self.rownumber] 281 | self.rownumber += 1 282 | return result 283 | 284 | def fetchmany(self, size=None): 285 | """Fetch several rows""" 286 | self._check_executed() 287 | if self._rows is None: 288 | return () 289 | end = self.rownumber + (size or self.arraysize) 290 | result = self._rows[self.rownumber:end] 291 | self.rownumber = min(end, len(self._rows)) 292 | return result 293 | 294 | def fetchall(self): 295 | """Fetch all the rows""" 296 | self._check_executed() 297 | if self._rows is None: 298 | return () 299 | if self.rownumber: 300 | result = self._rows[self.rownumber:] 301 | else: 302 | result = self._rows 303 | self.rownumber = len(self._rows) 304 | return result 305 | 306 | def scroll(self, value, mode='relative'): 307 | self._check_executed() 308 | if mode == 'relative': 309 | r = self.rownumber + value 310 | elif mode == 'absolute': 311 | r = value 312 | else: 313 | raise err.ProgrammingError("unknown scroll mode %s" % mode) 314 | 315 | if not (0 <= r < len(self._rows)): 316 | raise IndexError("out of range") 317 | self.rownumber = r 318 | 319 | def _query(self, q): 320 | conn = self._get_db() 321 | self._last_executed = q 322 | conn.query(q) 323 | self._do_get_result() 324 | return self.rowcount 325 | 326 | def _do_get_result(self): 327 | conn = self._get_db() 328 | 329 | self.rownumber = 0 330 | self._result = result = conn._result 331 | 332 | self.rowcount = result.affected_rows 333 
| self.description = result.description 334 | self.lastrowid = result.insert_id 335 | self._rows = result.rows 336 | self._warnings_handled = False 337 | 338 | if not self._defer_warnings: 339 | self._show_warnings() 340 | 341 | def _show_warnings(self): 342 | if self._warnings_handled: 343 | return 344 | self._warnings_handled = True 345 | if self._result and (self._result.has_next or not self._result.warning_count): 346 | return 347 | ws = self._get_db().show_warnings() 348 | if ws is None: 349 | return 350 | for w in ws: 351 | msg = w[-1] 352 | if PY2: 353 | if isinstance(msg, unicode): 354 | msg = msg.encode('utf-8', 'replace') 355 | warnings.warn(err.Warning(*w[1:3]), stacklevel=4) 356 | 357 | def __iter__(self): 358 | return iter(self.fetchone, None) 359 | 360 | Warning = err.Warning 361 | Error = err.Error 362 | InterfaceError = err.InterfaceError 363 | DatabaseError = err.DatabaseError 364 | DataError = err.DataError 365 | OperationalError = err.OperationalError 366 | IntegrityError = err.IntegrityError 367 | InternalError = err.InternalError 368 | ProgrammingError = err.ProgrammingError 369 | NotSupportedError = err.NotSupportedError 370 | 371 | 372 | class DictCursorMixin(object): 373 | # You can override this to use OrderedDict or other dict-like types. 374 | dict_type = dict 375 | 376 | def _do_get_result(self): 377 | super(DictCursorMixin, self)._do_get_result() 378 | fields = [] 379 | if self.description: 380 | for f in self._result.fields: 381 | name = f.name 382 | if name in fields: 383 | name = f.table_name + '.' 
+ name 384 | fields.append(name) 385 | self._fields = fields 386 | 387 | if fields and self._rows: 388 | self._rows = [self._conv_row(r) for r in self._rows] 389 | 390 | def _conv_row(self, row): 391 | if row is None: 392 | return None 393 | return self.dict_type(zip(self._fields, row)) 394 | 395 | 396 | class DictCursor(DictCursorMixin, Cursor): 397 | """A cursor which returns results as a dictionary""" 398 | 399 | 400 | class SSCursor(Cursor): 401 | """ 402 | Unbuffered Cursor, mainly useful for queries that return a lot of data, 403 | or for connections to remote servers over a slow network. 404 | 405 | Instead of copying every row of data into a buffer, this will fetch 406 | rows as needed. The upside of this is the client uses much less memory, 407 | and rows are returned much faster when traveling over a slow network 408 | or if the result set is very big. 409 | 410 | There are limitations, though. The MySQL protocol doesn't support 411 | returning the total number of rows, so the only way to tell how many rows 412 | there are is to iterate over every row returned. Also, it currently isn't 413 | possible to scroll backwards, as only the current row is held in memory. 
414 | """ 415 | 416 | _defer_warnings = True 417 | 418 | def _conv_row(self, row): 419 | return row 420 | 421 | def close(self): 422 | conn = self.connection 423 | if conn is None: 424 | return 425 | 426 | if self._result is not None and self._result is conn._result: 427 | self._result._finish_unbuffered_query() 428 | 429 | try: 430 | while self.nextset(): 431 | pass 432 | finally: 433 | self.connection = None 434 | 435 | def _query(self, q): 436 | conn = self._get_db() 437 | self._last_executed = q 438 | conn.query(q, unbuffered=True) 439 | self._do_get_result() 440 | return self.rowcount 441 | 442 | def nextset(self): 443 | return self._nextset(unbuffered=True) 444 | 445 | def read_next(self): 446 | """Read next row""" 447 | return self._conv_row(self._result._read_rowdata_packet_unbuffered()) 448 | 449 | def fetchone(self): 450 | """Fetch next row""" 451 | self._check_executed() 452 | row = self.read_next() 453 | if row is None: 454 | self._show_warnings() 455 | return None 456 | self.rownumber += 1 457 | return row 458 | 459 | def fetchall(self): 460 | """ 461 | Fetch all, as per MySQLdb. Pretty useless for large queries, as 462 | it is buffered. See fetchall_unbuffered(), if you want an unbuffered 463 | generator version of this method. 464 | """ 465 | return list(self.fetchall_unbuffered()) 466 | 467 | def fetchall_unbuffered(self): 468 | """ 469 | Fetch all, implemented as a generator, which isn't to standard, 470 | however, it doesn't make sense to return everything in a list, as that 471 | would use ridiculous memory for large result sets. 
472 | """ 473 | return iter(self.fetchone, None) 474 | 475 | def __iter__(self): 476 | return self.fetchall_unbuffered() 477 | 478 | def fetchmany(self, size=None): 479 | """Fetch many""" 480 | self._check_executed() 481 | if size is None: 482 | size = self.arraysize 483 | 484 | rows = [] 485 | for i in range_type(size): 486 | row = self.read_next() 487 | if row is None: 488 | self._show_warnings() 489 | break 490 | rows.append(row) 491 | self.rownumber += 1 492 | return rows 493 | 494 | def scroll(self, value, mode='relative'): 495 | self._check_executed() 496 | 497 | if mode == 'relative': 498 | if value < 0: 499 | raise err.NotSupportedError( 500 | "Backwards scrolling not supported by this cursor") 501 | 502 | for _ in range_type(value): 503 | self.read_next() 504 | self.rownumber += value 505 | elif mode == 'absolute': 506 | if value < self.rownumber: 507 | raise err.NotSupportedError( 508 | "Backwards scrolling not supported by this cursor") 509 | 510 | end = value - self.rownumber 511 | for _ in range_type(end): 512 | self.read_next() 513 | self.rownumber = value 514 | else: 515 | raise err.ProgrammingError("unknown scroll mode %s" % mode) 516 | 517 | 518 | class SSDictCursor(DictCursorMixin, SSCursor): 519 | """An unbuffered cursor, which returns results as a dictionary""" 520 | -------------------------------------------------------------------------------- /lambda/pymysql/tests/test_issues.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import time 3 | import warnings 4 | import sys 5 | 6 | import pymysql 7 | from pymysql import cursors 8 | from pymysql._compat import text_type 9 | from pymysql.tests import base 10 | import unittest2 11 | 12 | try: 13 | import imp 14 | reload = imp.reload 15 | except AttributeError: 16 | pass 17 | 18 | 19 | __all__ = ["TestOldIssues", "TestNewIssues", "TestGitHubIssues"] 20 | 21 | class TestOldIssues(base.PyMySQLTestCase): 22 | def test_issue_3(self): 23 | """ 
undefined methods datetime_or_None, date_or_None """ 24 | conn = self.connections[0] 25 | c = conn.cursor() 26 | with warnings.catch_warnings(): 27 | warnings.filterwarnings("ignore") 28 | c.execute("drop table if exists issue3") 29 | c.execute("create table issue3 (d date, t time, dt datetime, ts timestamp)") 30 | try: 31 | c.execute("insert into issue3 (d, t, dt, ts) values (%s,%s,%s,%s)", (None, None, None, None)) 32 | c.execute("select d from issue3") 33 | self.assertEqual(None, c.fetchone()[0]) 34 | c.execute("select t from issue3") 35 | self.assertEqual(None, c.fetchone()[0]) 36 | c.execute("select dt from issue3") 37 | self.assertEqual(None, c.fetchone()[0]) 38 | c.execute("select ts from issue3") 39 | self.assertTrue(isinstance(c.fetchone()[0], datetime.datetime)) 40 | finally: 41 | c.execute("drop table issue3") 42 | 43 | def test_issue_4(self): 44 | """ can't retrieve TIMESTAMP fields """ 45 | conn = self.connections[0] 46 | c = conn.cursor() 47 | with warnings.catch_warnings(): 48 | warnings.filterwarnings("ignore") 49 | c.execute("drop table if exists issue4") 50 | c.execute("create table issue4 (ts timestamp)") 51 | try: 52 | c.execute("insert into issue4 (ts) values (now())") 53 | c.execute("select ts from issue4") 54 | self.assertTrue(isinstance(c.fetchone()[0], datetime.datetime)) 55 | finally: 56 | c.execute("drop table issue4") 57 | 58 | def test_issue_5(self): 59 | """ query on information_schema.tables fails """ 60 | con = self.connections[0] 61 | cur = con.cursor() 62 | cur.execute("select * from information_schema.tables") 63 | 64 | def test_issue_6(self): 65 | """ exception: TypeError: ord() expected a character, but string of length 0 found """ 66 | # ToDo: this test requires access to db 'mysql'. 
67 | kwargs = self.databases[0].copy() 68 | kwargs['db'] = "mysql" 69 | conn = pymysql.connect(**kwargs) 70 | c = conn.cursor() 71 | c.execute("select * from user") 72 | conn.close() 73 | 74 | def test_issue_8(self): 75 | """ Primary Key and Index error when selecting data """ 76 | conn = self.connections[0] 77 | c = conn.cursor() 78 | with warnings.catch_warnings(): 79 | warnings.filterwarnings("ignore") 80 | c.execute("drop table if exists test") 81 | c.execute("""CREATE TABLE `test` (`station` int(10) NOT NULL DEFAULT '0', `dh` 82 | datetime NOT NULL DEFAULT '2015-01-01 00:00:00', `echeance` int(1) NOT NULL 83 | DEFAULT '0', `me` double DEFAULT NULL, `mo` double DEFAULT NULL, PRIMARY 84 | KEY (`station`,`dh`,`echeance`)) ENGINE=MyISAM DEFAULT CHARSET=latin1;""") 85 | try: 86 | self.assertEqual(0, c.execute("SELECT * FROM test")) 87 | c.execute("ALTER TABLE `test` ADD INDEX `idx_station` (`station`)") 88 | self.assertEqual(0, c.execute("SELECT * FROM test")) 89 | finally: 90 | c.execute("drop table test") 91 | 92 | def test_issue_9(self): 93 | """ sets DeprecationWarning in Python 2.6 """ 94 | try: 95 | reload(pymysql) 96 | except DeprecationWarning: 97 | self.fail() 98 | 99 | def test_issue_13(self): 100 | """ can't handle large result fields """ 101 | conn = self.connections[0] 102 | cur = conn.cursor() 103 | with warnings.catch_warnings(): 104 | warnings.filterwarnings("ignore") 105 | cur.execute("drop table if exists issue13") 106 | try: 107 | cur.execute("create table issue13 (t text)") 108 | # ticket says 18k 109 | size = 18*1024 110 | cur.execute("insert into issue13 (t) values (%s)", ("x" * size,)) 111 | cur.execute("select t from issue13") 112 | # use assertTrue so that obscenely huge error messages don't print 113 | r = cur.fetchone()[0] 114 | self.assertTrue("x" * size == r) 115 | finally: 116 | cur.execute("drop table issue13") 117 | 118 | def test_issue_15(self): 119 | """ query should be expanded before perform character encoding """ 120 | conn = 
self.connections[0] 121 | c = conn.cursor() 122 | with warnings.catch_warnings(): 123 | warnings.filterwarnings("ignore") 124 | c.execute("drop table if exists issue15") 125 | c.execute("create table issue15 (t varchar(32))") 126 | try: 127 | c.execute("insert into issue15 (t) values (%s)", (u'\xe4\xf6\xfc',)) 128 | c.execute("select t from issue15") 129 | self.assertEqual(u'\xe4\xf6\xfc', c.fetchone()[0]) 130 | finally: 131 | c.execute("drop table issue15") 132 | 133 | def test_issue_16(self): 134 | """ Patch for string and tuple escaping """ 135 | conn = self.connections[0] 136 | c = conn.cursor() 137 | with warnings.catch_warnings(): 138 | warnings.filterwarnings("ignore") 139 | c.execute("drop table if exists issue16") 140 | c.execute("create table issue16 (name varchar(32) primary key, email varchar(32))") 141 | try: 142 | c.execute("insert into issue16 (name, email) values ('pete', 'floydophone')") 143 | c.execute("select email from issue16 where name=%s", ("pete",)) 144 | self.assertEqual("floydophone", c.fetchone()[0]) 145 | finally: 146 | c.execute("drop table issue16") 147 | 148 | @unittest2.skip("test_issue_17() requires a custom, legacy MySQL configuration and will not be run.") 149 | def test_issue_17(self): 150 | """could not connect mysql use passwod""" 151 | conn = self.connections[0] 152 | host = self.databases[0]["host"] 153 | db = self.databases[0]["db"] 154 | c = conn.cursor() 155 | 156 | # grant access to a table to a user with a password 157 | try: 158 | with warnings.catch_warnings(): 159 | warnings.filterwarnings("ignore") 160 | c.execute("drop table if exists issue17") 161 | c.execute("create table issue17 (x varchar(32) primary key)") 162 | c.execute("insert into issue17 (x) values ('hello, world!')") 163 | c.execute("grant all privileges on %s.issue17 to 'issue17user'@'%%' identified by '1234'" % db) 164 | conn.commit() 165 | 166 | conn2 = pymysql.connect(host=host, user="issue17user", passwd="1234", db=db) 167 | c2 = conn2.cursor() 168 | 
c2.execute("select x from issue17") 169 | self.assertEqual("hello, world!", c2.fetchone()[0]) 170 | finally: 171 | c.execute("drop table issue17") 172 | 173 | class TestNewIssues(base.PyMySQLTestCase): 174 | def test_issue_34(self): 175 | try: 176 | pymysql.connect(host="localhost", port=1237, user="root") 177 | self.fail() 178 | except pymysql.OperationalError as e: 179 | self.assertEqual(2003, e.args[0]) 180 | except Exception: 181 | self.fail() 182 | 183 | def test_issue_33(self): 184 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 185 | self.safe_create_table(conn, u'hei\xdfe', 186 | u'create table hei\xdfe (name varchar(32))') 187 | c = conn.cursor() 188 | c.execute(u"insert into hei\xdfe (name) values ('Pi\xdfata')") 189 | c.execute(u"select name from hei\xdfe") 190 | self.assertEqual(u"Pi\xdfata", c.fetchone()[0]) 191 | 192 | @unittest2.skip("This test requires manual intervention") 193 | def test_issue_35(self): 194 | conn = self.connections[0] 195 | c = conn.cursor() 196 | print("sudo killall -9 mysqld within the next 10 seconds") 197 | try: 198 | c.execute("select sleep(10)") 199 | self.fail() 200 | except pymysql.OperationalError as e: 201 | self.assertEqual(2013, e.args[0]) 202 | 203 | def test_issue_36(self): 204 | # connection 0 is super user, connection 1 isn't 205 | conn = self.connections[1] 206 | c = conn.cursor() 207 | c.execute("show processlist") 208 | kill_id = None 209 | for row in c.fetchall(): 210 | id = row[0] 211 | info = row[7] 212 | if info == "show processlist": 213 | kill_id = id 214 | break 215 | self.assertEqual(kill_id, conn.thread_id()) 216 | # now nuke the connection 217 | self.connections[0].kill(kill_id) 218 | # make sure this connection has broken 219 | try: 220 | c.execute("show tables") 221 | self.fail() 222 | except Exception: 223 | pass 224 | c.close() 225 | conn.close() 226 | 227 | # check the process list from the other connection 228 | try: 229 | # Wait since Travis-CI sometimes fail this test. 
230 | time.sleep(0.1) 231 | 232 | c = self.connections[0].cursor() 233 | c.execute("show processlist") 234 | ids = [row[0] for row in c.fetchall()] 235 | self.assertFalse(kill_id in ids) 236 | finally: 237 | del self.connections[1] 238 | 239 | def test_issue_37(self): 240 | conn = self.connections[0] 241 | c = conn.cursor() 242 | self.assertEqual(1, c.execute("SELECT @foo")) 243 | self.assertEqual((None,), c.fetchone()) 244 | self.assertEqual(0, c.execute("SET @foo = 'bar'")) 245 | c.execute("set @foo = 'bar'") 246 | 247 | def test_issue_38(self): 248 | conn = self.connections[0] 249 | c = conn.cursor() 250 | datum = "a" * 1024 * 1023 # reduced size for most default mysql installs 251 | 252 | try: 253 | with warnings.catch_warnings(): 254 | warnings.filterwarnings("ignore") 255 | c.execute("drop table if exists issue38") 256 | c.execute("create table issue38 (id integer, data mediumblob)") 257 | c.execute("insert into issue38 values (1, %s)", (datum,)) 258 | finally: 259 | c.execute("drop table issue38") 260 | 261 | def disabled_test_issue_54(self): 262 | conn = self.connections[0] 263 | c = conn.cursor() 264 | with warnings.catch_warnings(): 265 | warnings.filterwarnings("ignore") 266 | c.execute("drop table if exists issue54") 267 | big_sql = "select * from issue54 where " 268 | big_sql += " and ".join("%d=%d" % (i,i) for i in range(0, 100000)) 269 | 270 | try: 271 | c.execute("create table issue54 (id integer primary key)") 272 | c.execute("insert into issue54 (id) values (7)") 273 | c.execute(big_sql) 274 | self.assertEqual(7, c.fetchone()[0]) 275 | finally: 276 | c.execute("drop table issue54") 277 | 278 | class TestGitHubIssues(base.PyMySQLTestCase): 279 | def test_issue_66(self): 280 | """ 'Connection' object has no attribute 'insert_id' """ 281 | conn = self.connections[0] 282 | c = conn.cursor() 283 | self.assertEqual(0, conn.insert_id()) 284 | try: 285 | with warnings.catch_warnings(): 286 | warnings.filterwarnings("ignore") 287 | c.execute("drop table if 
exists issue66") 288 | c.execute("create table issue66 (id integer primary key auto_increment, x integer)") 289 | c.execute("insert into issue66 (x) values (1)") 290 | c.execute("insert into issue66 (x) values (1)") 291 | self.assertEqual(2, conn.insert_id()) 292 | finally: 293 | c.execute("drop table issue66") 294 | 295 | def test_issue_79(self): 296 | """ Duplicate field overwrites the previous one in the result of DictCursor """ 297 | conn = self.connections[0] 298 | c = conn.cursor(pymysql.cursors.DictCursor) 299 | 300 | with warnings.catch_warnings(): 301 | warnings.filterwarnings("ignore") 302 | c.execute("drop table if exists a") 303 | c.execute("drop table if exists b") 304 | c.execute("""CREATE TABLE a (id int, value int)""") 305 | c.execute("""CREATE TABLE b (id int, value int)""") 306 | 307 | a=(1,11) 308 | b=(1,22) 309 | try: 310 | c.execute("insert into a values (%s, %s)", a) 311 | c.execute("insert into b values (%s, %s)", b) 312 | 313 | c.execute("SELECT * FROM a inner join b on a.id = b.id") 314 | r = c.fetchall()[0] 315 | self.assertEqual(r['id'], 1) 316 | self.assertEqual(r['value'], 11) 317 | self.assertEqual(r['b.value'], 22) 318 | finally: 319 | c.execute("drop table a") 320 | c.execute("drop table b") 321 | 322 | def test_issue_95(self): 323 | """ Leftover trailing OK packet for "CALL my_sp" queries """ 324 | conn = self.connections[0] 325 | cur = conn.cursor() 326 | with warnings.catch_warnings(): 327 | warnings.filterwarnings("ignore") 328 | cur.execute("DROP PROCEDURE IF EXISTS `foo`") 329 | cur.execute("""CREATE PROCEDURE `foo` () 330 | BEGIN 331 | SELECT 1; 332 | END""") 333 | try: 334 | cur.execute("""CALL foo()""") 335 | cur.execute("""SELECT 1""") 336 | self.assertEqual(cur.fetchone()[0], 1) 337 | finally: 338 | with warnings.catch_warnings(): 339 | warnings.filterwarnings("ignore") 340 | cur.execute("DROP PROCEDURE IF EXISTS `foo`") 341 | 342 | def test_issue_114(self): 343 | """ autocommit is not set after reconnecting with ping() 
""" 344 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 345 | conn.autocommit(False) 346 | c = conn.cursor() 347 | c.execute("""select @@autocommit;""") 348 | self.assertFalse(c.fetchone()[0]) 349 | conn.close() 350 | conn.ping() 351 | c.execute("""select @@autocommit;""") 352 | self.assertFalse(c.fetchone()[0]) 353 | conn.close() 354 | 355 | # Ensure autocommit() is still working 356 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 357 | c = conn.cursor() 358 | c.execute("""select @@autocommit;""") 359 | self.assertFalse(c.fetchone()[0]) 360 | conn.close() 361 | conn.ping() 362 | conn.autocommit(True) 363 | c.execute("""select @@autocommit;""") 364 | self.assertTrue(c.fetchone()[0]) 365 | conn.close() 366 | 367 | def test_issue_175(self): 368 | """ The number of fields returned by server is read in wrong way """ 369 | conn = self.connections[0] 370 | cur = conn.cursor() 371 | for length in (200, 300): 372 | columns = ', '.join('c{0} integer'.format(i) for i in range(length)) 373 | sql = 'create table test_field_count ({0})'.format(columns) 374 | try: 375 | cur.execute(sql) 376 | cur.execute('select * from test_field_count') 377 | assert len(cur.description) == length 378 | finally: 379 | with warnings.catch_warnings(): 380 | warnings.filterwarnings("ignore") 381 | cur.execute('drop table if exists test_field_count') 382 | 383 | def test_issue_321(self): 384 | """ Test iterable as query argument. 
""" 385 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 386 | self.safe_create_table( 387 | conn, "issue321", 388 | "create table issue321 (value_1 varchar(1), value_2 varchar(1))") 389 | 390 | sql_insert = "insert into issue321 (value_1, value_2) values (%s, %s)" 391 | sql_dict_insert = ("insert into issue321 (value_1, value_2) " 392 | "values (%(value_1)s, %(value_2)s)") 393 | sql_select = ("select * from issue321 where " 394 | "value_1 in %s and value_2=%s") 395 | data = [ 396 | [(u"a", ), u"\u0430"], 397 | [[u"b"], u"\u0430"], 398 | {"value_1": [[u"c"]], "value_2": u"\u0430"} 399 | ] 400 | cur = conn.cursor() 401 | self.assertEqual(cur.execute(sql_insert, data[0]), 1) 402 | self.assertEqual(cur.execute(sql_insert, data[1]), 1) 403 | self.assertEqual(cur.execute(sql_dict_insert, data[2]), 1) 404 | self.assertEqual( 405 | cur.execute(sql_select, [(u"a", u"b", u"c"), u"\u0430"]), 3) 406 | self.assertEqual(cur.fetchone(), (u"a", u"\u0430")) 407 | self.assertEqual(cur.fetchone(), (u"b", u"\u0430")) 408 | self.assertEqual(cur.fetchone(), (u"c", u"\u0430")) 409 | 410 | def test_issue_364(self): 411 | """ Test mixed unicode/binary arguments in executemany. 
""" 412 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 413 | self.safe_create_table( 414 | conn, "issue364", 415 | "create table issue364 (value_1 binary(3), value_2 varchar(3)) " 416 | "engine=InnoDB default charset=utf8") 417 | 418 | sql = "insert into issue364 (value_1, value_2) values (%s, %s)" 419 | usql = u"insert into issue364 (value_1, value_2) values (%s, %s)" 420 | values = [pymysql.Binary(b"\x00\xff\x00"), u"\xe4\xf6\xfc"] 421 | 422 | # test single insert and select 423 | cur = conn.cursor() 424 | cur.execute(sql, args=values) 425 | cur.execute("select * from issue364") 426 | self.assertEqual(cur.fetchone(), tuple(values)) 427 | 428 | # test single insert unicode query 429 | cur.execute(usql, args=values) 430 | 431 | # test multi insert and select 432 | cur.executemany(sql, args=(values, values, values)) 433 | cur.execute("select * from issue364") 434 | for row in cur.fetchall(): 435 | self.assertEqual(row, tuple(values)) 436 | 437 | # test multi insert with unicode query 438 | cur.executemany(usql, args=(values, values, values)) 439 | 440 | def test_issue_363(self): 441 | """ Test binary / geometry types. """ 442 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 443 | self.safe_create_table( 444 | conn, "issue363", 445 | "CREATE TABLE issue363 ( " 446 | "id INTEGER PRIMARY KEY, geom LINESTRING NOT NULL, " 447 | "SPATIAL KEY geom (geom)) " 448 | "ENGINE=MyISAM default charset=utf8") 449 | 450 | cur = conn.cursor() 451 | query = ("INSERT INTO issue363 (id, geom) VALUES" 452 | "(1998, GeomFromText('LINESTRING(1.1 1.1,2.2 2.2)'))") 453 | # From MySQL 5.7, ST_GeomFromText is added and GeomFromText is deprecated. 
454 | if self.mysql_server_is(conn, (5, 7, 0)): 455 | with self.assertWarns(pymysql.err.Warning) as cm: 456 | cur.execute(query) 457 | else: 458 | cur.execute(query) 459 | 460 | # select WKT 461 | query = "SELECT AsText(geom) FROM issue363" 462 | if self.mysql_server_is(conn, (5, 7, 0)): 463 | with self.assertWarns(pymysql.err.Warning) as cm: 464 | cur.execute(query) 465 | else: 466 | cur.execute(query) 467 | row = cur.fetchone() 468 | self.assertEqual(row, ("LINESTRING(1.1 1.1,2.2 2.2)", )) 469 | 470 | # select WKB 471 | query = "SELECT AsBinary(geom) FROM issue363" 472 | if self.mysql_server_is(conn, (5, 7, 0)): 473 | with self.assertWarns(pymysql.err.Warning) as cm: 474 | cur.execute(query) 475 | else: 476 | cur.execute(query) 477 | row = cur.fetchone() 478 | self.assertEqual(row, 479 | (b"\x01\x02\x00\x00\x00\x02\x00\x00\x00" 480 | b"\x9a\x99\x99\x99\x99\x99\xf1?" 481 | b"\x9a\x99\x99\x99\x99\x99\xf1?" 482 | b"\x9a\x99\x99\x99\x99\x99\x01@" 483 | b"\x9a\x99\x99\x99\x99\x99\x01@", )) 484 | 485 | # select internal binary 486 | cur.execute("SELECT geom FROM issue363") 487 | row = cur.fetchone() 488 | # don't assert the exact internal binary value, as it could 489 | # vary across implementations 490 | self.assertTrue(isinstance(row[0], bytes)) 491 | 492 | def test_issue_491(self): 493 | """ Test warning propagation """ 494 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 495 | 496 | with warnings.catch_warnings(): 497 | # Ignore all warnings other than pymysql generated ones 498 | warnings.simplefilter("ignore") 499 | warnings.simplefilter("error", category=pymysql.Warning) 500 | 501 | # verify for both buffered and unbuffered cursor types 502 | for cursor_class in (cursors.Cursor, cursors.SSCursor): 503 | c = conn.cursor(cursor_class) 504 | try: 505 | c.execute("SELECT CAST('124b' AS SIGNED)") 506 | c.fetchall() 507 | except pymysql.Warning as e: 508 | # Warnings should have errorcode and string message, just like exceptions 509 | 
self.assertEqual(len(e.args), 2) 510 | self.assertEqual(e.args[0], 1292) 511 | self.assertTrue(isinstance(e.args[1], text_type)) 512 | else: 513 | self.fail("Should raise Warning") 514 | finally: 515 | c.close() 516 | --------------------------------------------------------------------------------