├── .coveragerc ├── .github └── ISSUE_TEMPLATE.md ├── .gitignore ├── .travis.yml ├── .travis ├── database.json └── initializedb.sh ├── CHANGELOG ├── LICENSE ├── MANIFEST.in ├── README.rst ├── docs ├── Makefile ├── make.bat └── source │ ├── conf.py │ ├── index.rst │ ├── modules │ ├── connections.rst │ ├── cursors.rst │ └── index.rst │ └── user │ ├── development.rst │ ├── examples.rst │ ├── index.rst │ ├── installation.rst │ └── resources.rst ├── example.py ├── runtests.py ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── base.py ├── data │ ├── load_local_data.txt │ └── load_local_warn_data.txt ├── test_DictCursor.py ├── test_SSCursor.py ├── test_basic.py ├── test_connection.py ├── test_converters.py ├── test_cursor.py ├── test_err.py ├── test_issues.py ├── test_load_local.py ├── test_nextset.py ├── test_optionfile.py └── thirdparty │ ├── __init__.py │ └── test_MySQLdb │ ├── __init__.py │ ├── capabilities.py │ ├── dbapi20.py │ ├── test_MySQLdb_capabilities.py │ ├── test_MySQLdb_dbapi20.py │ └── test_MySQLdb_nonstandard.py ├── tox.ini └── upymysql ├── __init__.py ├── _socketio.py ├── charset.py ├── connections.py ├── constants ├── CLIENT.py ├── COMMAND.py ├── CR.py ├── FIELD_TYPE.py ├── FLAG.py ├── SERVER_STATUS.py └── __init__.py ├── converters.py ├── cursors.py ├── optionfile.py ├── times.py └── util.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = 4 | pymysql 5 | omit = pymysql/tests/* 6 | pymysql/tests/thirdparty/test_MySQLdb/* 7 | 8 | [report] 9 | exclude_lines = 10 | pragma: no cover 11 | except ImportError: 12 | if DEBUG: 13 | def __repr__ 14 | def __str__ 15 | raise NotImplementedError 16 | def __getattr__ 17 | raise ValueError 18 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | This project is maintained by one busy person with a frail wife and an infant daughter. 2 | My time and energy are very limited resources. I'm not a teacher or free tech support. 3 | Don't ask a question here. Don't file an issue until you believe it's not a problem with your code. 4 | Search for friendly volunteers who can teach you or review your code on mailing lists or Q&A sites. 5 | 6 | See also: https://medium.com/@methane/why-you-must-not-ask-questions-on-github-issues-51d741d83fde 7 | 8 | 9 | If you're sure it's PyMySQL's issue, report the complete steps to reproduce, from creating the database. 10 | 11 | I don't have time to investigate your issue from an incomplete code snippet. 
12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.pyo 3 | __pycache__ 4 | .coverage 5 | /dist 6 | /PyMySQL.egg-info 7 | /.tox 8 | /build 9 | /pymysql/tests/databases.json 10 | 11 | /.idea 12 | docs/build 13 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | dist: trusty 3 | language: python 4 | python: 5 | - "3.7-dev" 6 | - "3.6" 7 | - "2.6" 8 | - "pypy3.3-5.2-alpha1" 9 | 10 | cache: pip 11 | 12 | matrix: 13 | include: 14 | - addons: 15 | mariadb: 5.5 16 | python: "3.5" 17 | 18 | - addons: 19 | mariadb: 10.0 20 | python: "pypy" 21 | 22 | - addons: 23 | mariadb: 10.1 24 | python: "2.7" 25 | 26 | - env: 27 | - DB=5.6.35 28 | addons: 29 | apt: 30 | packages: 31 | - libaio-dev 32 | python: "3.3" 33 | 34 | - env: 35 | - DB=5.7.17 36 | addons: 37 | apt: 38 | packages: 39 | - libaio-dev 40 | python: "3.4" 41 | 42 | 43 | 44 | # different py version from 5.6 and 5.7 as cache seems to be based on py version 45 | # http://dev.mysql.com/downloads/mysql/5.7.html has latest development release version 46 | # really only need libaio1 for DB builds however libaio-dev is whitelisted for container builds and liaio1 isn't 47 | install: 48 | - if [ -n "${EXTRAPKG}" ]; then 49 | sudo apt-get install ${EXTRAPKG}; 50 | fi 51 | - export PASSWORD=travis; 52 | - pip install -U coveralls unittest2 coverage 53 | 54 | before_script: 55 | - ./.travis/initializedb.sh 56 | - mysql -e 'create database test_pymysql DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;' 57 | - mysql -e 'create database test_pymysql2 DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;' 58 | - mysql -u root -e "create user travis_pymysql2 identified by 'some password'; grant all on test_pymysql2.* to travis_pymysql2;" 59 | - mysql -u root -e "create user travis_pymysql2@localhost identified by 'some password'; grant all on test_pymysql2.* to travis_pymysql2@localhost;" 60 | - mysql -e 'select VERSION()' 61 | - python -VV 62 | - rm -f ~/.my.cnf # set in .travis.initialize.db.sh for the above commands - we should be using database.json however 63 | - export COVERALLS_PARALLEL=true 64 | 65 | script: 66 | - coverage run ./runtests.py 67 | 68 | after_success: 69 | - coveralls 70 | - cat /tmp/mysql.err 71 | 72 | after_failure: 73 | - cat /tmp/mysql.err 74 | 75 | # vim: sw=2 ts=2 sts=2 expandtab 76 | -------------------------------------------------------------------------------- /.travis/database.json: -------------------------------------------------------------------------------- 1 | [ 2 | {"host": "localhost", "unix_socket": "/var/run/mysqld/mysqld.sock", "user": "root", "passwd": "", "db": "test_pymysql", "use_unicode": true, "local_infile": true}, 3 | {"host": "127.0.0.1", "port": 3306, "user": "travis_pymysql2", "password": "some password", "db": "test_pymysql2" } 4 | ] 5 | -------------------------------------------------------------------------------- /.travis/initializedb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #debug 4 | set -x 5 | #verbose 6 | set -v 7 | 8 | if [ ! 
-z "${DB}" ]; then 9 | # disable existing database server in case of accidential connection 10 | mysql -u root -e 'drop user travis@localhost; drop user root@localhost; drop user travis; create user super@localhost; grant all on *.* to super@localhost with grant option' 11 | mysql -u super -e 'drop user root' 12 | F=mysql-${DB}-linux-glibc2.5-x86_64 13 | mkdir -p ${HOME}/mysql 14 | P=${HOME}/mysql/${F} 15 | if [ ! -d "${P}" ]; then 16 | wget http://cdn.mysql.com/Downloads/MySQL-${DB%.*}/${F}.tar.gz -O - | tar -zxf - --directory=${HOME}/mysql 17 | fi 18 | if [ -f "${P}"/my.cnf ]; then 19 | O="--defaults-file=${P}/my.cnf" 20 | fi 21 | if [ -x "${P}"/scripts/mysql_install_db ]; then 22 | I=${P}/scripts/mysql_install_db 23 | O="--defaults-file=${P}/my.cnf" 24 | else 25 | I=${P}/bin/mysqld 26 | IO=" --initialize " 27 | O="--no-defaults " 28 | fi 29 | ${I} ${O} ${IO} --basedir=${P} --datadir=${HOME}/db-"${DB}" --log-error=/tmp/mysql.err 30 | PWLINE=$(grep 'A temporary password is generated for root@localhost:' /tmp/mysql.err) 31 | PASSWD=${PWLINE##* } 32 | if [ -x ${P}/bin/mysql_ssl_rsa_setup ]; then 33 | ${P}/bin/mysql_ssl_rsa_setup --datadir=${HOME}/db-"${DB}" 34 | fi 35 | # sha256 password auth keys: 36 | openssl genrsa -out "${P}"/private_key.pem 2048 37 | openssl rsa -in "${P}"/private_key.pem -pubout -out "${P}"/public_key.pem 38 | ${P}/bin/mysqld_safe ${O} --ledir=/ --mysqld=${P}/bin/mysqld --datadir=${HOME}/db-${DB} --socket=/tmp/mysql.sock --port 3307 --innodb-buffer-pool-size=200M --lc-messages-dir=${P}/share --plugin-dir=${P}/lib/plugin/ --log-error=/tmp/mysql.err & 39 | while [ ! -S /tmp/mysql.sock ]; do 40 | sleep 2 41 | done 42 | cat /tmp/mysql.err 43 | if [ ! -z "${PASSWD}" ]; then 44 | ${P}/bin/mysql -S /tmp/mysql.sock -u root -p"${PASSWD}" --connect-expired-password -e "SET PASSWORD = PASSWORD('')" 45 | fi 46 | mysql -S /tmp/mysql.sock -u root -e "create user ${USER}@localhost; create user ${USER}@'%'; grant all on *.* to ${USER}@localhost WITH GRANT OPTION;grant all on *.* to ${USER}@'%' WITH GRANT OPTION;" 47 | sed -e 's/3306/3307/g' -e 's:/var/run/mysqld/mysqld.sock:/tmp/mysql.sock:g' .travis/database.json > pymysql/tests/databases.json 48 | echo -e "[client]\nsocket = /tmp/mysql.sock\n" > "${HOME}"/.my.cnf 49 | else 50 | cp .travis/database.json pymysql/tests/databases.json 51 | fi 52 | -------------------------------------------------------------------------------- /CHANGELOG: -------------------------------------------------------------------------------- 1 | # Changes 2 | 3 | ## 0.7.11 4 | 5 | Release date: 2017-04-06 6 | 7 | * Fixed Connection.close() failed when failed to send COM_CLOSE packet. 8 | * Cursor.executemany() accepts query ends with semicolon. 9 | * ssl parameters can be read from my.cnf. 10 | 11 | 12 | ## 0.7.10 13 | 14 | Release date: 2017-02-14 15 | 16 | * **SECURITY FIX**: Raise RuntimeError when received LOAD_LOCAL packet while 17 | ``loacal_infile=False``. (Thanks to Bryan Helmig) 18 | 19 | * Raise SERVER_LOST error for MariaDB's shutdown packet (#540) 20 | 21 | * Change default connect_timeout to 10. 22 | 23 | * Add bind_address option (#529) 24 | 25 | 26 | ## 0.7.9 27 | 28 | Release date: 2016-09-03 29 | 30 | * Fix PyMySQL stop reading rows when first column is empty string (#513) 31 | Reverts DEPRECATE_EOF introduced in 0.7.7. 32 | 33 | ## 0.7.8 34 | 35 | Release date: 2016-09-01 36 | 37 | * Revert error message change in 0.7.7. 
38 | (SQLAlchemy parses error message, #507) 39 | 40 | ## 0.7.7 41 | 42 | Release date: 2016-08-30 43 | 44 | * Add new unicode collation (#498) 45 | * Fix conv option is not used for encoding objects. 46 | * Experimental support for DEPRECATE_EOF protocol. 47 | 48 | ## 0.7.6 49 | 50 | Release date: 2016-07-29 51 | 52 | * Fix SELECT JSON type cause UnicodeError 53 | * Avoid float convertion while parsing microseconds 54 | * Warning has number 55 | * SSCursor supports warnings 56 | 57 | ## 0.7.5 58 | 59 | Release date: 2016-06-28 60 | 61 | * Fix exception raised while importing when getpwuid() fails (#472) 62 | * SSCursor supports LOAD DATA LOCAL INFILE (#473) 63 | * Fix encoding error happen for JSON type (#477) 64 | * Fix test fail on Python 2.7 and MySQL 5.7 (#478) 65 | 66 | ## 0.7.4 67 | 68 | Release date: 2016-05-26 69 | 70 | * Fix AttributeError may happen while Connection.__del__ (#463) 71 | * Fix SyntaxError in test_cursor. (#464) 72 | * frozenset support for query value. (#461) 73 | * Start using readthedocs.io 74 | 75 | ## 0.7.3 76 | 77 | Release date: 2016-05-19 78 | 79 | * Add read_timeout and write_timeout option. 80 | * Support serialization customization by `conv` option. 81 | * Unknown type is converted by `str()`, for MySQLdb compatibility. 82 | * Support '%%' in `Cursor.executemany()` 83 | * Support REPLACE statement in `Cursor.executemany()` 84 | * Fix handling incomplete row caused by 'SHOW SLAVE HOSTS'. 85 | * Fix decode error when use_unicode=False on PY3 86 | * Fix port option in my.cnf file is ignored. 87 | 88 | 89 | ## 0.7.2 90 | 91 | Release date: 2016-02-24 92 | 93 | * Fix misuse of `max_allowed_packet` parameter. (#426, #407 and #397) 94 | * Add %(name)s plceholder support to `Cursor.executemany()`. (#427, thanks to 95 | @WorldException) 96 | 97 | ## 0.7.1 98 | 99 | Release date: 2016-01-14 100 | 101 | * Fix auth fail with MySQL 5.1 102 | * Fix escaping unicode fails on Python 2 103 | 104 | ## 0.7 105 | 106 | Release date: 2016-01-10 107 | 108 | * Faster binary escaping 109 | * Add `"_binary" prefix` to string literal for binary types. 110 | binary types are: `bytearray` on Python 2, `bytes` and `bytearray` on Python 3. 111 | This is because recent MySQL show warnings when string literal is invalid for 112 | connection encoding. 113 | * `pymysql.Binary()` returns `bytearray` on Python 2. This is required to distinguish 114 | binary and string. 115 | * Auth plugin support. 116 | * no_delay option is ignored. It will be removed in PyMySQL 0.8. 117 | 118 | 119 | ## 0.6.7 120 | 121 | Release date: 2015-09-30 122 | 123 | * Allow self signed certificate 124 | * Add max_allowed_packet option 125 | * Fix error when bytes in executemany 126 | * Support geometry type 127 | * Add coveralls badge to README 128 | * Fix some bugs relating to warnings 129 | * Add Cursor.mogrify() method 130 | * no_delay option is deprecated and True by default 131 | * Fix options from my.cnf overrides options from arguments 132 | * Allow socket like object. (It's not feature for end users) 133 | * Strip quotes while reading options from my.cnf file 134 | * Fix encoding issue in executemany() 135 | 136 | ## 0.6.6 137 | 138 | * Add context manager to cursor 139 | * Fix can't encode blob that is not utf-8 on PY3. (regression of 0.6.4, 140 | Thanks to @wiggzz) 141 | 142 | ## 0.6.5 143 | Skipped 144 | 145 | ## 0.6.4 146 | * Support "LOAD LOCAL INFILE". Thanks @wraziens 147 | * Show MySQL warnings after execute query. 148 | * Fix MySQLError may be wrapped with OperationalError while connectiong. 
(#274) 149 | * SSCursor no longer attempts to expire un-collected rows within __del__, 150 | delaying termination of an interrupted program; cleanup of uncollected 151 | rows is left to the Connection on next execute, which emits a 152 | warning at that time. (#287) 153 | * Support datetime and time with microsecond. (#303) 154 | * Use surrogateescape to format bytes on Python 3. 155 | * OperationalError raised from connect() have information about original 156 | exception. (#304) 157 | * `init_command` now support multi statement. 158 | * `Connection.escape()` method now accepts second argument compatible to 159 | MySQL-Python. 160 | 161 | ## 0.6.3 162 | * Fixed multiple result sets with SSCursor. 163 | * Fixed connection timeout. 164 | * Fixed literal set syntax to work on Py2.6. 165 | * Allow for mysql negative values with 0 hour timedelta. 166 | * Added Connection.begin(). 167 | 168 | ## 0.6.2 169 | * Fixed old password on Python 3. 170 | * Added support for bulk insert in Cursor.executemany(). 171 | * Added support for microseconds in datetimes and dates before 1900. 172 | * Several other bug fixes. 173 | 174 | ## 0.6.1 175 | * Added cursor._last_executed for MySQLdb compatibility 176 | * Cursor.fetchall() and .fetchmany now return list, not tuple 177 | * Allow "length of auth-plugin-data" = 0 178 | * Cursor.connection references connection object without weakref 179 | 180 | ## 0.6 181 | * Improved Py3k support 182 | * Improved PyPy support 183 | * Added IPv6 support 184 | * Added Thing2Literal for Django/MySQLdb compatibility 185 | * Removed errorhandler 186 | * Fixed GC errors 187 | * Improved test suite 188 | * Many bug fixes 189 | * Many performance improvements 190 | 191 | ## 0.4 192 | * Miscellaneous bug fixes 193 | * Implementation of SSL support 194 | * Implementation of kill() 195 | * Cleaned up charset functionality 196 | * Fixed BIT type handling 197 | * Connections raise exceptions after they are close()'d 198 | * Full Py3k and unicode support 199 | 200 | ## 0.3 201 | * Implemented most of the extended DBAPI 2.0 spec including callproc() 202 | * Fixed error handling to include the message from the server and support 203 | multiple protocol versions. 204 | * Implemented ping() 205 | * Implemented unicode support (probably needs better testing) 206 | * Removed DeprecationWarnings 207 | * Ran against the MySQLdb unit tests to check for bugs 208 | * Added support for client_flag, charset, sql_mode, read_default_file, 209 | use_unicode, cursorclass, init_command, and connect_timeout. 210 | * Refactoring for some more compatibility with MySQLdb including a fake 211 | pymysql.version_info attribute. 212 | * Now runs with no warnings with the -3 command-line switch 213 | * Added test cases for all outstanding tickets and closed most of them. 214 | * Basic Jython support added. 215 | * Fixed empty result sets bug. 216 | * Integrated new unit tests and refactored the example into one. 217 | * Fixed bug with decimal conversion. 218 | * Fixed string encoding bug. Now unicode and binary data work! 219 | * Added very basic docstrings. 220 | 221 | ## 0.2 222 | * Changed connection parameter name 'password' to 'passwd' 223 | to make it more plugin replaceable for the other mysql clients. 224 | * Changed pack()/unpack() calls so it runs on 64 bit OSes too. 225 | * Added support for unix_socket. 226 | * Added support for no password. 227 | * Renamed decorders to decoders. 228 | * Better handling of non-existing decoder. 
229 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010, 2013 PyMySQL contributors 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst LICENSE CHANGELOG 2 | include runtests.py tox.ini 3 | include example.py 4 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://img.shields.io/badge/license-MIT-blue.svg 2 | :target: https://github.com/dvrhax/uPyMySQL/blob/master/LICENSE 3 | 4 | 5 | uPyMySQL 6 | ======== 7 | 8 | .. contents:: Table of Contents 9 | :local: 10 | 11 | This package contains a rough hack of PyMySQL, the pure-Python MySQL client library. The goal of uPyMySQL 12 | is to be a drop-in replacement for MySQLdb that works on uPython (MicroPython). 13 | 14 | NOTE: PyMySQL doesn't support the low-level APIs that `_mysql` provides, like `data_seek`, 15 | `store_result`, and `use_result`. You should use the high-level APIs defined in `PEP 249`_. 16 | But some APIs like `autocommit` and `ping` are supported because `PEP 249`_ doesn't cover 17 | their use case. The same applies to uPyMySQL. 18 | 19 | .. _`PEP 249`: https://www.python.org/dev/peps/pep-0249/ 20 | 21 | Requirements 22 | ------------- 23 | 24 | * uPython_ 25 | 26 | * MySQL Server -- one of the following: 27 | 28 | - MySQL_ >= 4.1 (tested with only 5.5~) 29 | - MariaDB_ >= 5.1 30 | 31 | .. _uPython: https://micropython.org/ 32 | .. _MySQL: http://www.mysql.com/ 33 | .. _MariaDB: https://mariadb.org/ 34 | 35 | 36 | Installation 37 | ------------ 38 | 39 | Clone the repository from Git and copy the ``upymysql`` folder onto your microcontroller. 40 | Copying the package directly onto the microcontroller will most likely fail; 41 | you will probably need to recompile MicroPython with ``upymysql`` built into the firmware. 42 | 43 | 44 | Documentation 45 | ------------- 46 | 47 | You're pretty much looking at it. 48 | 49 | Example 50 | ------- 51 | 52 | No guarantees that these work yet: 53 | 54 | The following examples make use of a simple table 55 | 56 | .. 
code:: sql 57 | 58 | CREATE TABLE `users` ( 59 | `id` int(11) NOT NULL AUTO_INCREMENT, 60 | `email` varchar(255) COLLATE utf8_bin NOT NULL, 61 | `password` varchar(255) COLLATE utf8_bin NOT NULL, 62 | PRIMARY KEY (`id`) 63 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin 64 | AUTO_INCREMENT=1 ; 65 | 66 | 67 | .. code:: python 68 | 69 | import upymysql 70 | import upymysql.cursors 71 | 72 | # Connect to the database 73 | connection = upymysql.connect(host='localhost', 74 | user='user', 75 | password='passwd', 76 | db='db', 77 | charset='utf8mb4', 78 | cursorclass=upymysql.cursors.DictCursor) 79 | 80 | try: 81 | with connection.cursor() as cursor: 82 | # Create a new record 83 | sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)" 84 | cursor.execute(sql, ('webmaster@python.org', 'very-secret')) 85 | 86 | # connection is not autocommit by default. So you must commit to save 87 | # your changes. 88 | connection.commit() 89 | 90 | with connection.cursor() as cursor: 91 | # Read a single record 92 | sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s" 93 | cursor.execute(sql, ('webmaster@python.org',)) 94 | result = cursor.fetchone() 95 | print(result) 96 | finally: 97 | connection.close() 98 | 99 | This example will print: 100 | 101 | .. code:: python 102 | 103 | {'password': 'very-secret', 'id': 1} 104 | 105 | 106 | Resources 107 | --------- 108 | 109 | DB-API 2.0: http://www.python.org/dev/peps/pep-0249 110 | 111 | MySQL Reference Manuals: http://dev.mysql.com/doc/ 112 | 113 | MySQL client/server protocol: 114 | http://dev.mysql.com/doc/internals/en/client-server-protocol.html 115 | 116 | PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users 117 | 118 | PyMySQL Github site: https://github.com/PyMySQL/PyMySQL 119 | 120 | uPython: https://micropython.org/ 121 | 122 | License 123 | ------- 124 | 125 | PyMySQL is released under the MIT License. See LICENSE for more information. 126 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PyMySQL.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PyMySQL.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 
96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/PyMySQL" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PyMySQL" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
178 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source 10 | set I18NSPHINXOPTS=%SPHINXOPTS% source 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 
100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PyMySQL.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PyMySQL.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 
214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # PyMySQL documentation build configuration file, created by 4 | # sphinx-quickstart on Tue May 17 12:01:11 2016. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | sys.path.insert(0, os.path.abspath('../../')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | 'sphinx.ext.autodoc', 33 | 'sphinx.ext.intersphinx', 34 | ] 35 | 36 | # Add any paths that contain templates here, relative to this directory. 37 | templates_path = ['_templates'] 38 | 39 | # The suffix of source filenames. 40 | source_suffix = '.rst' 41 | 42 | # The encoding of source files. 43 | #source_encoding = 'utf-8-sig' 44 | 45 | # The master toctree document. 46 | master_doc = 'index' 47 | 48 | # General information about the project. 49 | project = u'PyMySQL' 50 | copyright = u'2016, Yutaka Matsubara and GitHub contributors' 51 | 52 | # The version info for the project you're documenting, acts as replacement for 53 | # |version| and |release|, also used in various other places throughout the 54 | # built documents. 55 | # 56 | # The short X.Y version. 57 | version = '0.7' 58 | # The full version, including alpha/beta/rc tags. 59 | release = '0.7.2' 60 | 61 | # The language for content autogenerated by Sphinx. Refer to documentation 62 | # for a list of supported languages. 63 | #language = None 64 | 65 | # There are two options for replacing |today|: either, you set today to some 66 | # non-false value, then it is used: 67 | #today = '' 68 | # Else, today_fmt is used as the format for a strftime call. 
69 | #today_fmt = '%B %d, %Y' 70 | 71 | # List of patterns, relative to source directory, that match files and 72 | # directories to ignore when looking for source files. 73 | exclude_patterns = [] 74 | 75 | # The reST default role (used for this markup: `text`) to use for all 76 | # documents. 77 | #default_role = None 78 | 79 | # If true, '()' will be appended to :func: etc. cross-reference text. 80 | #add_function_parentheses = True 81 | 82 | # If true, the current module name will be prepended to all description 83 | # unit titles (such as .. function::). 84 | #add_module_names = True 85 | 86 | # If true, sectionauthor and moduleauthor directives will be shown in the 87 | # output. They are ignored by default. 88 | #show_authors = False 89 | 90 | # The name of the Pygments (syntax highlighting) style to use. 91 | pygments_style = 'sphinx' 92 | 93 | # A list of ignored prefixes for module index sorting. 94 | #modindex_common_prefix = [] 95 | 96 | # If true, keep warnings as "system message" paragraphs in the built documents. 97 | #keep_warnings = False 98 | 99 | 100 | # -- Options for HTML output ---------------------------------------------- 101 | 102 | # The theme to use for HTML and HTML Help pages. See the documentation for 103 | # a list of builtin themes. 104 | html_theme = 'default' 105 | 106 | # Theme options are theme-specific and customize the look and feel of a theme 107 | # further. For a list of options available for each theme, see the 108 | # documentation. 109 | #html_theme_options = {} 110 | 111 | # Add any paths that contain custom themes here, relative to this directory. 112 | #html_theme_path = [] 113 | 114 | # The name for this set of Sphinx documents. If None, it defaults to 115 | # " v documentation". 116 | #html_title = None 117 | 118 | # A shorter title for the navigation bar. Default is the same as html_title. 119 | #html_short_title = None 120 | 121 | # The name of an image file (relative to this directory) to place at the top 122 | # of the sidebar. 123 | #html_logo = None 124 | 125 | # The name of an image file (within the static path) to use as favicon of the 126 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 127 | # pixels large. 128 | #html_favicon = None 129 | 130 | # Add any paths that contain custom static files (such as style sheets) here, 131 | # relative to this directory. They are copied after the builtin static files, 132 | # so a file named "default.css" will overwrite the builtin "default.css". 133 | html_static_path = ['_static'] 134 | 135 | # Add any extra paths that contain custom files (such as robots.txt or 136 | # .htaccess) here, relative to this directory. These files are copied 137 | # directly to the root of the documentation. 138 | #html_extra_path = [] 139 | 140 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 141 | # using the given strftime format. 142 | #html_last_updated_fmt = '%b %d, %Y' 143 | 144 | # If true, SmartyPants will be used to convert quotes and dashes to 145 | # typographically correct entities. 146 | #html_use_smartypants = True 147 | 148 | # Custom sidebar templates, maps document names to template names. 149 | #html_sidebars = {} 150 | 151 | # Additional templates that should be rendered to pages, maps page names to 152 | # template names. 153 | #html_additional_pages = {} 154 | 155 | # If false, no module index is generated. 156 | #html_domain_indices = True 157 | 158 | # If false, no index is generated. 
159 | #html_use_index = True 160 | 161 | # If true, the index is split into individual pages for each letter. 162 | #html_split_index = False 163 | 164 | # If true, links to the reST sources are added to the pages. 165 | #html_show_sourcelink = True 166 | 167 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 168 | #html_show_sphinx = True 169 | 170 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 171 | #html_show_copyright = True 172 | 173 | # If true, an OpenSearch description file will be output, and all pages will 174 | # contain a tag referring to it. The value of this option must be the 175 | # base URL from which the finished HTML is served. 176 | #html_use_opensearch = '' 177 | 178 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 179 | #html_file_suffix = None 180 | 181 | # Output file base name for HTML help builder. 182 | htmlhelp_basename = 'PyMySQLdoc' 183 | 184 | 185 | # -- Options for LaTeX output --------------------------------------------- 186 | 187 | latex_elements = { 188 | # The paper size ('letterpaper' or 'a4paper'). 189 | #'papersize': 'letterpaper', 190 | 191 | # The font size ('10pt', '11pt' or '12pt'). 192 | #'pointsize': '10pt', 193 | 194 | # Additional stuff for the LaTeX preamble. 195 | #'preamble': '', 196 | } 197 | 198 | # Grouping the document tree into LaTeX files. List of tuples 199 | # (source start file, target name, title, 200 | # author, documentclass [howto, manual, or own class]). 201 | latex_documents = [ 202 | ('index', 'PyMySQL.tex', u'PyMySQL Documentation', 203 | u'Yutaka Matsubara and GitHub contributors', 'manual'), 204 | ] 205 | 206 | # The name of an image file (relative to this directory) to place at the top of 207 | # the title page. 208 | #latex_logo = None 209 | 210 | # For "manual" documents, if this is true, then toplevel headings are parts, 211 | # not chapters. 212 | #latex_use_parts = False 213 | 214 | # If true, show page references after internal links. 215 | #latex_show_pagerefs = False 216 | 217 | # If true, show URL addresses after external links. 218 | #latex_show_urls = False 219 | 220 | # Documents to append as an appendix to all manuals. 221 | #latex_appendices = [] 222 | 223 | # If false, no module index is generated. 224 | #latex_domain_indices = True 225 | 226 | 227 | # -- Options for manual page output --------------------------------------- 228 | 229 | # One entry per manual page. List of tuples 230 | # (source start file, name, description, authors, manual section). 231 | man_pages = [ 232 | ('index', 'pymysql', u'PyMySQL Documentation', 233 | [u'Yutaka Matsubara and GitHub contributors'], 1) 234 | ] 235 | 236 | # If true, show URL addresses after external links. 237 | #man_show_urls = False 238 | 239 | 240 | # -- Options for Texinfo output ------------------------------------------- 241 | 242 | # Grouping the document tree into Texinfo files. List of tuples 243 | # (source start file, target name, title, author, 244 | # dir menu entry, description, category) 245 | texinfo_documents = [ 246 | ('index', 'PyMySQL', u'PyMySQL Documentation', 247 | u'Yutaka Matsubara and GitHub contributors', 'PyMySQL', 'One line description of project.', 248 | 'Miscellaneous'), 249 | ] 250 | 251 | # Documents to append as an appendix to all manuals. 252 | #texinfo_appendices = [] 253 | 254 | # If false, no module index is generated. 255 | #texinfo_domain_indices = True 256 | 257 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 
258 | #texinfo_show_urls = 'footnote' 259 | 260 | # If true, do not generate a @detailmenu in the "Top" node's menu. 261 | #texinfo_no_detailmenu = False 262 | 263 | 264 | # Example configuration for intersphinx: refer to the Python standard library. 265 | intersphinx_mapping = {'http://docs.python.org/': None} 266 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to PyMySQL's documentation! 2 | =================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | 7 | user/index 8 | modules/index 9 | 10 | 11 | Indices and tables 12 | ================== 13 | 14 | * :ref:`genindex` 15 | * :ref:`modindex` 16 | * :ref:`search` 17 | 18 | -------------------------------------------------------------------------------- /docs/source/modules/connections.rst: -------------------------------------------------------------------------------- 1 | Connection Object 2 | ================= 3 | 4 | .. module:: pymysql.connections 5 | 6 | .. autoclass:: Connection 7 | :members: 8 | :exclude-members: DataError, DatabaseError, Error, InterfaceError, 9 | IntegrityError, InternalError, NotSupportedError, 10 | OperationalError, ProgrammingError, Warning, 11 | escape, literal, write_packet 12 | -------------------------------------------------------------------------------- /docs/source/modules/cursors.rst: -------------------------------------------------------------------------------- 1 | Cursor Objects 2 | ============== 3 | 4 | .. module:: pymysql.cursors 5 | 6 | .. autoclass:: Cursor 7 | :members: 8 | :exclude-members: DataError, DatabaseError, Error, InterfaceError, 9 | IntegrityError, InternalError, NotSupportedError, 10 | OperationalError, ProgrammingError, Warning 11 | 12 | .. autoclass:: SSCursor 13 | :members: 14 | 15 | .. autoclass:: DictCursor 16 | :members: 17 | 18 | .. autoclass:: SSDictCursor 19 | :members: 20 | -------------------------------------------------------------------------------- /docs/source/modules/index.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ------------- 3 | 4 | If you are looking for information on a specific function, class or 5 | method, this part of the documentation is for you. 6 | 7 | For more information, please read the `Python Database API specification 8 | `_. 9 | 10 | .. toctree:: 11 | :maxdepth: 2 12 | 13 | connections 14 | cursors 15 | -------------------------------------------------------------------------------- /docs/source/user/development.rst: -------------------------------------------------------------------------------- 1 | .. _development: 2 | 3 | =========== 4 | Development 5 | =========== 6 | 7 | You can help developing PyMySQL by `contributing on GitHub`_. 8 | 9 | .. _contributing on GitHub: https://github.com/PyMySQL/PyMySQL 10 | 11 | Building the documentation 12 | -------------------------- 13 | 14 | Go to the ``docs`` directory and run ``make html``. 
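For example, assuming Sphinx is installed and ``sphinx-build`` is on your ``PATH`` (the ``docs/Makefile`` checks for it), a typical build is::

    $ cd docs
    $ make html

The generated pages end up in ``docs/build/html``.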
15 | 16 | 17 | Test Suite 18 | ----------- 19 | 20 | If you would like to run the test suite, create the databases used for testing like this:: 21 | 22 | mysql -e 'create database test_pymysql DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;' 23 | mysql -e 'create database test_pymysql2 DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;' 24 | 25 | Then, copy the file ``.travis/database.json`` to ``pymysql/tests/databases.json`` 26 | and edit the new file to match your MySQL configuration:: 27 | 28 | $ cp .travis/database.json pymysql/tests/databases.json 29 | $ $EDITOR pymysql/tests/databases.json 30 | 31 | To run all the tests, execute the script ``runtests.py``:: 32 | 33 | $ python runtests.py 34 | 35 | A ``tox.ini`` file is also provided for conveniently running tests on multiple 36 | Python versions:: 37 | 38 | $ tox 39 | -------------------------------------------------------------------------------- /docs/source/user/examples.rst: -------------------------------------------------------------------------------- 1 | .. _examples: 2 | 3 | ======== 4 | Examples 5 | ======== 6 | 7 | .. _CRUD: 8 | 9 | CRUD 10 | ---- 11 | 12 | The following examples make use of a simple table 13 | 14 | .. code:: sql 15 | 16 | CREATE TABLE `users` ( 17 | `id` int(11) NOT NULL AUTO_INCREMENT, 18 | `email` varchar(255) COLLATE utf8_bin NOT NULL, 19 | `password` varchar(255) COLLATE utf8_bin NOT NULL, 20 | PRIMARY KEY (`id`) 21 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin 22 | AUTO_INCREMENT=1 ; 23 | 24 | 25 | .. code:: python 26 | 27 | import pymysql.cursors 28 | 29 | # Connect to the database 30 | connection = pymysql.connect(host='localhost', 31 | user='user', 32 | password='passwd', 33 | db='db', 34 | charset='utf8mb4', 35 | cursorclass=pymysql.cursors.DictCursor) 36 | 37 | try: 38 | with connection.cursor() as cursor: 39 | # Create a new record 40 | sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)" 41 | cursor.execute(sql, ('webmaster@python.org', 'very-secret')) 42 | 43 | # connection is not autocommit by default. So you must commit to save 44 | # your changes. 45 | connection.commit() 46 | 47 | with connection.cursor() as cursor: 48 | # Read a single record 49 | sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s" 50 | cursor.execute(sql, ('webmaster@python.org',)) 51 | result = cursor.fetchone() 52 | print(result) 53 | finally: 54 | connection.close() 55 | 56 | This example will print: 57 | 58 | .. code:: python 59 | 60 | {'password': 'very-secret', 'id': 1} 61 | -------------------------------------------------------------------------------- /docs/source/user/index.rst: -------------------------------------------------------------------------------- 1 | User Guide 2 | ------------ 3 | 4 | The PyMySQL user guide explains how to install PyMySQL and how to contribute to 5 | the library as a developer. 6 | 7 | 8 | .. toctree:: 9 | :maxdepth: 1 10 | 11 | installation 12 | examples 13 | resources 14 | development 15 | -------------------------------------------------------------------------------- /docs/source/user/installation.rst: -------------------------------------------------------------------------------- 1 | .. 
_installation: 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | The last stable release is available on PyPI and can be installed with ``pip``:: 8 | 9 | $ pip install PyMySQL 10 | 11 | Requirements 12 | ------------- 13 | 14 | * Python -- one of the following: 15 | 16 | - CPython_ >= 2.6 or >= 3.3 17 | - PyPy_ >= 4.0 18 | - IronPython_ 2.7 19 | 20 | * MySQL Server -- one of the following: 21 | 22 | - MySQL_ >= 4.1 (tested with only 5.5~) 23 | - MariaDB_ >= 5.1 24 | 25 | .. _CPython: http://www.python.org/ 26 | .. _PyPy: http://pypy.org/ 27 | .. _IronPython: http://ironpython.net/ 28 | .. _MySQL: http://www.mysql.com/ 29 | .. _MariaDB: https://mariadb.org/ 30 | -------------------------------------------------------------------------------- /docs/source/user/resources.rst: -------------------------------------------------------------------------------- 1 | .. _resources: 2 | 3 | ============ 4 | Resources 5 | ============ 6 | 7 | DB-API 2.0: http://www.python.org/dev/peps/pep-0249 8 | 9 | MySQL Reference Manuals: http://dev.mysql.com/doc/ 10 | 11 | MySQL client/server protocol: 12 | http://dev.mysql.com/doc/internals/en/client-server-protocol.html 13 | 14 | PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users 15 | -------------------------------------------------------------------------------- /example.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | 4 | import pymysql 5 | 6 | conn = pymysql.connect(host='localhost', port=3306, user='root', passwd='', db='mysql') 7 | 8 | cur = conn.cursor() 9 | 10 | cur.execute("SELECT Host,User FROM user") 11 | 12 | print(cur.description) 13 | 14 | print() 15 | 16 | for row in cur: 17 | print(row) 18 | 19 | cur.close() 20 | conn.close() 21 | -------------------------------------------------------------------------------- /runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import unittest2 3 | 4 | from pymysql._compat import PYPY, JYTHON, IRONPYTHON 5 | 6 | if not (PYPY or JYTHON or IRONPYTHON): 7 | import atexit 8 | import gc 9 | gc.set_debug(gc.DEBUG_UNCOLLECTABLE) 10 | 11 | @atexit.register 12 | def report_uncollectable(): 13 | import gc 14 | if not gc.garbage: 15 | print("No garbages!") 16 | return 17 | print('uncollectable objects') 18 | for obj in gc.garbage: 19 | print(obj) 20 | if hasattr(obj, '__dict__'): 21 | print(obj.__dict__) 22 | for ref in gc.get_referrers(obj): 23 | print("referrer:", ref) 24 | print('---') 25 | 26 | import pymysql.tests 27 | unittest2.main(pymysql.tests, verbosity=2) 28 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E226,E301,E701 3 | exclude = tests,build 4 | max-line-length = 119 5 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import io 3 | from setuptools import setup, find_packages 4 | 5 | version_tuple = __import__('pymysql').VERSION 6 | 7 | if version_tuple[3] is not None: 8 | version = "%d.%d.%d_%s" % version_tuple 9 | else: 10 | version = "%d.%d.%d" % version_tuple[:3] 11 | 12 | with io.open('./README.rst', encoding='utf-8') as f: 13 | readme = f.read() 14 | 15 | setup( 16 | name="PyMySQL", 17 
| version=version, 18 | url='https://github.com/PyMySQL/PyMySQL/', 19 | author='yutaka.matsubara', 20 | author_email='yutaka.matsubara@gmail.com', 21 | maintainer='INADA Naoki', 22 | maintainer_email='songofacandy@gmail.com', 23 | description='Pure Python MySQL Driver', 24 | long_description=readme, 25 | license="MIT", 26 | packages=find_packages(), 27 | classifiers=[ 28 | 'Development Status :: 5 - Production/Stable', 29 | 'Programming Language :: Python :: 2', 30 | 'Programming Language :: Python :: 2.7', 31 | 'Programming Language :: Python :: 3', 32 | 'Programming Language :: Python :: 3.4', 33 | 'Programming Language :: Python :: 3.5', 34 | 'Programming Language :: Python :: 3.6', 35 | 'Programming Language :: Python :: Implementation :: CPython', 36 | 'Programming Language :: Python :: Implementation :: PyPy', 37 | 'Intended Audience :: Developers', 38 | 'License :: OSI Approved :: MIT License', 39 | 'Topic :: Database', 40 | ], 41 | ) 42 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Sorted by alphabetical order 2 | from pymysql.tests.test_DictCursor import * 3 | from pymysql.tests.test_SSCursor import * 4 | from pymysql.tests.test_basic import * 5 | from pymysql.tests.test_connection import * 6 | from pymysql.tests.test_converters import * 7 | from pymysql.tests.test_cursor import * 8 | from pymysql.tests.test_err import * 9 | from pymysql.tests.test_issues import * 10 | from pymysql.tests.test_load_local import * 11 | from pymysql.tests.test_nextset import * 12 | from pymysql.tests.test_optionfile import * 13 | 14 | from pymysql.tests.thirdparty import * 15 | 16 | if __name__ == "__main__": 17 | import unittest2 18 | unittest2.main() 19 | -------------------------------------------------------------------------------- /tests/base.py: -------------------------------------------------------------------------------- 1 | import gc 2 | import json 3 | import os 4 | import re 5 | import warnings 6 | 7 | import unittest2 8 | 9 | import pymysql 10 | from .._compat import CPYTHON 11 | 12 | 13 | class PyMySQLTestCase(unittest2.TestCase): 14 | # You can specify your test environment creating a file named 15 | # "databases.json" or editing the `databases` variable below. 16 | fname = os.path.join(os.path.dirname(__file__), "databases.json") 17 | if os.path.exists(fname): 18 | with open(fname) as f: 19 | databases = json.load(f) 20 | else: 21 | databases = [ 22 | {"host":"localhost","user":"root", 23 | "passwd":"","db":"test_pymysql", "use_unicode": True, 'local_infile': True}, 24 | {"host":"localhost","user":"root","passwd":"","db":"test_pymysql2"}] 25 | 26 | def mysql_server_is(self, conn, version_tuple): 27 | """Return True if the given connection is on the version given or 28 | greater. 
29 | 30 | e.g.:: 31 | 32 | if self.mysql_server_is(conn, (5, 6, 4)): 33 | # do something for MySQL 5.6.4 and above 34 | """ 35 | server_version = conn.get_server_info() 36 | server_version_tuple = tuple( 37 | (int(dig) if dig is not None else 0) 38 | for dig in 39 | re.match(r'(\d+)\.(\d+)\.(\d+)', server_version).group(1, 2, 3) 40 | ) 41 | return server_version_tuple >= version_tuple 42 | 43 | def setUp(self): 44 | self.connections = [] 45 | for params in self.databases: 46 | self.connections.append(pymysql.connect(**params)) 47 | self.addCleanup(self._teardown_connections) 48 | 49 | def _teardown_connections(self): 50 | for connection in self.connections: 51 | connection.close() 52 | 53 | def safe_create_table(self, connection, tablename, ddl, cleanup=True): 54 | """create a table. 55 | 56 | Ensures any existing version of that table is first dropped. 57 | 58 | Also adds a cleanup rule to drop the table after the test 59 | completes. 60 | """ 61 | cursor = connection.cursor() 62 | 63 | with warnings.catch_warnings(): 64 | warnings.simplefilter("ignore") 65 | cursor.execute("drop table if exists `%s`" % (tablename,)) 66 | cursor.execute(ddl) 67 | cursor.close() 68 | if cleanup: 69 | self.addCleanup(self.drop_table, connection, tablename) 70 | 71 | def drop_table(self, connection, tablename): 72 | cursor = connection.cursor() 73 | with warnings.catch_warnings(): 74 | warnings.simplefilter("ignore") 75 | cursor.execute("drop table if exists `%s`" % (tablename,)) 76 | cursor.close() 77 | 78 | def safe_gc_collect(self): 79 | """Ensure cycles are collected via gc. 80 | 81 | Runs additional times on non-CPython platforms. 82 | 83 | """ 84 | gc.collect() 85 | if not CPYTHON: 86 | gc.collect() 87 | -------------------------------------------------------------------------------- /tests/data/load_local_warn_data.txt: -------------------------------------------------------------------------------- 1 | 1,2, 2 | 3,4, 3 | 5,6, 4 | 7,8, 5 | 1,2, 6 | 3,4, 7 | 5,6, 8 | ,8, 9 | 1,2, 10 | 3,4, 11 | 5,6, 12 | 7,8, 13 | 1,2, 14 | 3,4, 15 | 5,6, 16 | 7,8, 17 | 1,2, 18 | 3,4, 19 | 5,6, 20 | 7,8, 21 | 1,2, 22 | 3,4, 23 | 5,6, 24 | 7,8, 25 | 1,2, 26 | 3,4, 27 | 5,6, 28 | 7,8, 29 | 1,2, 30 | 3,4, 31 | 5,6, 32 | 7,8, 33 | 1,2, 34 | 3,4, 35 | 5,6, 36 | 7,8, 37 | 1,2, 38 | 3,4, 39 | 5,6, 40 | 7,8, 41 | 1,2, 42 | 3,4, 43 | 5,6, 44 | 7,8, 45 | 1,2, 46 | 3,4, 47 | 5,6, 48 | 7,8, 49 | 1,2, 50 | 3,4, 51 | -------------------------------------------------------------------------------- /tests/test_DictCursor.py: -------------------------------------------------------------------------------- 1 | from pymysql.tests import base 2 | import pymysql.cursors 3 | 4 | import datetime 5 | import warnings 6 | 7 | 8 | class TestDictCursor(base.PyMySQLTestCase): 9 | bob = {'name': 'bob', 'age': 21, 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56)} 10 | jim = {'name': 'jim', 'age': 56, 'DOB': datetime.datetime(1955, 5, 9, 13, 12, 45)} 11 | fred = {'name': 'fred', 'age': 100, 'DOB': datetime.datetime(1911, 9, 12, 1, 1, 1)} 12 | 13 | cursor_type = pymysql.cursors.DictCursor 14 | 15 | def setUp(self): 16 | super(TestDictCursor, self).setUp() 17 | self.conn = conn = self.connections[0] 18 | c = conn.cursor(self.cursor_type) 19 | 20 | # create a table ane some data to query 21 | with warnings.catch_warnings(): 22 | warnings.filterwarnings("ignore") 23 | c.execute("drop table if exists dictcursor") 24 | # include in filterwarnings since for unbuffered dict cursor warning for lack of table 25 | # will only be propagated at start of next 
execute() call 26 | c.execute("""CREATE TABLE dictcursor (name char(20), age int , DOB datetime)""") 27 | data = [("bob", 21, "1990-02-06 23:04:56"), 28 | ("jim", 56, "1955-05-09 13:12:45"), 29 | ("fred", 100, "1911-09-12 01:01:01")] 30 | c.executemany("insert into dictcursor values (%s,%s,%s)", data) 31 | 32 | def tearDown(self): 33 | c = self.conn.cursor() 34 | c.execute("drop table dictcursor") 35 | super(TestDictCursor, self).tearDown() 36 | 37 | def _ensure_cursor_expired(self, cursor): 38 | pass 39 | 40 | def test_DictCursor(self): 41 | bob, jim, fred = self.bob.copy(), self.jim.copy(), self.fred.copy() 42 | #all assert test compare to the structure as would come out from MySQLdb 43 | conn = self.conn 44 | c = conn.cursor(self.cursor_type) 45 | 46 | # try an update which should return no rows 47 | c.execute("update dictcursor set age=20 where name='bob'") 48 | bob['age'] = 20 49 | # pull back the single row dict for bob and check 50 | c.execute("SELECT * from dictcursor where name='bob'") 51 | r = c.fetchone() 52 | self.assertEqual(bob, r, "fetchone via DictCursor failed") 53 | self._ensure_cursor_expired(c) 54 | 55 | # same again, but via fetchall => tuple) 56 | c.execute("SELECT * from dictcursor where name='bob'") 57 | r = c.fetchall() 58 | self.assertEqual([bob], r, "fetch a 1 row result via fetchall failed via DictCursor") 59 | # same test again but iterate over the 60 | c.execute("SELECT * from dictcursor where name='bob'") 61 | for r in c: 62 | self.assertEqual(bob, r, "fetch a 1 row result via iteration failed via DictCursor") 63 | # get all 3 row via fetchall 64 | c.execute("SELECT * from dictcursor") 65 | r = c.fetchall() 66 | self.assertEqual([bob,jim,fred], r, "fetchall failed via DictCursor") 67 | #same test again but do a list comprehension 68 | c.execute("SELECT * from dictcursor") 69 | r = list(c) 70 | self.assertEqual([bob,jim,fred], r, "DictCursor should be iterable") 71 | # get all 2 row via fetchmany 72 | c.execute("SELECT * from dictcursor") 73 | r = c.fetchmany(2) 74 | self.assertEqual([bob, jim], r, "fetchmany failed via DictCursor") 75 | self._ensure_cursor_expired(c) 76 | 77 | def test_custom_dict(self): 78 | class MyDict(dict): pass 79 | 80 | class MyDictCursor(self.cursor_type): 81 | dict_type = MyDict 82 | 83 | keys = ['name', 'age', 'DOB'] 84 | bob = MyDict([(k, self.bob[k]) for k in keys]) 85 | jim = MyDict([(k, self.jim[k]) for k in keys]) 86 | fred = MyDict([(k, self.fred[k]) for k in keys]) 87 | 88 | cur = self.conn.cursor(MyDictCursor) 89 | cur.execute("SELECT * FROM dictcursor WHERE name='bob'") 90 | r = cur.fetchone() 91 | self.assertEqual(bob, r, "fetchone() returns MyDictCursor") 92 | self._ensure_cursor_expired(cur) 93 | 94 | cur.execute("SELECT * FROM dictcursor") 95 | r = cur.fetchall() 96 | self.assertEqual([bob, jim, fred], r, 97 | "fetchall failed via MyDictCursor") 98 | 99 | cur.execute("SELECT * FROM dictcursor") 100 | r = list(cur) 101 | self.assertEqual([bob, jim, fred], r, 102 | "list failed via MyDictCursor") 103 | 104 | cur.execute("SELECT * FROM dictcursor") 105 | r = cur.fetchmany(2) 106 | self.assertEqual([bob, jim], r, 107 | "list failed via MyDictCursor") 108 | self._ensure_cursor_expired(cur) 109 | 110 | 111 | class TestSSDictCursor(TestDictCursor): 112 | cursor_type = pymysql.cursors.SSDictCursor 113 | 114 | def _ensure_cursor_expired(self, cursor): 115 | list(cursor.fetchall_unbuffered()) 116 | 117 | if __name__ == "__main__": 118 | import unittest 119 | unittest.main() 120 | 
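The pattern these tests exercise is worth spelling out for readers skimming the dump: a cursor class can be passed per call to ``Connection.cursor()`` (as ``setUp`` does above) or set once via ``cursorclass=`` on ``pymysql.connect()``; ``DictCursor`` then returns each row as a dict keyed by column name, and ``SSDictCursor`` yields the same dicts while streaming rows from the server instead of buffering the whole result. A minimal sketch (the ``users`` table and its columns are made up for illustration)::

    import pymysql
    import pymysql.cursors

    conn = pymysql.connect(host='localhost', user='root', passwd='', db='test',
                           cursorclass=pymysql.cursors.DictCursor)
    with conn.cursor() as cur:
        # parameters are escaped by the driver; never interpolate them yourself
        cur.execute("SELECT name, age FROM users WHERE age > %s", (21,))
        for row in cur:
            # each row is a dict, e.g. {'name': 'bob', 'age': 22}
            print(row['name'], row['age'])
    conn.close()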
-------------------------------------------------------------------------------- /tests/test_SSCursor.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | try: 4 | from pymysql.tests import base 5 | import pymysql.cursors 6 | except Exception: 7 | # For local testing from top-level directory, without installing 8 | sys.path.append('../pymysql') 9 | from pymysql.tests import base 10 | import pymysql.cursors 11 | 12 | class TestSSCursor(base.PyMySQLTestCase): 13 | def test_SSCursor(self): 14 | affected_rows = 18446744073709551615 15 | 16 | conn = self.connections[0] 17 | data = [ 18 | ('America', '', 'America/Jamaica'), 19 | ('America', '', 'America/Los_Angeles'), 20 | ('America', '', 'America/Lima'), 21 | ('America', '', 'America/New_York'), 22 | ('America', '', 'America/Menominee'), 23 | ('America', '', 'America/Havana'), 24 | ('America', '', 'America/El_Salvador'), 25 | ('America', '', 'America/Costa_Rica'), 26 | ('America', '', 'America/Denver'), 27 | ('America', '', 'America/Detroit'),] 28 | 29 | try: 30 | cursor = conn.cursor(pymysql.cursors.SSCursor) 31 | 32 | # Create table 33 | cursor.execute(('CREATE TABLE tz_data (' 34 | 'region VARCHAR(64),' 35 | 'zone VARCHAR(64),' 36 | 'name VARCHAR(64))')) 37 | 38 | conn.begin() 39 | # Test INSERT 40 | for i in data: 41 | cursor.execute('INSERT INTO tz_data VALUES (%s, %s, %s)', i) 42 | self.assertEqual(conn.affected_rows(), 1, 'affected_rows does not match') 43 | conn.commit() 44 | 45 | # Test fetchone() 46 | iter = 0 47 | cursor.execute('SELECT * FROM tz_data') 48 | while True: 49 | row = cursor.fetchone() 50 | if row is None: 51 | break 52 | iter += 1 53 | 54 | # Test cursor.rowcount 55 | self.assertEqual(cursor.rowcount, affected_rows, 56 | 'cursor.rowcount != %s' % (str(affected_rows))) 57 | 58 | # Test cursor.rownumber 59 | self.assertEqual(cursor.rownumber, iter, 60 | 'cursor.rowcount != %s' % (str(iter))) 61 | 62 | # Test row came out the same as it went in 63 | self.assertEqual((row in data), True, 64 | 'Row not found in source data') 65 | 66 | # Test fetchall 67 | cursor.execute('SELECT * FROM tz_data') 68 | self.assertEqual(len(cursor.fetchall()), len(data), 69 | 'fetchall failed. Number of rows does not match') 70 | 71 | # Test fetchmany 72 | cursor.execute('SELECT * FROM tz_data') 73 | self.assertEqual(len(cursor.fetchmany(2)), 2, 74 | 'fetchmany failed. Number of rows does not match') 75 | 76 | # So MySQLdb won't throw "Commands out of sync" 77 | while True: 78 | res = cursor.fetchone() 79 | if res is None: 80 | break 81 | 82 | # Test update, affected_rows() 83 | cursor.execute('UPDATE tz_data SET zone = %s', ['Foo']) 84 | conn.commit() 85 | self.assertEqual(cursor.rowcount, len(data), 86 | 'Update failed. affected_rows != %s' % (str(len(data)))) 87 | 88 | # Test executemany 89 | cursor.executemany('INSERT INTO tz_data VALUES (%s, %s, %s)', data) 90 | self.assertEqual(cursor.rowcount, len(data), 91 | 'executemany failed. 
cursor.rowcount != %s' % (str(len(data)))) 92 | 93 | # Test multiple datasets 94 | cursor.execute('SELECT 1; SELECT 2; SELECT 3') 95 | self.assertListEqual(list(cursor), [(1, )]) 96 | self.assertTrue(cursor.nextset()) 97 | self.assertListEqual(list(cursor), [(2, )]) 98 | self.assertTrue(cursor.nextset()) 99 | self.assertListEqual(list(cursor), [(3, )]) 100 | self.assertFalse(cursor.nextset()) 101 | 102 | finally: 103 | cursor.execute('DROP TABLE tz_data') 104 | cursor.close() 105 | 106 | __all__ = ["TestSSCursor"] 107 | 108 | if __name__ == "__main__": 109 | import unittest 110 | unittest.main() 111 | -------------------------------------------------------------------------------- /tests/test_basic.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | import datetime 3 | import json 4 | import time 5 | import warnings 6 | 7 | from unittest2 import SkipTest 8 | 9 | from pymysql import util 10 | import pymysql.cursors 11 | from pymysql.tests import base 12 | from pymysql.err import ProgrammingError 13 | 14 | 15 | __all__ = ["TestConversion", "TestCursor", "TestBulkInserts"] 16 | 17 | 18 | class TestConversion(base.PyMySQLTestCase): 19 | def test_datatypes(self): 20 | """ test every data type """ 21 | conn = self.connections[0] 22 | c = conn.cursor() 23 | c.execute("create table test_datatypes (b bit, i int, l bigint, f real, s varchar(32), u varchar(32), bb blob, d date, dt datetime, ts timestamp, td time, t time, st datetime)") 24 | try: 25 | # insert values 26 | 27 | v = (True, -3, 123456789012, 5.7, "hello'\" world", u"Espa\xc3\xb1ol", "binary\x00data".encode(conn.charset), datetime.date(1988,2,2), datetime.datetime(2014, 5, 15, 7, 45, 57), datetime.timedelta(5,6), datetime.time(16,32), time.localtime()) 28 | c.execute("insert into test_datatypes (b,i,l,f,s,u,bb,d,dt,td,t,st) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", v) 29 | c.execute("select b,i,l,f,s,u,bb,d,dt,td,t,st from test_datatypes") 30 | r = c.fetchone() 31 | self.assertEqual(util.int2byte(1), r[0]) 32 | self.assertEqual(v[1:10], r[1:10]) 33 | self.assertEqual(datetime.timedelta(0, 60 * (v[10].hour * 60 + v[10].minute)), r[10]) 34 | self.assertEqual(datetime.datetime(*v[-1][:6]), r[-1]) 35 | 36 | c.execute("delete from test_datatypes") 37 | 38 | # check nulls 39 | c.execute("insert into test_datatypes (b,i,l,f,s,u,bb,d,dt,td,t,st) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", [None] * 12) 40 | c.execute("select b,i,l,f,s,u,bb,d,dt,td,t,st from test_datatypes") 41 | r = c.fetchone() 42 | self.assertEqual(tuple([None] * 12), r) 43 | 44 | c.execute("delete from test_datatypes") 45 | 46 | # check sequences type 47 | for seq_type in (tuple, list, set, frozenset): 48 | c.execute("insert into test_datatypes (i, l) values (2,4), (6,8), (10,12)") 49 | seq = seq_type([2,6]) 50 | c.execute("select l from test_datatypes where i in %s order by i", (seq,)) 51 | r = c.fetchall() 52 | self.assertEqual(((4,),(8,)), r) 53 | c.execute("delete from test_datatypes") 54 | 55 | finally: 56 | c.execute("drop table test_datatypes") 57 | 58 | def test_dict(self): 59 | """ test dict escaping """ 60 | conn = self.connections[0] 61 | c = conn.cursor() 62 | c.execute("create table test_dict (a integer, b integer, c integer)") 63 | try: 64 | c.execute("insert into test_dict (a,b,c) values (%(a)s, %(b)s, %(c)s)", {"a":1,"b":2,"c":3}) 65 | c.execute("select a,b,c from test_dict") 66 | self.assertEqual((1,2,3), c.fetchone()) 67 | finally: 68 | c.execute("drop table test_dict") 69 | 70 | def 
test_string(self): 71 | conn = self.connections[0] 72 | c = conn.cursor() 73 | c.execute("create table test_dict (a text)") 74 | test_value = "I am a test string" 75 | try: 76 | c.execute("insert into test_dict (a) values (%s)", test_value) 77 | c.execute("select a from test_dict") 78 | self.assertEqual((test_value,), c.fetchone()) 79 | finally: 80 | c.execute("drop table test_dict") 81 | 82 | def test_integer(self): 83 | conn = self.connections[0] 84 | c = conn.cursor() 85 | c.execute("create table test_dict (a integer)") 86 | test_value = 12345 87 | try: 88 | c.execute("insert into test_dict (a) values (%s)", test_value) 89 | c.execute("select a from test_dict") 90 | self.assertEqual((test_value,), c.fetchone()) 91 | finally: 92 | c.execute("drop table test_dict") 93 | 94 | def test_blob(self): 95 | """test binary data""" 96 | data = bytes(bytearray(range(256)) * 4) 97 | conn = self.connections[0] 98 | self.safe_create_table( 99 | conn, "test_blob", "create table test_blob (b blob)") 100 | 101 | with conn.cursor() as c: 102 | c.execute("insert into test_blob (b) values (%s)", (data,)) 103 | c.execute("select b from test_blob") 104 | self.assertEqual(data, c.fetchone()[0]) 105 | 106 | def test_untyped(self): 107 | """ test conversion of null, empty string """ 108 | conn = self.connections[0] 109 | c = conn.cursor() 110 | c.execute("select null,''") 111 | self.assertEqual((None,u''), c.fetchone()) 112 | c.execute("select '',null") 113 | self.assertEqual((u'',None), c.fetchone()) 114 | 115 | def test_timedelta(self): 116 | """ test timedelta conversion """ 117 | conn = self.connections[0] 118 | c = conn.cursor() 119 | c.execute("select time('12:30'), time('23:12:59'), time('23:12:59.05100'), time('-12:30'), time('-23:12:59'), time('-23:12:59.05100'), time('-00:30')") 120 | self.assertEqual((datetime.timedelta(0, 45000), 121 | datetime.timedelta(0, 83579), 122 | datetime.timedelta(0, 83579, 51000), 123 | -datetime.timedelta(0, 45000), 124 | -datetime.timedelta(0, 83579), 125 | -datetime.timedelta(0, 83579, 51000), 126 | -datetime.timedelta(0, 1800)), 127 | c.fetchone()) 128 | 129 | def test_datetime_microseconds(self): 130 | """ test datetime conversion w microseconds""" 131 | 132 | conn = self.connections[0] 133 | if not self.mysql_server_is(conn, (5, 6, 4)): 134 | raise SkipTest("target backend does not support microseconds") 135 | c = conn.cursor() 136 | dt = datetime.datetime(2013, 11, 12, 9, 9, 9, 123450) 137 | c.execute("create table test_datetime (id int, ts datetime(6))") 138 | try: 139 | c.execute( 140 | "insert into test_datetime values (%s, %s)", 141 | (1, dt) 142 | ) 143 | c.execute("select ts from test_datetime") 144 | self.assertEqual((dt,), c.fetchone()) 145 | finally: 146 | c.execute("drop table test_datetime") 147 | 148 | 149 | class TestCursor(base.PyMySQLTestCase): 150 | # this test case does not work quite right yet, however, 151 | # we substitute in None for the erroneous field which is 152 | # compatible with the DB-API 2.0 spec and has not broken 153 | # any unit tests for anything we've tried. 
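    # For reference, PEP 249 defines cursor.description as a sequence of
    # 7-item tuples, one per result column:
    #     (name, type_code, display_size, internal_size, precision, scale, null_ok)
    # Only name and type_code are mandatory; the spec allows None for the rest,
    # which is why substituting None as described above stays DB-API 2.0 compliant.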
154 | 155 | #def test_description(self): 156 | # """ test description attribute """ 157 | # # result is from MySQLdb module 158 | # r = (('Host', 254, 11, 60, 60, 0, 0), 159 | # ('User', 254, 16, 16, 16, 0, 0), 160 | # ('Password', 254, 41, 41, 41, 0, 0), 161 | # ('Select_priv', 254, 1, 1, 1, 0, 0), 162 | # ('Insert_priv', 254, 1, 1, 1, 0, 0), 163 | # ('Update_priv', 254, 1, 1, 1, 0, 0), 164 | # ('Delete_priv', 254, 1, 1, 1, 0, 0), 165 | # ('Create_priv', 254, 1, 1, 1, 0, 0), 166 | # ('Drop_priv', 254, 1, 1, 1, 0, 0), 167 | # ('Reload_priv', 254, 1, 1, 1, 0, 0), 168 | # ('Shutdown_priv', 254, 1, 1, 1, 0, 0), 169 | # ('Process_priv', 254, 1, 1, 1, 0, 0), 170 | # ('File_priv', 254, 1, 1, 1, 0, 0), 171 | # ('Grant_priv', 254, 1, 1, 1, 0, 0), 172 | # ('References_priv', 254, 1, 1, 1, 0, 0), 173 | # ('Index_priv', 254, 1, 1, 1, 0, 0), 174 | # ('Alter_priv', 254, 1, 1, 1, 0, 0), 175 | # ('Show_db_priv', 254, 1, 1, 1, 0, 0), 176 | # ('Super_priv', 254, 1, 1, 1, 0, 0), 177 | # ('Create_tmp_table_priv', 254, 1, 1, 1, 0, 0), 178 | # ('Lock_tables_priv', 254, 1, 1, 1, 0, 0), 179 | # ('Execute_priv', 254, 1, 1, 1, 0, 0), 180 | # ('Repl_slave_priv', 254, 1, 1, 1, 0, 0), 181 | # ('Repl_client_priv', 254, 1, 1, 1, 0, 0), 182 | # ('Create_view_priv', 254, 1, 1, 1, 0, 0), 183 | # ('Show_view_priv', 254, 1, 1, 1, 0, 0), 184 | # ('Create_routine_priv', 254, 1, 1, 1, 0, 0), 185 | # ('Alter_routine_priv', 254, 1, 1, 1, 0, 0), 186 | # ('Create_user_priv', 254, 1, 1, 1, 0, 0), 187 | # ('Event_priv', 254, 1, 1, 1, 0, 0), 188 | # ('Trigger_priv', 254, 1, 1, 1, 0, 0), 189 | # ('ssl_type', 254, 0, 9, 9, 0, 0), 190 | # ('ssl_cipher', 252, 0, 65535, 65535, 0, 0), 191 | # ('x509_issuer', 252, 0, 65535, 65535, 0, 0), 192 | # ('x509_subject', 252, 0, 65535, 65535, 0, 0), 193 | # ('max_questions', 3, 1, 11, 11, 0, 0), 194 | # ('max_updates', 3, 1, 11, 11, 0, 0), 195 | # ('max_connections', 3, 1, 11, 11, 0, 0), 196 | # ('max_user_connections', 3, 1, 11, 11, 0, 0)) 197 | # conn = self.connections[0] 198 | # c = conn.cursor() 199 | # c.execute("select * from mysql.user") 200 | # 201 | # self.assertEqual(r, c.description) 202 | 203 | def test_fetch_no_result(self): 204 | """ test a fetchone() with no rows """ 205 | conn = self.connections[0] 206 | c = conn.cursor() 207 | c.execute("create table test_nr (b varchar(32))") 208 | try: 209 | data = "pymysql" 210 | c.execute("insert into test_nr (b) values (%s)", (data,)) 211 | self.assertEqual(None, c.fetchone()) 212 | finally: 213 | c.execute("drop table test_nr") 214 | 215 | def test_aggregates(self): 216 | """ test aggregate functions """ 217 | conn = self.connections[0] 218 | c = conn.cursor() 219 | try: 220 | c.execute('create table test_aggregates (i integer)') 221 | for i in range(0, 10): 222 | c.execute('insert into test_aggregates (i) values (%s)', (i,)) 223 | c.execute('select sum(i) from test_aggregates') 224 | r, = c.fetchone() 225 | self.assertEqual(sum(range(0,10)), r) 226 | finally: 227 | c.execute('drop table test_aggregates') 228 | 229 | def test_single_tuple(self): 230 | """ test a single tuple """ 231 | conn = self.connections[0] 232 | c = conn.cursor() 233 | self.safe_create_table( 234 | conn, 'mystuff', 235 | "create table mystuff (id integer primary key)") 236 | c.execute("insert into mystuff (id) values (1)") 237 | c.execute("insert into mystuff (id) values (2)") 238 | c.execute("select id from mystuff where id in %s", ((1,),)) 239 | self.assertEqual([(1,)], list(c.fetchall())) 240 | c.close() 241 | 242 | def test_json(self): 243 | args = 
self.databases[0].copy() 244 | args["charset"] = "utf8mb4" 245 | conn = pymysql.connect(**args) 246 | if not self.mysql_server_is(conn, (5, 7, 0)): 247 | raise SkipTest("JSON type is not supported on MySQL <= 5.6") 248 | 249 | self.safe_create_table(conn, "test_json", """\ 250 | create table test_json ( 251 | id int not null, 252 | json JSON not null, 253 | primary key (id) 254 | );""") 255 | cur = conn.cursor() 256 | 257 | json_str = u'{"hello": "こんにちは"}' 258 | cur.execute("INSERT INTO test_json (id, `json`) values (42, %s)", (json_str,)) 259 | cur.execute("SELECT `json` from `test_json` WHERE `id`=42") 260 | res = cur.fetchone()[0] 261 | self.assertEqual(json.loads(res), json.loads(json_str)) 262 | 263 | cur.execute("SELECT CAST(%s AS JSON) AS x", (json_str,)) 264 | res = cur.fetchone()[0] 265 | self.assertEqual(json.loads(res), json.loads(json_str)) 266 | 267 | 268 | class TestBulkInserts(base.PyMySQLTestCase): 269 | 270 | cursor_type = pymysql.cursors.DictCursor 271 | 272 | def setUp(self): 273 | super(TestBulkInserts, self).setUp() 274 | self.conn = conn = self.connections[0] 275 | c = conn.cursor(self.cursor_type) 276 | 277 | # create a table ane some data to query 278 | self.safe_create_table(conn, 'bulkinsert', """\ 279 | CREATE TABLE bulkinsert 280 | ( 281 | id int(11), 282 | name char(20), 283 | age int, 284 | height int, 285 | PRIMARY KEY (id) 286 | ) 287 | """) 288 | 289 | def _verify_records(self, data): 290 | conn = self.connections[0] 291 | cursor = conn.cursor() 292 | cursor.execute("SELECT id, name, age, height from bulkinsert") 293 | result = cursor.fetchall() 294 | self.assertEqual(sorted(data), sorted(result)) 295 | 296 | def test_bulk_insert(self): 297 | conn = self.connections[0] 298 | cursor = conn.cursor() 299 | 300 | data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] 301 | cursor.executemany("insert into bulkinsert (id, name, age, height) " 302 | "values (%s,%s,%s,%s)", data) 303 | self.assertEqual( 304 | cursor._last_executed, bytearray( 305 | b"insert into bulkinsert (id, name, age, height) values " 306 | b"(0,'bob',21,123),(1,'jim',56,45),(2,'fred',100,180)")) 307 | cursor.execute('commit') 308 | self._verify_records(data) 309 | 310 | def test_bulk_insert_multiline_statement(self): 311 | conn = self.connections[0] 312 | cursor = conn.cursor() 313 | data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] 314 | cursor.executemany("""insert 315 | into bulkinsert (id, name, 316 | age, height) 317 | values (%s, 318 | %s , %s, 319 | %s ) 320 | """, data) 321 | self.assertEqual(cursor._last_executed.strip(), bytearray(b"""insert 322 | into bulkinsert (id, name, 323 | age, height) 324 | values (0, 325 | 'bob' , 21, 326 | 123 ),(1, 327 | 'jim' , 56, 328 | 45 ),(2, 329 | 'fred' , 100, 330 | 180 )""")) 331 | cursor.execute('commit') 332 | self._verify_records(data) 333 | 334 | def test_bulk_insert_single_record(self): 335 | conn = self.connections[0] 336 | cursor = conn.cursor() 337 | data = [(0, "bob", 21, 123)] 338 | cursor.executemany("insert into bulkinsert (id, name, age, height) " 339 | "values (%s,%s,%s,%s)", data) 340 | cursor.execute('commit') 341 | self._verify_records(data) 342 | 343 | def test_issue_288(self): 344 | """executemany should work with "insert ... 
on update" """ 345 | conn = self.connections[0] 346 | cursor = conn.cursor() 347 | data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] 348 | cursor.executemany("""insert 349 | into bulkinsert (id, name, 350 | age, height) 351 | values (%s, 352 | %s , %s, 353 | %s ) on duplicate key update 354 | age = values(age) 355 | """, data) 356 | self.assertEqual(cursor._last_executed.strip(), bytearray(b"""insert 357 | into bulkinsert (id, name, 358 | age, height) 359 | values (0, 360 | 'bob' , 21, 361 | 123 ),(1, 362 | 'jim' , 56, 363 | 45 ),(2, 364 | 'fred' , 100, 365 | 180 ) on duplicate key update 366 | age = values(age)""")) 367 | cursor.execute('commit') 368 | self._verify_records(data) 369 | 370 | def test_warnings(self): 371 | con = self.connections[0] 372 | cur = con.cursor() 373 | with warnings.catch_warnings(record=True) as ws: 374 | warnings.simplefilter("always") 375 | cur.execute("drop table if exists no_exists_table") 376 | self.assertEqual(len(ws), 1) 377 | self.assertEqual(ws[0].category, pymysql.Warning) 378 | if u"no_exists_table" not in str(ws[0].message): 379 | self.fail("'no_exists_table' not in %s" % (str(ws[0].message),)) 380 | -------------------------------------------------------------------------------- /tests/test_connection.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import sys 3 | import time 4 | import unittest2 5 | import pymysql 6 | from pymysql.tests import base 7 | from pymysql._compat import text_type 8 | 9 | 10 | class TempUser: 11 | def __init__(self, c, user, db, auth=None, authdata=None, password=None): 12 | self._c = c 13 | self._user = user 14 | self._db = db 15 | create = "CREATE USER " + user 16 | if password is not None: 17 | create += " IDENTIFIED BY '%s'" % password 18 | elif auth is not None: 19 | create += " IDENTIFIED WITH %s" % auth 20 | if authdata is not None: 21 | create += " AS '%s'" % authdata 22 | try: 23 | c.execute(create) 24 | self._created = True 25 | except pymysql.err.InternalError: 26 | # already exists - TODO need to check the same plugin applies 27 | self._created = False 28 | try: 29 | c.execute("GRANT SELECT ON %s.* TO %s" % (db, user)) 30 | self._grant = True 31 | except pymysql.err.InternalError: 32 | self._grant = False 33 | 34 | def __enter__(self): 35 | return self 36 | 37 | def __exit__(self, exc_type, exc_value, traceback): 38 | if self._grant: 39 | self._c.execute("REVOKE SELECT ON %s.* FROM %s" % (self._db, self._user)) 40 | if self._created: 41 | self._c.execute("DROP USER %s" % self._user) 42 | 43 | 44 | class TestAuthentication(base.PyMySQLTestCase): 45 | 46 | socket_auth = False 47 | socket_found = False 48 | two_questions_found = False 49 | three_attempts_found = False 50 | pam_found = False 51 | mysql_old_password_found = False 52 | sha256_password_found = False 53 | 54 | import os 55 | osuser = os.environ.get('USER') 56 | 57 | # socket auth requires the current user and for the connection to be a socket 58 | # rest do grants @localhost due to incomplete logic - TODO change to @% then 59 | db = base.PyMySQLTestCase.databases[0].copy() 60 | 61 | socket_auth = db.get('unix_socket') is not None \ 62 | and db.get('host') in ('localhost', '127.0.0.1') 63 | 64 | cur = pymysql.connect(**db).cursor() 65 | del db['user'] 66 | cur.execute("SHOW PLUGINS") 67 | for r in cur: 68 | if (r[1], r[2]) != (u'ACTIVE', u'AUTHENTICATION'): 69 | continue 70 | if r[3] == u'auth_socket.so': 71 | socket_plugin_name = r[0] 72 | socket_found = True 73 | elif 
r[3] == u'dialog_examples.so': 74 | if r[0] == 'two_questions': 75 | two_questions_found = True 76 | elif r[0] == 'three_attempts': 77 | three_attempts_found = True 78 | elif r[0] == u'pam': 79 | pam_found = True 80 | pam_plugin_name = r[3].split('.')[0] 81 | if pam_plugin_name == 'auth_pam': 82 | pam_plugin_name = 'pam' 83 | # MySQL: authentication_pam 84 | # https://dev.mysql.com/doc/refman/5.5/en/pam-authentication-plugin.html 85 | 86 | # MariaDB: pam 87 | # https://mariadb.com/kb/en/mariadb/pam-authentication-plugin/ 88 | 89 | # Names differ but functionality is close 90 | elif r[0] == u'mysql_old_password': 91 | mysql_old_password_found = True 92 | elif r[0] == u'sha256_password': 93 | sha256_password_found = True 94 | #else: 95 | # print("plugin: %r" % r[0]) 96 | 97 | def test_plugin(self): 98 | # Bit of an assumption that the current user is a native password 99 | self.assertEqual('mysql_native_password', self.connections[0]._auth_plugin_name) 100 | 101 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 102 | @unittest2.skipIf(socket_found, "socket plugin already installed") 103 | def testSocketAuthInstallPlugin(self): 104 | # needs plugin. lets install it. 105 | cur = self.connections[0].cursor() 106 | try: 107 | cur.execute("install plugin auth_socket soname 'auth_socket.so'") 108 | TestAuthentication.socket_found = True 109 | self.socket_plugin_name = 'auth_socket' 110 | self.realtestSocketAuth() 111 | except pymysql.err.InternalError: 112 | try: 113 | cur.execute("install soname 'auth_socket'") 114 | TestAuthentication.socket_found = True 115 | self.socket_plugin_name = 'unix_socket' 116 | self.realtestSocketAuth() 117 | except pymysql.err.InternalError: 118 | TestAuthentication.socket_found = False 119 | raise unittest2.SkipTest('we couldn\'t install the socket plugin') 120 | finally: 121 | if TestAuthentication.socket_found: 122 | cur.execute("uninstall plugin %s" % self.socket_plugin_name) 123 | 124 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 125 | @unittest2.skipUnless(socket_found, "no socket plugin") 126 | def testSocketAuth(self): 127 | self.realtestSocketAuth() 128 | 129 | def realtestSocketAuth(self): 130 | with TempUser(self.connections[0].cursor(), TestAuthentication.osuser + '@localhost', 131 | self.databases[0]['db'], self.socket_plugin_name) as u: 132 | c = pymysql.connect(user=TestAuthentication.osuser, **self.db) 133 | 134 | class Dialog(object): 135 | fail=False 136 | 137 | def __init__(self, con): 138 | self.fail=TestAuthentication.Dialog.fail 139 | pass 140 | 141 | def prompt(self, echo, prompt): 142 | if self.fail: 143 | self.fail=False 144 | return b'bad guess at a password' 145 | return self.m.get(prompt) 146 | 147 | class DialogHandler(object): 148 | 149 | def __init__(self, con): 150 | self.con=con 151 | 152 | def authenticate(self, pkt): 153 | while True: 154 | flag = pkt.read_uint8() 155 | echo = (flag & 0x06) == 0x02 156 | last = (flag & 0x01) == 0x01 157 | prompt = pkt.read_all() 158 | 159 | if prompt == b'Password, please:': 160 | self.con.write_packet(b'stillnotverysecret\0') 161 | else: 162 | self.con.write_packet(b'no idea what to do with this prompt\0') 163 | pkt = self.con._read_packet() 164 | pkt.check_error() 165 | if pkt.is_ok_packet() or last: 166 | break 167 | return pkt 168 | 169 | class DefectiveHandler(object): 170 | def __init__(self, con): 171 | self.con=con 172 | 173 | 174 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 175 | 
@unittest2.skipIf(two_questions_found, "two_questions plugin already installed") 176 | def testDialogAuthTwoQuestionsInstallPlugin(self): 177 | # needs plugin. lets install it. 178 | cur = self.connections[0].cursor() 179 | try: 180 | cur.execute("install plugin two_questions soname 'dialog_examples.so'") 181 | TestAuthentication.two_questions_found = True 182 | self.realTestDialogAuthTwoQuestions() 183 | except pymysql.err.InternalError: 184 | raise unittest2.SkipTest('we couldn\'t install the two_questions plugin') 185 | finally: 186 | if TestAuthentication.two_questions_found: 187 | cur.execute("uninstall plugin two_questions") 188 | 189 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 190 | @unittest2.skipUnless(two_questions_found, "no two questions auth plugin") 191 | def testDialogAuthTwoQuestions(self): 192 | self.realTestDialogAuthTwoQuestions() 193 | 194 | def realTestDialogAuthTwoQuestions(self): 195 | TestAuthentication.Dialog.fail=False 196 | TestAuthentication.Dialog.m = {b'Password, please:': b'notverysecret', 197 | b'Are you sure ?': b'yes, of course'} 198 | with TempUser(self.connections[0].cursor(), 'pymysql_2q@localhost', 199 | self.databases[0]['db'], 'two_questions', 'notverysecret') as u: 200 | with self.assertRaises(pymysql.err.OperationalError): 201 | pymysql.connect(user='pymysql_2q', **self.db) 202 | pymysql.connect(user='pymysql_2q', auth_plugin_map={b'dialog': TestAuthentication.Dialog}, **self.db) 203 | 204 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 205 | @unittest2.skipIf(three_attempts_found, "three_attempts plugin already installed") 206 | def testDialogAuthThreeAttemptsQuestionsInstallPlugin(self): 207 | # needs plugin. lets install it. 208 | cur = self.connections[0].cursor() 209 | try: 210 | cur.execute("install plugin three_attempts soname 'dialog_examples.so'") 211 | TestAuthentication.three_attempts_found = True 212 | self.realTestDialogAuthThreeAttempts() 213 | except pymysql.err.InternalError: 214 | raise unittest2.SkipTest('we couldn\'t install the three_attempts plugin') 215 | finally: 216 | if TestAuthentication.three_attempts_found: 217 | cur.execute("uninstall plugin three_attempts") 218 | 219 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 220 | @unittest2.skipUnless(three_attempts_found, "no three attempts plugin") 221 | def testDialogAuthThreeAttempts(self): 222 | self.realTestDialogAuthThreeAttempts() 223 | 224 | def realTestDialogAuthThreeAttempts(self): 225 | TestAuthentication.Dialog.m = {b'Password, please:': b'stillnotverysecret'} 226 | TestAuthentication.Dialog.fail=True # fail just once. 
We've got three attempts after all 227 | with TempUser(self.connections[0].cursor(), 'pymysql_3a@localhost', 228 | self.databases[0]['db'], 'three_attempts', 'stillnotverysecret') as u: 229 | pymysql.connect(user='pymysql_3a', auth_plugin_map={b'dialog': TestAuthentication.Dialog}, **self.db) 230 | pymysql.connect(user='pymysql_3a', auth_plugin_map={b'dialog': TestAuthentication.DialogHandler}, **self.db) 231 | with self.assertRaises(pymysql.err.OperationalError): 232 | pymysql.connect(user='pymysql_3a', auth_plugin_map={b'dialog': object}, **self.db) 233 | 234 | with self.assertRaises(pymysql.err.OperationalError): 235 | pymysql.connect(user='pymysql_3a', auth_plugin_map={b'dialog': TestAuthentication.DefectiveHandler}, **self.db) 236 | with self.assertRaises(pymysql.err.OperationalError): 237 | pymysql.connect(user='pymysql_3a', auth_plugin_map={b'notdialogplugin': TestAuthentication.Dialog}, **self.db) 238 | TestAuthentication.Dialog.m = {b'Password, please:': b'I do not know'} 239 | with self.assertRaises(pymysql.err.OperationalError): 240 | pymysql.connect(user='pymysql_3a', auth_plugin_map={b'dialog': TestAuthentication.Dialog}, **self.db) 241 | TestAuthentication.Dialog.m = {b'Password, please:': None} 242 | with self.assertRaises(pymysql.err.OperationalError): 243 | pymysql.connect(user='pymysql_3a', auth_plugin_map={b'dialog': TestAuthentication.Dialog}, **self.db) 244 | 245 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 246 | @unittest2.skipIf(pam_found, "pam plugin already installed") 247 | @unittest2.skipIf(os.environ.get('PASSWORD') is None, "PASSWORD env var required") 248 | @unittest2.skipIf(os.environ.get('PAMSERVICE') is None, "PAMSERVICE env var required") 249 | def testPamAuthInstallPlugin(self): 250 | # needs plugin. lets install it. 
251 | cur = self.connections[0].cursor() 252 | try: 253 | cur.execute("install plugin pam soname 'auth_pam.so'") 254 | TestAuthentication.pam_found = True 255 | self.realTestPamAuth() 256 | except pymysql.err.InternalError: 257 | raise unittest2.SkipTest('we couldn\'t install the auth_pam plugin') 258 | finally: 259 | if TestAuthentication.pam_found: 260 | cur.execute("uninstall plugin pam") 261 | 262 | 263 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 264 | @unittest2.skipUnless(pam_found, "no pam plugin") 265 | @unittest2.skipIf(os.environ.get('PASSWORD') is None, "PASSWORD env var required") 266 | @unittest2.skipIf(os.environ.get('PAMSERVICE') is None, "PAMSERVICE env var required") 267 | def testPamAuth(self): 268 | self.realTestPamAuth() 269 | 270 | def realTestPamAuth(self): 271 | db = self.db.copy() 272 | import os 273 | db['password'] = os.environ.get('PASSWORD') 274 | cur = self.connections[0].cursor() 275 | try: 276 | cur.execute('show grants for ' + TestAuthentication.osuser + '@localhost') 277 | grants = cur.fetchone()[0] 278 | cur.execute('drop user ' + TestAuthentication.osuser + '@localhost') 279 | except pymysql.OperationalError as e: 280 | # assuming the user doesn't exist which is ok too 281 | self.assertEqual(1045, e.args[0]) 282 | grants = None 283 | with TempUser(cur, TestAuthentication.osuser + '@localhost', 284 | self.databases[0]['db'], 'pam', os.environ.get('PAMSERVICE')) as u: 285 | try: 286 | c = pymysql.connect(user=TestAuthentication.osuser, **db) 287 | db['password'] = 'very bad guess at password' 288 | with self.assertRaises(pymysql.err.OperationalError): 289 | pymysql.connect(user=TestAuthentication.osuser, 290 | auth_plugin_map={b'mysql_cleartext_password': TestAuthentication.DefectiveHandler}, 291 | **self.db) 292 | except pymysql.OperationalError as e: 293 | self.assertEqual(1045, e.args[0]) 294 | # we had 'bad guess at password' work with pam. 
Well at least we get a permission denied here 295 | with self.assertRaises(pymysql.err.OperationalError): 296 | pymysql.connect(user=TestAuthentication.osuser, 297 | auth_plugin_map={b'mysql_cleartext_password': TestAuthentication.DefectiveHandler}, 298 | **self.db) 299 | if grants: 300 | # recreate the user 301 | cur.execute(grants) 302 | 303 | # select old_password("crummy p\tassword"); 304 | #| old_password("crummy p\tassword") | 305 | #| 2a01785203b08770 | 306 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 307 | @unittest2.skipUnless(mysql_old_password_found, "no mysql_old_password plugin") 308 | def testMySQLOldPasswordAuth(self): 309 | if self.mysql_server_is(self.connections[0], (5, 7, 0)): 310 | raise unittest2.SkipTest('Old passwords aren\'t supported in 5.7') 311 | # pymysql.err.OperationalError: (1045, "Access denied for user 'old_pass_user'@'localhost' (using password: YES)") 312 | # from login in MySQL-5.6 313 | if self.mysql_server_is(self.connections[0], (5, 6, 0)): 314 | raise unittest2.SkipTest('Old passwords don\'t authenticate in 5.6') 315 | db = self.db.copy() 316 | db['password'] = "crummy p\tassword" 317 | with self.connections[0] as c: 318 | # deprecated in 5.6 319 | if sys.version_info[0:2] >= (3,2) and self.mysql_server_is(self.connections[0], (5, 6, 0)): 320 | with self.assertWarns(pymysql.err.Warning) as cm: 321 | c.execute("SELECT OLD_PASSWORD('%s')" % db['password']) 322 | else: 323 | c.execute("SELECT OLD_PASSWORD('%s')" % db['password']) 324 | v = c.fetchone()[0] 325 | self.assertEqual(v, '2a01785203b08770') 326 | # only works in MariaDB and MySQL-5.6 - can't separate out by version 327 | #if self.mysql_server_is(self.connections[0], (5, 5, 0)): 328 | # with TempUser(c, 'old_pass_user@localhost', 329 | # self.databases[0]['db'], 'mysql_old_password', '2a01785203b08770') as u: 330 | # cur = pymysql.connect(user='old_pass_user', **db).cursor() 331 | # cur.execute("SELECT VERSION()") 332 | c.execute("SELECT @@secure_auth") 333 | secure_auth_setting = c.fetchone()[0] 334 | c.execute('set old_passwords=1') 335 | # pymysql.err.Warning: 'pre-4.1 password hash' is deprecated and will be removed in a future release. 
Please use post-4.1 password hash instead 336 | if sys.version_info[0:2] >= (3,2) and self.mysql_server_is(self.connections[0], (5, 6, 0)): 337 | with self.assertWarns(pymysql.err.Warning) as cm: 338 | c.execute('set global secure_auth=0') 339 | else: 340 | c.execute('set global secure_auth=0') 341 | with TempUser(c, 'old_pass_user@localhost', 342 | self.databases[0]['db'], password=db['password']) as u: 343 | cur = pymysql.connect(user='old_pass_user', **db).cursor() 344 | cur.execute("SELECT VERSION()") 345 | c.execute('set global secure_auth=%r' % secure_auth_setting) 346 | 347 | @unittest2.skipUnless(socket_auth, "connection to unix_socket required") 348 | @unittest2.skipUnless(sha256_password_found, "no sha256 password authentication plugin found") 349 | def testAuthSHA256(self): 350 | c = self.connections[0].cursor() 351 | with TempUser(c, 'pymysql_sha256@localhost', 352 | self.databases[0]['db'], 'sha256_password') as u: 353 | if self.mysql_server_is(self.connections[0], (5, 7, 0)): 354 | c.execute("SET PASSWORD FOR 'pymysql_sha256'@'localhost' ='Sh@256Pa33'") 355 | else: 356 | c.execute('SET old_passwords = 2') 357 | c.execute("SET PASSWORD FOR 'pymysql_sha256'@'localhost' = PASSWORD('Sh@256Pa33')") 358 | db = self.db.copy() 359 | db['password'] = "Sh@256Pa33" 360 | # not implemented yet so thows error 361 | with self.assertRaises(pymysql.err.OperationalError): 362 | pymysql.connect(user='pymysql_256', **db) 363 | 364 | class TestConnection(base.PyMySQLTestCase): 365 | 366 | def test_utf8mb4(self): 367 | """This test requires MySQL >= 5.5""" 368 | arg = self.databases[0].copy() 369 | arg['charset'] = 'utf8mb4' 370 | conn = pymysql.connect(**arg) 371 | 372 | def test_largedata(self): 373 | """Large query and response (>=16MB)""" 374 | cur = self.connections[0].cursor() 375 | cur.execute("SELECT @@max_allowed_packet") 376 | if cur.fetchone()[0] < 16*1024*1024 + 10: 377 | print("Set max_allowed_packet to bigger than 17MB") 378 | return 379 | t = 'a' * (16*1024*1024) 380 | cur.execute("SELECT '" + t + "'") 381 | assert cur.fetchone()[0] == t 382 | 383 | def test_autocommit(self): 384 | con = self.connections[0] 385 | self.assertFalse(con.get_autocommit()) 386 | 387 | cur = con.cursor() 388 | cur.execute("SET AUTOCOMMIT=1") 389 | self.assertTrue(con.get_autocommit()) 390 | 391 | con.autocommit(False) 392 | self.assertFalse(con.get_autocommit()) 393 | cur.execute("SELECT @@AUTOCOMMIT") 394 | self.assertEqual(cur.fetchone()[0], 0) 395 | 396 | def test_select_db(self): 397 | con = self.connections[0] 398 | current_db = self.databases[0]['db'] 399 | other_db = self.databases[1]['db'] 400 | 401 | cur = con.cursor() 402 | cur.execute('SELECT database()') 403 | self.assertEqual(cur.fetchone()[0], current_db) 404 | 405 | con.select_db(other_db) 406 | cur.execute('SELECT database()') 407 | self.assertEqual(cur.fetchone()[0], other_db) 408 | 409 | def test_connection_gone_away(self): 410 | """ 411 | http://dev.mysql.com/doc/refman/5.0/en/gone-away.html 412 | http://dev.mysql.com/doc/refman/5.0/en/error-messages-client.html#error_cr_server_gone_error 413 | """ 414 | con = self.connections[0] 415 | cur = con.cursor() 416 | cur.execute("SET wait_timeout=1") 417 | time.sleep(2) 418 | with self.assertRaises(pymysql.OperationalError) as cm: 419 | cur.execute("SELECT 1+1") 420 | # error occures while reading, not writing because of socket buffer. 
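        # (Client error 2006 is CR_SERVER_GONE_ERROR and 2013 is CR_SERVER_LOST;
        # which one is raised depends on whether the dead connection is detected
        # while sending the query or while reading the response.)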
421 | #self.assertEqual(cm.exception.args[0], 2006) 422 | self.assertIn(cm.exception.args[0], (2006, 2013)) 423 | 424 | def test_init_command(self): 425 | conn = pymysql.connect( 426 | init_command='SELECT "bar"; SELECT "baz"', 427 | **self.databases[0] 428 | ) 429 | c = conn.cursor() 430 | c.execute('select "foobar";') 431 | self.assertEqual(('foobar',), c.fetchone()) 432 | conn.close() 433 | with self.assertRaises(pymysql.err.Error): 434 | conn.ping(reconnect=False) 435 | 436 | def test_read_default_group(self): 437 | conn = pymysql.connect( 438 | read_default_group='client', 439 | **self.databases[0] 440 | ) 441 | self.assertTrue(conn.open) 442 | 443 | def test_context(self): 444 | with self.assertRaises(ValueError): 445 | c = pymysql.connect(**self.databases[0]) 446 | with c as cur: 447 | cur.execute('create table test ( a int )') 448 | c.begin() 449 | cur.execute('insert into test values ((1))') 450 | raise ValueError('pseudo abort') 451 | c.commit() 452 | c = pymysql.connect(**self.databases[0]) 453 | with c as cur: 454 | cur.execute('select count(*) from test') 455 | self.assertEqual(0, cur.fetchone()[0]) 456 | cur.execute('insert into test values ((1))') 457 | with c as cur: 458 | cur.execute('select count(*) from test') 459 | self.assertEqual(1,cur.fetchone()[0]) 460 | cur.execute('drop table test') 461 | 462 | def test_set_charset(self): 463 | c = pymysql.connect(**self.databases[0]) 464 | c.set_charset('utf8') 465 | # TODO validate setting here 466 | 467 | def test_defer_connect(self): 468 | import socket 469 | for db in self.databases: 470 | d = db.copy() 471 | try: 472 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 473 | sock.connect(d['unix_socket']) 474 | except KeyError: 475 | sock = socket.create_connection( 476 | (d.get('host', 'localhost'), d.get('port', 3306))) 477 | for k in ['unix_socket', 'host', 'port']: 478 | try: 479 | del d[k] 480 | except KeyError: 481 | pass 482 | 483 | c = pymysql.connect(defer_connect=True, **d) 484 | self.assertFalse(c.open) 485 | c.connect(sock) 486 | c.close() 487 | sock.close() 488 | 489 | @unittest2.skipUnless(sys.version_info[0:2] >= (3,2), "required py-3.2") 490 | def test_no_delay_warning(self): 491 | current_db = self.databases[0].copy() 492 | current_db['no_delay'] = True 493 | with self.assertWarns(DeprecationWarning) as cm: 494 | conn = pymysql.connect(**current_db) 495 | 496 | 497 | # A custom type and function to escape it 498 | class Foo(object): 499 | value = "bar" 500 | 501 | 502 | def escape_foo(x, d): 503 | return x.value 504 | 505 | 506 | class TestEscape(base.PyMySQLTestCase): 507 | def test_escape_string(self): 508 | con = self.connections[0] 509 | cur = con.cursor() 510 | 511 | self.assertEqual(con.escape("foo'bar"), "'foo\\'bar'") 512 | # added NO_AUTO_CREATE_USER as not including it in 5.7 generates warnings 513 | cur.execute("SET sql_mode='NO_BACKSLASH_ESCAPES,NO_AUTO_CREATE_USER'") 514 | self.assertEqual(con.escape("foo'bar"), "'foo''bar'") 515 | 516 | def test_escape_builtin_encoders(self): 517 | con = self.connections[0] 518 | cur = con.cursor() 519 | 520 | val = datetime.datetime(2012, 3, 4, 5, 6) 521 | self.assertEqual(con.escape(val, con.encoders), "'2012-03-04 05:06:00'") 522 | 523 | def test_escape_custom_object(self): 524 | con = self.connections[0] 525 | cur = con.cursor() 526 | 527 | mapping = {Foo: escape_foo} 528 | self.assertEqual(con.escape(Foo(), mapping), "bar") 529 | 530 | def test_escape_fallback_encoder(self): 531 | con = self.connections[0] 532 | cur = con.cursor() 533 | 534 | class 
Custom(str): 535 | pass 536 | 537 | mapping = {text_type: pymysql.escape_string} 538 | self.assertEqual(con.escape(Custom('foobar'), mapping), "'foobar'") 539 | 540 | def test_escape_no_default(self): 541 | con = self.connections[0] 542 | cur = con.cursor() 543 | 544 | self.assertRaises(TypeError, con.escape, 42, {}) 545 | 546 | def test_escape_dict_value(self): 547 | con = self.connections[0] 548 | cur = con.cursor() 549 | 550 | mapping = con.encoders.copy() 551 | mapping[Foo] = escape_foo 552 | self.assertEqual(con.escape({'foo': Foo()}, mapping), {'foo': "bar"}) 553 | 554 | def test_escape_list_item(self): 555 | con = self.connections[0] 556 | cur = con.cursor() 557 | 558 | mapping = con.encoders.copy() 559 | mapping[Foo] = escape_foo 560 | self.assertEqual(con.escape([Foo()], mapping), "(bar)") 561 | 562 | def test_previous_cursor_not_closed(self): 563 | con = self.connections[0] 564 | cur1 = con.cursor() 565 | cur1.execute("SELECT 1; SELECT 2") 566 | cur2 = con.cursor() 567 | cur2.execute("SELECT 3") 568 | self.assertEqual(cur2.fetchone()[0], 3) 569 | 570 | def test_commit_during_multi_result(self): 571 | con = self.connections[0] 572 | cur = con.cursor() 573 | cur.execute("SELECT 1; SELECT 2") 574 | con.commit() 575 | cur.execute("SELECT 3") 576 | self.assertEqual(cur.fetchone()[0], 3) 577 | -------------------------------------------------------------------------------- /tests/test_converters.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from unittest import TestCase 3 | 4 | from pymysql._compat import PY2 5 | from pymysql import converters 6 | 7 | 8 | __all__ = ["TestConverter"] 9 | 10 | 11 | class TestConverter(TestCase): 12 | 13 | def test_escape_string(self): 14 | self.assertEqual( 15 | converters.escape_string(u"foo\nbar"), 16 | u"foo\\nbar" 17 | ) 18 | 19 | if PY2: 20 | def test_escape_string_bytes(self): 21 | self.assertEqual( 22 | converters.escape_string(b"foo\nbar"), 23 | b"foo\\nbar" 24 | ) 25 | 26 | def test_convert_datetime(self): 27 | expected = datetime.datetime(2007, 2, 24, 23, 6, 20) 28 | dt = converters.convert_datetime('2007-02-24 23:06:20') 29 | self.assertEqual(dt, expected) 30 | 31 | def test_convert_datetime_with_fsp(self): 32 | expected = datetime.datetime(2007, 2, 24, 23, 6, 20, 511581) 33 | dt = converters.convert_datetime('2007-02-24 23:06:20.511581') 34 | self.assertEqual(dt, expected) 35 | 36 | def _test_convert_timedelta(self, with_negate=False, with_fsp=False): 37 | d = {'hours': 789, 'minutes': 12, 'seconds': 34} 38 | s = '%(hours)s:%(minutes)s:%(seconds)s' % d 39 | if with_fsp: 40 | d['microseconds'] = 511581 41 | s += '.%(microseconds)s' % d 42 | 43 | expected = datetime.timedelta(**d) 44 | if with_negate: 45 | expected = -expected 46 | s = '-' + s 47 | 48 | tdelta = converters.convert_timedelta(s) 49 | self.assertEqual(tdelta, expected) 50 | 51 | def test_convert_timedelta(self): 52 | self._test_convert_timedelta(with_negate=False, with_fsp=False) 53 | self._test_convert_timedelta(with_negate=True, with_fsp=False) 54 | 55 | def test_convert_timedelta_with_fsp(self): 56 | self._test_convert_timedelta(with_negate=False, with_fsp=True) 57 | self._test_convert_timedelta(with_negate=False, with_fsp=True) 58 | 59 | def test_convert_time(self): 60 | expected = datetime.time(23, 6, 20) 61 | time_obj = converters.convert_time('23:06:20') 62 | self.assertEqual(time_obj, expected) 63 | 64 | def test_convert_time_with_fsp(self): 65 | expected = datetime.time(23, 6, 20, 511581) 66 | time_obj = 
converters.convert_time('23:06:20.511581') 67 | self.assertEqual(time_obj, expected) 68 | -------------------------------------------------------------------------------- /tests/test_cursor.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | from pymysql.tests import base 4 | import pymysql.cursors 5 | 6 | class CursorTest(base.PyMySQLTestCase): 7 | def setUp(self): 8 | super(CursorTest, self).setUp() 9 | 10 | conn = self.connections[0] 11 | self.safe_create_table( 12 | conn, 13 | "test", "create table test (data varchar(10))", 14 | ) 15 | cursor = conn.cursor() 16 | cursor.execute( 17 | "insert into test (data) values " 18 | "('row1'), ('row2'), ('row3'), ('row4'), ('row5')") 19 | cursor.close() 20 | self.test_connection = pymysql.connect(**self.databases[0]) 21 | self.addCleanup(self.test_connection.close) 22 | 23 | def test_cleanup_rows_unbuffered(self): 24 | conn = self.test_connection 25 | cursor = conn.cursor(pymysql.cursors.SSCursor) 26 | 27 | cursor.execute("select * from test as t1, test as t2") 28 | for counter, row in enumerate(cursor): 29 | if counter > 10: 30 | break 31 | 32 | del cursor 33 | self.safe_gc_collect() 34 | 35 | c2 = conn.cursor() 36 | 37 | c2.execute("select 1") 38 | self.assertEqual(c2.fetchone(), (1,)) 39 | self.assertIsNone(c2.fetchone()) 40 | 41 | def test_cleanup_rows_buffered(self): 42 | conn = self.test_connection 43 | cursor = conn.cursor(pymysql.cursors.Cursor) 44 | 45 | cursor.execute("select * from test as t1, test as t2") 46 | for counter, row in enumerate(cursor): 47 | if counter > 10: 48 | break 49 | 50 | del cursor 51 | self.safe_gc_collect() 52 | 53 | c2 = conn.cursor() 54 | 55 | c2.execute("select 1") 56 | 57 | self.assertEqual( 58 | c2.fetchone(), (1,) 59 | ) 60 | self.assertIsNone(c2.fetchone()) 61 | 62 | def test_executemany(self): 63 | conn = self.test_connection 64 | cursor = conn.cursor(pymysql.cursors.Cursor) 65 | 66 | m = pymysql.cursors.RE_INSERT_VALUES.match("INSERT INTO TEST (ID, NAME) VALUES (%s, %s)") 67 | self.assertIsNotNone(m, 'error parse %s') 68 | self.assertEqual(m.group(3), '', 'group 3 not blank, bug in RE_INSERT_VALUES?') 69 | 70 | m = pymysql.cursors.RE_INSERT_VALUES.match("INSERT INTO TEST (ID, NAME) VALUES (%(id)s, %(name)s)") 71 | self.assertIsNotNone(m, 'error parse %(name)s') 72 | self.assertEqual(m.group(3), '', 'group 3 not blank, bug in RE_INSERT_VALUES?') 73 | 74 | m = pymysql.cursors.RE_INSERT_VALUES.match("INSERT INTO TEST (ID, NAME) VALUES (%(id_name)s, %(name)s)") 75 | self.assertIsNotNone(m, 'error parse %(id_name)s') 76 | self.assertEqual(m.group(3), '', 'group 3 not blank, bug in RE_INSERT_VALUES?') 77 | 78 | m = pymysql.cursors.RE_INSERT_VALUES.match("INSERT INTO TEST (ID, NAME) VALUES (%(id_name)s, %(name)s) ON duplicate update") 79 | self.assertIsNotNone(m, 'error parse %(id_name)s') 80 | self.assertEqual(m.group(3), ' ON duplicate update', 'group 3 not ON duplicate update, bug in RE_INSERT_VALUES?') 81 | 82 | # cursor._executed must bee "insert into test (data) values (0),(1),(2),(3),(4),(5),(6),(7),(8),(9)" 83 | # list args 84 | data = range(10) 85 | cursor.executemany("insert into test (data) values (%s)", data) 86 | self.assertTrue(cursor._executed.endswith(b",(7),(8),(9)"), 'execute many with %s not in one query') 87 | 88 | # dict args 89 | data_dict = [{'data': i} for i in range(10)] 90 | cursor.executemany("insert into test (data) values (%(data)s)", data_dict) 91 | self.assertTrue(cursor._executed.endswith(b",(7),(8),(9)"), 'execute many 
with %(data)s not in one query') 92 | 93 | # %% in column set 94 | cursor.execute("""\ 95 | CREATE TABLE percent_test ( 96 | `A%` INTEGER, 97 | `B%` INTEGER)""") 98 | try: 99 | q = "INSERT INTO percent_test (`A%%`, `B%%`) VALUES (%s, %s)" 100 | self.assertIsNotNone(pymysql.cursors.RE_INSERT_VALUES.match(q)) 101 | cursor.executemany(q, [(3, 4), (5, 6)]) 102 | self.assertTrue(cursor._executed.endswith(b"(3, 4),(5, 6)"), "executemany with %% not in one query") 103 | finally: 104 | cursor.execute("DROP TABLE IF EXISTS percent_test") 105 | -------------------------------------------------------------------------------- /tests/test_err.py: -------------------------------------------------------------------------------- 1 | import unittest2 2 | 3 | from pymysql import err 4 | 5 | 6 | __all__ = ["TestRaiseException"] 7 | 8 | 9 | class TestRaiseException(unittest2.TestCase): 10 | 11 | def test_raise_mysql_exception(self): 12 | data = b"\xff\x15\x04Access denied" 13 | with self.assertRaises(err.OperationalError) as cm: 14 | err.raise_mysql_exception(data) 15 | self.assertEqual(cm.exception.args, (1045, 'Access denied')) 16 | 17 | def test_raise_mysql_exception_client_protocol_41(self): 18 | data = b"\xff\x15\x04#28000Access denied" 19 | with self.assertRaises(err.OperationalError) as cm: 20 | err.raise_mysql_exception(data) 21 | self.assertEqual(cm.exception.args, (1045, 'Access denied')) 22 | -------------------------------------------------------------------------------- /tests/test_issues.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import time 3 | import warnings 4 | import sys 5 | 6 | import pymysql 7 | from pymysql import cursors 8 | from pymysql._compat import text_type 9 | from pymysql.tests import base 10 | import unittest2 11 | 12 | try: 13 | import imp 14 | reload = imp.reload 15 | except AttributeError: 16 | pass 17 | 18 | 19 | __all__ = ["TestOldIssues", "TestNewIssues", "TestGitHubIssues"] 20 | 21 | class TestOldIssues(base.PyMySQLTestCase): 22 | def test_issue_3(self): 23 | """ undefined methods datetime_or_None, date_or_None """ 24 | conn = self.connections[0] 25 | c = conn.cursor() 26 | with warnings.catch_warnings(): 27 | warnings.filterwarnings("ignore") 28 | c.execute("drop table if exists issue3") 29 | c.execute("create table issue3 (d date, t time, dt datetime, ts timestamp)") 30 | try: 31 | c.execute("insert into issue3 (d, t, dt, ts) values (%s,%s,%s,%s)", (None, None, None, None)) 32 | c.execute("select d from issue3") 33 | self.assertEqual(None, c.fetchone()[0]) 34 | c.execute("select t from issue3") 35 | self.assertEqual(None, c.fetchone()[0]) 36 | c.execute("select dt from issue3") 37 | self.assertEqual(None, c.fetchone()[0]) 38 | c.execute("select ts from issue3") 39 | self.assertTrue(isinstance(c.fetchone()[0], datetime.datetime)) 40 | finally: 41 | c.execute("drop table issue3") 42 | 43 | def test_issue_4(self): 44 | """ can't retrieve TIMESTAMP fields """ 45 | conn = self.connections[0] 46 | c = conn.cursor() 47 | with warnings.catch_warnings(): 48 | warnings.filterwarnings("ignore") 49 | c.execute("drop table if exists issue4") 50 | c.execute("create table issue4 (ts timestamp)") 51 | try: 52 | c.execute("insert into issue4 (ts) values (now())") 53 | c.execute("select ts from issue4") 54 | self.assertTrue(isinstance(c.fetchone()[0], datetime.datetime)) 55 | finally: 56 | c.execute("drop table issue4") 57 | 58 | def test_issue_5(self): 59 | """ query on information_schema.tables fails """ 60 | con = 
self.connections[0] 61 | cur = con.cursor() 62 | cur.execute("select * from information_schema.tables") 63 | 64 | def test_issue_6(self): 65 | """ exception: TypeError: ord() expected a character, but string of length 0 found """ 66 | # ToDo: this test requires access to db 'mysql'. 67 | kwargs = self.databases[0].copy() 68 | kwargs['db'] = "mysql" 69 | conn = pymysql.connect(**kwargs) 70 | c = conn.cursor() 71 | c.execute("select * from user") 72 | conn.close() 73 | 74 | def test_issue_8(self): 75 | """ Primary Key and Index error when selecting data """ 76 | conn = self.connections[0] 77 | c = conn.cursor() 78 | with warnings.catch_warnings(): 79 | warnings.filterwarnings("ignore") 80 | c.execute("drop table if exists test") 81 | c.execute("""CREATE TABLE `test` (`station` int(10) NOT NULL DEFAULT '0', `dh` 82 | datetime NOT NULL DEFAULT '2015-01-01 00:00:00', `echeance` int(1) NOT NULL 83 | DEFAULT '0', `me` double DEFAULT NULL, `mo` double DEFAULT NULL, PRIMARY 84 | KEY (`station`,`dh`,`echeance`)) ENGINE=MyISAM DEFAULT CHARSET=latin1;""") 85 | try: 86 | self.assertEqual(0, c.execute("SELECT * FROM test")) 87 | c.execute("ALTER TABLE `test` ADD INDEX `idx_station` (`station`)") 88 | self.assertEqual(0, c.execute("SELECT * FROM test")) 89 | finally: 90 | c.execute("drop table test") 91 | 92 | def test_issue_9(self): 93 | """ sets DeprecationWarning in Python 2.6 """ 94 | try: 95 | reload(pymysql) 96 | except DeprecationWarning: 97 | self.fail() 98 | 99 | def test_issue_13(self): 100 | """ can't handle large result fields """ 101 | conn = self.connections[0] 102 | cur = conn.cursor() 103 | with warnings.catch_warnings(): 104 | warnings.filterwarnings("ignore") 105 | cur.execute("drop table if exists issue13") 106 | try: 107 | cur.execute("create table issue13 (t text)") 108 | # ticket says 18k 109 | size = 18*1024 110 | cur.execute("insert into issue13 (t) values (%s)", ("x" * size,)) 111 | cur.execute("select t from issue13") 112 | # use assertTrue so that obscenely huge error messages don't print 113 | r = cur.fetchone()[0] 114 | self.assertTrue("x" * size == r) 115 | finally: 116 | cur.execute("drop table issue13") 117 | 118 | def test_issue_15(self): 119 | """ query should be expanded before perform character encoding """ 120 | conn = self.connections[0] 121 | c = conn.cursor() 122 | with warnings.catch_warnings(): 123 | warnings.filterwarnings("ignore") 124 | c.execute("drop table if exists issue15") 125 | c.execute("create table issue15 (t varchar(32))") 126 | try: 127 | c.execute("insert into issue15 (t) values (%s)", (u'\xe4\xf6\xfc',)) 128 | c.execute("select t from issue15") 129 | self.assertEqual(u'\xe4\xf6\xfc', c.fetchone()[0]) 130 | finally: 131 | c.execute("drop table issue15") 132 | 133 | def test_issue_16(self): 134 | """ Patch for string and tuple escaping """ 135 | conn = self.connections[0] 136 | c = conn.cursor() 137 | with warnings.catch_warnings(): 138 | warnings.filterwarnings("ignore") 139 | c.execute("drop table if exists issue16") 140 | c.execute("create table issue16 (name varchar(32) primary key, email varchar(32))") 141 | try: 142 | c.execute("insert into issue16 (name, email) values ('pete', 'floydophone')") 143 | c.execute("select email from issue16 where name=%s", ("pete",)) 144 | self.assertEqual("floydophone", c.fetchone()[0]) 145 | finally: 146 | c.execute("drop table issue16") 147 | 148 | @unittest2.skip("test_issue_17() requires a custom, legacy MySQL configuration and will not be run.") 149 | def test_issue_17(self): 150 | """could not connect mysql 
use passwod""" 151 | conn = self.connections[0] 152 | host = self.databases[0]["host"] 153 | db = self.databases[0]["db"] 154 | c = conn.cursor() 155 | 156 | # grant access to a table to a user with a password 157 | try: 158 | with warnings.catch_warnings(): 159 | warnings.filterwarnings("ignore") 160 | c.execute("drop table if exists issue17") 161 | c.execute("create table issue17 (x varchar(32) primary key)") 162 | c.execute("insert into issue17 (x) values ('hello, world!')") 163 | c.execute("grant all privileges on %s.issue17 to 'issue17user'@'%%' identified by '1234'" % db) 164 | conn.commit() 165 | 166 | conn2 = pymysql.connect(host=host, user="issue17user", passwd="1234", db=db) 167 | c2 = conn2.cursor() 168 | c2.execute("select x from issue17") 169 | self.assertEqual("hello, world!", c2.fetchone()[0]) 170 | finally: 171 | c.execute("drop table issue17") 172 | 173 | class TestNewIssues(base.PyMySQLTestCase): 174 | def test_issue_34(self): 175 | try: 176 | pymysql.connect(host="localhost", port=1237, user="root") 177 | self.fail() 178 | except pymysql.OperationalError as e: 179 | self.assertEqual(2003, e.args[0]) 180 | except Exception: 181 | self.fail() 182 | 183 | def test_issue_33(self): 184 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 185 | self.safe_create_table(conn, u'hei\xdfe', 186 | u'create table hei\xdfe (name varchar(32))') 187 | c = conn.cursor() 188 | c.execute(u"insert into hei\xdfe (name) values ('Pi\xdfata')") 189 | c.execute(u"select name from hei\xdfe") 190 | self.assertEqual(u"Pi\xdfata", c.fetchone()[0]) 191 | 192 | @unittest2.skip("This test requires manual intervention") 193 | def test_issue_35(self): 194 | conn = self.connections[0] 195 | c = conn.cursor() 196 | print("sudo killall -9 mysqld within the next 10 seconds") 197 | try: 198 | c.execute("select sleep(10)") 199 | self.fail() 200 | except pymysql.OperationalError as e: 201 | self.assertEqual(2013, e.args[0]) 202 | 203 | def test_issue_36(self): 204 | # connection 0 is super user, connection 1 isn't 205 | conn = self.connections[1] 206 | c = conn.cursor() 207 | c.execute("show processlist") 208 | kill_id = None 209 | for row in c.fetchall(): 210 | id = row[0] 211 | info = row[7] 212 | if info == "show processlist": 213 | kill_id = id 214 | break 215 | self.assertEqual(kill_id, conn.thread_id()) 216 | # now nuke the connection 217 | self.connections[0].kill(kill_id) 218 | # make sure this connection has broken 219 | try: 220 | c.execute("show tables") 221 | self.fail() 222 | except Exception: 223 | pass 224 | c.close() 225 | conn.close() 226 | 227 | # check the process list from the other connection 228 | try: 229 | # Wait since Travis-CI sometimes fail this test. 
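# KILL only flags the target thread for termination; the server removes it
# from the process list asynchronously, so the short sleep below gives it a
# moment before the list is re-checked.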
230 | time.sleep(0.1) 231 | 232 | c = self.connections[0].cursor() 233 | c.execute("show processlist") 234 | ids = [row[0] for row in c.fetchall()] 235 | self.assertFalse(kill_id in ids) 236 | finally: 237 | del self.connections[1] 238 | 239 | def test_issue_37(self): 240 | conn = self.connections[0] 241 | c = conn.cursor() 242 | self.assertEqual(1, c.execute("SELECT @foo")) 243 | self.assertEqual((None,), c.fetchone()) 244 | self.assertEqual(0, c.execute("SET @foo = 'bar'")) 245 | c.execute("set @foo = 'bar'") 246 | 247 | def test_issue_38(self): 248 | conn = self.connections[0] 249 | c = conn.cursor() 250 | datum = "a" * 1024 * 1023 # reduced size for most default mysql installs 251 | 252 | try: 253 | with warnings.catch_warnings(): 254 | warnings.filterwarnings("ignore") 255 | c.execute("drop table if exists issue38") 256 | c.execute("create table issue38 (id integer, data mediumblob)") 257 | c.execute("insert into issue38 values (1, %s)", (datum,)) 258 | finally: 259 | c.execute("drop table issue38") 260 | 261 | def disabled_test_issue_54(self): 262 | conn = self.connections[0] 263 | c = conn.cursor() 264 | with warnings.catch_warnings(): 265 | warnings.filterwarnings("ignore") 266 | c.execute("drop table if exists issue54") 267 | big_sql = "select * from issue54 where " 268 | big_sql += " and ".join("%d=%d" % (i,i) for i in range(0, 100000)) 269 | 270 | try: 271 | c.execute("create table issue54 (id integer primary key)") 272 | c.execute("insert into issue54 (id) values (7)") 273 | c.execute(big_sql) 274 | self.assertEqual(7, c.fetchone()[0]) 275 | finally: 276 | c.execute("drop table issue54") 277 | 278 | class TestGitHubIssues(base.PyMySQLTestCase): 279 | def test_issue_66(self): 280 | """ 'Connection' object has no attribute 'insert_id' """ 281 | conn = self.connections[0] 282 | c = conn.cursor() 283 | self.assertEqual(0, conn.insert_id()) 284 | try: 285 | with warnings.catch_warnings(): 286 | warnings.filterwarnings("ignore") 287 | c.execute("drop table if exists issue66") 288 | c.execute("create table issue66 (id integer primary key auto_increment, x integer)") 289 | c.execute("insert into issue66 (x) values (1)") 290 | c.execute("insert into issue66 (x) values (1)") 291 | self.assertEqual(2, conn.insert_id()) 292 | finally: 293 | c.execute("drop table issue66") 294 | 295 | def test_issue_79(self): 296 | """ Duplicate field overwrites the previous one in the result of DictCursor """ 297 | conn = self.connections[0] 298 | c = conn.cursor(pymysql.cursors.DictCursor) 299 | 300 | with warnings.catch_warnings(): 301 | warnings.filterwarnings("ignore") 302 | c.execute("drop table if exists a") 303 | c.execute("drop table if exists b") 304 | c.execute("""CREATE TABLE a (id int, value int)""") 305 | c.execute("""CREATE TABLE b (id int, value int)""") 306 | 307 | a=(1,11) 308 | b=(1,22) 309 | try: 310 | c.execute("insert into a values (%s, %s)", a) 311 | c.execute("insert into b values (%s, %s)", b) 312 | 313 | c.execute("SELECT * FROM a inner join b on a.id = b.id") 314 | r = c.fetchall()[0] 315 | self.assertEqual(r['id'], 1) 316 | self.assertEqual(r['value'], 11) 317 | self.assertEqual(r['b.value'], 22) 318 | finally: 319 | c.execute("drop table a") 320 | c.execute("drop table b") 321 | 322 | def test_issue_95(self): 323 | """ Leftover trailing OK packet for "CALL my_sp" queries """ 324 | conn = self.connections[0] 325 | cur = conn.cursor() 326 | with warnings.catch_warnings(): 327 | warnings.filterwarnings("ignore") 328 | cur.execute("DROP PROCEDURE IF EXISTS `foo`") 329 | 
cur.execute("""CREATE PROCEDURE `foo` () 330 | BEGIN 331 | SELECT 1; 332 | END""") 333 | try: 334 | cur.execute("""CALL foo()""") 335 | cur.execute("""SELECT 1""") 336 | self.assertEqual(cur.fetchone()[0], 1) 337 | finally: 338 | with warnings.catch_warnings(): 339 | warnings.filterwarnings("ignore") 340 | cur.execute("DROP PROCEDURE IF EXISTS `foo`") 341 | 342 | def test_issue_114(self): 343 | """ autocommit is not set after reconnecting with ping() """ 344 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 345 | conn.autocommit(False) 346 | c = conn.cursor() 347 | c.execute("""select @@autocommit;""") 348 | self.assertFalse(c.fetchone()[0]) 349 | conn.close() 350 | conn.ping() 351 | c.execute("""select @@autocommit;""") 352 | self.assertFalse(c.fetchone()[0]) 353 | conn.close() 354 | 355 | # Ensure autocommit() is still working 356 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 357 | c = conn.cursor() 358 | c.execute("""select @@autocommit;""") 359 | self.assertFalse(c.fetchone()[0]) 360 | conn.close() 361 | conn.ping() 362 | conn.autocommit(True) 363 | c.execute("""select @@autocommit;""") 364 | self.assertTrue(c.fetchone()[0]) 365 | conn.close() 366 | 367 | def test_issue_175(self): 368 | """ The number of fields returned by server is read in wrong way """ 369 | conn = self.connections[0] 370 | cur = conn.cursor() 371 | for length in (200, 300): 372 | columns = ', '.join('c{0} integer'.format(i) for i in range(length)) 373 | sql = 'create table test_field_count ({0})'.format(columns) 374 | try: 375 | cur.execute(sql) 376 | cur.execute('select * from test_field_count') 377 | assert len(cur.description) == length 378 | finally: 379 | with warnings.catch_warnings(): 380 | warnings.filterwarnings("ignore") 381 | cur.execute('drop table if exists test_field_count') 382 | 383 | def test_issue_321(self): 384 | """ Test iterable as query argument. """ 385 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 386 | self.safe_create_table( 387 | conn, "issue321", 388 | "create table issue321 (value_1 varchar(1), value_2 varchar(1))") 389 | 390 | sql_insert = "insert into issue321 (value_1, value_2) values (%s, %s)" 391 | sql_dict_insert = ("insert into issue321 (value_1, value_2) " 392 | "values (%(value_1)s, %(value_2)s)") 393 | sql_select = ("select * from issue321 where " 394 | "value_1 in %s and value_2=%s") 395 | data = [ 396 | [(u"a", ), u"\u0430"], 397 | [[u"b"], u"\u0430"], 398 | {"value_1": [[u"c"]], "value_2": u"\u0430"} 399 | ] 400 | cur = conn.cursor() 401 | self.assertEqual(cur.execute(sql_insert, data[0]), 1) 402 | self.assertEqual(cur.execute(sql_insert, data[1]), 1) 403 | self.assertEqual(cur.execute(sql_dict_insert, data[2]), 1) 404 | self.assertEqual( 405 | cur.execute(sql_select, [(u"a", u"b", u"c"), u"\u0430"]), 3) 406 | self.assertEqual(cur.fetchone(), (u"a", u"\u0430")) 407 | self.assertEqual(cur.fetchone(), (u"b", u"\u0430")) 408 | self.assertEqual(cur.fetchone(), (u"c", u"\u0430")) 409 | 410 | def test_issue_364(self): 411 | """ Test mixed unicode/binary arguments in executemany. 
""" 412 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 413 | self.safe_create_table( 414 | conn, "issue364", 415 | "create table issue364 (value_1 binary(3), value_2 varchar(3)) " 416 | "engine=InnoDB default charset=utf8") 417 | 418 | sql = "insert into issue364 (value_1, value_2) values (%s, %s)" 419 | usql = u"insert into issue364 (value_1, value_2) values (%s, %s)" 420 | values = [pymysql.Binary(b"\x00\xff\x00"), u"\xe4\xf6\xfc"] 421 | 422 | # test single insert and select 423 | cur = conn.cursor() 424 | cur.execute(sql, args=values) 425 | cur.execute("select * from issue364") 426 | self.assertEqual(cur.fetchone(), tuple(values)) 427 | 428 | # test single insert unicode query 429 | cur.execute(usql, args=values) 430 | 431 | # test multi insert and select 432 | cur.executemany(sql, args=(values, values, values)) 433 | cur.execute("select * from issue364") 434 | for row in cur.fetchall(): 435 | self.assertEqual(row, tuple(values)) 436 | 437 | # test multi insert with unicode query 438 | cur.executemany(usql, args=(values, values, values)) 439 | 440 | def test_issue_363(self): 441 | """ Test binary / geometry types. """ 442 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 443 | self.safe_create_table( 444 | conn, "issue363", 445 | "CREATE TABLE issue363 ( " 446 | "id INTEGER PRIMARY KEY, geom LINESTRING NOT NULL, " 447 | "SPATIAL KEY geom (geom)) " 448 | "ENGINE=MyISAM default charset=utf8") 449 | 450 | cur = conn.cursor() 451 | query = ("INSERT INTO issue363 (id, geom) VALUES" 452 | "(1998, GeomFromText('LINESTRING(1.1 1.1,2.2 2.2)'))") 453 | # From MySQL 5.7, ST_GeomFromText is added and GeomFromText is deprecated. 454 | if self.mysql_server_is(conn, (5, 7, 0)): 455 | with self.assertWarns(pymysql.err.Warning) as cm: 456 | cur.execute(query) 457 | else: 458 | cur.execute(query) 459 | 460 | # select WKT 461 | query = "SELECT AsText(geom) FROM issue363" 462 | if self.mysql_server_is(conn, (5, 7, 0)): 463 | with self.assertWarns(pymysql.err.Warning) as cm: 464 | cur.execute(query) 465 | else: 466 | cur.execute(query) 467 | row = cur.fetchone() 468 | self.assertEqual(row, ("LINESTRING(1.1 1.1,2.2 2.2)", )) 469 | 470 | # select WKB 471 | query = "SELECT AsBinary(geom) FROM issue363" 472 | if self.mysql_server_is(conn, (5, 7, 0)): 473 | with self.assertWarns(pymysql.err.Warning) as cm: 474 | cur.execute(query) 475 | else: 476 | cur.execute(query) 477 | row = cur.fetchone() 478 | self.assertEqual(row, 479 | (b"\x01\x02\x00\x00\x00\x02\x00\x00\x00" 480 | b"\x9a\x99\x99\x99\x99\x99\xf1?" 481 | b"\x9a\x99\x99\x99\x99\x99\xf1?" 
482 | b"\x9a\x99\x99\x99\x99\x99\x01@" 483 | b"\x9a\x99\x99\x99\x99\x99\x01@", )) 484 | 485 | # select internal binary 486 | cur.execute("SELECT geom FROM issue363") 487 | row = cur.fetchone() 488 | # don't assert the exact internal binary value, as it could 489 | # vary across implementations 490 | self.assertTrue(isinstance(row[0], bytes)) 491 | 492 | def test_issue_491(self): 493 | """ Test warning propagation """ 494 | conn = pymysql.connect(charset="utf8", **self.databases[0]) 495 | 496 | with warnings.catch_warnings(): 497 | # Ignore all warnings other than pymysql generated ones 498 | warnings.simplefilter("ignore") 499 | warnings.simplefilter("error", category=pymysql.Warning) 500 | 501 | # verify for both buffered and unbuffered cursor types 502 | for cursor_class in (cursors.Cursor, cursors.SSCursor): 503 | c = conn.cursor(cursor_class) 504 | try: 505 | c.execute("SELECT CAST('124b' AS SIGNED)") 506 | c.fetchall() 507 | except pymysql.Warning as e: 508 | # Warnings should have errorcode and string message, just like exceptions 509 | self.assertEqual(len(e.args), 2) 510 | self.assertEqual(e.args[0], 1292) 511 | self.assertTrue(isinstance(e.args[1], text_type)) 512 | else: 513 | self.fail("Should raise Warning") 514 | finally: 515 | c.close() 516 | -------------------------------------------------------------------------------- /tests/test_load_local.py: -------------------------------------------------------------------------------- 1 | from pymysql import cursors, OperationalError, Warning 2 | from pymysql.tests import base 3 | 4 | import os 5 | import warnings 6 | 7 | __all__ = ["TestLoadLocal"] 8 | 9 | 10 | class TestLoadLocal(base.PyMySQLTestCase): 11 | def test_no_file(self): 12 | """Test load local infile when the file does not exist""" 13 | conn = self.connections[0] 14 | c = conn.cursor() 15 | c.execute("CREATE TABLE test_load_local (a INTEGER, b INTEGER)") 16 | try: 17 | self.assertRaises( 18 | OperationalError, 19 | c.execute, 20 | ("LOAD DATA LOCAL INFILE 'no_data.txt' INTO TABLE " 21 | "test_load_local fields terminated by ','") 22 | ) 23 | finally: 24 | c.execute("DROP TABLE test_load_local") 25 | c.close() 26 | 27 | def test_load_file(self): 28 | """Test load local infile with a valid file""" 29 | conn = self.connections[0] 30 | c = conn.cursor() 31 | c.execute("CREATE TABLE test_load_local (a INTEGER, b INTEGER)") 32 | filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 33 | 'data', 34 | 'load_local_data.txt') 35 | try: 36 | c.execute( 37 | ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + 38 | "test_load_local FIELDS TERMINATED BY ','").format(filename) 39 | ) 40 | c.execute("SELECT COUNT(*) FROM test_load_local") 41 | self.assertEqual(22749, c.fetchone()[0]) 42 | finally: 43 | c.execute("DROP TABLE test_load_local") 44 | 45 | def test_unbuffered_load_file(self): 46 | """Test unbuffered load local infile with a valid file""" 47 | conn = self.connections[0] 48 | c = conn.cursor(cursors.SSCursor) 49 | c.execute("CREATE TABLE test_load_local (a INTEGER, b INTEGER)") 50 | filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 51 | 'data', 52 | 'load_local_data.txt') 53 | try: 54 | c.execute( 55 | ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + 56 | "test_load_local FIELDS TERMINATED BY ','").format(filename) 57 | ) 58 | c.execute("SELECT COUNT(*) FROM test_load_local") 59 | self.assertEqual(22749, c.fetchone()[0]) 60 | finally: 61 | c.close() 62 | conn.close() 63 | conn.connect() 64 | c = conn.cursor() 65 | c.execute("DROP TABLE 
test_load_local") 66 | 67 | def test_load_warnings(self): 68 | """Test load local infile produces the appropriate warnings""" 69 | conn = self.connections[0] 70 | c = conn.cursor() 71 | c.execute("CREATE TABLE test_load_local (a INTEGER, b INTEGER)") 72 | filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 73 | 'data', 74 | 'load_local_warn_data.txt') 75 | try: 76 | with warnings.catch_warnings(record=True) as w: 77 | warnings.simplefilter('always') 78 | c.execute( 79 | ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + 80 | "test_load_local FIELDS TERMINATED BY ','").format(filename) 81 | ) 82 | self.assertEqual(w[0].category, Warning) 83 | expected_message = "Incorrect integer value" 84 | if expected_message not in str(w[-1].message): 85 | self.fail("%r not in %r" % (expected_message, w[-1].message)) 86 | finally: 87 | c.execute("DROP TABLE test_load_local") 88 | c.close() 89 | 90 | 91 | if __name__ == "__main__": 92 | import unittest 93 | unittest.main() 94 | -------------------------------------------------------------------------------- /tests/test_nextset.py: -------------------------------------------------------------------------------- 1 | import unittest2 2 | 3 | from pymysql.tests import base 4 | from pymysql import util 5 | 6 | 7 | class TestNextset(base.PyMySQLTestCase): 8 | 9 | def setUp(self): 10 | super(TestNextset, self).setUp() 11 | self.con = self.connections[0] 12 | 13 | def test_nextset(self): 14 | cur = self.con.cursor() 15 | cur.execute("SELECT 1; SELECT 2;") 16 | self.assertEqual([(1,)], list(cur)) 17 | 18 | r = cur.nextset() 19 | self.assertTrue(r) 20 | 21 | self.assertEqual([(2,)], list(cur)) 22 | self.assertIsNone(cur.nextset()) 23 | 24 | def test_skip_nextset(self): 25 | cur = self.con.cursor() 26 | cur.execute("SELECT 1; SELECT 2;") 27 | self.assertEqual([(1,)], list(cur)) 28 | 29 | cur.execute("SELECT 42") 30 | self.assertEqual([(42,)], list(cur)) 31 | 32 | def test_ok_and_next(self): 33 | cur = self.con.cursor() 34 | cur.execute("SELECT 1; commit; SELECT 2;") 35 | self.assertEqual([(1,)], list(cur)) 36 | self.assertTrue(cur.nextset()) 37 | self.assertTrue(cur.nextset()) 38 | self.assertEqual([(2,)], list(cur)) 39 | self.assertFalse(bool(cur.nextset())) 40 | 41 | @unittest2.expectedFailure 42 | def test_multi_cursor(self): 43 | cur1 = self.con.cursor() 44 | cur2 = self.con.cursor() 45 | 46 | cur1.execute("SELECT 1; SELECT 2;") 47 | cur2.execute("SELECT 42") 48 | 49 | self.assertEqual([(1,)], list(cur1)) 50 | self.assertEqual([(42,)], list(cur2)) 51 | 52 | r = cur1.nextset() 53 | self.assertTrue(r) 54 | 55 | self.assertEqual([(2,)], list(cur1)) 56 | self.assertIsNone(cur1.nextset()) 57 | 58 | def test_multi_statement_warnings(self): 59 | cursor = self.con.cursor() 60 | 61 | try: 62 | cursor.execute('DROP TABLE IF EXISTS a; ' 63 | 'DROP TABLE IF EXISTS b;') 64 | except TypeError: 65 | self.fail() 66 | 67 | #TODO: How about SSCursor and nextset? 68 | # It's very hard to implement correctly... 
69 | -------------------------------------------------------------------------------- /tests/test_optionfile.py: -------------------------------------------------------------------------------- 1 | from pymysql.optionfile import Parser 2 | from unittest import TestCase 3 | from pymysql._compat import PY2 4 | 5 | try: 6 | from cStringIO import StringIO 7 | except ImportError: 8 | from io import StringIO 9 | 10 | 11 | __all__ = ['TestParser'] 12 | 13 | 14 | _cfg_file = (r""" 15 | [default] 16 | string = foo 17 | quoted = "bar" 18 | single_quoted = 'foobar' 19 | """) 20 | 21 | 22 | class TestParser(TestCase): 23 | 24 | def test_string(self): 25 | parser = Parser() 26 | if PY2: 27 | parser.readfp(StringIO(_cfg_file)) 28 | else: 29 | parser.read_file(StringIO(_cfg_file)) 30 | self.assertEqual(parser.get("default", "string"), "foo") 31 | self.assertEqual(parser.get("default", "quoted"), "bar") 32 | self.assertEqual(parser.get("default", "single_quoted"), "foobar") 33 | -------------------------------------------------------------------------------- /tests/thirdparty/__init__.py: -------------------------------------------------------------------------------- 1 | from .test_MySQLdb import * 2 | 3 | if __name__ == "__main__": 4 | try: 5 | import unittest2 as unittest 6 | except ImportError: 7 | import unittest 8 | unittest.main() 9 | -------------------------------------------------------------------------------- /tests/thirdparty/test_MySQLdb/__init__.py: -------------------------------------------------------------------------------- 1 | from .test_MySQLdb_capabilities import test_MySQLdb as test_capabilities 2 | from .test_MySQLdb_nonstandard import * 3 | from .test_MySQLdb_dbapi20 import test_MySQLdb as test_dbapi2 4 | 5 | if __name__ == "__main__": 6 | import unittest 7 | unittest.main() 8 | -------------------------------------------------------------------------------- /tests/thirdparty/test_MySQLdb/capabilities.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python -O 2 | """ Script to test database capabilities and the DB-API interface 3 | for functionality and memory leaks. 4 | 5 | Adapted from a script by M-A Lemburg. 
6 | 7 | """ 8 | import sys 9 | from time import time 10 | try: 11 | import unittest2 as unittest 12 | except ImportError: 13 | import unittest 14 | 15 | PY2 = sys.version_info[0] == 2 16 | 17 | class DatabaseTest(unittest.TestCase): 18 | 19 | db_module = None 20 | connect_args = () 21 | connect_kwargs = dict(use_unicode=True, charset="utf8") 22 | create_table_extra = "ENGINE=INNODB CHARACTER SET UTF8" 23 | rows = 10 24 | debug = False 25 | 26 | def setUp(self): 27 | db = self.db_module.connect(*self.connect_args, **self.connect_kwargs) 28 | self.connection = db 29 | self.cursor = db.cursor() 30 | self.BLOBText = ''.join([chr(i) for i in range(256)] * 100); 31 | if PY2: 32 | self.BLOBUText = unicode().join(unichr(i) for i in range(16834)) 33 | else: 34 | self.BLOBUText = "".join(chr(i) for i in range(16834)) 35 | data = bytearray(range(256)) * 16 36 | self.BLOBBinary = self.db_module.Binary(data) 37 | 38 | leak_test = True 39 | 40 | def tearDown(self): 41 | if self.leak_test: 42 | import gc 43 | del self.cursor 44 | orphans = gc.collect() 45 | self.assertFalse(orphans, "%d orphaned objects found after deleting cursor" % orphans) 46 | 47 | del self.connection 48 | orphans = gc.collect() 49 | self.assertFalse(orphans, "%d orphaned objects found after deleting connection" % orphans) 50 | 51 | def table_exists(self, name): 52 | try: 53 | self.cursor.execute('select * from %s where 1=0' % name) 54 | except Exception: 55 | return False 56 | else: 57 | return True 58 | 59 | def quote_identifier(self, ident): 60 | return '"%s"' % ident 61 | 62 | def new_table_name(self): 63 | i = id(self.cursor) 64 | while True: 65 | name = self.quote_identifier('tb%08x' % i) 66 | if not self.table_exists(name): 67 | return name 68 | i = i + 1 69 | 70 | def create_table(self, columndefs): 71 | 72 | """ Create a table using a list of column definitions given in 73 | columndefs. 74 | 75 | generator must be a function taking arguments (row_number, 76 | col_number) returning a suitable data object for insertion 77 | into the table. 
78 | 79 | """ 80 | self.table = self.new_table_name() 81 | self.cursor.execute('CREATE TABLE %s (%s) %s' % 82 | (self.table, 83 | ',\n'.join(columndefs), 84 | self.create_table_extra)) 85 | 86 | def check_data_integrity(self, columndefs, generator): 87 | # insert 88 | self.create_table(columndefs) 89 | insert_statement = ('INSERT INTO %s VALUES (%s)' % 90 | (self.table, 91 | ','.join(['%s'] * len(columndefs)))) 92 | data = [ [ generator(i,j) for j in range(len(columndefs)) ] 93 | for i in range(self.rows) ] 94 | if self.debug: 95 | print(data) 96 | self.cursor.executemany(insert_statement, data) 97 | self.connection.commit() 98 | # verify 99 | self.cursor.execute('select * from %s' % self.table) 100 | l = self.cursor.fetchall() 101 | if self.debug: 102 | print(l) 103 | self.assertEqual(len(l), self.rows) 104 | try: 105 | for i in range(self.rows): 106 | for j in range(len(columndefs)): 107 | self.assertEqual(l[i][j], generator(i,j)) 108 | finally: 109 | if not self.debug: 110 | self.cursor.execute('drop table %s' % (self.table)) 111 | 112 | def test_transactions(self): 113 | columndefs = ( 'col1 INT', 'col2 VARCHAR(255)') 114 | def generator(row, col): 115 | if col == 0: return row 116 | else: return ('%i' % (row%10))*255 117 | self.create_table(columndefs) 118 | insert_statement = ('INSERT INTO %s VALUES (%s)' % 119 | (self.table, 120 | ','.join(['%s'] * len(columndefs)))) 121 | data = [ [ generator(i,j) for j in range(len(columndefs)) ] 122 | for i in range(self.rows) ] 123 | self.cursor.executemany(insert_statement, data) 124 | # verify 125 | self.connection.commit() 126 | self.cursor.execute('select * from %s' % self.table) 127 | l = self.cursor.fetchall() 128 | self.assertEqual(len(l), self.rows) 129 | for i in range(self.rows): 130 | for j in range(len(columndefs)): 131 | self.assertEqual(l[i][j], generator(i,j)) 132 | delete_statement = 'delete from %s where col1=%%s' % self.table 133 | self.cursor.execute(delete_statement, (0,)) 134 | self.cursor.execute('select col1 from %s where col1=%s' % \ 135 | (self.table, 0)) 136 | l = self.cursor.fetchall() 137 | self.assertFalse(l, "DELETE didn't work") 138 | self.connection.rollback() 139 | self.cursor.execute('select col1 from %s where col1=%s' % \ 140 | (self.table, 0)) 141 | l = self.cursor.fetchall() 142 | self.assertTrue(len(l) == 1, "ROLLBACK didn't work") 143 | self.cursor.execute('drop table %s' % (self.table)) 144 | 145 | def test_truncation(self): 146 | columndefs = ( 'col1 INT', 'col2 VARCHAR(255)') 147 | def generator(row, col): 148 | if col == 0: return row 149 | else: return ('%i' % (row%10))*((255-self.rows//2)+row) 150 | self.create_table(columndefs) 151 | insert_statement = ('INSERT INTO %s VALUES (%s)' % 152 | (self.table, 153 | ','.join(['%s'] * len(columndefs)))) 154 | 155 | try: 156 | self.cursor.execute(insert_statement, (0, '0'*256)) 157 | except Warning: 158 | if self.debug: print(self.cursor.messages) 159 | except self.connection.DataError: 160 | pass 161 | else: 162 | self.fail("Over-long column did not generate warnings/exception with single insert") 163 | 164 | self.connection.rollback() 165 | 166 | try: 167 | for i in range(self.rows): 168 | data = [] 169 | for j in range(len(columndefs)): 170 | data.append(generator(i,j)) 171 | self.cursor.execute(insert_statement,tuple(data)) 172 | except Warning: 173 | if self.debug: print(self.cursor.messages) 174 | except self.connection.DataError: 175 | pass 176 | else: 177 | self.fail("Over-long columns did not generate warnings/exception with execute()") 178 | 179 | 
self.connection.rollback() 180 | 181 | try: 182 | data = [ [ generator(i,j) for j in range(len(columndefs)) ] 183 | for i in range(self.rows) ] 184 | self.cursor.executemany(insert_statement, data) 185 | except Warning: 186 | if self.debug: print(self.cursor.messages) 187 | except self.connection.DataError: 188 | pass 189 | else: 190 | self.fail("Over-long columns did not generate warnings/exception with executemany()") 191 | 192 | self.connection.rollback() 193 | self.cursor.execute('drop table %s' % (self.table)) 194 | 195 | def test_CHAR(self): 196 | # Character data 197 | def generator(row,col): 198 | return ('%i' % ((row+col) % 10)) * 255 199 | self.check_data_integrity( 200 | ('col1 char(255)','col2 char(255)'), 201 | generator) 202 | 203 | def test_INT(self): 204 | # Number data 205 | def generator(row,col): 206 | return row*row 207 | self.check_data_integrity( 208 | ('col1 INT',), 209 | generator) 210 | 211 | def test_DECIMAL(self): 212 | # DECIMAL 213 | def generator(row,col): 214 | from decimal import Decimal 215 | return Decimal("%d.%02d" % (row, col)) 216 | self.check_data_integrity( 217 | ('col1 DECIMAL(5,2)',), 218 | generator) 219 | 220 | def test_DATE(self): 221 | ticks = time() 222 | def generator(row,col): 223 | return self.db_module.DateFromTicks(ticks+row*86400-col*1313) 224 | self.check_data_integrity( 225 | ('col1 DATE',), 226 | generator) 227 | 228 | def test_TIME(self): 229 | ticks = time() 230 | def generator(row,col): 231 | return self.db_module.TimeFromTicks(ticks+row*86400-col*1313) 232 | self.check_data_integrity( 233 | ('col1 TIME',), 234 | generator) 235 | 236 | def test_DATETIME(self): 237 | ticks = time() 238 | def generator(row,col): 239 | return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313) 240 | self.check_data_integrity( 241 | ('col1 DATETIME',), 242 | generator) 243 | 244 | def test_TIMESTAMP(self): 245 | ticks = time() 246 | def generator(row,col): 247 | return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313) 248 | self.check_data_integrity( 249 | ('col1 TIMESTAMP',), 250 | generator) 251 | 252 | def test_fractional_TIMESTAMP(self): 253 | ticks = time() 254 | def generator(row,col): 255 | return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313+row*0.7*col/3.0) 256 | self.check_data_integrity( 257 | ('col1 TIMESTAMP',), 258 | generator) 259 | 260 | def test_LONG(self): 261 | def generator(row,col): 262 | if col == 0: 263 | return row 264 | else: 265 | return self.BLOBUText # 'BLOB Text ' * 1024 266 | self.check_data_integrity( 267 | ('col1 INT', 'col2 LONG'), 268 | generator) 269 | 270 | def test_TEXT(self): 271 | def generator(row,col): 272 | if col == 0: 273 | return row 274 | else: 275 | return self.BLOBUText[:5192] # 'BLOB Text ' * 1024 276 | self.check_data_integrity( 277 | ('col1 INT', 'col2 TEXT'), 278 | generator) 279 | 280 | def test_LONG_BYTE(self): 281 | def generator(row,col): 282 | if col == 0: 283 | return row 284 | else: 285 | return self.BLOBBinary # 'BLOB\000Binary ' * 1024 286 | self.check_data_integrity( 287 | ('col1 INT','col2 LONG BYTE'), 288 | generator) 289 | 290 | def test_BLOB(self): 291 | def generator(row,col): 292 | if col == 0: 293 | return row 294 | else: 295 | return self.BLOBBinary # 'BLOB\000Binary ' * 1024 296 | self.check_data_integrity( 297 | ('col1 INT','col2 BLOB'), 298 | generator) 299 | -------------------------------------------------------------------------------- /tests/thirdparty/test_MySQLdb/test_MySQLdb_capabilities.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from . import capabilities 3 | try: 4 | import unittest2 as unittest 5 | except ImportError: 6 | import unittest 7 | import pymysql 8 | from pymysql.tests import base 9 | import warnings 10 | 11 | warnings.filterwarnings('error') 12 | 13 | class test_MySQLdb(capabilities.DatabaseTest): 14 | 15 | db_module = pymysql 16 | connect_args = () 17 | connect_kwargs = base.PyMySQLTestCase.databases[0].copy() 18 | connect_kwargs.update(dict(read_default_file='~/.my.cnf', 19 | use_unicode=True, 20 | charset='utf8', sql_mode="ANSI,STRICT_TRANS_TABLES,TRADITIONAL")) 21 | 22 | create_table_extra = "ENGINE=INNODB CHARACTER SET UTF8" 23 | leak_test = False 24 | 25 | def quote_identifier(self, ident): 26 | return "`%s`" % ident 27 | 28 | def test_TIME(self): 29 | from datetime import timedelta 30 | def generator(row,col): 31 | return timedelta(0, row*8000) 32 | self.check_data_integrity( 33 | ('col1 TIME',), 34 | generator) 35 | 36 | def test_TINYINT(self): 37 | # Number data 38 | def generator(row,col): 39 | v = (row*row) % 256 40 | if v > 127: 41 | v = v-256 42 | return v 43 | self.check_data_integrity( 44 | ('col1 TINYINT',), 45 | generator) 46 | 47 | def test_stored_procedures(self): 48 | db = self.connection 49 | c = self.cursor 50 | try: 51 | self.create_table(('pos INT', 'tree CHAR(20)')) 52 | c.executemany("INSERT INTO %s (pos,tree) VALUES (%%s,%%s)" % self.table, 53 | list(enumerate('ash birch cedar larch pine'.split()))) 54 | db.commit() 55 | 56 | c.execute(""" 57 | CREATE PROCEDURE test_sp(IN t VARCHAR(255)) 58 | BEGIN 59 | SELECT pos FROM %s WHERE tree = t; 60 | END 61 | """ % self.table) 62 | db.commit() 63 | 64 | c.callproc('test_sp', ('larch',)) 65 | rows = c.fetchall() 66 | self.assertEqual(len(rows), 1) 67 | self.assertEqual(rows[0][0], 3) 68 | c.nextset() 69 | finally: 70 | c.execute("DROP PROCEDURE IF EXISTS test_sp") 71 | c.execute('drop table %s' % (self.table)) 72 | 73 | def test_small_CHAR(self): 74 | # Character data 75 | def generator(row,col): 76 | i = ((row+1)*(col+1)+62)%256 77 | if i == 62: return '' 78 | if i == 63: return None 79 | return chr(i) 80 | self.check_data_integrity( 81 | ('col1 char(1)','col2 char(1)'), 82 | generator) 83 | 84 | def test_bug_2671682(self): 85 | from pymysql.constants import ER 86 | try: 87 | self.cursor.execute("describe some_non_existent_table"); 88 | except self.connection.ProgrammingError as msg: 89 | self.assertEqual(msg.args[0], ER.NO_SUCH_TABLE) 90 | 91 | def test_ping(self): 92 | self.connection.ping() 93 | 94 | def test_literal_int(self): 95 | self.assertTrue("2" == self.connection.literal(2)) 96 | 97 | def test_literal_float(self): 98 | self.assertTrue("3.1415" == self.connection.literal(3.1415)) 99 | 100 | def test_literal_string(self): 101 | self.assertTrue("'foo'" == self.connection.literal("foo")) 102 | 103 | 104 | if __name__ == '__main__': 105 | if test_MySQLdb.leak_test: 106 | import gc 107 | gc.enable() 108 | gc.set_debug(gc.DEBUG_LEAK) 109 | unittest.main() 110 | -------------------------------------------------------------------------------- /tests/thirdparty/test_MySQLdb/test_MySQLdb_dbapi20.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from . 
import dbapi20 3 | import pymysql 4 | from pymysql.tests import base 5 | 6 | try: 7 | import unittest2 as unittest 8 | except ImportError: 9 | import unittest 10 | 11 | 12 | class test_MySQLdb(dbapi20.DatabaseAPI20Test): 13 | driver = pymysql 14 | connect_args = () 15 | connect_kw_args = base.PyMySQLTestCase.databases[0].copy() 16 | connect_kw_args.update(dict(read_default_file='~/.my.cnf', 17 | charset='utf8', 18 | sql_mode="ANSI,STRICT_TRANS_TABLES,TRADITIONAL")) 19 | 20 | def test_setoutputsize(self): pass 21 | def test_setoutputsize_basic(self): pass 22 | def test_nextset(self): pass 23 | 24 | """The tests on fetchone and fetchall and rowcount bogusly 25 | test for an exception if the statement cannot return a 26 | result set. MySQL always returns a result set; it's just that 27 | some things return empty result sets.""" 28 | 29 | def test_fetchall(self): 30 | con = self._connect() 31 | try: 32 | cur = con.cursor() 33 | # cursor.fetchall should raise an Error if called 34 | # without executing a query that may return rows (such 35 | # as a select) 36 | self.assertRaises(self.driver.Error, cur.fetchall) 37 | 38 | self.executeDDL1(cur) 39 | for sql in self._populate(): 40 | cur.execute(sql) 41 | 42 | # cursor.fetchall should raise an Error if called 43 | # after executing a a statement that cannot return rows 44 | ## self.assertRaises(self.driver.Error,cur.fetchall) 45 | 46 | cur.execute('select name from %sbooze' % self.table_prefix) 47 | rows = cur.fetchall() 48 | self.assertTrue(cur.rowcount in (-1,len(self.samples))) 49 | self.assertEqual(len(rows),len(self.samples), 50 | 'cursor.fetchall did not retrieve all rows' 51 | ) 52 | rows = [r[0] for r in rows] 53 | rows.sort() 54 | for i in range(0,len(self.samples)): 55 | self.assertEqual(rows[i],self.samples[i], 56 | 'cursor.fetchall retrieved incorrect rows' 57 | ) 58 | rows = cur.fetchall() 59 | self.assertEqual( 60 | len(rows),0, 61 | 'cursor.fetchall should return an empty list if called ' 62 | 'after the whole result set has been fetched' 63 | ) 64 | self.assertTrue(cur.rowcount in (-1,len(self.samples))) 65 | 66 | self.executeDDL2(cur) 67 | cur.execute('select name from %sbarflys' % self.table_prefix) 68 | rows = cur.fetchall() 69 | self.assertTrue(cur.rowcount in (-1,0)) 70 | self.assertEqual(len(rows),0, 71 | 'cursor.fetchall should return an empty list if ' 72 | 'a select query returns no rows' 73 | ) 74 | 75 | finally: 76 | con.close() 77 | 78 | def test_fetchone(self): 79 | con = self._connect() 80 | try: 81 | cur = con.cursor() 82 | 83 | # cursor.fetchone should raise an Error if called before 84 | # executing a select-type query 85 | self.assertRaises(self.driver.Error,cur.fetchone) 86 | 87 | # cursor.fetchone should raise an Error if called after 88 | # executing a query that cannnot return rows 89 | self.executeDDL1(cur) 90 | ## self.assertRaises(self.driver.Error,cur.fetchone) 91 | 92 | cur.execute('select name from %sbooze' % self.table_prefix) 93 | self.assertEqual(cur.fetchone(),None, 94 | 'cursor.fetchone should return None if a query retrieves ' 95 | 'no rows' 96 | ) 97 | self.assertTrue(cur.rowcount in (-1,0)) 98 | 99 | # cursor.fetchone should raise an Error if called after 100 | # executing a query that cannnot return rows 101 | cur.execute("insert into %sbooze values ('Victoria Bitter')" % ( 102 | self.table_prefix 103 | )) 104 | ## self.assertRaises(self.driver.Error,cur.fetchone) 105 | 106 | cur.execute('select name from %sbooze' % self.table_prefix) 107 | r = cur.fetchone() 108 | self.assertEqual(len(r),1, 109 
| 'cursor.fetchone should have retrieved a single row' 110 | ) 111 | self.assertEqual(r[0],'Victoria Bitter', 112 | 'cursor.fetchone retrieved incorrect data' 113 | ) 114 | ## self.assertEqual(cur.fetchone(),None, 115 | ## 'cursor.fetchone should return None if no more rows available' 116 | ## ) 117 | self.assertTrue(cur.rowcount in (-1,1)) 118 | finally: 119 | con.close() 120 | 121 | # Same complaint as for fetchall and fetchone 122 | def test_rowcount(self): 123 | con = self._connect() 124 | try: 125 | cur = con.cursor() 126 | self.executeDDL1(cur) 127 | ## self.assertEqual(cur.rowcount,-1, 128 | ## 'cursor.rowcount should be -1 after executing no-result ' 129 | ## 'statements' 130 | ## ) 131 | cur.execute("insert into %sbooze values ('Victoria Bitter')" % ( 132 | self.table_prefix 133 | )) 134 | ## self.assertTrue(cur.rowcount in (-1,1), 135 | ## 'cursor.rowcount should == number or rows inserted, or ' 136 | ## 'set to -1 after executing an insert statement' 137 | ## ) 138 | cur.execute("select name from %sbooze" % self.table_prefix) 139 | self.assertTrue(cur.rowcount in (-1,1), 140 | 'cursor.rowcount should == number of rows returned, or ' 141 | 'set to -1 after executing a select statement' 142 | ) 143 | self.executeDDL2(cur) 144 | ## self.assertEqual(cur.rowcount,-1, 145 | ## 'cursor.rowcount not being reset to -1 after executing ' 146 | ## 'no-result statements' 147 | ## ) 148 | finally: 149 | con.close() 150 | 151 | def test_callproc(self): 152 | pass # performed in test_MySQL_capabilities 153 | 154 | def help_nextset_setUp(self,cur): 155 | ''' Should create a procedure called deleteme 156 | that returns two result sets, first the 157 | number of rows in booze then "name from booze" 158 | ''' 159 | sql=""" 160 | create procedure deleteme() 161 | begin 162 | select count(*) from %(tp)sbooze; 163 | select name from %(tp)sbooze; 164 | end 165 | """ % dict(tp=self.table_prefix) 166 | cur.execute(sql) 167 | 168 | def help_nextset_tearDown(self,cur): 169 | 'If cleaning up is needed after nextSetTest' 170 | cur.execute("drop procedure deleteme") 171 | 172 | def test_nextset(self): 173 | from warnings import warn 174 | con = self._connect() 175 | try: 176 | cur = con.cursor() 177 | if not hasattr(cur,'nextset'): 178 | return 179 | 180 | try: 181 | self.executeDDL1(cur) 182 | sql=self._populate() 183 | for sql in self._populate(): 184 | cur.execute(sql) 185 | 186 | self.help_nextset_setUp(cur) 187 | 188 | cur.callproc('deleteme') 189 | numberofrows=cur.fetchone() 190 | assert numberofrows[0]== len(self.samples) 191 | assert cur.nextset() 192 | names=cur.fetchall() 193 | assert len(names) == len(self.samples) 194 | s=cur.nextset() 195 | if s: 196 | empty = cur.fetchall() 197 | self.assertEqual(len(empty), 0, 198 | "non-empty result set after other result sets") 199 | #warn("Incompatibility: MySQL returns an empty result set for the CALL itself", 200 | # Warning) 201 | #assert s == None,'No more return sets, should return None' 202 | finally: 203 | self.help_nextset_tearDown(cur) 204 | 205 | finally: 206 | con.close() 207 | 208 | 209 | if __name__ == '__main__': 210 | unittest.main() 211 | -------------------------------------------------------------------------------- /tests/thirdparty/test_MySQLdb/test_MySQLdb_nonstandard.py: -------------------------------------------------------------------------------- 1 | import sys 2 | try: 3 | import unittest2 as unittest 4 | except ImportError: 5 | import unittest 6 | 7 | import pymysql 8 | _mysql = pymysql 9 | from pymysql.constants import 
FIELD_TYPE 10 | from pymysql.tests import base 11 | from pymysql._compat import PY2, long_type 12 | 13 | if not PY2: 14 | basestring = str 15 | 16 | 17 | class TestDBAPISet(unittest.TestCase): 18 | def test_set_equality(self): 19 | self.assertTrue(pymysql.STRING == pymysql.STRING) 20 | 21 | def test_set_inequality(self): 22 | self.assertTrue(pymysql.STRING != pymysql.NUMBER) 23 | 24 | def test_set_equality_membership(self): 25 | self.assertTrue(FIELD_TYPE.VAR_STRING == pymysql.STRING) 26 | 27 | def test_set_inequality_membership(self): 28 | self.assertTrue(FIELD_TYPE.DATE != pymysql.STRING) 29 | 30 | 31 | class CoreModule(unittest.TestCase): 32 | """Core _mysql module features.""" 33 | 34 | def test_NULL(self): 35 | """Should have a NULL constant.""" 36 | self.assertEqual(_mysql.NULL, 'NULL') 37 | 38 | def test_version(self): 39 | """Version information sanity.""" 40 | self.assertTrue(isinstance(_mysql.__version__, basestring)) 41 | 42 | self.assertTrue(isinstance(_mysql.version_info, tuple)) 43 | self.assertEqual(len(_mysql.version_info), 5) 44 | 45 | def test_client_info(self): 46 | self.assertTrue(isinstance(_mysql.get_client_info(), basestring)) 47 | 48 | def test_thread_safe(self): 49 | self.assertTrue(isinstance(_mysql.thread_safe(), int)) 50 | 51 | 52 | class CoreAPI(unittest.TestCase): 53 | """Test _mysql interaction internals.""" 54 | 55 | def setUp(self): 56 | kwargs = base.PyMySQLTestCase.databases[0].copy() 57 | kwargs["read_default_file"] = "~/.my.cnf" 58 | self.conn = _mysql.connect(**kwargs) 59 | 60 | def tearDown(self): 61 | self.conn.close() 62 | 63 | def test_thread_id(self): 64 | tid = self.conn.thread_id() 65 | self.assertTrue(isinstance(tid, (int, long_type)), 66 | "thread_id didn't return an integral value.") 67 | 68 | self.assertRaises(TypeError, self.conn.thread_id, ('evil',), 69 | "thread_id shouldn't accept arguments.") 70 | 71 | def test_affected_rows(self): 72 | self.assertEqual(self.conn.affected_rows(), 0, 73 | "Should return 0 before we do anything.") 74 | 75 | 76 | #def test_debug(self): 77 | ## FIXME Only actually tests if you lack SUPER 78 | #self.assertRaises(pymysql.OperationalError, 79 | #self.conn.dump_debug_info) 80 | 81 | def test_charset_name(self): 82 | self.assertTrue(isinstance(self.conn.character_set_name(), basestring), 83 | "Should return a string.") 84 | 85 | def test_host_info(self): 86 | assert isinstance(self.conn.get_host_info(), basestring), "should return a string" 87 | 88 | def test_proto_info(self): 89 | self.assertTrue(isinstance(self.conn.get_proto_info(), int), 90 | "Should return an int.") 91 | 92 | def test_server_info(self): 93 | if sys.version_info[0] == 2: 94 | self.assertTrue(isinstance(self.conn.get_server_info(), basestring), 95 | "Should return an str.") 96 | else: 97 | self.assertTrue(isinstance(self.conn.get_server_info(), basestring), 98 | "Should return an str.") 99 | 100 | if __name__ == "__main__": 101 | unittest.main() 102 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py26,py27,py33,py34,pypy,pypy3 3 | 4 | [testenv] 5 | commands = coverage run ./runtests.py 6 | deps = unittest2 7 | coverage 8 | passenv = USER 9 | PASSWORD 10 | PAMSERVICE 11 | -------------------------------------------------------------------------------- /upymysql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | uPyMySQL: A pure-Python MySQL 
client library. 3 | 4 | Copyright (c) 2010-2017 uPyMySQL contributors 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. 23 | """ 24 | import sys 25 | import gc 26 | 27 | from .constants import FIELD_TYPE 28 | gc.collect() 29 | from .converters import escape_dict, escape_sequence, escape_string 30 | gc.collect() 31 | #from .err import ( 32 | # Warning, Error, InterfaceError, DataError, 33 | # DatabaseError, OperationalError, IntegrityError, InternalError, 34 | # NotSupportedError, ProgrammingError, MySQLError) 35 | from .times import ( 36 | Date, Time, Timestamp, 37 | DateFromTicks, TimeFromTicks, TimestampFromTicks) 38 | 39 | 40 | VERSION = (0, 7, 11, None) 41 | threadsafety = 1 42 | apilevel = "2.0" 43 | paramstyle = "pyformat" 44 | 45 | 46 | class DBAPISet(frozenset): 47 | 48 | def __ne__(self, other): 49 | if isinstance(other, set): 50 | return frozenset.__ne__(self, other) 51 | else: 52 | return other not in self 53 | 54 | def __eq__(self, other): 55 | if isinstance(other, frozenset): 56 | return frozenset.__eq__(self, other) 57 | else: 58 | return other in self 59 | 60 | def __hash__(self): 61 | return frozenset.__hash__(self) 62 | 63 | 64 | STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, 65 | FIELD_TYPE.VAR_STRING]) 66 | BINARY = DBAPISet([FIELD_TYPE.BLOB, FIELD_TYPE.LONG_BLOB, 67 | FIELD_TYPE.MEDIUM_BLOB, FIELD_TYPE.TINY_BLOB]) 68 | NUMBER = DBAPISet([FIELD_TYPE.DECIMAL, FIELD_TYPE.DOUBLE, FIELD_TYPE.FLOAT, 69 | FIELD_TYPE.INT24, FIELD_TYPE.LONG, FIELD_TYPE.LONGLONG, 70 | FIELD_TYPE.TINY, FIELD_TYPE.YEAR]) 71 | DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE]) 72 | TIME = DBAPISet([FIELD_TYPE.TIME]) 73 | TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME]) 74 | DATETIME = TIMESTAMP 75 | ROWID = DBAPISet() 76 | 77 | 78 | def Binary(x): 79 | """Return x as a binary type.""" 80 | return bytes(x) 81 | 82 | 83 | def Connect(*args, **kwargs): 84 | """ 85 | Connect to the database; see connections.Connection.__init__() for 86 | more information. 87 | """ 88 | from .connections import Connection 89 | return Connection(*args, **kwargs) 90 | 91 | #from . 
import connections as _orig_conn 92 | #if _orig_conn.Connection.__init__.__doc__ is not None: 93 | #Connect.__doc__ = _orig_conn.Connection.__init__.__doc__ 94 | #del _orig_conn 95 | 96 | 97 | def get_client_info(): # for MySQLdb compatibility 98 | version = VERSION 99 | if VERSION[3] is None: 100 | version = VERSION[:3] 101 | return '.'.join(map(str, version)) 102 | 103 | connect = Connection = Connect 104 | 105 | # we include a doctored version_info here for MySQLdb compatibility 106 | version_info = (1,2,6,"final",0) 107 | 108 | NILL = "NILL" 109 | 110 | __version__ = get_client_info() 111 | 112 | def thread_safe(): 113 | return True # match MySQLdb.thread_safe() 114 | 115 | def install_as_MySQLdb(): 116 | """ 117 | After this function is called, any application that imports MySQLdb or 118 | _mysql will unwittingly actually use 119 | """ 120 | sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["upymysql"] 121 | 122 | 123 | __all__ = [ 124 | 'BINARY', 'Binary', 'Connect', 'Connection', 'DATE', 'Date', 125 | 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks', 126 | 'DataError', 'DatabaseError', 'Error', 'FIELD_TYPE', 'IntegrityError', 127 | 'InterfaceError', 'InternalError', 'MySQLError', 'NILL', 'NUMBER', 128 | 'NotSupportedError', 'DBAPISet', 'OperationalError', 'ProgrammingError', 129 | 'ROWID', 'STRING', 'TIME', 'TIMESTAMP', 'Warning', 'apilevel', 'connect', 130 | 'connections', 'constants', 'converters', 'cursors', 131 | 'escape_dict', 'escape_sequence', 'escape_string', 'get_client_info', 132 | 'paramstyle', 'threadsafety', 'version_info', 133 | 134 | "install_as_MySQLdb", 135 | "NILL", "__version__", 136 | ] 137 | -------------------------------------------------------------------------------- /upymysql/_socketio.py: -------------------------------------------------------------------------------- 1 | """ 2 | SocketIO imported from socket module in Python 3. 3 | 4 | Copyright (c) 2001-2013 Python Software Foundation; All Rights Reserved. 5 | """ 6 | 7 | from socket import * 8 | import io 9 | import errno 10 | 11 | __all__ = ['SocketIO'] 12 | 13 | EINTR = errno.EINTR 14 | _blocking_errnos = (errno.EAGAIN, errno.EWOULDBLOCK) 15 | 16 | class SocketIO(io.RawIOBase): 17 | 18 | """Raw I/O implementation for stream sockets. 19 | 20 | This class supports the makefile() method on sockets. It provides 21 | the raw I/O interface on top of a socket object. 22 | """ 23 | 24 | # One might wonder why not let FileIO do the job instead. There are two 25 | # main reasons why FileIO is not adapted: 26 | # - it wouldn't work under Windows (where you can't used read() and 27 | # write() on a socket handle) 28 | # - it wouldn't work with socket timeouts (FileIO would ignore the 29 | # timeout and consider the socket non-blocking) 30 | 31 | # XXX More docs 32 | 33 | def __init__(self, sock, mode): 34 | if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): 35 | raise ValueError("invalid mode: %r" % mode) 36 | io.RawIOBase.__init__(self) 37 | self._sock = sock 38 | if "b" not in mode: 39 | mode += "b" 40 | self._mode = mode 41 | self._reading = "r" in mode 42 | self._writing = "w" in mode 43 | self._timeout_occurred = False 44 | 45 | def readinto(self, b): 46 | """Read up to len(b) bytes into the writable buffer *b* and return 47 | the number of bytes read. If the socket is non-blocking and no bytes 48 | are available, None is returned. 49 | 50 | If *b* is non-empty, a 0 return value indicates that the connection 51 | was shutdown at the other end. 
52 | """ 53 | self._checkClosed() 54 | self._checkReadable() 55 | if self._timeout_occurred: 56 | raise IOError("cannot read from timed out object") 57 | while True: 58 | try: 59 | return self._sock.recv_into(b) 60 | except timeout: 61 | self._timeout_occurred = True 62 | raise 63 | except error as e: 64 | n = e.args[0] 65 | if n == EINTR: 66 | continue 67 | if n in _blocking_errnos: 68 | return None 69 | raise 70 | 71 | def write(self, b): 72 | """Write the given bytes or bytearray object *b* to the socket 73 | and return the number of bytes written. This can be less than 74 | len(b) if not all data could be written. If the socket is 75 | non-blocking and no bytes could be written None is returned. 76 | """ 77 | self._checkClosed() 78 | self._checkWritable() 79 | try: 80 | return self._sock.send(b) 81 | except error as e: 82 | # XXX what about EINTR? 83 | if e.args[0] in _blocking_errnos: 84 | return None 85 | raise 86 | 87 | def readable(self): 88 | """True if the SocketIO is open for reading. 89 | """ 90 | if self.closed: 91 | raise ValueError("I/O operation on closed socket.") 92 | return self._reading 93 | 94 | def writable(self): 95 | """True if the SocketIO is open for writing. 96 | """ 97 | if self.closed: 98 | raise ValueError("I/O operation on closed socket.") 99 | return self._writing 100 | 101 | def seekable(self): 102 | """True if the SocketIO is open for seeking. 103 | """ 104 | if self.closed: 105 | raise ValueError("I/O operation on closed socket.") 106 | return super().seekable() 107 | 108 | def fileno(self): 109 | """Return the file descriptor of the underlying socket. 110 | """ 111 | self._checkClosed() 112 | return self._sock.fileno() 113 | 114 | @property 115 | def name(self): 116 | if not self.closed: 117 | return self.fileno() 118 | else: 119 | return -1 120 | 121 | @property 122 | def mode(self): 123 | return self._mode 124 | 125 | def close(self): 126 | """Close the SocketIO object. This doesn't close the underlying 127 | socket, except if all references to it have disappeared. 
128 | """ 129 | if self.closed: 130 | return 131 | io.RawIOBase.close(self) 132 | self._sock._decref_socketios() 133 | self._sock = None 134 | 135 | -------------------------------------------------------------------------------- /upymysql/charset.py: -------------------------------------------------------------------------------- 1 | MBLENGTH = { 2 | 8:1, 3 | 33:3, 4 | 88:2, 5 | 91:2 6 | } 7 | 8 | 9 | class Charset(object): 10 | def __init__(self, id, name, collation, is_default): 11 | self.id, self.name, self.collation = id, name, collation 12 | self.is_default = is_default == 'Yes' 13 | 14 | def __repr__(self): 15 | return "Charset(id=%s, name=%r, collation=%r)" % ( 16 | self.id, self.name, self.collation) 17 | 18 | @property 19 | def encoding(self): 20 | name = self.name 21 | if name == 'utf8mb4': 22 | return 'utf8' 23 | return name 24 | 25 | @property 26 | def is_binary(self): 27 | return self.id == 63 28 | 29 | 30 | class Charsets: 31 | def __init__(self): 32 | self._by_id = {} 33 | 34 | def add(self, c): 35 | self._by_id[c.id] = c 36 | 37 | def by_id(self, id): 38 | return self._by_id[id] 39 | 40 | def by_name(self, name): 41 | name = name.lower() 42 | for c in self._by_id.values(): 43 | if c.name == name and c.is_default: 44 | return c 45 | 46 | _charsets = Charsets() 47 | """ 48 | Generated with: 49 | 50 | mysql -N -s -e "select id, character_set_name, collation_name, is_default 51 | from information_schema.collations order by id;" | python -c "import sys 52 | for l in sys.stdin.readlines(): 53 | id, name, collation, is_default = l.split(chr(9)) 54 | print '_charsets.add(Charset(%s, \'%s\', \'%s\', \'%s\'))' \ 55 | % (id, name, collation, is_default.strip()) 56 | " 57 | 58 | """ 59 | _charsets.add(Charset(8, 'latin1', 'latin1_swedish_ci', 'Yes')) 60 | _charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes')) 61 | _charsets.add(Charset(45, 'utf8mb4', 'utf8mb4_general_ci', 'Yes')) 62 | _charsets.add(Charset(46, 'utf8mb4', 'utf8mb4_bin', '')) 63 | _charsets.add(Charset(83, 'utf8', 'utf8_bin', '')) 64 | _charsets.add(Charset(192, 'utf8', 'utf8_unicode_ci', '')) 65 | _charsets.add(Charset(223, 'utf8', 'utf8_general_mysql500_ci', '')) 66 | _charsets.add(Charset(224, 'utf8mb4', 'utf8mb4_unicode_ci', '')) 67 | _charsets.add(Charset(246, 'utf8mb4', 'utf8mb4_unicode_520_ci', '')) 68 | 69 | 70 | charset_by_name = _charsets.by_name 71 | charset_by_id = _charsets.by_id 72 | 73 | 74 | def charset_to_encoding(name): 75 | """Convert MySQL's charset name to Python's codec name""" 76 | if name == 'utf8mb4': 77 | return 'utf8' 78 | return name 79 | -------------------------------------------------------------------------------- /upymysql/constants/CLIENT.py: -------------------------------------------------------------------------------- 1 | # https://dev.mysql.com/doc/internals/en/capability-flags.html#packet-Protocol::CapabilityFlags 2 | LONG_PASSWORD = 1 3 | FOUND_ROWS = 1 << 1 4 | LONG_FLAG = 1 << 2 5 | CONNECT_WITH_DB = 1 << 3 6 | NO_SCHEMA = 1 << 4 7 | COMPRESS = 1 << 5 8 | ODBC = 1 << 6 9 | LOCAL_FILES = 1 << 7 10 | IGNORE_SPACE = 1 << 8 11 | PROTOCOL_41 = 1 << 9 12 | INTERACTIVE = 1 << 10 13 | SSL = 1 << 11 14 | IGNORE_SIGPIPE = 1 << 12 15 | TRANSACTIONS = 1 << 13 16 | SECURE_CONNECTION = 1 << 15 17 | MULTI_STATEMENTS = 1 << 16 18 | MULTI_RESULTS = 1 << 17 19 | PS_MULTI_RESULTS = 1 << 18 20 | PLUGIN_AUTH = 1 << 19 21 | PLUGIN_AUTH_LENENC_CLIENT_DATA = 1 << 21 22 | CAPABILITIES = ( 23 | LONG_PASSWORD | LONG_FLAG | PROTOCOL_41 | TRANSACTIONS 24 | | SECURE_CONNECTION | MULTI_STATEMENTS | 
MULTI_RESULTS 25 | | PLUGIN_AUTH | PLUGIN_AUTH_LENENC_CLIENT_DATA) 26 | 27 | # Not done yet 28 | CONNECT_ATTRS = 1 << 20 29 | HANDLE_EXPIRED_PASSWORDS = 1 << 22 30 | SESSION_TRACK = 1 << 23 31 | DEPRECATE_EOF = 1 << 24 32 | -------------------------------------------------------------------------------- /upymysql/constants/COMMAND.py: -------------------------------------------------------------------------------- 1 | 2 | COM_SLEEP = 0x00 3 | COM_QUIT = 0x01 4 | COM_INIT_DB = 0x02 5 | COM_QUERY = 0x03 6 | COM_FIELD_LIST = 0x04 7 | COM_CREATE_DB = 0x05 8 | COM_DROP_DB = 0x06 9 | COM_REFRESH = 0x07 10 | COM_SHUTDOWN = 0x08 11 | COM_STATISTICS = 0x09 12 | COM_PROCESS_INFO = 0x0a 13 | COM_CONNECT = 0x0b 14 | COM_PROCESS_KILL = 0x0c 15 | COM_DEBUG = 0x0d 16 | COM_PING = 0x0e 17 | COM_TIME = 0x0f 18 | COM_DELAYED_INSERT = 0x10 19 | COM_CHANGE_USER = 0x11 20 | COM_BINLOG_DUMP = 0x12 21 | COM_TABLE_DUMP = 0x13 22 | COM_CONNECT_OUT = 0x14 23 | COM_REGISTER_SLAVE = 0x15 24 | COM_STMT_PREPARE = 0x16 25 | COM_STMT_EXECUTE = 0x17 26 | COM_STMT_SEND_LONG_DATA = 0x18 27 | COM_STMT_CLOSE = 0x19 28 | COM_STMT_RESET = 0x1a 29 | COM_SET_OPTION = 0x1b 30 | COM_STMT_FETCH = 0x1c 31 | COM_DAEMON = 0x1d 32 | COM_BINLOG_DUMP_GTID = 0x1e 33 | COM_END = 0x1f 34 | -------------------------------------------------------------------------------- /upymysql/constants/CR.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | # errmsg.h 3 | CR_ERROR_FIRST = 2000 4 | CR_UNKNOWN_ERROR = 2000 5 | CR_SOCKET_CREATE_ERROR = 2001 6 | CR_CONNECTION_ERROR = 2002 7 | CR_CONN_HOST_ERROR = 2003 8 | CR_IPSOCK_ERROR = 2004 9 | CR_UNKNOWN_HOST = 2005 10 | CR_SERVER_GONE_ERROR = 2006 11 | CR_VERSION_ERROR = 2007 12 | CR_OUT_OF_MEMORY = 2008 13 | CR_WRONG_HOST_INFO = 2009 14 | CR_LOCALHOST_CONNECTION = 2010 15 | CR_TCP_CONNECTION = 2011 16 | CR_SERVER_HANDSHAKE_ERR = 2012 17 | CR_SERVER_LOST = 2013 18 | CR_COMMANDS_OUT_OF_SYNC = 2014 19 | CR_NAMEDPIPE_CONNECTION = 2015 20 | CR_NAMEDPIPEWAIT_ERROR = 2016 21 | CR_NAMEDPIPEOPEN_ERROR = 2017 22 | CR_NAMEDPIPESETSTATE_ERROR = 2018 23 | CR_CANT_READ_CHARSET = 2019 24 | CR_NET_PACKET_TOO_LARGE = 2020 25 | CR_EMBEDDED_CONNECTION = 2021 26 | CR_PROBE_SLAVE_STATUS = 2022 27 | CR_PROBE_SLAVE_HOSTS = 2023 28 | CR_PROBE_SLAVE_CONNECT = 2024 29 | CR_PROBE_MASTER_CONNECT = 2025 30 | CR_SSL_CONNECTION_ERROR = 2026 31 | CR_MALFORMED_PACKET = 2027 32 | CR_WRONG_LICENSE = 2028 33 | 34 | CR_NULL_POINTER = 2029 35 | CR_NO_PREPARE_STMT = 2030 36 | CR_PARAMS_NOT_BOUND = 2031 37 | CR_DATA_TRUNCATED = 2032 38 | CR_NO_PARAMETERS_EXISTS = 2033 39 | CR_INVALID_PARAMETER_NO = 2034 40 | CR_INVALID_BUFFER_USE = 2035 41 | CR_UNSUPPORTED_PARAM_TYPE = 2036 42 | 43 | CR_SHARED_MEMORY_CONNECTION = 2037 44 | CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038 45 | CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039 46 | CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040 47 | CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041 48 | CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042 49 | CR_SHARED_MEMORY_MAP_ERROR = 2043 50 | CR_SHARED_MEMORY_EVENT_ERROR = 2044 51 | CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045 52 | CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046 53 | CR_CONN_UNKNOW_PROTOCOL = 2047 54 | CR_INVALID_CONN_HANDLE = 2048 55 | CR_SECURE_AUTH = 2049 56 | CR_FETCH_CANCELED = 2050 57 | CR_NO_DATA = 2051 58 | CR_NO_STMT_METADATA = 2052 59 | CR_NO_RESULT_SET = 2053 60 | CR_NOT_IMPLEMENTED = 2054 61 | CR_SERVER_LOST_EXTENDED = 2055 62 | CR_STMT_CLOSED = 2056 63 | CR_NEW_STMT_METADATA = 2057 64 | 
CR_ALREADY_CONNECTED = 2058 65 | CR_AUTH_PLUGIN_CANNOT_LOAD = 2059 66 | CR_DUPLICATE_CONNECTION_ATTR = 2060 67 | CR_AUTH_PLUGIN_ERR = 2061 68 | CR_ERROR_LAST = 2061 69 | -------------------------------------------------------------------------------- /upymysql/constants/FIELD_TYPE.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | DECIMAL = 0 4 | TINY = 1 5 | SHORT = 2 6 | LONG = 3 7 | FLOAT = 4 8 | DOUBLE = 5 9 | NILL = 6 10 | TIMESTAMP = 7 11 | LONGLONG = 8 12 | INT24 = 9 13 | DATE = 10 14 | TIME = 11 15 | DATETIME = 12 16 | YEAR = 13 17 | NEWDATE = 14 18 | VARCHAR = 15 19 | BIT = 16 20 | JSON = 245 21 | NEWDECIMAL = 246 22 | ENUM = 247 23 | SET = 248 24 | TINY_BLOB = 249 25 | MEDIUM_BLOB = 250 26 | LONG_BLOB = 251 27 | BLOB = 252 28 | VAR_STRING = 253 29 | STRING = 254 30 | GEOMETRY = 255 31 | 32 | CHAR = TINY 33 | INTERVAL = ENUM 34 | -------------------------------------------------------------------------------- /upymysql/constants/FLAG.py: -------------------------------------------------------------------------------- 1 | NOT_NULL = 1 2 | PRI_KEY = 2 3 | UNIQUE_KEY = 4 4 | MULTIPLE_KEY = 8 5 | BLOB = 16 6 | UNSIGNED = 32 7 | ZEROFILL = 64 8 | BINARY = 128 9 | ENUM = 256 10 | AUTO_INCREMENT = 512 11 | TIMESTAMP = 1024 12 | SET = 2048 13 | PART_KEY = 16384 14 | GROUP = 32767 15 | UNIQUE = 65536 16 | -------------------------------------------------------------------------------- /upymysql/constants/SERVER_STATUS.py: -------------------------------------------------------------------------------- 1 | 2 | SERVER_STATUS_IN_TRANS = 1 3 | SERVER_STATUS_AUTOCOMMIT = 2 4 | SERVER_MORE_RESULTS_EXISTS = 8 5 | SERVER_QUERY_NO_GOOD_INDEX_USED = 16 6 | SERVER_QUERY_NO_INDEX_USED = 32 7 | SERVER_STATUS_CURSOR_EXISTS = 64 8 | SERVER_STATUS_LAST_ROW_SENT = 128 9 | SERVER_STATUS_DB_DROPPED = 256 10 | SERVER_STATUS_NO_BACKSLASH_ESCAPES = 512 11 | SERVER_STATUS_METADATA_CHANGED = 1024 12 | -------------------------------------------------------------------------------- /upymysql/constants/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dvrhax/uPyMySQL/312f70475bb15abd41846fdd979dac140f13fd6d/upymysql/constants/__init__.py -------------------------------------------------------------------------------- /upymysql/converters.py: -------------------------------------------------------------------------------- 1 | #from ._compat import text_type, long_type, unichr 2 | #Directly declaring after removing _compat file 3 | text_type = str 4 | long_type = int 5 | unichr = chr 6 | 7 | 8 | #import datetime 9 | #from decimal import Decimal 10 | 11 | import utime as time 12 | 13 | try: 14 | import re 15 | except ImportError: 16 | import ure as re 17 | 18 | from .constants import FIELD_TYPE, FLAG 19 | from .charset import charset_by_id, charset_to_encoding 20 | 21 | def translate(s, table): 22 | out = '' 23 | for e in s: 24 | out+=table[ord(e)] 25 | return out 26 | 27 | def escape_item(val, charset, mapping=None): 28 | if mapping is None: 29 | mapping = encoders 30 | encoder = mapping.get(type(val)) 31 | 32 | # Fallback to default when no encoder found 33 | if not encoder: 34 | try: 35 | encoder = mapping[text_type] 36 | except KeyError: 37 | raise TypeError("no default type converter defined") 38 | 39 | if encoder in (escape_dict, escape_sequence): 40 | val = encoder(val, charset, mapping) 41 | else: 42 | val = encoder(val, mapping) 43 | return val 44 | 45 | def escape_dict(val, charset, 
mapping=None): 46 | n = {} 47 | for k, v in val.items(): 48 | quoted = escape_item(v, charset, mapping) 49 | n[k] = quoted 50 | return n 51 | 52 | def escape_sequence(val, charset, mapping=None): 53 | n = [] 54 | for item in val: 55 | quoted = escape_item(item, charset, mapping) 56 | n.append(quoted) 57 | return "(" + ",".join(n) + ")" 58 | 59 | def escape_set(val, charset, mapping=None): 60 | return ','.join([escape_item(x, charset, mapping) for x in val]) 61 | 62 | def escape_bool(value, mapping=None): 63 | return str(int(value)) 64 | 65 | def escape_object(value, mapping=None): 66 | return str(value) 67 | 68 | def escape_int(value, mapping=None): 69 | return str(value) 70 | 71 | def escape_float(value, mapping=None): 72 | return ('%.15g' % value) 73 | 74 | _escape_table = [unichr(x) for x in range(128)] 75 | _escape_table[0] = u'\\0' 76 | _escape_table[ord('\\')] = u'\\\\' 77 | _escape_table[ord('\n')] = u'\\n' 78 | _escape_table[ord('\r')] = u'\\r' 79 | _escape_table[ord('\032')] = u'\\Z' 80 | _escape_table[ord('"')] = u'\\"' 81 | _escape_table[ord("'")] = u"\\'" 82 | 83 | def _escape_unicode(value, mapping=None): 84 | """escapes *value* without adding quote. 85 | 86 | Value should be unicode 87 | """ 88 | return translate(value, _escape_table) 89 | 90 | escape_string = _escape_unicode 91 | 92 | # On Python ~3.5, str.decode('ascii', 'surrogateescape') is slow. 93 | # (fixed in Python 3.6, http://bugs.python.org/issue24870) 94 | # Workaround is str.decode('latin1') then translate 0x80-0xff into 0udc80-0udcff. 95 | # We can escape special chars and surrogateescape at once. 96 | _escape_bytes_table = _escape_table + [chr(i) for i in range(0xdc80, 0xdd00)] 97 | 98 | def escape_bytes(value, mapping=None): 99 | #return "_binary'%s'" % value.decode('latin1').translate(_escape_bytes_table) 100 | return "_binary'%s'" % translate(value.decode('latin1'), _escape_bytes_table) 101 | 102 | 103 | def escape_unicode(value, mapping=None): 104 | return u"'%s'" % _escape_unicode(value) 105 | 106 | def escape_str(value, mapping=None): 107 | return "'%s'" % escape_string(str(value), mapping) 108 | 109 | def escape_None(value, mapping=None): 110 | return 'NILL' 111 | 112 | def escape_timedelta(obj, mapping=None): 113 | seconds = int(obj.seconds) % 60 114 | minutes = int(obj.seconds // 60) % 60 115 | hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24 116 | if obj.microseconds: 117 | fmt = "'{0:02d}:{1:02d}:{2:02d}.{3:06d}'" 118 | else: 119 | fmt = "'{0:02d}:{1:02d}:{2:02d}'" 120 | return fmt.format(hours, minutes, seconds, obj.microseconds) 121 | 122 | def escape_time(obj, mapping=None): 123 | if obj.microsecond: 124 | fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'" 125 | else: 126 | fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}'" 127 | return fmt.format(obj) 128 | 129 | def escape_datetime(obj, mapping=None): 130 | if obj.microsecond: 131 | fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'" 132 | else: 133 | fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}'" 134 | return fmt.format(obj) 135 | 136 | def escape_date(obj, mapping=None): 137 | fmt = "'{0.year:04}-{0.month:02}-{0.day:02}'" 138 | return fmt.format(obj) 139 | 140 | #def escape_struct_time(obj, mapping=None): 141 | # return escape_datetime(datetime.datetime(*obj[:6])) 142 | 143 | def _convert_second_fraction(s): 144 | if not s: 145 | return 0 146 | # Pad zeros to ensure the fraction length in microseconds 147 | s = s.ljust(6, '0') 
148 | return int(s[:6]) 149 | 150 | DATETIME_RE = re.compile(r"(\d{1,4})-(\d{1,2})-(\d{1,2})[T ](\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?") 151 | 152 | 153 | def convert_datetime(obj): 154 | """Returns a DATETIME or TIMESTAMP column value as a datetime object: 155 | 156 | >>> datetime_or_None('2007-02-25 23:06:20') 157 | datetime.datetime(2007, 2, 25, 23, 6, 20) 158 | >>> datetime_or_None('2007-02-25T23:06:20') 159 | datetime.datetime(2007, 2, 25, 23, 6, 20) 160 | 161 | Illegal values are returned as None: 162 | 163 | >>> datetime_or_None('2007-02-31T23:06:20') is None 164 | True 165 | >>> datetime_or_None('0000-00-00 00:00:00') is None 166 | True 167 | 168 | """ 169 | if isinstance(obj, (bytes, bytearray)): 170 | obj = obj.decode('ascii') 171 | 172 | m = DATETIME_RE.match(obj) 173 | if not m: 174 | return convert_date(obj) 175 | 176 | try: 177 | groups = list(m.groups()) 178 | groups[-1] = _convert_second_fraction(groups[-1]) 179 | return datetime.datetime(*[ int(x) for x in groups ]) 180 | except ValueError: 181 | return convert_date(obj) 182 | 183 | TIMEDELTA_RE = re.compile(r"(-)?(\d{1,3}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?") 184 | 185 | 186 | def convert_timedelta(obj): 187 | """Returns a TIME column as a timedelta object: 188 | 189 | >>> timedelta_or_None('25:06:17') 190 | datetime.timedelta(1, 3977) 191 | >>> timedelta_or_None('-25:06:17') 192 | datetime.timedelta(-2, 83177) 193 | 194 | Illegal values are returned as None: 195 | 196 | >>> timedelta_or_None('random crap') is None 197 | True 198 | 199 | Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but 200 | can accept values as (+|-)DD HH:MM:SS. The latter format will not 201 | be parsed correctly by this function. 202 | """ 203 | if isinstance(obj, (bytes, bytearray)): 204 | obj = obj.decode('ascii') 205 | 206 | m = TIMEDELTA_RE.match(obj) 207 | if not m: 208 | return None 209 | 210 | try: 211 | groups = list(m.groups()) 212 | groups[-1] = _convert_second_fraction(groups[-1]) 213 | negate = -1 if groups[0] else 1 214 | hours, minutes, seconds, microseconds = groups[1:] 215 | 216 | tdelta = datetime.timedelta( 217 | hours = int(hours), 218 | minutes = int(minutes), 219 | seconds = int(seconds), 220 | microseconds = int(microseconds) 221 | ) * negate 222 | return tdelta 223 | except ValueError: 224 | return None 225 | 226 | TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?") 227 | 228 | 229 | def convert_time(obj): 230 | """Returns a TIME column as a time object: 231 | 232 | >>> time_or_None('15:06:17') 233 | datetime.time(15, 6, 17) 234 | 235 | Illegal values are returned as None: 236 | 237 | >>> time_or_None('-25:06:17') is None 238 | True 239 | >>> time_or_None('random crap') is None 240 | True 241 | 242 | Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but 243 | can accept values as (+|-)DD HH:MM:SS. The latter format will not 244 | be parsed correctly by this function. 245 | 246 | Also note that MySQL's TIME column corresponds more closely to 247 | Python's timedelta and not time. However if you want TIME columns 248 | to be treated as time-of-day and not a time offset, then you can 249 | use set this function as the converter for FIELD_TYPE.TIME. 
250 | """ 251 | if isinstance(obj, (bytes, bytearray)): 252 | obj = obj.decode('ascii') 253 | 254 | m = TIME_RE.match(obj) 255 | if not m: 256 | return None 257 | 258 | try: 259 | groups = list(m.groups()) 260 | groups[-1] = _convert_second_fraction(groups[-1]) 261 | hours, minutes, seconds, microseconds = groups 262 | return datetime.time(hour=int(hours), minute=int(minutes), 263 | second=int(seconds), microsecond=int(microseconds)) 264 | except ValueError: 265 | return None 266 | 267 | 268 | def convert_date(obj): 269 | """Returns a DATE column as a date object: 270 | 271 | >>> date_or_None('2007-02-26') 272 | datetime.date(2007, 2, 26) 273 | 274 | Illegal values are returned as None: 275 | 276 | >>> date_or_None('2007-02-31') is None 277 | True 278 | >>> date_or_None('0000-00-00') is None 279 | True 280 | 281 | """ 282 | if isinstance(obj, (bytes, bytearray)): 283 | obj = obj.decode('ascii') 284 | try: 285 | return datetime.date(*[ int(x) for x in obj.split('-', 2) ]) 286 | except ValueError: 287 | return None 288 | 289 | 290 | def convert_mysql_timestamp(timestamp): 291 | """Convert a MySQL TIMESTAMP to a Timestamp object. 292 | 293 | MySQL >= 4.1 returns TIMESTAMP in the same format as DATETIME: 294 | 295 | >>> mysql_timestamp_converter('2007-02-25 22:32:17') 296 | datetime.datetime(2007, 2, 25, 22, 32, 17) 297 | 298 | MySQL < 4.1 uses a big string of numbers: 299 | 300 | >>> mysql_timestamp_converter('20070225223217') 301 | datetime.datetime(2007, 2, 25, 22, 32, 17) 302 | 303 | Illegal values are returned as None: 304 | 305 | >>> mysql_timestamp_converter('2007-02-31 22:32:17') is None 306 | True 307 | >>> mysql_timestamp_converter('00000000000000') is None 308 | True 309 | 310 | """ 311 | if isinstance(timestamp, (bytes, bytearray)): 312 | timestamp = timestamp.decode('ascii') 313 | if timestamp[4] == '-': 314 | return convert_datetime(timestamp) 315 | timestamp += "0"*(14-len(timestamp)) # padding 316 | year, month, day, hour, minute, second = \ 317 | int(timestamp[:4]), int(timestamp[4:6]), int(timestamp[6:8]), \ 318 | int(timestamp[8:10]), int(timestamp[10:12]), int(timestamp[12:14]) 319 | try: 320 | return datetime.datetime(year, month, day, hour, minute, second) 321 | except ValueError: 322 | return None 323 | 324 | def convert_set(s): 325 | if isinstance(s, (bytes, bytearray)): 326 | return set(s.split(b",")) 327 | return set(s.split(",")) 328 | 329 | 330 | def through(x): 331 | return x 332 | 333 | 334 | #def convert_bit(b): 335 | # b = "\x00" * (8 - len(b)) + b # pad w/ zeroes 336 | # return struct.unpack(">Q", b)[0] 337 | # 338 | # the snippet above is right, but MySQLdb doesn't process bits, 339 | # so we shouldn't either 340 | convert_bit = through 341 | 342 | 343 | def convert_characters(connection, field, data): 344 | field_charset = charset_by_id(field.charsetnr).name 345 | encoding = charset_to_encoding(field_charset) 346 | if field.flags & FLAG.SET: 347 | return convert_set(data.decode(encoding)) 348 | if field.flags & FLAG.BINARY: 349 | return data 350 | 351 | if connection.use_unicode: 352 | data = data.decode(encoding) 353 | elif connection.charset != field_charset: 354 | data = data.decode(encoding) 355 | data = data.encode(connection.encoding) 356 | return data 357 | 358 | encoders = { 359 | bool: escape_bool, 360 | int: escape_int, 361 | long_type: escape_int, 362 | float: escape_float, 363 | str: escape_str, 364 | text_type: escape_unicode, 365 | tuple: escape_sequence, 366 | list: escape_sequence, 367 | set: escape_sequence, 368 | frozenset: escape_sequence, 
369 | dict: escape_dict, 370 | bytearray: escape_bytes, 371 | type(None): escape_None, 372 | #datetime.date: escape_date, 373 | #datetime.datetime: escape_datetime, 374 | #datetime.timedelta: escape_timedelta, 375 | #datetime.time: escape_time, 376 | #time.struct_time: escape_struct_time, 377 | #Decimal: escape_object, 378 | } 379 | 380 | encoders[bytes] = escape_bytes 381 | 382 | decoders = { 383 | FIELD_TYPE.BIT: convert_bit, 384 | FIELD_TYPE.TINY: int, 385 | FIELD_TYPE.SHORT: int, 386 | FIELD_TYPE.LONG: int, 387 | FIELD_TYPE.FLOAT: float, 388 | FIELD_TYPE.DOUBLE: float, 389 | FIELD_TYPE.LONGLONG: int, 390 | FIELD_TYPE.INT24: int, 391 | FIELD_TYPE.YEAR: int, 392 | FIELD_TYPE.TIMESTAMP: convert_mysql_timestamp, 393 | FIELD_TYPE.DATETIME: convert_datetime, 394 | FIELD_TYPE.TIME: convert_timedelta, 395 | FIELD_TYPE.DATE: convert_date, 396 | FIELD_TYPE.SET: convert_set, 397 | FIELD_TYPE.BLOB: through, 398 | FIELD_TYPE.TINY_BLOB: through, 399 | FIELD_TYPE.MEDIUM_BLOB: through, 400 | FIELD_TYPE.LONG_BLOB: through, 401 | FIELD_TYPE.STRING: through, 402 | FIELD_TYPE.VAR_STRING: through, 403 | FIELD_TYPE.VARCHAR: through, 404 | #FIELD_TYPE.DECIMAL: Decimal, 405 | #FIELD_TYPE.NEWDECIMAL: Decimal, 406 | } 407 | 408 | 409 | # for MySQLdb compatibility 410 | conversions = encoders.copy() 411 | conversions.update(decoders) 412 | Thing2Literal = escape_str 413 | -------------------------------------------------------------------------------- /upymysql/cursors.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | #from __future__ import print_function, absolute_import 3 | #from functools import partial 4 | #import warnings 5 | 6 | try: 7 | import re 8 | except ImportError: 9 | import ure as re 10 | 11 | #from ._compat import range_type, text_type 12 | #Directly declaring after removing _compat file 13 | range_type = range 14 | text_type = str 15 | 16 | 17 | 18 | #: Regular expression for :meth:`Cursor.executemany`. 19 | #: executemany only suports simple bulk insert. 20 | #: You can use it to load large dataset. 21 | RE_INSERT_VALUES = re.compile( 22 | r"\s*((?:INSERT|REPLACE)\s.+\sVALUES?\s+)" + 23 | r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))" + 24 | r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z")#, 25 | #re.IGNORECASE | re.DOTALL) 26 | 27 | 28 | class Cursor(object): 29 | """ 30 | This is the object you use to interact with the database. 31 | 32 | Do not create an instance of a Cursor yourself. Call 33 | connections.Connection.cursor(). 34 | """ 35 | 36 | #: Max statement size which :meth:`executemany` generates. 37 | #: 38 | #: Max size of allowed statement is max_allowed_packet - packet_header_size. 39 | #: Default value of max_allowed_packet is 1048576. 40 | max_stmt_length = 1024000 41 | 42 | _defer_warnings = False 43 | 44 | def __init__(self, connection): 45 | self.connection = connection 46 | self.description = None 47 | self.rownumber = 0 48 | self.rowcount = -1 49 | self.arraysize = 1 50 | self._executed = None 51 | self._result = None 52 | self._rows = None 53 | self._warnings_handled = False 54 | 55 | def close(self): 56 | """ 57 | Closing a cursor just exhausts all remaining data. 
58 | """ 59 | conn = self.connection 60 | if conn is None: 61 | return 62 | try: 63 | while self.nextset(): 64 | pass 65 | finally: 66 | self.connection = None 67 | 68 | def __enter__(self): 69 | return self 70 | 71 | def __exit__(self, *exc_info): 72 | del exc_info 73 | self.close() 74 | 75 | def _get_db(self): 76 | if not self.connection: 77 | raise SyntaxError('err.ProgrammingError("Cursor closed")') 78 | return self.connection 79 | 80 | def _check_executed(self): 81 | if not self._executed: 82 | raise SyntaxError('err.ProgrammingError("execute() first")') 83 | 84 | def _conv_row(self, row): 85 | return row 86 | 87 | def setinputsizes(self, *args): 88 | """Does nothing, required by DB API.""" 89 | 90 | def setoutputsizes(self, *args): 91 | """Does nothing, required by DB API.""" 92 | 93 | def _nextset(self, unbuffered=False): 94 | """Get the next query set""" 95 | conn = self._get_db() 96 | current_result = self._result 97 | # for unbuffered queries warnings are only available once whole result has been read 98 | if unbuffered: 99 | self._show_warnings() 100 | if current_result is None or current_result is not conn._result: 101 | return None 102 | if not current_result.has_next: 103 | return None 104 | conn.next_result(unbuffered=unbuffered) 105 | self._do_get_result() 106 | return True 107 | 108 | def nextset(self): 109 | return self._nextset(False) 110 | 111 | def _ensure_bytes(self, x, encoding=None): 112 | if isinstance(x, text_type): 113 | x = x.encode(encoding) 114 | elif isinstance(x, (tuple, list)): 115 | x = type(x)(self._ensure_bytes(v, encoding=encoding) for v in x) 116 | return x 117 | 118 | def _escape_args(self, args, conn): 119 | #ensure_bytes = partial(self._ensure_bytes, encoding=conn.encoding) 120 | 121 | if isinstance(args, (tuple, list)): 122 | return tuple(conn.literal(arg) for arg in args) 123 | elif isinstance(args, dict): 124 | return dict((key, conn.literal(val)) for (key, val) in args.items()) 125 | else: 126 | # If it's not a dictionary let's try escaping it anyways. 127 | # Worst case it will throw a Value error 128 | return conn.escape(args) 129 | 130 | def mogrify(self, query, args=None): 131 | """ 132 | Returns the exact string that is sent to the database by calling the 133 | execute() method. 134 | 135 | This method follows the extension to the DB API 2.0 followed by Psycopg. 136 | """ 137 | conn = self._get_db() 138 | 139 | if args is not None: 140 | query = query % self._escape_args(args, conn) 141 | 142 | return query 143 | 144 | def execute(self, query, args=None): 145 | """Execute a query 146 | 147 | :param str query: Query to execute. 148 | 149 | :param args: parameters used with query. (optional) 150 | :type args: tuple, list or dict 151 | 152 | :return: Number of affected rows 153 | :rtype: int 154 | 155 | If args is a list or tuple, %s can be used as a placeholder in the query. 156 | If args is a dict, %(name)s can be used as a placeholder in the query. 157 | """ 158 | while self.nextset(): 159 | pass 160 | 161 | query = self.mogrify(query, args) 162 | 163 | result = self._query(query) 164 | self._executed = query 165 | return result 166 | 167 | def executemany(self, query, args): 168 | # type: (str, list) -> int 169 | """Run several data against one query 170 | 171 | :param query: query to execute on server 172 | :param args: Sequence of sequences or mappings. It is used as parameter. 173 | :return: Number of rows affected, if any. 174 | 175 | This method improves performance on multiple-row INSERT and 176 | REPLACE. 
Otherwise it is equivalent to looping over args with 177 | execute(). 178 | """ 179 | if not args: 180 | return 181 | 182 | m = RE_INSERT_VALUES.match(query) 183 | if m: 184 | q_prefix = m.group(1) % () 185 | q_values = m.group(2).rstrip() 186 | q_postfix = m.group(3) or '' 187 | assert q_values[0] == '(' and q_values[-1] == ')' 188 | return self._do_execute_many(q_prefix, q_values, q_postfix, args, 189 | self.max_stmt_length, 190 | self._get_db().encoding) 191 | 192 | self.rowcount = sum(self.execute(query, arg) for arg in args) 193 | return self.rowcount 194 | 195 | def _do_execute_many(self, prefix, values, postfix, args, max_stmt_length, encoding): 196 | conn = self._get_db() 197 | escape = self._escape_args 198 | if isinstance(prefix, text_type): 199 | prefix = prefix.encode(encoding) 200 | if isinstance(postfix, text_type): 201 | postfix = postfix.encode(encoding) 202 | sql = bytearray(prefix) 203 | args = iter(args) 204 | v = values % escape(next(args), conn) 205 | if isinstance(v, text_type): 206 | v = v.encode(encoding, 'surrogateescape') 207 | sql += v 208 | rows = 0 209 | for arg in args: 210 | v = values % escape(arg, conn) 211 | if isinstance(v, text_type): 212 | v = v.encode(encoding, 'surrogateescape') 213 | if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length: 214 | rows += self.execute(sql + postfix) 215 | sql = bytearray(prefix) 216 | else: 217 | sql += b',' 218 | sql += v 219 | rows += self.execute(sql + postfix) 220 | self.rowcount = rows 221 | return rows 222 | 223 | def callproc(self, procname, args=()): 224 | """Execute stored procedure procname with args 225 | 226 | procname -- string, name of procedure to execute on server 227 | 228 | args -- Sequence of parameters to use with procedure 229 | 230 | Returns the original args. 231 | 232 | Compatibility warning: PEP-249 specifies that any modified 233 | parameters must be returned. This is currently impossible 234 | as they are only available by storing them in a server 235 | variable and then retrieved by a query. Since stored 236 | procedures return zero or more result sets, there is no 237 | reliable way to get at OUT or INOUT parameters via callproc. 238 | The server variables are named @_procname_n, where procname 239 | is the parameter above and n is the position of the parameter 240 | (from zero). Once all result sets generated by the procedure 241 | have been fetched, you can issue a SELECT @_procname_0, ... 242 | query using .execute() to get any OUT or INOUT values. 243 | 244 | Compatibility warning: The act of calling a stored procedure 245 | itself creates an empty result set. This appears after any 246 | result sets generated by the procedure. This is non-standard 247 | behavior with respect to the DB-API. Be sure to use nextset() 248 | to advance through all result sets; otherwise you may get 249 | disconnected. 
250 | """ 251 | conn = self._get_db() 252 | for index, arg in enumerate(args): 253 | q = "SET @_%s_%d=%s" % (procname, index, conn.escape(arg)) 254 | self._query(q) 255 | self.nextset() 256 | 257 | q = "CALL %s(%s)" % (procname, 258 | ','.join(['@_%s_%d' % (procname, i) 259 | for i in range_type(len(args))])) 260 | self._query(q) 261 | self._executed = q 262 | return args 263 | 264 | def fetchone(self): 265 | """Fetch the next row""" 266 | self._check_executed() 267 | if self._rows is None or self.rownumber >= len(self._rows): 268 | return None 269 | result = self._rows[self.rownumber] 270 | self.rownumber += 1 271 | return result 272 | 273 | def fetchmany(self, size=None): 274 | """Fetch several rows""" 275 | self._check_executed() 276 | if self._rows is None: 277 | return () 278 | end = self.rownumber + (size or self.arraysize) 279 | result = self._rows[self.rownumber:end] 280 | self.rownumber = min(end, len(self._rows)) 281 | return result 282 | 283 | def fetchall(self): 284 | """Fetch all the rows""" 285 | self._check_executed() 286 | if self._rows is None: 287 | return () 288 | if self.rownumber: 289 | result = self._rows[self.rownumber:] 290 | else: 291 | result = self._rows 292 | self.rownumber = len(self._rows) 293 | return result 294 | 295 | def scroll(self, value, mode='relative'): 296 | self._check_executed() 297 | if mode == 'relative': 298 | r = self.rownumber + value 299 | elif mode == 'absolute': 300 | r = value 301 | else: 302 | raise SyntaxError('err.ProgrammingError("unknown scroll mode %s"' % mode) 303 | 304 | if not (0 <= r < len(self._rows)): 305 | raise IndexError("out of range") 306 | self.rownumber = r 307 | 308 | def _query(self, q): 309 | conn = self._get_db() 310 | self._last_executed = q 311 | conn.query(q) 312 | self._do_get_result() 313 | return self.rowcount 314 | 315 | def _do_get_result(self): 316 | conn = self._get_db() 317 | 318 | self.rownumber = 0 319 | self._result = result = conn._result 320 | 321 | self.rowcount = result.affected_rows 322 | self.description = result.description 323 | self.lastrowid = result.insert_id 324 | self._rows = result.rows 325 | self._warnings_handled = False 326 | 327 | if not self._defer_warnings: 328 | self._show_warnings() 329 | 330 | def _show_warnings(self): 331 | if self._warnings_handled: 332 | return 333 | self._warnings_handled = True 334 | if self._result and (self._result.has_next or not self._result.warning_count): 335 | return 336 | ws = self._get_db().show_warnings() 337 | if ws is None: 338 | return 339 | for w in ws: 340 | msg = w[-1] 341 | print('warnings disabled in uPyMySQL') 342 | #warnings.warn(err.Warning(*w[1:3]), stacklevel=4) 343 | 344 | def __iter__(self): 345 | return iter(self.fetchone, None) 346 | 347 | #Warning = err.Warning 348 | #Error = err.Error 349 | #InterfaceError = err.InterfaceError 350 | #DatabaseError = err.DatabaseError 351 | #DataError = err.DataError 352 | #OperationalError = err.OperationalError 353 | #IntegrityError = err.IntegrityError 354 | #InternalError = err.InternalError 355 | #ProgrammingError = err.ProgrammingError 356 | #NotSupportedError = err.NotSupportedError 357 | 358 | 359 | class DictCursor(Cursor): 360 | """A cursor which returns results as a dictionary""" 361 | dict_type = dict 362 | 363 | def _do_get_result(self): 364 | conn = self._get_db() 365 | 366 | self.rownumber = 0 367 | self._result = result = conn._result 368 | 369 | self.rowcount = result.affected_rows 370 | self.description = result.description 371 | self.lastrowid = result.insert_id 372 | self._rows 
= result.rows 373 | self._warnings_handled = False 374 | 375 | if not self._defer_warnings: 376 | self._show_warnings() 377 | 378 | fields = [] 379 | if self.description: 380 | for f in self._result.fields: 381 | name = f.name 382 | if name in fields: 383 | name = f.table_name + '.' + name 384 | fields.append(name) 385 | self._fields = fields 386 | 387 | if fields and self._rows: 388 | self._rows = [self._conv_row(r) for r in self._rows] 389 | 390 | def _conv_row(self, row): 391 | if row is None: 392 | return None 393 | return self.dict_type(zip(self._fields, row)) 394 | 395 | 396 | class SSCursor(Cursor): 397 | """ 398 | Unbuffered Cursor, mainly useful for queries that return a lot of data, 399 | or for connections to remote servers over a slow network. 400 | 401 | Instead of copying every row of data into a buffer, this will fetch 402 | rows as needed. The upside of this is the client uses much less memory, 403 | and rows are returned much faster when traveling over a slow network 404 | or if the result set is very big. 405 | 406 | There are limitations, though. The MySQL protocol doesn't support 407 | returning the total number of rows, so the only way to tell how many rows 408 | there are is to iterate over every row returned. Also, it currently isn't 409 | possible to scroll backwards, as only the current row is held in memory. 410 | """ 411 | 412 | _defer_warnings = True 413 | 414 | def _conv_row(self, row): 415 | return row 416 | 417 | def close(self): 418 | conn = self.connection 419 | if conn is None: 420 | return 421 | 422 | if self._result is not None and self._result is conn._result: 423 | self._result._finish_unbuffered_query() 424 | 425 | try: 426 | while self.nextset(): 427 | pass 428 | finally: 429 | self.connection = None 430 | 431 | def _query(self, q): 432 | conn = self._get_db() 433 | self._last_executed = q 434 | conn.query(q, unbuffered=True) 435 | self._do_get_result() 436 | return self.rowcount 437 | 438 | def nextset(self): 439 | return self._nextset(unbuffered=True) 440 | 441 | def read_next(self): 442 | """Read next row""" 443 | return self._conv_row(self._result._read_rowdata_packet_unbuffered()) 444 | 445 | def fetchone(self): 446 | """Fetch next row""" 447 | self._check_executed() 448 | row = self.read_next() 449 | if row is None: 450 | self._show_warnings() 451 | return None 452 | self.rownumber += 1 453 | return row 454 | 455 | def fetchall(self): 456 | """ 457 | Fetch all, as per MySQLdb. Pretty useless for large queries, as 458 | it is buffered. See fetchall_unbuffered(), if you want an unbuffered 459 | generator version of this method. 460 | """ 461 | return list(self.fetchall_unbuffered()) 462 | 463 | def fetchall_unbuffered(self): 464 | """ 465 | Fetch all, implemented as a generator, which isn't to standard, 466 | however, it doesn't make sense to return everything in a list, as that 467 | would use ridiculous memory for large result sets. 
468 | """ 469 | return iter(self.fetchone, None) 470 | 471 | def __iter__(self): 472 | return self.fetchall_unbuffered() 473 | 474 | def fetchmany(self, size=None): 475 | """Fetch many""" 476 | self._check_executed() 477 | if size is None: 478 | size = self.arraysize 479 | 480 | rows = [] 481 | for i in range_type(size): 482 | row = self.read_next() 483 | if row is None: 484 | self._show_warnings() 485 | break 486 | rows.append(row) 487 | self.rownumber += 1 488 | return rows 489 | 490 | def scroll(self, value, mode='relative'): 491 | self._check_executed() 492 | 493 | if mode == 'relative': 494 | if value < 0: 495 | raise SyntaxError('err.NotSupportedError( \ 496 | "Backwards scrolling not supported by this cursor")') 497 | 498 | for _ in range_type(value): 499 | self.read_next() 500 | self.rownumber += value 501 | elif mode == 'absolute': 502 | if value < self.rownumber: 503 | raise SyntaxError('err.NotSupportedError( \ 504 | "Backwards scrolling not supported by this cursor")') 505 | 506 | end = value - self.rownumber 507 | for _ in range_type(end): 508 | self.read_next() 509 | self.rownumber = value 510 | else: 511 | raise SyntaxError('err.ProgrammingError("unknown scroll mode %s"' % mode) 512 | 513 | 514 | class SSDictCursor(SSCursor): 515 | """An unbuffered cursor, which returns results as a dictionary""" 516 | 517 | dict_type = dict 518 | 519 | def _do_get_result(self): 520 | fields = [] 521 | if self.description: 522 | for f in self._result.fields: 523 | name = f.name 524 | if name in fields: 525 | name = f.table_name + '.' + name 526 | fields.append(name) 527 | self._fields = fields 528 | 529 | if fields and self._rows: 530 | self._rows = [self._conv_row(r) for r in self._rows] 531 | 532 | def _conv_row(self, row): 533 | if row is None: 534 | return None 535 | return self.dict_type(zip(self._fields, row)) 536 | 537 | -------------------------------------------------------------------------------- /upymysql/optionfile.py: -------------------------------------------------------------------------------- 1 | import configparser 2 | 3 | 4 | class Parser(configparser.RawConfigParser): 5 | 6 | def __remove_quotes(self, value): 7 | quotes = ["'", "\""] 8 | for quote in quotes: 9 | if len(value) >= 2 and value[0] == value[-1] == quote: 10 | return value[1:-1] 11 | return value 12 | 13 | def get(self, section, option): 14 | value = configparser.RawConfigParser.get(self, section, option) 15 | return self.__remove_quotes(value) 16 | -------------------------------------------------------------------------------- /upymysql/times.py: -------------------------------------------------------------------------------- 1 | from utime import localtime 2 | #from datetime import date, datetime, time, timedelta 3 | 4 | 5 | Date = 'date' 6 | Time = 'time' 7 | TimeDelta = 'timedelta' 8 | Timestamp = 'datetime' 9 | 10 | 11 | def DateFromTicks(ticks): 12 | #return date(*localtime(ticks)[:3]) 13 | return localtime(ticks)[:3] 14 | 15 | 16 | def TimeFromTicks(ticks): 17 | #return time(*localtime(ticks)[3:6]) 18 | return localtime(ticks)[3:6] 19 | 20 | 21 | def TimestampFromTicks(ticks): 22 | #return datetime(*localtime(ticks)[:6]) 23 | return localtime(ticks)[:6] 24 | -------------------------------------------------------------------------------- /upymysql/util.py: -------------------------------------------------------------------------------- 1 | try: 2 | import struct 3 | except ImportError: 4 | import ustruct as struct 5 | 6 | 7 | def byte2int(b): 8 | if isinstance(b, int): 9 | return b 10 | else: 11 | 
return struct.unpack("!B", b)[0] 12 | 13 | 14 | def int2byte(i): 15 | return struct.pack("!B", i) 16 | 17 | 18 | def join_bytes(bs): 19 | if len(bs) == 0: 20 | return b"" 21 | else: 22 | rv = bs[0] 23 | for b in bs[1:]: 24 | rv += b 25 | return rv 26 | --------------------------------------------------------------------------------
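A quick sketch of the lookup helpers defined in upymysql/charset.py above. These usage sketches assume the upymysql package is importable on your target; a few modules pull in MicroPython-specific imports such as utime, so under CPython you may need a small stub or to import the module file directly.

from upymysql.charset import charset_by_id, charset_by_name, charset_to_encoding

# Lookup by the numeric id carried in column metadata (33 is utf8_general_ci above).
utf8 = charset_by_id(33)
print(utf8.name, utf8.collation)        # utf8 utf8_general_ci

# Lookup by name returns the default collation for that charset (utf8mb4 -> id 45).
mb4 = charset_by_name('utf8mb4')
print(mb4.id, mb4.is_default)           # 45 True

# utf8mb4 maps to Python's plain 'utf8' codec.
print(mb4.encoding)                     # utf8
print(charset_to_encoding('utf8mb4'))   # utf8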
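The capability flags in upymysql/constants/CLIENT.py are plain bit masks, and CAPABILITIES is the static set this client advertises; testing a flag is a bitwise AND. A minimal check, consistent with the values listed above:

from upymysql.constants import CLIENT

assert CLIENT.CAPABILITIES & CLIENT.PROTOCOL_41
assert CLIENT.CAPABILITIES & CLIENT.MULTI_STATEMENTS

# CONNECT_WITH_DB is not part of the static CAPABILITIES value; a connection
# would have to OR it in separately when a database name is supplied.
assert not (CLIENT.CAPABILITIES & CLIENT.CONNECT_WITH_DB)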
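The escape_* functions in upymysql/converters.py turn Python values into SQL literal text through the encoders table. A small illustration of the table-driven escaping; note that converters.py itself imports utime, so run this on MicroPython or provide a trivial utime shim under CPython.

from upymysql.converters import escape_item, escape_string, escape_bytes

print(escape_item(True, 'utf8'))      # 1
print(escape_item(42, 'utf8'))        # 42
print(escape_item("it's", 'utf8'))    # 'it\'s'
print(escape_item([1, "a"], 'utf8'))  # (1,'a')

# escape_string escapes without adding surrounding quotes; the newline becomes
# the two-character sequence backslash-n.
print(escape_string("line\nbreak"))   # line\nbreak

# bytes and bytearray values become _binary literals.
print(escape_bytes(b"a\0b"))          # _binary'a\0b'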
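executemany() in upymysql/cursors.py only batches simple bulk INSERT/REPLACE statements: RE_INSERT_VALUES splits the query into a prefix, a VALUES template and an optional ON DUPLICATE suffix, and _do_execute_many then renders each argument tuple through the template and joins the results into one statement up to max_stmt_length. A sketch of the regex side, run under CPython's re module (MicroPython's ure supports a smaller syntax, so behaviour there may differ):

from upymysql.cursors import RE_INSERT_VALUES

q = "INSERT INTO users (name, age) VALUES (%s, %s) ON DUPLICATE KEY UPDATE age = age"
m = RE_INSERT_VALUES.match(q)
print(m.group(1))   # 'INSERT INTO users (name, age) VALUES ' (prefix, trailing space kept)
print(m.group(2))   # '(%s, %s)'  (per-row VALUES template)
print(m.group(3))   # ' ON DUPLICATE KEY UPDATE age = age'  (optional suffix)

When the regex does not match, executemany() simply falls back to calling execute() once per argument set, as the code above shows.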
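Cursor.execute() accepts either positional %s placeholders with a tuple or list, or named %(name)s placeholders with a dict, and mogrify() returns the interpolated text without sending it. A hedged sketch of typical use: the connect() call, host, credentials and the users table are illustrative assumptions, not something shown in the listing above.

import upymysql   # assumes the package exposes a PyMySQL-style connect()

conn = upymysql.connect(host='192.168.1.10', user='demo', password='demo', db='test')
cur = conn.cursor()

# Positional placeholders with a tuple of parameters.
cur.execute("SELECT id, name FROM users WHERE id = %s AND active = %s", (7, True))
print(cur.fetchone())

# Named placeholders with a dict of parameters.
cur.execute("SELECT id, name FROM users WHERE name = %(name)s", {"name": "it's"})
print(cur.fetchall())

# mogrify() shows the exact query string that execute() would send.
print(cur.mogrify("SELECT id FROM users WHERE name = %(name)s", {"name": "it's"}))

cur.close()
conn.close()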
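DictCursor returns each row as a dict keyed by column name; as its _do_get_result() shows, a duplicated column name is disambiguated as table_name.column. Another sketch on top of the assumed connection from the previous example; passing the cursor class to conn.cursor() mirrors PyMySQL's API and is an assumption about connections.py, which is not shown here.

from upymysql.cursors import DictCursor

cur = conn.cursor(DictCursor)
cur.execute("SELECT id, name FROM users LIMIT 1")
row = cur.fetchone()
print(row)            # e.g. {'id': 1, 'name': 'alice'}
print(row['name'])
cur.close()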
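SSCursor streams rows instead of buffering them, which keeps memory flat for large result sets at the cost of no backwards scrolling and no row count until the set is exhausted (see its docstring above). A sketch with the same assumed connection; handle() is a hypothetical per-row callback.

from upymysql.cursors import SSCursor

cur = conn.cursor(SSCursor)
cur.execute("SELECT id FROM big_table")

# Iterating the cursor goes through fetchall_unbuffered(), one row at a time.
for row in cur:
    handle(row)

# close() drains any unread rows before releasing the cursor.
cur.close()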
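callproc() binds each argument to a server-side variable named @_procname_n and then issues CALL; as its docstring explains, OUT and INOUT values are only reachable by selecting those variables after all result sets have been consumed. A sketch with the assumed connection; get_total is a hypothetical procedure with one IN and one OUT parameter.

cur = conn.cursor()
cur.callproc('get_total', (2020, 0))   # binds @_get_total_0 and @_get_total_1

# Drain the result sets produced by the procedure (plus the empty set CALL itself adds).
while cur.nextset():
    pass

# Read the OUT value back through its server variable.
cur.execute("SELECT @_get_total_1")
print(cur.fetchone())
cur.close()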
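optionfile.Parser is a RawConfigParser whose get() strips one pair of surrounding single or double quotes, matching how MySQL option files quote values. A quick check; configparser ships with CPython, so on MicroPython a configparser port would be needed.

from upymysql.optionfile import Parser

with open('my.cnf', 'w') as f:
    f.write('[client]\nuser = "travis"\npassword = \'secret\'\n')

p = Parser()
p.read('my.cnf')
print(p.get('client', 'user'))       # travis  (double quotes removed)
print(p.get('client', 'password'))   # secret  (single quotes removed)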
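times.py returns plain time tuples from utime.localtime() instead of datetime objects, since datetime is not available on MicroPython. On a MicroPython target the helpers behave roughly like this; the exact values depend on the port's epoch (2000-01-01 on many boards), so treat the outputs as indicative only.

from upymysql.times import DateFromTicks, TimeFromTicks, TimestampFromTicks

ticks = 0   # seconds since the port's epoch
print(DateFromTicks(ticks))        # e.g. (2000, 1, 1)
print(TimeFromTicks(ticks))        # e.g. (0, 0, 0)
print(TimestampFromTicks(ticks))   # e.g. (2000, 1, 1, 0, 0, 0)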
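util.py keeps the small byte helpers used by the protocol code; they are easy to sanity-check directly. 0x0e is COM_PING from constants/COMMAND.py above.

from upymysql.util import byte2int, int2byte, join_bytes

print(int2byte(0x0e))              # b'\x0e'
print(byte2int(b'\x0e'))           # 14
print(byte2int(14))                # 14  (ints pass straight through)
print(join_bytes([b'ab', b'cd']))  # b'abcd'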