├── .gitattributes ├── .gitignore ├── LICENSE.txt ├── README.md ├── buildscripts ├── condarecipe │ ├── bld.bat │ ├── build.sh │ ├── meta.yaml │ └── run_test.py ├── fetch-dependencies └── jenkins-build ├── dbadapter ├── __init__.py ├── _version.py ├── db.py ├── examples │ └── README ├── lib │ ├── Converters.pyx │ ├── __init__.py │ ├── _stdint.h │ ├── converter_functions.c │ ├── converter_functions.h │ ├── errors.py │ ├── field_info.c │ ├── field_info.h │ ├── khash.h │ ├── kstring.c │ ├── kstring.h │ └── kvec.h ├── pyodbc │ ├── INSTALL.rst │ ├── MANIFEST.in │ ├── README.rst │ ├── __init__.py │ ├── bench │ │ ├── leak-detector.py │ │ ├── market-memprof.py │ │ └── market.py │ ├── odbc-sample-cfg │ │ ├── odbc.ini │ │ ├── odbcinst.ini │ │ └── tests-numpy.cfg │ ├── setup_bak.py │ ├── src │ │ ├── buffer.cpp │ │ ├── buffer.h │ │ ├── cnxninfo.cpp │ │ ├── cnxninfo.h │ │ ├── connection.cpp │ │ ├── connection.h │ │ ├── cursor.cpp │ │ ├── cursor.h │ │ ├── dbspecific.h │ │ ├── errors.cpp │ │ ├── errors.h │ │ ├── getdata.cpp │ │ ├── getdata.h │ │ ├── npcontainer.cpp │ │ ├── npcontainer.h │ │ ├── params.cpp │ │ ├── params.h │ │ ├── pyodbc.h │ │ ├── pyodbc.rc │ │ ├── pyodbccompat.cpp │ │ ├── pyodbccompat.h │ │ ├── pyodbcdbg.cpp │ │ ├── pyodbcmodule.cpp │ │ ├── pyodbcmodule.h │ │ ├── resource.h │ │ ├── row.cpp │ │ ├── row.h │ │ ├── sqlwchar.cpp │ │ ├── sqlwchar.h │ │ └── wrapper.h │ ├── tests │ │ ├── README.rst │ │ ├── runtests.py │ │ ├── test_big_fetch.py │ │ ├── test_issue_89.py │ │ ├── test_issue_90.py │ │ ├── test_money.py │ │ ├── test_set_text_limit.py │ │ ├── test_text.py │ │ ├── test_unicode.py │ │ └── unittest_support.py │ ├── tests2 │ │ ├── accesstests.py │ │ ├── dbapi20.py │ │ ├── dbapitests.py │ │ ├── empty.accdb │ │ ├── empty.mdb │ │ ├── exceltests.py │ │ ├── freetdstests.py │ │ ├── informixtests.py │ │ ├── mysqltests.py │ │ ├── pgtests.py │ │ ├── sqlite.db │ │ ├── sqlitetests.py │ │ ├── sqlservertests.py │ │ ├── test.xls │ │ ├── testbase.py │ │ └── testutils.py │ ├── 
tests3 │ │ ├── accesstests.py │ │ ├── dbapi20.py │ │ ├── dbapitests.py │ │ ├── exceltests.py │ │ ├── informixtests.py │ │ ├── mysqltests.py │ │ ├── pgtests.py │ │ ├── sqlitetests.py │ │ ├── sqlservertests.py │ │ ├── test.py │ │ ├── testbase.py │ │ └── testutils.py │ ├── tests_decimal │ │ └── test_decimal.py │ ├── tests_numpy │ │ ├── generic.py │ │ ├── odbc.cfg │ │ ├── test_crash.py │ │ └── testutils.py │ ├── tests_sqlcancel │ │ └── test_sqlcancel.py │ ├── utils │ │ └── pyodbcconf │ │ │ └── pyodbcconf.cpp │ └── web │ │ ├── docs.html │ │ ├── index.html │ │ ├── license.html │ │ ├── styles.css │ │ └── tutorial.html ├── pyodbc_setup.py ├── tests │ ├── Makefile │ ├── __init__.py │ ├── conftest.py │ ├── data │ │ └── benchmarks.py │ ├── generate.py │ └── test_ints.c └── vertica.py ├── docs ├── Makefile ├── conf.py ├── eula.rst ├── index.rst ├── install.rst ├── make.bat ├── pyodbc.rst ├── pyodbc_cancel.rst ├── pyodbc_enhancedcapabilities.rst ├── pyodbc_firststeps.rst ├── release-notes.rst └── textadapter_examples.rst ├── environment.yml ├── setup.py ├── setupegg.py └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | dbadapter/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Editor temporary/working/backup files # 2 | ######################################### 3 | .#* 4 | [#]*# 5 | *~ 6 | *$ 7 | *.bak 8 | *.diff 9 | *.org 10 | .project 11 | *.rej 12 | .settings/ 13 | .*.sw[nop] 14 | .sw[nop] 15 | *.tmp 16 | 17 | # Compiled source # 18 | ################### 19 | *.a 20 | *.com 21 | *.class 22 | *.dll 23 | *.exe 24 | *.o 25 | *.py[ocd] 26 | *.so 27 | 28 | # Python files # 29 | ################ 30 | # setup.py working directory 31 | build 32 | # sphinx build directory 33 | _build 34 | # setup.py dist directory 35 | dist 36 | doc/build 37 | doc/cdoc/build 
38 | # Egg metadata 39 | *.egg-info 40 | # The shelf plugin uses this dir 41 | ./.shelf 42 | 43 | # Patches # 44 | ########### 45 | *.patch 46 | *.diff 47 | 48 | # OS generated files # 49 | ###################### 50 | .DS_Store* 51 | .VolumeIcon.icns 52 | .fseventsd 53 | Icon? 54 | .gdb_history 55 | ehthumbs.db 56 | Thumbs.db 57 | 58 | # IPython generated files # 59 | ########################### 60 | .ipynb_checkpoints 61 | 62 | # Specific cython generated c files 63 | ###################### 64 | 65 | # Generated data files for /tests and /examples 66 | dbadapter/tests/data/fixedwidths 67 | dbadapter/tests/data/floats 68 | dbadapter/tests/data/ints 69 | dbadapter/tests/data/ints.gz 70 | dbadapter/tests/data/missingvalues 71 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2009-2016, Continuum Analytics, Inc. and contributors All 2 | rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | Neither the name of Continuum Analytics nor the names of any contributors 15 | may be used to endorse or promote products derived from this software 16 | without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 21 | ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 22 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 24 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 25 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 26 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 28 | THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DBAdapter 2 | Database adapters forked from IOPro 3 | 4 | DBAdapter is a Python module containing optimized data adapters for importing 5 | data from a variety of database sources into NumPy arrays and Pandas DataFrame. 6 | Database adapter leverages an optimized pyodbc module for accessing any 7 | relational database that supports the ODBC interface (SQL Server, PostgreSQL, 8 | MySQL, etc). 9 | 10 | Build Requirements 11 | ------------------ 12 | 13 | Building DBAdapter requires a number of dependencies. In addition to a C/C++ dev 14 | environment, the following modules are needed, which can be installed via conda: 15 | 16 | * NumPy 17 | * Pandas 18 | * unixodbc 2.3.4 (C lib, Linux only) 19 | 20 | Building Conda Package 21 | ---------------------- 22 | 23 | Note: If building under Windows, make sure the following commands are issued 24 | within the Visual Studio command prompt for version of Visual Studio that 25 | matches the version of Python you're building for. Python 2.6 and 2.7 needs 26 | Visual Studio 2008, Python 3.3 and 3.4 needs Visual Studio 2010, and Python 27 | 3.5 needs Visual Studio 2015. 28 | 29 | 1. Build DBAdapter using the following command: 30 | ``` 31 | conda build buildscripts/condarecipe --python 3.5 32 | ``` 33 | 34 | 1. 
DBAdapter can now be installed from the built conda package: 35 | ``` 36 | conda install dbadapter --use-local 37 | ``` 38 | 39 | Building By Hand 40 | ---------------- 41 | 42 | Note: If building under Windows, make sure the following commands are issued 43 | within the Visual Studio command prompt for the version of Visual Studio that 44 | matches the version of Python you're building for. Python 2.6 and 2.7 need 45 | Visual Studio 2008, Python 3.3 and 3.4 need Visual Studio 2010, and Python 46 | 3.5 needs Visual Studio 2015. 47 | 48 | For building DBAdapter for local development/testing: 49 | 50 | 1. Install most of the above dependencies into an environment called 'dbadapter': 51 | ``` 52 | conda env create -f environment.yml 53 | ``` 54 | 55 | Be sure to activate the new dbadapter environment before proceeding. 56 | 57 | 1. Build DBAdapter using Cython/distutils: 58 | ``` 59 | python setup.py build_ext --inplace 60 | ``` 61 | 62 | Testing 63 | ------- 64 | 65 | Tests can be run by calling the dbadapter module's test function. By default 66 | only the TextAdapter tests will be run: 67 | 68 | TODO: The pyodbc tests live in the `dbadapter/pyodbc/test*` directories. They are not being run (yet). 69 | ```python 70 | python -Wignore -c 'import dbadapter; dbadapter.test()' 71 | ``` 72 | 73 | (Note: `numpy.testing` might produce a FutureWarning that is not directly 74 | relevant to these unit tests).
75 | 76 | 77 | Related projects 78 | ---------------- 79 | 80 | - TextAdapter (CSV, JSON, etc): https://github.com/ContinuumIO/TextAdapter 81 | - PostgresAdapter (PostgreSQL): https://github.com/ContinuumIO/PostgresAdapter 82 | - AccumuloAdapter (Apache Accumulo): https://github.com/ContinuumIO/AccumuloAdapter 83 | - MongoAdapter (MongoDB): https://github.com/ContinuumIO/MongoAdapter 84 | 85 | -------------------------------------------------------------------------------- /buildscripts/condarecipe/bld.bat: -------------------------------------------------------------------------------- 1 | copy %LIBRARY_LIB%\boost_thread-vc90-mt-1_60.lib %LIBRARY_LIB%\libboost_thread-vc90-mt-1_60.lib 2 | %PYTHON% setup.py install 3 | if errorlevel 1 exit 1 4 | -------------------------------------------------------------------------------- /buildscripts/condarecipe/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ## Use unixodbc for building pyodbc on OS X systems 4 | ## without iodbc installed 5 | #export UNIXODBC_PATH=/Users/jayvius/unixODBC-2.3.4/ 6 | 7 | $PYTHON setup.py install 8 | -------------------------------------------------------------------------------- /buildscripts/condarecipe/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: dbadapter 3 | version: "2.0.0" 4 | 5 | source: 6 | path: ../../ 7 | 8 | build: 9 | number: 0 10 | 11 | requirements: 12 | build: 13 | - python 14 | - numpy 15 | - cython 16 | - unixodbc >=2.2.14 [linux] 17 | - postgresql 9.1.4 18 | 19 | run: 20 | - python 21 | - numpy 22 | - pandas 23 | - six 24 | - ordereddict [py26] 25 | - unixodbc >=2.2.14 [linux] 26 | - postgresql 9.1.4 27 | - psqlodbc >=09.01.0100 28 | - sqlite >=0.96 29 | 30 | # According to the pyodbc/INSTALL.rst document 31 | #- mysql-connector-python >=5.1.10 32 | 33 | test: 34 | requires: 35 | - nose 36 | - pytest 37 | 38 | imports: 39 | - dbadapter 40 | - 
dbadapter.pyodbc 41 | 42 | about: 43 | home: http://www.continuum.io/ 44 | license: BSD 45 | summary: python interface for SQL databases 46 | -------------------------------------------------------------------------------- /buildscripts/condarecipe/run_test.py: -------------------------------------------------------------------------------- 1 | import dbadapter 2 | 3 | print('TODO: write some tests asserting that DBAdapter functions properly') 4 | #assert dbadapter.test() 5 | 6 | print('dbadapter.__version__: %s' % dbadapter.__version__) 7 | -------------------------------------------------------------------------------- /buildscripts/fetch-dependencies: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | IOPRO_INCLUDE_PATH=$HOME/anaconda/envs/iopro-test-np1.7/include/python2.7 5 | 6 | WHERE="$HOME/dependencies/" 7 | 8 | function mk_depend_path() 9 | { 10 | echo $WHERE$1 11 | } 12 | 13 | if [ ! -d $(mk_depend_path '') ]; then 14 | mkdir $(mk_depend_path '') 15 | fi 16 | 17 | pushd $(mk_depend_path '') >/dev/null 18 | 19 | 20 | # get a unixODBC driver manager 21 | if [ ! -d unixODBC-2.3.1 ]; then 22 | if [ ! 
-f unixODBC-2.3.1.tar.gz ]; then 23 | wget ftp://ftp.unixodbc.org/pub/unixODBC/unixODBC-2.3.1.tar.gz 24 | fi 25 | 26 | tar -zxvf unixODBC-2.3.1.tar.gz 27 | pushd unixODBC-2.3.1 >/dev/null 28 | ./configure 29 | make 30 | popd >/dev/null 31 | fi 32 | 33 | if [ -f unixODBC-2.3.1.tar.gz ]; then 34 | # leave it clean 35 | rm unixODBC-2.3.1.tar.gz 36 | fi 37 | 38 | 39 | IOPRO_INCLUDE_PATH=$(mk_depend_path unixODBC-2.3.1):$IOPRO_INCLUDE_PATH 40 | IOPRO_INCLUDE_PATH=$(mk_depend_path unixODBC-2.3.1/include):$IOPRO_INCLUDE_PATH 41 | export IOPRO_INCLUDE_PATH 42 | 43 | echo 'IOPRO_INCLUDE_PATH=' $IOPRO_INCLUDE_PATH 44 | 45 | IOPRO_LIBRARY_PATH=$(mk_depend_path unixODBC-2.3.1):$IOPRO_LIBRARY_PATH 46 | export IOPRO_LIBRARY_PATH 47 | 48 | echo 'IOPRO_LIBRARY_PATH=' $IOPRO_LIBRARY_PATH 49 | 50 | LD_LIBRARY_PATH=$(mk_depend_path unixODBC-2.3.1/DriverManager/.libs):$LD_LIBRARY_PATH 51 | export LD_LIBRARY_PATH 52 | 53 | echo 'LD_LIBRARY_PATH=' $LD_LIBRARY_PATH 54 | 55 | popd >/dev/null 56 | 57 | printf '\n\nBuilding...\n' 58 | python setup.py build_ext --inplace --include-dirs=$IOPRO_INCLUDE_PATH --library-dirs=$IOPRO_LIBRARY_PATH || exit 1 59 | 60 | exit 61 | 62 | 63 | -------------------------------------------------------------------------------- /buildscripts/jenkins-build: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | PYTHON_VERSION=2.7 4 | 5 | if [ "${PYTHON_VERSION}" == "" ]; then 6 | echo You must select a Python version with the PYTHON_VERSION variable. 7 | exit 1 8 | fi 9 | 10 | # Start from scratch 11 | if [ -d build ]; then 12 | rm -rf build 13 | fi 14 | mkdir build 15 | cd build 16 | 17 | # Use conda to create a conda environment of the required 18 | # python version and containing the dependencies. 
19 | export PYENV_PREFIX=${WORKSPACE}/build/pyenv 20 | rm -rf ${PYENV_PREFIX} 21 | ~/anaconda/bin/conda create --yes -p ${PYENV_PREFIX} anaconda python=${PYTHON_VERSION} numpy=1.7 || exit 1 22 | export PATH=${PYENV_PREFIX}/bin:${PATH} 23 | 24 | # JNB: Get rid of any iopro that conda may have installed 25 | rm -rf ${PYENV_PREFIX}/lib/python2.7/site-packages/iopro* 26 | 27 | 28 | # Get and build unix odbc lib 29 | if [ ! -f ${WORKSPACE}/unixODBC-2.3.1.tar.gz ]; then 30 | cd .. 31 | wget ftp://ftp.unixodbc.org/pub/unixODBC/unixODBC-2.3.1.tar.gz 32 | cd build 33 | fi 34 | 35 | tar -zxvf ../unixODBC-2.3.1.tar.gz 36 | cd unixODBC-2.3.1 37 | ./configure 38 | make 39 | cd .. 40 | 41 | # Set up include and lib paths since we're not installing in default system paths 42 | export IOPRO_INCLUDE_PATH=${WORKSPACE}/build/unixODBC-2.3.1:${WORKSPACE}/build/unixODBC-2.3.1/include:$IOPRO_INCLUDE_PATH 43 | export IOPRO_LIBRARY_PATH=${WORKSPACE}/build/unixODBC-2.3.1/DriverManager/.libs:$IOPRO_LIBRARY_PATH 44 | export IOPRO_INCLUDE_PATH=~/anaconda/include/python${PYTHON_VERSION}:$IOPRO_INCLUDE_PATH 45 | export IOPRO_LIBRARY_PATH=~/anaconda/lib:$IOPRO_LIBRARY_PATH 46 | 47 | export LD_LIBRARY_PATH=${WORKSPACE}/build/unixODBC-2.3.1/DriverManager/.libs:$LD_LIBRARY_PATH 48 | 49 | cd .. 50 | python setup.py build_ext --inplace --include-dirs=$IOPRO_INCLUDE_PATH --library-dirs=$IOPRO_LIBRARY_PATH || exit 1 51 | python -c 'import iopro; import sys; sys.exit(1 - iopro.test(num_records=1000))' 52 | -------------------------------------------------------------------------------- /dbadapter/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | DBAdapter 3 | ~~~~~ 4 | 5 | DBAdapter provides tools to interface SQL databases in a fast, memory-efficient way. 
6 | """ 7 | from __future__ import absolute_import 8 | 9 | from dbadapter._version import get_versions 10 | __version__ = get_versions()['version'] 11 | del get_versions 12 | 13 | from dbadapter.lib.errors import (AdapterException, AdapterIndexError, 14 | ArgumentError, ConfigurationError, 15 | DataIndexError, DataTypeError, 16 | InternalInconsistencyError, NoSuchFieldError, 17 | ParserError, SourceError, SourceNotFoundError) 18 | 19 | 20 | def test(verbosity=1, num_records=100000, results=[]): 21 | #from textadapter.tests.test_DBAdapter import run as run_dbadapter_tests 22 | #result_text = run_dbadapter_tests(verbosity=verbosity, 23 | # num_records=num_records) 24 | #results.append(result_text) 25 | 26 | #from textadapter.tests.test_io import run as run_io_tests 27 | #result_text = run_io_tests(verbosity=verbosity) 28 | #results.append(result_text) 29 | 30 | for result in results: 31 | if not result.wasSuccessful(): 32 | return False 33 | return True 34 | 35 | # pyodbc module import triggers license message 36 | import dbadapter.pyodbc 37 | -------------------------------------------------------------------------------- /dbadapter/db.py: -------------------------------------------------------------------------------- 1 | from pyodbc import * 2 | -------------------------------------------------------------------------------- /dbadapter/examples/README: -------------------------------------------------------------------------------- 1 | To run examples, first generate example data using: 2 | 3 | cd ../tests 4 | python generate.py 500 # number of records 5 | -------------------------------------------------------------------------------- /dbadapter/lib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContinuumIO/DBAdapter/19a49b4963d50df27cc930cf27693f31fd8e0426/dbadapter/lib/__init__.py -------------------------------------------------------------------------------- 
/dbadapter/lib/_stdint.h: -------------------------------------------------------------------------------- 1 | #ifndef STDINT_H 2 | #define STDINT_H 3 | 4 | 5 | #if defined(_MSC_VER) && _MSC_VER < 1600 6 | /* Visual Studio before 2010 didn't have stdint.h */ 7 | #include 8 | typedef signed char int8_t; 9 | typedef short int16_t; 10 | typedef int int32_t; 11 | typedef __int64 int64_t; 12 | typedef unsigned char uint8_t; 13 | typedef unsigned short uint16_t; 14 | typedef unsigned int uint32_t; 15 | typedef unsigned __int64 uint64_t; 16 | #define INT8_MIN SCHAR_MIN 17 | #define INT8_MAX SCHAR_MAX 18 | #define INT16_MIN SHRT_MIN 19 | #define INT16_MAX SHRT_MAX 20 | #define INT32_MIN INT_MIN 21 | #define INT32_MAX INT_MAX 22 | #define UINT8_MAX UCHAR_MAX 23 | #define UINT16_MAX USHRT_MAX 24 | #define UINT32_MAX UINT_MAX 25 | #define UINT64_MAX _UI64_MAX 26 | #else 27 | #include 28 | #endif 29 | 30 | 31 | #endif 32 | -------------------------------------------------------------------------------- /dbadapter/lib/converter_functions.h: -------------------------------------------------------------------------------- 1 | #ifndef CONVERTERS_H 2 | #define CONVERTERS_H 3 | 4 | #if defined(_MSC_VER) && _MSC_VER < 1600 5 | /* Visual Studio before 2010 didn't have stdint.h */ 6 | typedef signed char int8_t; 7 | typedef short int16_t; 8 | typedef int int32_t; 9 | typedef __int64 int64_t; 10 | typedef unsigned char uint8_t; 11 | typedef unsigned short uint16_t; 12 | typedef unsigned int uint32_t; 13 | typedef unsigned __int64 uint64_t; 14 | #define INT8_MIN SCHAR_MIN 15 | #define INT8_MAX SCHAR_MAX 16 | #define INT16_MIN SHRT_MIN 17 | #define INT16_MAX SHRT_MAX 18 | #define INT32_MIN INT_MIN 19 | #define INT32_MAX INT_MAX 20 | #define UINT8_MAX UCHAR_MAX 21 | #define UINT16_MAX USHRT_MAX 22 | #define UINT32_MAX UINT_MAX 23 | #else 24 | #include 25 | #endif 26 | 27 | #include 28 | 29 | 30 | typedef enum 31 | { 32 | CONVERT_SUCCESS, 33 | CONVERT_SUCCESS_TYPE_CHANGED, 34 | 
CONVERT_ERROR, 35 | CONVERT_ERROR_OVERFLOW, 36 | CONVERT_ERROR_TRUNCATE, 37 | CONVERT_ERROR_INPUT_TYPE, 38 | CONVERT_ERROR_INPUT_SIZE, 39 | CONVERT_ERROR_OUTPUT_SIZE, 40 | CONVERT_ERROR_INPUT_STRING, 41 | CONVERT_ERROR_USER_CONVERTER, 42 | CONVERT_ERROR_OBJECT_CONVERTER, 43 | CONVERT_ERROR_NUMBA, 44 | CONVERT_ERROR_LAST 45 | } ConvertError; 46 | 47 | 48 | typedef enum 49 | { 50 | UINT_CONVERTER_FUNC, 51 | INT_CONVERTER_FUNC, 52 | FLOAT_CONVERTER_FUNC, 53 | STRING_CONVERTER_FUNC, 54 | STRING_OBJECT_CONVERTER_FUNC, 55 | NUM_CONVERTER_FUNCS 56 | } DefaultConverterFuncs; 57 | 58 | 59 | /* 60 | * converter function signature for functions that convert strings to a specific 61 | * data type and stores in output buffer 62 | * Inputs: 63 | * input: null terminated C string representing value to convert 64 | * input_len: length of input (redundant but input string originally was not 65 | * null terminated 66 | * input_type: indicates type of input (not used by every converter func) 67 | * output: pointer to memory block where output value should be stored 68 | * output_len: length of output reserved for output value 69 | * arg: optional arg value/struct specific to each converter func 70 | * Output: 71 | * error code defined above in ConvertError enum 72 | */ 73 | typedef ConvertError (*converter_func_ptr)(const char *input, 74 | uint32_t input_len, 75 | int input_type, 76 | void *output, 77 | uint32_t output_len, 78 | void *arg); 79 | 80 | /* 81 | * The following conversion functions follow conversion function signature 82 | * defined above 83 | */ 84 | 85 | /* Convert null terminated C string to signed int */ 86 | ConvertError str2int_converter(const char *input, uint32_t input_len, 87 | int input_type, void *output, uint32_t output_len, void *arg); 88 | /* Convert null terminated C string to unsigned int */ 89 | ConvertError str2uint_converter(const char *input, uint32_t input_len, 90 | int input_type, void *output, uint32_t output_len, void *arg); 91 | /* Convert null 
terminated C string to float/double */ 92 | ConvertError str2float_converter(const char *input, uint32_t input_len, 93 | int input_type, void *output, uint32_t output_len, void *arg); 94 | /* Copy null terminated C string to output of possibly different length */ 95 | ConvertError str2str_converter(void *input, uint32_t input_len, 96 | int input_type, void *output, uint32_t output_len, void *arg); 97 | /* Convert null terminated C string to complex number */ 98 | ConvertError str2complex_converter(void *input, uint32_t input_len, 99 | int input_type, void *output, uint32_t output_len, void *arg); 100 | 101 | 102 | /* 103 | * Extract signed int of various sizes from memory block and cast to 104 | * signed int64 if needed. Input integer size is specified by input_len argument. 105 | */ 106 | ConvertError get_int_value(void *input, uint32_t input_len, int64_t *value); 107 | 108 | /* 109 | * Extract unsigned int of various sizes from memory block and cast to 110 | * unsigned int64 if needed. Input integer size is specified by input_len argument. 111 | */ 112 | ConvertError get_uint_value(void *input, uint32_t input_len, uint64_t *value); 113 | 114 | /* 115 | * Extract double/float from from memory block and cast to 116 | * double if needed. Input floating point size is specified by input_len argument. 117 | */ 118 | ConvertError get_float_value(void *input, uint32_t input_len, double *value); 119 | 120 | /* 121 | * Save signed int64 value to memory block, casting to appropriate output integer 122 | * size if needed. Output integer size is specified by output_len arg. 123 | */ 124 | ConvertError put_int_value(void *output, uint32_t output_len, int64_t value); 125 | 126 | /* 127 | * Save unsigned int64 value to memory block, casting to appropriate output integer 128 | * size if needed. Output integer size is specified by output_len arg. 
129 | */ 130 | ConvertError put_uint_value(void *output, uint32_t output_len, uint64_t value); 131 | 132 | /* 133 | * Save double/float value to memory block, casting to appropriate output floating 134 | * point size if needed. Output float size is specified by output_len arg. 135 | */ 136 | ConvertError put_float_value(void *output, uint32_t output_len, double value); 137 | 138 | #endif 139 | -------------------------------------------------------------------------------- /dbadapter/lib/errors.py: -------------------------------------------------------------------------------- 1 | class AdapterException(Exception): 2 | """Generic adapter exception for reporting reading, parsing, and 3 | converting issues. All adapter exceptions have the following instance 4 | variables in common: 5 | 6 | * `record` - record reference where error occurred 7 | * `field` - field reference where error occurred 8 | """ 9 | def __init__(self, message=None): 10 | super(AdapterException, self).__init__(message) 11 | 12 | self.record = None 13 | self.field = None 14 | 15 | class SourceError(AdapterException): 16 | """Raised on error while reading or talking to a data source. It might be 17 | seek or read error for file sources or broken connection for database 18 | sources.""" 19 | pass 20 | 21 | class SourceNotFoundError(SourceError): 22 | """Raised when data source (file, table, ...)
was not found.""" 23 | def __init__(self, message=None, source=None): 24 | super(SourceNotFoundError, self).__init__(message) 25 | self.source = source 26 | 27 | class ConfigurationError(AdapterException): 28 | """Raised when objects are mis-configured.""" 29 | pass 30 | 31 | class NoSuchFieldError(AdapterException): 32 | """Raised when non-existent field is referenced, either by name or position index.""" 33 | pass 34 | 35 | class DataIndexError(AdapterException): 36 | """Raised for example when a record is not found in record index in indexed 37 | data source.""" 38 | pass 39 | 40 | class DataTypeError(AdapterException): 41 | """Raised on data type mis-match or when type conversion fails.""" 42 | pass 43 | 44 | class ParserError(AdapterException): 45 | """Raised when there is problem with parsing source data, for example in 46 | broken text file with CSV. The `token` instance variable contains problematic 47 | token that was not parsed correctly.""" 48 | def __init__(self, message=None, token=None): 49 | super(ParserError, self).__init__(message) 50 | self.token = token 51 | 52 | class ArgumentError(AdapterException): 53 | """Invalid arguments used in calling dbadapter functions/methods""" 54 | pass 55 | 56 | class InternalInconsistencyError(AdapterException): 57 | """Raised when the library goes into a state that is not expected to 58 | happen.""" 59 | pass 60 | 61 | class AdapterIndexError(AdapterException): 62 | """ Raised when record number or slice is invalid """ 63 | pass 64 | 65 | -------------------------------------------------------------------------------- /dbadapter/lib/field_info.h: -------------------------------------------------------------------------------- 1 | #ifndef FIELD_INFO_H 2 | #define FIELD_INFO_H 3 | 4 | #include "converter_functions.h" 5 | 6 | 7 | typedef struct missing_values_t 8 | { 9 | char **missing_values; 10 | uint32_t *missing_value_lens; 11 | uint32_t num_missing_values; 12 | } MissingValues; 13 | 14 | 15 | typedef struct 
fill_value_t 16 | { 17 | void *fill_value; 18 | int loose; 19 | } FillValue; 20 | 21 | 22 | typedef struct field_info_t 23 | { 24 | char *name; 25 | 26 | /* converter function to convert data to target data type */ 27 | converter_func_ptr converter; 28 | void *converter_arg; 29 | 30 | MissingValues missing_values; 31 | 32 | FillValue fill_value; 33 | 34 | /* field width for fixed width data */ 35 | uint32_t input_field_width; 36 | 37 | /* field size in output array */ 38 | uint32_t output_field_size; 39 | 40 | /* flag allows user to fix the type. default, though, is to infer_type */ 41 | int infer_type; 42 | 43 | } FieldInfo; 44 | 45 | 46 | typedef struct field_list_t 47 | { 48 | uint32_t num_fields; 49 | FieldInfo *field_info; 50 | } FieldList; 51 | 52 | 53 | void clear_fields(FieldList *fields); 54 | void set_num_fields(FieldList *fields, uint32_t num_fields); 55 | 56 | void clear_missing_values(MissingValues *missing_values); 57 | void clear_fill_value(FillValue *fill_value); 58 | 59 | void init_missing_values(FieldList *fields, char *field_name, 60 | uint32_t field_num, uint32_t num_missing_values); 61 | 62 | void add_missing_value(FieldList *fields, char *field_name, 63 | uint32_t field_num, char *missing_value, uint32_t missing_value_len); 64 | 65 | void set_fill_value(FieldList *fields, char *field_name, 66 | uint32_t field_num, void *fill_value, uint32_t fill_value_len, int loose); 67 | 68 | uint32_t get_field_size(FieldList *fields, char *field_name, 69 | uint32_t field_num); 70 | uint32_t get_output_record_size(FieldList *fields); 71 | 72 | void set_field_width(FieldList *fields, uint32_t field, uint32_t width); 73 | 74 | /* Resets converter function pointers to null */ 75 | void reset_converters(FieldList *fields); 76 | 77 | /* Sets converter function for specified field with specified field size. 78 | * converter_arg will be passed to converter function when called. 
*/ 79 | void set_converter(FieldList *fields, uint32_t field_num, char *field_name, 80 | uint32_t output_field_size, converter_func_ptr converter, 81 | void *converter_arg); 82 | 83 | /* Initialize the type of each of the fields to be inferred */ 84 | void init_infer_types(FieldList *fields); 85 | 86 | int infer_types(FieldList *fields); 87 | 88 | #endif 89 | -------------------------------------------------------------------------------- /dbadapter/lib/kvec.h: -------------------------------------------------------------------------------- 1 | /* The MIT License 2 | 3 | Copyright (c) 2008, by Attractive Chaos 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | "Software"), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS 20 | BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN 21 | ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 22 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | SOFTWARE. 
24 | */ 25 | 26 | /* 27 | An example: 28 | 29 | #include "kvec.h" 30 | int main() { 31 | kvec_t(int) array; 32 | kv_init(array); 33 | kv_push(int, array, 10); // append 34 | kv_a(int, array, 20) = 5; // dynamic 35 | kv_A(array, 20) = 4; // static 36 | kv_destroy(array); 37 | return 0; 38 | } 39 | */ 40 | 41 | /* 42 | 2008-09-22 (0.1.0): 43 | 44 | * The initial version. 45 | 46 | */ 47 | 48 | #ifndef AC_KVEC_H 49 | #define AC_KVEC_H 50 | 51 | #include 52 | 53 | #define kv_roundup32(x) (--(x), (x)|=(x)>>1, (x)|=(x)>>2, (x)|=(x)>>4, (x)|=(x)>>8, (x)|=(x)>>16, ++(x)) 54 | 55 | #define kvec_t(type) struct { size_t n, m; type *a; } 56 | #define kv_init(v) ((v).n = (v).m = 0, (v).a = 0) 57 | #define kv_destroy(v) free((v).a) 58 | #define kv_A(v, i) ((v).a[(i)]) 59 | #define kv_pop(v) ((v).a[--(v).n]) 60 | #define kv_size(v) ((v).n) 61 | #define kv_max(v) ((v).m) 62 | 63 | #define kv_resize(type, v, s) ((v).m = (s), (v).a = (type*)realloc((v).a, sizeof(type) * (v).m)) 64 | 65 | #define kv_copy(type, v1, v0) do { \ 66 | if ((v1).m < (v0).n) kv_resize(type, v1, (v0).n); \ 67 | (v1).n = (v0).n; \ 68 | memcpy((v1).a, (v0).a, sizeof(type) * (v0).n); \ 69 | } while (0) \ 70 | 71 | #define kv_push(type, v, x) do { \ 72 | if ((v).n == (v).m) { \ 73 | (v).m = (v).m? (v).m<<1 : 2; \ 74 | (v).a = (type*)realloc((v).a, sizeof(type) * (v).m); \ 75 | } \ 76 | (v).a[(v).n++] = (x); \ 77 | } while (0) 78 | 79 | #define kv_pushp(type, v) (((v).n == (v).m)? \ 80 | ((v).m = ((v).m? (v).m<<1 : 2), \ 81 | (v).a = (type*)realloc((v).a, sizeof(type) * (v).m), 0) \ 82 | : 0), ((v).a + ((v).n++)) 83 | 84 | #define kv_a(type, v, i) ((v).m <= (size_t)(i)? \ 85 | ((v).m = (v).n = (i) + 1, kv_roundup32((v).m), \ 86 | (v).a = (type*)realloc((v).a, sizeof(type) * (v).m), 0) \ 87 | : (v).n <= (size_t)(i)? 
(v).n = (i) \ 88 | : 0), (v).a[(i)] 89 | 90 | #endif 91 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/INSTALL.rst: -------------------------------------------------------------------------------- 1 | Installing NumPy-aware pyodbc 2 | ============================= 3 | 4 | Compiling 5 | --------- 6 | 7 | For compiling this library, just use the standard Python procedure:: 8 | 9 | $ python setup.py build_ext -i 10 | 11 | You will need a C++ compiler installed. 12 | 13 | 14 | Running the test suite 15 | ---------------------- 16 | 17 | For testing the library, look into the original documentation in: 18 | 19 | :homepage: https://github.com/mkleehammer/pyodbc 20 | 21 | For testing the NumPy-aware methods specifically, please configure 22 | your ODBC system by installing the next packages:: 23 | 24 | - ODBC system: unixODBC >= 2.2.14 25 | - SQLite backend: SQLite ODBC >= 0.96 26 | - MySQL backend: mysql-connector-odbc >= 5.1.10 27 | - Postgres backend: psqlODBC >= 09.01.0100 28 | 29 | Then make sure that you have proper 'odbcinst.ini' and 'odbc.ini' 30 | files configured in '/etc'. Also, edit the 'tests_numpy/odbc.cfg' 31 | file and configure it to your needs. Here it is an example:: 32 | 33 | [sqlite] 34 | connection-string=DSN=odbcsqlite;Database=test-sqlite.db 35 | [mysql] 36 | connection-string=DSN=myodbc;Database=test 37 | [postgresql] 38 | connection-string=DSN=odbcpostgresql;Database=test 39 | 40 | On Windows, try: 41 | 42 | [sqlite] 43 | connection-string=DSN=SQLite3 Datasource;Database=test-sqlite.db 44 | [mysql] 45 | connection-string=DSN=MySQL55;Database=test 46 | 47 | You may want to have a look at the examples included in the 'samples/' 48 | directory. These offer configurations for SQLite, MySQL and 49 | PostgreSQL ODBC drivers, but you can add a new ODBC driver for any 50 | other database you want. The only restriction is that they must 51 | support the ODBC standard 3.0 or higher. 
52 | 53 | To run the test suite for the different backends configured in 54 | 'tests_numpy/odbc.cfg', just do:: 55 | 56 | $ PYTHONPATH=. python tests_numpy/generic.py [your-backend] 57 | 58 | This will run the NumPy-aware test suite for the selected backend. In 59 | case you don't provide a backend, it defaults to `sqlite`. 60 | 61 | In case some test fails, please report this back. 62 | 63 | Installing 64 | ---------- 65 | 66 | After you run the test suite, you are ready to install. Just do it with:: 67 | 68 | $ [sudo] python setup.py install 69 | 70 | That's all folks! 71 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include src/*.h 2 | include src/*.cpp 3 | include tests/* 4 | include README.rst 5 | include LICENSE.txt 6 | 7 | # Include this file, needed for bdist_rpm 8 | include MANIFEST.in 9 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/README.rst: -------------------------------------------------------------------------------- 1 | 2 | Overview 3 | ======== 4 | 5 | This project is an enhancement of the Python database module for ODBC 6 | that implements the Python DB API 2.0 specification. You can see the 7 | original project here: 8 | 9 | :homepage: http://code.google.com/p/pyodbc 10 | :source: http://github.com/mkleehammer/pyodbc 11 | :source: http://code.google.com/p/pyodbc/source/list 12 | 13 | The enhancements are documented in this file. For general info about 14 | the pyodbc package, please refer to the original project 15 | documentation. 
16 | 17 | This module enhancement requires: 18 | 19 | * Python 2.4 or greater 20 | * ODBC 3.0 or greater 21 | * NumPy 1.5 or greater (1.7 is required for datetime64 support) 22 | 23 | The enhancements in this module consist mainly in the addition of some 24 | new methods for fetching the data after a query and put it in a 25 | variety of NumPy containers. 26 | 27 | Using NumPy as data containers instead of the classical list of tuples 28 | has a couple of advantages: 29 | 30 | 1) The NumPy container is much more compact, and hence, it 31 | requires much less memory, than the original approach. 32 | 33 | 2) As a NumPy container can hold arbitrarily large arrays, it requires 34 | much less object creation than the original approach (one Python 35 | object per datum retrieved). 36 | 37 | This means that this enhancements will allow to fetch data out of 38 | relational databases in a much faster way, while consuming 39 | significantly less resources. 40 | 41 | 42 | Installing 43 | ========== 44 | 45 | Please follow the instructions in 'INSTALL.rst'. 46 | 47 | 48 | API additions 49 | ============= 50 | 51 | Variables 52 | ~~~~~~~~~ 53 | 54 | * `pyodbc.npversion` The version for the NumPy additions 55 | 56 | Methods 57 | ~~~~~~~ 58 | 59 | Cursor.fetchdictarray(size=cursor.arraysize) 60 | -------------------------------------------- 61 | 62 | This is similar to the original `Cursor.fetchmany(size)`, but the data 63 | is returned in a dictionary where the keys are the names of the 64 | columns and the values are NumPy containers. 65 | 66 | For example, it a SELECT is returning 3 columns with names 'a', 'b' 67 | and 'c' and types `varchar(10)`, `integer` and `timestamp`, the 68 | returned object will be something similar to:: 69 | 70 | {'a': array([...], dtype='S11'), 71 | 'b': array([...], dtype=int32), 72 | 'c': array([...], dtype=datetime64[us])} 73 | 74 | Note that the `varchar(10)` type is translated automatically to a 75 | string type of 11 elements ('S11'). 
This is because the ODBC driver 76 | needs one additional space to put the trailing '\0' in strings, and 77 | NumPy needs to provide the room for this. 78 | 79 | Also, it is important to stress that all the `timestamp` types are 80 | translated into a NumPy `datetime64` type with a resolution of 81 | microseconds by default. 82 | 83 | Cursor.fetchsarray(size=cursor.arraysize) 84 | ----------------------------------------- 85 | 86 | This is similar to the original `Cursor.fetchmany(size)`, but the data 87 | is returned in a NumPy structured array, where the name and type of 88 | the fields matches to those resulting from the SELECT. 89 | 90 | Here it is an example of the output for the SELECT above:: 91 | 92 | array([(...), 93 | (...)], 94 | dtype=[('a', '|S11'), ('b', '", result 64 | 65 | 66 | if __name__ == "__main__": 67 | 68 | if len(sys.argv) > 1 and sys.argv[1] == "profile": 69 | profile = True 70 | 71 | # set up a connection 72 | connection = pyodbc.connect( 73 | #'DSN=myodbc3;UID=faltet;PWD=continuum;DATABASE=test') 74 | #'DSN=PSQL;UID=faltet;PWD=continuum;DATABASE=test') 75 | #'DSN=SQLite;DATABASE=market.sqlite') 76 | 'DSN=odbcsqlite;DATABASE=sqlite.db') 77 | #'DSN=SQLite;DATABASE=market-1k.sqlite') 78 | cursor = connection.cursor() 79 | try: 80 | cursor.execute("drop table t") 81 | except: 82 | pass 83 | 84 | check(cursor) 85 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/odbc-sample-cfg/odbc.ini: -------------------------------------------------------------------------------- 1 | ; 2 | ; odbc.ini configuration for MyODBC and MyODBC 3.51 Drivers 3 | ; 4 | 5 | [ODBC Data Sources] 6 | 7 | myodbc = MySQL ODBC 3.51 Driver DSN 8 | odbcsqlite = SQLite ODBC 3.51 Driver DSN 9 | odbcpostgresql = PostgreSQL ODBC ANSI Driver DSN 10 | 11 | [myodbc] 12 | Driver = MySQL 13 | Description = MySQL ODBC 3.51 Driver DSN 14 | Server = localhost 15 | Port = 16 | User = your-user 17 | Password = your-password 18 | Database = 
test 19 | Option = 3 20 | Socket = 21 | 22 | [odbcsqlite] 23 | Driver = SQLite 24 | Description = SQLite ODBC 3.51 Driver DSN 25 | # optional lock timeout in milliseconds 26 | Timeout=2000 27 | 28 | [odbcpostgresql] 29 | Description = PostgreSQL ODBC ANSI Driver DSN 30 | Driver = PostgreSQL ANSI 31 | Trace = No 32 | TraceFile = /tmp/psqlodbc.log 33 | Database = test 34 | Servername = localhost 35 | UserName = your-user 36 | Password = your-password 37 | Port = 38 | ReadOnly = Yes 39 | RowVersioning = No 40 | ShowSystemTables = No 41 | ShowOidColumn = No 42 | FakeOidIndex = No 43 | ConnSettings = 44 | 45 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/odbc-sample-cfg/odbcinst.ini: -------------------------------------------------------------------------------- 1 | [SQLite] 2 | Description = SQLite ODBC Driver 3 | Driver = libsqlite3odbc.so 4 | Threading = 2 5 | 6 | [MySQL] 7 | Description = MySQL driver 8 | Driver = libmyodbc.so 9 | 10 | [PostgreSQL ANSI] 11 | Description = PostgreSQL ODBC driver (ANSI version) 12 | Driver = psqlodbca.so 13 | Setup = libodbcpsqlS.so 14 | Debug = 0 15 | CommLog = 1 16 | UsageCount = 1 17 | 18 | [PostgreSQL Unicode] 19 | Description = PostgreSQL ODBC driver (Unicode version) 20 | Driver = psqlodbcw.so 21 | Setup = libodbcpsqlS.so 22 | Debug = 0 23 | CommLog = 1 24 | UsageCount = 1 25 | 26 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/odbc-sample-cfg/tests-numpy.cfg: -------------------------------------------------------------------------------- 1 | [sqlite] 2 | connection-string=DSN=odbcsqlite;Database=test-sqlite.db 3 | [mysql] 4 | connection-string=DSN=myodbc;Database=test 5 | [postgresql] 6 | connection-string=DSN=odbcpostgresql;Database=test 7 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/buffer.cpp: 
-------------------------------------------------------------------------------- 1 | 2 | // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 3 | // documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 4 | // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 5 | // permit persons to whom the Software is furnished to do so. 6 | // 7 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 8 | // WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS 9 | // OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 10 | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 11 | 12 | #include "pyodbc.h" 13 | 14 | #if PY_MAJOR_VERSION < 3 15 | 16 | 17 | #include "buffer.h" 18 | #include "pyodbcmodule.h" 19 | 20 | Py_ssize_t 21 | PyBuffer_GetMemory(PyObject* buffer, const char** pp) 22 | { 23 | PyBufferProcs* procs = Py_TYPE(buffer)->tp_as_buffer; 24 | 25 | if (!procs || !PyType_HasFeature(Py_TYPE(buffer), Py_TPFLAGS_HAVE_GETCHARBUFFER)) 26 | { 27 | // Can't access the memory directly because the buffer object doesn't support it. 28 | return -1; 29 | } 30 | 31 | if (procs->bf_getsegcount(buffer, 0) != 1) 32 | { 33 | // Can't access the memory directly because there is more than one segment. 
34 | return -1; 35 | } 36 | 37 | #if PY_VERSION_HEX >= 0x02050000 38 | char* pT = 0; 39 | #else 40 | const char* pT = 0; 41 | #endif 42 | Py_ssize_t cb = procs->bf_getcharbuffer(buffer, 0, &pT); 43 | 44 | if (pp) 45 | *pp = pT; 46 | 47 | return cb; 48 | } 49 | 50 | Py_ssize_t 51 | PyBuffer_Size(PyObject* self) 52 | { 53 | if (!PyBuffer_Check(self)) 54 | { 55 | PyErr_SetString(PyExc_TypeError, "Not a buffer!"); 56 | return 0; 57 | } 58 | 59 | Py_ssize_t total_len = 0; 60 | Py_TYPE(self)->tp_as_buffer->bf_getsegcount(self, &total_len); 61 | return total_len; 62 | } 63 | #endif 64 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/buffer.h: -------------------------------------------------------------------------------- 1 | 2 | // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 3 | // documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 4 | // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 5 | // permit persons to whom the Software is furnished to do so. 6 | // 7 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 8 | // WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS 9 | // OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 10 | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 11 | 12 | #ifndef _BUFFER_H 13 | #define _BUFFER_H 14 | 15 | #if PY_MAJOR_VERSION < 3 16 | 17 | // If the buffer object has a single, accessible segment, returns the length of the buffer. If 'pp' is not NULL, the 18 | // address of the segment is also returned. 
If there is more than one segment or if it cannot be accessed, -1 is 19 | // returned and 'pp' is not modified. 20 | Py_ssize_t 21 | PyBuffer_GetMemory(PyObject* buffer, const char** pp); 22 | 23 | // Returns the size of a Python buffer. 24 | // 25 | // If an error occurs, zero is returned, but zero is a valid buffer size (I guess), so use PyErr_Occurred to determine 26 | // if it represents a failure. 27 | Py_ssize_t 28 | PyBuffer_Size(PyObject* self); 29 | 30 | 31 | class BufferSegmentIterator 32 | { 33 | PyObject* pBuffer; 34 | Py_ssize_t iSegment; 35 | Py_ssize_t cSegments; 36 | 37 | public: 38 | BufferSegmentIterator(PyObject* _pBuffer) 39 | { 40 | pBuffer = _pBuffer; 41 | PyBufferProcs* procs = Py_TYPE(pBuffer)->tp_as_buffer; 42 | iSegment = 0; 43 | cSegments = procs->bf_getsegcount(pBuffer, 0); 44 | } 45 | 46 | bool Next(byte*& pb, SQLLEN &cb) 47 | { 48 | if (iSegment >= cSegments) 49 | return false; 50 | 51 | PyBufferProcs* procs = Py_TYPE(pBuffer)->tp_as_buffer; 52 | cb = procs->bf_getreadbuffer(pBuffer, iSegment++, (void**)&pb); 53 | return true; 54 | } 55 | }; 56 | 57 | #endif // PY_MAJOR_VERSION 58 | 59 | 60 | #endif 61 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/cnxninfo.h: -------------------------------------------------------------------------------- 1 | 2 | // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 3 | // documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 4 | // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 5 | // permit persons to whom the Software is furnished to do so. 6 | // 7 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 8 | // WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS 9 | // OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 10 | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 11 | 12 | #ifndef CNXNINFO_H 13 | #define CNXNINFO_H 14 | 15 | struct Connection; 16 | extern PyTypeObject CnxnInfoType; 17 | 18 | struct CnxnInfo 19 | { 20 | PyObject_HEAD 21 | 22 | // The description of these fields is in the connection structure. 23 | 24 | char odbc_major; 25 | char odbc_minor; 26 | 27 | bool supports_describeparam; 28 | int datetime_precision; 29 | 30 | // These are from SQLGetTypeInfo.column_size, so the char ones are in characters, not bytes. 31 | int varchar_maxlength; 32 | int wvarchar_maxlength; 33 | int binary_maxlength; 34 | }; 35 | 36 | void CnxnInfo_init(); 37 | 38 | // Looks-up or creates a CnxnInfo object for the given connection string. The connection string can be a Unicode or 39 | // String object. 40 | 41 | PyObject* GetConnectionInfo(PyObject* pConnectionString, Connection* cnxn); 42 | 43 | #endif // CNXNINFO_H 44 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/connection.h: -------------------------------------------------------------------------------- 1 | 2 | // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 3 | // documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 4 | // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 5 | // permit persons to whom the Software is furnished to do so. 6 | // 7 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 8 | // WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS 9 | // OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 10 | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 11 | 12 | #ifndef CONNECTION_H 13 | #define CONNECTION_H 14 | 15 | struct Cursor; 16 | 17 | extern PyTypeObject ConnectionType; 18 | 19 | struct Connection 20 | { 21 | PyObject_HEAD 22 | 23 | // Set to SQL_NULL_HANDLE when the connection is closed. 24 | HDBC hdbc; 25 | 26 | // Will be SQL_AUTOCOMMIT_ON or SQL_AUTOCOMMIT_OFF. 27 | uintptr_t nAutoCommit; 28 | 29 | // The ODBC version the driver supports, from SQLGetInfo(DRIVER_ODBC_VER). This is set after connecting. 30 | char odbc_major; 31 | char odbc_minor; 32 | 33 | // The escape character from SQLGetInfo. This is not initialized until requested, so this may be zero! 34 | PyObject* searchescape; 35 | 36 | // Will be true if SQLDescribeParam is supported. If false, we'll have to guess but the user will not be able 37 | // to insert NULLs into binary columns. 38 | bool supports_describeparam; 39 | 40 | // The column size of datetime columns, obtained from SQLGetInfo(), used to determine the datetime precision. 41 | int datetime_precision; 42 | 43 | // If true, then the strings in the rows are returned as unicode objects. 44 | bool unicode_results; 45 | 46 | // The connection timeout in seconds. 47 | intptr_t timeout; 48 | 49 | // These are copied from cnxn info for performance and convenience. 50 | 51 | int varchar_maxlength; 52 | int wvarchar_maxlength; 53 | int binary_maxlength; 54 | 55 | // Output conversions. Maps from SQL type in conv_types to the converter function in conv_funcs. 56 | // 57 | // If conv_count is zero, conv_types and conv_funcs will also be zero. 58 | // 59 | // pyodbc uses this manual mapping for speed and portability. The STL collection classes use the new operator and 60 | // throw exceptions when out of memory. 
pyodbc does not use any exceptions. 61 | 62 | int conv_count; // how many items are in conv_types and conv_funcs. 63 | SQLSMALLINT* conv_types; // array of SQL_TYPEs to convert 64 | PyObject** conv_funcs; // array of Python functions 65 | }; 66 | 67 | #define Connection_Check(op) PyObject_TypeCheck(op, &ConnectionType) 68 | #define Connection_CheckExact(op) (Py_TYPE(op) == &ConnectionType) 69 | 70 | /* 71 | * Used by the module's connect function to create new connection objects. If unable to connect to the database, an 72 | * exception is set and zero is returned. 73 | */ 74 | PyObject* Connection_New(PyObject* pConnectString, bool fAutoCommit, bool fAnsi, bool fUnicodeResults, long timeout, bool fReadOnly); 75 | 76 | /* 77 | * Used by the Cursor to implement commit and rollback. 78 | */ 79 | PyObject* Connection_endtrans(Connection* cnxn, SQLSMALLINT type); 80 | 81 | #endif 82 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/cursor.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 4 | * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 5 | * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 6 | * permit persons to whom the Software is furnished to do so. 7 | * 8 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 9 | * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS 10 | * OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 11 | * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 12 | */ 13 | 14 | #ifndef CURSOR_H 15 | #define CURSOR_H 16 | 17 | enum 18 | { 19 | CURSOR_REQUIRE_CNXN = 0x00000001, 20 | CURSOR_REQUIRE_OPEN = 0x00000003, // includes _CNXN 21 | CURSOR_REQUIRE_RESULTS = 0x00000007, // includes _OPEN 22 | CURSOR_RAISE_ERROR = 0x00000010, 23 | }; 24 | 25 | 26 | struct Connection; 27 | 28 | struct ColumnInfo 29 | { 30 | SQLSMALLINT sql_type; 31 | 32 | // The column size from SQLDescribeCol. For character types, this is the maximum length, not including the NULL 33 | // terminator. For binary values, this is the maximum length. For numeric and decimal values, it is the defined 34 | // number of digits. For example, the precision of a column defined as NUMERIC(10,3) is 10. 35 | // 36 | // This value can be SQL_NO_TOTAL in which case the driver doesn't know the maximum length, such as for LONGVARCHAR 37 | // fields. 38 | SQLULEN column_size; 39 | 40 | // Tells us if an integer type is signed or unsigned. This is determined after a query using SQLColAttribute. All 41 | // of the integer types are the same size whether signed and unsigned, so we can allocate memory ahead of time 42 | // without knowing this. We use this during the fetch when converting to a Python integer or long. 43 | bool is_unsigned; 44 | }; 45 | 46 | struct ParamInfo 47 | { 48 | // The following correspond to the SQLBindParameter parameters. 49 | SQLSMALLINT ValueType; 50 | SQLSMALLINT ParameterType; 51 | SQLULEN ColumnSize; 52 | SQLSMALLINT DecimalDigits; 53 | 54 | // The value pointer that will be bound. If `alloc` is true, this was allocated with malloc and must be freed. 55 | // Otherwise it is zero or points into memory owned by the original Python parameter. 
56 | SQLPOINTER ParameterValuePtr; 57 | 58 | SQLLEN BufferLength; 59 | SQLLEN StrLen_or_Ind; 60 | 61 | // If true, the memory in ParameterValuePtr was allocated via malloc and must be freed. 62 | bool allocated; 63 | 64 | // The python object containing the parameter value. A reference to this object should be held until we have 65 | // finished using memory owned by it. 66 | PyObject* pParam; 67 | 68 | // Optional data. If used, ParameterValuePtr will point into this. 69 | union 70 | { 71 | unsigned char ch; 72 | long l; 73 | INT64 i64; 74 | double dbl; 75 | TIMESTAMP_STRUCT timestamp; 76 | DATE_STRUCT date; 77 | TIME_STRUCT time; 78 | } Data; 79 | }; 80 | 81 | struct Cursor 82 | { 83 | PyObject_HEAD 84 | 85 | // The Connection object (which is a PyObject) that created this cursor. 86 | Connection* cnxn; 87 | 88 | // Set to SQL_NULL_HANDLE when the cursor is closed. 89 | HSTMT hstmt; 90 | 91 | // 92 | // SQL Parameters 93 | // 94 | 95 | // If non-zero, a pointer to the previously prepared SQL string, allowing us to skip the prepare and gathering of 96 | // parameter data. 97 | PyObject* pPreparedSQL; 98 | 99 | // The number of parameter markers in pPreparedSQL. This will be zero when pPreparedSQL is zero but is set 100 | // immediately after preparing the SQL. 101 | int paramcount; 102 | 103 | // If non-zero, a pointer to an array of SQL type values allocated via malloc. This is zero until we actually ask 104 | // for the type of parameter, which is only when a parameter is None (NULL). At that point, the entire array is 105 | // allocated (length == paramcount) but all entries are set to SQL_UNKNOWN_TYPE. 106 | SQLSMALLINT* paramtypes; 107 | 108 | // If non-zero, a pointer to a buffer containing the actual parameters bound. If pPreparedSQL is zero, this should 109 | // be freed using free and set to zero. 
110 | // 111 | // Even if the same SQL statement is executed twice, the parameter bindings are redone from scratch since we try to 112 | // bind into the Python objects directly. 113 | ParamInfo* paramInfos; 114 | 115 | // 116 | // Result Information 117 | // 118 | 119 | // An array of ColumnInfos, allocated via malloc. This will be zero when closed or when there are no query 120 | // results. 121 | ColumnInfo* colinfos; 122 | 123 | // The description tuple described in the DB API 2.0 specification. Set to None when there are no results. 124 | PyObject* description; 125 | 126 | int arraysize; 127 | 128 | // The Cursor.rowcount attribute from the DB API specification. 129 | int rowcount; 130 | 131 | // A dictionary that maps from column name (PyString) to index into the result columns (PyInteger). This is 132 | // constructued during an execute and shared with each row (reference counted) to implement accessing results by 133 | // column name. 134 | // 135 | // This duplicates some ODBC functionality, but allows us to use Row objects after the statement is closed and 136 | // should use less memory than putting each column into the Row's __dict__. 137 | // 138 | // Since this is shared by Row objects, it cannot be reused. New dictionaries are created for every execute. This 139 | // will be zero whenever there are no results. 140 | PyObject* map_name_to_index; 141 | }; 142 | 143 | void Cursor_init(); 144 | 145 | Cursor* Cursor_New(Connection* cnxn); 146 | PyObject* Cursor_execute(PyObject* self, PyObject* args); 147 | Cursor* Cursor_Validate(PyObject* obj, DWORD flags); 148 | 149 | #endif 150 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/dbspecific.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef DBSPECIFIC_H 3 | #define DBSPECIFIC_H 4 | 5 | // Items specific to databases. 
6 | // 7 | // Obviously we'd like to minimize this, but if they are needed this file isolates them. I'd like for there to be a 8 | // single build of pyodbc on each platform and not have a bunch of defines for supporting different databases. 9 | 10 | 11 | // --------------------------------------------------------------------------------------------------------------------- 12 | // SQL Server 13 | 14 | 15 | // SQL Server 2005 xml type 16 | 17 | #define SQL_SS_XML -152 18 | 19 | 20 | // SQL Server 2008 time type 21 | 22 | #define SQL_SS_TIME2 -154 23 | 24 | struct SQL_SS_TIME2_STRUCT 25 | { 26 | SQLUSMALLINT hour; 27 | SQLUSMALLINT minute; 28 | SQLUSMALLINT second; 29 | SQLUINTEGER fraction; 30 | }; 31 | 32 | #endif // DBSPECIFIC_H 33 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/errors.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef _ERRORS_H_ 3 | #define _ERRORS_H_ 4 | 5 | // Sets an exception based on the ODBC SQLSTATE and error message and returns zero. If either handle is not available, 6 | // pass SQL_NULL_HANDLE. 7 | // 8 | // szFunction 9 | // The name of the function that failed. Python generates a useful stack trace, but we often don't know where in the 10 | // C++ code we failed. 11 | // 12 | PyObject* RaiseErrorFromHandle(const char* szFunction, HDBC hdbc, HSTMT hstmt); 13 | 14 | // Sets an exception using a printf-like error message. 15 | // 16 | // szSqlState 17 | // The optional SQLSTATE reported by ODBC. If not provided (sqlstate is NULL or sqlstate[0] is NULL), "HY000" 18 | // (General Error) is used. Note that HY000 causes Error to be used if exc_class is not provided. 19 | // 20 | // exc_class 21 | // The optional exception class (DatabaseError, etc.) to construct. If NULL, the appropriate class will be 22 | // determined from the SQLSTATE. 
23 | // 24 | PyObject* RaiseErrorV(const char* sqlstate, PyObject* exc_class, const char* format, ...); 25 | 26 | 27 | // Constructs an exception and returns it. 28 | // 29 | // This function is like RaiseErrorFromHandle, but gives you the ability to examine the error first (in particular, 30 | // used to examine the SQLSTATE using HasSqlState). If you want to use the error, call PyErr_SetObject(ex->ob_type, 31 | // ex). Otherwise, dispose of the error using Py_DECREF(ex). 32 | // 33 | // szFunction 34 | // The name of the function that failed. Python generates a useful stack trace, but we often don't know where in the 35 | // C++ code we failed. 36 | // 37 | PyObject* GetErrorFromHandle(const char* szFunction, HDBC hdbc, HSTMT hstmt); 38 | 39 | 40 | // Returns true if `ex` is a database exception with SQLSTATE `szSqlState`. Returns false otherwise. 41 | // 42 | // It is safe to call with ex set to zero. The SQLSTATE comparison is case-insensitive. 43 | // 44 | bool HasSqlState(PyObject* ex, const char* szSqlState); 45 | 46 | 47 | // Returns true if the HSTMT has a diagnostic record with the given SQLSTATE. This is used after SQLGetData call that 48 | // returned SQL_SUCCESS_WITH_INFO to see if it also has SQLSTATE 01004, indicating there is more data. 49 | // 50 | bool HasSqlState(HSTMT hstmt, const char* szSqlState); 51 | 52 | inline PyObject* RaiseErrorFromException(PyObject* pError) 53 | { 54 | // PyExceptionInstance_Class doesn't exist in 2.4 55 | #if PY_MAJOR_VERSION >= 3 56 | PyErr_SetObject((PyObject*)Py_TYPE(pError), pError); 57 | #else 58 | PyObject* cls = (PyObject*)((PyInstance_Check(pError) ? 
(PyObject*)((PyInstanceObject*)pError)->in_class : (PyObject*)(Py_TYPE(pError)))); 59 | PyErr_SetObject(cls, pError); 60 | #endif 61 | return 0; 62 | } 63 | 64 | #endif // _ERRORS_H_ 65 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/getdata.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef _GETDATA_H_ 3 | #define _GETDATA_H_ 4 | 5 | void GetData_init(); 6 | 7 | PyObject* GetData(Cursor* cur, Py_ssize_t iCol); 8 | 9 | /** 10 | * If this sql type has a user-defined conversion, the index into the connection's `conv_funcs` array is returned. 11 | * Otherwise -1 is returned. 12 | */ 13 | int GetUserConvIndex(Cursor* cur, SQLSMALLINT sql_type); 14 | 15 | #endif // _GETDATA_H_ 16 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/npcontainer.h: -------------------------------------------------------------------------------- 1 | #ifndef _NPCONTAINER_H_ 2 | #define _NPCONTAINER_H_ 3 | 4 | 5 | #if PY_VERSION_HEX >= 0x03000000 6 | int NpContainer_init(); 7 | #else 8 | void NpContainer_init(); 9 | #endif 10 | 11 | PyObject* Cursor_fetchsarray(PyObject* self, PyObject* args, PyObject *kwargs); 12 | 13 | PyObject* Cursor_fetchdictarray(PyObject* self, PyObject* args, PyObject *kwargs); 14 | 15 | 16 | extern Py_ssize_t iopro_text_limit; 17 | 18 | #endif // _NPCONTAINER_H_ 19 | 20 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/params.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef PARAMS_H 3 | #define PARAMS_H 4 | 5 | bool Params_init(); 6 | 7 | struct Cursor; 8 | 9 | bool PrepareAndBind(Cursor* cur, PyObject* pSql, PyObject* params, bool skip_first); 10 | void FreeParameterData(Cursor* cur); 11 | void FreeParameterInfo(Cursor* cur); 12 | 13 | #endif 14 | 
-------------------------------------------------------------------------------- /dbadapter/pyodbc/src/pyodbc.h: -------------------------------------------------------------------------------- 1 | 2 | // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 3 | // documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 4 | // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 5 | // permit persons to whom the Software is furnished to do so. 6 | // 7 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 8 | // WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS 9 | // OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 10 | // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 11 | 12 | #ifndef PYODBC_H 13 | #define PYODBC_H 14 | 15 | 16 | // Python definitions ---------------------------------------------------- 17 | 18 | // first include Python.h to avoid warnings. 
19 | #define PY_SSIZE_T_CLEAN 1 20 | 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | 28 | #if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN) 29 | typedef int Py_ssize_t; 30 | #define PY_SSIZE_T_MAX INT_MAX 31 | #define PY_SSIZE_T_MIN INT_MIN 32 | #define PyInt_AsSsize_t PyInt_AsLong 33 | #define lenfunc inquiry 34 | #define ssizeargfunc intargfunc 35 | #define ssizeobjargproc intobjargproc 36 | #endif 37 | 38 | // System definitions ---------------------------------------------------- 39 | 40 | #ifdef _MSC_VER 41 | # define _CRT_SECURE_NO_WARNINGS 42 | # include 43 | # include 44 | typedef __int64 INT64; 45 | typedef unsigned __int64 UINT64; 46 | #else 47 | typedef unsigned char byte; 48 | typedef unsigned int UINT; 49 | typedef long long INT64; 50 | typedef unsigned long long UINT64; 51 | # define _strcmpi strcasecmp 52 | # ifdef __MINGW32__ 53 | # include 54 | # include 55 | # else 56 | inline int max(int lhs, int rhs) { return (rhs > lhs) ? rhs : lhs; } 57 | # endif 58 | #endif 59 | 60 | #ifdef __SUN__ 61 | # include 62 | #endif 63 | 64 | #if defined(_MSC_VER) 65 | #if _MSC_VER < 1600 66 | /* Visual Studio before 2010 didn't have stdint.h */ 67 | typedef signed char int8_t; 68 | typedef short int16_t; 69 | typedef int int32_t; 70 | typedef __int64 int64_t; 71 | typedef unsigned char uint8_t; 72 | typedef unsigned short uint16_t; 73 | typedef unsigned int uint32_t; 74 | typedef unsigned __int64 uint64_t; 75 | #else 76 | #include 77 | #endif 78 | #endif 79 | 80 | #if defined(__SUNPRO_CC) || defined(__SUNPRO_C) || (defined(__GNUC__) && !defined(__MINGW32__)) 81 | # include 82 | # include 83 | # define CDECL cdecl 84 | # define min(X,Y) ((X) < (Y) ? (X) : (Y)) 85 | # define max(X,Y) ((X) > (Y) ? 
(X) : (Y)) 86 | # define _alloca alloca 87 | inline void _strlwr(char* name) 88 | { 89 | while (*name) { *name = tolower(*name); name++; } 90 | } 91 | #else 92 | # define CDECL 93 | #endif 94 | 95 | 96 | // ODBC definitions ------------------------------------------------------ 97 | 98 | #include 99 | #include 100 | 101 | 102 | // Utility functions/definitions ---------------------------------------- 103 | 104 | #ifndef _countof 105 | #define _countof(a) (sizeof(a) / sizeof(a[0])) 106 | #endif 107 | 108 | inline bool IsSet(DWORD grf, DWORD flags) 109 | { 110 | return (grf & flags) == flags; 111 | } 112 | 113 | #ifdef UNUSED 114 | #undef UNUSED 115 | #endif 116 | inline void UNUSED(...) { } 117 | 118 | #define STRINGIFY(x) #x 119 | #define TOSTRING(x) STRINGIFY(x) 120 | 121 | 122 | // Debugging support ----------------------------------------------------- 123 | 124 | // Building an actual debug version of Python is so much of a pain that it never happens. I'm providing release-build 125 | // versions of assertions. 126 | 127 | extern bool pyodbc_tracing_enabled; 128 | extern bool pyodbc_alloc_guards; 129 | void pyodbc_assertion_failed(const char *file, int line, const char *expr); 130 | void pyodbc_trace_func(const char *file, int line, const char* fmt, ...); 131 | void *pyodbc_guarded_alloc(const char *file, int line, size_t size); 132 | void pyodbc_guarded_dealloc(const char *file, int line, void* ptr); 133 | void pyodbc_check_guards(const char* file, int line, void* ptr, const char *fmt, ...); 134 | 135 | #if defined(PYODBC_ASSERT) 136 | #define I(expr) if (!(expr)) pyodbc_assertion_failed(__FILE__, __LINE__, #expr); 137 | #define N(expr) if (expr) pyodbc_assertion_failed(__FILE__, __LINE__, #expr); 138 | #else 139 | #define I(expr) 140 | #define N(expr) 141 | #endif 142 | 143 | 144 | #define TRACE(...) \ 145 | if (pyodbc_tracing_enabled) \ 146 | pyodbc_trace_func(__FILE__, __LINE__, __VA_ARGS__) 147 | 148 | #define TRACE_NOLOC(...) 
\ 149 | if (pyodbc_tracing_enabled) \ 150 | pyodbc_trace_func(NULL, 0, __VA_ARGS__) 151 | 152 | #define GUARDED_ALLOC(...) \ 153 | ((!pyodbc_alloc_guards)? \ 154 | malloc(__VA_ARGS__) : \ 155 | pyodbc_guarded_alloc(__FILE__, __LINE__, __VA_ARGS__)) 156 | 157 | #define GUARDED_DEALLOC(...) \ 158 | do if (!pyodbc_alloc_guards) { \ 159 | free(__VA_ARGS__); \ 160 | } \ 161 | else { \ 162 | pyodbc_guarded_dealloc(__FILE__, __LINE__, __VA_ARGS__); \ 163 | } while(0) 164 | 165 | #define CHECK_ALLOC_GUARDS(...) \ 166 | if (pyodbc_alloc_guards) \ 167 | pyodbc_check_guards(__FILE__, __LINE__, __VA_ARGS__, "") 168 | 169 | #ifdef PYODBC_LEAK_CHECK 170 | #define pyodbc_malloc(len) _pyodbc_malloc(__FILE__, __LINE__, len) 171 | void* _pyodbc_malloc(const char* filename, int lineno, size_t len); 172 | void pyodbc_free(void* p); 173 | void pyodbc_leak_check(); 174 | #else 175 | #define pyodbc_malloc malloc 176 | #define pyodbc_free free 177 | #endif 178 | 179 | 180 | void PrintBytes(void* p, size_t len); 181 | 182 | 183 | // Python 3 compatibility definitions ------------------------------------ 184 | #include "pyodbccompat.h" 185 | 186 | 187 | #endif // pyodbc_h 188 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/pyodbc.rc: -------------------------------------------------------------------------------- 1 | 2 | // This was a Microsoft Visual C++ generated resource script, but I've hand modified it to 3 | // remove the afxres.h include. Apparently Visual Studio 2008 Express (used to build the 4 | // Python 2.6 version) does not include afxres.h which is part of MFC. This will probably 5 | // not be editable in the Visual Studio resource editor. 6 | 7 | #include 8 | #include "resource.h" 9 | 10 | #define STRINGIZER(version) #version 11 | #define PRODUCT_VERSION_STRING(major,minor) STRINGIZER(major) "." STRINGIZER(minor) 12 | #define FILE_VERSION_STRING(major,minor,micro,build) STRINGIZER(major) "." STRINGIZER(minor) "." 
STRINGIZER(micro) "." STRINGIZER(build) 13 | 14 | LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US 15 | #pragma code_page(1252) 16 | 17 | VS_VERSION_INFO VERSIONINFO 18 | FILEVERSION PYODBC_MAJOR,PYODBC_MINOR,PYODBC_MICRO,PYODBC_BUILD 19 | PRODUCTVERSION PYODBC_MAJOR,PYODBC_MINOR,PYODBC_MICRO,PYODBC_BUILD 20 | FILEFLAGSMASK 0x17L 21 | #ifdef _DEBUG 22 | FILEFLAGS 0x1L 23 | #else 24 | FILEFLAGS 0x0L 25 | #endif 26 | FILEOS 0x4L 27 | FILETYPE 0x2L 28 | FILESUBTYPE 0x0L 29 | BEGIN 30 | BLOCK "StringFileInfo" 31 | BEGIN 32 | BLOCK "040904b0" 33 | BEGIN 34 | VALUE "Copyright", "Copyright 2009 Michael Kleehammer" 35 | VALUE "ProductName", "ODBC DB API 2.0 Module" 36 | VALUE "ProductVersion", PRODUCT_VERSION_STRING(PYODBC_MAJOR,PYODBC_MINOR) 37 | VALUE "FileDescription", "ODBC DB API 2.0 Module" 38 | VALUE "FileVersion", FILE_VERSION_STRING(PYODBC_MAJOR,PYODBC_MINOR,PYODBC_MICRO,PYODBC_BUILD) 39 | VALUE "InternalName", "pyodbc" 40 | VALUE "OriginalFilename", "pyodbc.pyd" 41 | END 42 | END 43 | BLOCK "VarFileInfo" 44 | BEGIN 45 | VALUE "Translation", 0x409, 1200 46 | END 47 | END 48 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/pyodbccompat.cpp: -------------------------------------------------------------------------------- 1 | 2 | #include "pyodbc.h" 3 | 4 | bool Text_EqualsI(PyObject* lhs, const char* rhs) 5 | { 6 | #if PY_MAJOR_VERSION < 3 7 | // In Python 2, allow ANSI strings. 
8 | if (lhs && PyString_Check(lhs)) 9 | return _strcmpi(PyString_AS_STRING(lhs), rhs) == 0; 10 | #endif 11 | 12 | if (lhs == 0 || !PyUnicode_Check(lhs)) 13 | return false; 14 | 15 | Py_ssize_t cchLHS = PyUnicode_GET_SIZE(lhs); 16 | Py_ssize_t cchRHS = (Py_ssize_t)strlen(rhs); 17 | if (cchLHS != cchRHS) 18 | return false; 19 | 20 | Py_UNICODE* p = PyUnicode_AS_UNICODE(lhs); 21 | for (Py_ssize_t i = 0; i < cchLHS; i++) 22 | { 23 | int chL = (int)Py_UNICODE_TOUPPER(p[i]); 24 | int chR = (int)toupper(rhs[i]); 25 | if (chL != chR) 26 | return false; 27 | } 28 | 29 | return true; 30 | } 31 | 32 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/pyodbccompat.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef PYODBCCOMPAT_H 3 | #define PYODBCCOMPAT_H 4 | 5 | // Macros and functions to ease compatibility with Python 2 and Python 3. 6 | 7 | #if PY_VERSION_HEX >= 0x03000000 && PY_VERSION_HEX < 0x03010000 8 | #error Python 3.0 is not supported. Please use 3.1 and higher. 9 | #endif 10 | 11 | // Macros introduced in 2.6, backported for 2.4 and 2.5. 12 | #ifndef PyVarObject_HEAD_INIT 13 | #define PyVarObject_HEAD_INIT(type, size) PyObject_HEAD_INIT(type) size, 14 | #endif 15 | #ifndef Py_TYPE 16 | #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) 17 | #endif 18 | 19 | // Macros were introduced in 2.6 to map "bytes" to "str" in Python 2. Back port to 2.5. 20 | #if PY_VERSION_HEX >= 0x02060000 21 | #include 22 | #else 23 | #define PyBytes_AS_STRING PyString_AS_STRING 24 | #define PyBytes_Check PyString_Check 25 | #define PyBytes_CheckExact PyString_CheckExact 26 | #define PyBytes_FromStringAndSize PyString_FromStringAndSize 27 | #define PyBytes_GET_SIZE PyString_GET_SIZE 28 | #define PyBytes_Size PyString_Size 29 | #define _PyBytes_Resize _PyString_Resize 30 | #endif 31 | 32 | // Used for items that are ANSI in Python 2 and Unicode in Python 3 or in int 2 and long in 3. 
33 | 34 | #if PY_MAJOR_VERSION >= 3 35 | #define PyString_FromString PyUnicode_FromString 36 | #define PyString_FromStringAndSize PyUnicode_FromStringAndSize 37 | #define PyString_Check PyUnicode_Check 38 | #define PyString_Type PyUnicode_Type 39 | #define PyString_Size PyUnicode_Size 40 | #define PyInt_FromLong PyLong_FromLong 41 | #define PyInt_AsLong PyLong_AsLong 42 | #define PyInt_AS_LONG PyLong_AS_LONG 43 | #define PyInt_Type PyLong_Type 44 | #define PyString_FromFormatV PyUnicode_FromFormatV 45 | #define PyString_FromFormat PyUnicode_FromFormat 46 | #define Py_TPFLAGS_HAVE_ITER 0 47 | 48 | #define PyString_AsString PyUnicode_AsString 49 | 50 | #define TEXT_T Py_UNICODE 51 | 52 | #define PyString_Join PyUnicode_Join 53 | 54 | inline void PyString_ConcatAndDel(PyObject** lhs, PyObject* rhs) 55 | { 56 | PyUnicode_Concat(*lhs, rhs); 57 | Py_DECREF(rhs); 58 | } 59 | 60 | #else 61 | #include 62 | #include 63 | #include 64 | 65 | #define TEXT_T char 66 | 67 | #define PyString_Join _PyString_Join 68 | 69 | #endif 70 | 71 | inline PyObject* Text_New(Py_ssize_t length) 72 | { 73 | // Returns a new, uninitialized String (Python 2) or Unicode object (Python 3) object. 74 | #if PY_MAJOR_VERSION < 3 75 | return PyString_FromStringAndSize(0, length); 76 | #else 77 | return PyUnicode_FromUnicode(0, length); 78 | #endif 79 | } 80 | 81 | inline TEXT_T* Text_Buffer(PyObject* o) 82 | { 83 | #if PY_MAJOR_VERSION < 3 84 | I(PyString_Check(o)); 85 | return PyString_AS_STRING(o); 86 | #else 87 | I(PyUnicode_Check(o)); 88 | return PyUnicode_AS_UNICODE(o); 89 | #endif 90 | } 91 | 92 | 93 | inline bool Text_Check(PyObject* o) 94 | { 95 | // A compatibility function that determines if the object is a string, based on the version of Python. 96 | // For Python 2, an ASCII or Unicode string is allowed. For Python 3, it must be a Unicode object. 
97 | #if PY_MAJOR_VERSION < 3 98 | if (o && PyString_Check(o)) 99 | return true; 100 | #endif 101 | return o && PyUnicode_Check(o); 102 | } 103 | 104 | bool Text_EqualsI(PyObject* lhs, const char* rhs); 105 | // Case-insensitive comparison for a Python string object (Unicode in Python 3, ASCII or Unicode in Python 2) against 106 | // an ASCII string. If lhs is 0 or None, false is returned. 107 | 108 | 109 | inline Py_ssize_t Text_Size(PyObject* o) 110 | { 111 | #if PY_MAJOR_VERSION < 3 112 | if (o && PyString_Check(o)) 113 | return PyString_GET_SIZE(o); 114 | #endif 115 | return (o && PyUnicode_Check(o)) ? PyUnicode_GET_SIZE(o) : 0; 116 | } 117 | 118 | inline Py_ssize_t TextCopyToUnicode(Py_UNICODE* buffer, PyObject* o) 119 | { 120 | // Copies a String or Unicode object to a Unicode buffer and returns the number of characters copied. 121 | // No NULL terminator is appended! 122 | 123 | #if PY_MAJOR_VERSION < 3 124 | if (PyBytes_Check(o)) 125 | { 126 | const Py_ssize_t cch = PyBytes_GET_SIZE(o); 127 | const char * pch = PyBytes_AS_STRING(o); 128 | for (Py_ssize_t i = 0; i < cch; i++) 129 | *buffer++ = (Py_UNICODE)*pch++; 130 | return cch; 131 | } 132 | else 133 | { 134 | #endif 135 | Py_ssize_t cch = PyUnicode_GET_SIZE(o); 136 | memcpy(buffer, PyUnicode_AS_UNICODE(o), cch * sizeof(Py_UNICODE)); 137 | return cch; 138 | #if PY_MAJOR_VERSION < 3 139 | } 140 | #endif 141 | } 142 | 143 | #endif // PYODBCCOMPAT_H 144 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/pyodbcmodule.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 4 | * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 5 | * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and 
to 6 | * permit persons to whom the Software is furnished to do so. 7 | * 8 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 9 | * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS 10 | * OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 11 | * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 12 | */ 13 | 14 | #ifndef _PYPGMODULE_H 15 | #define _PYPGMODULE_H 16 | 17 | extern PyObject* Error; 18 | extern PyObject* Warning; 19 | extern PyObject* InterfaceError; 20 | extern PyObject* DatabaseError; 21 | extern PyObject* InternalError; 22 | extern PyObject* OperationalError; 23 | extern PyObject* ProgrammingError; 24 | extern PyObject* IntegrityError; 25 | extern PyObject* DataError; 26 | extern PyObject* NotSupportedError; 27 | 28 | extern PyObject* null_binary; 29 | 30 | extern PyObject* decimal_type; 31 | 32 | inline bool PyDecimal_Check(PyObject* p) 33 | { 34 | return Py_TYPE(p) == (_typeobject*)decimal_type; 35 | } 36 | extern HENV henv; 37 | 38 | extern PyTypeObject RowType; 39 | extern PyTypeObject CursorType; 40 | extern PyTypeObject ConnectionType; 41 | 42 | // Thd pyodbc module. 43 | extern PyObject* pModule; 44 | 45 | inline bool lowercase() 46 | { 47 | return PyObject_GetAttrString(pModule, "lowercase") == Py_True; 48 | } 49 | 50 | extern Py_UNICODE chDecimal; 51 | 52 | #endif // _PYPGMODULE_H 53 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/resource.h: -------------------------------------------------------------------------------- 1 | //{{NO_DEPENDENCIES}} 2 | // Microsoft Visual C++ generated include file. 
3 | // Used by pyodbc.rc 4 | 5 | // Next default values for new objects 6 | // 7 | #ifdef APSTUDIO_INVOKED 8 | #ifndef APSTUDIO_READONLY_SYMBOLS 9 | #define _APS_NEXT_RESOURCE_VALUE 101 10 | #define _APS_NEXT_COMMAND_VALUE 40001 11 | #define _APS_NEXT_CONTROL_VALUE 1001 12 | #define _APS_NEXT_SYMED_VALUE 101 13 | #endif 14 | #endif 15 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/row.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 4 | * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 5 | * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 6 | * permit persons to whom the Software is furnished to do so. 7 | * 8 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 9 | * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS 10 | * OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 11 | * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 12 | */ 13 | 14 | #ifndef ROW_H 15 | #define ROW_H 16 | 17 | struct Row; 18 | 19 | /* 20 | * Used to make a new row from an array of column values. 21 | */ 22 | Row* Row_New(PyObject* description, PyObject* map_name_to_index, Py_ssize_t cValues, PyObject** apValues); 23 | 24 | /* 25 | * Dereferences each object in apValues and frees apValue. This is the internal format used by rows. 26 | * 27 | * cValues: The number of items to free in apValues. 28 | * 29 | * apValues: The array of values. This can be NULL. 
30 | */ 31 | void FreeRowValues(Py_ssize_t cValues, PyObject** apValues); 32 | 33 | extern PyTypeObject RowType; 34 | #define Row_Check(op) PyObject_TypeCheck(op, &RowType) 35 | #define Row_CheckExact(op) (Py_TYPE(op) == &RowType) 36 | 37 | #endif 38 | 39 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/sqlwchar.cpp: -------------------------------------------------------------------------------- 1 | 2 | #include "pyodbc.h" 3 | #include "sqlwchar.h" 4 | #include "wrapper.h" 5 | 6 | Py_ssize_t SQLWCHAR_SIZE = sizeof(SQLWCHAR); 7 | 8 | #ifdef HAVE_WCHAR_H 9 | static int WCHAR_T_SIZE = sizeof(wchar_t); 10 | #endif 11 | 12 | 13 | inline Py_UNICODE CalculateMaxSQL() 14 | { 15 | if (SQLWCHAR_SIZE >= Py_UNICODE_SIZE) 16 | return 0; 17 | 18 | Py_UNICODE m = 0; 19 | for (unsigned int i = 0; i < sizeof(SQLWCHAR); i++) 20 | { 21 | m <<= 8; 22 | m |= 0xFF; 23 | } 24 | return m; 25 | } 26 | 27 | 28 | // If SQLWCHAR is larger than Py_UNICODE, this is the largest value that can be held in a Py_UNICODE. Because it is 29 | // stored in a Py_UNICODE, it is undefined when sizeof(SQLWCHAR) <= sizeof(Py_UNICODE). 30 | static Py_UNICODE MAX_SQLWCHAR = CalculateMaxSQL(); 31 | 32 | // If SQLWCHAR is larger than Py_UNICODE, this is the largest value that can be held in a Py_UNICODE. Because it is 33 | // stored in a Py_UNICODE, it is undefined when sizeof(SQLWCHAR) <= sizeof(Py_UNICODE). 34 | static const SQLWCHAR MAX_PY_UNICODE = (SQLWCHAR)PyUnicode_GetMax(); 35 | 36 | static bool sqlwchar_copy(SQLWCHAR* pdest, const Py_UNICODE* psrc, Py_ssize_t len) 37 | { 38 | // Copies a Python Unicode string to a SQLWCHAR buffer. Note that this does copy the NULL terminator, but `len` 39 | // should not include it. That is, it copies (len + 1) characters. 
40 | 41 | if (Py_UNICODE_SIZE == SQLWCHAR_SIZE) 42 | { 43 | memcpy(pdest, psrc, sizeof(SQLWCHAR) * (len + 1)); 44 | } 45 | else 46 | { 47 | if (SQLWCHAR_SIZE < Py_UNICODE_SIZE) 48 | { 49 | for (int i = 0; i < len; i++) 50 | { 51 | if ((Py_ssize_t)psrc[i] > MAX_SQLWCHAR) 52 | { 53 | PyErr_Format(PyExc_ValueError, "Cannot convert from Unicode %zd to SQLWCHAR. Value is too large.", (Py_ssize_t)psrc[i]); 54 | return false; 55 | } 56 | } 57 | } 58 | 59 | for (int i = 0; i <= len; i++) // ('<=' to include the NULL) 60 | pdest[i] = (SQLWCHAR)psrc[i]; 61 | } 62 | 63 | return true; 64 | } 65 | 66 | SQLWChar::SQLWChar(PyObject* o) 67 | { 68 | // Converts from a Python Unicode string. 69 | 70 | pch = 0; 71 | len = 0; 72 | owns_memory = false; 73 | 74 | Convert(o); 75 | } 76 | 77 | void SQLWChar::Free() 78 | { 79 | if (pch && owns_memory) 80 | pyodbc_free(pch); 81 | pch = 0; 82 | len = 0; 83 | owns_memory = false; 84 | } 85 | 86 | bool SQLWChar::Convert(PyObject* o) 87 | { 88 | Free(); 89 | 90 | if (!PyUnicode_Check(o)) 91 | { 92 | PyErr_SetString(PyExc_TypeError, "Unicode required"); 93 | return false; 94 | } 95 | 96 | Py_UNICODE* pU = (Py_UNICODE*)PyUnicode_AS_UNICODE(o); 97 | Py_ssize_t lenT = PyUnicode_GET_SIZE(o); 98 | 99 | if (SQLWCHAR_SIZE == Py_UNICODE_SIZE) 100 | { 101 | // The ideal case - SQLWCHAR and Py_UNICODE are the same, so we point into the Unicode object. 
102 | 103 | pch = (SQLWCHAR*)pU; 104 | len = lenT; 105 | owns_memory = false; 106 | return true; 107 | } 108 | else 109 | { 110 | SQLWCHAR* pchT = (SQLWCHAR*)pyodbc_malloc(sizeof(SQLWCHAR) * (lenT + 1)); 111 | if (pchT == 0) 112 | { 113 | PyErr_NoMemory(); 114 | return false; 115 | } 116 | 117 | if (!sqlwchar_copy(pchT, pU, lenT)) 118 | { 119 | pyodbc_free(pchT); 120 | return false; 121 | } 122 | 123 | pch = pchT; 124 | len = lenT; 125 | owns_memory = true; 126 | return true; 127 | } 128 | } 129 | 130 | PyObject* PyUnicode_FromSQLWCHAR(const SQLWCHAR* sz, Py_ssize_t cch) 131 | { 132 | // Create a Python Unicode object from a zero-terminated SQLWCHAR. 133 | 134 | if (SQLWCHAR_SIZE == Py_UNICODE_SIZE) 135 | { 136 | // The ODBC Unicode and Python Unicode types are the same size. Cast the ODBC type to the Python type and use 137 | // a fast function. 138 | return PyUnicode_FromUnicode((const Py_UNICODE*)sz, cch); 139 | } 140 | 141 | #ifdef HAVE_WCHAR_H 142 | if (WCHAR_T_SIZE == SQLWCHAR_SIZE) 143 | { 144 | // The ODBC Unicode is the same as wchar_t. Python provides a function for that. 145 | return PyUnicode_FromWideChar((const wchar_t*)sz, cch); 146 | } 147 | #endif 148 | 149 | // There is no conversion, so we will copy it ourselves with a simple cast. 150 | 151 | if (Py_UNICODE_SIZE < SQLWCHAR_SIZE) 152 | { 153 | // We are casting from a larger size to a smaller one, so we'll make sure they all fit. 154 | 155 | for (Py_ssize_t i = 0; i < cch; i++) 156 | { 157 | if (((Py_ssize_t)sz[i]) > MAX_PY_UNICODE) 158 | { 159 | PyErr_Format(PyExc_ValueError, "Cannot convert from SQLWCHAR %zd to Unicode. 
Value is too large.", (Py_ssize_t)sz[i]); 160 | return 0; 161 | } 162 | } 163 | 164 | } 165 | 166 | Object result(PyUnicode_FromUnicode(0, cch)); 167 | if (!result) 168 | return 0; 169 | 170 | Py_UNICODE* pch = PyUnicode_AS_UNICODE(result.Get()); 171 | for (Py_ssize_t i = 0; i < cch; i++) 172 | pch[i] = (Py_UNICODE)sz[i]; 173 | 174 | return result.Detach(); 175 | } 176 | 177 | void SQLWChar::dump() 178 | { 179 | printf("sqlwchar=%ld pch=%p len=%ld owns=%d\n", sizeof(SQLWCHAR), pch, len, (int)owns_memory); 180 | if (pch && len) 181 | { 182 | Py_ssize_t i = 0; 183 | while (i < len) 184 | { 185 | Py_ssize_t stop = min(i + 10, len); 186 | 187 | for (Py_ssize_t x = i; x < stop; x++) 188 | { 189 | for (int byteindex = (int)sizeof(SQLWCHAR)-1; byteindex >= 0; byteindex--) 190 | { 191 | int byte = (pch[x] >> (byteindex * 8)) & 0xFF; 192 | printf("%02x", byte); 193 | } 194 | printf(" "); 195 | } 196 | 197 | for (Py_ssize_t x = i; x < stop; x++) 198 | printf("%c", (char)pch[x]); 199 | 200 | printf("\n"); 201 | 202 | i += 10; 203 | } 204 | 205 | printf("\n\n"); 206 | } 207 | } 208 | 209 | 210 | SQLWCHAR* SQLWCHAR_FromUnicode(const Py_UNICODE* pch, Py_ssize_t len) 211 | { 212 | SQLWCHAR* p = (SQLWCHAR*)pyodbc_malloc(sizeof(SQLWCHAR) * (len+1)); 213 | if (p != 0) 214 | { 215 | if (!sqlwchar_copy(p, pch, len)) 216 | { 217 | pyodbc_free(p); 218 | p = 0; 219 | } 220 | } 221 | return p; 222 | } 223 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/sqlwchar.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef _PYODBCSQLWCHAR_H 3 | #define _PYODBCSQLWCHAR_H 4 | 5 | class SQLWChar 6 | { 7 | // An object designed to convert strings and Unicode objects to SQLWCHAR, hold the temporary buffer, and delete it 8 | // in the destructor. 
9 | 10 | private: 11 | SQLWCHAR* pch; 12 | Py_ssize_t len; 13 | bool owns_memory; 14 | 15 | public: 16 | SQLWChar() 17 | { 18 | pch = 0; 19 | len = 0; 20 | owns_memory = false; 21 | } 22 | 23 | SQLWChar(PyObject* o); 24 | 25 | bool Convert(PyObject* o); 26 | 27 | void Free(); 28 | 29 | ~SQLWChar() 30 | { 31 | Free(); 32 | } 33 | 34 | void dump(); 35 | 36 | operator SQLWCHAR*() { return pch; } 37 | operator const SQLWCHAR*() const { return pch; } 38 | operator bool() const { return pch != 0; } 39 | Py_ssize_t size() const { return len; } 40 | 41 | SQLWCHAR* operator[] (Py_ssize_t i) 42 | { 43 | I(i <= len); // we'll allow access to the NULL? 44 | return &pch[i]; 45 | } 46 | 47 | const SQLWCHAR* operator[] (Py_ssize_t i) const 48 | { 49 | I(i <= len); // we'll allow access to the NULL? 50 | return &pch[i]; 51 | } 52 | }; 53 | 54 | // The size of a SQLWCHAR. 55 | extern Py_ssize_t SQLWCHAR_SIZE; 56 | 57 | // Allocate a new Unicode object, initialized from the given SQLWCHAR string. 58 | PyObject* PyUnicode_FromSQLWCHAR(const SQLWCHAR* sz, Py_ssize_t cch); 59 | 60 | SQLWCHAR* SQLWCHAR_FromUnicode(const Py_UNICODE* pch, Py_ssize_t len); 61 | 62 | #endif // _PYODBCSQLWCHAR_H 63 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/src/wrapper.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef _WRAPPER_H_ 3 | #define _WRAPPER_H_ 4 | 5 | class Object 6 | { 7 | private: 8 | PyObject* p; 9 | 10 | // GCC freaks out if these are private, but it doesn't use them (?) 
11 | // Object(const Object& illegal); 12 | // void operator=(const Object& illegal); 13 | 14 | public: 15 | Object(PyObject* _p = 0) 16 | { 17 | p = _p; 18 | } 19 | 20 | ~Object() 21 | { 22 | Py_XDECREF(p); 23 | } 24 | 25 | Object& operator=(PyObject* pNew) 26 | { 27 | Py_XDECREF(p); 28 | p = pNew; 29 | return *this; 30 | } 31 | 32 | bool IsValid() const { return p != 0; } 33 | 34 | void Attach(PyObject* _p) 35 | { 36 | Py_XDECREF(p); 37 | p = _p; 38 | } 39 | 40 | PyObject* Detach() 41 | { 42 | PyObject* pT = p; 43 | p = 0; 44 | return pT; 45 | } 46 | 47 | operator PyObject*() 48 | { 49 | return p; 50 | } 51 | 52 | PyObject* Get() 53 | { 54 | return p; 55 | } 56 | }; 57 | 58 | 59 | #ifdef WINVER 60 | struct RegKey 61 | { 62 | HKEY hkey; 63 | 64 | RegKey() 65 | { 66 | hkey = 0; 67 | } 68 | 69 | ~RegKey() 70 | { 71 | if (hkey != 0) 72 | RegCloseKey(hkey); 73 | } 74 | 75 | operator HKEY() { return hkey; } 76 | }; 77 | #endif 78 | 79 | #endif // _WRAPPER_H_ 80 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests/README.rst: -------------------------------------------------------------------------------- 1 | Tests for DBAdapter/pyodbc 2 | ====================== 3 | 4 | In order to test pyodbc access to a database and a properly configured 5 | ODBC driver setup is needed. 6 | 7 | A valid ODBC connection string should be provided in order to run the 8 | test suite. All configuration needed to run the test suite is 9 | performed via environment variables. 10 | 11 | 12 | Configuring the test suite 13 | ========================== 14 | 15 | The following environment variables are used to configure the test 16 | suite: 17 | 18 | 19 | IOPRO_PYODBC_TEST_CONNSTR 20 | ------------------------------------ 21 | 22 | The connection string to use when performing database connections. 
23 | 24 | For example: 25 | 'DRIVER={FreeTDS};SERVER=192.168.1.135;DATABASE=test;Port=1433;Uid=test;Pwd=test' 26 | 27 | IOPRO_PYODBC_TEST_DBMS 28 | ------------------------------------ 29 | 30 | If set, specifies which DBMS is behind the ODBC connection. Some 31 | tests containing DBMS specific SQL will be skipped unless the 32 | appropriate DBMS is set in this variable. 33 | 34 | Supported values are: 35 | 'sql_server' - Microsoft SQL server. 36 | 'postgresql' - PostgreSQL. 37 | 38 | 39 | IOPRO_PYODBC_TEST_LOGGING 40 | ------------------------------------ 41 | 42 | If set to a value that evaluates to True, enable tracing when 43 | executing the test suite. This results in very verbose actions on 44 | whats going on inside pyodbc. This is mostly a debugging tool. 45 | 46 | It basically sets "enable_tracing" in DBAdapter/pyodbc when running the 47 | tests. 48 | 49 | 50 | IOPRO_PYODBC_TEST_UNICODE_RESULTS 51 | -------------------------------------------- 52 | 53 | If set to a value that evaluates to True, tell pyodbc to use unicode 54 | results when connecting to the database. This means that connection to 55 | the database is performed by setting the unicode_results keyword 56 | argument for connect to True. 57 | 58 | 59 | A word of warning about side effects of running the test suite 60 | ============================================================== 61 | 62 | The test suite creates and drops tables in order to perform the 63 | tests. This means that the database used for test may have tables 64 | created and dropped. Needless to say, use a blank database meant for 65 | testing to run the tests on, using a database user meant for testing. 
66 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests/runtests.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, division, absolute_import 2 | 3 | import unittest 4 | 5 | def test_all(descriptions=True, buffer=True, verbosity=2, failfast=False): 6 | loader = unittest.TestLoader() 7 | suite = loader.discover('.') 8 | runner = unittest.TextTestRunner(descriptions=descriptions, 9 | verbosity=verbosity, 10 | buffer=buffer, 11 | failfast=failfast) 12 | return runner.run(suite) 13 | 14 | 15 | if __name__=='__main__': 16 | test_all() 17 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests/test_big_fetch.py: -------------------------------------------------------------------------------- 1 | """ 2 | unittest checking that big fetches that involve iterating/realloc in the C-side 3 | is working. 4 | 5 | There've been a number of issues related to this loop. 
6 | """ 7 | 8 | from __future__ import absolute_import, print_function, division 9 | 10 | from unittest_support import IOProPyodbcTest 11 | import unittest 12 | import sys 13 | import numpy as np 14 | 15 | class TestBigFetch_unicode(IOProPyodbcTest): 16 | def _create_test_table(self, N): 17 | try: 18 | self.conn.execute('drop table BIG_FETCH_TEST').commit() 19 | except Exception: 20 | pass 21 | 22 | self.conn.execute('''create table BIG_FETCH_TEST ( 23 | unicode_not_null nvarchar(80) not null, 24 | int_nullable int NULL)''').commit() 25 | 26 | cursor = self.conn.cursor() 27 | crazy_nulls = set([1,2,3,5,8,13,21,34]) 28 | for i in range(N): 29 | cursor.execute('''insert into BIG_FETCH_TEST(unicode_not_null, int_nullable) 30 | values (?,?)''', 31 | ('something {0}'.format(i), 32 | i if (i % 42) not in crazy_nulls else None )) 33 | cursor.commit() 34 | 35 | 36 | 37 | def _check_no_nulls(self, N): 38 | self._create_test_table(N) 39 | da = self.conn.execute(''' 40 | select all [unicode_not_null] 41 | from BIG_FETCH_TEST 42 | ''').fetchdictarray() 43 | unicode_not_null = da['unicode_not_null'] 44 | self.assertEqual(N, len(unicode_not_null)) 45 | for i in xrange(N): 46 | self.assertEqual(unicode_not_null[i], 'something {0}'.format(i)) 47 | 48 | def _check_nulls(self, N, query_nulls): 49 | self._create_test_table(N) 50 | da = self.conn.execute(''' 51 | select all [int_nullable] 52 | from BIG_FETCH_TEST 53 | ''').fetchdictarray(return_nulls=query_nulls) 54 | int_nullable = da['int_nullable'] 55 | if query_nulls: 56 | nulls = da['int_nullable_isnull'] 57 | crazy_nulls = set([1,2,3,5,8,13,21,34]) 58 | self.assertEqual(N, len(int_nullable)) 59 | for i in xrange(N): 60 | if i % 42 in crazy_nulls: 61 | # this should be null 62 | self.assertEqual(int_nullable[i], -2147483648) 63 | if query_nulls: 64 | self.assertTrue(nulls[i], 65 | msg='wrong null value in row {0} (expected {1} got {2})'.format(i, True, nulls[i])) 66 | else: 67 | # not null 68 | self.assertEqual(int_nullable[i], 
i) 69 | if query_nulls: 70 | self.assertFalse(nulls[i], 71 | msg='wrong null value in row {0} (expected {1} got {2})'.format(i, False, nulls[i])) 72 | 73 | 74 | def test_check_no_nulls_single(self): 75 | self._check_no_nulls(1000) 76 | 77 | def test_check_no_nulls_exact(self): 78 | self._check_no_nulls(10000) 79 | 80 | def test_check_no_nulls_multiple(self): 81 | self._check_no_nulls(30000) 82 | 83 | def test_check_no_nulls_modulo(self): 84 | self._check_no_nulls(32000) 85 | 86 | def test_check_nulls_single(self): 87 | self._check_nulls(1000, False) 88 | 89 | def test_check_nulls_exact(self): 90 | self._check_nulls(10000, False) 91 | 92 | def test_check_nulls_multiple(self): 93 | self._check_nulls(30000, False) 94 | 95 | def test_check_nulls_modulo(self): 96 | self._check_nulls(32000, False) 97 | 98 | def test_check_with_nulls_single(self): 99 | self._check_nulls(1000, True) 100 | 101 | def test_check_with_nulls_exact(self): 102 | self._check_nulls(10000, True) 103 | 104 | def test_check_with_nulls_multiple(self): 105 | self._check_nulls(30000, True) 106 | 107 | def test_check_with_nulls_modulo(self): 108 | self._check_nulls(32000, True) 109 | 110 | if __name__ == '__main__': 111 | unittest.main() 112 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests/test_issue_89.py: -------------------------------------------------------------------------------- 1 | """ 2 | unittests UIPro/pyodbc: issue #89 on github 3 | """ 4 | 5 | from __future__ import absolute_import, print_function, division 6 | 7 | from unittest_support import IOProPyodbcTest 8 | import unittest 9 | import numpy as np 10 | 11 | class TestIssue89(IOProPyodbcTest): 12 | def test_issue_89(self): 13 | #note that the issue results in a segfault. 
class TestIssue89(IOProPyodbcTest):
    """Regression test for DBAdapter/pyodbc github issue #89."""

    def test_issue_89(self):
        # note that the issue results in a segfault.
        # this sample will also make some basic testing on the number
        # of returned rows
        try:
            self.conn.execute('drop table ISSUE_89_TEST').commit()
        except Exception:
            pass  # table may not exist yet

        row_count = 100000
        batch = 1000
        self.conn.execute('''create table ISSUE_89_TEST (
                                 name nvarchar(200),
                                 fval float(24),
                                 ival int)''').commit()

        # Insert row_count rows, `batch` at a time.
        for i in range(0, row_count, batch):
            cursor = self.conn.cursor()
            cursor.executemany('insert into ISSUE_89_TEST values (?, ?, ?)',
                               [('sample', 42, 31.0)] * batch)
            cursor.commit()

        # NOTE(review): the fetches below must sit OUTSIDE the insert loop —
        # the length assertions expect all row_count rows to be present.
        # The original issue crashed (segfault) during these fetches.
        cursor.execute('select * from ISSUE_89_TEST')
        da = cursor.fetchdictarray()
        self.assertEqual(len(da['name']), row_count)
        del da
        cursor.execute('select * from ISSUE_89_TEST')
        sa = cursor.fetchsarray()
        self.assertEqual(len(sa), row_count)
        del sa


if __name__ == '__main__':
    unittest.main()
class TestIssue90(IOProPyodbcTest):
    """Regression test for DBAdapter/pyodbc github issue #90:
    fetchdictarray() of an nvarchar column at many different row counts."""

    def _create_test_table(self):
        """(Re)create the single nvarchar-column test table."""
        try:
            self.conn.execute('drop table ISSUE_90_TEST').commit()
        except Exception:
            pass  # table may not exist yet

        self.conn.execute('''create table ISSUE_90_TEST (
                                 name nvarchar(255))''').commit()

    def _issue_90(self, N):
        """Insert N 'foo' rows and verify fetchdictarray returns them all."""
        print("Creating table with {0} elements\n".format(N))
        self._create_test_table()
        cursor = self.conn.cursor()
        for _ in range(N):
            # BUG FIX: the parameter was written ('foo'), which is just the
            # bare string, not a tuple; make the 1-tuple explicit.
            cursor.execute('''insert into ISSUE_90_TEST values (?)''', ('foo',))
        cursor.commit()
        sys.stdout.flush()

        da = cursor.execute('''SELECT ALL [name] FROM ISSUE_90_TEST''').fetchdictarray()
        self.assertTrue((da['name'] == 'foo').all())

    def test_issue_90_10001(self):
        self._issue_90(10001)

    def test_issue_90_100(self):
        self._issue_90(100)

    def test_issue_90_1000(self):
        self._issue_90(1000)

    def test_issue_90_5000(self):
        self._issue_90(5000)

    def test_issue_90_10000(self):
        self._issue_90(10000)

    def test_issue_90_15000(self):
        self._issue_90(15000)

    def test_issue_90_20000(self):
        self._issue_90(20000)

    def test_issue_90_25000(self):
        self._issue_90(25000)

    def test_issue_90_30000(self):
        self._issue_90(30000)


if __name__ == '__main__':
    unittest.main()
class TestMoney(IOProPyodbcTest):
    """money/smallmoney support: both map to decimal in ODBC (4 decimal
    digits), and DBAdapter/pyodbc maps decimal to double precision float."""

    def _check_money_roundtrip(self, table, sqltype):
        """Shared body for both tests (they were copy-paste duplicates):
        create `table` with one `sqltype` column, insert sample values and
        verify fetchdictarray returns them as float64 within 1e-4."""
        try:
            self.conn.execute('drop table {0}'.format(table)).commit()
        except Exception:
            pass  # table may not exist yet

        self.conn.execute('create table {0} (val {1} not null)'
                          .format(table, sqltype)).commit()
        values = ((42.70,), (32.50,), (12.43,))
        cur = self.conn.cursor()
        cur.executemany('insert into {0} values (?)'.format(table), values)
        cur.commit()

        da = self.conn.execute('select * from {0}'.format(table)).fetchdictarray()
        self.assertEqual(np.float64, da['val'].dtype)
        # atol=1e-4 matches the 4 decimal digits the money types carry
        self.assertTrue(np.allclose(np.array(values).ravel('C'), da['val'],
                                    rtol=0.0, atol=1e-4))
        self.conn.execute('drop table {0}'.format(table)).commit()

    def test_smallmoney_dictarray(self):
        self._check_money_roundtrip('SMALL_MONEY_TEST', 'smallmoney')

    def test_money_dictarray(self):
        self._check_money_roundtrip('MONEY_TEST', 'money')


if __name__ == '__main__':
    unittest.main()
class TestSetLimit(IOProPyodbcTest):
    """iopro_set_text_limit() must cap the width of fetched text columns."""

    def test_simple(self):
        limit = 100
        try:
            self.conn.execute('drop table SET_TEXT_LIMIT_TEST').commit()
        except Exception:
            pass  # table may not exist yet

        # Column is twice as wide as the limit so truncation is observable.
        self.conn.execute('create table SET_TEXT_LIMIT_TEST (val varchar({0}) not null)'
                          .format(limit * 2)).commit()
        lengths = list(range(0, limit * 2, limit // 40))
        test_data = [('x' * i,) for i in lengths]
        cur = self.conn.cursor()
        cur.executemany('insert into SET_TEXT_LIMIT_TEST values (?)', test_data)
        cur.commit()

        with text_limit(limit):
            da = self.conn.execute('select * from SET_TEXT_LIMIT_TEST').fetchdictarray()

        val = da['val']
        # BUG FIX: these were assertTrue(a, b), which ALWAYS passes because
        # the first argument is truthy and the second is treated as msg.
        self.assertEqual(np.string_, val.dtype.type)
        # itemsize is limit+1 (room for the terminating NUL)
        self.assertEqual(limit + 1, val.dtype.itemsize)


if __name__ == '__main__':
    unittest.main()
class TestText(IOProPyodbcTest):
    """fetchdictarray() support for the SQL `text` type."""

    def _roundtrip(self, values):
        """Shared body for both tests (they were copy-paste duplicates):
        insert `values` into a text column and verify they come back
        unchanged as a numpy byte-string array."""
        try:
            self.conn.execute('drop table TEXT_TEST').commit()
        except Exception:
            pass  # table may not exist yet

        self.conn.execute('create table TEXT_TEST (val text not null)').commit()
        cur = self.conn.cursor()
        cur.executemany('insert into TEXT_TEST values (?)', values)
        cur.commit()

        # text columns are unbounded: cap the fetch at the longest value
        with text_limit(max(*[len(v[0]) for v in values])):
            da = self.conn.execute('select * from TEXT_TEST').fetchdictarray()

        self.assertEqual(np.string_, da['val'].dtype.type)
        self.assertTrue(np.all(da['val'] == np.ravel(values)))
        self.conn.execute('drop table TEXT_TEST').commit()

    def test_text_dictarray(self):
        self._roundtrip((("some small string",), ("foo",)))

    def test_text_dictarray_big_entry(self):
        # includes one 4000-character entry to exercise wide rows
        self._roundtrip((("some small string",), ("foo",),
                         ("0123456789abcde\n" * 250,)))


if __name__ == '__main__':
    unittest.main()
class TestUnicode(IOProPyodbcTest):
    """NCHAR/NVARCHAR columns must come back from fetchdictarray() as
    numpy unicode arrays."""

    def _check_unicode_roundtrip(self, table, sqltype):
        """Shared body for both tests (they were copy-paste duplicates)."""
        try:
            self.conn.execute('drop table {0}'.format(table)).commit()
        except Exception:
            pass  # table may not exist yet

        self.conn.execute('create table {0} (val {1} not null)'
                          .format(table, sqltype)).commit()
        values = ((u"some small string",), (u"foo",))
        cur = self.conn.cursor()
        cur.executemany('insert into {0} values (?)'.format(table), values)
        cur.commit()

        da = self.conn.execute('select * from {0}'.format(table)).fetchdictarray()
        self.assertEqual(np.unicode_, da['val'].dtype.type)
        self.conn.execute('drop table {0}'.format(table)).commit()

    def test_nchar(self):
        self._check_unicode_roundtrip('NCHAR_TEST', 'NCHAR(42)')

    def test_nvarchar(self):
        self._check_unicode_roundtrip('NVARCHAR_TEST', 'NVARCHAR(42)')


if __name__ == '__main__':
    unittest.main()
class IOProPyodbcTest(TestCase):
    """
    Provides a connection (self.conn) that is initialized from
    environment variables.

    Subclasses can implement a couple of methods to create/cleanup
    tables used as tests. This should be implemented as class
    methods so that the tables are created once per class.
    """

    def setUp(self):
        pyodbc.enable_mem_guards(True)
        pyodbc.enable_tracing(_enable_logging)
        self.assertIsNotNone(_conn_string, msg=_error_string)
        try:
            self.conn = pyodbc.connect(_conn_string,
                                       unicode_results=_unicode_results,
                                       timeout=3)
        except Exception as e:
            # BUG FIX: this formatted e.args[1], which raises IndexError for
            # exceptions carrying fewer than two args and masks the real
            # connect failure; format the exception itself instead.
            raise Exception('It seems that your {0} is not setup correctly. Attempting to connect resulted in:\n{1}'.format(_conn_string_key, e))

    def tearDown(self):
        # Dropping the reference closes the connection.
        del self.conn
class dbms_specific(object):
    """
    A decorator to mark tests as specific to a given (set) of DBMS.
    Because they use DBMS specific types/SQL extensions, for example.

    Sample use:
        @dbms_specific(DBMS.SQL_Server, DBMS.PostgreSQL)
    """
    def __init__(self, *args):
        self.dbms = args

    def __call__(self, fn):
        if _test_db in self.dbms:
            return fn
        else:
            @functools.wraps(fn)
            def fail(*args, **kwargs):
                # BUG FIX: SkipTest was never imported in this module, so
                # hitting this path raised NameError instead of skipping.
                from unittest import SkipTest
                raise SkipTest("only for dbms: {0}".format(', '.join(self.dbms)))
            return fail


def get_connection_string():
    """Return the connection string read from IOPRO_PYODBC_TEST_CONNSTR."""
    return _conn_string


@contextmanager
def text_limit(size):
    """Temporarily set pyodbc's text limit; restores the old limit on exit.

    BUG FIX: restoration now happens in a finally block, so the old limit
    is restored even when the body raises.
    """
    old = pyodbc.iopro_set_text_limit(size)
    try:
        yield
    finally:
        pyodbc.iopro_set_text_limit(old)
def main():
    """Run the DB API 2.0 compliance suite (dbapi20) against pyodbc."""
    add_to_path()
    import pyodbc

    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog [options] connection_string")
    parser.add_option("-v", "--verbose", action="count",
                      help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False,
                      help="Print debugging items")

    options, args = parser.parse_args()
    if len(args) > 1:
        parser.error('Only one argument is allowed. Do you need quotes around the connection string?')

    if args:
        connection_string = args[0]
    else:
        # No argument: fall back to the connection string in setup.cfg.
        connection_string = load_setup_connection_string('dbapitests')
        if not connection_string:
            parser.print_help()
            raise SystemExit()

    class test_pyodbc(dbapi20.DatabaseAPI20Test):
        driver = pyodbc
        connect_args = [connection_string]
        connect_kw_args = {}

        # Features not applicable to pyodbc: skip by overriding with no-ops.
        def test_nextset(self): pass
        def test_setoutputsize(self): pass
        def test_ExceptionsAsConnectionAttributes(self): pass

    suite = unittest.makeSuite(test_pyodbc, 'test')
    runner = unittest.TextTestRunner(verbosity=(options.verbose > 1) and 9 or 0)
    runner.run(suite)


if __name__ == '__main__':
    main()
class ExcelTestCase(unittest.TestCase):
    """Read-only tests against the sample Excel workbook via the ODBC Excel
    driver (tests2 = Python 2 suite; note `long` below).

    Per the file header, creating/modifying Excel files never worked, so
    only reads are exercised.
    """

    def __init__(self, method_name):
        unittest.TestCase.__init__(self, method_name)

    def setUp(self):
        # CNXNSTRING is a module global filled in by main() before running.
        self.cnxn = pyodbc.connect(CNXNSTRING, autocommit=True)
        self.cursor = self.cnxn.cursor()

        # Best-effort cleanup of leftover tables t0..t2 from earlier runs.
        for i in range(3):
            try:
                self.cursor.execute("drop table t%d" % i)
                self.cnxn.commit()
            except:
                pass

        self.cnxn.rollback()

    def tearDown(self):
        try:
            self.cursor.close()
            self.cnxn.close()
        except:
            # If we've already closed the cursor or connection, exceptions are thrown.
            pass

    def test_getinfo_string(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)
        self.assert_(isinstance(value, str))

    def test_getinfo_bool(self):
        value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)
        self.assert_(isinstance(value, bool))

    def test_getinfo_int(self):
        # `long` is Python 2 only -- fine here, this is the py2 suite.
        value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)
        self.assert_(isinstance(value, (int, long)))

    def test_getinfo_smallint(self):
        value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR)
        self.assert_(isinstance(value, int))

    def test_read_sheet(self):
        # The first method of reading data is to access worksheets by name in this format [name$].
        #
        # Our second sheet is named Sheet2 and has two columns. The first has values 10, 20, 30, etc.

        rows = self.cursor.execute("select * from [Sheet2$]").fetchall()
        self.assertEquals(len(rows), 5)

        for index, row in enumerate(rows):
            self.assertEquals(row.s2num, float(index + 1) * 10)

    def test_read_range(self):
        # The second method of reading data is to assign a name to a range of cells and access that as a table.
        #
        # Our first worksheet has a section named Table1. The first column has values 1, 2, 3, etc.

        rows = self.cursor.execute("select * from Table1").fetchall()
        self.assertEquals(len(rows), 10)

        for index, row in enumerate(rows):
            self.assertEquals(row.num, float(index + 1))
            self.assertEquals(row.val, chr(ord('a') + index))

    def test_tables(self):
        # This is useful for figuring out what is available
        tables = [ row.table_name for row in self.cursor.tables() ]
        assert 'Sheet2$' in tables, 'tables: %s' % ' '.join(tables)

    # Kept from the original: a commented-out append test documenting that
    # inserting into the workbook was attempted but never worked.
    # def test_append(self):
    #     rows = self.cursor.execute("select s2num, s2val from [Sheet2$]").fetchall()
    #
    #     print rows
    #
    #     nextnum = max([ row.s2num for row in rows ]) + 10
    #
    #     self.cursor.execute("insert into [Sheet2$](s2num, s2val) values (?, 'z')", nextnum)
    #
    #     row = self.cursor.execute("select s2num, s2val from [Sheet2$] where s2num=?", nextnum).fetchone()
    #     self.assertTrue(row)
    #
    #     print 'added:', nextnum, len(rows), 'rows'
    #
    #     self.assertEquals(row.s2num, nextnum)
    #     self.assertEquals(row.s2val, 'z')
    #
    #     self.cnxn.commit()
def main():
    """Command-line entry point: locate test.xls next to this file, print
    driver/library info, then run the Excel test suite."""
    from optparse import OptionParser
    parser = OptionParser() #usage=usage)
    parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_option("-t", "--test", help="Run only the named test")

    (options, args) = parser.parse_args()

    if args:
        parser.error('no arguments expected')

    global CNXNSTRING

    # The workbook shipped with the tests lives next to this file.
    path = dirname(abspath(__file__))
    filename = join(path, 'test.xls')
    assert os.path.exists(filename)
    CNXNSTRING = 'Driver={Microsoft Excel Driver (*.xls)};DBQ=%s;READONLY=FALSE' % filename

    # Open one connection up front just to report versions.
    cnxn = pyodbc.connect(CNXNSTRING, autocommit=True)
    print_library_info(cnxn)
    cnxn.close()

    suite = load_tests(ExcelTestCase, options.test)

    testRunner = unittest.TextTestRunner(verbosity=options.verbose)
    result = testRunner.run(suite)


if __name__ == '__main__':

    # Add the build directory to the path so we're testing the latest build, not the installed version.
    add_to_path()
    import pyodbc
    main()
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContinuumIO/DBAdapter/19a49b4963d50df27cc930cf27693f31fd8e0426/dbadapter/pyodbc/tests2/sqlitetests.py -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests2/sqlservertests.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContinuumIO/DBAdapter/19a49b4963d50df27cc930cf27693f31fd8e0426/dbadapter/pyodbc/tests2/sqlservertests.py -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests2/test.xls: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContinuumIO/DBAdapter/19a49b4963d50df27cc930cf27693f31fd8e0426/dbadapter/pyodbc/tests2/test.xls -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests2/testbase.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | 4 | _TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-' 5 | 6 | def _generate_test_string(length): 7 | """ 8 | Returns a string of `length` characters, constructed by repeating _TESTSTR as necessary. 9 | 10 | To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are 11 | tested with 3 lengths. This function helps us generate the test data. 12 | 13 | We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will 14 | be hidden and to help us manually identify where a break occurs. 
15 | """ 16 | if length <= len(_TESTSTR): 17 | return _TESTSTR[:length] 18 | 19 | c = (length + len(_TESTSTR)-1) / len(_TESTSTR) 20 | v = _TESTSTR * c 21 | return v[:length] 22 | 23 | class TestBase(unittest.TestCase): 24 | 25 | 26 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests2/testutils.py: -------------------------------------------------------------------------------- 1 | 2 | import os, sys, platform 3 | from os.path import join, dirname, abspath, basename 4 | import unittest 5 | 6 | def add_to_path(): 7 | """ 8 | Prepends the build directory to the path so that newly built pyodbc libraries are used, allowing it to be tested 9 | without installing it. 10 | """ 11 | # Put the build directory into the Python path so we pick up the version we just built. 12 | # 13 | # To make this cross platform, we'll search the directories until we find the .pyd file. 14 | 15 | import imp 16 | 17 | library_exts = [ t[0] for t in imp.get_suffixes() if t[-1] == imp.C_EXTENSION ] 18 | library_names = [ 'pyodbc%s' % ext for ext in library_exts ] 19 | 20 | # Only go into directories that match our version number. 21 | 22 | dir_suffix = '-%s.%s' % (sys.version_info[0], sys.version_info[1]) 23 | 24 | build = join(dirname(dirname(abspath(__file__))), 'build') 25 | 26 | for root, dirs, files in os.walk(build): 27 | for d in dirs[:]: 28 | if not d.endswith(dir_suffix): 29 | dirs.remove(d) 30 | 31 | for name in library_names: 32 | if name in files: 33 | sys.path.insert(0, root) 34 | return 35 | 36 | print >>sys.stderr, 'Did not find the pyodbc library in the build directory. Will use an installed version.' 
37 | 38 | 39 | def print_library_info(cnxn): 40 | import pyodbc 41 | print 'python: %s' % sys.version 42 | print 'pyodbc: %s %s' % (pyodbc.version, os.path.abspath(pyodbc.__file__)) 43 | print 'odbc: %s' % cnxn.getinfo(pyodbc.SQL_ODBC_VER) 44 | print 'driver: %s %s' % (cnxn.getinfo(pyodbc.SQL_DRIVER_NAME), cnxn.getinfo(pyodbc.SQL_DRIVER_VER)) 45 | print ' supports ODBC version %s' % cnxn.getinfo(pyodbc.SQL_DRIVER_ODBC_VER) 46 | print 'os: %s' % platform.system() 47 | print 'unicode: Py_Unicode=%s SQLWCHAR=%s' % (pyodbc.UNICODE_SIZE, pyodbc.SQLWCHAR_SIZE) 48 | 49 | if platform.system() == 'Windows': 50 | print ' %s' % ' '.join([s for s in platform.win32_ver() if s]) 51 | 52 | 53 | 54 | def load_tests(testclass, name, *args): 55 | """ 56 | Returns a TestSuite for tests in `testclass`. 57 | 58 | name 59 | Optional test name if you only want to run 1 test. If not provided all tests in `testclass` will be loaded. 60 | 61 | args 62 | Arguments for the test class constructor. These will be passed after the test method name. 63 | """ 64 | if name: 65 | if not name.startswith('test_'): 66 | name = 'test_%s' % name 67 | names = [ name ] 68 | 69 | else: 70 | names = [ method for method in dir(testclass) if method.startswith('test_') ] 71 | 72 | return unittest.TestSuite([ testclass(name, *args) for name in names ]) 73 | 74 | 75 | def load_setup_connection_string(section): 76 | """ 77 | Attempts to read the default connection string from the setup.cfg file. 78 | 79 | If the file does not exist or if it exists but does not contain the connection string, None is returned. If the 80 | file exists but cannot be parsed, an exception is raised. 
81 | """ 82 | from os.path import exists, join, dirname, splitext, basename 83 | from configparser import SafeConfigParser 84 | 85 | FILENAME = 'setup.cfg' 86 | KEY = 'connection-string' 87 | 88 | path = join(dirname(dirname(abspath(__file__))), 'tmp', FILENAME) 89 | 90 | if exists(path): 91 | try: 92 | p = SafeConfigParser() 93 | p.read(path) 94 | except: 95 | raise SystemExit('Unable to parse %s: %s' % (path, sys.exc_info()[1])) 96 | 97 | if p.has_option(section, KEY): 98 | return p.get(section, KEY) 99 | 100 | return None 101 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests3/dbapitests.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from testutils import * 4 | import dbapi20 5 | 6 | def main(): 7 | add_to_path() 8 | import pyodbc 9 | 10 | from optparse import OptionParser 11 | parser = OptionParser(usage="usage: %prog [options] connection_string") 12 | parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)") 13 | parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items") 14 | 15 | (options, args) = parser.parse_args() 16 | if len(args) > 1: 17 | parser.error('Only one argument is allowed. 
def main():
    """Run the DB API 2.0 compliance suite (dbapi20) against pyodbc."""
    add_to_path()
    import pyodbc

    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog [options] connection_string")
    parser.add_option("-v", "--verbose", action="count",
                      help="Increment test verbosity (can be used multiple times)")
    parser.add_option("-d", "--debug", action="store_true", default=False,
                      help="Print debugging items")

    options, args = parser.parse_args()

    if len(args) > 1:
        parser.error('Only one argument is allowed. Do you need quotes around the connection string?')

    # Prefer the command-line string; otherwise read it from setup.cfg.
    connection_string = args[0] if args else load_setup_connection_string('dbapitests')
    if not args and not connection_string:
        parser.print_help()
        raise SystemExit()

    class test_pyodbc(dbapi20.DatabaseAPI20Test):
        driver = pyodbc
        connect_args = [connection_string]
        connect_kw_args = {}

        # DB API features pyodbc does not support: overridden as no-ops.
        def test_nextset(self): pass
        def test_setoutputsize(self): pass
        def test_ExceptionsAsConnectionAttributes(self): pass

    suite = unittest.makeSuite(test_pyodbc, 'test')
    verbosity = (options.verbose > 1) and 9 or 0
    unittest.TextTestRunner(verbosity=verbosity).run(suite)


if __name__ == '__main__':
    main()
38 | pass 39 | 40 | def test_getinfo_string(self): 41 | value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR) 42 | self.assert_(isinstance(value, str)) 43 | 44 | def test_getinfo_bool(self): 45 | value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES) 46 | self.assert_(isinstance(value, bool)) 47 | 48 | def test_getinfo_int(self): 49 | value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION) 50 | self.assert_(isinstance(value, (int, long))) 51 | 52 | def test_getinfo_smallint(self): 53 | value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR) 54 | self.assert_(isinstance(value, int)) 55 | 56 | 57 | def test_read_sheet(self): 58 | # The first method of reading data is to access worksheets by name in this format [name$]. 59 | # 60 | # Our second sheet is named Sheet2 and has two columns. The first has values 10, 20, 30, etc. 61 | 62 | rows = self.cursor.execute("select * from [Sheet2$]").fetchall() 63 | self.assertEquals(len(rows), 5) 64 | 65 | for index, row in enumerate(rows): 66 | self.assertEquals(row.s2num, float(index + 1) * 10) 67 | 68 | def test_read_range(self): 69 | # The second method of reading data is to assign a name to a range of cells and access that as a table. 70 | # 71 | # Our first worksheet has a section named Table1. The first column has values 1, 2, 3, etc. 
72 | 73 | rows = self.cursor.execute("select * from Table1").fetchall() 74 | self.assertEquals(len(rows), 10) 75 | 76 | for index, row in enumerate(rows): 77 | self.assertEquals(row.num, float(index + 1)) 78 | self.assertEquals(row.val, chr(ord('a') + index)) 79 | 80 | def test_tables(self): 81 | # This is useful for figuring out what is available 82 | tables = [ row.table_name for row in self.cursor.tables() ] 83 | assert 'Sheet2$' in tables, 'tables: %s' % ' '.join(tables) 84 | 85 | 86 | # def test_append(self): 87 | # rows = self.cursor.execute("select s2num, s2val from [Sheet2$]").fetchall() 88 | # 89 | # print rows 90 | # 91 | # nextnum = max([ row.s2num for row in rows ]) + 10 92 | # 93 | # self.cursor.execute("insert into [Sheet2$](s2num, s2val) values (?, 'z')", nextnum) 94 | # 95 | # row = self.cursor.execute("select s2num, s2val from [Sheet2$] where s2num=?", nextnum).fetchone() 96 | # self.assertTrue(row) 97 | # 98 | # print 'added:', nextnum, len(rows), 'rows' 99 | # 100 | # self.assertEquals(row.s2num, nextnum) 101 | # self.assertEquals(row.s2val, 'z') 102 | # 103 | # self.cnxn.commit() 104 | 105 | 106 | def main(): 107 | from optparse import OptionParser 108 | parser = OptionParser() #usage=usage) 109 | parser.add_option("-v", "--verbose", action="count", help="Increment test verbosity (can be used multiple times)") 110 | parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items") 111 | parser.add_option("-t", "--test", help="Run only the named test") 112 | 113 | (options, args) = parser.parse_args() 114 | 115 | if args: 116 | parser.error('no arguments expected') 117 | 118 | global CNXNSTRING 119 | 120 | path = dirname(abspath(__file__)) 121 | filename = join(path, 'test.xls') 122 | assert os.path.exists(filename) 123 | CNXNSTRING = 'Driver={Microsoft Excel Driver (*.xls)};DBQ=%s;READONLY=FALSE' % filename 124 | 125 | cnxn = pyodbc.connect(CNXNSTRING, autocommit=True) 126 | print_library_info(cnxn) 127 | 
cnxn.close() 128 | 129 | suite = load_tests(ExcelTestCase, options.test) 130 | 131 | testRunner = unittest.TextTestRunner(verbosity=options.verbose) 132 | result = testRunner.run(suite) 133 | 134 | 135 | if __name__ == '__main__': 136 | 137 | # Add the build directory to the path so we're testing the latest build, not the installed version. 138 | add_to_path() 139 | import pyodbc 140 | main() 141 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests3/informixtests.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContinuumIO/DBAdapter/19a49b4963d50df27cc930cf27693f31fd8e0426/dbadapter/pyodbc/tests3/informixtests.py -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests3/mysqltests.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContinuumIO/DBAdapter/19a49b4963d50df27cc930cf27693f31fd8e0426/dbadapter/pyodbc/tests3/mysqltests.py -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests3/sqlitetests.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContinuumIO/DBAdapter/19a49b4963d50df27cc930cf27693f31fd8e0426/dbadapter/pyodbc/tests3/sqlitetests.py -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests3/test.py: -------------------------------------------------------------------------------- 1 | 2 | from testutils import * 3 | add_to_path() 4 | 5 | import pyodbc 6 | 7 | cnxn = pyodbc.connect("DRIVER={SQL Server Native Client 10.0};SERVER=localhost;DATABASE=test;Trusted_Connection=yes") 8 | print('cnxn:', cnxn) 9 | 10 | cursor = cnxn.cursor() 11 | print('cursor:', cursor) 12 | 13 | cursor.execute("select 1") 14 | row = cursor.fetchone() 15 | print('row:', row) 16 | 17 
| 18 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests3/testbase.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | 4 | _TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-' 5 | 6 | def _generate_test_string(length): 7 | """ 8 | Returns a string of `length` characters, constructed by repeating _TESTSTR as necessary. 9 | 10 | To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are 11 | tested with 3 lengths. This function helps us generate the test data. 12 | 13 | We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will 14 | be hidden and to help us manually identify where a break occurs. 15 | """ 16 | if length <= len(_TESTSTR): 17 | return _TESTSTR[:length] 18 | 19 | c = (length + len(_TESTSTR)-1) / len(_TESTSTR) 20 | v = _TESTSTR * c 21 | return v[:length] 22 | 23 | class TestBase(unittest.TestCase): 24 | 25 | 26 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests3/testutils.py: -------------------------------------------------------------------------------- 1 | 2 | import os, sys, platform 3 | from os.path import join, dirname, abspath, basename 4 | import unittest 5 | 6 | def add_to_path(): 7 | """ 8 | Prepends the build directory to the path so that newly built pyodbc libraries are used, allowing it to be tested 9 | without installing it. 10 | """ 11 | # Put the build directory into the Python path so we pick up the version we just built. 12 | # 13 | # To make this cross platform, we'll search the directories until we find the .pyd file. 14 | 15 | import imp 16 | 17 | library_exts = [ t[0] for t in imp.get_suffixes() if t[-1] == imp.C_EXTENSION ] 18 | library_names = [ 'pyodbc%s' % ext for ext in library_exts ] 19 | 20 | # Only go into directories that match our version number. 
21 | 22 | dir_suffix = '-%s.%s' % (sys.version_info[0], sys.version_info[1]) 23 | 24 | build = join(dirname(dirname(abspath(__file__))), 'build') 25 | 26 | for root, dirs, files in os.walk(build): 27 | for d in dirs[:]: 28 | if not d.endswith(dir_suffix): 29 | dirs.remove(d) 30 | 31 | for name in library_names: 32 | if name in files: 33 | sys.path.insert(0, root) 34 | return 35 | 36 | print('Did not find the pyodbc library in the build directory. Will use an installed version.') 37 | 38 | 39 | def print_library_info(cnxn): 40 | import pyodbc 41 | print('python: %s' % sys.version) 42 | print('pyodbc: %s %s' % (pyodbc.version, os.path.abspath(pyodbc.__file__))) 43 | print('odbc: %s' % cnxn.getinfo(pyodbc.SQL_ODBC_VER)) 44 | print('driver: %s %s' % (cnxn.getinfo(pyodbc.SQL_DRIVER_NAME), cnxn.getinfo(pyodbc.SQL_DRIVER_VER))) 45 | print(' supports ODBC version %s' % cnxn.getinfo(pyodbc.SQL_DRIVER_ODBC_VER)) 46 | print('os: %s' % platform.system()) 47 | print('unicode: Py_Unicode=%s SQLWCHAR=%s' % (pyodbc.UNICODE_SIZE, pyodbc.SQLWCHAR_SIZE)) 48 | 49 | if platform.system() == 'Windows': 50 | print(' %s' % ' '.join([s for s in platform.win32_ver() if s])) 51 | 52 | 53 | 54 | def load_tests(testclass, name, *args): 55 | """ 56 | Returns a TestSuite for tests in `testclass`. 57 | 58 | name 59 | Optional test name if you only want to run 1 test. If not provided all tests in `testclass` will be loaded. 60 | 61 | args 62 | Arguments for the test class constructor. These will be passed after the test method name. 63 | """ 64 | if name: 65 | if not name.startswith('test_'): 66 | name = 'test_%s' % name 67 | names = [ name ] 68 | 69 | else: 70 | names = [ method for method in dir(testclass) if method.startswith('test_') ] 71 | 72 | return unittest.TestSuite([ testclass(name, *args) for name in names ]) 73 | 74 | 75 | def load_setup_connection_string(section): 76 | """ 77 | Attempts to read the default connection string from the setup.cfg file. 
78 | 79 | If the file does not exist or if it exists but does not contain the connection string, None is returned. If the 80 | file exists but cannot be parsed, an exception is raised. 81 | """ 82 | from os.path import exists, join, dirname, splitext, basename 83 | from configparser import SafeConfigParser 84 | 85 | FILENAME = 'setup.cfg' 86 | KEY = 'connection-string' 87 | 88 | path = join(dirname(dirname(abspath(__file__))), 'tmp', FILENAME) 89 | 90 | if exists(path): 91 | try: 92 | p = SafeConfigParser() 93 | p.read(path) 94 | except: 95 | raise SystemExit('Unable to parse %s: %s' % (path, sys.exc_info()[1])) 96 | 97 | if p.has_option(section, KEY): 98 | return p.get(section, KEY) 99 | 100 | return None 101 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests_decimal/test_decimal.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | """This script serves as a test to see if the decimal and numeric 4 | types work as expected""" 5 | 6 | import dbadapter.pyodbc as odbc 7 | 8 | _connection_string = 'DSN=SQLServerTest' 9 | _table_name = 'TEST_NUMERIC' 10 | 11 | 12 | def _report_exec(cursor, sqlcommand, *args, **kwargs): 13 | _str = 'executing sql:' 14 | print('\n\n%s\n%s\n%s' % (_str, '-'*len(_str), sqlcommand)) 15 | cursor.execute(sqlcommand, *args, **kwargs) 16 | 17 | 18 | def ensure_table(conn): 19 | cursor = conn.cursor() 20 | if cursor.tables(table=_table_name).fetchone(): 21 | print('skipping creation, table exists') 22 | return 23 | 24 | create_str = ('CREATE TABLE %s (\n' 25 | ' field_num NUMERIC(16,9),\n' 26 | ' field_dec DECIMAL(16,9)\n' 27 | ')\n' % _table_name) 28 | insert_str = ('INSERT INTO %s (field_num, field_dec)\n' 29 | ' VALUES (?, ?)' % _table_name) 30 | 31 | _report_exec(cursor, create_str) 32 | _report_exec(cursor, insert_str, '42.00', '32.42456') 33 | 34 | cursor.commit() 35 | 36 | 37 | def drop_table(conn): 38 
def query(conn):
    """Select the numeric/decimal columns from the test table and print the first row."""
    cur = conn.cursor()
    # _report_exec echoes the SQL before executing it.
    _report_exec(cur, 'SELECT field_num, field_dec FROM %s' % _table_name)
    print(cur.fetchone())
27 | """ % _test_table_name 28 | 29 | def random_string(chars=string.letters + string.digits + ' ', maxlen=42): 30 | return "".join(random.choice(chars) for x in range(random.randint(0,maxlen))) 31 | 32 | tables = ( 33 | { 34 | "name": "numeric_tests", 35 | "type": "decimal(8,4)", 36 | "descr": "Table for test on numeric types", 37 | "generator": lambda x: [[random.uniform(-500,500)] for i in xrange(x)] 38 | }, 39 | { 40 | "name": "char_tests", 41 | "type": "varchar(16)", 42 | "descr": "Table for test on character types", 43 | "generator": lambda x: [[random_string(maxlen=16)] for i in xrange(x)] 44 | }, 45 | { 46 | "name": "datetime_tests", 47 | "type": "timestamp", 48 | "descr": "Table for test on datetimes", 49 | "generator": lambda x: [[datetime.datetime.now() + datetime.timedelta(seconds=random.randint(-3600, 3600))] for i in xrange(x)] 50 | } 51 | ) 52 | 53 | 54 | def verbose_exec(cursor, command, *args, **kwargs): 55 | print(command) 56 | cursor.execute(command, *args, **kwargs) 57 | 58 | def generate_tables(count): 59 | import os, binascii 60 | import random 61 | import datetime 62 | 63 | print("Generating tables for tests (%s elements)" % repr(count)) 64 | 65 | conn = pyodbc.connect(_connect_string) 66 | cur = conn.cursor() 67 | 68 | for tab in tables: 69 | print("Table %s: %s" % (tab["name"], tab["descr"])) 70 | verbose_exec(cur, "drop table if exists %s" % (tab["name"])) 71 | verbose_exec(cur, "create table %s (val %s)" %(tab["name"], tab["type"])) 72 | values = tab["generator"](count/2) 73 | values.extend([(None,)] * (count - len(values))) # add nulls 74 | random.shuffle(values) #and shuffle 75 | cur.executemany("insert into %s values(?)" % (tab["name"],) , values) 76 | 77 | cur.commit() 78 | conn.close() 79 | 80 | def exit_help(val): 81 | print(''' 82 | %s : 83 | 84 | \tAvailable Options: 85 | 86 | \t-h \t print this help 87 | \t-c --create \tgenerate the table with rows 88 | 89 | \t%s 90 | ''' % (sys.argv[0], '\n\t'.join(_experiments.keys()))) 91 | 
def run_query(table, sqltype, func, count):
    """Open a read-only connection, select `val` cast to `sqltype`, and apply `func`.

    A negative or None `count` selects all rows.
    """
    connection = read_only_connect()
    cursor = connection.cursor()

    if count is None or int(count) < 0:
        limit = 'all'
    else:
        limit = repr(count)
    cursor.execute("select cast(val as %s) from %s limit %s" % (sqltype, table, limit))

    result = func(cursor, count)

    # Drop our references so the cursor/connection can be reclaimed promptly.
    del cursor
    del connection
    return result
%s" % repr(res["val"].dtype)) 153 | elif isinstance(res, np.ndarray): 154 | print("dtype returned is %s" % repr(res.dtype)) 155 | except KeyError: 156 | print("don't know how to run '%s' for type '%s'" % (e, t)) 157 | else: 158 | print("unknown type '%s'", t) 159 | 160 | 161 | def main(argv): 162 | try: 163 | opts, args = getopt.getopt(argv, "hcgn:t:v", ["create", "count=", "type=", "verbose", "use_guards"]) 164 | except getopt.GetoptError: 165 | exit_help(2) 166 | 167 | # these are the supported experiments 168 | 169 | type_ = None 170 | n = None 171 | command = None 172 | create = False 173 | trace = False 174 | for opt, arg in opts: 175 | if opt in ("-h", "--help"): 176 | exit_help(0) 177 | elif opt in("-c", "--create"): 178 | create = True 179 | elif opt in("-n", "--count"): 180 | n = arg 181 | elif opt in("-t", "--type"): 182 | type_ = arg 183 | elif opt in("-v", "--verbose"): 184 | trace = True 185 | elif opt in("-g", "--use_guards"): 186 | guards = True 187 | 188 | try: 189 | pyodbc.enable_tracing(trace) 190 | except: 191 | if (trace): 192 | print("it seems your DBAdapter does not support tracing in pyodbc") 193 | 194 | try: 195 | pyodbc.enable_mem_guards(guards) 196 | except: 197 | if (trace): 198 | print("it seems your DBAdapter does not mem_guards in pyodbc") 199 | 200 | # defaults 201 | types = _all_types if type_ is None else (type_, ) 202 | 203 | if (create): 204 | count = int(n) if n is not None else 100000 205 | generate_tables(count) 206 | 207 | if len(args) == 0 and not create: 208 | exit_help(0) 209 | 210 | test(types, args, n) 211 | 212 | 213 | if __name__ == '__main__': 214 | main(sys.argv[1:]) 215 | 216 | 217 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests_numpy/testutils.py: -------------------------------------------------------------------------------- 1 | 2 | import os, sys, platform 3 | from os.path import join, dirname, abspath, basename 4 | import unittest 5 | 6 | def 
def add_to_path():
    """
    Prepend the build directory to sys.path so that newly built pyodbc
    libraries are used, allowing pyodbc to be tested without installing it.
    """
    # The `imp` module was deprecated since Python 3.4 and removed in 3.12;
    # importlib.machinery exposes the same C-extension suffix list.
    from importlib.machinery import EXTENSION_SUFFIXES

    library_names = ['pyodbc%s' % ext for ext in EXTENSION_SUFFIXES]

    # Only go into directories that match our version number.
    dir_suffix = '-%s.%s' % (sys.version_info[0], sys.version_info[1])

    build = join(dirname(dirname(abspath(__file__))), 'build')

    for root, dirs, files in os.walk(build):
        for d in dirs[:]:
            if not d.endswith(dir_suffix):
                dirs.remove(d)

        for name in library_names:
            if name in files:
                sys.path.insert(0, root)
                return

    # Silently fall back to an installed pyodbc (message intentionally
    # disabled in this copy of the helper).
    #print('Did not find the pyodbc library in the build directory.  Will use an installed version.')
Will use an installed version.') 37 | 38 | 39 | def print_library_info(cnxn): 40 | from dbadapter import pyodbc 41 | import numpy as np 42 | print('python: %s' % sys.version) 43 | print('pyodbc: %s %s' % (pyodbc.version, os.path.abspath(pyodbc.__file__))) 44 | print('odbc: %s' % cnxn.getinfo(pyodbc.SQL_ODBC_VER)) 45 | print('numpy: %s' % np.__version__) 46 | print('npodbc: %s' % pyodbc.npversion) 47 | print('driver: %s %s' % (cnxn.getinfo(pyodbc.SQL_DRIVER_NAME), cnxn.getinfo(pyodbc.SQL_DRIVER_VER))) 48 | print(' supports ODBC version %s' % cnxn.getinfo(pyodbc.SQL_DRIVER_ODBC_VER)) 49 | print('os: %s' % platform.system()) 50 | print('unicode: Py_Unicode=%s SQLWCHAR=%s' % (pyodbc.UNICODE_SIZE, pyodbc.SQLWCHAR_SIZE)) 51 | 52 | if platform.system() == 'Windows': 53 | print(' %s' % ' '.join([s for s in platform.win32_ver() if s])) 54 | 55 | 56 | 57 | def load_tests(testclass, name, *args): 58 | """ 59 | Returns a TestSuite for tests in `testclass`. 60 | 61 | name 62 | Optional test name if you only want to run 1 test. If not provided all tests in `testclass` will be loaded. 63 | 64 | args 65 | Arguments for the test class constructor. These will be passed after the test method name. 66 | """ 67 | if name: 68 | if not name.startswith('test_'): 69 | name = 'test_%s' % name 70 | names = [ name ] 71 | 72 | else: 73 | names = [ method for method in dir(testclass) if method.startswith('test_') ] 74 | 75 | return unittest.TestSuite([ testclass(name, *args) for name in names ]) 76 | 77 | 78 | def load_setup_connection_string(section): 79 | """ 80 | Attempts to read the default connection string from the setup.cfg file. 81 | 82 | If the file does not exist or if it exists but does not contain the connection string, None is returned. If the 83 | file exists but cannot be parsed, an exception is raised. 
84 | """ 85 | from os.path import exists, join, dirname, splitext, basename 86 | from configparser import SafeConfigParser 87 | 88 | FILENAME = 'odbc.cfg' 89 | KEY = 'connection-string' 90 | 91 | path = join(dirname(abspath(__file__)), FILENAME) 92 | 93 | if exists(path): 94 | try: 95 | p = SafeConfigParser() 96 | p.read(path) 97 | except: 98 | raise SystemExit('Unable to parse %s: %s' % (path, sys.exc_info()[1])) 99 | if p.has_option(section, KEY): 100 | return p.get(section, KEY) 101 | 102 | return None 103 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/tests_sqlcancel/test_sqlcancel.py: -------------------------------------------------------------------------------- 1 | """This script serves as a test to see if sqlcancel works as expected. 2 | TODO: Make this into a proper test""" 3 | 4 | import dbadapter.pyodbc as odbc 5 | from time import time, sleep 6 | import threading 7 | 8 | _connection_string = 'DSN=SQLServerTest' 9 | _table_name = 'TEST_TABLE' 10 | 11 | def ensure_table(conn): 12 | cursor = conn.cursor() 13 | if cursor.tables(table=_table_name).fetchone(): 14 | print 'skipping creation, table exists' 15 | return 16 | 17 | cursor.execute('CREATE TABLE %s (field1 TEXT, field2 TEXT)' % 18 | _table_name) 19 | 20 | for i in range(10000): 21 | cursor.execute('INSERT INTO %s(field1, field2) values (?, ?)' 22 | % _table_name, str(i), str(i*2)) 23 | 24 | cursor.commit() 25 | 26 | def drop_table(conn): 27 | cursor = conn.cursor() 28 | cursor.execute('DROP TABLE %s' % _table_name) 29 | cursor.commit() 30 | 31 | def query(conn): 32 | cursor = conn.cursor() 33 | select_str = """ 34 | SELECT a.field1, b.field2 35 | FROM 36 | %s AS a, %s AS b 37 | WHERE 38 | a.field2 LIKE b.field1""" % (_table_name, _table_name) 39 | 40 | print select_str 41 | cursor.execute(select_str) 42 | 43 | result = cursor.fetchall() 44 | if len(result) > 40: 45 | print ('%s ... 
def query_with_time_out(conn, to):
    """Run the slow cross-join query, cancelling it from a watchdog thread
    after `to` seconds.  Prints either the (truncated) results or 'timed out'.
    """
    def watchdog(cursor, time_out):
        # Sleep in <= 1 second slices so the countdown is visible, then cancel.
        print('started thread')
        while time_out > 0.0:
            print('.')
            wait_time = min(time_out, 1.0)
            sleep(wait_time)
            time_out -= wait_time

        print('issuing cancel')
        cursor.cancel()

    cursor = conn.cursor()

    select_str = """
    SELECT a.field1, b.field2
    FROM
        %s AS a, %s AS b
    WHERE
        a.field2 LIKE b.field1""" % (_table_name, _table_name)

    print(select_str)

    t = threading.Thread(target=watchdog, args=(cursor, to))
    t.start()
    try:
        cursor.execute(select_str)

        result = cursor.fetchall()
    except odbc.Error:
        # The cancel aborts the execute/fetch with an odbc.Error.
        result = 'timed out'

    if len(result) > 40:
        print('%s ... %s' % (str(result[:20]), str(result[-20:])))
    else:
        print(result)
typedef unsigned char byte; 10 | typedef unsigned int UINT; 11 | typedef long long INT64; 12 | typedef unsigned long long UINT64; 13 | #ifdef __MINGW32__ 14 | #include 15 | #include 16 | #endif 17 | #endif 18 | 19 | #define PY_SSIZE_T_CLEAN 1 20 | 21 | #include 22 | #include 23 | #include 24 | 25 | #include 26 | #include 27 | 28 | #if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN) 29 | typedef int Py_ssize_t; 30 | #define PY_SSIZE_T_MAX INT_MAX 31 | #define PY_SSIZE_T_MIN INT_MIN 32 | #endif 33 | 34 | #ifndef _countof 35 | #define _countof(a) (sizeof(a) / sizeof(a[0])) 36 | #endif 37 | 38 | #ifdef UNUSED 39 | #undef UNUSED 40 | #endif 41 | 42 | inline void UNUSED(...) { } 43 | 44 | #define STRINGIFY(x) #x 45 | #define TOSTRING(x) STRINGIFY(x) 46 | 47 | static PyObject* mod_configure(PyObject* self) 48 | { 49 | FILE* f = fopen("pyodbc.conf", "w"); 50 | if (f == 0) 51 | { 52 | perror("Unable to create pyodbc.conf"); 53 | return 0; 54 | } 55 | 56 | fprintf(f, "[define_macros]\n"); 57 | fprintf(f, "PYODBC_VERSION: %s\n", TOSTRING(PYODBC_VERSION)); 58 | fprintf(f, "SQLWCHAR_SIZE: %d\n", (int)sizeof(SQLWCHAR)); 59 | 60 | #if HAVE_WCHAR_H 61 | fprintf(f, "WCHAR_T_SIZE: %d\n", (int)sizeof(wchar_t)); 62 | #endif 63 | 64 | fclose(f); 65 | 66 | Py_RETURN_NONE; 67 | } 68 | 69 | static PyMethodDef methods[] = 70 | { 71 | { "configure", (PyCFunction)mod_configure, METH_NOARGS, 0 }, 72 | { 0, 0, 0, 0 } 73 | }; 74 | 75 | PyMODINIT_FUNC initpyodbcconf() 76 | { 77 | Py_InitModule4("pyodbcconf", methods, 0, 0, PYTHON_API_VERSION); 78 | } 79 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/web/license.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | pyodbc 5 | 6 | 7 | 8 | 9 |
10 | pyodbc - A Python DB API module for ODBC 11 |
12 | 13 | 24 | 25 |
26 | 27 |

Copyright (c) 2004-2008 Michael Kleehammer

28 | 29 |

Permission is hereby granted, free of charge, to any person obtaining a copy of this 30 | software and associated documentation files (the "Software"), to deal in the Software without 31 | restriction, including without limitation the rights to use, copy, modify, merge, publish, 32 | distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the 33 | Software is furnished to do so.

34 | 35 |

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 36 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR 37 | PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR 38 | ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 39 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 40 | SOFTWARE.

41 | 42 |
43 | 44 |
45 | SourceForge.net Logo 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/web/styles.css: -------------------------------------------------------------------------------- 1 | 2 | body 3 | { 4 | margin: 0; 5 | background-color: #fff; 6 | position: relative; 7 | } 8 | 9 | #titlebox 10 | { 11 | background-color: #577eb2; 12 | color: white; 13 | font-family: Corbel, Verdana, Arial, sans-serif; 14 | font-size: larger; 15 | font-weight: bold; 16 | padding: 0px 0px 4px 4px; 17 | } 18 | 19 | #nav 20 | { 21 | padding: 0px 0px 2px 4px; 22 | font-size: smaller; 23 | background-color: #c1d9f2; 24 | border: 1px solid #577eb2; 25 | color: #3e6aaa; 26 | } 27 | 28 | #nav A 29 | { 30 | color: #3460A0; 31 | text-decoration: none; 32 | } 33 | 34 | #nav A:hover 35 | { 36 | color: #144080; 37 | text-decoration: underline; 38 | } 39 | 40 | #contents 41 | { 42 | margin: .5em; 43 | } 44 | 45 | h1 46 | { 47 | font-weight: bold; 48 | font-size: 1.4em; 49 | color: #577eb2; 50 | } 51 | 52 | h2 53 | { 54 | font-weight: bold; 55 | font-size: 1.1em; 56 | color: #577eb2; 57 | } 58 | 59 | h3 60 | { 61 | font-weight: normal; 62 | font-size: 1em; 63 | color: #577eb2; 64 | } 65 | 66 | a:active 67 | { 68 | color: #144080; 69 | } 70 | 71 | a:visited 72 | { 73 | color: #144080; 74 | } 75 | a:hover 76 | { 77 | color: #577eb2; 78 | } 79 | 80 | 81 | DT 82 | { 83 | margin-top: .5em; 84 | margin-left: .5em; 85 | font-weight: bold; 86 | } 87 | 88 | DD 89 | { 90 | margin-left: 2em; 91 | } 92 | 93 | 94 | 95 | code 96 | { 97 | font-family: "Consolas", "Courier New", "Courier", monospace; 98 | } 99 | 100 | PRE 101 | { 102 | font-family: "Consolas", "Courier New", "Courier", monospace; 103 | margin-bottom: 0px; 104 | padding-bottom: 0px; 105 | } 106 | 107 | TABLE 108 | { 109 | border: 1px solid #a0a0a0; 110 | } 111 | 112 | THEAD TR TD 113 | { 114 | background-color: #f0f0f0; 115 | border-bottom: 1px solid #a0a0a0; 116 | } 117 | 118 | .treven 
119 | { 120 | background-color: #f0f0f0; 121 | } 122 | 123 | .added 124 | { 125 | color: #00129c; 126 | } 127 | 128 | .missing 129 | { 130 | color: #9c0000; 131 | } 132 | -------------------------------------------------------------------------------- /dbadapter/pyodbc/web/tutorial.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | pyodbc 5 | 6 | 7 | 8 | 9 |
10 | pyodbc - A Python DB API module for ODBC 11 |
12 | 13 | 24 | 25 |
26 | 27 |

Introduction

28 | 29 |

This document is high-level introduction to using pyodbc and does not cover all its 30 | details. pyodbc implements the 31 | Python Database API Specification 32 | v2.0, so you should read this specification for more information.

33 | 34 |

If you haven't installed pyodbc, 35 | download and 36 | install it.

37 | 38 |

Connecting

39 | 40 |

First, you must import pyodbc. If you get errors here, make sure you have pyodbc installed.

41 | 42 |
 43 | import pyodbc
44 | 45 |

Next, create a connection by passing an ODBC connection string to the connect method. This 46 | step causes ODBC to load the database driver (the SQL Server driver in this example) and 47 | connect to the database.

48 | 49 |
 50 | cnxn = pyodbc.connect('DSN=northwind')
51 | 52 |

The ODBC connection string format is specified by ODBC in the 53 | SQLDriverConnect 55 | documentation. Unfortunately, this is for C programmers, but the comments section discussion 56 | of the connection string format is useful.

57 | 58 |

ODBC itself recognizes the following keywords in a connection string:

59 | 60 |
61 |
DRIVER
62 |
The ODBC driver to use. Make sure the driver you want to use is installed.
63 | 64 |
DSN
65 |
The name of a DSN configured in the control panel Data Sources applet. This allows 66 | database information to be specified in an application-independent manner and location.
67 | 68 |
UID
69 |
The user name when a login is required.
70 | 71 |
PWD
72 |
The password when a login is required. DSNs cannot contain passwords, so you may need 73 | this even when using the DSN keyword. 74 | 75 |
FILEDSN
76 |
The name of a .dsn file, used when the DSN information is stored in a file.
77 |
78 | 79 |

Each database driver may support additional keywords. For example, the SQL Server driver 80 | allows you to specify the machine SQL Server is running on using the SERVER keyword and the 81 | database to connect to using the DATABASE keyword. These two allow you to connect to the 82 | database without registering a DSN in the control panel. (The ODBC section of the SQL Native 83 | Client Using 84 | Connection String Keywords with SQL Native Client documentation may be useful when using SQL Server.)

85 | 86 |
 87 | cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER=localhost;DATABASE=testdb;UID=user;PWD=password')
88 | 89 |

Create an Example Table

90 | 91 |

Next, we'll create a table and populate it with some example values. First, make a cursor 92 | and execute the necessary SQL. (The SQL may need to be modified for your database, 93 | particularly the type names like 'int'. I'm testing this using SQL Server.) Finally, commit 94 | the changes.

95 | 96 |
 97 | cursor = cnxn.cursor()
 98 | cursor.execute("create table tmp(a int, b varchar(30))")
 99 | cnxn.commit()
100 | 101 |

First, notice that the commit is applied to the connection, not the cursor. Changes from 102 | all cursors attached to the same connection will be committed. Also note that the commit 103 | is required. If you do not commit, the changes will be rolled back when the connection 104 | is closed.

105 | 106 |

Insert Some Values

107 | 108 | 109 | 110 |

Selecting Values

111 | 112 |

Once you have a connection, obtain a cursor from it and execute a select statement via the 113 | cursor's execute method:

114 | 115 |
116 | cursor = cnxn.cursor()
117 | cursor.execute('select a, b from tmp')
118 | 
119 | 120 | 121 | 122 | 123 | -------------------------------------------------------------------------------- /dbadapter/tests/Makefile: -------------------------------------------------------------------------------- 1 | CC = gcc 2 | CFLAGS = -g -Werror -Wall -Wdeclaration-after-statement 3 | 4 | clean: 5 | -rm test_text_adapter 6 | -rm -f *.o 7 | 8 | -------------------------------------------------------------------------------- /dbadapter/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContinuumIO/DBAdapter/19a49b4963d50df27cc930cf27693f31fd8e0426/dbadapter/tests/__init__.py -------------------------------------------------------------------------------- /dbadapter/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | def pytest_addoption(parser): 4 | parser.addoption('--pg_host', action='store') 5 | parser.addoption('--pg_dbname', action='store') 6 | parser.addoption('--pg_user', action='store') 7 | parser.addoption('--acc_host', action='store') 8 | parser.addoption('--acc_user', action='store') 9 | parser.addoption('--acc_password', action='store') 10 | -------------------------------------------------------------------------------- /dbadapter/tests/data/benchmarks.py: -------------------------------------------------------------------------------- 1 | import timeit 2 | import os 3 | 4 | 5 | def timeFunction(function, setup): 6 | print 'timing', function 7 | t = timeit.Timer(stmt=function, setup=setup) 8 | times = [] 9 | for i in range(0,3): 10 | os.system('sudo sh -c "sync; echo 3 > /proc/sys/vm/drop_caches"') 11 | times.append(str(t.timeit(number=1))) 12 | return min(times) 13 | 14 | 15 | ints1 = timeFunction('blazeopt.loadtxt("ints1", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt') 16 | ints2 = timeFunction('blazeopt.loadtxt("ints2", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import 
blazeopt') 17 | ints3 = timeFunction('blazeopt.loadtxt("ints3", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt') 18 | print ints1, ints2, ints3 19 | 20 | floats1 = timeFunction('blazeopt.loadtxt("floats1", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt') 21 | floats2 = timeFunction('blazeopt.loadtxt("floats2", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt') 22 | floats3 = timeFunction('blazeopt.loadtxt("floats3", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt') 23 | print floats1, floats2, floats3 24 | 25 | ints1 = timeFunction('blazeopt.genfromtxt("ints1", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt') 26 | ints2 = timeFunction('blazeopt.genfromtxt("ints2", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt') 27 | ints3 = timeFunction('blazeopt.genfromtxt("ints3", dtype="u4,u4,u4,u4,u4", delimiter=",")', 'import blazeopt') 28 | print ints1, ints2, ints3 29 | 30 | floats1 = timeFunction('blazeopt.genfromtxt("floats1", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt') 31 | floats2 = timeFunction('blazeopt.genfromtxt("floats2", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt') 32 | floats3 = timeFunction('blazeopt.genfromtxt("floats3", dtype="f8,f8,f8,f8,f8", delimiter=",")', 'import blazeopt') 33 | print floats1, floats2, floats3 34 | 35 | missingValues1 = timeFunction('blazeopt.genfromtxt("missingvalues1", dtype="u4,u4,u4,u4,u4", delimiter=",", missing_values={0:["NA","NaN"], 1:["xx","inf"]}, filling_values="999")', 'import blazeopt') 36 | missingValues2 = timeFunction('blazeopt.genfromtxt("missingvalues2", dtype="u4,u4,u4,u4,u4", delimiter=",", missing_values={0:["NA","NaN"], 1:["xx","inf"]}, filling_values="999")', 'import blazeopt') 37 | missingValues3 = timeFunction('blazeopt.genfromtxt("missingvalues3", dtype="u4,u4,u4,u4,u4", delimiter=",", missing_values={0:["NA","NaN"], 1:["xx","inf"]}, filling_values="999")', 'import blazeopt') 38 | print missingValues1, missingValues2, 
missingValues3 39 | 40 | fixedwidth1 = timeFunction('blazeopt.genfromtxt("fixedwidth1", dtype="u4,u4,u4,u4,u4", delimiter=[2,3,4,5,6])', 'import blazeopt') 41 | fixedwidth2 = timeFunction('blazeopt.genfromtxt("fixedwidth2", dtype="u4,u4,u4,u4,u4", delimiter=[2,3,4,5,6])', 'import blazeopt') 42 | fixedwidth3 = timeFunction('blazeopt.genfromtxt("fixedwidth3", dtype="u4,u4,u4,u4,u4", delimiter=[2,3,4,5,6])', 'import blazeopt') 43 | print fixedwidth1, fixedwidth2, fixedwidth3 44 | 45 | -------------------------------------------------------------------------------- /dbadapter/tests/generate.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import time 4 | import gzip 5 | import numpy 6 | 7 | 8 | def generate_dataset(output, valueIter, delimiter, num_recs): 9 | for i in range(0, num_recs): 10 | line = '' 11 | for j in range(0, 5): 12 | if j == 5 - 1: 13 | line += str(valueIter.next()) 14 | else: 15 | line += str(valueIter.next()) + delimiter 16 | output.write(line) 17 | output.write('\n') 18 | output.seek(0) 19 | 20 | 21 | class IntIter(object): 22 | 23 | def __init__(self): 24 | self.value = 0 25 | 26 | def __str__(self): 27 | return 'ints' 28 | 29 | def __iter__(self): 30 | return self 31 | 32 | def next(self): 33 | nextValue = self.value 34 | self.value = self.value + 1 35 | return nextValue 36 | 37 | 38 | class SignedIntIter(object): 39 | 40 | def __init__(self): 41 | self.value = -1 42 | 43 | def __str__(self): 44 | return 'signed int' 45 | 46 | def __iter__(self): 47 | return self 48 | 49 | def next(self): 50 | nextValue = self.value 51 | if self.value < 0: 52 | self.value = self.value - 1 53 | else: 54 | self.value = self.value + 1 55 | self.value *= -1 56 | return nextValue 57 | 58 | 59 | class FloatIter(object): 60 | 61 | def __init__(self): 62 | self.value = 0.0 63 | 64 | def __str__(self): 65 | return 'floats' 66 | 67 | def __iter__(self): 68 | return self 69 | 70 | def next(self): 71 | 
nextValue = self.value 72 | self.value = self.value + 0.1 73 | return nextValue 74 | 75 | 76 | class MissingValuesIter(object): 77 | 78 | def __init__(self): 79 | self.value = 0 80 | 81 | def __str__(self): 82 | return 'missing values' 83 | 84 | def __iter__(self): 85 | return self 86 | 87 | def next(self): 88 | nextValue = self.value 89 | if nextValue % 20 == 0: 90 | nextValue = 'NA' 91 | elif nextValue % 20 == 4: 92 | nextValue = 'xx' 93 | elif nextValue % 20 == 5: 94 | nextValue = 'NaN' 95 | elif nextValue % 20 == 9: 96 | nextValue = 'inf' 97 | self.value = self.value + 1 98 | return nextValue 99 | 100 | 101 | class FixedWidthIter(object): 102 | 103 | def __init__(self): 104 | self.field = 0 105 | self.fieldValues = ['00','000','0000','00000','000000'] 106 | 107 | def __str__(self): 108 | return 'fixed widths' 109 | 110 | def __iter__(self): 111 | return self 112 | 113 | def next(self): 114 | nextValue = self.fieldValues[self.field] 115 | 116 | self.field = self.field + 1 117 | if self.field == 5: 118 | self.field = 0 119 | self.fieldValues[0] = str((int(self.fieldValues[0]) + 1) % 100).zfill(2) 120 | self.fieldValues[1] = str((int(self.fieldValues[1]) + 1) % 1000).zfill(3) 121 | self.fieldValues[2] = str((int(self.fieldValues[2]) + 1) % 10000).zfill(4) 122 | self.fieldValues[3] = str((int(self.fieldValues[3]) + 1) % 100000).zfill(5) 123 | self.fieldValues[4] = str((int(self.fieldValues[4]) + 1) % 1000000).zfill(6) 124 | 125 | return nextValue 126 | 127 | 128 | class QuoteIter(object): 129 | 130 | def __init__(self): 131 | self.value = 0 132 | 133 | def __str__(self): 134 | return 'quoted strings' 135 | 136 | def __iter__(self): 137 | return self 138 | 139 | def next(self): 140 | nextValue = self.value 141 | characters = list(str(nextValue)) 142 | nextValue = '"' + ',\n'.join(characters) + '"' 143 | 144 | self.value = self.value + 1 145 | return nextValue 146 | 147 | 148 | class DateTimeIter(object): 149 | 150 | def __init__(self): 151 | self.value = 0 152 | 153 
| def __str__(self): 154 | return 'datetime' 155 | 156 | def __iter__(self): 157 | return self 158 | 159 | def next(self): 160 | nextValue = self.value 161 | self.value = self.value + 1 162 | return numpy.datetime64(nextValue, 'D') 163 | 164 | 165 | if __name__ == "__main__": 166 | import sys 167 | if len(sys.argv) != 2: 168 | sys.exit("Please define number of records in datasets: ") 169 | 170 | numRecords = int(sys.argv[1]) 171 | 172 | output = open('./data/ints', 'w') 173 | generate_dataset(output, IntIter(), ',', numRecords) 174 | output.close() 175 | 176 | output = open('./data/floats', 'w') 177 | generate_dataset(output, FloatIter(), ',', numRecords) 178 | output.close() 179 | 180 | output = open('./data/missingvalues', 'w') 181 | generate_dataset(output, MissingValuesIter(), ',', numRecords) 182 | output.close() 183 | 184 | output = open('./data/fixedwidths', 'w') 185 | generate_dataset(output, FixedWidthIter(), '', numRecords) 186 | output.close() 187 | 188 | input = open('./data/ints', 'rb') 189 | output = gzip.open('./data/ints.gz', 'wb') 190 | output.writelines(input) 191 | output.close() 192 | input.close 193 | 194 | '''generate_dataset('ints2', IntIter(), ',', 12500000) 195 | generate_dataset('ints3', IntIter(), ',', 25000000) 196 | generate_dataset('signedints1', SignedIntIter(), ',', 2500000) 197 | generate_dataset('floats1', FloatIter(), ',', 1500000) 198 | generate_dataset('floats2', FloatIter(), ',', 7500000) 199 | generate_dataset('floats3', FloatIter(), ',', 15000000) 200 | generate_dataset('missingvalues1', MissingValuesIter(), ',', 3000000) 201 | generate_dataset('missingvalues2', MissingValuesIter(), ',', 15000000) 202 | generate_dataset('missingvalues3', MissingValuesIter(), ',', 30000000) 203 | generate_dataset('fixedwidth1', FixedWidthIter(), '', 5000000) 204 | generate_dataset('fixedwidth2', FixedWidthIter(), '', 25000000) 205 | generate_dataset('fixedwidth3', FixedWidthIter(), '', 50000000) 206 | generate_dataset('ints_spacedelim', 
IntIter(), ' ', 2500000) 207 | generate_dataset('quotes', QuoteIter(), ' ', 2500000) 208 | generate_dataset('datetime', DateTimeIter(), ',', 2500000)''' 209 | 210 | -------------------------------------------------------------------------------- /dbadapter/tests/test_ints.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include "../textadapter/text_adapter.h" 7 | #include "../textadapter/io_functions.h" 8 | 9 | 10 | int main() 11 | { 12 | uint64_t num_fields = 5; 13 | 14 | FILE *input = fopen("./data/ints", "r"); 15 | setvbuf(input, NULL, _IONBF, 0); 16 | 17 | TextAdapter *adapter = open_text_adapter((void *)input, NULL, &read_file, NULL, &seek_file, NULL); 18 | adapter->tokenize = &delim_tokenizer; 19 | set_num_fields(adapter, num_fields); 20 | adapter->delim_char = ','; 21 | adapter->quote_char = '\0'; 22 | adapter->comment_char = '\0'; 23 | 24 | int c; 25 | for (c = 0; c < num_fields; c++) 26 | { 27 | set_converter(adapter, c, sizeof(uint32_t), &uint_converter, NULL); 28 | } 29 | 30 | uint32_t *data = calloc(10000000, sizeof(uint32_t)*num_fields); 31 | 32 | fseek(input, 0, SEEK_SET); 33 | 34 | clock_t t0 = clock(); 35 | uint64_t recs_read = 0; 36 | int result = read_records(adapter, 10000000, 1, (char *)data, &recs_read); 37 | clock_t t1 = clock(); 38 | 39 | assert(result == ADAPTER_SUCCESS); 40 | 41 | printf("PASSED: read %llu records in %.2lf seconds\n", recs_read, (double)(t1-t0) / (double)CLOCKS_PER_SEC); 42 | 43 | free(data); 44 | close_text_adapter(adapter); 45 | } 46 | -------------------------------------------------------------------------------- /dbadapter/vertica.py: -------------------------------------------------------------------------------- 1 | from threading import Thread 2 | from Queue import Queue, Empty 3 | import time 4 | import uuid 5 | import socket 6 | import atexit 7 | import signal 8 | from dbadapter import pyodbc 9 | import sys 10 
| 11 | import logging 12 | logging.basicConfig(level=logging.DEBUG) 13 | log = logging.getLogger(__name__) 14 | def protect_vertica(connstring, label): 15 | """uses atexist to register a helper which kills all queries 16 | where client_label matches label. To use this, you must set LABEL 17 | to some unique value inside your connection string 18 | """ 19 | def signal_helper(*args, **kwargs): 20 | sys.exit(1) 21 | 22 | def helper(*args, **kwargs): 23 | print('cancel all') 24 | _cancel_all(connstring, label) 25 | 26 | signal.signal(signal.SIGTERM, signal_helper) 27 | atexit.register(helper) 28 | 29 | def _cancel_all(connstring, label): 30 | """cancel_all sessions where client_label matches label. 31 | to use this, you must set LABEL to some unique value 32 | inside your connection string 33 | """ 34 | q = """select session_id, statement_id from v_monitor.sessions 35 | where client_label='%s'""" % label 36 | conn = pyodbc.connect(connstring, ansi=True) 37 | data = conn.cursor().execute(q).fetchall() 38 | _interrupt_statements(conn, data) 39 | 40 | def _cancel_conn(conn, queryid): 41 | q = """ 42 | select session_id, statement_id from v_monitor.sessions where 43 | current_statement like '%%%s%%' 44 | and current_statement not like '%%v_monitor.sessions%%'; 45 | """ 46 | q = q % queryid 47 | data = conn.cursor().execute(q).fetchall() 48 | if len(data) == 1: 49 | _interrupt_statements(conn, data) 50 | 51 | def _cancel(connstring, timeout, queryid): 52 | """after some timeout, close the statement associated with 53 | queryid. 
queryid should be some uuid you add via sql comments 54 | """ 55 | time.sleep(timeout) 56 | conn = pyodbc.connect(connstring, ansi=True) 57 | q = """ 58 | select session_id, statement_id from v_monitor.sessions where 59 | current_statement like '%%%s%%' 60 | and current_statement not like '%%v_monitor.sessions%%'; 61 | """ 62 | q = q % queryid 63 | data = conn.cursor().execute(q).fetchall() 64 | if len(data) == 1: 65 | _interrupt_statements(conn, data) 66 | 67 | def _interrupt_statements(conn, statement_information): 68 | for session_id, statement_id in statement_information: 69 | if statement_id: 70 | log.info("interrupting session:%s, statement:%s", session_id, statement_id) 71 | q = "select interrupt_statement('%s', '%s')" %\ 72 | (session_id, statement_id) 73 | cur = conn.cursor() 74 | cur.execute(q) 75 | print('results', cur.fetchall()) 76 | 77 | def fetchall_timeout(cursor, connstring, query, timeout=10): 78 | queryid = str(uuid.uuid4()) 79 | query += " /* %s */ " % queryid 80 | q = Queue() 81 | def helper(): 82 | try: 83 | result = cursor.execute(query).fetchall() 84 | q.put(result) 85 | except Exception as e: 86 | q.put(e) 87 | try: 88 | t = Thread(target=helper) 89 | t.start() 90 | val = q.get(True, timeout) 91 | if isinstance(val, Exception): 92 | raise val 93 | return val 94 | except (KeyboardInterrupt, Empty) as e: 95 | print('cancelling') 96 | _cancel(connstring, 0, queryid) 97 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 16 | 17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 18 | 19 | help: 20 | @echo "Please use \`make ' where is one of" 21 | @echo " html to make standalone HTML files" 22 | @echo " dirhtml to make HTML files named index.html in directories" 23 | @echo " singlehtml to make a single large HTML file" 24 | @echo " pickle to make pickle files" 25 | @echo " json to make JSON files" 26 | @echo " htmlhelp to make HTML files and a HTML help project" 27 | @echo " qthelp to make HTML files and a qthelp project" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 32 | @echo " text to make text files" 33 | @echo " man to make manual pages" 34 | @echo " texinfo to make Texinfo files" 35 | @echo " info to make Texinfo files and run them through makeinfo" 36 | @echo " gettext to make PO message catalogs" 37 | @echo " changes to make an overview of all changed/added/deprecated items" 38 | @echo " linkcheck to check all external links for integrity" 39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 40 | 41 | clean: 42 | -rm -rf $(BUILDDIR)/* 43 | 44 | html: 45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 48 | 49 | dirhtml: 50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 51 | @echo 52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 53 | 54 | singlehtml: 55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 56 | @echo 57 | @echo "Build finished. 
The HTML page is in $(BUILDDIR)/singlehtml." 58 | 59 | pickle: 60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 61 | @echo 62 | @echo "Build finished; now you can process the pickle files." 63 | 64 | json: 65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 66 | @echo 67 | @echo "Build finished; now you can process the JSON files." 68 | 69 | htmlhelp: 70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 71 | @echo 72 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 73 | ".hhp project file in $(BUILDDIR)/htmlhelp." 74 | 75 | qthelp: 76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 77 | @echo 78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/IOPro.qhcp" 81 | @echo "To view the help file:" 82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/IOPro.qhc" 83 | 84 | devhelp: 85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 86 | @echo 87 | @echo "Build finished." 88 | @echo "To view the help file:" 89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/IOPro" 90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/IOPro" 91 | @echo "# devhelp" 92 | 93 | epub: 94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 95 | @echo 96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 97 | 98 | latex: 99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 100 | @echo 101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 103 | "(use \`make latexpdf' here to do that automatically)." 104 | 105 | latexpdf: 106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 107 | @echo "Running LaTeX files through pdflatex..." 
108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 110 | 111 | text: 112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 113 | @echo 114 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 115 | 116 | man: 117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 118 | @echo 119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 120 | 121 | texinfo: 122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 123 | @echo 124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 125 | @echo "Run \`make' in that directory to run these through makeinfo" \ 126 | "(use \`make info' here to do that automatically)." 127 | 128 | info: 129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 130 | @echo "Running Texinfo files through makeinfo..." 131 | make -C $(BUILDDIR)/texinfo info 132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 133 | 134 | gettext: 135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 136 | @echo 137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 138 | 139 | changes: 140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 141 | @echo 142 | @echo "The overview file is in $(BUILDDIR)/changes." 143 | 144 | linkcheck: 145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 146 | @echo 147 | @echo "Link check complete; look for any errors in the above output " \ 148 | "or in $(BUILDDIR)/linkcheck/output.txt." 149 | 150 | doctest: 151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 152 | @echo "Testing of doctests in the sources finished, look at the " \ 153 | "results in $(BUILDDIR)/doctest/output.txt." 
154 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ----- 2 | IOPro 3 | ----- 4 | 5 | IOPro loads NumPy arrays (and Pandas DataFrames) directly from files, SQL 6 | databases, and NoSQL stores--including ones with millions of rows--without 7 | creating millions of temporary, intermediate Python objects, or requiring 8 | expensive array resizing operations. 9 | 10 | IOPro provides a drop-in replacement for the 11 | NumPy functions :code:`loadtxt()` and :code:`genfromtxt()`, but dramatically 12 | improves performance and reduces memory overhead. 13 | 14 | The current version of IOPro 1.9 was released on July 30, 2016. 15 | 16 | How to get IOPro 17 | ---------------- 18 | 19 | IOPro is included with `Anaconda Workgroup and Anaconda Enterprise 20 | subscriptions `_. 21 | 22 | To start a 30-day free trial just download and install the IOPro package. 23 | 24 | If you already have `Anaconda `_ (free 25 | Python platform) or `Miniconda `_ 26 | installed:: 27 | 28 | conda update conda 29 | conda install iopro 30 | 31 | If you do not have Anaconda installed, you can `download it 32 | `_. 33 | 34 | For more information about IOPro please contact `sales@continuum.io 35 | `_. 36 | 37 | Requirements 38 | ------------ 39 | 40 | * Python 2.7 or 3.4+ 41 | * NumPy 1.10+ 42 | 43 | Optional Python modules: 44 | 45 | * Boto (for S3 support) 46 | * Pandas (to use DataFrames) 47 | 48 | What's new in version 1.9? 49 | -------------------------- 50 | 51 | The documentation has been substantially updated for version 1.9.0. 52 | Numba has been removed and the code has been cleaned up, but no other 53 | features were added or removed. Some refactoring was done that didn't 54 | change functionality. We recommend that users not use older versions. 55 | See :doc:`Release notes ` for additional detail. 
56 | 57 | 58 | Getting started 59 | --------------- 60 | 61 | Some of the basic usage patterns look like these. Create TextAdapter object 62 | for data source:: 63 | 64 | >>> import iopro 65 | >>> adapter = iopro.text_adapter('data.csv', parser='csv') 66 | 67 | Define field dtypes (example: set field 0 to unsigned int and field 4 to 68 | float):: 69 | 70 | >>> adapter.set_field_types({0: 'u4', 4:'f4'}) 71 | 72 | Parse text and store records in NumPy array using slicing notation:: 73 | 74 | >>> # read all records 75 | >>> array = adapter[:] 76 | 77 | >>> # read first ten records 78 | >>> array = adapter[0:10] 79 | 80 | >>> # read last record 81 | >>> array = adapter[-1] 82 | 83 | >>> # read every other record 84 | >>> array = adapter[::2] 85 | 86 | User guide 87 | ---------- 88 | 89 | .. toctree:: 90 | :maxdepth: 1 91 | 92 | install 93 | textadapter_examples 94 | pyodbc_firststeps 95 | pyodbc_enhancedcapabilities 96 | pyodbc_cancel 97 | eula 98 | release-notes 99 | 100 | Reference guide 101 | --------------- 102 | 103 | .. toctree:: 104 | :maxdepth: 1 105 | 106 | pyodbc 107 | 108 | 109 | Previous Versions 110 | ----------------- 111 | 112 | This documentation is provided for the use of our customers who have not yet upgraded 113 | to the current version. 114 | 115 | NOTE: We recommend that users not use older versions of IOPro. 116 | 117 | .. toctree:: 118 | :maxdepth: 1 119 | 120 | IOPro 1.8.0 <1.8.0/index> 121 | -------------------------------------------------------------------------------- /docs/install.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | If you do not already have Anaconda installed, please download it via the 5 | `downloads page `_ and install it. 6 | 7 | IOPro is included with `Anaconda Workgroup and Anaconda Enterprise 8 | subscriptions `_. 9 | 10 | To start a 30-day free trial just download and install the IOPro package. 
11 | 12 | If you already have `Anaconda `_ (free 13 | Python platform) or `Miniconda ` 14 | installed:: 15 | 16 | conda update conda 17 | conda install iopro 18 | 19 | If you do not have Anaconda installed, you can `download it 20 | `_. 21 | 22 | For more information about IOPro please contact `sales@continuum.io 23 | `_. 24 | 25 | IOPro Update Instructions 26 | ------------------------- 27 | 28 | If you have Anaconda (free Python platform) installed, first update 29 | the conda package management tool to the latest version, then use conda 30 | to update the IOPro product installation:: 31 | 32 | conda update conda 33 | conda update iopro 34 | 35 | Uninstall 36 | --------- 37 | 38 | To uninstall using conda:: 39 | 40 | conda remove iopro 41 | 42 | 43 | Installing license 44 | ------------------ 45 | 46 | The IOPro license can be installed with the graphical Anaconda Navigator license 47 | manager or manually with your operating system. In your organization this may be 48 | handled by your site administrator or IT department. Both installation methods 49 | are explained in the :doc:`License installation ` 50 | page. 51 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. 
singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. linkcheck to check all external links for integrity 37 | echo. doctest to run all doctests embedded in the documentation if enabled 38 | goto end 39 | ) 40 | 41 | if "%1" == "clean" ( 42 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 43 | del /q /s %BUILDDIR%\* 44 | goto end 45 | ) 46 | 47 | if "%1" == "html" ( 48 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 49 | if errorlevel 1 exit /b 1 50 | echo. 51 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 52 | goto end 53 | ) 54 | 55 | if "%1" == "dirhtml" ( 56 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 57 | if errorlevel 1 exit /b 1 58 | echo. 59 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 60 | goto end 61 | ) 62 | 63 | if "%1" == "singlehtml" ( 64 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 68 | goto end 69 | ) 70 | 71 | if "%1" == "pickle" ( 72 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished; now you can process the pickle files. 76 | goto end 77 | ) 78 | 79 | if "%1" == "json" ( 80 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 81 | if errorlevel 1 exit /b 1 82 | echo. 
83 | echo.Build finished; now you can process the JSON files. 84 | goto end 85 | ) 86 | 87 | if "%1" == "htmlhelp" ( 88 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can run HTML Help Workshop with the ^ 92 | .hhp project file in %BUILDDIR%/htmlhelp. 93 | goto end 94 | ) 95 | 96 | if "%1" == "qthelp" ( 97 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 98 | if errorlevel 1 exit /b 1 99 | echo. 100 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 101 | .qhcp project file in %BUILDDIR%/qthelp, like this: 102 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\IOPro.qhcp 103 | echo.To view the help file: 104 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\IOPro.ghc 105 | goto end 106 | ) 107 | 108 | if "%1" == "devhelp" ( 109 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 110 | if errorlevel 1 exit /b 1 111 | echo. 112 | echo.Build finished. 113 | goto end 114 | ) 115 | 116 | if "%1" == "epub" ( 117 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 118 | if errorlevel 1 exit /b 1 119 | echo. 120 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 121 | goto end 122 | ) 123 | 124 | if "%1" == "latex" ( 125 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 129 | goto end 130 | ) 131 | 132 | if "%1" == "text" ( 133 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The text files are in %BUILDDIR%/text. 137 | goto end 138 | ) 139 | 140 | if "%1" == "man" ( 141 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 
145 | goto end 146 | ) 147 | 148 | if "%1" == "texinfo" ( 149 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 150 | if errorlevel 1 exit /b 1 151 | echo. 152 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 153 | goto end 154 | ) 155 | 156 | if "%1" == "gettext" ( 157 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 158 | if errorlevel 1 exit /b 1 159 | echo. 160 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 161 | goto end 162 | ) 163 | 164 | if "%1" == "changes" ( 165 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 166 | if errorlevel 1 exit /b 1 167 | echo. 168 | echo.The overview file is in %BUILDDIR%/changes. 169 | goto end 170 | ) 171 | 172 | if "%1" == "linkcheck" ( 173 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 174 | if errorlevel 1 exit /b 1 175 | echo. 176 | echo.Link check complete; look for any errors in the above output ^ 177 | or in %BUILDDIR%/linkcheck/output.txt. 178 | goto end 179 | ) 180 | 181 | if "%1" == "doctest" ( 182 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 183 | if errorlevel 1 exit /b 1 184 | echo. 185 | echo.Testing of doctests in the sources finished, look at the ^ 186 | results in %BUILDDIR%/doctest/output.txt. 187 | goto end 188 | ) 189 | 190 | :end 191 | -------------------------------------------------------------------------------- /docs/pyodbc.rst: -------------------------------------------------------------------------------- 1 | ------------ 2 | iopro.pyodbc 3 | ------------ 4 | 5 | This project is an enhancement of the Python database module for ODBC 6 | that implements the Python DB API 2.0 specification. You can see the 7 | original project here: 8 | 9 | :homepage: http://code.google.com/p/pyodbc 10 | :source: http://github.com/mkleehammer/pyodbc 11 | :source: http://code.google.com/p/pyodbc/source/list 12 | 13 | The enhancements are documented in this file. 
For example, if a SELECT returns 3 columns with names 'a', 'b'
Here is an example of the output for the SELECT above::
"insert into market(symbol_, open_, low_, high_, close_, volume_)" 62 | ... " values (?, ?, ?, ?, ?, ?)", 63 | ... (str(i), float(i), float(2*i), None, float(4*i), i)) 64 | >>> cursor.execute("commit") # not supported by SQLite 65 | >>> t1 = time() - t0 66 | >>> print "Stored %d rows in %.3fs" % (N, t1) 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | Do the query in the traditional way 75 | ----------------------------------- 76 | 77 | 78 | :: 79 | 80 | >>> # Query of the full table using the traditional fetchall 81 | >>> query = "select * from market" 82 | >>> cursor.execute(query) 83 | >>> %time all = cursor.fetchall() 84 | CPU times: user 5.23 s, sys: 0.56 s, total: 5.79 s 85 | Wall time: 7.09 s 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | Do the query and get a dictionary of NumPy arrays 95 | ------------------------------------------------- 96 | 97 | 98 | :: 99 | 100 | >>> # Query of the full table using the fetchdictarray (retrieve a dictionary of arrays) 101 | >>> cursor.execute(query) 102 | >>> %time dictarray = cursor.fetchdictarray() 103 | CPU times: user 0.92 s, sys: 0.10 s, total: 1.02 s 104 | Wall time: 1.44 s 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | Peek into the retrieved data 114 | ---------------------------- 115 | 116 | 117 | :: 118 | 119 | >>> dictarray.keys() 120 | ['high_', 'close_', 'open_', 'low_', 'volume_', 'symbol_'] 121 | >>> dictarray['high_'] 122 | array([ nan, nan, nan, ..., nan, nan, nan]) 123 | >>> dictarray['symbol_'] 124 | array(['0', '1', '2', ..., '99999', '99999', '99999'], dtype='|S6') 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | Do the query and get a NumPy structured array 133 | --------------------------------------------- 134 | 135 | 136 | :: 137 | 138 | >>> # Query of the full table using the fetchsarray (retrieve a structured array) 139 | >>> cursor.execute(query) 140 | >>> %time sarray = cursor.fetchsarray() 141 | CPU times: user 1.08 s, sys: 0.11 s, total: 1.20 s 142 | Wall time: 1.99 s 143 | 144 | 145 | 146 | 
import os
import sys
import glob
from distutils.core import setup, Command
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
import dbadapter.pyodbc_setup
import versioneer


class CleanInplace(Command):
    """Remove in-place build artifacts (the compiled pyodbc extension).

    Registered below as the ``cleanall`` command.
    """

    user_options = []

    def initialize_options(self):
        self.cwd = None

    def finalize_options(self):
        self.cwd = os.getcwd()

    def run(self):
        # Best-effort deletion: ignore files that have already vanished
        # or cannot be removed (e.g. still loaded on Windows).
        for path in glob.glob('./dbadapter/pyodbc.*.so'):
            try:
                os.remove(path)
            except OSError:
                pass


def setup_odbc(include_dirs, lib_dirs):
    """Build the Extension object for the ``dbadapter.pyodbc`` C++ module.

    Parameters
    ----------
    include_dirs : list of str
        Header search paths; extended in place on darwin when
        ``UNIXODBC_PATH`` is set.
    lib_dirs : list of str
        Library search paths; extended in place on darwin when
        ``UNIXODBC_PATH`` is set.

    Returns
    -------
    distutils.extension.Extension
        The configured extension, linking against the platform's ODBC
        driver manager (odbc32 on Windows, unixODBC elsewhere).
    """
    src_path = os.path.join(os.path.dirname(__file__), 'dbadapter/pyodbc/src')
    src = [os.path.abspath(os.path.join(src_path, f))
           for f in os.listdir(src_path)
           if f.endswith('.cpp')]

    if sys.platform == 'win32':
        libraries = ['odbc32', 'advapi32']
    else:
        # darwin and linux both link against unixODBC.
        libraries = ['odbc']
        if sys.platform == 'darwin':
            # Allow a custom unixODBC build to take precedence over the
            # default search paths.
            unixodbc = os.environ.get('UNIXODBC_PATH', '')
            if unixodbc:
                include_dirs.append(unixodbc)
                include_dirs.append(os.path.join(unixodbc, 'include'))
                lib_dirs.append(os.path.join(unixodbc,
                                             'DriverManager', '.libs'))

    return Extension('dbadapter.pyodbc',
                     src,
                     include_dirs=include_dirs,
                     libraries=libraries,
                     library_dirs=lib_dirs)


def run_setup():
    """Configure versioneer and invoke distutils setup for dbadapter."""
    # Headers: our bundled C library sources plus NumPy's include dir,
    # then the environment's include/lib dirs (layout differs on Windows).
    include_dirs = [os.path.join('dbadapter', 'lib'),
                    numpy.get_include()]
    lib_dirs = []
    if sys.platform == 'win32':
        include_dirs.append(os.path.join(sys.prefix, 'Library', 'include'))
        lib_dirs.append(os.path.join(sys.prefix, 'Library', 'lib'))
    else:
        include_dirs.append(os.path.join(sys.prefix, 'include'))
        lib_dirs.append(os.path.join(sys.prefix, 'lib'))

    packages = ['dbadapter', 'dbadapter.lib', 'dbadapter.tests']
    ext_modules = [setup_odbc(include_dirs, lib_dirs)]

    # Versioneer derives the package version from git tags.
    versioneer.versionfile_source = 'dbadapter/_version.py'
    versioneer.versionfile_build = 'dbadapter/_version.py'
    versioneer.tag_prefix = ''
    versioneer.parentdir_prefix = 'dbadapter-'

    cmdclass = versioneer.get_cmdclass()
    cmdclass['build_ext'] = build_ext   # Cython-aware extension builder
    cmdclass['cleanall'] = CleanInplace

    setup(name='dbadapter',
          version=versioneer.get_version(),
          description='optimized IO for NumPy/Blaze',
          author='Continuum Analytics',
          author_email='noreply@continuum.io',
          ext_modules=ext_modules,
          packages=packages,
          cmdclass=cmdclass)


if __name__ == '__main__':
    run_setup()