├── .env.sample
├── .gitignore
├── .tool-versions
├── CHANGELOG.rst
├── LICENSE.txt
├── Makefile
├── README.rst
├── databricks_dbapi
├── __init__.py
├── _version.py
├── hive.py
├── odbc.py
└── sqlalchemy_dialects
│ ├── __init__.py
│ ├── base.py
│ ├── hive.py
│ └── odbc.py
├── poetry.lock
├── pyproject.toml
└── tests
├── conftest.py
├── test_hive.py
├── test_odbc.py
└── test_sqlalchemy_dialects
├── test_hive_dialect.py
└── test_odbc_dialect.py
/.env.sample:
--------------------------------------------------------------------------------
1 | DATABRICKS_TOKEN_WORKSPACE=
2 | DATABRICKS_TOKEN_SQL_ANALYTICS=
3 | DATABRICKS_USER=
4 | DATABRICKS_PASSWORD=
5 | DATABRICKS_HOST=
6 | DATABRICKS_HTTP_PATH_WORKSPACE=
7 | DATABRICKS_HTTP_PATH_SQL_ANALYTICS=
8 | DATABRICKS_ODBC_DRIVER_PATH=
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
140 |
141 | .idea/
142 | .DS_Store
--------------------------------------------------------------------------------
/.tool-versions:
--------------------------------------------------------------------------------
1 | python 3.7.4
2 | poetry 1.1.11
3 |
--------------------------------------------------------------------------------
/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | Release History
2 | ---------------
3 |
4 | 0.6.0: 2021-11-21
5 | ~~~~~~~~~~~~~~~~~
6 |
7 | * ``pyhive`` and ``pyodbc`` are now optional dependencies. At least one of {``hive``, ``odbc``} must now be specified as an extra.
8 | * Change driver and dialect identifiers from bytes to string.
9 |
10 | 0.5.0: 2021-02-25
11 | ~~~~~~~~~~~~~~~~~
12 |
13 | * Add pyodbc as a dependency
14 | * Add ODBC DBAPI connection
15 | * Add kwargs to connect signature, which are passed down to pyhive/pyodbc connect functions
16 | * Add pyodbc-driven sqlalchemy dialects
17 | * Remove any user/password authentication from documentation
18 | * BREAKING: Rename the ``databricks`` module to ``hive``, to explicitly differentiate between the two drivers
19 | * BREAKING: Remove ``cluster`` and ``org`` arguments from DBAPI connect signature
20 |
21 | 0.4.0: 2021-01-09
22 | ~~~~~~~~~~~~~~~~~
23 |
24 | * Override get_columns in DatabricksDialect to account for differences in Databricks and OSS Hive partition header
25 | * Fix version file
26 | * Update black and add isort
27 | * Add org id argument for connection
28 |
29 |
30 | 0.3.0: 2019-08-14
31 | ~~~~~~~~~~~~~~~~~
32 |
33 | * Add module globals to make DBAPI PEP 249 compliant
34 | * Allow port and database name to be passed in connect function
35 | * Add compatibility with SQLAlchemy
36 |
37 | 0.2.0: 2018-12-12
38 | ~~~~~~~~~~~~~~~~~
39 |
40 | * Add docstring
41 | * Add http_path parameter for Azure compatibility (thanks @gwerbin)
42 | * Make metadata available at package level
43 |
44 | 0.1.1: 2018-10-10
45 | ~~~~~~~~~~~~~~~~~
46 |
47 | * Fix email
48 |
49 | 0.1.0: 2018-09-10
50 | ~~~~~~~~~~~~~~~~~
51 |
52 | * First release
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018-2021 Christopher Flynn
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | setup:
2 | asdf install
3 | poetry install --extras sqlalchemy
4 |
5 | fmt:
6 | poetry run black .
7 | poetry run isort .
8 |
9 | test:
10 | poetry run pytest .
11 |
12 | clean:
13 | rm -rf dist
14 |
15 | build:
16 | poetry build
17 |
18 | publish: clean build
19 | poetry publish
20 |
21 | release: clean build
22 | ghr -u crflynn -r databricks-dbapi -c $(shell git rev-parse HEAD) -delete -b "release" -n $(shell poetry version -s) $(shell poetry version -s) dist
23 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | **DEPRECATION WARNING**: This package is no longer maintained. Databricks now officially maintains a DBAPI package called `databricks-sql-connector <https://pypi.org/project/databricks-sql-connector/>`__ that is compatible with workspace and sql analytics clusters. There is also the newer `sqlalchemy-databricks <https://pypi.org/project/sqlalchemy-databricks/>`__ package which uses the ``databricks-sql-connector`` as a driver.
2 |
3 |
4 | databricks-dbapi
5 | ================
6 |
7 | |pypi| |pyversions|
8 |
9 | .. |pypi| image:: https://img.shields.io/pypi/v/databricks-dbapi.svg
10 | :target: https://pypi.python.org/pypi/databricks-dbapi
11 |
12 | .. |pyversions| image:: https://img.shields.io/pypi/pyversions/databricks-dbapi.svg
13 | :target: https://pypi.python.org/pypi/databricks-dbapi
14 |
15 | A thin wrapper around `pyhive `__ and `pyodbc `__ for creating a `DBAPI `__ connection to Databricks Workspace and SQL Analytics clusters. SQL Analytics clusters require the `Simba ODBC driver `__.
16 |
17 | Also provides SQLAlchemy Dialects using ``pyhive`` and ``pyodbc`` for Databricks clusters. Databricks SQL Analytics clusters only support the ``pyodbc``-driven dialect.
18 |
19 | Installation
20 | ------------
21 |
22 | Install using pip. You *must* specify at least one of the extras {``hive`` or ``odbc``}. For ``odbc`` the `Simba driver `__ is required:
23 |
24 | .. code-block:: bash
25 |
26 | pip install databricks-dbapi[hive,odbc]
27 |
28 |
29 | For SQLAlchemy support install with:
30 |
31 | .. code-block:: bash
32 |
33 | pip install databricks-dbapi[hive,odbc,sqlalchemy]
34 |
35 | Usage
36 | -----
37 |
38 | PyHive
39 | ~~~~~~
40 |
41 | The ``connect()`` function returns a ``pyhive`` Hive connection object, which internally wraps a ``thrift`` connection.
42 |
43 | Connecting with ``http_path``, ``host``, and a ``token``:
44 |
45 | .. code-block:: python
46 |
47 | import os
48 |
49 | from databricks_dbapi import hive
50 |
51 |
52 | token = os.environ["DATABRICKS_TOKEN"]
53 | host = os.environ["DATABRICKS_HOST"]
54 | http_path = os.environ["DATABRICKS_HTTP_PATH"]
55 |
56 |
57 | connection = hive.connect(
58 | host=host,
59 | http_path=http_path,
60 | token=token,
61 | )
62 | cursor = connection.cursor()
63 |
64 | cursor.execute("SELECT * FROM some_table LIMIT 100")
65 |
66 | print(cursor.fetchone())
67 | print(cursor.fetchall())
68 |
69 |
70 | The ``pyhive`` connection also provides async functionality:
71 |
72 | .. code-block:: python
73 |
74 | import os
75 |
76 | from databricks_dbapi import hive
77 | from TCLIService.ttypes import TOperationState
78 |
79 |
80 | token = os.environ["DATABRICKS_TOKEN"]
81 | host = os.environ["DATABRICKS_HOST"]
82 | cluster = os.environ["DATABRICKS_CLUSTER"]
83 |
84 |
85 | connection = hive.connect(
86 | host=host,
87 | cluster=cluster,
88 | token=token,
89 | )
90 | cursor = connection.cursor()
91 |
92 | cursor.execute("SELECT * FROM some_table LIMIT 100", async_=True)
93 |
94 | status = cursor.poll().operationState
95 | while status in (TOperationState.INITIALIZED_STATE, TOperationState.RUNNING_STATE):
96 | logs = cursor.fetch_logs()
97 | for message in logs:
98 | print(message)
99 |
100 | # If needed, an asynchronous query can be cancelled at any time with:
101 | # cursor.cancel()
102 |
103 | status = cursor.poll().operationState
104 |
105 | print(cursor.fetchall())
106 |
107 |
108 | ODBC
109 | ~~~~
110 |
111 | The ODBC DBAPI requires the Simba ODBC driver.
112 |
113 | Connecting with ``http_path``, ``host``, and a ``token``:
114 |
115 | .. code-block:: python
116 |
117 | import os
118 |
119 | from databricks_dbapi import odbc
120 |
121 |
122 | token = os.environ["DATABRICKS_TOKEN"]
123 | host = os.environ["DATABRICKS_HOST"]
124 | http_path = os.environ["DATABRICKS_HTTP_PATH"]
125 |
126 |
127 | connection = odbc.connect(
128 | host=host,
129 | http_path=http_path,
130 | token=token,
131 | driver_path="/path/to/simba/driver",
132 | )
133 | cursor = connection.cursor()
134 |
135 | cursor.execute("SELECT * FROM some_table LIMIT 100")
136 |
137 | print(cursor.fetchone())
138 | print(cursor.fetchall())
139 |
140 |
141 | SQLAlchemy Dialects
142 | -------------------
143 |
144 | databricks+pyhive
145 | ~~~~~~~~~~~~~~~~~
146 |
147 | Installing registers the ``databricks+pyhive`` dialect/driver with SQLAlchemy. Fill in the required information when passing the engine URL.
148 |
149 | .. code-block:: python
150 |
151 | from sqlalchemy import *
152 | from sqlalchemy.engine import create_engine
153 | from sqlalchemy.schema import *
154 |
155 |
156 | engine = create_engine(
157 | "databricks+pyhive://token:@:/",
158 | connect_args={"http_path": ""}
159 | )
160 |
161 | logs = Table("my_table", MetaData(bind=engine), autoload=True)
162 | print(select([func.count("*")], from_obj=logs).scalar())
163 |
164 |
165 | databricks+pyodbc
166 | ~~~~~~~~~~~~~~~~~
167 |
168 | Installing registers the ``databricks+pyodbc`` dialect/driver with SQLAlchemy. Fill in the required information when passing the engine URL.
169 |
170 | .. code-block:: python
171 |
172 | from sqlalchemy import *
173 | from sqlalchemy.engine import create_engine
174 | from sqlalchemy.schema import *
175 |
176 |
177 | engine = create_engine(
178 | "databricks+pyodbc://token:@:/",
179 | connect_args={"http_path": "", "driver_path": "/path/to/simba/driver"}
180 | )
181 |
182 | logs = Table("my_table", MetaData(bind=engine), autoload=True)
183 | print(select([func.count("*")], from_obj=logs).scalar())
184 |
185 |
186 | Refer to the following documentation for more details on hostname, cluster name, and http path:
187 |
188 | * `Databricks `__
189 | * `Azure Databricks `__
190 |
191 |
192 | Related
193 | -------
194 |
195 | * `pyhive <https://github.com/dropbox/PyHive>`__
196 | * `thrift <https://github.com/apache/thrift>`__
197 | * `pyodbc <https://github.com/mkleehammer/pyodbc>`__
198 |
--------------------------------------------------------------------------------
/databricks_dbapi/__init__.py:
--------------------------------------------------------------------------------
1 | from ._version import __author__
2 | from ._version import __author_email__
3 | from ._version import __copyright__
4 | from ._version import __description__
5 | from ._version import __license__
6 | from ._version import __title__
7 | from ._version import __url__
8 | from ._version import __version__
9 |
--------------------------------------------------------------------------------
/databricks_dbapi/_version.py:
--------------------------------------------------------------------------------
1 | """Version information."""
2 | __title__ = "databricks_dbapi"
3 | __description__ = "Databricks DBAPI."
4 | __url__ = "https://github.com/crflynn/databricks-dbapi"
5 | __version__ = "0.6.0"
6 | __author__ = "Christopher Flynn"
7 | __author_email__ = "crf204@gmail.com"
8 | __license__ = "MIT"
9 | __copyright__ = "Copyright 2018-2021 Christopher Flynn"
10 |
--------------------------------------------------------------------------------
/databricks_dbapi/hive.py:
--------------------------------------------------------------------------------
1 | """Provide a function to create a Hive connection to a Databricks cluster."""
2 | import base64
3 | import sys
4 |
5 | from pyhive import hive
6 | from pyhive.exc import * # Make all exceptions visible in this module per PEP 249
7 | from thrift.transport import THttpClient
8 |
# Major version of the running interpreter (2 or 3); used to select the
# appropriate base64 encoding path for the auth header.
PY_MAJOR = sys.version_info[0]

# PEP 249 module globals, re-exported from pyhive so this module can act as
# a drop-in DBAPI module.
apilevel = hive.apilevel
threadsafety = hive.threadsafety
paramstyle = hive.paramstyle
15 |
16 |
def connect(host, port=443, database="default", http_path=None, token=None, user=None, password=None, **kwargs):
    """Create a pyhive-driven DBAPI connection to Databricks workspace cluster.

    Create a DBAPI connection to a Databricks cluster, which can be used to generate
    DBAPI cursor(s). Provide an ``http_path`` from the cluster's JDBC/ODBC connection
    details.

    For authentication, provide either a ``token`` OR both a ``user`` and ``password``.
    Token authentication is strongly recommended over passwords.

    The simplest connection requires providing args ``host``, ``http_path``, and
    ``token``.

    :param str host: the server hostname from the cluster's JDBC/ODBC connection page.
    :param int port: the port number from the cluster's JDBC/ODBC connection page.
    :param str database: the database to use
    :param str http_path: the HTTP Path as shown in the cluster's JDBC/ODBC connection
        page.
    :param str token: a Databricks API token.
    :param str user: a Databricks user name.
    :param str password: the corresponding Databricks user's password.
    :param dict kwargs: keyword arguments passed to ``hive.connect``
    :raises ValueError: if no credentials are given, or if ``http_path`` is missing.
    """
    # Databricks authenticates the thrift-over-HTTP transport with HTTP basic
    # auth; token auth uses the literal user name "token".
    if token is not None:
        auth = "token:%s" % token
    elif user is not None and password is not None:
        auth = "%s:%s" % (user, password)
    else:
        raise ValueError("Missing arguments. Must provide either token or user/password.")

    # This package is Python-3-only (f-strings are used in the odbc module),
    # so the legacy Python 2 branch (https://kb.databricks.com/python/python-2-eol.html)
    # is gone and the str -> bytes -> str round trip is unconditional.
    auth = base64.standard_b64encode(auth.encode()).decode()

    if http_path is None:
        # The old message mentioned a "cluster" argument, which was removed
        # from the connect signature in 0.5.0; http_path is required now.
        raise ValueError("Missing arguments. Must provide http_path.")
    url = "https://%s:%s/%s" % (host, port, http_path)

    transport = THttpClient.THttpClient(url)
    transport.setCustomHeaders({"Authorization": "Basic %s" % auth})

    return hive.connect(database=database, thrift_transport=transport, **kwargs)
62 |
--------------------------------------------------------------------------------
/databricks_dbapi/odbc.py:
--------------------------------------------------------------------------------
1 | """Provide a function to create an ODBC connection to a Databricks cluster."""
2 | import pyodbc
3 | from pyodbc import * # Make globals and exceptions visible in this module per PEP 249
4 |
5 |
def connect(
    host,
    port=443,
    database="default",
    http_path=None,
    token=None,
    user=None,
    password=None,
    driver_path=None,
    **kwargs,
):
    """Create an ODBC DBAPI connection to a Databricks workspace or SQL Analytics cluster.

    Create a DBAPI connection to a Databricks cluster, which can be used to generate
    DBAPI cursor(s). Provide an ``http_path`` from the cluster's
    JDBC/ODBC connection details.

    For authentication, provide either a ``token`` OR both a ``user`` and ``password``.
    Token authentication is strongly recommended over passwords.

    The simplest connection requires providing args ``host``, ``http_path``,
    ``token``, and the ODBC ``driver_path``. The default path on Mac OSX is likely:
    ``/Library/simba/spark/lib/libsparkodbc_sbu.dylib``

    :param str host: the server hostname from the cluster's JDBC/ODBC connection page.
    :param int port: the port number from the cluster's JDBC/ODBC connection page.
    :param str database: the database to use
    :param str http_path: the HTTP Path as shown in the cluster's JDBC/ODBC connection
        page. Required if using Azure platform.
    :param str token: a Databricks API token.
    :param str user: a Databricks user name.
    :param str password: the corresponding Databricks user's password.
    :param str driver_path: the absolute path to the ODBC driver.
    :param dict kwargs: keyword args passed to ``pyodbc.connect``
    :raises ValueError: if ``driver_path`` or the credentials are missing.
    """
    if driver_path is None:
        raise ValueError("Driver path must be provided.")

    # Token auth maps onto basic auth with the literal user name "token".
    if token is not None:
        user, password = "token", token
    elif user is None or password is None:
        raise ValueError("Missing arguments. Must provide either token or user/password.")

    # Assemble the Simba driver connection string: per-connection settings
    # first, followed by the fixed Databricks transport/auth flags.
    settings = [
        ("Driver", driver_path),
        ("Database", database),
        ("Host", host),
        ("Port", port),
        ("httpPath", http_path),
        ("UID", user),
        ("PWD", password),
    ]
    fixed_flags = "transportMode=http;ssl=1;AuthMech=8;SparkServerType=3;ThriftTransport=1"
    connection_string = ";".join("%s=%s" % pair for pair in settings) + ";" + fixed_flags

    # autocommit is required
    return pyodbc.connect(connection_string, autocommit=True, **kwargs)
60 |
--------------------------------------------------------------------------------
/databricks_dbapi/sqlalchemy_dialects/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/crflynn/databricks-dbapi/a0bc86e45e648f15c52cf7242a336ae9d6572a6d/databricks_dbapi/sqlalchemy_dialects/__init__.py
--------------------------------------------------------------------------------
/databricks_dbapi/sqlalchemy_dialects/base.py:
--------------------------------------------------------------------------------
1 | import re
2 | from abc import ABC
3 |
4 | from pyhive.sqlalchemy_hive import HiveDialect
5 | from pyhive.sqlalchemy_hive import _type_map
6 | from sqlalchemy import types
7 | from sqlalchemy import util
8 |
9 |
class DatabricksDialectBase(HiveDialect, ABC):
    """Shared inspection overrides for the Databricks SQLAlchemy dialects."""

    def get_table_names(self, connection, schema=None, **kw):
        """Return the table names visible in ``schema`` (or the current one)."""
        statement = "SHOW TABLES"
        if schema:
            statement = "%s IN %s" % (statement, self.identifier_preparer.quote_identifier(schema))
        # Each result row is (database, tableName, ...); column 1 is the name.
        return [record[1] for record in connection.execute(statement)]

    def get_columns(self, connection, table_name, schema=None, **kw):
        """Get columns according to Databricks' hive or oss hive."""
        raw_rows = self._get_table_columns(connection, table_name, schema)
        # Normalize whitespace, then drop blank rows and the "# col_name" header.
        stripped = [[field.strip() if field else None for field in row] for row in raw_rows]
        described = [row for row in stripped if row[0] and row[0] != "# col_name"]

        columns = []
        for name, type_text, _comment in described:
            # Everything after the partition header (Databricks uses
            # "# Partitioning", oss hive "# Partition Information") repeats
            # partition columns, so stop there.
            if name in ("# Partition Information", "# Partitioning"):
                break
            # Reduce parameterized types to their base name,
            # e.g. "decimal(10,1)" -> "decimal", "map<...>" -> "map".
            base_type = re.search(r"^\w+", type_text).group(0)
            if base_type in _type_map:
                column_type = _type_map[base_type]
            else:
                util.warn("Did not recognize type '%s' of column '%s'" % (base_type, name))
                column_type = types.NullType
            columns.append(
                {
                    "name": name,
                    "type": column_type,
                    "nullable": True,
                    "default": None,
                }
            )
        return columns
48 |
--------------------------------------------------------------------------------
/databricks_dbapi/sqlalchemy_dialects/hive.py:
--------------------------------------------------------------------------------
1 | from databricks_dbapi import hive
2 | from databricks_dbapi.sqlalchemy_dialects.base import DatabricksDialectBase
3 |
4 |
class DatabricksPyhiveDialect(DatabricksDialectBase):
    """Databricks SQLAlchemy dialect driven by the pyhive-based DBAPI module."""

    name = "databricks"
    driver = "pyhive"

    @classmethod
    def dbapi(cls):
        # SQLAlchemy calls this hook to obtain the PEP 249 module.
        return hive

    def create_connect_args(self, url):
        """Translate an engine URL into (args, kwargs) for ``hive.connect``."""
        connect_kwargs = dict(
            host=url.host,
            port=url.port or 443,
            user=url.username,
            password=url.password,
            database=url.database or "default",
        )

        # http_path arrives as a query-string option on the engine URL.
        if url.query is not None and "http_path" in url.query:
            connect_kwargs["http_path"] = url.query["http_path"]

        # Remaining query options pass straight through to the DBAPI.
        connect_kwargs.update(url.query)
        return [], connect_kwargs
27 |
--------------------------------------------------------------------------------
/databricks_dbapi/sqlalchemy_dialects/odbc.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.connectors.pyodbc import PyODBCConnector
2 |
3 | from databricks_dbapi import odbc
4 | from databricks_dbapi.sqlalchemy_dialects.base import DatabricksDialectBase
5 |
6 |
class DatabricksPyodbcDialect(DatabricksDialectBase, PyODBCConnector):
    """Databricks SQLAlchemy dialect driven by the pyodbc-based DBAPI module."""

    name = "databricks"
    driver = "pyodbc"

    @classmethod
    def dbapi(cls):
        # SQLAlchemy calls this hook to obtain the PEP 249 module.
        return odbc

    def create_connect_args(self, url):
        """Translate an engine URL into (args, kwargs) for ``odbc.connect``.

        The inherited implementation emits a ``username`` keyword; our DBAPI
        follows PEP 249 and expects ``user``, so the key is renamed here.
        """
        elements, connect_kwargs = super().create_connect_args(url=url)
        connect_kwargs["user"] = connect_kwargs.pop("username")
        return elements, connect_kwargs
20 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | [[package]]
2 | name = "appdirs"
3 | version = "1.4.4"
4 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
5 | category = "dev"
6 | optional = false
7 | python-versions = "*"
8 |
9 | [[package]]
10 | name = "atomicwrites"
11 | version = "1.4.0"
12 | description = "Atomic file writes."
13 | category = "dev"
14 | optional = false
15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
16 |
17 | [[package]]
18 | name = "attrs"
19 | version = "20.3.0"
20 | description = "Classes Without Boilerplate"
21 | category = "dev"
22 | optional = false
23 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
24 |
25 | [package.extras]
26 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"]
27 | docs = ["furo", "sphinx", "zope.interface"]
28 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
29 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"]
30 |
31 | [[package]]
32 | name = "backports.functools-lru-cache"
33 | version = "1.6.1"
34 | description = "Backport of functools.lru_cache"
35 | category = "dev"
36 | optional = false
37 | python-versions = ">=2.6"
38 |
39 | [package.extras]
40 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
41 | testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov"]
42 |
43 | [[package]]
44 | name = "black"
45 | version = "20.8b1"
46 | description = "The uncompromising code formatter."
47 | category = "dev"
48 | optional = false
49 | python-versions = ">=3.6"
50 |
51 | [package.dependencies]
52 | appdirs = "*"
53 | click = ">=7.1.2"
54 | dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""}
55 | mypy-extensions = ">=0.4.3"
56 | pathspec = ">=0.6,<1"
57 | regex = ">=2020.1.8"
58 | toml = ">=0.10.1"
59 | typed-ast = ">=1.4.0"
60 | typing-extensions = ">=3.7.4"
61 |
62 | [package.extras]
63 | colorama = ["colorama (>=0.4.3)"]
64 | d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
65 |
66 | [[package]]
67 | name = "click"
68 | version = "7.1.2"
69 | description = "Composable command line interface toolkit"
70 | category = "dev"
71 | optional = false
72 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
73 |
74 | [[package]]
75 | name = "colorama"
76 | version = "0.4.4"
77 | description = "Cross-platform colored terminal text."
78 | category = "dev"
79 | optional = false
80 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
81 |
82 | [[package]]
83 | name = "configparser"
84 | version = "4.0.2"
85 | description = "Updated configparser from Python 3.7 for Python 2.6+."
86 | category = "dev"
87 | optional = false
88 | python-versions = ">=2.6"
89 |
90 | [package.extras]
91 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
92 | testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2)", "pytest-flake8", "pytest-black-multipy"]
93 |
94 | [[package]]
95 | name = "contextlib2"
96 | version = "0.6.0.post1"
97 | description = "Backports and enhancements for the contextlib module"
98 | category = "dev"
99 | optional = false
100 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
101 |
102 | [[package]]
103 | name = "dataclasses"
104 | version = "0.8"
105 | description = "A backport of the dataclasses module for Python 3.6"
106 | category = "dev"
107 | optional = false
108 | python-versions = ">=3.6, <3.7"
109 |
110 | [[package]]
111 | name = "funcsigs"
112 | version = "1.0.2"
113 | description = "Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+"
114 | category = "dev"
115 | optional = false
116 | python-versions = "*"
117 |
118 | [[package]]
119 | name = "future"
120 | version = "0.18.2"
121 | description = "Clean single-source support for Python 3 and 2"
122 | category = "main"
123 | optional = true
124 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
125 |
126 | [[package]]
127 | name = "importlib-metadata"
128 | version = "2.1.1"
129 | description = "Read metadata from Python packages"
130 | category = "dev"
131 | optional = false
132 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
133 |
134 | [package.dependencies]
135 | configparser = {version = ">=3.5", markers = "python_version < \"3\""}
136 | contextlib2 = {version = "*", markers = "python_version < \"3\""}
137 | pathlib2 = {version = "*", markers = "python_version < \"3\""}
138 | zipp = ">=0.5"
139 |
140 | [package.extras]
141 | docs = ["sphinx", "rst.linker"]
142 | testing = ["packaging", "pep517", "unittest2", "importlib-resources (>=1.3)"]
143 |
144 | [[package]]
145 | name = "isort"
146 | version = "5.7.0"
147 | description = "A Python utility / library to sort Python imports."
148 | category = "dev"
149 | optional = false
150 | python-versions = ">=3.6,<4.0"
151 |
152 | [package.extras]
153 | pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
154 | requirements_deprecated_finder = ["pipreqs", "pip-api"]
155 | colors = ["colorama (>=0.4.3,<0.5.0)"]
156 |
157 | [[package]]
158 | name = "more-itertools"
159 | version = "5.0.0"
160 | description = "More routines for operating on iterables, beyond itertools"
161 | category = "dev"
162 | optional = false
163 | python-versions = "*"
164 |
165 | [package.dependencies]
166 | six = ">=1.0.0,<2.0.0"
167 |
168 | [[package]]
169 | name = "more-itertools"
170 | version = "8.6.0"
171 | description = "More routines for operating on iterables, beyond itertools"
172 | category = "dev"
173 | optional = false
174 | python-versions = ">=3.5"
175 |
176 | [[package]]
177 | name = "mypy-extensions"
178 | version = "0.4.3"
179 | description = "Experimental type system extensions for programs checked with the mypy typechecker."
180 | category = "dev"
181 | optional = false
182 | python-versions = "*"
183 |
184 | [package.dependencies]
185 | typing = {version = ">=3.5.3", markers = "python_version < \"3.5\""}
186 |
187 | [[package]]
188 | name = "packaging"
189 | version = "20.8"
190 | description = "Core utilities for Python packages"
191 | category = "dev"
192 | optional = false
193 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
194 |
195 | [package.dependencies]
196 | pyparsing = ">=2.0.2"
197 |
198 | [[package]]
199 | name = "pathlib2"
200 | version = "2.3.5"
201 | description = "Object-oriented filesystem paths"
202 | category = "dev"
203 | optional = false
204 | python-versions = "*"
205 |
206 | [package.dependencies]
207 | scandir = {version = "*", markers = "python_version < \"3.5\""}
208 | six = "*"
209 |
210 | [[package]]
211 | name = "pathspec"
212 | version = "0.8.1"
213 | description = "Utility library for gitignore style pattern matching of file paths."
214 | category = "dev"
215 | optional = false
216 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
217 |
218 | [[package]]
219 | name = "pluggy"
220 | version = "0.13.1"
221 | description = "plugin and hook calling mechanisms for python"
222 | category = "dev"
223 | optional = false
224 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
225 |
226 | [package.dependencies]
227 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
228 |
229 | [package.extras]
230 | dev = ["pre-commit", "tox"]
231 |
232 | [[package]]
233 | name = "py"
234 | version = "1.10.0"
235 | description = "library with cross-python path, ini-parsing, io, code, log facilities"
236 | category = "dev"
237 | optional = false
238 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
239 |
240 | [[package]]
241 | name = "pyhive"
242 | version = "0.6.4"
243 | description = "Python interface to Hive"
244 | category = "main"
245 | optional = true
246 | python-versions = "*"
247 |
248 | [package.dependencies]
249 | future = "*"
250 | python-dateutil = "*"
251 |
252 | [package.extras]
253 | hive = ["sasl (>=0.2.1)", "thrift (>=0.10.0)", "thrift_sasl (>=0.1.0)"]
254 | kerberos = ["requests_kerberos (>=0.12.0)"]
255 | presto = ["requests (>=1.0.0)"]
256 | sqlalchemy = ["sqlalchemy (>=1.3.0)"]
257 | trino = ["requests (>=1.0.0)"]
258 |
259 | [[package]]
260 | name = "pyodbc"
261 | version = "4.0.30"
262 | description = "DB API Module for ODBC"
263 | category = "main"
264 | optional = true
265 | python-versions = "*"
266 |
267 | [[package]]
268 | name = "pyparsing"
269 | version = "2.4.7"
270 | description = "Python parsing module"
271 | category = "dev"
272 | optional = false
273 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
274 |
275 | [[package]]
276 | name = "pytest"
277 | version = "4.6.0"
278 | description = "pytest: simple powerful testing with Python"
279 | category = "dev"
280 | optional = false
281 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
282 |
283 | [package.dependencies]
284 | atomicwrites = ">=1.0"
285 | attrs = ">=17.4.0"
286 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
287 | funcsigs = {version = ">=1.0", markers = "python_version < \"3.0\""}
288 | importlib-metadata = ">=0.12"
289 | more-itertools = [
290 | {version = ">=4.0.0,<6.0.0", markers = "python_version <= \"2.7\""},
291 | {version = ">=4.0.0", markers = "python_version > \"2.7\""},
292 | ]
293 | packaging = "*"
294 | pathlib2 = {version = ">=2.2.0", markers = "python_version < \"3.6\""}
295 | pluggy = ">=0.12,<1.0"
296 | py = ">=1.5.0"
297 | six = ">=1.10.0"
298 | wcwidth = "*"
299 |
300 | [package.extras]
301 | testing = ["argcomplete", "hypothesis (>=3.56)", "nose", "requests", "mock"]
302 |
303 | [[package]]
304 | name = "python-dateutil"
305 | version = "2.8.1"
306 | description = "Extensions to the standard Python datetime module"
307 | category = "main"
308 | optional = true
309 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
310 |
311 | [package.dependencies]
312 | six = ">=1.5"
313 |
314 | [[package]]
315 | name = "python-dotenv"
316 | version = "0.15.0"
317 | description = "Add .env support to your django/flask apps in development and deployments"
318 | category = "dev"
319 | optional = false
320 | python-versions = "*"
321 |
322 | [package.dependencies]
323 | typing = {version = "*", markers = "python_version < \"3.5\""}
324 |
325 | [package.extras]
326 | cli = ["click (>=5.0)"]
327 |
328 | [[package]]
329 | name = "regex"
330 | version = "2020.11.13"
331 | description = "Alternative regular expression module, to replace re."
332 | category = "dev"
333 | optional = false
334 | python-versions = "*"
335 |
336 | [[package]]
337 | name = "scandir"
338 | version = "1.10.0"
339 | description = "scandir, a better directory iterator and faster os.walk()"
340 | category = "dev"
341 | optional = false
342 | python-versions = "*"
343 |
344 | [[package]]
345 | name = "six"
346 | version = "1.15.0"
347 | description = "Python 2 and 3 compatibility utilities"
348 | category = "main"
349 | optional = false
350 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
351 |
352 | [[package]]
353 | name = "sqlalchemy"
354 | version = "1.3.22"
355 | description = "Database Abstraction Library"
356 | category = "main"
357 | optional = true
358 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
359 |
360 | [package.extras]
361 | mssql = ["pyodbc"]
362 | mssql_pymssql = ["pymssql"]
363 | mssql_pyodbc = ["pyodbc"]
364 | mysql = ["mysqlclient"]
365 | oracle = ["cx-oracle"]
366 | postgresql = ["psycopg2"]
367 | postgresql_pg8000 = ["pg8000"]
368 | postgresql_psycopg2binary = ["psycopg2-binary"]
369 | postgresql_psycopg2cffi = ["psycopg2cffi"]
370 | pymysql = ["pymysql"]
371 |
372 | [[package]]
373 | name = "thrift"
374 | version = "0.15.0"
375 | description = "Python bindings for the Apache Thrift RPC system"
376 | category = "main"
377 | optional = true
378 | python-versions = "*"
379 |
380 | [package.dependencies]
381 | six = ">=1.7.2"
382 |
383 | [package.extras]
384 | all = ["tornado (>=4.0)", "twisted"]
385 | tornado = ["tornado (>=4.0)"]
386 | twisted = ["twisted"]
387 |
388 | [[package]]
389 | name = "toml"
390 | version = "0.10.2"
391 | description = "Python Library for Tom's Obvious, Minimal Language"
392 | category = "dev"
393 | optional = false
394 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
395 |
396 | [[package]]
397 | name = "typed-ast"
398 | version = "1.4.2"
399 | description = "a fork of Python 2 and 3 ast modules with type comment support"
400 | category = "dev"
401 | optional = false
402 | python-versions = "*"
403 |
404 | [[package]]
405 | name = "typing"
406 | version = "3.7.4.3"
407 | description = "Type Hints for Python"
408 | category = "dev"
409 | optional = false
410 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
411 |
412 | [[package]]
413 | name = "typing-extensions"
414 | version = "3.7.4.3"
415 | description = "Backported and Experimental Type Hints for Python 3.5+"
416 | category = "dev"
417 | optional = false
418 | python-versions = "*"
419 |
420 | [package.dependencies]
421 | typing = {version = ">=3.7.4", markers = "python_version < \"3.5\""}
422 |
423 | [[package]]
424 | name = "wcwidth"
425 | version = "0.2.5"
426 | description = "Measures the displayed width of unicode strings in a terminal"
427 | category = "dev"
428 | optional = false
429 | python-versions = "*"
430 |
431 | [package.dependencies]
432 | "backports.functools-lru-cache" = {version = ">=1.2.1", markers = "python_version < \"3.2\""}
433 |
434 | [[package]]
435 | name = "zipp"
436 | version = "1.2.0"
437 | description = "Backport of pathlib-compatible object wrapper for zip files"
438 | category = "dev"
439 | optional = false
440 | python-versions = ">=2.7"
441 |
442 | [package.dependencies]
443 | contextlib2 = {version = "*", markers = "python_version < \"3.4\""}
444 |
445 | [package.extras]
446 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
447 | testing = ["pathlib2", "unittest2", "jaraco.itertools", "func-timeout"]
448 |
449 | [extras]
450 | hive = ["pyhive", "thrift"]
451 | odbc = ["pyodbc"]
452 | sqlalchemy = ["sqlalchemy"]
453 |
454 | [metadata]
455 | lock-version = "1.1"
456 | python-versions = "~2.7 || ^3.5"
457 | content-hash = "b5810bd3e3c93e8de9c7e2ae5bddd74880d2b78bffcc9508088311daa617f7c2"
458 |
459 | [metadata.files]
460 | appdirs = [
461 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
462 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
463 | ]
464 | atomicwrites = [
465 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
466 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
467 | ]
468 | attrs = [
469 | {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"},
470 | {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"},
471 | ]
472 | "backports.functools-lru-cache" = [
473 | {file = "backports.functools_lru_cache-1.6.1-py2.py3-none-any.whl", hash = "sha256:0bada4c2f8a43d533e4ecb7a12214d9420e66eb206d54bf2d682581ca4b80848"},
474 | {file = "backports.functools_lru_cache-1.6.1.tar.gz", hash = "sha256:8fde5f188da2d593bd5bc0be98d9abc46c95bb8a9dde93429570192ee6cc2d4a"},
475 | ]
476 | black = [
477 | {file = "black-20.8b1-py3-none-any.whl", hash = "sha256:70b62ef1527c950db59062cda342ea224d772abdf6adc58b86a45421bab20a6b"},
478 | {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"},
479 | ]
480 | click = [
481 | {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
482 | {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
483 | ]
484 | colorama = [
485 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
486 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
487 | ]
488 | configparser = [
489 | {file = "configparser-4.0.2-py2.py3-none-any.whl", hash = "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c"},
490 | {file = "configparser-4.0.2.tar.gz", hash = "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df"},
491 | ]
492 | contextlib2 = [
493 | {file = "contextlib2-0.6.0.post1-py2.py3-none-any.whl", hash = "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b"},
494 | {file = "contextlib2-0.6.0.post1.tar.gz", hash = "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e"},
495 | ]
496 | dataclasses = [
497 | {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"},
498 | {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"},
499 | ]
500 | funcsigs = [
501 | {file = "funcsigs-1.0.2-py2.py3-none-any.whl", hash = "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca"},
502 | {file = "funcsigs-1.0.2.tar.gz", hash = "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"},
503 | ]
504 | future = [
505 | {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"},
506 | ]
507 | importlib-metadata = [
508 | {file = "importlib_metadata-2.1.1-py2.py3-none-any.whl", hash = "sha256:c2d6341ff566f609e89a2acb2db190e5e1d23d5409d6cc8d2fe34d72443876d4"},
509 | {file = "importlib_metadata-2.1.1.tar.gz", hash = "sha256:b8de9eff2b35fb037368f28a7df1df4e6436f578fa74423505b6c6a778d5b5dd"},
510 | ]
511 | isort = [
512 | {file = "isort-5.7.0-py3-none-any.whl", hash = "sha256:fff4f0c04e1825522ce6949973e83110a6e907750cd92d128b0d14aaaadbffdc"},
513 | {file = "isort-5.7.0.tar.gz", hash = "sha256:c729845434366216d320e936b8ad6f9d681aab72dc7cbc2d51bedc3582f3ad1e"},
514 | ]
515 | more-itertools = [
516 | {file = "more-itertools-5.0.0.tar.gz", hash = "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4"},
517 | {file = "more_itertools-5.0.0-py2-none-any.whl", hash = "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc"},
518 | {file = "more_itertools-5.0.0-py3-none-any.whl", hash = "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"},
519 | {file = "more-itertools-8.6.0.tar.gz", hash = "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf"},
520 | {file = "more_itertools-8.6.0-py3-none-any.whl", hash = "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330"},
521 | ]
522 | mypy-extensions = [
523 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
524 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
525 | ]
526 | packaging = [
527 | {file = "packaging-20.8-py2.py3-none-any.whl", hash = "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858"},
528 | {file = "packaging-20.8.tar.gz", hash = "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"},
529 | ]
530 | pathlib2 = [
531 | {file = "pathlib2-2.3.5-py2.py3-none-any.whl", hash = "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db"},
532 | {file = "pathlib2-2.3.5.tar.gz", hash = "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"},
533 | ]
534 | pathspec = [
535 | {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"},
536 | {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"},
537 | ]
538 | pluggy = [
539 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
540 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
541 | ]
542 | py = [
543 | {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
544 | {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"},
545 | ]
546 | pyhive = [
547 | {file = "PyHive-0.6.4.tar.gz", hash = "sha256:10577bb3393e3da3d8ba68b2cfe800edcc1fbb0bf48394fb9a2701740f79665f"},
548 | ]
549 | pyodbc = [
550 | {file = "pyodbc-4.0.30-cp27-cp27m-win32.whl", hash = "sha256:a1af49a2f4f0abbafdc018d510e31561d3f9472725dc1d49cce3bd2e10e9ec18"},
551 | {file = "pyodbc-4.0.30-cp27-cp27m-win_amd64.whl", hash = "sha256:eb9e7a4a5126f2695f307b2a6b0b94cbfccfe7481be2c0d33f5456515328f1cc"},
552 | {file = "pyodbc-4.0.30-cp27-none-macosx_10_15_x86_64.whl", hash = "sha256:3a8212be2e49ff29d71d40a9c1af2cdaa71dcc7246cf80a0f9c7254de47ea4a9"},
553 | {file = "pyodbc-4.0.30-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2f0079951016729b51babebd6aa8112ecef53e11eea3116036c4ec7105f41514"},
554 | {file = "pyodbc-4.0.30-cp36-cp36m-win32.whl", hash = "sha256:7113daddcf346ff095904c568d1e1019f567da74058b4e69099e23bc98211691"},
555 | {file = "pyodbc-4.0.30-cp36-cp36m-win_amd64.whl", hash = "sha256:d9d1469786519c3b545168b45db7c3ece3b493c89d51bb5732d38a2eac6d0863"},
556 | {file = "pyodbc-4.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:52a42be6561932d74bbcc5b0f54dcdcf2eceae3b03192fc0db64a5020bbca057"},
557 | {file = "pyodbc-4.0.30-cp37-cp37m-win32.whl", hash = "sha256:8fa4147bf3bff1b66a9b1a0063094ca1686b9319383e711e7c193c2b4728b572"},
558 | {file = "pyodbc-4.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:1b8ed92bd50c6d83dec88153880405434bc261bb013ca02809827bb3ffbb319a"},
559 | {file = "pyodbc-4.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d4ffeab51dcc03c4c1a9a200f70999ce9b827c91defc4f5740633a6d47d3a206"},
560 | {file = "pyodbc-4.0.30-cp38-cp38-win32.whl", hash = "sha256:d3ad340e0053b6ec4130957efbcecce6de48d68e7e78792ea7588e27ffa629f1"},
561 | {file = "pyodbc-4.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:bce7e41c7cfc06ec976245f361221dfdd0f04e804010cf255cbb36985f6c3406"},
562 | {file = "pyodbc-4.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3bf7fd4d7a42fa4f0a50cf2418d49cd3c8fa05b8a1972534f0ebfadd92c7ce52"},
563 | {file = "pyodbc-4.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:b149b0c2d11833bbd5355a8fe5a8b41fba5518dacdc613a0218fd77c2969a4f5"},
564 | {file = "pyodbc-4.0.30.tar.gz", hash = "sha256:852b5deeeb3366af8b4408efed993501708be45d221881bce60c9aac54be726a"},
565 | ]
566 | pyparsing = [
567 | {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
568 | {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
569 | ]
570 | pytest = [
571 | {file = "pytest-4.6.0-py2.py3-none-any.whl", hash = "sha256:5467f37a0d6bb0b4e684b71af268e005996b9eaaefe54e3d64d86afd90da8d78"},
572 | {file = "pytest-4.6.0.tar.gz", hash = "sha256:52fa94b4ac81d2f063ee05e303acedf5c605e15dc0f4eef468b5c137f77241c3"},
573 | ]
574 | python-dateutil = [
575 | {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"},
576 | {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"},
577 | ]
578 | python-dotenv = [
579 | {file = "python-dotenv-0.15.0.tar.gz", hash = "sha256:587825ed60b1711daea4832cf37524dfd404325b7db5e25ebe88c495c9f807a0"},
580 | {file = "python_dotenv-0.15.0-py2.py3-none-any.whl", hash = "sha256:0c8d1b80d1a1e91717ea7d526178e3882732420b03f08afea0406db6402e220e"},
581 | ]
582 | regex = [
583 | {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"},
584 | {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"},
585 | {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"},
586 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"},
587 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"},
588 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"},
589 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"},
590 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"},
591 | {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"},
592 | {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"},
593 | {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"},
594 | {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"},
595 | {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"},
596 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"},
597 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"},
598 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"},
599 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"},
600 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"},
601 | {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"},
602 | {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"},
603 | {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"},
604 | {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"},
605 | {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"},
606 | {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"},
607 | {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"},
608 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"},
609 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"},
610 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"},
611 | {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"},
612 | {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"},
613 | {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"},
614 | {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"},
615 | {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"},
616 | {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"},
617 | {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"},
618 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"},
619 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"},
620 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"},
621 | {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"},
622 | {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"},
623 | {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"},
624 | ]
625 | scandir = [
626 | {file = "scandir-1.10.0-cp27-cp27m-win32.whl", hash = "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188"},
627 | {file = "scandir-1.10.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"},
628 | {file = "scandir-1.10.0-cp34-cp34m-win32.whl", hash = "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f"},
629 | {file = "scandir-1.10.0-cp34-cp34m-win_amd64.whl", hash = "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e"},
630 | {file = "scandir-1.10.0-cp35-cp35m-win32.whl", hash = "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f"},
631 | {file = "scandir-1.10.0-cp35-cp35m-win_amd64.whl", hash = "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32"},
632 | {file = "scandir-1.10.0-cp36-cp36m-win32.whl", hash = "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022"},
633 | {file = "scandir-1.10.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4"},
634 | {file = "scandir-1.10.0-cp37-cp37m-win32.whl", hash = "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173"},
635 | {file = "scandir-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d"},
636 | {file = "scandir-1.10.0.tar.gz", hash = "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae"},
637 | ]
638 | six = [
639 | {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
640 | {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
641 | ]
642 | sqlalchemy = [
643 | {file = "SQLAlchemy-1.3.22-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:61628715931f4962e0cdb2a7c87ff39eea320d2aa96bd471a3c293d146f90394"},
644 | {file = "SQLAlchemy-1.3.22-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:81d8d099a49f83111cce55ec03cc87eef45eec0d90f9842b4fc674f860b857b0"},
645 | {file = "SQLAlchemy-1.3.22-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:d055ff750fcab69ca4e57b656d9c6ad33682e9b8d564f2fbe667ab95c63591b0"},
646 | {file = "SQLAlchemy-1.3.22-cp27-cp27m-win32.whl", hash = "sha256:9bf572e4f5aa23f88dd902f10bb103cb5979022a38eec684bfa6d61851173fec"},
647 | {file = "SQLAlchemy-1.3.22-cp27-cp27m-win_amd64.whl", hash = "sha256:7d4b8de6bb0bc736161cb0bbd95366b11b3eb24dd6b814a143d8375e75af9990"},
648 | {file = "SQLAlchemy-1.3.22-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4a84c7c7658dd22a33dab2e2aa2d17c18cb004a42388246f2e87cb4085ef2811"},
649 | {file = "SQLAlchemy-1.3.22-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:f1e88b30da8163215eab643962ae9d9252e47b4ea53404f2c4f10f24e70ddc62"},
650 | {file = "SQLAlchemy-1.3.22-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f115150cc4361dd46153302a640c7fa1804ac207f9cc356228248e351a8b4676"},
651 | {file = "SQLAlchemy-1.3.22-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6aaa13ee40c4552d5f3a59f543f0db6e31712cc4009ec7385407be4627259d41"},
652 | {file = "SQLAlchemy-1.3.22-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3ab5b44a07b8c562c6dcb7433c6a6c6e03266d19d64f87b3333eda34e3b9936b"},
653 | {file = "SQLAlchemy-1.3.22-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:426ece890153ccc52cc5151a1a0ed540a5a7825414139bb4c95a868d8da54a52"},
654 | {file = "SQLAlchemy-1.3.22-cp35-cp35m-win32.whl", hash = "sha256:bd4b1af45fd322dcd1fb2a9195b4f93f570d1a5902a842e3e6051385fac88f9c"},
655 | {file = "SQLAlchemy-1.3.22-cp35-cp35m-win_amd64.whl", hash = "sha256:62285607a5264d1f91590abd874d6a498e229d5840669bd7d9f654cfaa599bd0"},
656 | {file = "SQLAlchemy-1.3.22-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:314f5042c0b047438e19401d5f29757a511cfc2f0c40d28047ca0e4c95eabb5b"},
657 | {file = "SQLAlchemy-1.3.22-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:62fb881ba51dbacba9af9b779211cf9acff3442d4f2993142015b22b3cd1f92a"},
658 | {file = "SQLAlchemy-1.3.22-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:bde677047305fe76c7ee3e4492b545e0018918e44141cc154fe39e124e433991"},
659 | {file = "SQLAlchemy-1.3.22-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:0c6406a78a714a540d980a680b86654feadb81c8d0eecb59f3d6c554a4c69f19"},
660 | {file = "SQLAlchemy-1.3.22-cp36-cp36m-win32.whl", hash = "sha256:95bde07d19c146d608bccb9b16e144ec8f139bcfe7fd72331858698a71c9b4f5"},
661 | {file = "SQLAlchemy-1.3.22-cp36-cp36m-win_amd64.whl", hash = "sha256:888d5b4b5aeed0d3449de93ea80173653e939e916cc95fe8527079e50235c1d2"},
662 | {file = "SQLAlchemy-1.3.22-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:d53f59744b01f1440a1b0973ed2c3a7de204135c593299ee997828aad5191693"},
663 | {file = "SQLAlchemy-1.3.22-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:70121f0ae48b25ef3e56e477b88cd0b0af0e1f3a53b5554071aa6a93ef378a03"},
664 | {file = "SQLAlchemy-1.3.22-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:54da615e5b92c339e339fe8536cce99fe823b6ed505d4ea344852aefa1c205fb"},
665 | {file = "SQLAlchemy-1.3.22-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:68428818cf80c60dc04aa0f38da20ad39b28aba4d4d199f949e7d6e04444ea86"},
666 | {file = "SQLAlchemy-1.3.22-cp37-cp37m-win32.whl", hash = "sha256:17610d573e698bf395afbbff946544fbce7c5f4ee77b5bcb1f821b36345fae7a"},
667 | {file = "SQLAlchemy-1.3.22-cp37-cp37m-win_amd64.whl", hash = "sha256:216ba5b4299c95ed179b58f298bda885a476b16288ab7243e89f29f6aeced7e0"},
668 | {file = "SQLAlchemy-1.3.22-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:0c72b90988be749e04eff0342dcc98c18a14461eb4b2ad59d611b57b31120f90"},
669 | {file = "SQLAlchemy-1.3.22-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:491fe48adc07d13e020a8b07ef82eefc227003a046809c121bea81d3dbf1832d"},
670 | {file = "SQLAlchemy-1.3.22-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f8191fef303025879e6c3548ecd8a95aafc0728c764ab72ec51a0bdf0c91a341"},
671 | {file = "SQLAlchemy-1.3.22-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:108580808803c7732f34798eb4a329d45b04c562ed83ee90f09f6a184a42b766"},
672 | {file = "SQLAlchemy-1.3.22-cp38-cp38-win32.whl", hash = "sha256:bab5a1e15b9466a25c96cda19139f3beb3e669794373b9ce28c4cf158c6e841d"},
673 | {file = "SQLAlchemy-1.3.22-cp38-cp38-win_amd64.whl", hash = "sha256:318b5b727e00662e5fc4b4cd2bf58a5116d7c1b4dd56ffaa7d68f43458a8d1ed"},
674 | {file = "SQLAlchemy-1.3.22-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:1418f5e71d6081aa1095a1d6b567a562d2761996710bdce9b6e6ba20a03d0864"},
675 | {file = "SQLAlchemy-1.3.22-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:5a7f224cdb7233182cec2a45d4c633951268d6a9bcedac37abbf79dd07012aea"},
676 | {file = "SQLAlchemy-1.3.22-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:715b34578cc740b743361f7c3e5f584b04b0f1344f45afc4e87fbac4802eb0a0"},
677 | {file = "SQLAlchemy-1.3.22-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2ff132a379838b1abf83c065be54cef32b47c987aedd06b82fc76476c85225eb"},
678 | {file = "SQLAlchemy-1.3.22-cp39-cp39-win32.whl", hash = "sha256:c389d7cc2b821853fb018c85457da3e7941db64f4387720a329bc7ff06a27963"},
679 | {file = "SQLAlchemy-1.3.22-cp39-cp39-win_amd64.whl", hash = "sha256:04f995fcbf54e46cddeb4f75ce9dfc17075d6ae04ac23b2bacb44b3bc6f6bf11"},
680 | {file = "SQLAlchemy-1.3.22.tar.gz", hash = "sha256:758fc8c4d6c0336e617f9f6919f9daea3ab6bb9b07005eda9a1a682e24a6cacc"},
681 | ]
682 | thrift = [
683 | {file = "thrift-0.15.0.tar.gz", hash = "sha256:87c8205a71cf8bbb111cb99b1f7495070fbc9cabb671669568854210da5b3e29"},
684 | ]
685 | toml = [
686 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
687 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
688 | ]
689 | typed-ast = [
690 | {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"},
691 | {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"},
692 | {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"},
693 | {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"},
694 | {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"},
695 | {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"},
696 | {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"},
697 | {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"},
698 | {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"},
699 | {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"},
700 | {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"},
701 | {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"},
702 | {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"},
703 | {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"},
704 | {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"},
705 | {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"},
706 | {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"},
707 | {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"},
708 | {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"},
709 | {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"},
710 | {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"},
711 | {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"},
712 | {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"},
713 | {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"},
714 | {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"},
715 | {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"},
716 | {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"},
717 | {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"},
718 | {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"},
719 | {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"},
720 | ]
721 | typing = [
722 | {file = "typing-3.7.4.3-py2-none-any.whl", hash = "sha256:283d868f5071ab9ad873e5e52268d611e851c870a2ba354193026f2dfb29d8b5"},
723 | {file = "typing-3.7.4.3.tar.gz", hash = "sha256:1187fb9c82fd670d10aa07bbb6cfcfe4bdda42d6fab8d5134f04e8c4d0b71cc9"},
724 | ]
725 | typing-extensions = [
726 | {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"},
727 | {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"},
728 | {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"},
729 | ]
730 | wcwidth = [
731 | {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
732 | {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
733 | ]
734 | zipp = [
735 | {file = "zipp-1.2.0-py2.py3-none-any.whl", hash = "sha256:e0d9e63797e483a30d27e09fffd308c59a700d365ec34e93cc100844168bf921"},
736 | {file = "zipp-1.2.0.tar.gz", hash = "sha256:c70410551488251b0fee67b460fb9a536af8d6f9f008ad10ac51f615b6a521b1"},
737 | ]
738 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 120
3 | target-version = ['py37']
4 | include = '\.pyi?$'
5 | exclude = '''
6 | (
7 | /(
8 | \.eggs
9 | | \.git
10 | | \.github
11 | | \.hg
12 | | \.mypy_cache
13 | | \.pytest_cache
14 | | \.tox
15 | | \.venv
16 | | _build
17 | | build
18 | | dist
19 | )/
20 | )
21 | '''
22 |
23 | [tool.isort]
24 | force_single_line = true
25 | multi_line_output = 3
26 | include_trailing_comma = true
27 | force_grid_wrap = 0
28 | use_parentheses = true
29 | line_length = 120
30 |
31 | [tool.poetry]
32 | name = "databricks_dbapi"
33 | version = "0.6.0"
34 | description = "A DBAPI 2.0 interface and SQLAlchemy dialect for Databricks interactive clusters."
35 | authors = ["Christopher Flynn"]
36 | license = "MIT"
37 | readme = "README.rst"
38 | homepage = "https://github.com/crflynn/databricks-dbapi"
39 | repository = "https://github.com/crflynn/databricks-dbapi"
40 | documentation = "https://github.com/crflynn/databricks-dbapi"
41 | keywords = ["databricks", "hive", "dbapi", "sqlalchemy", "dialect"]
42 | classifiers = [
43 | "License :: OSI Approved :: MIT License",
44 | "Natural Language :: English",
45 | "Programming Language :: Python :: 2",
46 | "Programming Language :: Python :: 2.7",
47 | "Programming Language :: Python :: 3",
48 | "Programming Language :: Python :: 3.5",
49 | "Programming Language :: Python :: 3.6",
50 | "Programming Language :: Python :: 3.7",
51 | "Programming Language :: Python :: 3.8",
52 | "Programming Language :: Python :: 3.9",
53 | "Programming Language :: Python :: Implementation :: CPython",
54 | "Topic :: Database",
55 | "Topic :: Database :: Front-Ends",
56 | ]
57 | include = ["CHANGELOG.rst"]
58 |
59 | [tool.poetry.plugins."sqlalchemy.dialects"]
60 | "databricks.pyhive" = "databricks_dbapi.sqlalchemy_dialects.hive:DatabricksPyhiveDialect"
61 | "databricks.pyodbc" = "databricks_dbapi.sqlalchemy_dialects.odbc:DatabricksPyodbcDialect"
62 |
63 | [tool.poetry.extras]
64 | sqlalchemy = ["sqlalchemy"]
65 | hive = ["pyhive", "thrift"]
66 | odbc = ["pyodbc"]
67 |
68 | [tool.poetry.dependencies]
69 | python = "~2.7 || ^3.5"
70 | # pyhive with databricks does not use sasl, so in lieu of using
71 | # the hive extra here which requires sasl, thrift, and thrift-sasl,
72 | # we just install thrift.
73 | pyhive = {version = "^0.6.1", optional = true}
74 | thrift = {version = "^0.15.0", optional = true}
75 | sqlalchemy = {version = "^1.3", optional = true}
76 | pyodbc = {version = "^4.0.30", optional = true}
77 |
78 | [tool.poetry.dev-dependencies]
79 | black = {version = "^20.8b1", python = "^3.7" }
80 | isort = {version = "^5.7.0", python = "^3.7" }
81 | pytest = "4.6"
82 | python-dotenv = "^0.15.0"
83 |
84 | [build-system]
85 | requires = ["poetry-core>=1.0.0"]
86 | build-backend = "poetry.core.masonry.api"
87 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
import os

import dotenv
import pytest

# Load variables from a local .env file into the process environment before
# reading them below (see .env.sample for the expected keys).
dotenv.load_dotenv(dotenv_path=dotenv.find_dotenv())

# Integration-test credentials and connection settings for a live Databricks
# deployment. Indexing os.environ directly means any missing variable raises
# KeyError at import time and aborts pytest collection for the whole suite —
# presumably intentional fail-fast behavior for misconfiguration; confirm
# before relying on partial test runs.
TOKEN_WORKSPACE = os.environ["DATABRICKS_TOKEN_WORKSPACE"]
TOKEN_SQL_ANALYTICS = os.environ["DATABRICKS_TOKEN_SQL_ANALYTICS"]
USER = os.environ["DATABRICKS_USER"]
PASSWORD = os.environ["DATABRICKS_PASSWORD"]
HOST = os.environ["DATABRICKS_HOST"]
HTTP_PATH_WORKSPACE = os.environ["DATABRICKS_HTTP_PATH_WORKSPACE"]
HTTP_PATH_SQL_ANALYTICS = os.environ["DATABRICKS_HTTP_PATH_SQL_ANALYTICS"]
ODBC_DRIVER_PATH = os.environ["DATABRICKS_ODBC_DRIVER_PATH"]
16 |
17 |
@pytest.fixture
def token_workspace() -> str:
    """Personal access token for the Databricks interactive workspace cluster."""
    return TOKEN_WORKSPACE
21 |
22 |
@pytest.fixture
def token_sql_analytics() -> str:
    """Personal access token for the Databricks SQL Analytics endpoint."""
    return TOKEN_SQL_ANALYTICS
26 |
27 |
@pytest.fixture
def user() -> str:
    """Databricks username from DATABRICKS_USER."""
    return USER
31 |
32 |
@pytest.fixture
def password() -> str:
    """Databricks password from DATABRICKS_PASSWORD."""
    return PASSWORD
36 |
37 |
@pytest.fixture
def host() -> str:
    """Databricks server hostname from DATABRICKS_HOST."""
    return HOST
41 |
42 |
@pytest.fixture
def http_path_workspace() -> str:
    """HTTP path for the interactive workspace cluster endpoint."""
    return HTTP_PATH_WORKSPACE
46 |
47 |
@pytest.fixture
def http_path_sql_analytics() -> str:
    """HTTP path for the SQL Analytics endpoint."""
    return HTTP_PATH_SQL_ANALYTICS
51 |
52 |
@pytest.fixture
def odbc_driver_path() -> str:
    """Filesystem path to the Simba/Databricks ODBC driver shared library — TODO confirm driver flavor."""
    return ODBC_DRIVER_PATH
56 |
--------------------------------------------------------------------------------
/tests/test_hive.py:
--------------------------------------------------------------------------------
1 | from databricks_dbapi import hive
2 |
3 |
def test_workspace(host, http_path_workspace, token_workspace):
    """Smoke test: open a hive (Thrift) DBAPI connection to a workspace cluster.

    Fixes the original, which only printed the cursor and never closed the
    connection: now asserts a cursor is produced and releases the connection
    in a ``finally`` block (PEP 249 connections expose ``close()``).
    """
    connection = hive.connect(host=host, http_path=http_path_workspace, token=token_workspace)
    try:
        cursor = connection.cursor()
        assert cursor is not None
    finally:
        connection.close()
8 |
--------------------------------------------------------------------------------
/tests/test_odbc.py:
--------------------------------------------------------------------------------
1 | from databricks_dbapi import odbc
2 |
3 |
def test_workspace(host, http_path_workspace, token_workspace, odbc_driver_path):
    """Smoke test: open an ODBC DBAPI connection to a workspace cluster.

    Fixes the original, which only printed the cursor and leaked the
    connection: now asserts a cursor is produced and always closes the
    connection afterwards.
    """
    connection = odbc.connect(
        host=host, http_path=http_path_workspace, token=token_workspace, driver_path=odbc_driver_path
    )
    try:
        cursor = connection.cursor()
        assert cursor is not None
    finally:
        connection.close()
10 |
11 |
def test_sql_analytics(host, http_path_sql_analytics, token_sql_analytics, odbc_driver_path):
    """Smoke test: open an ODBC DBAPI connection to a SQL Analytics endpoint.

    Fixes the original, which only printed the cursor and leaked the
    connection: now asserts a cursor is produced and always closes the
    connection afterwards.
    """
    connection = odbc.connect(
        host=host, http_path=http_path_sql_analytics, token=token_sql_analytics, driver_path=odbc_driver_path
    )
    try:
        cursor = connection.cursor()
        assert cursor is not None
    finally:
        connection.close()
18 |
--------------------------------------------------------------------------------
/tests/test_sqlalchemy_dialects/test_hive_dialect.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import create_engine
2 |
3 |
def test_sqlalchemy_workspace(token_workspace, host, http_path_workspace):
    """Smoke test: connect to a workspace cluster via the databricks+pyhive dialect.

    Fixes the original, which only printed the connection and leaked both the
    connection and the engine pool: now asserts the connection is live, then
    closes it and disposes of the engine. Also drops the redundant
    ``f"{http_path_workspace}"`` wrapper around an already-string value.
    """
    engine = create_engine(
        f"databricks+pyhive://token:{token_workspace}@{host}:443/default",
        connect_args={"http_path": http_path_workspace},
    )
    connection = engine.connect()
    try:
        assert not connection.closed
    finally:
        connection.close()
        engine.dispose()
11 |
--------------------------------------------------------------------------------
/tests/test_sqlalchemy_dialects/test_odbc_dialect.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import create_engine
2 |
3 |
def test_sqlalchemy_workspace(token_workspace, host, http_path_workspace, odbc_driver_path):
    """Smoke test: connect to a workspace cluster via the databricks+pyodbc dialect.

    Fixes the original, which only printed the connection and leaked both the
    connection and the engine pool: now asserts the connection is live, then
    closes it and disposes of the engine. Also drops the redundant
    ``f"{http_path_workspace}"`` wrapper around an already-string value.
    """
    engine = create_engine(
        f"databricks+pyodbc://token:{token_workspace}@{host}:443/default",
        connect_args={"http_path": http_path_workspace, "driver_path": odbc_driver_path},
    )
    connection = engine.connect()
    try:
        assert not connection.closed
    finally:
        connection.close()
        engine.dispose()
11 |
12 |
def test_sqlalchemy_sql_analytics(token_sql_analytics, host, http_path_sql_analytics, odbc_driver_path):
    """Smoke test: connect to a SQL Analytics endpoint via the databricks+pyodbc dialect.

    Fixes the original, which only printed the connection and leaked both the
    connection and the engine pool: now asserts the connection is live, then
    closes it and disposes of the engine. Also drops the redundant
    ``f"{http_path_sql_analytics}"`` wrapper around an already-string value.
    """
    engine = create_engine(
        f"databricks+pyodbc://token:{token_sql_analytics}@{host}:443/default",
        connect_args={"http_path": http_path_sql_analytics, "driver_path": odbc_driver_path},
    )
    connection = engine.connect()
    try:
        assert not connection.closed
    finally:
        connection.close()
        engine.dispose()
20 |
--------------------------------------------------------------------------------