├── .bumpversion.cfg
├── .gitattributes
├── .github
└── workflows
│ ├── publish_on_pypi.yml
│ └── test_with_tox.yml
├── .gitignore
├── LICENSE
├── MANIFEST.in
├── README.md
├── dbutils
├── __init__.py
├── persistent_db.py
├── persistent_pg.py
├── pooled_db.py
├── pooled_pg.py
├── simple_pooled_db.py
├── simple_pooled_pg.py
├── steady_db.py
└── steady_pg.py
├── docs
├── changelog.html
├── changelog.rst
├── dependencies_db.png
├── dependencies_pg.png
├── doc.css
├── docutils.css
├── main.de.html
├── main.de.rst
├── main.html
├── main.rst
├── make.py
├── persistent.png
└── pooled.png
├── pyproject.toml
├── tests
├── __init__.py
├── mock_db.py
├── mock_pg.py
├── test_persistent_db.py
├── test_persistent_pg.py
├── test_pooled_db.py
├── test_pooled_pg.py
├── test_simple_pooled_db.py
├── test_simple_pooled_pg.py
├── test_steady_db.py
├── test_steady_pg.py
└── test_threading_local.py
└── tox.ini
/.bumpversion.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 3.1.1
3 |
4 | [bumpversion:file:pyproject.toml]
5 | search = version = "{current_version}"
6 | replace = version = "{new_version}"
7 |
8 | [bumpversion:file:dbutils/__init__.py]
9 | search = __version__ = "{current_version}"
10 | replace = __version__ = "{new_version}"
11 |
12 | [bumpversion:file:README.md]
13 | search = The current version {current_version}
14 | replace = The current version {new_version}
15 |
16 | [bumpversion:file:docs/main.rst]
17 | search = :Version: {current_version}
18 | replace = :Version: {new_version}
19 |
20 | [bumpversion:file:docs/main.de.rst]
21 | search = :Version: {current_version}
22 | replace = :Version: {new_version}
23 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto eol=lf
2 |
3 | *.bat text eol=crlf
4 | *.config text eol=lf
5 | *.css text eol=lf
6 | *.html text eol=lf
7 | *.js text eol=lf
8 | *.prefs text
9 | *.py text eol=lf
10 | *.rst text eol=lf
11 | *.sh text eol=lf
12 | *.txt text eol=lf
13 | *.po text eol=lf
14 | *.pot text eol=lf
15 | *.styl text eol=lf
16 | *.xml text
17 |
18 | *.gif binary
19 | *.ico binary
20 | *.jpg binary
21 | *.lnk binary
22 | *.mo binary
23 | *.png binary
24 | *.exe binary
25 | *.so binary
26 | *.ppt binary
27 | *.pdf binary
28 | *.gz binary
29 | *.zip binary
30 |
--------------------------------------------------------------------------------
/.github/workflows/publish_on_pypi.yml:
--------------------------------------------------------------------------------
1 | name: Publish DBUtils on PyPI
2 |
3 | on:
4 | push:
5 | tags:
6 | - 'Release-*'
7 |
8 | jobs:
9 | publish:
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@v4
14 |
15 | - name: Set up Python
16 | uses: actions/setup-python@v5
17 | with:
18 | python-version: "3.12"
19 |
20 | - name: Install build tool
21 | run: python -m pip install build --user
22 |
23 | - name: Build source tarball and wheel
24 | run: python -m build
25 |
26 | - name: Publish distribution to PyPI
27 | uses: pypa/gh-action-pypi-publish@release/v1
28 | with:
29 | user: __token__
30 | password: ${{ secrets.PYPI_TOKEN }}
31 |
--------------------------------------------------------------------------------
/.github/workflows/test_with_tox.yml:
--------------------------------------------------------------------------------
1 | name: Test DBUtils using tox
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | test:
7 | runs-on: ubuntu-latest
8 | strategy:
9 | matrix:
10 | python: ['3.9', '3.10', '3.11', '3.12', '3.13']
11 |
12 | steps:
13 | - uses: actions/checkout@v4
14 |
15 | - name: Setup Python ${{ matrix.python }}
16 |         uses: actions/setup-python@v5
17 | with:
18 | python-version: ${{ matrix.python }}
19 |
20 | - run: pip install tox
21 |
22 | - run: tox -e py
23 |
24 |       - if: matrix.python == '3.12'
25 | run: TOXENV=ruff,manifest,docs,spell tox
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *~
2 | *.bak
3 | *.default
4 | *.egg-info
5 | *.log
6 | *.patch
7 | *.pid
8 | *.pstats
9 | *.pyc
10 | *.pyo
11 | *.swp
12 |
13 | build
14 | dist
15 | local
16 |
17 | .idea
18 | .tox
19 | .pytest_cache
20 |
21 | test.bat
22 |
23 | MANIFEST
24 |
25 | Thumbs.db
26 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2025 Christoph Zwerschke
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include MANIFEST.in
2 |
3 | include LICENSE
4 | include README.md
5 |
6 | include .bumpversion.cfg
7 | include pyproject.toml
8 | include tox.ini
9 |
10 | recursive-include tests *.py
11 |
12 | recursive-include docs *.rst make.py *.html *.css *.png
13 | prune docs/_build
14 |
15 | global-exclude *.py[co] __pycache__
16 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | DBUtils
2 | =======
3 |
4 | DBUtils is a suite of tools providing solid, persistent and pooled connections
5 | to a database that can be used in all kinds of multi-threaded environments.
6 |
7 | The suite supports DB-API 2 compliant database interfaces
8 | and the classic PyGreSQL interface.
9 |
10 | The current version 3.1.1 of DBUtils supports Python versions 3.7 to 3.13.
11 |
12 | **Please have a look at the [changelog](https://webwareforpython.github.io/DBUtils/changelog.html), because there were some breaking changes in version 2.0.**
13 |
14 | The DBUtils home page can be found at https://webwareforpython.github.io/DBUtils/
15 |
--------------------------------------------------------------------------------
/dbutils/__init__.py:
--------------------------------------------------------------------------------
"""The DBUtils main package."""

__all__ = ["__version__"]

# Version of the DBUtils distribution
# (kept in sync with pyproject.toml etc. via .bumpversion.cfg)
__version__ = "3.1.1"
6 |
--------------------------------------------------------------------------------
/dbutils/persistent_db.py:
--------------------------------------------------------------------------------
1 | """PersistentDB - persistent DB-API 2 connections.
2 |
3 | Implements steady, thread-affine persistent connections to a database
4 | based on an arbitrary DB-API 2 compliant database interface module.
5 |
6 | This should result in a speedup for persistent applications such as the
7 | application server of "Webware for Python," without loss of robustness.
8 |
9 | Robustness is provided by using "hardened" SteadyDB connections.
10 | Even if the underlying database is restarted and all connections
11 | are lost, they will be automatically and transparently reopened.
12 | However, since you don't want this to happen in the middle of a database
13 | transaction, you must explicitly start transactions with the begin()
14 | method so that SteadyDB knows that the underlying connection shall not
15 | be replaced and errors passed on until the transaction is completed.
16 |
17 | Measures are taken to make the database connections thread-affine.
18 | This means the same thread always uses the same cached connection,
19 | and no other thread will use it. So even if the underlying DB-API module
20 | is not thread-safe at the connection level this will be no problem here.
21 |
22 | For best performance, the application server should keep threads persistent.
23 | For this, you have to set MinServerThreads = MaxServerThreads in Webware.
24 |
25 | For the Python DB-API 2 specification, see:
26 | https://www.python.org/dev/peps/pep-0249/
27 | For information on Webware for Python, see:
28 | https://webwareforpython.github.io/w4py/
29 |
30 |
31 | Usage:
32 |
33 | First you need to set up a generator for your kind of database connections
34 | by creating an instance of PersistentDB, passing the following parameters:
35 |
36 | creator: either an arbitrary function returning new DB-API 2
37 | connection objects or a DB-API 2 compliant database module
38 | maxusage: the maximum number of reuses of a single connection
39 | (the default of 0 or None means unlimited reuse)
40 | Whenever the limit is reached, the connection will be reset.
41 | setsession: an optional list of SQL commands that may serve to
42 | prepare the session, e.g. ["set datestyle to german", ...].
43 | failures: an optional exception class or a tuple of exception classes
44 | for which the connection failover mechanism shall be applied,
45 | if the default (OperationalError, InterfaceError, InternalError)
46 | is not adequate for the used database module
47 | ping: an optional flag controlling when connections are checked
48 | with the ping() method if such a method is available
49 | (0 = None = never, 1 = default = whenever it is requested,
50 | 2 = when a cursor is created, 4 = when a query is executed,
51 | 7 = always, and all other bit combinations of these values)
52 | closeable: if this is set to true, then closing connections will
53 | be allowed, but by default this will be silently ignored
54 | threadlocal: an optional class for representing thread-local data
55 | that will be used instead of our Python implementation
56 | (threading.local is faster, but cannot be used in all cases)
57 |
58 | The creator function or the connect function of the DB-API 2 compliant
59 | database module specified as the creator will receive any additional
60 | parameters such as the host, database, user, password etc. You may
61 | choose some or all of these parameters in your own creator function,
62 | allowing for sophisticated failover and load-balancing mechanisms.
63 |
64 | For instance, if you are using pgdb as your DB-API 2 database module and want
65 | every connection to your local database 'mydb' to be reused 1000 times:
66 |
67 | import pgdb # import used DB-API 2 module
68 | from dbutils.persistent_db import PersistentDB
69 | persist = PersistentDB(pgdb, 1000, database='mydb')
70 |
71 | Once you have set up the generator with these parameters, you can
72 | request database connections of that kind:
73 |
74 | db = persist.connection()
75 |
76 | You can use these connections just as if they were ordinary
77 | DB-API 2 connections. Actually what you get is the hardened
78 | SteadyDB version of the underlying DB-API 2 connection.
79 |
80 | Closing a persistent connection with db.close() will be silently
81 | ignored since it would be reopened at the next usage anyway and
82 | contrary to the intent of having persistent connections. Instead,
83 | the connection will be automatically closed when the thread dies.
84 | You can change this behavior by setting the closeable parameter.
85 |
86 | Note that you need to explicitly start transactions by calling the
87 | begin() method. This ensures that the transparent reopening will be
88 | suspended until the end of the transaction, and that the connection
89 | will be rolled back before being reused by the same thread.
90 |
91 | By setting the threadlocal parameter to threading.local, getting
92 | connections may become a bit faster, but this may not work in all
93 | environments (for instance, mod_wsgi is known to cause problems
94 | since it clears the threading.local data between requests).
95 |
96 |
97 | Ideas for improvement:
98 |
99 | * Add a thread for monitoring, restarting (or closing) bad or expired
100 | connections (similar to DBConnectionPool/ResourcePool by Warren Smith).
101 | * Optionally log usage, bad connections and exceeding of limits.
102 |
103 |
104 | Copyright, credits and license:
105 |
106 | * Contributed as supplement for Webware for Python and PyGreSQL
107 | by Christoph Zwerschke in September 2005
108 | * Based on an idea presented on the Webware developer mailing list
109 | by Geoffrey Talvola in July 2005
110 |
111 | Licensed under the MIT license.
112 | """
113 |
114 | from . import __version__
115 | from .steady_db import connect
116 |
117 | try:
118 | # Prefer the pure Python version of threading.local.
119 | # The C implementation turned out to be problematic with mod_wsgi,
120 | # since it does not keep the thread-local data between requests.
121 | from _threading_local import local
122 | except ImportError:
123 | # Fall back to the default version of threading.local.
124 | from threading import local
125 |
126 | __all__ = ['PersistentDB', 'PersistentDBError', 'NotSupportedError']
127 |
128 |
class PersistentDBError(Exception):
    """General PersistentDB error.

    Base class for all errors raised by the persistent_db module.
    """
131 |
132 |
class NotSupportedError(PersistentDBError):
    """DB-API module not supported by PersistentDB.

    Raised when the used database module is not thread-safe.
    """
135 |
136 |
class PersistentDB:
    """Generator for persistent DB-API 2 connections.

    After you have created the connection generator, you can use
    connection() to get thread-affine, steady DB-API 2 connections.
    """

    version = __version__

    def __init__(
            self, creator,
            maxusage=None, setsession=None, failures=None, ping=1,
            closeable=False, threadlocal=None, *args, **kwargs):
        """Set up the persistent DB-API 2 connection generator.

        creator: either an arbitrary function returning new DB-API 2
            connection objects or a DB-API 2 compliant database module
        maxusage: maximum number of reuses of a single connection
            (number of database operations, 0 or None means unlimited)
            Whenever the limit is reached, the connection will be reset.
        setsession: optional list of SQL commands that may serve to prepare
            the session, e.g. ["set datestyle to ...", "set time zone ..."]
        failures: an optional exception class or a tuple of exception classes
            for which the connection failover mechanism shall be applied,
            if the default (OperationalError, InterfaceError, InternalError)
            is not adequate for the used database module
        ping: determines when the connection should be checked with ping()
            (0 = None = never, 1 = default = whenever it is requested,
            2 = when a cursor is created, 4 = when a query is executed,
            7 = always, and all other bit combinations of these values)
        closeable: if this is set to true, then closing connections will
            be allowed, but by default this will be silently ignored
        threadlocal: an optional class for representing thread-local data
            that will be used instead of our Python implementation
            (threading.local is faster, but cannot be used in all cases)
        args, kwargs: the parameters that shall be passed to the creator
            function or the connection constructor of the DB-API 2 module

        Raises NotSupportedError if the database module reports a
        thread safety level of 0 (threads may not share the module).
        """
        # Probe the thread safety level declared by the creator or by
        # the database module underlying the creator.
        try:
            threadsafety = creator.threadsafety
        except AttributeError:
            try:
                threadsafety = creator.dbapi.threadsafety
            except AttributeError:
                # No declared level: a module-like object exposing a
                # callable connect() without a threadsafety attribute is
                # considered unsafe (0), while a plain creator function
                # is assumed to be safe for thread-affine use (1).
                connection_factory = getattr(creator, 'connect', None)
                threadsafety = 0 if callable(connection_factory) else 1
        if not threadsafety:
            raise NotSupportedError("Database module is not thread-safe.")
        self._creator = creator
        self._maxusage = maxusage
        self._setsession = setsession
        self._failures = failures
        self._ping = ping
        self._closeable = closeable
        self._args = args
        self._kwargs = kwargs
        # Thread-local storage holding each thread's cached connection
        self.thread = (threadlocal or local)()

    def steady_connection(self):
        """Get a steady, non-persistent DB-API 2 connection."""
        return connect(
            self._creator, self._maxusage, self._setsession,
            self._failures, self._ping, self._closeable,
            *self._args, **self._kwargs)

    def connection(self, shareable=False):  # noqa: ARG002
        """Get a steady, persistent DB-API 2 connection.

        The shareable parameter exists only for compatibility with the
        PooledDB connection method. In reality, persistent connections
        are of course never shared with other threads.
        """
        thread = self.thread
        try:
            conn = thread.connection
        except AttributeError as error:
            # This thread has no cached connection yet, so open one.
            conn = self.steady_connection()
            if not conn.threadsafety():
                raise NotSupportedError(
                    "Database module is not thread-safe.") from error
            thread.connection = conn
        # Check whether the connection is still alive (per the ping flag).
        conn._ping_check()
        return conn

    def dedicated_connection(self):
        """Alias for connection(shareable=False)."""
        return self.connection()
227 |
--------------------------------------------------------------------------------
/dbutils/persistent_pg.py:
--------------------------------------------------------------------------------
1 | """PersistentPg - persistent classic PyGreSQL connections.
2 |
3 | Implements steady, thread-affine persistent connections to a PostgreSQL
4 | database using the classic (not DB-API 2 compliant) PyGreSQL API.
5 |
6 | This should result in a speedup for persistent applications such as the
7 | application server of "Webware for Python," without loss of robustness.
8 |
9 | Robustness is provided by using "hardened" SteadyPg connections.
10 | Even if the underlying database is restarted and all connections
11 | are lost, they will be automatically and transparently reopened.
12 | However, since you don't want this to happen in the middle of a database
13 | transaction, you must explicitly start transactions with the begin()
14 | method so that SteadyPg knows that the underlying connection shall not
15 | be replaced and errors passed on until the transaction is completed.
16 |
17 | Measures are taken to make the database connections thread-affine.
18 | This means the same thread always uses the same cached connection,
19 | and no other thread will use it. So the fact that the classic PyGreSQL
20 | pg module is not thread-safe at the connection level is no problem here.
21 |
22 | For best performance, the application server should keep threads persistent.
23 | For this, you have to set MinServerThreads = MaxServerThreads in Webware.
24 |
25 | For more information on PostgreSQL, see:
26 | https://www.postgresql.org/
27 | For more information on PyGreSQL, see:
28 | http://www.pygresql.org
29 | For more information on Webware for Python, see:
30 | https://webwareforpython.github.io/w4py/
31 |
32 |
33 | Usage:
34 |
35 | First you need to set up a generator for your kind of database connections
36 | by creating an instance of PersistentPg, passing the following parameters:
37 |
38 | maxusage: the maximum number of reuses of a single connection
39 | (the default of 0 or None means unlimited reuse)
40 | When this maximum usage number of the connection is reached,
41 | the connection is automatically reset (closed and reopened).
42 | setsession: An optional list of SQL commands that may serve to
43 | prepare the session, e.g. ["set datestyle to german", ...]
44 | closeable: if this is set to true, then closing connections will
45 | be allowed, but by default this will be silently ignored
46 | threadlocal: an optional class for representing thread-local data
47 | that will be used instead of our Python implementation
48 | (threading.local is faster, but cannot be used in all cases)
49 |
50 | Additionally, you have to pass the parameters for the actual
51 | PostgreSQL connection which are passed via PyGreSQL,
52 | such as the names of the host, database, user, password etc.
53 |
54 | For instance, if you want every connection to your local database 'mydb'
55 | to be reused 1000 times:
56 |
57 | from dbutils.persistent_pg import PersistentPg
58 |     persist = PersistentPg(1000, dbname='mydb')
59 |
60 | Once you have set up the generator with these parameters, you can
61 | request database connections of that kind:
62 |
63 | db = persist.connection()
64 |
65 | You can use these connections just as if they were ordinary
66 | classic PyGreSQL API connections. Actually what you get is the
67 | hardened SteadyPg version of a classic PyGreSQL connection.
68 |
69 | Closing a persistent connection with db.close() will be silently
70 | ignored since it would be reopened at the next usage anyway and
71 | contrary to the intent of having persistent connections. Instead,
72 | the connection will be automatically closed when the thread dies.
73 | You can change this behavior by setting the closeable parameter.
74 |
75 | Note that you need to explicitly start transactions by calling the
76 | begin() method. This ensures that the transparent reopening will be
77 | suspended until the end of the transaction, and that the connection
78 | will be rolled back before being reused in the same thread. To end
79 | transactions, use one of the end(), commit() or rollback() methods.
80 |
81 | By setting the threadlocal parameter to threading.local, getting
82 | connections may become a bit faster, but this may not work in all
83 | environments (for instance, mod_wsgi is known to cause problems
84 | since it clears the threading.local data between requests).
85 |
86 |
87 | Ideas for improvement:
88 |
89 | * Add a thread for monitoring, restarting (or closing) bad or expired
90 | connections (similar to DBConnectionPool/ResourcePool by Warren Smith).
91 | * Optionally log usage, bad connections and exceeding of limits.
92 |
93 |
94 | Copyright, credits and license:
95 |
96 | * Contributed as supplement for Webware for Python and PyGreSQL
97 | by Christoph Zwerschke in September 2005
98 | * Based on an idea presented on the Webware developer mailing list
99 | by Geoffrey Talvola in July 2005
100 |
101 | Licensed under the MIT license.
102 | """
103 |
104 | from . import __version__
105 | from .steady_pg import SteadyPgConnection
106 |
107 | try:
108 | # Prefer the pure Python version of threading.local.
109 | # The C implementation turned out to be problematic with mod_wsgi,
110 | # since it does not keep the thread-local data between requests.
111 | from _threading_local import local
112 | except ImportError:
113 | # Fall back to the default version of threading.local.
114 | from threading import local
115 |
116 | __all__ = ['PersistentPg']
117 |
118 |
class PersistentPg:
    """Generator for persistent classic PyGreSQL connections.

    After you have created the connection generator, you can use
    connection() to get thread-affine, steady PostgreSQL connections.
    """

    version = __version__

    def __init__(
            self, maxusage=None, setsession=None,
            closeable=False, threadlocal=None, *args, **kwargs):
        """Set up the persistent PostgreSQL connection generator.

        maxusage: maximum number of reuses of a single connection
            (0 or None means unlimited reuse)
            When this maximum usage number of the connection is reached,
            the connection is automatically reset (closed and reopened).
        setsession: optional list of SQL commands that may serve to prepare
            the session, e.g. ["set datestyle to ...", "set time zone ..."]
        closeable: if this is set to true, then closing connections will
            be allowed, but by default this will be silently ignored
        threadlocal: an optional class for representing thread-local data
            that will be used instead of our Python implementation
            (threading.local is faster, but cannot be used in all cases)
        args, kwargs: the parameters that shall be used to establish the
            PostgreSQL connections using the classic PyGreSQL pg.DB()
        """
        self._maxusage = maxusage
        self._setsession = setsession
        self._closeable = closeable
        self._args = args
        self._kwargs = kwargs
        # Thread-local storage holding each thread's cached connection
        self.thread = (threadlocal or local)()

    def steady_connection(self):
        """Get a steady, non-persistent PyGreSQL connection."""
        return SteadyPgConnection(
            self._maxusage, self._setsession, self._closeable,
            *self._args, **self._kwargs)

    def connection(self):
        """Get a steady, persistent PyGreSQL connection."""
        thread = self.thread
        try:
            return thread.connection
        except AttributeError:
            # This thread has no cached connection yet, so open one.
            conn = self.steady_connection()
            thread.connection = conn
            return conn
167 |
--------------------------------------------------------------------------------
/dbutils/pooled_db.py:
--------------------------------------------------------------------------------
1 | """PooledDB - pooling for DB-API 2 connections.
2 |
3 | Implements a pool of steady, thread-safe cached connections
4 | to a database which are transparently reused,
5 | using an arbitrary DB-API 2 compliant database interface module.
6 |
7 | This should result in a speedup for persistent applications such as the
8 | application server of "Webware for Python," without loss of robustness.
9 |
10 | Robustness is provided by using "hardened" SteadyDB connections.
11 | Even if the underlying database is restarted and all connections
12 | are lost, they will be automatically and transparently reopened.
13 | However, since you don't want this to happen in the middle of a database
14 | transaction, you must explicitly start transactions with the begin()
15 | method so that SteadyDB knows that the underlying connection shall not
16 | be replaced and errors passed on until the transaction is completed.
17 |
18 | Measures are taken to make the pool of connections thread-safe.
19 | If the underlying DB-API module is thread-safe at the connection level,
20 | the requested connections may be shared with other threads by default,
21 | but you can also request dedicated connections in case you need them.
22 |
23 | For the Python DB-API 2 specification, see:
24 | https://www.python.org/dev/peps/pep-0249/
25 | For information on Webware for Python, see:
26 | https://webwareforpython.github.io/w4py/
27 |
28 |
29 | Usage:
30 |
31 | First you need to set up the database connection pool by creating
32 | an instance of PooledDB, passing the following parameters:
33 |
34 | creator: either an arbitrary function returning new DB-API 2
35 | connection objects or a DB-API 2 compliant database module
36 | mincached: the initial number of idle connections in the pool
37 | (the default of 0 means no connections are made at startup)
38 | maxcached: the maximum number of idle connections in the pool
39 | (the default value of 0 or None means unlimited pool size)
40 | maxshared: maximum number of shared connections allowed
41 | (the default value of 0 or None means all connections are dedicated)
42 | When this maximum number is reached, connections are
43 | shared if they have been requested as shareable.
44 | maxconnections: maximum number of connections generally allowed
45 | (the default value of 0 or None means any number of connections)
46 | blocking: determines behavior when exceeding the maximum
47 | (if this is set to true, block and wait until the number of
48 | connections decreases, but by default an error will be reported)
49 | maxusage: maximum number of reuses of a single connection
50 | (the default of 0 or None means unlimited reuse)
51 | When this maximum usage number of the connection is reached,
52 | the connection is automatically reset (closed and reopened).
53 | setsession: an optional list of SQL commands that may serve to
54 | prepare the session, e.g. ["set datestyle to german", ...]
55 | reset: how connections should be reset when returned to the pool
56 | (False or None to rollback transactions started with begin(),
57 | the default value True always issues a rollback for safety's sake)
58 | failures: an optional exception class or a tuple of exception classes
59 | for which the connection failover mechanism shall be applied,
60 | if the default (OperationalError, InterfaceError, InternalError)
61 | is not adequate for the used database module
62 | ping: an optional flag controlling when connections are checked
63 | with the ping() method if such a method is available
64 | (0 = None = never, 1 = default = whenever fetched from the pool,
65 | 2 = when a cursor is created, 4 = when a query is executed,
66 | 7 = always, and all other bit combinations of these values)
67 |
68 | The creator function or the connect function of the DB-API 2 compliant
69 | database module specified as the creator will receive any additional
70 | parameters such as the host, database, user, password etc. You may
71 | choose some or all of these parameters in your own creator function,
72 | allowing for sophisticated failover and load-balancing mechanisms.
73 |
74 | For instance, if you are using pgdb as your DB-API 2 database module and
75 | want a pool of at least five connections to your local database 'mydb':
76 |
77 | import pgdb # import used DB-API 2 module
78 | from dbutils.pooled_db import PooledDB
79 | pool = PooledDB(pgdb, 5, database='mydb')
80 |
81 | Once you have set up the connection pool you can request
82 | database connections from that pool:
83 |
84 | db = pool.connection()
85 |
86 | You can use these connections just as if they were ordinary
87 | DB-API 2 connections. Actually what you get is the hardened
88 | SteadyDB version of the underlying DB-API 2 connection.
89 |
90 | Please note that the connection may be shared with other threads
91 | by default if you set a non-zero maxshared parameter and the DB-API 2
92 | module allows this. If you want to have a dedicated connection, use:
93 |
94 | db = pool.connection(shareable=False)
95 |
96 | You can also use this to get a dedicated connection:
97 |
98 | db = pool.dedicated_connection()
99 |
100 | If you don't need it anymore, you should immediately return it to the
101 | pool with db.close(). You can get another connection in the same way.
102 |
103 | Warning: In a threaded environment, never do the following:
104 |
105 | pool.connection().cursor().execute(...)
106 |
107 | This would release the connection too early for reuse which may be
108 | fatal if the connections are not thread-safe. Make sure that the
109 | connection object stays alive as long as you are using it, like that:
110 |
111 | db = pool.connection()
112 | cur = db.cursor()
113 | cur.execute(...)
114 | res = cur.fetchone()
115 | cur.close() # or del cur
116 | db.close() # or del db
117 |
118 | You can also use context managers for simpler code:
119 |
120 | with pool.connection() as db:
121 |         with db.cursor() as cur:
122 | cur.execute(...)
123 | res = cur.fetchone()
124 |
125 | Note that you need to explicitly start transactions by calling the
126 | begin() method. This ensures that the connection will not be shared
127 | with other threads, that the transparent reopening will be suspended
128 | until the end of the transaction, and that the connection will be rolled
129 | back before being given back to the connection pool.
130 |
131 |
132 | Ideas for improvement:
133 |
134 | * Add a thread for monitoring, restarting (or closing) bad or expired
135 | connections (similar to DBConnectionPool/ResourcePool by Warren Smith).
136 | * Optionally log usage, bad connections and exceeding of limits.
137 |
138 |
139 | Copyright, credits and license:
140 |
141 | * Contributed as supplement for Webware for Python and PyGreSQL
142 | by Christoph Zwerschke in September 2005
143 | * Based on the code of DBPool, contributed to Webware for Python
144 | by Dan Green in December 2000
145 |
146 | Licensed under the MIT license.
147 | """
148 |
149 | from contextlib import suppress
150 | from functools import total_ordering
151 | from threading import Condition
152 |
153 | from . import __version__
154 | from .steady_db import connect
155 |
# Names exported for "from dbutils.pooled_db import *":
__all__ = [
    'PooledDB', 'PooledDedicatedDBConnection',
    'SharedDBConnection', 'PooledSharedDBConnection',
    'PooledDBError', 'InvalidConnectionError',
    'NotSupportedError', 'TooManyConnectionsError',
]
162 |
163 |
class PooledDBError(Exception):
    """General PooledDB error.

    Base class for all errors raised by this connection pool module.
    """
166 |
167 |
class InvalidConnectionError(PooledDBError):
    """Database connection is invalid.

    Raised when a pooled connection proxy is used after it
    has already been returned to the pool with close().
    """
170 |
171 |
class NotSupportedError(PooledDBError):
    """DB-API module not supported by PooledDB.

    Raised when the database module does not provide the level of
    thread-safety required for pooling connections.
    """
174 |
175 |
class TooManyConnectionsError(PooledDBError):
    """Too many database connections were opened.

    Raised in non-blocking mode when the maxconnections limit
    would be exceeded.
    """
178 |
179 |
# Deprecated alias names for the error classes above,
# kept for backward compatibility:
InvalidConnection = InvalidConnectionError
TooManyConnections = TooManyConnectionsError
183 |
184 |
class PooledDB:
    """Pool for DB-API 2 connections.

    After you have created the connection pool, you can use
    connection() to get pooled, steady DB-API 2 connections.
    """

    version = __version__

    def __init__(
            self, creator, mincached=0, maxcached=0,
            maxshared=0, maxconnections=0, blocking=False,
            maxusage=None, setsession=None, reset=True,
            failures=None, ping=1,
            *args, **kwargs):
        """Set up the DB-API 2 connection pool.

        creator: either an arbitrary function returning new DB-API 2
            connection objects or a DB-API 2 compliant database module
        mincached: initial number of idle connections in the pool
            (0 means no connections are made at startup)
        maxcached: maximum number of idle connections in the pool
            (0 or None means unlimited pool size)
        maxshared: maximum number of shared connections
            (0 or None means all connections are dedicated)
            When this maximum number is reached, connections are
            shared if they have been requested as shareable.
        maxconnections: maximum number of connections generally allowed
            (0 or None means an arbitrary number of connections)
        blocking: determines behavior when exceeding the maximum
            (if this is set to true, block and wait until the number of
            connections decreases, otherwise an error will be reported)
        maxusage: maximum number of reuses of a single connection
            (0 or None means unlimited reuse)
            When this maximum usage number of the connection is reached,
            the connection is automatically reset (closed and reopened).
        setsession: optional list of SQL commands that may serve to prepare
            the session, e.g. ["set datestyle to ...", "set time zone ..."]
        reset: how connections should be reset when returned to the pool
            (False or None to rollback transactions started with begin(),
            True to always issue a rollback for safety's sake)
        failures: an optional exception class or a tuple of exception classes
            for which the connection failover mechanism shall be applied,
            if the default (OperationalError, InterfaceError, InternalError)
            is not adequate for the used database module
        ping: determines when the connection should be checked with ping()
            (0 = None = never, 1 = default = whenever fetched from the pool,
            2 = when a cursor is created, 4 = when a query is executed,
            7 = always, and all other bit combinations of these values)
        args, kwargs: the parameters that shall be passed to the creator
            function or the connection constructor of the DB-API 2 module

        Raises NotSupportedError if the creator does not provide
        any level of thread-safety suitable for pooling.
        """
        # Determine the thread-safety level of the database module:
        # it may be declared on the creator itself, on a wrapped
        # DB-API 2 module, or be missing altogether.
        try:
            threadsafety = creator.threadsafety
        except AttributeError:
            try:
                threadsafety = creator.dbapi.threadsafety
            except AttributeError:
                try:
                    if not callable(creator.connect):
                        raise AttributeError
                except AttributeError:
                    # creator is a plain connection factory function,
                    # so assume thread-safety at the module level
                    threadsafety = 1
                else:
                    # creator looks like a DB-API 2 module but does not
                    # declare its thread-safety level, so reject it below
                    threadsafety = 0
        if not threadsafety:
            raise NotSupportedError("Database module is not thread-safe.")
        self._creator = creator
        self._args, self._kwargs = args, kwargs
        self._blocking = blocking
        self._maxusage = maxusage
        self._setsession = setsession
        self._reset = reset
        self._failures = failures
        self._ping = ping
        # Normalize the limits so that None behaves like 0 (no limit):
        if mincached is None:
            mincached = 0
        if maxcached is None:
            maxcached = 0
        if maxconnections is None:
            maxconnections = 0
        if maxcached:
            # the idle cache must be able to hold the initial connections
            maxcached = max(maxcached, mincached)
            self._maxcached = maxcached
        else:
            self._maxcached = 0
        # Connections can only be shared when the module is thread-safe
        # at the connection level (threadsafety > 1):
        if threadsafety > 1 and maxshared:
            self._maxshared = maxshared
            self._shared_cache = []  # the cache for shared connections
        else:
            self._maxshared = 0
        if maxconnections:
            # the total limit must accommodate both cache limits
            maxconnections = max(maxconnections, maxcached)
            maxconnections = max(maxconnections, maxshared)
            self._maxconnections = maxconnections
        else:
            self._maxconnections = 0
        self._idle_cache = []  # the actual pool of idle connections
        self._lock = Condition()
        self._connections = 0
        # Establish an initial number of idle database connections:
        idle = [self.dedicated_connection() for _ in range(mincached)]
        while idle:
            idle.pop().close()

    def steady_connection(self):
        """Get a steady, unpooled DB-API 2 connection."""
        return connect(
            self._creator, self._maxusage, self._setsession,
            self._failures, self._ping, True, *self._args, **self._kwargs)

    def connection(self, shareable=True):
        """Get a steady, cached DB-API 2 connection from the pool.

        If shareable is set and the underlying DB-API 2 allows it,
        then the connection may be shared with other threads.
        """
        if shareable and self._maxshared:
            with self._lock:
                while (not self._shared_cache and self._maxconnections
                        and self._connections >= self._maxconnections):
                    self._wait_lock()
                if len(self._shared_cache) < self._maxshared:
                    # shared cache is not full, get a dedicated connection
                    try:  # first try to get it from the idle cache
                        con = self._idle_cache.pop(0)
                    except IndexError:  # else get a fresh connection
                        con = self.steady_connection()
                    else:
                        con._ping_check()  # check this connection
                    con = SharedDBConnection(con)
                    self._connections += 1
                else:  # shared cache full or no more connections allowed
                    self._shared_cache.sort()  # least shared connection first
                    con = self._shared_cache.pop(0)  # get it
                    while con.con._transaction:
                        # do not share connections which are in a transaction
                        self._shared_cache.insert(0, con)
                        self._wait_lock()
                        self._shared_cache.sort()
                        con = self._shared_cache.pop(0)
                    con.con._ping_check()  # check the underlying connection
                    con.share()  # increase share of this connection
                # put the connection (back) into the shared cache
                self._shared_cache.append(con)
                self._lock.notify()
            con = PooledSharedDBConnection(self, con)
        else:  # try to get a dedicated connection
            with self._lock:
                while (self._maxconnections
                        and self._connections >= self._maxconnections):
                    self._wait_lock()
                # connection limit not reached, get a dedicated connection
                try:  # first try to get it from the idle cache
                    con = self._idle_cache.pop(0)
                except IndexError:  # else get a fresh connection
                    con = self.steady_connection()
                else:
                    con._ping_check()  # check connection
                con = PooledDedicatedDBConnection(self, con)
                self._connections += 1
        return con

    def dedicated_connection(self):
        """Alias for connection(shareable=False)."""
        return self.connection(False)

    def unshare(self, con):
        """Decrease the share of a connection in the shared cache."""
        with self._lock:
            con.unshare()
            shared = con.shared
            if not shared:  # connection is idle
                # try to remove it from shared cache
                with suppress(ValueError):  # if pool has already been closed
                    self._shared_cache.remove(con)
        if not shared:  # connection has become idle,
            self.cache(con.con)  # so add it to the idle cache

    def cache(self, con):
        """Put a dedicated connection back into the idle cache."""
        with self._lock:
            if not self._maxcached or len(self._idle_cache) < self._maxcached:
                con._reset(force=self._reset)  # rollback possible transaction
                # the idle cache is not full, so put it there
                self._idle_cache.append(con)  # append it to the idle cache
            else:  # if the idle cache is already full,
                con.close()  # then close the connection
            self._connections -= 1
            self._lock.notify()

    def close(self):
        """Close all connections in the pool."""
        with self._lock:
            while self._idle_cache:  # close all idle connections
                con = self._idle_cache.pop(0)
                with suppress(Exception):
                    con.close()
            if self._maxshared:  # close all shared connections
                while self._shared_cache:
                    con = self._shared_cache.pop(0).con
                    with suppress(Exception):
                        con.close()
                    self._connections -= 1
            self._lock.notify_all()

    def __del__(self):
        """Delete the pool."""
        # builtins (including Exceptions) might not exist anymore
        try:  # noqa: SIM105
            self.close()
        except:  # noqa: E722, S110
            pass

    def _wait_lock(self):
        """Wait until notified or report an error."""
        if not self._blocking:
            raise TooManyConnectionsError
        self._lock.wait()
404 |
405 |
406 | # Auxiliary classes for pooled connections
407 |
class PooledDedicatedDBConnection:
    """Auxiliary proxy class for pooled dedicated connections.

    Closing the proxy does not really close the database connection;
    it merely hands the connection back to the originating pool.
    """

    def __init__(self, pool, con):
        """Create a pooled dedicated connection.

        pool: the corresponding PooledDB instance
        con: the underlying SteadyDB connection
        """
        # Basic initialization first, so that the finalizer
        # still works even if the check below raises an error:
        self._con = None
        if not con.threadsafety():
            raise NotSupportedError("Database module is not thread-safe.")
        self._pool = pool
        self._con = con

    def close(self):
        """Give the underlying connection back to the pool for reuse."""
        if not self._con:
            return  # already handed back
        self._pool.cache(self._con)
        self._con = None

    def __getattr__(self, name):
        """Delegate attribute access to the underlying connection."""
        if not self._con:
            raise InvalidConnectionError
        return getattr(self._con, name)

    def __del__(self):
        """Return the connection to the pool when the proxy is collected."""
        # builtins (including Exceptions) might not exist anymore
        try:  # noqa: SIM105
            self.close()
        except:  # noqa: E722, S110
            pass

    def __enter__(self):
        """Enter a runtime context for the connection."""
        return self

    def __exit__(self, *exc):
        """Exit a runtime context, returning the connection to the pool."""
        self.close()
454 |
455 |
@total_ordering
class SharedDBConnection:
    """Auxiliary class for shared connections.

    Keeps a counter of how many threads currently share the wrapped
    SteadyDB connection; instances are orderable so that the best
    candidate for further sharing sorts first.
    """

    def __init__(self, con):
        """Create a shared connection with an initial share count of 1.

        con: the underlying SteadyDB connection
        """
        self.con = con
        self.shared = 1

    def __lt__(self, other):
        """Check whether this connection should come before the other one.

        A connection outside a transaction sorts before one inside a
        transaction; among equals, the less shared one comes first.
        """
        if self.con._transaction != other.con._transaction:
            return not self.con._transaction
        return self.shared < other.shared

    def __eq__(self, other):
        """Check whether this connection ranks the same as the other one."""
        return (self.shared == other.shared
                and self.con._transaction == other.con._transaction)

    def share(self):
        """Increase the share counter of this connection by one."""
        self.shared += 1

    def unshare(self):
        """Decrease the share counter of this connection by one."""
        self.shared -= 1
486 |
487 |
class PooledSharedDBConnection:
    """Auxiliary proxy class for pooled shared connections.

    Closing the proxy does not really close the database connection;
    it only decreases the share counter via the pool, which may then
    move the connection back to the idle cache.
    """

    def __init__(self, pool, shared_con):
        """Create a pooled shared connection.

        pool: the corresponding PooledDB instance
        shared_con: the underlying SharedDBConnection
        """
        # Basic initialization first, so that the finalizer
        # still works even if the check below raises an error:
        self._con = None
        con = shared_con.con
        if not con.threadsafety() > 1:
            raise NotSupportedError("Database connection is not thread-safe.")
        self._pool = pool
        self._shared_con = shared_con
        self._con = con

    def close(self):
        """Unshare the connection and possibly return it to the pool."""
        if not self._con:
            return  # already unshared
        self._pool.unshare(self._shared_con)
        self._shared_con = self._con = None

    def __getattr__(self, name):
        """Delegate attribute access to the underlying connection."""
        if not self._con:
            raise InvalidConnectionError
        return getattr(self._con, name)

    def __del__(self):
        """Unshare the connection when the proxy is garbage collected."""
        # builtins (including Exceptions) might not exist anymore
        try:  # noqa: SIM105
            self.close()
        except:  # noqa: E722, S110
            pass

    def __enter__(self):
        """Enter a runtime context for the connection."""
        return self

    def __exit__(self, *exc):
        """Exit a runtime context, unsharing the connection."""
        self.close()
536 |
--------------------------------------------------------------------------------
/dbutils/pooled_pg.py:
--------------------------------------------------------------------------------
1 | """PooledPg - pooling for classic PyGreSQL connections.
2 |
3 | Implements a pool of steady, thread-safe cached connections
4 | to a PostgreSQL database which are transparently reused,
5 | using the classic (not DB-API 2 compliant) PyGreSQL API.
6 |
7 | This should result in a speedup for persistent applications such as the
8 | application server of "Webware for Python," without loss of robustness.
9 |
10 | Robustness is provided by using "hardened" SteadyPg connections.
11 | Even if the underlying database is restarted and all connections
12 | are lost, they will be automatically and transparently reopened.
13 | However, since you don't want this to happen in the middle of a database
14 | transaction, you must explicitly start transactions with the begin()
15 | method so that SteadyPg knows that the underlying connection shall not
16 | be replaced and errors passed on until the transaction is completed.
17 |
18 | Measures are taken to make the pool of connections thread-safe
19 | regardless of the fact that the classic PyGreSQL pg module itself
20 | is not thread-safe at the connection level.
21 |
22 | For more information on PostgreSQL, see:
23 | https://www.postgresql.org/
24 | For more information on PyGreSQL, see:
25 | http://www.pygresql.org
26 | For more information on Webware for Python, see:
27 | https://webwareforpython.github.io/w4py/
28 |
29 |
30 | Usage:
31 |
32 | First you need to set up the database connection pool by creating
33 | an instance of PooledPg, passing the following parameters:
34 |
35 | mincached: the initial number of connections in the pool
36 | (the default of 0 means no connections are made at startup)
37 | maxcached: the maximum number of connections in the pool
38 | (the default value of 0 or None means unlimited pool size)
39 | maxconnections: maximum number of connections generally allowed
40 | (the default value of 0 or None means any number of connections)
41 | blocking: determines behavior when exceeding the maximum
42 | (if this is set to true, block and wait until the number of
43 | connections decreases, but by default an error will be reported)
44 | maxusage: maximum number of reuses of a single connection
45 | (the default of 0 or None means unlimited reuse)
46 | When this maximum usage number of the connection is reached,
47 | the connection is automatically reset (closed and reopened).
48 | setsession: an optional list of SQL commands that may serve to
49 | prepare the session, e.g. ["set datestyle to german", ...]
50 |
51 | Additionally, you have to pass the parameters for the actual
52 | PostgreSQL connection which are passed via PyGreSQL,
53 | such as the names of the host, database, user, password etc.
54 |
55 | For instance, if you want a pool of at least five connections
56 | to your local database 'mydb':
57 |
58 | from dbutils.pooled_pg import PooledPg
59 | pool = PooledPg(5, dbname='mydb')
60 |
61 | Once you have set up the connection pool you can request
62 | database connections from that pool:
63 |
64 | db = pool.connection()
65 |
66 | You can use these connections just as if they were ordinary
67 | classic PyGreSQL API connections. Actually what you get is a
68 | proxy class for the hardened SteadyPg version of the connection.
69 |
70 | The connection will not be shared with other threads. If you don't need
71 | it anymore, you should immediately return it to the pool with db.close().
72 | You can get another connection in the same way or with db.reopen().
73 |
74 | Warning: In a threaded environment, never do the following:
75 |
76 | res = pool.connection().query(...).getresult()
77 |
78 | This would release the connection too early for reuse which may be
79 | fatal because the connections are not thread-safe. Make sure that the
80 | connection object stays alive as long as you are using it, like that:
81 |
82 | db = pool.connection()
83 | res = db.query(...).getresult()
84 | db.close() # or del db
85 |
86 | You can also use a context manager for simpler code:
87 |
88 | with pool.connection() as db:
89 | res = db.query(...).getresult()
90 |
91 | Note that you need to explicitly start transactions by calling the
92 | begin() method. This ensures that the transparent reopening will be
93 | suspended until the end of the transaction, and that the connection will
94 | be rolled back before being given back to the connection pool. To end
95 | transactions, use one of the end(), commit() or rollback() methods.
96 |
97 |
98 | Ideas for improvement:
99 |
100 | * Add a thread for monitoring, restarting (or closing) bad or expired
101 | connections (similar to DBConnectionPool/ResourcePool by Warren Smith).
102 | * Optionally log usage, bad connections and exceeding of limits.
103 |
104 |
105 | Copyright, credits and license:
106 |
107 | * Contributed as supplement for Webware for Python and PyGreSQL
108 | by Christoph Zwerschke in September 2005
109 | * Based on the code of DBPool, contributed to Webware for Python
110 | by Dan Green in December 2000
111 |
112 | Licensed under the MIT license.
113 | """
114 |
115 | from contextlib import suppress
116 | from queue import Empty, Full, Queue
117 |
118 | from . import __version__
119 | from .steady_pg import SteadyPgConnection
120 |
# Names exported for "from dbutils.pooled_pg import *":
__all__ = [
    'PooledPg', 'PooledPgConnection',
    'PooledPgError', 'InvalidConnectionError', 'TooManyConnectionsError',
    'RESET_ALWAYS_ROLLBACK', 'RESET_COMPLETELY',
]

# Constants for the "reset" parameter of PooledPg:
RESET_ALWAYS_ROLLBACK = 1  # always issue a rollback when caching
RESET_COMPLETELY = 2  # completely reset the connection when caching
130 |
131 |
class PooledPgError(Exception):
    """General PooledPg error.

    Base class for all errors raised by this connection pool module.
    """
134 |
135 |
class InvalidConnectionError(PooledPgError):
    """Database connection is invalid.

    Raised when a pooled connection proxy is used after it
    has already been returned to the pool with close().
    """
138 |
139 |
class TooManyConnectionsError(PooledPgError):
    """Too many database connections were opened.

    Raised in non-blocking mode when the maxconnections limit
    would be exceeded.
    """
142 |
143 |
# Deprecated alias names for the error classes above,
# kept for backward compatibility:
InvalidConnection = InvalidConnectionError
TooManyConnections = TooManyConnectionsError
147 |
148 |
class PooledPg:
    """Pool for classic PyGreSQL connections.

    After you have created the connection pool, you can use
    connection() to get pooled, steady PostgreSQL connections.
    """

    version = __version__

    def __init__(
            self, mincached=0, maxcached=0,
            maxconnections=0, blocking=False,
            maxusage=None, setsession=None, reset=None,
            *args, **kwargs):
        """Set up the PostgreSQL connection pool.

        mincached: initial number of connections in the pool
            (0 means no connections are made at startup)
        maxcached: maximum number of connections in the pool
            (0 or None means unlimited pool size)
        maxconnections: maximum number of connections generally allowed
            (0 or None means an arbitrary number of connections)
        blocking: determines behavior when exceeding the maximum
            (if this is set to true, block and wait until the number of
            connections decreases, otherwise an error will be reported)
        maxusage: maximum number of reuses of a single connection
            (0 or None means unlimited reuse)
            When this maximum usage number of the connection is reached,
            the connection is automatically reset (closed and reopened).
        setsession: optional list of SQL commands that may serve to prepare
            the session, e.g. ["set datestyle to ...", "set time zone ..."]
        reset: how connections should be reset when returned to the pool
            (0 or None to rollback transactions started with begin(),
            1 to always issue a rollback, 2 for a complete reset)
        args, kwargs: the parameters that shall be used to establish
            the PostgreSQL connections using the classic PyGreSQL pg.DB()
        """
        self._args, self._kwargs = args, kwargs
        self._maxusage = maxusage
        self._setsession = setsession
        self._reset = reset or 0
        # Normalize the limits so that None behaves like 0 (no limit):
        if mincached is None:
            mincached = 0
        if maxcached is None:
            maxcached = 0
        if maxconnections is None:
            maxconnections = 0
        if maxcached and maxcached < mincached:
            # the pool must be able to hold the initial connections
            maxcached = mincached
        if maxconnections:
            maxconnections = max(maxconnections, maxcached)
            # Create semaphore for number of allowed connections generally:
            from threading import Semaphore
            self._connections = Semaphore(maxconnections)
            self._blocking = blocking
        else:
            self._connections = None
        self._cache = Queue(maxcached)  # the actual connection pool
        # Establish an initial number of database connections:
        idle = [self.connection() for _ in range(mincached)]
        while idle:
            idle.pop().close()

    def steady_connection(self):
        """Get a steady, unpooled PostgreSQL connection."""
        return SteadyPgConnection(self._maxusage, self._setsession, True,
                                  *self._args, **self._kwargs)

    def connection(self):
        """Get a steady, cached PostgreSQL connection from the pool.

        Raises TooManyConnectionsError if the connection limit is
        reached and the pool is not configured to block.
        """
        if self._connections and not self._connections.acquire(self._blocking):
            raise TooManyConnectionsError
        try:
            con = self._cache.get_nowait()
        except Empty:
            con = self.steady_connection()
        return PooledPgConnection(self, con)

    def cache(self, con):
        """Put a connection back into the pool cache."""
        try:
            if self._reset == RESET_COMPLETELY:
                con.reset()  # reset the connection completely
            elif self._reset == RESET_ALWAYS_ROLLBACK or con._transaction:
                with suppress(Exception):
                    con.rollback()  # rollback a possible transaction
            self._cache.put_nowait(con)  # and then put it back into the cache
        except Full:
            con.close()
        if self._connections:
            self._connections.release()

    def close(self):
        """Close all connections in the pool."""
        while True:
            try:
                con = self._cache.get_nowait()
                with suppress(Exception):
                    con.close()
                if self._connections:
                    self._connections.release()
            except Empty:
                break

    def __del__(self):
        """Delete the pool."""
        # builtins (including Exceptions) might not exist anymore
        try:  # noqa: SIM105
            self.close()
        except:  # noqa: E722, S110
            pass
260 |
261 |
262 | # Auxiliary class for pooled connections
263 |
class PooledPgConnection:
    """Proxy class for pooled PostgreSQL connections.

    Closing the proxy does not really close the database connection;
    it merely hands the underlying connection back to the pool.
    """

    def __init__(self, pool, con):
        """Create a pooled connection.

        pool: the corresponding PooledPg instance
        con: the underlying SteadyPg connection
        """
        self._pool = pool
        self._con = con

    def close(self):
        """Give the underlying connection back to the pool for reuse."""
        if not self._con:
            return  # already handed back
        self._pool.cache(self._con)
        self._con = None

    def reopen(self):
        """Reopen the pooled connection.

        If the underlying connection is already back in the pool,
        fetch another connection from the pool instead.
        """
        if not self._con:
            self._con = self._pool.connection()
        else:
            self._con.reopen()

    def __getattr__(self, name):
        """Delegate attribute access to the underlying connection."""
        if not self._con:
            raise InvalidConnectionError
        return getattr(self._con, name)

    def __del__(self):
        """Return the connection to the pool when the proxy is collected."""
        # builtins (including Exceptions) might not exist anymore
        try:  # noqa: SIM105
            self.close()
        except:  # noqa: E722, S110
            pass

    def __enter__(self):
        """Enter a runtime context for the connection."""
        return self

    def __exit__(self, *exc):
        """Exit a runtime context, returning the connection to the pool."""
        self.close()
315 |
--------------------------------------------------------------------------------
/dbutils/simple_pooled_db.py:
--------------------------------------------------------------------------------
1 | """SimplePooledDB - a very simple DB-API 2 database connection pool.
2 |
3 | Implements a pool of threadsafe cached DB-API 2 connections
4 | to a database which are transparently reused.
5 |
6 | This should result in a speedup for persistent applications
7 | such as the "Webware for Python" AppServer.
8 |
9 | For more information on the DB-API 2, see:
10 | https://www.python.org/dev/peps/pep-0249/
11 | For more information on Webware for Python, see:
12 | https://webwareforpython.github.io/w4py/
13 |
14 | Measures are taken to make the pool of connections threadsafe
15 | regardless of whether the DB-API 2 module used is threadsafe
16 | on the connection level (threadsafety > 1) or not. It must only
17 | be threadsafe on the module level (threadsafety = 1). If the
18 | DB-API 2 module is threadsafe, the connections will be shared
19 | between threads (keep this in mind if you use transactions).
20 |
21 | Usage:
22 |
23 | The idea behind SimplePooledDB is that it's completely transparent.
24 | After you have established your connection pool, stating the
25 | DB-API 2 module to be used, the number of connections
26 | to be cached in the pool and the connection parameters, e.g.
27 |
28 | import pgdb # import used DB-API 2 module
29 | from dbutils.simple_pooled_db import PooledDB
30 | dbpool = PooledDB(pgdb, 5, host=..., database=..., user=..., ...)
31 |
32 | you can demand database connections from that pool,
33 |
34 | db = dbpool.connection()
35 |
36 | and use them just as if they were ordinary DB-API 2 connections.
37 | It's really just a proxy class.
38 |
39 | db.close() will return the connection to the pool, it will not
40 | actually close it. This is so your existing code works nicely.
41 |
42 | Ideas for improvement:
43 |
44 | * Do not create the maximum number of connections on startup
45 | already, but only a certain number and the rest on demand.
46 | * Detect and transparently reset "bad" connections.
47 | * Connections should have some sort of maximum usage limit
48 | after which they should be automatically closed and reopened.
49 | * Prefer or enforce thread-affinity for the connections,
50 | allowing for both shareable and non-shareable connections.
51 |
52 | Please note that these and other ideas have been already
53 | implemented in PooledDB, a more sophisticated version
54 | of SimplePooledDB. You might also consider using PersistentDB
55 | instead for thread-affine persistent database connections.
56 | SimplePooledDB may still serve as a very simple reference
57 | and example implementation for developers.
58 |
59 |
60 | Copyright, credits and license:
61 |
62 | * Contributed as MiscUtils/DBPool for Webware for Python
63 | by Dan Green, December 2000
64 | * Thread safety bug found by Tom Schwaller
65 | * Fixes by Geoff Talvola (thread safety in _threadsafe_getConnection())
66 | * Clean up by Chuck Esterbrook
67 | * Fix unthreadsafe functions which were leaking, Jay Love
68 | * Eli Green's webware-discuss comments were lifted for additional docs
69 | * Clean-up and detailed commenting, rename and move to DBUtils
70 | by Christoph Zwerschke in September 2005
71 |
72 | Licensed under the MIT license.
73 | """
74 |
75 | from . import __version__
76 |
# Names exported for "from dbutils.simple_pooled_db import *":
__all__ = [
    'PooledDB', 'PooledDBConnection', 'PooledDBError', 'NotSupportedError',
]
80 |
81 |
class PooledDBError(Exception):
    """General PooledDB error.

    Base class for all errors raised by this simple pool module.
    """
84 |
85 |
class NotSupportedError(PooledDBError):
    """DB-API module not supported by PooledDB.

    Raised when the module's declared thread-safety level is 0
    or cannot be determined.
    """
88 |
89 |
class PooledDBConnection:
    """A proxy class for pooled database connections.

    You don't normally deal with this class directly,
    but use PooledDB to get new connections.
    """

    def __init__(self, pool, con):
        """Store the pool and the wrapped connection."""
        self._con = con
        self._pool = pool

    def close(self):
        """Return the connection to the pool instead of closing it."""
        con = self._con
        if con is not None:
            self._pool.returnConnection(con)
            self._con = None

    def __getattr__(self, name):
        """Delegate all other attribute access to the wrapped connection."""
        return getattr(self._con, name)

    def __del__(self):
        """Make sure the connection is returned when the proxy dies."""
        self.close()
118 |
119 |
120 | class PooledDB:
121 | """A very simple database connection pool.
122 |
123 | After you have created the connection pool,
124 | you can get connections using getConnection().
125 | """
126 |
127 | version = __version__
128 |
    def __init__(self, dbapi, maxconnections, *args, **kwargs):
        """Set up the database connection pool.

        dbapi: the DB-API 2 compliant module you want to use
        maxconnections: the number of connections cached in the pool
        args, kwargs: the parameters that shall be used to establish
            the database connections using connect()

        Raises NotSupportedError if the module's declared thread-safety
        level is 0 (no threading support) or cannot be determined.
        """
        # Determine the declared thread-safety level of the module
        # (the "threadsafety" attribute may be missing altogether):
        try:
            threadsafety = dbapi.threadsafety
        except Exception:
            threadsafety = None
        if threadsafety == 0:
            raise NotSupportedError(
                "Database module does not support any level of threading.")
        if threadsafety == 1:
            # If there is no connection level safety, build
            # the pool using the synchronized queue class
            # that implements all the required locking semantics.
            from queue import Queue
            self._queue = Queue(maxconnections)  # create the queue
            # Bind the queue-based strategy methods for this instance:
            self.connection = self._unthreadsafe_get_connection
            self.addConnection = self._unthreadsafe_add_connection
            self.returnConnection = self._unthreadsafe_return_connection
        elif threadsafety in (2, 3):
            # If there is connection level safety, implement the
            # pool with an ordinary list used as a circular buffer.
            # We only need a minimum of locking in this case.
            from threading import Lock
            self._lock = Lock()  # create a lock object to be used later
            self._nextConnection = 0  # index of the next connection to be used
            self._connections = []  # the list of connections
            # Bind the list-based strategy methods for this instance:
            self.connection = self._threadsafe_get_connection
            self.addConnection = self._threadsafe_add_connection
            self.returnConnection = self._threadsafe_return_connection
        else:
            raise NotSupportedError(
                "Database module threading support cannot be determined.")
        # Establish all database connections (it would be better to
        # only establish a part of them now, and the rest on demand).
        for _i in range(maxconnections):
            self.addConnection(dbapi.connect(*args, **kwargs))
171 |
172 | # The following functions are used with DB-API 2 modules
173 | # that do not have connection level threadsafety, like PyGreSQL.
174 | # However, the module must be threadsafe at the module level.
175 | # Note: threadsafe/unthreadsafe refers to the DB-API 2 module,
176 | # not to this class which should be threadsafe in any case.
177 |
178 | def _unthreadsafe_get_connection(self):
179 | """Get a connection from the pool."""
180 | return PooledDBConnection(self, self._queue.get())
181 |
182 | def _unthreadsafe_add_connection(self, con):
183 | """Add a connection to the pool."""
184 | self._queue.put(con)
185 |
186 | def _unthreadsafe_return_connection(self, con):
187 | """Return a connection to the pool.
188 |
189 | In this case, the connections need to be put
190 | back into the queue after they have been used.
191 | This is done automatically when the connection is closed
192 | and should never be called explicitly outside of this module.
193 | """
194 | self._unthreadsafe_add_connection(con)
195 |
196 | # The following functions are used with DB-API 2 modules
197 | # that are threadsafe at the connection level, like psycopg.
198 | # Note: In this case, connections are shared between threads.
199 | # This may lead to problems if you use transactions.
200 |
201 | def _threadsafe_get_connection(self):
202 | """Get a connection from the pool."""
203 | with self._lock:
204 | next_con = self._nextConnection
205 | con = PooledDBConnection(self, self._connections[next_con])
206 | next_con += 1
207 | if next_con >= len(self._connections):
208 | next_con = 0
209 | self._nextConnection = next_con
210 | return con
211 |
212 | def _threadsafe_add_connection(self, con):
213 | """Add a connection to the pool."""
214 | self._connections.append(con)
215 |
216 | def _threadsafe_return_connection(self, con):
217 | """Return a connection to the pool.
218 |
219 | In this case, the connections always stay in the pool,
220 | so there is no need to do anything here.
221 | """
222 | # we don't need to do anything here
223 |
--------------------------------------------------------------------------------
/dbutils/simple_pooled_pg.py:
--------------------------------------------------------------------------------
1 | """SimplePooledPg - a very simple classic PyGreSQL connection pool.
2 |
3 | Implements a pool of threadsafe cached connections
4 | to a PostgreSQL database which are transparently reused,
5 | using the classic (not DB-API 2 compliant) PyGreSQL pg API.
6 |
7 | This should result in a speedup for persistent applications
8 | such as the "Webware for Python" AppServer.
9 |
10 | For more information on PostgreSQL, see:
11 | https://www.postgresql.org/
12 | For more information on PyGreSQL, see:
13 | http://www.pygresql.org
14 | For more information on Webware for Python, see:
15 | https://webwareforpython.github.io/w4py/
16 |
17 | Measures are taken to make the pool of connections threadsafe
18 | regardless of the fact that the PyGreSQL pg module itself is
19 | not threadsafe at the connection level. Connections will never be
20 | shared between threads, so you can safely use transactions.
21 |
22 | Usage:
23 |
24 | The idea behind SimplePooledPg is that it's completely transparent.
25 | After you have established your connection pool, stating the
26 | number of connections to be cached in the pool and the
27 | connection parameters, e.g.
28 |
29 | from dbutils.simple_pooled_pg import PooledPg
30 | dbpool = PooledPg(5, host=..., database=..., user=..., ...)
31 |
32 | you can demand database connections from that pool,
33 |
34 | db = dbpool.connection()
35 |
36 | and use them just as if they were ordinary PyGreSQL pg API
37 | connections. It's really just a proxy class.
38 |
39 | db.close() will return the connection to the pool, it will not
40 | actually close it. This is so your existing code works nicely.
41 |
42 | Ideas for improvement:
43 |
44 | * Do not create the maximum number of connections on startup
45 | already, but only a certain number and the rest on demand.
46 | * Detect and transparently reset "bad" connections. The PyGreSQL
47 | pg API provides a status attribute and a reset() method for that.
48 | * Connections should have some sort of "maximum usage limit"
49 | after which they should be automatically closed and reopened.
50 | * Prefer or enforce thread affinity for the connections.
51 |
52 | Please note that these and other ideas have been already
53 | implemented in PooledPg, a more sophisticated version
54 | of SimplePooledPg. You might also consider using PersistentPg
55 | instead for thread-affine persistent PyGreSQL connections.
56 | SimplePooledPg may still serve as a very simple reference
57 | and example implementation for developers.
58 |
59 |
60 | Copyright, credits and license:
61 |
62 | * Contributed as supplement for Webware for Python and PyGreSQL
63 | by Christoph Zwerschke in September 2005
64 | * Based on the code of DBPool, contributed to Webware for Python
65 | by Dan Green in December 2000
66 |
67 | Licensed under the MIT license.
68 | """
69 |
70 | from pg import DB as PgConnection # noqa: N811
71 |
72 | from . import __version__
73 |
74 | __all__ = ['PooledPg', 'PooledPgConnection']
75 |
76 |
class PooledPgConnection:
    """A proxy class for pooled PostgreSQL connections.

    You don't normally deal with this class directly,
    but use PooledPg's connection() method to get new connections.
    """

    def __init__(self, pool, con):
        """Initialize pooled connection.

        pool: the pool this connection belongs to; it must provide
            a cache() method that takes the raw connection back
        con: the underlying PyGreSQL pg connection being proxied
        """
        self._con = con
        self._pool = pool

    def close(self):
        """Close the pooled connection."""
        # Instead of actually closing the connection,
        # return it to the pool so that it can be reused.
        if self._con is not None:
            self._pool.cache(self._con)
            # Mark as closed so that repeated close() calls are no-ops.
            self._con = None

    def __getattr__(self, name):
        """Get the attribute with the given name."""
        # All other attributes are delegated to the underlying connection.
        # After close(), self._con is None, so this raises AttributeError.
        return getattr(self._con, name)

    def __del__(self):
        """Delete the pooled connection."""
        # Builtins (including Exceptions) might not exist anymore when the
        # finalizer runs during interpreter shutdown, and __init__ might not
        # have completed; never let the finalizer raise (same guard as in
        # SteadyPgConnection.__del__).
        try:  # noqa: SIM105
            self.close()
        except:  # noqa: E722, S110
            pass
105 |
106 |
class PooledPg:
    """A very simple PostgreSQL connection pool.

    After you have created the connection pool,
    you can get connections using connection().
    """

    version = __version__

    def __init__(self, maxconnections, *args, **kwargs):
        """Set up the PostgreSQL connection pool.

        maxconnections: the number of connections cached in the pool
        args, kwargs: the parameters that shall be used to establish
            the PostgreSQL connections using pg.connect()
        """
        # The classic pg module has no connection level threadsafety,
        # so the pool is built on the synchronized queue class, which
        # already provides all the locking semantics we need.
        from queue import Queue
        self._queue = Queue(maxconnections)
        # Open every connection up front (opening only some of them
        # now and the rest on demand would be an improvement).
        for _ in range(maxconnections):
            con = PgConnection(*args, **kwargs)
            self.cache(con)

    def cache(self, con):
        """Add or return a connection to the pool."""
        self._queue.put(con)

    def connection(self):
        """Get a connection from the pool."""
        # Queue.get() blocks until a connection becomes available,
        # so at most maxconnections are ever handed out at once.
        con = self._queue.get()
        return PooledPgConnection(self, con)
140 |
--------------------------------------------------------------------------------
/dbutils/steady_pg.py:
--------------------------------------------------------------------------------
1 | """SteadyPg - hardened classic PyGreSQL connections.
2 |
3 | Implements steady connections to a PostgreSQL database
4 | using the classic (not DB-API 2 compliant) PyGreSQL API.
5 |
6 | The connections are transparently reopened when they are
7 | closed or the database connection has been lost or when
8 | they are used more often than an optional usage limit.
9 | Only connections which have been marked as being in a database
10 | transaction with a begin() call will not be silently replaced.
11 |
12 | A typical situation where database connections are lost
13 | is when the database server or an intervening firewall is
14 | shutdown and restarted for maintenance reasons. In such a
15 | case, all database connections would become unusable, even
16 | though the database service may be already available again.
17 |
18 | The "hardened" connections provided by this module will
19 | make the database connections immediately available again.
20 |
21 | This results in a steady PostgreSQL connection that can be used
22 | by PooledPg or PersistentPg to create pooled or persistent
23 | connections to a PostgreSQL database in a threaded environment
24 | such as the application server of "Webware for Python."
25 | Note, however, that the connections themselves are not thread-safe.
26 |
27 | For more information on PostgreSQL, see:
28 | https://www.postgresql.org/
29 | For more information on PyGreSQL, see:
30 | http://www.pygresql.org
31 | For more information on Webware for Python, see:
32 | https://webwareforpython.github.io/w4py/
33 |
34 |
35 | Usage:
36 |
37 | You can use the class SteadyPgConnection in the same way as you
38 | would use the class DB from the classic PyGreSQL API module db.
39 | The only difference is that you may specify a usage limit as the
40 | first parameter when you open a connection (set it to None
41 | if you prefer unlimited usage), and an optional list of commands
42 | that may serve to prepare the session as the second parameter,
43 | and you can specify whether it is allowed to close the connection
44 | (by default this is true). When the connection to the PostgreSQL
45 | database is lost or has been used too often, it will be automatically
46 | reset, without further notice.
47 |
48 | from dbutils.steady_pg import SteadyPgConnection
49 | db = SteadyPgConnection(10000, ["set datestyle to german"],
50 | host=..., dbname=..., user=..., ...)
51 | ...
52 | result = db.query('...')
53 | ...
54 | db.close()
55 |
56 |
57 | Ideas for improvement:
58 |
59 | * Alternatively to the maximum number of uses,
60 | implement a maximum time to live for connections.
61 | * Optionally log usage and loss of connection.
62 |
63 |
64 | Copyright, credits and license:
65 |
66 | * Contributed as supplement for Webware for Python and PyGreSQL
67 | by Christoph Zwerschke in September 2005
68 |
69 | Licensed under the MIT license.
70 | """
71 |
72 | from contextlib import suppress
73 |
74 | from pg import DB as PgConnection # noqa: N811
75 |
76 | from . import __version__
77 |
78 |
class SteadyPgError(Exception):
    """General SteadyPg error (base class for all errors in this module)."""
81 |
82 |
class InvalidConnectionError(SteadyPgError):
    """Database connection is invalid.

    Raised on attribute access when a SteadyPgConnection has no
    underlying PyGreSQL connection (see SteadyPgConnection.__getattr__).
    """
85 |
86 |
# deprecated alias names for error classes
# (kept for backward compatibility with older DBUtils versions)
InvalidConnection = InvalidConnectionError
89 |
90 |
class SteadyPgConnection:
    """Class representing steady connections to a PostgreSQL database.

    Underlying the connection is a classic PyGreSQL pg API database
    connection which is reset if the connection is lost or used too often.
    Thus the resulting connection is steadier ("tough and self-healing").

    If you want the connection to be persistent in a threaded environment,
    then you should not deal with this class directly, but use either the
    PooledPg module or the PersistentPg module to get the connections.
    """

    version = __version__

    def __init__(
            self, maxusage=None, setsession=None, closeable=True,
            *args, **kwargs):
        """Create a "tough" PostgreSQL connection.

        A hardened version of the DB wrapper class of PyGreSQL.

        maxusage: maximum usage limit for the underlying PyGreSQL connection
            (number of uses, 0 or None means unlimited usage)
            When this limit is reached, the connection is automatically reset.
        setsession: optional list of SQL commands that may serve to prepare
            the session, e.g. ["set datestyle to ...", "set time zone ..."]
        closeable: if this is set to false, then closing the connection will
            be silently ignored, but by default the connection can be closed
        args, kwargs: the parameters that shall be used to establish
            the PostgreSQL connections with PyGreSQL using pg.DB()

        Raises TypeError if maxusage is not None and not an integer.
        """
        # basic initialization to make finalizer work
        # (if PgConnection() raises below, __del__ must still find these
        # attributes and treat the connection as already closed)
        self._con = None
        self._closed = True
        # proper initialization of the connection
        if maxusage is None:
            maxusage = 0  # internally, 0 means unlimited usage
        if not isinstance(maxusage, int):
            raise TypeError("'maxusage' must be an integer value.")
        self._maxusage = maxusage
        self._setsession_sql = setsession
        self._closeable = closeable
        self._con = PgConnection(*args, **kwargs)
        self._transaction = False  # set via begin(), cleared on commit etc.
        self._closed = False
        self._setsession()
        self._usage = 0  # number of uses since the last (re)open or reset

    def __enter__(self):
        """Enter the runtime context. This will start a transaction."""
        self.begin()
        return self

    def __exit__(self, *exc):
        """Exit the runtime context. This will end the transaction."""
        # commit only if the with-block finished without an exception
        if exc[0] is None and exc[1] is None and exc[2] is None:
            self.commit()
        else:
            self.rollback()

    def _setsession(self):
        """Execute the SQL commands for session preparation."""
        if self._setsession_sql:
            for sql in self._setsession_sql:
                self._con.query(sql)

    def _close(self):
        """Close the tough connection.

        You can always close a tough connection with this method,
        and it will not complain if you close it more than once.
        """
        if not self._closed:
            # the underlying connection may already be broken or closed
            with suppress(Exception):
                self._con.close()
            self._transaction = False
            self._closed = True

    def close(self):
        """Close the tough connection.

        You are allowed to close a tough connection by default,
        and it will not complain if you close it more than once.

        You can disallow closing connections by setting
        the closeable parameter to something false. In this case,
        closing tough connections will be silently ignored.
        """
        if self._closeable:
            self._close()
        elif self._transaction:
            # not closeable, but make sure a pending transaction
            # does not leak into the next use of this connection
            self.reset()

    def reopen(self):
        """Reopen the tough connection.

        It will not complain if the connection cannot be reopened.
        """
        try:
            self._con.reopen()
        except Exception:
            # reopening failed; if a transaction was pending, try to
            # roll it back on the (possibly still usable) old connection
            if self._transaction:
                self._transaction = False
                with suppress(Exception):
                    self._con.query('rollback')
        else:
            # reopening worked: the connection is fresh again
            self._transaction = False
            self._closed = False
            self._setsession()
            self._usage = 0

    def reset(self):
        """Reset the tough connection.

        If a reset is not possible, tries to reopen the connection.
        It will not complain if the connection is already closed.
        """
        try:
            self._con.reset()
            self._transaction = False
            self._setsession()
            self._usage = 0
        except Exception:
            try:
                self.reopen()
            except Exception:
                # reopen() should not raise, but be extra careful here
                with suppress(Exception):
                    self.rollback()

    def begin(self, sql=None):
        """Begin a transaction."""
        # mark the transaction first, so the connection will not be
        # silently replaced while the transaction is in progress
        self._transaction = True
        try:
            begin = self._con.begin
        except AttributeError:
            # fall back to a plain query for older pg versions
            return self._con.query(sql or 'begin')
        else:
            # use existing method if available
            return begin(sql=sql) if sql else begin()

    def end(self, sql=None):
        """Commit the current transaction."""
        self._transaction = False
        try:
            end = self._con.end
        except AttributeError:
            return self._con.query(sql or 'end')
        else:
            return end(sql=sql) if sql else end()

    def commit(self, sql=None):
        """Commit the current transaction."""
        self._transaction = False
        try:
            commit = self._con.commit
        except AttributeError:
            return self._con.query(sql or 'commit')
        else:
            return commit(sql=sql) if sql else commit()

    def rollback(self, sql=None):
        """Rollback the current transaction."""
        self._transaction = False
        try:
            rollback = self._con.rollback
        except AttributeError:
            return self._con.query(sql or 'rollback')
        else:
            return rollback(sql=sql) if sql else rollback()

    def _get_tough_method(self, method):
        """Return a "tough" version of a connection class method.

        The tough version checks whether the connection is bad (lost)
        and automatically and transparently tries to reset the connection
        if this is the case (for instance, the database has been restarted).
        """
        def tough_method(*args, **kwargs):
            # snapshot the transaction flag: connections inside an explicit
            # transaction must never be silently replaced
            transaction = self._transaction
            if not transaction:
                try:
                    # check whether connection status is bad
                    # or the connection has been used too often
                    if not self._con.db.status or (
                            self._maxusage and self._usage >= self._maxusage):
                        raise AttributeError
                except Exception:
                    self.reset()  # then reset the connection
            try:
                result = method(*args, **kwargs)  # try connection method
            except Exception:  # error in query
                if transaction:  # inside a transaction
                    self._transaction = False
                    raise  # propagate the error
                if self._con.db.status:  # if it was not a connection problem
                    raise  # then propagate the error
                self.reset()  # reset the connection
                result = method(*args, **kwargs)  # and try one more time
            self._usage += 1
            return result
        return tough_method

    def __getattr__(self, name):
        """Inherit the members of the standard connection class.

        Some methods are made "tougher" than in the standard version.
        """
        if self._con:
            attr = getattr(self._con, name)
            # wrap the common query methods with the self-healing version
            if (name in ('query', 'get', 'insert', 'update', 'delete')
                    or name.startswith('get_')):
                attr = self._get_tough_method(attr)
            return attr
        raise InvalidConnectionError

    def __del__(self):
        """Delete the steady connection."""
        # builtins (including Exceptions) might not exist anymore
        try:  # noqa: SIM105
            self._close()  # make sure the connection is closed
        except:  # noqa: E722, S110
            pass
313 |
--------------------------------------------------------------------------------
/docs/changelog.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
Support Python version 3.12, cease support for Python 3.6.
28 |
Various small internal improvements and modernizations.
29 |
30 |
31 |
32 |
3.0.3
33 |
DBUtils 3.0.3 was released on April 27, 2023.
34 |
Changes:
35 |
36 |
Support Python version 3.11.
37 |
Improve determination of DB API module if creator is specified.
38 |
Minor fixes and a section on advanced usage in docs.
39 |
40 |
41 |
42 |
3.0.2
43 |
DBUtils 3.0.2 was released on January 14, 2022.
44 |
The optional iterator protocol on cursors is now supported.
45 |
46 |
47 |
3.0.1
48 |
DBUtils 3.0.1 was released on December 22, 2021.
49 |
It includes InterfaceError to the default list of exceptions
50 | for which the connection failover mechanism is applied.
51 | You can override this with the failures parameter.
52 |
53 |
54 |
3.0.0
55 |
DBUtils 3.0.0 was released on November 26, 2021.
56 |
It is intended to be used with Python versions 3.6 to 3.10.
57 |
Changes:
58 |
59 |
Cease support for Python 2 and 3.5, minor optimizations.
60 |
61 |
62 |
63 |
2.0.3
64 |
DBUtils 2.0.3 was released on November 26, 2021.
65 |
Changes:
66 |
67 |
Support Python version 3.10.
68 |
69 |
70 |
71 |
2.0.2
72 |
DBUtils 2.0.2 was released on June 8, 2021.
73 |
Changes:
74 |
75 |
Allow using context managers for pooled connections.
76 |
77 |
78 |
79 |
2.0.1
80 |
DBUtils 2.0.1 was released on April 8, 2021.
81 |
Changes:
82 |
83 |
Avoid "name Exception is not defined" when exiting.
84 |
85 |
86 |
87 |
2.0
88 |
DBUtils 2.0 was released on September 26, 2020.
89 |
It is intended to be used with Python versions 2.7 and 3.5 to 3.9.
90 |
Changes:
91 |
92 |
DBUtils does not act as a Webware plugin anymore, it is now just an ordinary
93 | Python package (of course it could be used as such also before).
94 |
The Webware Examples folder has been removed.
95 |
Folders, packages and modules have been renamed to lower-case.
96 | Particularly, you need to import dbutils instead of DBUtils now.
97 |
The internal naming conventions have also been changed to comply with PEP8.
98 |
The documentation has been adapted to reflect the changes in this version.
99 |
This changelog has been compiled from the former release notes.
100 |
101 |
102 |
103 |
1.4
104 |
DBUtils 1.4 was released on September 26, 2020.
105 |
It is intended to be used with Python versions 2.7 and 3.5 to 3.9.
106 |
Improvements:
107 |
108 |
The SteadyDB and SteadyPg classes only reconnect after the
109 | maxusage limit has been reached when the connection is not currently
110 | inside a transaction.
111 |
112 |
113 |
114 |
1.3
115 |
DBUtils 1.3 was released on March 3, 2018.
116 |
It is intended to be used with Python versions 2.6, 2.7 and 3.4 to 3.7.
117 |
Improvements:
118 |
119 |
This version now supports context handlers for connections and cursors.
120 |
121 |
122 |
123 |
1.2
124 |
DBUtils 1.2 was released on February 5, 2017.
125 |
It is intended to be used with Python versions 2.6, 2.7 and 3.0 to 3.6.
126 |
127 |
128 |
1.1.1
129 |
DBUtils 1.1.1 was released on February 4, 2017.
130 |
It is intended to be used with Python versions 2.3 to 2.7.
131 |
Improvements:
132 |
133 |
Reopen SteadyDB connections when commit or rollback fails
134 | (suggested by Ben Hoyt).
135 |
136 |
Bugfixes:
137 |
138 |
Fixed a problem when running under Jython (reported by Vitaly Kruglikov).
139 |
140 |
141 |
142 |
1.1
143 |
DBUtils 1.1 was released on August 14, 2011.
144 |
Improvements:
145 |
146 |
The transparent reopening of connections is actually an undesired behavior
147 | if it happens during database transactions. In these cases, the transaction
148 | should fail and the error be reported back to the application instead of the
149 | rest of the transaction being executed in a new connection and therefore in
150 | a new transaction. Therefore DBUtils now allows suspending the transparent
151 | reopening during transactions. All you need to do is indicate the beginning
152 | of a transaction by calling the begin() method of the connection.
153 | DBUtils makes sure that this method always exists, even if the database
154 | driver does not support it.
155 |
If the database driver supports a ping() method, then DBUtils can use it
156 | to check whether connections are alive instead of just trying to use the
157 | connection and reestablishing it in case it was dead. Since these checks are
158 | done at the expense of some performance, you have exact control when these
159 | are executed via the new ping parameter.
160 |
PooledDB has got another new parameter reset for controlling how
161 | connections are reset before being put back into the pool.
162 |
163 |
Bugfixes:
164 |
165 |
Fixed propagation of error messages when the connection was lost.
166 |
Fixed an issue with the setoutputsize() cursor method.
167 |
Fixed some minor issues with the DBUtilsExample for Webware.
168 |
169 |
170 |
171 |
1.0
172 |
DBUtils 1.0 was released on November 29, 2008.
173 |
It is intended to be used with Python versions 2.2 to 2.6.
174 |
Changes:
175 |
176 |
Added a failures parameter for configuring the exception classes for
177 | which the failover mechanism is applied (as suggested by Matthew Harriger).
178 |
Added a closeable parameter for configuring whether connections can be
179 | closed (otherwise closing connections will be silently ignored).
180 |
It is now possible to override defaults via the creator.dbapi and
181 | creator.threadsafety attributes.
182 |
Added an alias method dedicated_connection as a shorthand for
183 | connection(shareable=False).
184 |
Added a version attribute to all exported classes.
185 |
Where the value 0 has the meaning "unlimited", parameters can now be also
186 | set to the value None instead.
187 |
It turned out that threading.local does not work properly with
188 | mod_wsgi, so we use the Python implementation for thread-local data
189 | even when a faster threading.local implementation is available.
190 | A new parameter threadlocal allows you to pass an arbitrary class
191 | such as threading.local if you know it works in your environment.
192 |
193 |
Bugfixes and improvements:
194 |
195 |
In some cases, when instance initialization failed or referenced objects
196 | were already destroyed, finalizers could throw exceptions or create infinite
197 | recursion (problem reported by Gregory Pinero and Jehiah Czebotar).
198 |
DBUtils now tries harder to find the underlying DB-API 2 module if only a
199 | connection creator function is specified. This had not worked before with
200 | the MySQLdb module (problem reported by Gregory Pinero).
201 |
202 |
203 |
204 |
0.9.4
205 |
DBUtils 0.9.4 was released on July 7, 2007.
206 |
This release fixes a problem in the destructor code and has been supplemented
207 | with a German User's Guide.
208 |
Again, please note that the dbapi parameter has been renamed to creator
209 | in the last release, since you can now pass custom creator functions
210 | for database connections instead of DB-API 2 modules.
211 |
212 |
213 |
0.9.3
214 |
DBUtils 0.9.3 was released on May 21, 2007.
215 |
Changes:
216 |
217 |
Support custom creator functions for database connections.
218 | These can now be used as the first parameter instead of an DB-API module
219 | (suggested by Ezio Vernacotola).
Some fixes in the documentation.
224 | Added Chinese translation of the User's Guide, kindly contributed by gashero.
225 |
226 |
227 |
228 |
0.9.2
229 |
DBUtils 0.9.2 was released on September 22, 2006.
230 |
It is intended to be used with Python versions 2.2 to 2.5.
231 |
Changes:
232 |
233 |
Renamed SolidDB to SteadyDB to avoid confusion with the "solidDB"
234 | storage engine. Accordingly, renamed SolidPg to SteadyPg.
235 |
236 |
237 |
238 |
0.9.1
239 |
DBUtils 0.9.1 was released on May 8, 2006.
240 |
It is intended to be used with Python versions 2.2 to 2.4.
241 |
Changes:
242 |
243 |
Added _closeable attribute and made persistent connections not closeable
244 | by default. This allows PersistentDB to be used in the same way as you
245 | would use PooledDB.
246 |
Allowed arguments in the DB-API 2 cursor() method. MySQLdb is using this
247 | to specify cursor classes. (Suggested by Michael Palmer.)
248 |
Improved the documentation and added a User's Guide.
249 |
250 |
251 |
252 |
0.8.1 - 2005-09-13
253 |
DBUtils 0.8.1 was released on September 13, 2005.
254 |
It is intended to be used with Python versions 2.0 to 2.4.