├── logtopg
│   ├── tests
│   │   ├── __init__.py
│   │   └── test_logtopg.py
│   ├── createtable.sql
│   ├── insertrow.sql
│   ├── version.py
│   └── __init__.py
├── MANIFEST.in
├── requirements.txt
├── tox.ini
├── .travis.yml
├── setup.py
├── .gitignore
├── docs
│   └── example.py
└── README.rst

/logtopg/tests/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include logtopg *.sql
2 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | psycopg2==2.5.4
2 | nose>=1.3.4
3 | 
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py27,py34
3 | 
4 | [testenv]
5 | commands=python setup.py nosetests --with-doctest
6 | deps=-rrequirements.txt
7 | 
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | ---
2 | sudo: false
3 | language: python
4 | python:
5 | 
6 |   - "2.7"
7 |   - "3.2"
8 |   - "3.3"
9 |   - "3.4"
10 | 
11 | # command to install dependencies
12 | install:
13 |   - "pip install -r requirements.txt ."
14 |   # - "pip install ."
15 | 
16 | # command to run tests
17 | script: "python setup.py nosetests --with-doctest"
18 | 
19 | addons:
20 |   postgresql: "9.3"
21 | 
22 | before_script:
23 |   - psql -c "create user logtopg with password 'l0gt0pg';" -U postgres
24 |   - psql -c "create database logtopg_tests with owner logtopg;" -U postgres
25 |   - psql -d logtopg_tests -c "create extension ltree;" -U postgres
26 | 
--------------------------------------------------------------------------------
/logtopg/createtable.sql:
--------------------------------------------------------------------------------
1 | create table if not exists {0} (
2 | 
3 |     created timestamptz,
4 | 
5 |     process_id int,
6 |     process_name text,
7 | 
8 |     logger_name ltree,
9 | 
10 |     path_name text,
11 |     module text,
12 |     file_name text,
13 | 
14 |     function_name text,
15 | 
16 |     line_number int,
17 | 
18 |     log_level text,
19 |     log_level_number int,
20 | 
21 |     message text,
22 | 
23 |     exc_info text,
24 |     thread_id bigint,
25 |     thread_name text,
26 |     inserted timestamptz not null default now()
27 | );
28 | 
29 | create index on {0} (created);
30 | create index on {0} (inserted);
31 | create index on {0} (logger_name);
32 | create index on {0} (process_id);
33 | 
--------------------------------------------------------------------------------
/logtopg/insertrow.sql:
--------------------------------------------------------------------------------
1 | insert into {0} (
2 |     created,
3 |     process_id,
4 |     process_name,
5 |     logger_name,
6 |     path_name,
7 |     module,
8 |     file_name,
9 |     function_name,
10 |     line_number,
11 |     log_level,
12 |     log_level_number,
13 |     message,
14 |     exc_info,
15 |     thread_id,
16 |     thread_name
17 | ) values (
18 |     to_timestamp(%(created)s),
19 |     %(process)s,
20 |     %(processName)s,
21 |     %(name)s,
22 |     %(pathname)s,
23 |     %(module)s,
24 |     %(filename)s,
25 |     %(funcName)s,
26 |     %(lineno)s,
27 |     %(levelname)s,
28 |     %(levelno)s,
29 |     %(message)s,
30 |     %(exc_text)s,
31 |     %(thread)s,
32 |     %(threadName)s
33 | );
34 | 
35 | 
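
A quick sketch of how these two templates get used, mirroring what PGHandler in
logtopg/__init__.py does further down: the {0} placeholder is filled with the
configured table name via str.format(), and psycopg2 fills the %(...)s
placeholders from the log record's attribute dict. The connection details below
are only an assumption (the logtopg_tests database from .travis.yml, with the
ltree extension already created), and the hand-built record stands in for what
emit() normally receives:

    import logging

    import pkg_resources
    import psycopg2

    table = "logtopg_example"  # use whatever log_table_name you configure

    create_sql = pkg_resources.resource_string(
        "logtopg", "createtable.sql").decode("utf-8").format(table)

    insert_sql = pkg_resources.resource_string(
        "logtopg", "insertrow.sql").decode("utf-8").format(table)

    pgconn = psycopg2.connect(database="logtopg_tests", user="logtopg",
                              password="l0gt0pg", host="localhost")
    cursor = pgconn.cursor()

    # The handler itself pipes this through psql, but a plain execute()
    # also works for a one-off check.
    cursor.execute(create_sql)

    # PGHandler.emit() passes record.__dict__ as the query parameters.
    record = logging.LogRecord(
        "logtopg.example", logging.INFO, __file__, 1, "hello", None, None)
    record.message = record.getMessage()
    record.exc_text = ""

    cursor.execute(insert_sql, record.__dict__)
    pgconn.commit()
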
--------------------------------------------------------------------------------
/logtopg/version.py:
--------------------------------------------------------------------------------
1 | # vim: set expandtab ts=4 sw=4 filetype=python fileencoding=utf8:
2 | 
3 | """
4 | Do not do anything in this file except define the __version__ variable!
5 | 
6 | The setup.py script reads this version from here during install.
7 | 
8 | In the past, I've defined the version in the setup.py file (A), or
9 | defined it in the top of the project, like in logtopg/__init__.py (B).
10 | 
11 | Choice A is bad because it isn't easy to fire up a python session and
12 | then do::
13 | 
14 |     >>> import logtopg
15 |     >>> logtopg.__version__  # doctest: +SKIP
16 | 
17 | to look up the version.
18 | 
19 | And choice B is bad because the logtopg/__init__.py file might blow up
20 | during install because it tries to import some third-party module that
21 | hasn't been installed yet.
22 | 
23 | """
24 | 
25 | __version__ = "0.1.1"
26 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # vim: set expandtab ts=4 sw=4 filetype=python fileencoding=utf8:
2 | 
3 | import sys
4 | 
5 | if sys.version_info < (2, 7):
6 |     raise Exception("sorry, this needs at least python 2.7!")
7 | 
8 | # Read __version__ from version.py
9 | with open("logtopg/version.py") as f:
10 |     exec(f.read())
11 | 
12 | from setuptools import find_packages, setup
13 | 
14 | setup(
15 |     name="LogToPG",
16 |     version=__version__,
17 |     description="Python logging handler that stores logs in postgresql",
18 |     url="https://github.com/216software/logtopg/",
19 |     packages=find_packages(),
20 | 
21 |     author="216 Software, LLC",
22 |     author_email="info@216software.com",
23 |     license="BSD License",
24 |     include_package_data=True,
25 | 
26 |     install_requires=[
27 |         'psycopg2',
28 |     ],
29 | 
30 |     test_suite="nose.collector",
31 |     use_2to3=True,
32 | )
33 | 
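
The version-lookup trick in setup.py above can be spelled out like this:
exec()ing logtopg/version.py defines __version__ without importing the logtopg
package, so the install never trips over a missing psycopg2. This sketch uses
an explicit namespace dict, which is a small variation for illustration
(setup.py simply exec()s into its own globals):

    # Run version.py on its own; nothing from logtopg gets imported.
    namespace = {}
    with open("logtopg/version.py") as f:
        exec(f.read(), namespace)

    print(namespace["__version__"])  # -> 0.1.1

Once the package is installed, import logtopg and read logtopg.__version__ to
get the same string, which is exactly the lookup the version.py docstring wants
to keep working.
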
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | 
5 | # C extensions
6 | *.so
7 | 
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | bin/
12 | build/
13 | develop-eggs/
14 | dist/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | *.egg-info/
23 | .installed.cfg
24 | *.egg
25 | 
26 | # Installer logs
27 | pip-log.txt
28 | pip-delete-this-directory.txt
29 | 
30 | # Unit test / coverage reports
31 | htmlcov/
32 | .tox/
33 | .coverage
34 | .cache
35 | nosetests.xml
36 | coverage.xml
37 | 
38 | # Translations
39 | *.mo
40 | 
41 | # Mr Developer
42 | .mr.developer.cfg
43 | .project
44 | .pydevproject
45 | 
46 | # Rope
47 | .ropeproject
48 | 
49 | # Django stuff:
50 | *.log
51 | *.pot
52 | 
53 | # Sphinx documentation
54 | docs/_build/
55 | 
56 | # My stuff.
57 | static/bower_components
58 | static/node_modules
59 | dazzle/webapp/static/img
60 | .*.swp
61 | dev.yaml
62 | 
63 | static/img/12hourchart.png
64 | static/img/24hourchart.png
65 | static/img/4hourchart.png
66 | static/img/8hourchart.png
67 | 
68 | .noseids
69 | 
--------------------------------------------------------------------------------
/docs/example.py:
--------------------------------------------------------------------------------
1 | # vim: set expandtab ts=4 sw=4 filetype=python fileencoding=utf8:
2 | 
3 | import logging
4 | import logging.config
5 | 
6 | log = logging.getLogger("logtopg.example")
7 | 
8 | if __name__ == "__main__":
9 | 
10 |     # These need to be correct, so you'll likely need to change them.
11 |     db_credentials = {
12 |         "database": "logtopg",
13 |         "host": "localhost",
14 |         "user": "logtopg",
15 |         "password": "l0gt0pg"}
16 | 
17 |     d = dict({
18 |         'disable_existing_loggers': False,
19 | 
20 |         'handlers': {
21 | 
22 |             'pg': {
23 |                 'class': 'logtopg.PGHandler',
24 |                 'level': 'DEBUG',
25 |                 'log_table_name': 'logtopg_example',
26 | 
27 |                 # dictConfig passes every other key in here to
28 |                 # PGHandler.__init__ as a keyword argument.
29 |                 'database': db_credentials["database"],
30 |                 'host': db_credentials["host"],
31 |                 'user': db_credentials["user"],
32 |                 'password': db_credentials["password"]},
33 | 
34 |             "console": {
35 |                 "class": "logging.StreamHandler",
36 |                 "level": "DEBUG"
37 |             }},
38 | 
39 |         'root': {
40 |             'handlers': ["console", 'pg'],
41 |             'level': 'DEBUG'},
42 | 
43 |         'version': 1})
44 | 
45 |     logging.config.dictConfig(d)
46 | 
47 |     log.debug("debug!")
48 |     log.info("info!")
49 |     log.warning("warn!")
50 |     log.error("error!")
51 |     log.critical("critical!")
52 | 
53 | 
54 | 
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | +++++++++++++++++
2 | Log to PostgreSQL
3 | +++++++++++++++++
4 | 
5 | .. image:: https://travis-ci.org/216software/logtopg.svg?branch=master
6 |     :target: https://travis-ci.org/216software/logtopg
7 | 
8 | .. image:: https://circleci.com/gh/216software/logtopg.png?circle-token=389fee16249541b4b1df6e8a7f8edb1401be66de
9 |     :target: https://circleci.com/gh/216software/logtopg
10 | 
11 | Install
12 | =======
13 | 
14 | Grab the code with pip::
15 | 
16 |     $ pip install logtopg
17 | 
18 | But you also have to install the ltree contrib module into your
19 | database::
20 | 
21 |     $ sudo -u postgres psql -d yourdatabase -c "create extension ltree;"
22 | 
23 | Try it out
24 | ==========
25 | 
26 | The code in `docs/example.py`_ shows how to set up your logging configs
27 | with this handler.
28 | 
29 | .. _`docs/example.py`: https://github.com/216software/logtopg/blob/master/docs/example.py
30 | 
31 | .. include:: docs/example.py
32 |     :number-lines:
33 |     :code: python
34 | 
35 | 
36 | Contribute to logtopg
37 | =====================
38 | 
39 | Get a copy of the code::
40 | 
41 |     $ git clone --origin github https://github.com/216software/logtopg.git
42 | 
43 | Install it like this::
44 | 
45 |     $ cd logtopg
46 |     $ pip install -e .
47 | 
48 | Create test user and test database::
49 | 
50 |     $ sudo -u postgres createuser logtopg
51 |     $ sudo -u postgres createdb --owner logtopg logtopg_tests
52 |     $ sudo -u postgres psql -c "create extension ltree;" -d logtopg_tests
53 | 
54 | Then run the tests like this::
55 | 
56 |     $ python setup.py --quiet test
57 |     .....
58 |     ----------------------------------------------------------------------
59 |     Ran 5 tests in 0.379s
60 | 
61 |     OK
62 | 
63 | Hopefully it works!
64 | 
65 | 
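Query the logs
==============

Once the example has logged a few messages, you can read them back with
any PostgreSQL client. Here is a rough sketch with psycopg2 -- the table
name and credentials below come from the handler config in
docs/example.py, so substitute whatever you configured::

    import psycopg2

    pgconn = psycopg2.connect(
        database="logtopg", host="localhost",
        user="logtopg", password="l0gt0pg")

    cursor = pgconn.cursor()

    cursor.execute("""
        select inserted, log_level, logger_name, message
        from logtopg_example
        order by inserted desc
        limit 10
        """)

    for row in cursor.fetchall():
        print(row)
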
66 | Stuff to do
67 | ===========
68 | 
69 | * Fill out classifiers in setup.py.
70 | 
71 | * Somehow block updates to the table. Maybe a trigger is the right
72 |   way. Maybe there's a much simpler trick that I'm not aware of.
73 | 
74 | * Create a few views for typical queries.
75 | 
76 | * Test performance with many connected processes and tons of logging
77 |   messages. Make sure that logging doesn't compete with real
78 |   application work for database resources. Is there a way to say
79 |   something like
80 | 
81 |   "Hey postgresql, take your time with this stuff, and deal with
82 |   other stuff first!"
83 | 
84 |   In other words, a "nice" command for queries.
85 | 
86 | * Allow people to easily write their own SQL to create the logging
87 |   table and to insert records to it. The queries could be returned
88 |   from properties, so people would just need to subclass the PGHandler
89 |   and then redefine those properties.
90 | 
91 | * Write some documentation:
92 | 
93 |   * installation
94 |   * typical queries
95 |   * tweak log table columns or indexes
96 |   * discuss performance issues
97 | 
98 | * Set up a readthedocs page for logtopg for that documentation.
99 | 
100 | * Experiment with what happens when the emit(...) function call takes
101 |   a long time. For example, say somebody is logging to a PG server
102 |   across the internet: will calls to log.debug(...) slow down the
103 |   local app? I imagine so.
104 | 
105 | * I just found out that the ltree column type (which I use for logger
106 |   names) cannot handle logger names like "dazzle.insert-stuff". That
107 |   dash in there is invalid syntax.
108 | 
109 |   I hope there is a way to raise an exception as soon as somebody uses
110 |   an invalid logger name.
111 | 
112 |   Or maybe I need to convert the invalid name to a valid one, perhaps
113 |   by substituting certain characters with something else.
114 | 
115 | * Set up table partitioning so that when there are millions of logs,
116 |   they are dealt with sanely.
117 | 
118 |   This is a query that shows logs by day and log level::
119 | 
120 |       select to_char(date_trunc('day', inserted), 'YYYY-MM-DD'),
121 |           log_level, count(*)
122 | 
123 |       from dazzlelogs
124 | 
125 |       group by 1, 2
126 | 
127 |       order by 1, 2;
128 | 
129 | ..
vim: set syntax=rst: 130 | -------------------------------------------------------------------------------- /logtopg/__init__.py: -------------------------------------------------------------------------------- 1 | # vim: set expandtab ts=4 sw=4 filetype=python fileencoding=utf8: 2 | 3 | 4 | import logging 5 | import os 6 | import subprocess 7 | import textwrap 8 | import traceback 9 | import warnings 10 | 11 | import pkg_resources 12 | import psycopg2 13 | from psycopg2.extensions import adapt 14 | 15 | from logtopg.version import __version__ 16 | 17 | log = logging.getLogger(__name__) 18 | 19 | class PGHandler(logging.Handler): 20 | 21 | def __init__(self, log_table_name, 22 | database, 23 | user=None, 24 | password=None, 25 | host=None, 26 | port=5432): 27 | 28 | logging.Handler.__init__(self) 29 | 30 | self.log_table_name = log_table_name 31 | 32 | self.database = database 33 | self.host = host 34 | self.user = user 35 | self.password = password 36 | self.port = port 37 | 38 | self.pgconn = None 39 | self.create_table_sql = None 40 | self.insert_row_sql = None 41 | 42 | def check_if_log_table_exists(self): 43 | 44 | pgconn = self.get_pgconn() 45 | 46 | cursor = pgconn.cursor() 47 | 48 | cursor.execute(""" 49 | select exists( 50 | select * 51 | from information_schema.tables 52 | where table_name = %s) 53 | """, [self.log_table_name]) 54 | 55 | return cursor.fetchone()[0] 56 | 57 | def maybe_create_table(self): 58 | 59 | if not self.check_if_log_table_exists(): 60 | 61 | create_table_sql = self.get_create_table_sql() 62 | 63 | out = run_sql_commands(create_table_sql, self.user, self.password, 64 | self.host, self.port, self.database) 65 | 66 | log.info("Created log table {0}.".format(self.log_table_name)) 67 | 68 | def get_pgconn(self): 69 | 70 | if not self.pgconn: 71 | self.make_pgconn() 72 | 73 | return self.pgconn 74 | 75 | def make_pgconn(self): 76 | 77 | self.pgconn = psycopg2.connect( 78 | database=self.database, 79 | host=self.host, 80 | user=self.user, 81 | password=self.password, 82 | port=self.port) 83 | 84 | self.pgconn.autocommit = True 85 | 86 | log.info("Just made an autocommitting database connection: {0}.".format( 87 | self.pgconn)) 88 | 89 | def get_create_table_sql(self): 90 | 91 | if not self.create_table_sql: 92 | 93 | s = \ 94 | pkg_resources.resource_string( 95 | "logtopg", "createtable.sql")\ 96 | .decode("utf-8")\ 97 | .format(self.log_table_name) 98 | 99 | self.create_table_sql = s.encode("utf-8") 100 | 101 | return self.create_table_sql 102 | 103 | def get_insert_row_sql(self): 104 | 105 | """ 106 | Cache the insert query (with placeholder parameters) in memory 107 | so that every log.... call doesn't do file IO. 
108 | """ 109 | 110 | if not self.insert_row_sql: 111 | 112 | self.insert_row_sql = \ 113 | pkg_resources.resource_string( 114 | "logtopg", "insertrow.sql")\ 115 | .decode("utf-8")\ 116 | .format(self.log_table_name) 117 | 118 | return self.insert_row_sql 119 | 120 | def build_d(self, record_dict): 121 | 122 | d = record_dict 123 | 124 | # Catch messages that can't be adapted as-is, and convert it to 125 | # strings 126 | try: 127 | d["msg"] = adapt(record_dict["msg"]) 128 | 129 | except Exception as ex: 130 | d["msg"] = str(record_dict["msg"]) 131 | 132 | return d 133 | 134 | def emit(self, record): 135 | 136 | self.format(record) 137 | 138 | if record.exc_info: 139 | record.exc_text = logging._defaultFormatter.formatException(record.exc_info) 140 | 141 | else: 142 | record.exc_text = "" 143 | 144 | if isinstance(record.msg, Exception): 145 | record.msg = str(record.msg) 146 | 147 | pgconn = self.get_pgconn() 148 | 149 | self.maybe_create_table() 150 | 151 | cursor = pgconn.cursor() 152 | 153 | cursor.execute( 154 | self.get_insert_row_sql(), 155 | self.build_d(record.__dict__)) 156 | 157 | 158 | example_dict_config = dict({ 159 | 160 | "loggers": { 161 | "logtopg": { 162 | # "handlers": ["pg", "console"], 163 | "handlers": ["console"], 164 | "level": "DEBUG", 165 | } 166 | }, 167 | 168 | 'handlers': { 169 | 'pg': { 170 | 'class': 'logtopg.PGHandler', 171 | 'level': 'DEBUG', 172 | 'log_table_name': 'logtopg_logs', 173 | 174 | "database":"logtopg", 175 | "host":"localhost", 176 | "user":"logtopg", 177 | "password":"l0gt0pg", 178 | }, 179 | 180 | "console": { 181 | "class": "logging.StreamHandler", 182 | "level": "DEBUG", 183 | "formatter": "consolefmt", 184 | }, 185 | 186 | }, 187 | 188 | "formatters": { 189 | "consolefmt":{ 190 | "format": '%(asctime)-22s [%(process)d] %(name)-30s %(lineno)-5d %(levelname)-8s %(message)s', 191 | }, 192 | }, 193 | 194 | # Any handlers attached to root get log messages from EVERYTHING, 195 | # like third-party modules, etc. 196 | 'root': { 197 | 'handlers': ["pg"], 198 | 'level': 'DEBUG', 199 | }, 200 | 201 | 'version': 1, 202 | 203 | # This is important! Without it, any log instances created before 204 | # you run logging.config.dictConfig(...) will be disabled, which 205 | # means all the global log objects in all the various imported files 206 | # won't do anything. 207 | 'disable_existing_loggers': False, 208 | }) 209 | 210 | def run_sql_commands(sql_text, user, password, host, port, database): 211 | 212 | """ 213 | Run a whole bunch of SQL commands. This is nice when you have a 214 | script with more than one statement in it. 215 | 216 | Don't pass me the path to a SQL script file! Instead, give me the 217 | sql text after you read it in from a file. 218 | """ 219 | 220 | env = os.environ.copy() 221 | 222 | if password: 223 | env['PGPASSWORD'] = password 224 | 225 | # Feed the sql_text to psql's stdin. 
226 |     # http://stackoverflow.com/questions/163542/python-how-do-i-pass-a-string-into-subprocess-popen-using-the-stdin-argument
227 | 
228 |     stuff = [
229 |         "psql",
230 |         "--quiet",
231 |         "--no-psqlrc",
232 |         "-d",
233 |         database,
234 |         "--single-transaction",
235 |     ]
236 | 
237 |     if user:
238 |         stuff.append("-U")
239 |         stuff.append(user)
240 | 
241 |     if host:
242 |         stuff.append("-h")
243 |         stuff.append(host)
244 | 
245 |     if port:
246 |         stuff.append("-p")
247 |         stuff.append(str(port))
248 | 
249 |     p = subprocess.Popen(
250 |         stuff,
251 |         stdin=subprocess.PIPE,
252 |         env=env)
253 | 
254 |     out = p.communicate(input=sql_text)
255 | 
256 |     return out
257 | 
--------------------------------------------------------------------------------
/logtopg/tests/test_logtopg.py:
--------------------------------------------------------------------------------
1 | # vim: set expandtab ts=4 sw=4 filetype=python fileencoding=utf8:
2 | 
3 | import logging
4 | import logging.config
5 | import os
6 | import unittest
7 | 
8 | import logtopg
9 | import psycopg2
10 | 
11 | testing_dict_config = dict({
12 | 
13 |     "loggers": {
14 |         "logtopg": {
15 |             "handlers": ["pg"],
16 |             "level": "DEBUG",
17 |         }
18 |     },
19 | 
20 |     'handlers': {
21 |         'pg': {
22 |             'class': 'logtopg.PGHandler',
23 |             'level': 'DEBUG',
24 |             'log_table_name': 'logtopg_tests',
25 |             "database": "logtopg_tests",
26 |         },
27 | 
28 |         "console": {
29 |             "class": "logging.StreamHandler",
30 |             "level": "DEBUG",
31 |         },
32 | 
33 |     },
34 | 
35 |     # 'root': {
36 |     #     'handlers': ["console"],
37 |     #     'level': 'DEBUG'},
38 | 
39 |     'version': 1,
40 | 
41 |     # This is important! Without it, any log instances created before
42 |     # you run logging.config.dictConfig(...) will be disabled.
43 |     'disable_existing_loggers': False,
44 | })
45 | 
46 | class Test1(unittest.TestCase):
47 | 
48 |     """
49 |     This depends on a real postgresql database. I'll create a table and
50 |     then drop it.
51 |     """
52 | 
53 |     d = testing_dict_config
54 |     log_table_name = d["handlers"]["pg"]["log_table_name"]
55 |     database = d["handlers"]["pg"]["database"]
56 |     user = d["handlers"]["pg"].get("user")
57 |     password = d["handlers"]["pg"].get("password")
58 |     host = d["handlers"]["pg"].get("host")
59 | 
60 |     db_credentials = dict(
61 |         user=user,
62 |         password=password,
63 |         host=host,
64 |         database=database,
65 |     )
66 | 
67 |     def setUp(self):
68 | 
69 |         logging.config.dictConfig(self.d)
70 | 
71 |         self.log = logging.getLogger("logtopg.tests")
72 | 
73 |         self.ltpg = logtopg.PGHandler(
74 |             self.log_table_name,
75 |             database=self.database,
76 |             user=self.user,
77 |             password=self.password,
78 |             host=self.host)
79 | 
80 |         # Make a separate database connection to check results in
81 |         # database.
82 |         self.test_pgconn = psycopg2.connect(**self.db_credentials)
83 | 
84 |     def test_1(self):
85 | 
86 |         """
87 |         Verify we only read sql files once each.
88 |         """
89 | 
90 |         self.assertTrue(self.ltpg.create_table_sql is None)
91 | 
92 |         s1 = self.ltpg.get_create_table_sql()
93 | 
94 |         self.assertTrue(isinstance(self.ltpg.create_table_sql, bytes))
95 | 
96 |         s2 = self.ltpg.get_create_table_sql()
97 | 
98 |         self.assertTrue(s1 is s2)
99 | 
100 |         self.ltpg.get_insert_row_sql()
101 | 
102 | 
103 |     def test_2(self):
104 | 
105 |         """
106 |         Verify we make only one database connection in an instance.
107 | """ 108 | 109 | ltpg = logtopg.PGHandler( 110 | self.log_table_name, 111 | **self.db_credentials) 112 | 113 | self.assertTrue(ltpg.pgconn is None) 114 | 115 | conn1 = ltpg.get_pgconn() 116 | 117 | self.assertTrue(ltpg.pgconn) 118 | 119 | conn2 = ltpg.get_pgconn() 120 | 121 | self.assertTrue(conn1 is conn2) 122 | 123 | 124 | def test_3(self): 125 | 126 | """ 127 | Verify we can create the log table. 128 | """ 129 | 130 | ltpg = logtopg.PGHandler( 131 | self.log_table_name, 132 | **self.db_credentials) 133 | 134 | ltpg.maybe_create_table() 135 | 136 | # Now, verify the table exists. 137 | cursor = ltpg.pgconn.cursor() 138 | 139 | cursor.execute(""" 140 | select exists( 141 | select * 142 | from information_schema.tables 143 | where table_name = %s) 144 | """, [self.log_table_name]) 145 | 146 | row = cursor.fetchone() 147 | 148 | self.assertTrue(row[0], ) 149 | 150 | # Subsequent calls to maybe_create_table should be harmless and 151 | # nearly instantaneous. 152 | ltpg.maybe_create_table() 153 | ltpg.maybe_create_table() 154 | ltpg.maybe_create_table() 155 | 156 | ltpg.pgconn.rollback() 157 | 158 | 159 | def test_4(self): 160 | 161 | """ 162 | Verify log messages are stored in the database. 163 | """ 164 | 165 | logging.config.dictConfig(self.d) 166 | 167 | log1 = logging.getLogger("logtopg.tests") 168 | log2 = logging.getLogger("logtopg.tests") 169 | log3 = logging.getLogger("logtopg.tests") 170 | log4 = logging.getLogger("logtopg.tests") 171 | 172 | log = logging.getLogger("logtopg.tests") 173 | 174 | log.debug("debug!") 175 | log.info("info!") 176 | log.warning("warning!") 177 | log.error("error!") 178 | log.critical("critical!") 179 | 180 | # Now check that those logs are actually in the database. 181 | cursor = self.test_pgconn.cursor() 182 | 183 | cursor.execute( 184 | """ 185 | select message 186 | from {} 187 | where process_id = %s 188 | """.format(self.log_table_name), [os.getpid()]) 189 | 190 | counted_rows = cursor.rowcount 191 | 192 | self.test_pgconn.rollback() 193 | 194 | # There should be 7 logs in the database with this process's ID. 195 | # Those 7 are the five above and the two connection logs. 196 | self.assertEqual(counted_rows, 7) 197 | 198 | 199 | def test_5(self): 200 | 201 | """ 202 | Verify different logger instances use a single database 203 | connection. 204 | """ 205 | 206 | logging.config.dictConfig(self.d) 207 | 208 | log1 = logging.getLogger("logtopg.tests.a") 209 | log1.debug("trying this guy out") 210 | 211 | log2 = logging.getLogger("logtopg.tests.b") 212 | log2.debug("trying this guy out") 213 | 214 | def test_6(self): 215 | 216 | """ 217 | Log an exception to the database. 218 | """ 219 | 220 | logging.config.dictConfig(self.d) 221 | log = logging.getLogger("logtopg.tests.tests_6") 222 | 223 | try: 224 | 225 | 1/0 226 | 227 | except Exception as ex: 228 | 229 | log.exception(ex) 230 | 231 | log.debug(AttributeError("This is a bogus exception")) 232 | 233 | def test_7(self): 234 | 235 | """ 236 | Log something that can't be adapted to the database. 
237 | """ 238 | 239 | logging.config.dictConfig(self.d) 240 | log = logging.getLogger("logtopg.tests.tests_7") 241 | 242 | class Unadaptable(object): 243 | pass 244 | 245 | u = Unadaptable() 246 | 247 | log.debug("u is a {0}.".format(u)) 248 | log.debug(u) 249 | log.debug(dict(u=u)) 250 | 251 | def tearDown(self): 252 | 253 | self.test_pgconn.rollback() 254 | 255 | cursor = self.test_pgconn.cursor() 256 | 257 | cursor.execute( 258 | "drop table if exists {0}".format( 259 | Test1.log_table_name)) 260 | 261 | self.test_pgconn.commit() 262 | 263 | 264 | def tearDownModule(): 265 | 266 | pgconn = psycopg2.connect(**Test1.db_credentials) 267 | 268 | cursor = pgconn.cursor() 269 | 270 | cursor.execute( 271 | "drop table if exists {0}".format( 272 | Test1.log_table_name)) 273 | 274 | pgconn.commit() 275 | 276 | 277 | if __name__ == "__main__": 278 | unittest.main() 279 | --------------------------------------------------------------------------------