├── .travis.yml ├── .gitignore ├── README.rst ├── setup.py └── mrfh ├── __init__.py └── tests └── stresstest.py /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | install: python setup.py install 5 | script: python setup.py test 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | 61 | test 62 | env 63 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | mrfh (Multiprocess Rotating File Handler) 2 | ========================================= 3 | 4 | .. image:: https://travis-ci.org/di/mrfh.svg?branch=master 5 | :target: https://travis-ci.org/di/mrfh 6 | 7 | Description 8 | ----------- 9 | 10 | The `MultiprocessRotatingFileHandler` is a drop-in replacement for the 11 | `logging` modules's `RotatingFileHandler 12 | `__ 13 | which provides a process-safe rotating log file handler using file-based locks. 14 | 15 | Documentation 16 | ------------- 17 | 18 | Installation 19 | ~~~~~~~~~~~~ 20 | 21 | Installing: 22 | 23 | :: 24 | 25 | $ pip install mrfh 26 | 27 | Quickstart 28 | ~~~~~~~~~~ 29 | 30 | Where you once had: 31 | 32 | .. code:: python 33 | 34 | from logging.handlers import RotatingFileHandler 35 | 36 | logger = logging.getLogger('my_logger') 37 | handler = RotatingFileHandler('my_log.log', maxBytes=2000, backupCount=10) 38 | logger.addHandler(handler) 39 | 40 | logger.debug('Some debug message!') 41 | 42 | You can now have: 43 | 44 | .. code:: python 45 | 46 | from mrfh import MultiprocessRotatingFileHandler 47 | 48 | logger = logging.getLogger('my_logger') 49 | handler = MultiprocessRotatingFileHandler('my_log.log', maxBytes=2000, backupCount=10) 50 | logger.addHandler(handler) 51 | 52 | logger.debug('Some debug message!') 53 | 54 | Your rotating file handler is now process-safe! 55 | 56 | Testing 57 | ~~~~~~~ 58 | 59 | To run the tests: 60 | 61 | :: 62 | 63 | python setup.py test 64 | 65 | Authors 66 | ------- 67 | 68 | - `Dustin Ingram `__ 69 | 70 | Credits 71 | ------- 72 | 73 | Roughly based on the defunct `ConcurrentLogHandler 74 | `__. 75 | 76 | License 77 | ------- 78 | 79 | Open source MIT license. 80 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import sys, os 4 | from setuptools import setup, find_packages, Command 5 | from setuptools.command.test import test as TestCommand 6 | 7 | version = '0.0.1' 8 | 9 | 10 | class StressTest(TestCommand): 11 | 12 | def initialize_options(self): 13 | TestCommand.initialize_options(self) 14 | 15 | def finalize_options(self): 16 | TestCommand.finalize_options(self) 17 | self.test_args = [] 18 | self.test_suite = True 19 | 20 | def run_tests(self): 21 | os.system('rm -rf test') 22 | errno = os.system('python mrfh/tests/stresstest.py') 23 | sys.exit(errno) 24 | 25 | 26 | def readme(): 27 | with open('README.rst') as f: 28 | return f.read() 29 | 30 | 31 | setup( 32 | name='mrfh', 33 | version=version, 34 | description="Multiprocess Rotating File Handler", 35 | classifiers=[ 36 | "Development Status :: 4 - Beta", 37 | 'Environment :: Console', 38 | 'Natural Language :: English', 39 | "License :: OSI Approved :: MIT License", 40 | "Operating System :: OS Independent", 41 | "Programming Language :: Python", 42 | "Programming Language :: Python :: 2", 43 | "Programming Language :: Python :: 2.6", 44 | "Programming Language :: Python :: 2.7", 45 | 'Programming Language :: Python :: 3', 46 | 'Programming Language :: Python :: 3.0', 47 | 'Programming Language :: Python :: 3.1', 48 | 'Programming Language :: Python :: 3.2', 49 | 'Programming Language :: Python :: 3.3', 50 | 'Programming Language :: Python :: 3.4', 51 | "Intended Audience :: Developers", 52 | ], 53 | keywords='multi process rotating file handler concurrent multiprocess', 54 | author='Dustin Ingram', 55 | author_email='github@dustingram.com', 56 | url='http://github.com/di/mrfh', 57 | license='MIT', 58 | long_description=readme(), 59 | packages=find_packages(exclude=['examples', 'tests']), 60 | include_package_data=True, 61 | zip_safe=False, 62 | install_requires=[], 63 | cmdclass={'test': StressTest}, 64 | ) 65 | -------------------------------------------------------------------------------- /mrfh/__init__.py: -------------------------------------------------------------------------------- 1 | import fcntl 2 | from logging import Handler 3 | from logging.handlers import BaseRotatingHandler 4 | import os 5 | 6 | 7 | class MultiprocessRotatingFileHandler(BaseRotatingHandler): 8 | def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, 9 | encoding=None): 10 | BaseRotatingHandler.__init__(self, filename, mode, encoding) 11 | self.maxBytes = maxBytes 12 | self.backupCount = backupCount 13 | head, tail = os.path.split(filename) 14 | self.stream_lock = open("{}/.{}.lock".format(head, tail), "w") 15 | 16 | def _openFile(self, mode): 17 | self.stream = open(self.baseFilename, mode) 18 | 19 | def acquire(self): 20 | Handler.acquire(self) 21 | fcntl.flock(self.stream_lock, fcntl.LOCK_EX) 22 | if self.stream.closed: 23 | self._openFile(self.mode) 24 | 25 | def release(self): 26 | if not self.stream.closed: 27 | self.stream.flush() 28 | if not self.stream_lock.closed: 29 | fcntl.flock(self.stream_lock, fcntl.LOCK_UN) 30 | Handler.release(self) 31 | 32 | def close(self): 33 | if not self.stream.closed: 34 | self.stream.flush() 35 | self.stream.close() 36 | if not self.stream_lock.closed: 37 | self.stream_lock.close() 38 | Handler.close(self) 39 | 40 | def flush(self): 41 | pass 42 | 43 | def doRollover(self): 44 | self.stream.close() 45 | if self.backupCount <= 0: 46 | self._openFile(self.mode) 47 | return 48 | try: 49 | tmpname = "{}.rot.{}".format(self.baseFilename, os.getpid()) 50 | os.rename(self.baseFilename, tmpname) 51 | for i in range(self.backupCount - 1, 0, -1): 52 | sfn = "%s.%d" % (self.baseFilename, i) 53 | dfn = "%s.%d" % (self.baseFilename, i + 1) 54 | if os.path.exists(sfn): 55 | if os.path.exists(dfn): 56 | os.remove(dfn) 57 | os.rename(sfn, dfn) 58 | dfn = self.baseFilename + ".1" 59 | if os.path.exists(dfn): 60 | os.remove(dfn) 61 | os.rename(tmpname, dfn) 62 | finally: 63 | self._openFile(self.mode) 64 | 65 | def shouldRollover(self, record): 66 | def _shouldRollover(): 67 | if self.maxBytes > 0: 68 | if self.stream.tell() >= self.maxBytes: 69 | return True 70 | return False 71 | 72 | if _shouldRollover(): 73 | self.stream.close() 74 | self._openFile(self.mode) 75 | return _shouldRollover() 76 | return False 77 | -------------------------------------------------------------------------------- /mrfh/tests/stresstest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ stresstest.py: A stress-tester for MultiprocessRotatingFileHandler 3 | 4 | This utility spawns a bunch of processes that all try to concurrently write to 5 | the same file. This is pretty much the worst-case scenario for my log handler. 6 | Once all of the processes have completed writing to the log file, the output is 7 | compared to see if any log messages have been lost. 8 | 9 | """ 10 | 11 | import os 12 | import sys 13 | from subprocess import call, Popen, STDOUT 14 | from time import sleep 15 | 16 | ROTATE_COUNT = 5000 17 | 18 | # local lib; for testing 19 | from mrfh import MultiprocessRotatingFileHandler 20 | 21 | class RotateLogStressTester: 22 | def __init__(self, sharedfile, uniquefile, name="LogStressTester"): 23 | self.sharedfile = sharedfile 24 | self.uniquefile = uniquefile 25 | self.name = name 26 | self.writeLoops = 100000 27 | self.rotateSize = 128 * 1024 28 | self.rotateCount = ROTATE_COUNT 29 | self.random_sleep_mode = False 30 | self.debug = False 31 | 32 | def getLogHandler(self, fn): 33 | """ Override this method if you want to test a different logging handler 34 | class. """ 35 | return MultiprocessRotatingFileHandler(fn, 'a', self.rotateSize, 36 | self.rotateCount) 37 | # To run the test with the standard library's RotatingFileHandler: 38 | # from logging.handlers import RotatingFileHandler 39 | # return RotatingFileHandler(fn, 'a', self.rotateSize, self.rotateCount) 40 | 41 | def start(self): 42 | from logging import getLogger, FileHandler, Formatter, DEBUG 43 | self.log = getLogger(self.name) 44 | self.log.setLevel(DEBUG) 45 | 46 | formatter = Formatter('%(asctime)s [%(process)d] %(levelname)-8s %(name)s: %(message)s') 47 | # Unique log handler (single file) 48 | handler = FileHandler(self.uniquefile, "w") 49 | handler.setLevel(DEBUG) 50 | handler.setFormatter(formatter) 51 | self.log.addHandler(handler) 52 | 53 | # If you suspect that the diff stuff isn't working, un comment the next 54 | # line. You should see this show up once per-process. 55 | # self.log.info("Here is a line that should only be in the first output.") 56 | 57 | # Setup output used for testing 58 | handler = self.getLogHandler(self.sharedfile) 59 | handler.setLevel(DEBUG) 60 | handler.setFormatter(formatter) 61 | self.log.addHandler(handler) 62 | 63 | # If this ever becomes a real "Thread", then remove this line: 64 | self.run() 65 | 66 | def run(self): 67 | c = 0 68 | from random import choice, randint 69 | # Use a bunch of random quotes, numbers, and severity levels to mix it up a bit! 70 | msgs = ["I found %d puppies", "There are %d cats in your hatz", 71 | "my favorite number is %d", "I am %d years old.", "1 + 1 = %d", 72 | "%d/0 = DivideByZero", "blah! %d thingies!", "8 15 16 23 48 %d", 73 | "the worlds largest prime number: %d", "%d happy meals!"] 74 | logfuncts = [self.log.debug, self.log.info, self.log.warn, self.log.error] 75 | 76 | self.log.info("Starting to write random log message. Loop=%d", self.writeLoops) 77 | while c <= self.writeLoops: 78 | c += 1 79 | msg = choice(msgs) 80 | logfunc = choice(logfuncts) 81 | logfunc(msg, randint(0,99999999)) 82 | 83 | if self.random_sleep_mode and c % 1000 == 0: 84 | # Sleep from 0-15 seconds 85 | s = randint(1,15) 86 | print("PID %d sleeping for %d seconds" % (os.getpid(), s)) 87 | sleep(s) 88 | # break 89 | self.log.info("Done witting random log messages.") 90 | 91 | def iter_lognames(logfile, count): 92 | """ Generator for log file names based on a rotation scheme """ 93 | for i in range(count -1, 0, -1): 94 | yield "%s.%d" % (logfile, i) 95 | yield logfile 96 | 97 | def iter_logs(iterable, missing_ok=False): 98 | """ Generator to extract log entries from shared log file. """ 99 | for fn in iterable: 100 | if os.path.exists(fn): 101 | for line in open(fn): 102 | yield line 103 | elif not missing_ok: 104 | raise ValueError("Missing log file %s" % fn) 105 | 106 | def combine_logs(combinedlog, iterable, mode="w"): 107 | """ write all lines (iterable) into a single log file. """ 108 | fp = open(combinedlog, mode) 109 | for chunk in iterable: 110 | fp.write(chunk) 111 | fp.close() 112 | 113 | 114 | 115 | from optparse import OptionParser 116 | parser = OptionParser(usage="usage: %prog", 117 | description="Stress test the MRFH module.") 118 | parser.add_option("--log-calls", metavar="NUM", 119 | action="store", type="int", default=50000, 120 | help="Number of logging entries to write to each log file. " 121 | "Default is %default") 122 | parser.add_option("--random-sleep-mode", 123 | action="store_true", default=False) 124 | parser.add_option("--debug", 125 | action="store_true", default=False) 126 | 127 | 128 | def main_client(args): 129 | (options, args) = parser.parse_args(args) 130 | if len(args) != 2: 131 | raise ValueError("Require 2 arguments. We have %d args" % len(args)) 132 | (shared, client) = args 133 | 134 | if os.path.isfile(client): 135 | sys.stderr.write("Already a client using output file %s\n" % client) 136 | sys.exit(1) 137 | tester = RotateLogStressTester(shared, client) 138 | tester.random_sleep_mode = options.random_sleep_mode 139 | tester.debug = options.debug 140 | tester.writeLoops = options.log_calls 141 | tester.start() 142 | print("We are done pid=%d" % os.getpid()) 143 | 144 | 145 | 146 | class TestManager: 147 | class ChildProc(object): 148 | """ Very simple child container class.""" 149 | __slots__ = [ "popen", "sharedfile", "clientfile" ] 150 | def __init__(self, **kwargs): 151 | self.update(**kwargs) 152 | def update(self, **kwargs): 153 | for key, val in kwargs.items(): 154 | setattr(self, key, val) 155 | 156 | def __init__(self): 157 | self.tests = [] 158 | 159 | def launchPopen(self, *args, **kwargs): 160 | proc = Popen(*args, **kwargs) 161 | cp = self.ChildProc(popen=proc) 162 | self.tests.append(cp) 163 | return cp 164 | 165 | def wait(self, check_interval=3): 166 | """ Wait for all child test processes to complete. """ 167 | print("Waiting while children are out running and playing!") 168 | while True: 169 | sleep(check_interval) 170 | waiting = [] 171 | for cp in self.tests: 172 | if cp.popen.poll() is None: 173 | waiting.append(cp.popen.pid) 174 | if not waiting: 175 | break 176 | print("Waiting on %r " % waiting) 177 | print("All children have stopped.") 178 | 179 | def checkExitCodes(self): 180 | for cp in self.tests: 181 | if cp.popen.poll() != 0: 182 | return False 183 | return True 184 | 185 | 186 | def unified_diff(a,b, out=sys.stdout): 187 | import difflib 188 | empty = True 189 | ai = open(a).readlines() 190 | bi = open(b).readlines() 191 | for line in difflib.unified_diff(ai, bi, a, b): 192 | empty = False 193 | out.write(line) 194 | return empty 195 | 196 | 197 | def main_runner(args): 198 | parser.add_option("--processes", metavar="NUM", 199 | action="store", type="int", default=3, 200 | help="Number of processes to spawn. Default: %default") 201 | parser.add_option("--delay", metavar="secs", 202 | action="store", type="float", default=2.5, 203 | help="Wait SECS before spawning next processes. " 204 | "Default: %default") 205 | parser.add_option("-p", "--path", metavar="DIR", 206 | action="store", default="test", 207 | help="Path to a temporary directory. Default: '%default'") 208 | 209 | 210 | this_script = args[0] 211 | (options, args) = parser.parse_args(args) 212 | options.path = os.path.abspath(options.path) 213 | if not os.path.isdir(options.path): 214 | os.makedirs(options.path) 215 | 216 | manager = TestManager() 217 | shared = os.path.join(options.path, "shared.log") 218 | for client_id in range(options.processes): 219 | client = os.path.join(options.path, "client.log_client%s.log" % client_id) 220 | cmdline = [ sys.executable, this_script, "client", shared, client, 221 | "--log-calls=%d" % options.log_calls ] 222 | if options.random_sleep_mode: 223 | cmdline.append("--random-sleep-mode") 224 | if options.debug: 225 | cmdline.append("--debug") 226 | 227 | child = manager.launchPopen(cmdline) 228 | child.update(sharedfile=shared, clientfile=client) 229 | sleep(options.delay) 230 | 231 | # Wait for all of the subprocesses to exit 232 | manager.wait() 233 | # Check children exit codes 234 | if not manager.checkExitCodes(): 235 | sys.stderr.write("One or more of the child process has failed.\n" 236 | "Aborting test.\n") 237 | sys.exit(2) 238 | 239 | client_combo = os.path.join(options.path, "client.log.combo") 240 | shared_combo = os.path.join(options.path, "shared.log.combo") 241 | 242 | # Combine all of the log files... 243 | client_files = [ child.clientfile for child in manager.tests ] 244 | 245 | if False: 246 | def sort_em(iterable): 247 | return iterable 248 | else: 249 | sort_em = sorted 250 | 251 | print("Writing out combined client logs...") 252 | combine_logs(client_combo, sort_em(iter_logs(client_files))) 253 | print("done.") 254 | 255 | print("Writing out combined shared logs...") 256 | shared_log_files = iter_lognames(shared, ROTATE_COUNT) 257 | log_lines = iter_logs(shared_log_files, missing_ok=True) 258 | combine_logs(shared_combo, sort_em(log_lines)) 259 | print("done.") 260 | 261 | print("Running internal diff:") 262 | empty = unified_diff(client_combo, shared_combo) 263 | if empty: 264 | print("Passed! :)") 265 | sys.exit(0) 266 | else: 267 | print("Failed. :(") 268 | sys.exit(1) 269 | 270 | 271 | if __name__ == '__main__': 272 | if len(sys.argv) > 1 and sys.argv[1].lower() == "client": 273 | main_client(sys.argv[2:]) 274 | else: 275 | main_runner(sys.argv) 276 | --------------------------------------------------------------------------------