├── .gitignore ├── .travis.yml ├── LICENSE.txt ├── README.md ├── __init__.py ├── check_log_ng.py └── test_check_log_ng.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.swp 3 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | - "3.5" 5 | - "3.6" 6 | #install: 7 | # - pip install -r requirements.txt 8 | script: 9 | - PYTHONPATH=. python -m unittest test_check_log_ng 10 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (C) 2014-2018 Takamura Narimichi and Takizawa Takashi All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without modification, 4 | are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, 7 | this list of conditions and the following disclaimer. 8 | 9 | 2. Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the 11 | documentation and/or other materials provided with the distribution. 12 | 13 | THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS "AS IS" AND 14 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 15 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 16 | DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE FOR 17 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 18 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 19 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 20 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 21 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 22 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # check_log_ng 2 | 3 | [![Build Status](https://travis-ci.org/heartbeatsjp/check_log_ng.svg?branch=master)](https://travis-ci.org/heartbeatsjp/check_log_ng) 4 | 5 | A log file regular expression-based parser plugin for Nagios. 6 | 7 | Features are as follows: 8 | 9 | - You can specify the character strings you want to detect with regular expressions. 10 | - You can specify the character strings you do not want to detect with regular expressions. 11 | - You can specify the character encoding of a log file. 12 | - You can check multiple log files at once and also check log-rotated files. 13 | - This script uses seek files which record the position where the last check finished for each log file. With these seek files, only the differences from the last check are scanned. 14 | - You can treat multiple lines outputted at once as one message. 15 | - The result can be cached within a specified time period. This helps when multiple monitoring servers, or multiple check attempts, run the same check. 16 | 17 | Originally, this script was inspired by [`check_log3.pl`](https://exchange.nagios.org/directory/Plugins/Log-Files/check_log3-2Epl/details). 18 | Currently, it has a different set of options.
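Besides the command-line usage shown in the examples below, the plugin can also be imported as a Python module (this is how the test suite uses it). The following is a minimal, illustrative sketch of programmatic use with the `LogChecker` class; the paths, patterns and thresholds are examples only:

~~~python
import sys
from check_log_ng import LogChecker

# Illustrative configuration; keys left out fall back to the class defaults.
config = {
    "state_directory": "/var/spool/check_log_ng",  # where seek/cache/lock files go
    "pattern_list": ["ERROR"],                     # regexes that trigger WARNING
    "critical_pattern_list": [],                   # regexes that trigger CRITICAL
    "warning": 1,
    "critical": 0,
}

checker = LogChecker(config)
checker.check("/var/log/messages")       # scan the log file(s)
print(checker.get_message())             # e.g. "OK - No matches found."
sys.exit(checker.get_state())            # 0=OK, 1=WARNING, 2=CRITICAL, 3=UNKNOWN
~~~

This mirrors what the script's `main()` function does for the command-line interface.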
19 | 20 | ## Examples of usage 21 | 22 | ### Pattern 23 | 24 | To specify the character strings you want to detect, add the `-p` or `-P` option. 25 | 26 | ~~~sh 27 | check_log_ng.py -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/messages' 28 | ~~~ 29 | 30 | Or 31 | 32 | ~~~sh 33 | check_log_ng.py -P /path/to/pattern.txt -S /var/spool/check_log_ng -l '/var/log/messages' 34 | ~~~ 35 | 36 | ~~~sh 37 | $ cat /path/to/pattern.txt 38 | ERROR 39 | FATAL 40 | ~~~ 41 | 42 | ### Negative pattern 43 | 44 | To specify character strings that should not be detected, add the `-n` or `-N` option. 45 | 46 | ~~~sh 47 | check_log_ng.py -p 'ERROR' -n 'no problem' -S /var/spool/check_log_ng -l '/var/log/messages' 48 | ~~~ 49 | 50 | Or 51 | 52 | ~~~sh 53 | check_log_ng.py -P /path/to/pattern.txt -N /path/to/negpattern.txt -S /var/spool/check_log_ng -l '/var/log/messages' 54 | ~~~ 55 | 56 | ~~~sh 57 | $ cat /path/to/negpattern.txt 58 | no problem 59 | information 60 | ~~~ 61 | 62 | ### Case insensitive 63 | 64 | If you want to do a case insensitive scan, add the `-i` option. 65 | 66 | ~~~sh 67 | check_log_ng.py -i -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/messages' 68 | ~~~ 69 | 70 | ### Multiple lines 71 | 72 | When a message is written as multiple lines at once, as in the following example, add the `-M` option. 73 | If the log format is not syslog, you must also add the `-F/--format` option. 74 | 75 | ``` 76 | 2013/12/05 09:36:51,024 jobs-thread-5 ERROR ~ *** Called URI is: https://www.example.com/submit 77 | 2013/12/05 09:36:51,024 jobs-thread-5 ERROR ~ *** Response code is: 500 78 | ``` 79 | 80 | ~~~sh 81 | check_log_ng.py -F '^(%Y/%m/%d\s%T,\d+ \S+ \S+) (.*)$' -M -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/application.log' 82 | ~~~ 83 | 84 | These lines are treated as one message, like the following: 85 | 86 | ``` 87 | 2013/12/05 09:36:51,024 jobs-thread-5 ERROR ~ *** Called URI is: https://www.example.com/submit ~ *** Response code is: 500 88 | ``` 89 | 90 | ### Multiple monitoring items 91 | 92 | If you want to use multiple monitoring items, add the `-T` option to prevent name collisions between seek files. 93 | 94 | ~~~sh 95 | check_log_ng.py -T 'log_error' -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/messages' 96 | ~~~ 97 | 98 | ~~~sh 99 | check_log_ng.py -T 'log_block' -p 'BLOCK' -S /var/spool/check_log_ng -l '/var/log/messages' 100 | ~~~ 101 | 102 | ### Monitoring interval 103 | 104 | If your monitoring interval is 180 seconds, add the `--cachetime=180` option to cache the result within the monitoring interval. 105 | This is useful when multiple monitoring servers run the same check. 106 | 107 | ~~~sh 108 | check_log_ng.py --cachetime=180 -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/messages' 109 | ~~~ 110 | 111 | ### Multiple log files 112 | 113 | If you want to check log-rotated files with file names such as 'messages.N' or 'messages-YYYYMMDD', add the `-I -R` options to trace inode information. 114 | 115 | ~~~sh 116 | check_log_ng.py -I -R -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/messages*' 117 | ~~~ 118 | 119 | If the log rotation period exceeds one week, add the `-E` option. 120 | This value must be longer than the log rotation period. 121 | For a one-month rotation period, you can use `-E 2764800`, which is 32 days in seconds. 122 | 123 | ~~~sh 124 | check_log_ng.py -I -R -E 2764800 -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/messages*' 125 | ~~~ 126 | 127 | ### Suppress output 128 | 129 | If you want to reduce the size of the output by suppressing the matched messages, 130 | add the `-H` or `-q` option.
131 | 132 | #### Example without suppression 133 | 134 | ~~~sh 135 | check_log_ng.py -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/messages' 136 | ~~~ 137 | 138 | Output: 139 | 140 | ~~~ 141 | WARNING: Found 1 lines (limit=1/0): Jul 11 06:44:22 hostname app: ERROR Unable to access to 192.0.2.1 at /var/log/messages 142 | ~~~ 143 | 144 | #### Example using `-H/--output-header` 145 | 146 | ~~~sh 147 | check_log_ng.py -p 'ERROR' -H -S /var/spool/check_log_ng -l '/var/log/messages' 148 | ~~~ 149 | 150 | Output: 151 | 152 | ~~~ 153 | WARNING: Found 1 lines (limit=1/0, HEADER): Jul 11 06:44:22 hostname app: at /var/log/messages 154 | ~~~ 155 | 156 | #### Example using `-q/--quiet` 157 | 158 | ~~~sh 159 | check_log_ng.py -p 'ERROR' -q -S /var/spool/check_log_ng -l '/var/log/messages' 160 | ~~~ 161 | 162 | Output: 163 | 164 | ~~~ 165 | WARNING: Found 1 lines (limit=1/0, QUIET): at /var/log/messages 166 | ~~~ 167 | 168 | ### Dry run 169 | 170 | If you want to do a dry run, add the `--dry-run` option. 171 | The seek files are not updated and the cache file is not used. 172 | If the log format is not correct, an error message is printed. 173 | 174 | ~~~sh 175 | check_log_ng.py --dry-run -p 'ERROR' -S /var/spool/check_log_ng -l '/var/log/messages' 176 | ~~~ 177 | 178 | 179 | ## Requirements 180 | 181 | - Python 2.6, 2.7, 3.5 or 3.6. 182 | - For Python 2.6, the argparse module. 183 | 184 | ## Installation 185 | 186 | Clone a copy of the main `check_log_ng` git repository. 187 | 188 | ~~~sh 189 | $ git clone git@github.com:heartbeatsjp/check_log_ng.git 190 | $ cd check_log_ng 191 | ~~~ 192 | 193 | Add execute permission. 194 | 195 | ~~~sh 196 | $ chmod 755 check_log_ng.py 197 | ~~~ 198 | 199 | Copy this plugin to a nagios-plugins directory. 200 | 201 | ~~~sh 202 | $ sudo cp check_log_ng.py /usr/lib64/nagios/plugins/ 203 | ~~~ 204 | 205 | Create a directory to store the cache file, the lock file and seek files. 206 | 207 | ~~~sh 208 | $ sudo mkdir /var/spool/check_log_ng 209 | ~~~ 210 | 211 | Change the owner of the directory to the user that runs nrpe. 212 | 213 | ~~~sh 214 | $ sudo chown nrpe: /var/spool/check_log_ng 215 | ~~~ 216 | 217 | If root privilege is necessary to read log files, add the following lines to a sudoers file. 218 | 219 | ``` 220 | Defaults:nrpe !requiretty 221 | nagios ALL=(root) NOPASSWD: /usr/lib64/nagios/plugins/check_log_ng.py 222 | ``` 223 | 224 | If you use Python 2.6, install the argparse module. 225 | On RHEL 6/CentOS 6, you can run: 226 | 227 | ~~~sh 228 | $ sudo yum install python-argparse 229 | ~~~ 230 | 231 | Note: By default, `FALLBACK_PATH` is set to a HEARTBEATS-specific value (HEARTBEATS is us!). 232 | If you use `check_log_ng` with Python 2.6 and rely on `FALLBACK_PATH`, 233 | change this value to suit your environment. 234 | 235 | ## Usage 236 | 237 | ### Help 238 | 239 | ``` 240 | usage: check_log_ng.py [options] [-p |-P ] -S -l 241 | 242 | A log file regular expression-based parser plugin for Nagios. 243 | 244 | optional arguments: 245 | -h, --help show this help message and exit 246 | --version show program's version number and exit 247 | --dry-run Do dry run. The seek files are not updated and cache 248 | file is not used. If log format is not correct, it 249 | prints an error message. 250 | -l , --logfile 251 | The file names of log files to be scanned. The 252 | metacharacters * and ? are available. To set multiple 253 | files, set a space between file names. See also 254 | --scantime. 255 | -F , --format 256 | Regular expression for log format.
It requires two 257 | groups in format of '^(HEADER)(.*)$'. HEADER includes 258 | TIMESTAMP, HOSTNAME, TAG and so on. Also, it may use 259 | %%, %Y, %y, %a, %b, %m, %d, %e, %H, %M, %S, %F and %T 260 | of strftime(3). (default: regular expression for 261 | syslog. 262 | -s , --seekfile 263 | Deprecated. Use -S option instead. The file name of 264 | the file to store the seek position of the last scan. 265 | -S , --state-directory , --seekfile-directory 266 | The directory to store seek files, cache file and lock 267 | file. '--seekfile-directory' is for backwards 268 | compatibility. 269 | -T , --tag , --seekfile-tag 270 | Add a tag in the file names of state files, to prevent 271 | names collisions. Useful to avoid maintaining many 272 | '-S' directories when you check the same files several 273 | times with different args. '--seekfile-tag' is for 274 | backwards compatibility. 275 | -I, --trace-inode If set, trace the inode of the log file. After log 276 | rotatation, you can trace the log file. 277 | -p , --pattern 278 | The regular expression to scan for in the log file. 279 | -P , --patternfile 280 | The file name of the file containing regular 281 | expressions, one per line. 282 | --critical-pattern 283 | The regular expression to scan for in the log file. If 284 | found, return CRITICAL. 285 | --critical-patternfile 286 | The file name of the file containing regular 287 | expressions, one per line. If found, return CRITICAL. 288 | -n , --negpattern 289 | The regular expression which all will be skipped 290 | except as critical pattern in the log file. 291 | -N , -f , --negpatternfile 292 | The file name of the file containing regular 293 | expressions which all will be skipped except as 294 | critical pattern, one per line. '-f' is for backwards 295 | compatibility. 296 | --critical-negpattern 297 | The regular expression which all will be skipped in 298 | the log file. 299 | --critical-negpatternfile 300 | The file name of the file containing regular 301 | expressions which all will be skipped, one per line. 302 | -i, --case-insensitive 303 | Do a case insensitive scan. 304 | --encoding 305 | Specify the character encoding in the log file. 306 | (default: utf-8) 307 | -w , --warning 308 | Return WARNING if at least this many matches found. 309 | (default: 1) 310 | -c , --critical 311 | Return CRITICAL if at least this many matches found. 312 | i.e. don't return critical alerts unless specified 313 | explicitly. (default: 0) 314 | -t , --scantime 315 | The range of time to scan. The log files older than 316 | this time are not scanned. (default: 86400) 317 | -E , --expiration 318 | The expiration of seek files. This must be longer than 319 | the log rotation period. The expired seek files are 320 | deleted with -R option. (default: 691200) 321 | -R, --remove-seekfile 322 | Remove expired seek files. See also --expiration. 323 | -M, --multiline Treat multiple lines outputted at once as one message. 324 | If the log format is not syslog, set --format option. 325 | See also --format. 326 | --cachetime 327 | The period to cache the result. To disable this cache 328 | feature, set '0'. (default: 60) 329 | --lock-timeout 330 | The period to wait for if another process is running. 331 | If timeout occurs, UNKNOWN is returned. (default: 3) 332 | -H, --output-header HEADER mode: Suppress the output of the message on 333 | matched lines. Only HEADER(TIMESTAMP, HOSTNAME, TAG 334 | etc) is outputted. If the log format is not syslog, 335 | set --format option. See also --format. 
336 | -q, --quiet QUIET mode: Suppress the output of matched lines. 337 | ``` 338 | 339 | ## Contributing 340 | 341 | If you have a problem, please [create an issue](https://github.com/heartbeatsjp/check_log_ng/issues) or a pull request. 342 | 343 | 1. Fork it 344 | 1. Create your feature branch (git checkout -b my-new-feature) 345 | 1. Commit your changes (git commit -am 'Add some feature') 346 | 1. Push to the branch (git push origin my-new-feature) 347 | 1. Create new Pull Request 348 | 349 | If you debug this script, use -O option. 350 | 351 | ~~~sh 352 | python -O check_log_ng.py ... 353 | ~~~ 354 | 355 | ## License 356 | 357 | [BSD](https://github.com/heartbeatsjp/check_log_ng/blob/master/LICENSE.txt) 358 | 359 | ## Todo 360 | 361 | - improve the current test code coverage 362 | 363 | ---- 364 | 365 | # for Commiters 366 | 367 | ## How to release 368 | 369 | 1. confirm that all tests are green 370 | 1. `git checkout master && git pull` 371 | 1. change `__version__` in `check_log_ng.py` 372 | - based on semantic versioning 373 | 1. `git commit check_log_ng.py -m "version up"` 374 | 1. `git tag -a -m ` (`` is `2.0.8` etc... ) 375 | 1. `git push` 376 | 1. `git push --tags` 377 | 1. confirm that new version is listed on [release page](https://github.com/heartbeatsjp/check_log_ng/releases) 378 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heartbeatsjp/check_log_ng/5c36e1bb87bae66fc249955205f651b0c3526a78/__init__.py -------------------------------------------------------------------------------- /check_log_ng.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """A log file regular expression-based parser plugin for Nagios. 4 | 5 | Features are as follows: 6 | 7 | - You can specify the character string you want to detect with regular 8 | expressions. 9 | - You can specify the character string you do not want to detect with 10 | regular expressions. 11 | - You can specify the character encoding of a log file. 12 | - You can check multiple log files at once and also check log-rotated files. 13 | - This script uses seek files which record the position where the check is 14 | completed for each log file. 15 | With these seek files, you can check only the differences from the last check. 16 | - You can check multiple lines outputted at once as one message. 17 | - The result can be cached within the specified time period. 18 | This will help multiple monitoring servers and multiple attempts. 19 | 20 | This module is available in Python 2.6, 2.7, 3.5, 3.6. 21 | Require argparse module in python 2.6. 
22 | """ 23 | 24 | from __future__ import absolute_import 25 | from __future__ import division 26 | from __future__ import print_function 27 | from __future__ import unicode_literals 28 | import sys 29 | import os 30 | import io 31 | import glob 32 | import time 33 | import re 34 | import hashlib 35 | import base64 36 | import fcntl 37 | import warnings 38 | 39 | FALLBACK_PATH = "/usr/local/hb-agent/bin" 40 | 41 | try: 42 | import argparse 43 | except ImportError as _ex: 44 | if __name__ != "__main__": 45 | raise _ex 46 | if FALLBACK_PATH not in os.environ["PATH"]: 47 | os.environ["PATH"] = ":".join([FALLBACK_PATH, os.environ["PATH"]]) 48 | os.execve(__file__, sys.argv, os.environ) 49 | else: 50 | raise _ex 51 | 52 | # Globals 53 | __version__ = '2.0.8' 54 | 55 | 56 | class LogChecker(object): 57 | """LogChecker.""" 58 | 59 | # Class constant 60 | STATE_OK = 0 61 | STATE_WARNING = 1 62 | STATE_CRITICAL = 2 63 | STATE_UNKNOWN = 3 64 | STATE_DEPENDENT = 4 65 | STATE_NO_CACHE = -1 66 | FORMAT_SYSLOG = ( 67 | r'^((?:%b\s%e\s%T|%FT%T\S*)\s' 68 | r'[-_0-9A-Za-z.]+\s' 69 | r'(?:[^ :\[\]]+(?:\[\d+?\])?:\s)?)' 70 | r'(.*)$') 71 | '''FORMAT_SYSLOG is `^(TIMESTAMP HOSTNAME (TAG )?)(MSG)$`.''' 72 | 73 | _SUFFIX_SEEK = ".seek" 74 | _SUFFIX_SEEK_WITH_INODE = ".inode.seek" 75 | _SUFFIX_CACHE = ".cache" 76 | _SUFFIX_LOCK = ".lock" 77 | _RETRY_PERIOD = 0.5 78 | _LOGFORMAT_EXPANSION_LIST = [ 79 | {'%%': '_PERCENT_'}, 80 | {'%F': '%Y-%m-%d'}, 81 | {'%T': '%H:%M:%S'}, 82 | {'%a': '(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)'}, 83 | {'%b': '(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)'}, 84 | {'%Y': '20[0-9][0-9]'}, 85 | {'%y': '[0-9][0-9]'}, 86 | {'%m': '(?:0[1-9]|1[0-2])'}, 87 | {'%d': '(?:0[1-9]|[12][0-9]|3[01])'}, 88 | {'%e': '(?: [1-9]|[12][0-9]|3[01])'}, 89 | {'%H': '(?:[01][0-9]|2[0-3])'}, 90 | {'%M': '[0-5][0-9]'}, 91 | {'%S': '(?:[0-5][0-9]|60)'}, 92 | {'_PERCENT_': '%'}, 93 | ] 94 | 95 | def __init__(self, config): 96 | """Constructor. 97 | 98 | The keys of configuration parameters are:: 99 | 100 | logformat (str): Regular expression for log format. 101 | state_directory (str): The directory to store seek files, cache 102 | file and lock file. 103 | pattern_list (list): The list of regular expressions to scan for 104 | in the log file. 105 | critical_pattern_list (list): The list of regular expressions to 106 | scan for in the log file. If found, return CRITICAL. 107 | negpattern_list (list): The list of regular expressions which all 108 | will be skipped except as critical pattern in the log file. 109 | critical_negpattern_list (list): The list of regular expressions 110 | which all will be skipped except as critical pattern in the 111 | log file. If found, return CRITICAL. 112 | case_insensitive (bool): Do a case insensitive scan. 113 | encoding (str): Specify the character encoding in the log file. 114 | warning (int): The number of times found that be needed to return WARNING. 115 | critical (int): The number of times found that be needed to return CRITICAL. 116 | trace_inode (bool): Trace the inode of the log file. 117 | multiline (bool): Treat multiple lines outputted at once as one message. 118 | scantime (int): The range of time to scan. 119 | expiration (int): The expiration of seek files. 120 | cachetime (int): The period to cache the result. 121 | lock_timeout (int): The period to wait for if another process is running. 122 | output_header (bool): Suppress the output of the message on matched lines. 123 | quiet (bool): Suppress output of matched lines. 
124 | 125 | Args: 126 | config (dict): The dictionary of configuration parameters. 127 | 128 | """ 129 | # set default value 130 | self.config = {} 131 | self.config['dry_run'] = False 132 | self.config['logformat'] = LogChecker.FORMAT_SYSLOG 133 | self.config['state_directory'] = None 134 | self.config['pattern_list'] = [] 135 | self.config['critical_pattern_list'] = [] 136 | self.config['negpattern_list'] = [] 137 | self.config['critical_negpattern_list'] = [] 138 | self.config['case_insensitive'] = False 139 | self.config['encoding'] = 'utf-8' 140 | self.config['warning'] = 1 141 | self.config['critical'] = 0 142 | self.config['trace_inode'] = False 143 | self.config['multiline'] = False 144 | self.config['scantime'] = 86400 145 | self.config['expiration'] = 691200 146 | self.config['cachetime'] = 60 147 | self.config['lock_timeout'] = 3 148 | self.config['output_header'] = False 149 | self.config['output_quiet'] = False 150 | 151 | # overwrite values with user's values 152 | for key in self.config: 153 | if key not in config: 154 | continue 155 | value = config[key] 156 | if isinstance(value, (bool, int)): 157 | pass 158 | elif isinstance(value, list): 159 | value = [LogChecker.to_unicode(x) for x in value] 160 | else: 161 | # On python 2.x, str, unicode or None reaches. 162 | # On python 3.x, bytes, str or None reaches. 163 | value = LogChecker.to_unicode(value) 164 | self.config[key] = value 165 | 166 | self.pattern_flags = 0 167 | if self.config['case_insensitive']: 168 | self.pattern_flags = re.IGNORECASE 169 | 170 | self.re_logformat = re.compile(LogChecker._expand_logformat_by_strftime( 171 | self.config['logformat'])) 172 | _debug("logformat='{0}'".format(self.re_logformat.pattern)) 173 | 174 | # status variables 175 | self.state = None 176 | self.message = None 177 | self.messages = [] 178 | self.found = [] 179 | self.found_messages = [] 180 | self.critical_found = [] 181 | self.critical_found_messages = [] 182 | 183 | def _check_updated(self, logfile, offset, filesize): 184 | """Check whether the log file is updated. 185 | 186 | If updated, return True. 187 | """ 188 | if os.stat(logfile).st_mtime < time.time() - self.config['scantime']: 189 | _debug("Skipped: mtime < curtime - scantime") 190 | return False 191 | 192 | if filesize == offset: 193 | _debug("Skipped: filesize == offset") 194 | return False 195 | 196 | return True 197 | 198 | def _find_pattern(self, message, negative=False, critical=False): 199 | """Find pattern. 200 | 201 | If found, return True. 
202 | """ 203 | if negative: 204 | if critical: 205 | pattern_list = self.config['critical_negpattern_list'] 206 | pattern_type = "critical_negpattern" 207 | else: 208 | pattern_list = self.config['negpattern_list'] 209 | pattern_type = "negpattern" 210 | else: 211 | if critical: 212 | pattern_list = self.config['critical_pattern_list'] 213 | pattern_type = "critical_pattern" 214 | else: 215 | pattern_list = self.config['pattern_list'] 216 | pattern_type = "pattern" 217 | 218 | if not pattern_list: 219 | return False 220 | for pattern in pattern_list: 221 | if not pattern: 222 | continue 223 | matchobj = re.search(pattern, message, self.pattern_flags) 224 | if matchobj: 225 | _debug("{0}: '{1}' found".format(pattern_type, pattern)) 226 | return True 227 | return False 228 | 229 | def _remove_old_seekfile(self, logfile_pattern_list, tag=''): 230 | """Remove old seek files.""" 231 | if self.config['dry_run']: 232 | return True 233 | 234 | cwd = os.getcwd() 235 | try: 236 | os.chdir(self.config['state_directory']) 237 | except OSError: 238 | LogChecker.print_message("Unable to chdir: {0}".format( 239 | self.config['state_directory'])) 240 | sys.exit(LogChecker.STATE_UNKNOWN) 241 | 242 | curtime = time.time() 243 | for logfile_pattern in logfile_pattern_list.split(): 244 | if not logfile_pattern: 245 | continue 246 | seekfile_pattern = ( 247 | re.sub(r'[^-0-9A-Za-z*?]', '_', logfile_pattern) + 248 | tag + LogChecker._SUFFIX_SEEK) 249 | for seekfile in glob.glob(seekfile_pattern): 250 | if not os.path.isfile(seekfile): 251 | continue 252 | if curtime - self.config['expiration'] <= os.stat(seekfile).st_mtime: 253 | continue 254 | try: 255 | _debug("remove seekfile: {0}".format(seekfile)) 256 | os.unlink(seekfile) 257 | except OSError: 258 | LogChecker.print_message("Unable to remove old seekfile: {0}".format( 259 | seekfile)) 260 | sys.exit(LogChecker.STATE_UNKNOWN) 261 | 262 | try: 263 | os.chdir(cwd) 264 | except OSError: 265 | LogChecker.print_message("Unable to chdir: {0}".format(cwd)) 266 | sys.exit(LogChecker.STATE_UNKNOWN) 267 | 268 | return True 269 | 270 | def _remove_old_seekfile_with_inode(self, logfile_pattern, tag=''): 271 | """Remove old inode-based seek files.""" 272 | if self.config['dry_run']: 273 | return True 274 | 275 | prefix = None 276 | if self.config['trace_inode']: 277 | prefix = LogChecker.get_digest(logfile_pattern) 278 | 279 | cwd = os.getcwd() 280 | try: 281 | os.chdir(self.config['state_directory']) 282 | except OSError: 283 | LogChecker.print_message("Unable to chdir: {0}".format( 284 | self.config['state_directory'])) 285 | sys.exit(LogChecker.STATE_UNKNOWN) 286 | 287 | curtime = time.time() 288 | seekfile_pattern = "{0}.[0-9]*{1}{2}".format( 289 | prefix, tag, LogChecker._SUFFIX_SEEK_WITH_INODE) 290 | for seekfile in glob.glob(seekfile_pattern): 291 | if not os.path.isfile(seekfile): 292 | continue 293 | if curtime - self.config['expiration'] <= os.stat(seekfile).st_mtime: 294 | continue 295 | try: 296 | _debug("remove seekfile: {0}".format(seekfile)) 297 | os.unlink(seekfile) 298 | except OSError: 299 | LogChecker.print_message("Unable to remove old seekfile: {0}".format(seekfile)) 300 | sys.exit(LogChecker.STATE_UNKNOWN) 301 | 302 | try: 303 | os.chdir(cwd) 304 | except OSError: 305 | LogChecker.print_message("Unable to chdir: {0}".format(cwd)) 306 | sys.exit(LogChecker.STATE_UNKNOWN) 307 | 308 | return True 309 | 310 | def _get_logfile_list(self, filename_pattern_list): 311 | """Get the list of log files from pattern of filenames.""" 312 | logfile_list = [] 
313 | for filename_pattern in filename_pattern_list.split(): 314 | filename_list = glob.glob(filename_pattern) 315 | if filename_list: 316 | logfile_list.extend(filename_list) 317 | if logfile_list: 318 | logfile_list = sorted( 319 | logfile_list, key=lambda x: os.stat(x).st_mtime) 320 | return logfile_list 321 | 322 | def _update_state(self): 323 | """Update the state of the result.""" 324 | output_mode = None 325 | if self.config['output_quiet']: 326 | output_mode = "QUIET" 327 | elif self.config['output_header']: 328 | output_mode = "HEADER" 329 | num_critical = len(self.critical_found) 330 | if num_critical > 0: 331 | self.state = LogChecker.STATE_CRITICAL 332 | if output_mode: 333 | self.messages.append("Critical Found {0} lines ({1}): {2}".format( 334 | num_critical, output_mode, ','.join(self.critical_found_messages))) 335 | else: 336 | self.messages.append("Critical Found {0} lines: {1}".format( 337 | num_critical, ','.join(self.critical_found_messages))) 338 | num = len(self.found) 339 | if num > 0: 340 | if output_mode: 341 | self.messages.append( 342 | "Found {0} lines (limit={1}/{2}, {3}): {4}".format( 343 | num, self.config['warning'], self.config['critical'], 344 | output_mode, ','.join(self.found_messages))) 345 | else: 346 | self.messages.append( 347 | "Found {0} lines (limit={1}/{2}): {3}".format( 348 | num, self.config['warning'], self.config['critical'], 349 | ','.join(self.found_messages))) 350 | if self.config['critical'] > 0 and self.config['critical'] <= num: 351 | if self.state is None: 352 | self.state = LogChecker.STATE_CRITICAL 353 | if self.config['warning'] > 0 and self.config['warning'] <= num: 354 | if self.state is None: 355 | self.state = LogChecker.STATE_WARNING 356 | if self.state is None: 357 | self.state = LogChecker.STATE_OK 358 | return 359 | 360 | def _update_message(self): 361 | state_string = 'OK' 362 | message = 'OK - No matches found.' 
363 | if self.state == LogChecker.STATE_WARNING: 364 | state_string = 'WARNING' 365 | elif self.state == LogChecker.STATE_CRITICAL: 366 | state_string = 'CRITICAL' 367 | if self.state != LogChecker.STATE_OK: 368 | message = "{0}: {1}".format(state_string, ', '.join(self.messages)) 369 | message = message.replace('|', '(pipe)') 370 | self.message = message 371 | return 372 | 373 | def _set_found(self, header, message, found, critical_found): 374 | """Set the found and critical_found if matching pattern is found.""" 375 | _debug("header='{0}', message='{1}'".format(header, message)) 376 | log_message = ''.join([header, message]) 377 | found_negpattern = self._find_pattern(log_message, negative=True) 378 | found_critical_negpattern = self._find_pattern( 379 | log_message, negative=True, critical=True) 380 | 381 | if not found_negpattern and not found_critical_negpattern: 382 | if self._find_pattern(log_message): 383 | found.append({"header": header, "message": message}) 384 | if not found_critical_negpattern: 385 | if self._find_pattern(log_message, critical=True): 386 | critical_found.append({"header": header, "message": message}) 387 | return 388 | 389 | def _check_each_multiple_lines( 390 | self, logfile, start_position, found, critical_found): 391 | """Match the pattern each multiple lines in the log file.""" 392 | messages = [] 393 | previous_header = None 394 | header = None 395 | message = None 396 | 397 | with io.open(logfile, mode='r', encoding=self.config['encoding'], 398 | errors='replace') as fileobj: 399 | fileobj.seek(start_position, 0) 400 | 401 | for line in fileobj: 402 | line = line.rstrip() 403 | _debug("line='{0}'".format(line)) 404 | 405 | matchobj = self.re_logformat.match(line) 406 | if matchobj: 407 | header = matchobj.group(1) 408 | message = matchobj.group(2) 409 | _debug(" logformat: header='{0}', message='{1}'".format( 410 | header, message)) 411 | else: 412 | _debug(" logformat: unmatched") 413 | if previous_header is None: 414 | if self.config['dry_run']: 415 | LogChecker.print_message("[DRY RUN] Log format does not match. Set --format option.") 416 | sys.exit(LogChecker.STATE_UNKNOWN) 417 | else: 418 | # If you do not enable dry run, ignore log format errors. 419 | previous_header = '' 420 | # assume it is continuation 421 | header = previous_header 422 | message = line 423 | 424 | if previous_header is not None and previous_header != header: 425 | # The current line is a new log line. 
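                    # Flush the lines buffered for the previous header as a single
                    # space-joined message before starting a new buffer.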
426 | self._set_found(previous_header, ' '.join(messages), found, critical_found) 427 | messages = [] 428 | 429 | previous_header = header 430 | messages.append(message) 431 | end_position = fileobj.tell() 432 | fileobj.close() 433 | 434 | # flush 435 | if messages: 436 | self._set_found(header, ' '.join(messages), found, critical_found) 437 | return end_position 438 | 439 | def _check_each_single_line( 440 | self, logfile, start_position, found, critical_found): 441 | """Match the pattern each a single line in the log file.""" 442 | with io.open(logfile, mode='r', encoding=self.config['encoding'], 443 | errors='replace') as fileobj: 444 | fileobj.seek(start_position, 0) 445 | 446 | for line in fileobj: 447 | line = line.rstrip() 448 | _debug("line='{0}'".format(line)) 449 | 450 | matchobj = self.re_logformat.match(line) 451 | if matchobj: 452 | header = matchobj.group(1) 453 | message = matchobj.group(2) 454 | _debug(" logformat: header='{0}', message='{1}'".format( 455 | header, message)) 456 | else: 457 | _debug(" logformat: unmatched") 458 | if self.config['dry_run']: 459 | LogChecker.print_message("[DRY RUN] Log format does not match. Set --format option.") 460 | sys.exit(LogChecker.STATE_UNKNOWN) 461 | else: 462 | # If you do not enable dry run, ignore log format errors. 463 | header = '' 464 | message = line 465 | 466 | self._set_found(header, message, found, critical_found) 467 | end_position = fileobj.tell() 468 | fileobj.close() 469 | return end_position 470 | 471 | def _create_digest_condition(self, logfile_pattern): 472 | """Create the digest of search conditions.""" 473 | strings = [] 474 | for key in sorted(self.config): 475 | if key in ['expiration', 'cachetime', 'lock_timeout']: 476 | continue 477 | value = self.config[key] 478 | if isinstance(value, list): 479 | strings.append( 480 | "{0}={1}".format(key, "\t".join(value))) 481 | elif isinstance(value, bool): 482 | strings.append( 483 | "{0}={1}".format(key, LogChecker.to_unicode(str(value)))) 484 | elif isinstance(value, int): 485 | strings.append( 486 | "{0}={1}".format(key, LogChecker.to_unicode(str(value)))) 487 | else: 488 | strings.append("{0}={1}".format(key, value)) 489 | strings.append(logfile_pattern) 490 | digest_condition = LogChecker.get_digest('\n'.join(strings)) 491 | return digest_condition 492 | 493 | def _create_seek_filename( 494 | self, logfile_pattern, logfile, trace_inode=False, tag=''): 495 | """Return the file name of seek file.""" 496 | prefix = None 497 | filename = None 498 | if trace_inode: 499 | filename = (str(os.stat(logfile).st_ino) + 500 | tag + LogChecker._SUFFIX_SEEK_WITH_INODE) 501 | prefix = LogChecker.get_digest(logfile_pattern) 502 | else: 503 | filename = (re.sub(r'[^-0-9A-Za-z]', '_', logfile) + 504 | tag + LogChecker._SUFFIX_SEEK) 505 | if prefix: 506 | filename = prefix + '.' 
+ filename 507 | seekfile = os.path.join(self.config['state_directory'], filename) 508 | return seekfile 509 | 510 | def _create_cache_filename(self, logfile_pattern, tag=''): 511 | """Return the file name of cache file.""" 512 | digest_condition = self._create_digest_condition(logfile_pattern) 513 | filename_elements = [] 514 | filename_elements.append(digest_condition) 515 | if tag: 516 | filename_elements.append(".") 517 | filename_elements.append(tag) 518 | filename_elements.append(LogChecker._SUFFIX_CACHE) 519 | cache_filename = os.path.join( 520 | self.config['state_directory'], "".join(filename_elements)) 521 | return cache_filename 522 | 523 | def _create_lock_filename(self, logfile_pattern, tag=''): 524 | """Return the file name of lock file.""" 525 | digest_condition = self._create_digest_condition(logfile_pattern) 526 | filename_elements = [] 527 | filename_elements.append(digest_condition) 528 | if tag: 529 | filename_elements.append(".") 530 | filename_elements.append(tag) 531 | filename_elements.append(LogChecker._SUFFIX_LOCK) 532 | lock_filename = os.path.join( 533 | self.config['state_directory'], "".join(filename_elements)) 534 | return lock_filename 535 | 536 | def check( 537 | self, logfile_pattern, seekfile=None, 538 | remove_seekfile=False, tag=''): 539 | """Check log files. 540 | 541 | If cache is enabled and exists, return cache. 542 | 543 | Args: 544 | logfile_pattern (str): The file names of log files to be scanned. 545 | seekfile (str, optional): The file name of the seek file. 546 | remove_seekfile (bool, optional): If true, remove expired seek files. 547 | tag (str, optional): The tag added in the file names of state files, 548 | to prevent names collisions. 549 | """ 550 | logfile_pattern = LogChecker.to_unicode(logfile_pattern) 551 | seekfile = LogChecker.to_unicode(seekfile) 552 | tag = LogChecker.to_unicode(tag) 553 | cachefile = self._create_cache_filename(logfile_pattern, tag=tag) 554 | lockfile = self._create_lock_filename(logfile_pattern, tag=tag) 555 | 556 | locked = False 557 | cur_time = time.time() 558 | timeout_time = cur_time + self.config['lock_timeout'] 559 | while cur_time < timeout_time: 560 | if self.config['cachetime'] > 0: 561 | state, message = self._get_cache(cachefile) 562 | if state != LogChecker.STATE_NO_CACHE: 563 | self.state = state 564 | self.message = message 565 | return 566 | with warnings.catch_warnings(): 567 | warnings.simplefilter("ignore") 568 | lockfileobj = LogChecker.lock(lockfile) 569 | if lockfileobj: 570 | locked = True 571 | break 572 | cur_time = time.time() 573 | time.sleep(LogChecker._RETRY_PERIOD) 574 | if not locked: 575 | self.state = LogChecker.STATE_UNKNOWN 576 | self.message = "UNKNOWN: Lock timeout. Another process is running." 577 | return 578 | 579 | if LogChecker.is_multiple_logfiles(logfile_pattern): 580 | self._check_log_multi( 581 | logfile_pattern, remove_seekfile=remove_seekfile, tag=tag) 582 | else: 583 | # create seekfile 584 | if not seekfile: 585 | seekfile = self._create_seek_filename( 586 | logfile_pattern, logfile_pattern, 587 | trace_inode=self.config['trace_inode'], tag=tag) 588 | self._check_log(logfile_pattern, seekfile) 589 | 590 | if self.config['cachetime'] > 0: 591 | self._update_cache(cachefile) 592 | 593 | LogChecker.unlock(lockfile, lockfileobj) 594 | return 595 | 596 | def check_log(self, logfile, seekfile): 597 | """Check the log file. 598 | 599 | deprecated:: 2.0.1 600 | Use :func:`check` instead. 
601 | """ 602 | self.check(logfile, seekfile=seekfile) 603 | return 604 | 605 | def _check_log(self, logfile, seekfile): 606 | """Check the log file. 607 | 608 | Args: 609 | logfile (str): The file name of the log file to be scanned. 610 | seekfile (str): The file name of the seek file. 611 | 612 | """ 613 | _debug("logfile='{0}', seekfile='{1}'".format(logfile, seekfile)) 614 | logfile = LogChecker.to_unicode(logfile) 615 | if not os.path.exists(logfile): 616 | return 617 | 618 | filesize = os.path.getsize(logfile) 619 | # define seek positions. 620 | start_position = LogChecker._read_seekfile(seekfile) 621 | end_position = 0 622 | if not self._check_updated(logfile, start_position, filesize): 623 | return 624 | 625 | # if log was rotated, set start_position. 626 | if filesize < start_position: 627 | start_position = 0 628 | 629 | found = [] 630 | critical_found = [] 631 | if self.config['multiline']: 632 | end_position = self._check_each_multiple_lines( 633 | logfile, start_position, found, critical_found) 634 | else: 635 | end_position = self._check_each_single_line( 636 | logfile, start_position, found, critical_found) 637 | 638 | if found: 639 | self.found.extend(found) 640 | if self.config['output_quiet']: 641 | self.found_messages.append( 642 | "at {0}".format(logfile)) 643 | elif self.config['output_header']: 644 | self.found_messages.append( 645 | "{0} at {1}".format(LogChecker._join_header(found), logfile)) 646 | else: 647 | self.found_messages.append( 648 | "{0} at {1}".format(LogChecker._join_header_and_message(found) , logfile)) 649 | if critical_found: 650 | self.critical_found.extend(critical_found) 651 | if self.config['output_quiet']: 652 | self.critical_found_messages.append( 653 | "at {0}".format(logfile)) 654 | elif self.config['output_header']: 655 | self.critical_found_messages.append( 656 | "{0} at {1}".format(LogChecker._join_header(critical_found), logfile)) 657 | else: 658 | self.critical_found_messages.append( 659 | "{0} at {1}".format(LogChecker._join_header_and_message(critical_found), logfile)) 660 | 661 | self._update_seekfile(seekfile, end_position) 662 | return 663 | 664 | def check_log_multi( 665 | self, logfile_pattern, state_directory, 666 | remove_seekfile=False, tag=''): 667 | """Check the multiple log files. 668 | 669 | deprecated:: 2.0.1 670 | Use :func:`check` instead. 671 | """ 672 | state_directory = state_directory # not used 673 | self.check(logfile_pattern, remove_seekfile=remove_seekfile, tag=tag) 674 | 675 | def _check_log_multi(self, logfile_pattern, remove_seekfile=False, tag=''): 676 | """Check the multiple log files. 677 | 678 | Args: 679 | logfile_pattern (str): The file names of log files to be scanned. 680 | remove_seekfile (bool, optional): If true, remove expired seek files. 681 | tag (str, optional): The tag added in the file names of state files, 682 | to prevent names collisions. 
683 | 684 | """ 685 | logfile_list = self._get_logfile_list(logfile_pattern) 686 | for logfile in logfile_list: 687 | if not os.path.isfile(logfile): 688 | continue 689 | seekfile = self._create_seek_filename( 690 | logfile_pattern, logfile, 691 | trace_inode=self.config['trace_inode'], tag=tag) 692 | self._check_log(logfile, seekfile) 693 | 694 | if remove_seekfile: 695 | if self.config['trace_inode']: 696 | self._remove_old_seekfile_with_inode(logfile_pattern, tag) 697 | else: 698 | self._remove_old_seekfile(logfile_pattern, tag) 699 | return 700 | 701 | def clear_state(self): 702 | """Clear the state of the result.""" 703 | self.state = None 704 | self.message = None 705 | self.messages = [] 706 | self.found = [] 707 | self.found_messages = [] 708 | self.critical_found = [] 709 | self.critical_found_messages = [] 710 | return 711 | 712 | def get_state(self): 713 | """Get the state of the result. 714 | 715 | When get_state() or get_message() is executed, 716 | the state is retained until clear_state() is executed. 717 | """ 718 | if self.state is None: 719 | self._update_state() 720 | return self.state 721 | 722 | def get_message(self): 723 | """Get the message of the result. 724 | 725 | When get_state() or get_message() is executed, 726 | the message is retained until clear_state() is executed. 727 | """ 728 | if self.state is None: 729 | self._update_state() 730 | if self.message is None: 731 | self._update_message() 732 | return self.message 733 | 734 | def _get_cache(self, cachefile): 735 | """Get the cache.""" 736 | if self.config['dry_run']: 737 | return LogChecker.STATE_NO_CACHE, None 738 | 739 | if not os.path.exists(cachefile): 740 | return LogChecker.STATE_NO_CACHE, None 741 | if os.stat(cachefile).st_mtime < time.time() - self.config['cachetime']: 742 | _debug("Cache is expired: mtime < curtime - cachetime") 743 | return LogChecker.STATE_NO_CACHE, None 744 | with io.open(cachefile, mode='r', encoding='utf-8') as fileobj: 745 | line = fileobj.readline() 746 | fileobj.close() 747 | state, message = line.split("\t", 1) 748 | _debug("cache: state={0}, message='{1}'".format(state, message)) 749 | return int(state), message 750 | 751 | def _update_cache(self, cachefile): 752 | """Update the cache.""" 753 | if self.config['dry_run']: 754 | return True 755 | 756 | tmp_cachefile = cachefile + "." + str(os.getpid()) 757 | with io.open(tmp_cachefile, mode='w', encoding='utf-8') as cachefileobj: 758 | cachefileobj.write(LogChecker.to_unicode(str(self.get_state()))) 759 | cachefileobj.write("\t") 760 | cachefileobj.write(self.get_message()) 761 | cachefileobj.flush() 762 | cachefileobj.close() 763 | os.rename(tmp_cachefile, cachefile) 764 | return True 765 | 766 | def _remove_cache(self, cachefile): 767 | """Remove the cache file.""" 768 | if self.config['dry_run']: 769 | return True 770 | 771 | if os.path.isfile(cachefile): 772 | os.unlink(cachefile) 773 | 774 | @staticmethod 775 | def get_pattern_list(pattern_string, pattern_filename): 776 | """Get the pattern list. 777 | 778 | Args: 779 | pattern_string (str): The pattern to scan for. 780 | pattern_filename (str): The file name of file containing patterns. 781 | 782 | Returns: 783 | The list of patterns. 784 | 785 | """ 786 | pattern_list = [] 787 | if pattern_string: 788 | # Revert the surrogate-escaped string in the ASCII locale. 
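            # On Python 3 under a non-UTF-8 (e.g. C/ASCII) locale, command-line
            # arguments are decoded with the 'surrogateescape' error handler, so
            # undecodable bytes appear as lone surrogates in U+DC80-U+DCFF. The
            # substitution below maps each surrogate back to its original byte
            # and decodes the result as UTF-8.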
789 | try: 790 | pattern_string = re.sub( 791 | r'[\udc80-\udcff]+', 792 | lambda m: b''.join( 793 | [bytes.fromhex('%x' % (ord(char) - ord('\udc00'))) for char in m.group(0)] 794 | ).decode('utf-8'), 795 | pattern_string) 796 | pattern_list.append(LogChecker.to_unicode(pattern_string)) 797 | except UnicodeDecodeError: 798 | LogChecker.print_message("The character encoding of the locale or pattern string is incorrect. Use UTF-8.") 799 | sys.exit(LogChecker.STATE_UNKNOWN) 800 | if pattern_filename: 801 | if os.path.isfile(pattern_filename): 802 | lines = [] 803 | try: 804 | with io.open(pattern_filename, mode='r', encoding='utf-8') as fileobj: 805 | for line in fileobj: 806 | pattern = line.rstrip() 807 | if pattern: 808 | lines.append(pattern) 809 | fileobj.close() 810 | except UnicodeDecodeError: 811 | LogChecker.print_message("The character encoding of the pattern file is incorrect: {0}. Save its character encoding as UTF-8.".format(pattern_filename)) 812 | sys.exit(LogChecker.STATE_UNKNOWN) 813 | if lines: 814 | pattern_list.extend(lines) 815 | else: 816 | LogChecker.print_message("Unable to find the pattern file: {0}".format(pattern_filename)) 817 | sys.exit(LogChecker.STATE_UNKNOWN) 818 | return pattern_list 819 | 820 | @staticmethod 821 | def _expand_logformat_by_strftime(logformat): 822 | """Expand log format by strftime variables. 823 | 824 | Args: 825 | logformat (str): The string of log format. 826 | 827 | Returns: 828 | The string expanded by strftime(). 829 | 830 | """ 831 | for item in LogChecker._LOGFORMAT_EXPANSION_LIST: 832 | key = list(item)[0] 833 | logformat = logformat.replace(key, item[key]) 834 | return logformat 835 | 836 | def _update_seekfile(self, seekfile, position): 837 | """Update the seek file for the log file.""" 838 | if self.config['dry_run']: 839 | return True 840 | 841 | tmp_seekfile = seekfile + "." + str(os.getpid()) 842 | with io.open(tmp_seekfile, mode='w', encoding='utf-8') as fileobj: 843 | fileobj.write(LogChecker.to_unicode(str(position))) 844 | fileobj.flush() 845 | fileobj.close() 846 | os.rename(tmp_seekfile, seekfile) 847 | return True 848 | 849 | @staticmethod 850 | def _read_seekfile(seekfile): 851 | """Read the offset of the log file from its seek file.""" 852 | if not os.path.exists(seekfile): 853 | return 0 854 | with io.open(seekfile, mode='r', encoding='utf-8') as fileobj: 855 | offset = int(fileobj.readline()) 856 | fileobj.close() 857 | return offset 858 | 859 | @staticmethod 860 | def _join_header(found): 861 | """Join header.""" 862 | headers = [] 863 | for item in found: 864 | if item['header']: 865 | headers.append(item['header']) 866 | else: 867 | headers.append(item['message']) 868 | return ','.join(headers) 869 | 870 | @staticmethod 871 | def _join_header_and_message(found): 872 | """Join header and message.""" 873 | log_messages = [] 874 | for item in found: 875 | log_messages.append(''.join([item['header'], item['message']])) 876 | return ','.join(log_messages) 877 | 878 | @staticmethod 879 | def lock(lockfile): 880 | """Lock. 881 | 882 | Args: 883 | lockfile (str): The file name of the lock file. 884 | 885 | Returns: 886 | The instance of the object of the lock file. 887 | If lock fails, return None. 888 | 889 | """ 890 | lockfileobj = io.open(lockfile, mode='w') 891 | try: 892 | fcntl.flock(lockfileobj, fcntl.LOCK_EX | fcntl.LOCK_NB) 893 | except IOError: 894 | return None 895 | lockfileobj.flush() 896 | return lockfileobj 897 | 898 | @staticmethod 899 | def unlock(lockfile, lockfileobj): 900 | """Unlock. 
901 | 902 | Args: 903 | lockfile (str): The file name of the lock file. 904 | lockfileobj (file): The instance of the object of the lock file. 905 | 906 | Returns: 907 | True if unlock successes. 908 | 909 | """ 910 | if lockfileobj is None: 911 | return False 912 | lockfileobj.close() 913 | if os.path.isfile(lockfile): 914 | os.unlink(lockfile) 915 | return True 916 | 917 | @staticmethod 918 | def get_digest(string): 919 | """Get digest string. 920 | 921 | Args: 922 | string (str): The string to be digested. 923 | 924 | Returns: 925 | The string of digest. 926 | 927 | """ 928 | hashobj = hashlib.sha1() 929 | hashobj.update(LogChecker.to_bytes(string)) 930 | digest = LogChecker.to_unicode( 931 | base64.urlsafe_b64encode(hashobj.digest())) 932 | return digest 933 | 934 | @staticmethod 935 | def is_multiple_logfiles(logfile_pattern): 936 | """Whether the pattern of the log file names is multiple files. 937 | 938 | Args: 939 | logfile_pattern (str): The pattern of the file names of log files. 940 | 941 | Returns: 942 | True if the string of the log file pattern is multiple log files. 943 | 944 | """ 945 | matchobj = re.search('[*? ]', logfile_pattern) 946 | if matchobj: 947 | return True 948 | return False 949 | 950 | @staticmethod 951 | def to_unicode(string): 952 | """Convert str to unicode. 953 | 954 | Args: 955 | string (str or bytes): The string or bytes to convert to unicode string. 956 | 957 | Returns: 958 | The unicode string to be converted. 959 | 960 | """ 961 | if sys.version_info >= (3,): 962 | # Python3 963 | # type: str or bytes 964 | if isinstance(string, bytes): 965 | # type: bytes 966 | # convert bytes to str. 967 | return string.decode('utf-8') 968 | # type: str 969 | else: 970 | # Python2 971 | # type: unicode or str 972 | if isinstance(string, str): 973 | # type: str 974 | # convert str to unicode. 975 | return string.decode('utf-8') 976 | # type: unicode 977 | return string 978 | 979 | @staticmethod 980 | def to_bytes(string): 981 | """Convert str to bytes. 982 | 983 | Args: 984 | string (str or unicode): The string to convert to bytes. 985 | 986 | Returns: 987 | The bytes to be converted. 988 | 989 | """ 990 | if sys.version_info >= (3,): 991 | # Python3 992 | # type: str or bytes 993 | if isinstance(string, str): 994 | # type: str 995 | return string.encode('utf-8') 996 | # type: bytes 997 | else: 998 | # Python2 999 | # type: unicode or str 1000 | if not isinstance(string, str): 1001 | # type: unicode 1002 | return string.encode('utf-8') 1003 | # type: str 1004 | return string 1005 | 1006 | @staticmethod 1007 | def print_message(string): 1008 | with io.open(sys.stdout.fileno(), mode='w', encoding='utf-8') as fileobj: 1009 | fileobj.write(string) 1010 | fileobj.write('\n') 1011 | fileobj.close() 1012 | 1013 | 1014 | def _debug(string): 1015 | if not __debug__: 1016 | print("DEBUG: {0}".format(string)) 1017 | 1018 | 1019 | def _make_parser(): 1020 | parser = argparse.ArgumentParser( 1021 | description="A log file regular expression-based parser plugin for Nagios.", 1022 | usage=("%(prog)s [options] [-p |-P ] " 1023 | "-S -l ")) 1024 | parser.add_argument( 1025 | "--version", 1026 | action="version", 1027 | version="%(prog)s {0}".format(__version__) 1028 | ) 1029 | parser.add_argument( 1030 | "--dry-run", 1031 | action="store_true", 1032 | dest="dry_run", 1033 | default=False, 1034 | help=("Do dry run. " 1035 | "The seek files are not updated and cache file is not used. 
" 1036 | "If log format is not correct, it prints an error message.") 1037 | ) 1038 | parser.add_argument( 1039 | "-l", "--logfile", 1040 | action="store", 1041 | dest="logfile_pattern", 1042 | required=True, 1043 | metavar="", 1044 | help=("The file names of log files to be scanned. " 1045 | "The metacharacters * and ? are available. " 1046 | "To set multiple files, set a space between file names. " 1047 | "See also --scantime.") 1048 | ) 1049 | parser.add_argument( 1050 | "-F", "--format", 1051 | action="store", 1052 | dest="logformat", 1053 | metavar="", 1054 | default=LogChecker.FORMAT_SYSLOG, 1055 | help=("Regular expression for log format. " 1056 | "It requires two groups in format of '^(HEADER)(.*)$'. " 1057 | "HEADER includes TIMESTAMP, HOSTNAME, TAG and so on. " 1058 | "Also, it may use %%%%, %%Y, %%y, %%a, %%b, %%m, %%d, %%e, %%H, " 1059 | "%%M, %%S, %%F and %%T of strftime(3). " 1060 | "(default: regular expression for syslog.") 1061 | ) 1062 | parser.add_argument( 1063 | "-s", "--seekfile", 1064 | action="store", 1065 | dest="seekfile", 1066 | metavar="", 1067 | help=("Deprecated. Use -S option instead. " 1068 | "The file name of the file to store the seek position of the last scan. ") 1069 | ) 1070 | parser.add_argument( 1071 | "-S", "--state-directory", "--seekfile-directory", 1072 | action="store", 1073 | dest="state_directory", 1074 | metavar="", 1075 | help=("The directory to store seek files, cache file and lock file. " 1076 | "'--seekfile-directory' is for backwards compatibility.") 1077 | ) 1078 | parser.add_argument( 1079 | "-T", "--tag", "--seekfile-tag", 1080 | action="store", 1081 | dest="tag", 1082 | default="", 1083 | metavar="", 1084 | help=("Add a tag in the file names of state files, to prevent names collisions. " 1085 | "Useful to avoid maintaining many '-S' directories " 1086 | "when you check the same files several times with different args. " 1087 | "'--seekfile-tag' is for backwards compatibility.") 1088 | ) 1089 | parser.add_argument( 1090 | "-I", "--trace-inode", 1091 | action="store_true", 1092 | dest="trace_inode", 1093 | default=False, 1094 | help=("If set, trace the inode of the log file. " 1095 | "After log rotatation, you can trace the log file.") 1096 | ) 1097 | parser.add_argument( 1098 | "-p", "--pattern", 1099 | action="store", 1100 | dest="pattern", 1101 | metavar="", 1102 | help="The regular expression to scan for in the log file." 1103 | ) 1104 | parser.add_argument( 1105 | "-P", "--patternfile", 1106 | action="store", 1107 | dest="patternfile", 1108 | metavar="", 1109 | help="The file name of the file containing regular expressions, one per line. " 1110 | ) 1111 | parser.add_argument( 1112 | "--critical-pattern", 1113 | action="store", 1114 | dest="critical_pattern", 1115 | metavar="", 1116 | help=("The regular expression to scan for in the log file. " 1117 | "If found, return CRITICAL.") 1118 | ) 1119 | parser.add_argument( 1120 | "--critical-patternfile", 1121 | action="store", 1122 | dest="critical_patternfile", 1123 | metavar="", 1124 | help=("The file name of the file containing regular expressions, one per line. 
" 1125 | "If found, return CRITICAL.") 1126 | ) 1127 | parser.add_argument( 1128 | "-n", "--negpattern", 1129 | action="store", 1130 | dest="negpattern", 1131 | metavar="", 1132 | help=("The regular expression which all will be skipped except as critical pattern " 1133 | "in the log file.") 1134 | ) 1135 | parser.add_argument( 1136 | "-N", "-f", "--negpatternfile", 1137 | action="store", 1138 | dest="negpatternfile", 1139 | metavar="", 1140 | help=("The file name of the file containing regular expressions " 1141 | "which all will be skipped except as critical pattern, " 1142 | "one per line. " 1143 | "'-f' is for backwards compatibility.") 1144 | ) 1145 | parser.add_argument( 1146 | "--critical-negpattern", 1147 | action="store", 1148 | dest="critical_negpattern", 1149 | metavar="", 1150 | help="The regular expression which all will be skipped in the log file." 1151 | ) 1152 | parser.add_argument( 1153 | "--critical-negpatternfile", 1154 | action="store", 1155 | dest="critical_negpatternfile", 1156 | metavar="", 1157 | help=("The file name of the file containing regular expressions " 1158 | "which all will be skipped, one per line.") 1159 | ) 1160 | parser.add_argument( 1161 | "-i", "--case-insensitive", 1162 | action="store_true", 1163 | dest="case_insensitive", 1164 | default=False, 1165 | help="Do a case insensitive scan." 1166 | ) 1167 | parser.add_argument( 1168 | "--encoding", 1169 | action="store", 1170 | dest="encoding", 1171 | default='utf-8', 1172 | metavar="", 1173 | help=("Specify the character encoding in the log file. " 1174 | "(default: %(default)s)") 1175 | ) 1176 | parser.add_argument( 1177 | "-w", "--warning", 1178 | action="store", 1179 | type=int, 1180 | dest="warning", 1181 | default=1, 1182 | metavar="", 1183 | help=("Return WARNING if at least this many matches found. " 1184 | "(default: %(default)s)") 1185 | ) 1186 | parser.add_argument( 1187 | "-c", "--critical", 1188 | action="store", 1189 | type=int, 1190 | dest="critical", 1191 | default=0, 1192 | metavar="", 1193 | help=("Return CRITICAL if at least this many matches found. " 1194 | "i.e. don't return critical alerts unless specified explicitly. " 1195 | "(default: %(default)s)") 1196 | ) 1197 | parser.add_argument( 1198 | "-t", "--scantime", 1199 | action="store", 1200 | type=int, 1201 | dest="scantime", 1202 | default=86400, 1203 | metavar="", 1204 | help=("The range of time to scan. " 1205 | "The log files older than this time are not scanned. " 1206 | "(default: %(default)s)") 1207 | ) 1208 | parser.add_argument( 1209 | "-E", "--expiration", 1210 | action="store", 1211 | type=int, 1212 | dest="expiration", 1213 | default=691200, 1214 | metavar="", 1215 | help=("The expiration of seek files. " 1216 | "This must be longer than the log rotation period. " 1217 | "The expired seek files are deleted with -R option. " 1218 | "(default: %(default)s)") 1219 | ) 1220 | parser.add_argument( 1221 | "-R", "--remove-seekfile", 1222 | action="store_true", 1223 | dest="remove_seekfile", 1224 | default=False, 1225 | help="Remove expired seek files. See also --expiration." 1226 | ) 1227 | parser.add_argument( 1228 | "-M", "--multiline", 1229 | action="store_true", 1230 | dest="multiline", 1231 | default=False, 1232 | help=("Treat multiple lines outputted at once as one message. " 1233 | "If the log format is not syslog, set --format option. 
" 1234 | "See also --format.") 1235 | ) 1236 | parser.add_argument( 1237 | "--cachetime", 1238 | action="store", 1239 | type=int, 1240 | dest="cachetime", 1241 | default=60, 1242 | metavar="", 1243 | help=("The period to cache the result. " 1244 | "To disable this cache feature, set '0'. " 1245 | "(default: %(default)s)") 1246 | ) 1247 | parser.add_argument( 1248 | "--lock-timeout", 1249 | action="store", 1250 | type=int, 1251 | dest="lock_timeout", 1252 | default=3, 1253 | metavar="", 1254 | help=("The period to wait for if another process is running. " 1255 | "If timeout occurs, UNKNOWN is returned. " 1256 | "(default: %(default)s)") 1257 | ) 1258 | parser.add_argument( 1259 | "-H", "--output-header", 1260 | action="store_true", 1261 | dest="output_header", 1262 | default=False, 1263 | help=("HEADER mode: Suppress the output of the message on matched lines. " 1264 | "Only HEADER(TIMESTAMP, HOSTNAME, TAG etc) is outputted. " 1265 | "If the log format is not syslog, set --format option. " 1266 | "See also --format.") 1267 | ) 1268 | parser.add_argument( 1269 | "-q", "--quiet", 1270 | action="store_true", 1271 | dest="output_quiet", 1272 | default=False, 1273 | help=("QUIET mode: Suppress the output of matched lines.") 1274 | ) 1275 | return parser 1276 | 1277 | 1278 | def _check_parser_args(parser): 1279 | args = parser.parse_args() 1280 | 1281 | if len(sys.argv) == 1: 1282 | parser.print_help() 1283 | sys.exit(LogChecker.STATE_UNKNOWN) 1284 | 1285 | # check args 1286 | if not args.logfile_pattern: 1287 | parser.exit( 1288 | LogChecker.STATE_UNKNOWN, 1289 | "the following arguments are required: -l/--logfile") 1290 | 1291 | if args.state_directory: 1292 | if not os.path.isdir(args.state_directory): 1293 | parser.exit( 1294 | LogChecker.STATE_UNKNOWN, 1295 | "the state directory is not found: {0}".format( 1296 | args.state_directory)) 1297 | if (args.seekfile and 1298 | os.path.dirname(args.seekfile) != args.state_directory): 1299 | parser.exit( 1300 | LogChecker.STATE_UNKNOWN, 1301 | "the seek file is outside the state directory: {0}".format( 1302 | args.seekfile)) 1303 | else: 1304 | if args.seekfile: 1305 | state_directory = os.path.dirname(args.seekfile) 1306 | if not os.path.isdir(state_directory): 1307 | parser.exit( 1308 | LogChecker.STATE_UNKNOWN, 1309 | "the state directory is not found: {0}".format( 1310 | state_directory)) 1311 | else: 1312 | parser.exit( 1313 | LogChecker.STATE_UNKNOWN, 1314 | "the following arguments are required: -S/--state-directory") 1315 | 1316 | if args.seekfile: 1317 | if LogChecker.is_multiple_logfiles(args.logfile_pattern): 1318 | parser.exit( 1319 | LogChecker.STATE_UNKNOWN, 1320 | "If check multiple log files, use arguments -s/--seekfile.") 1321 | else: 1322 | if not os.path.isfile(args.logfile_pattern): 1323 | parser.exit( 1324 | LogChecker.STATE_UNKNOWN, 1325 | "the log file is not found: {0}".format( 1326 | args.logfile_pattern)) 1327 | 1328 | pattern_list = LogChecker.get_pattern_list(args.pattern, args.patternfile) 1329 | critical_pattern_list = LogChecker.get_pattern_list( 1330 | args.critical_pattern, args.critical_patternfile) 1331 | if not pattern_list and not critical_pattern_list: 1332 | parser.exit( 1333 | LogChecker.STATE_UNKNOWN, 1334 | "any valid patterns are not found.") 1335 | 1336 | return args 1337 | 1338 | 1339 | def _generate_config(args): 1340 | """Generate initial data.""" 1341 | if args.seekfile and not args.state_directory: 1342 | state_directory = os.path.dirname(args.seekfile) 1343 | else: 1344 | state_directory = 
args.state_directory 1345 | 1346 | # make pattern list 1347 | pattern_list = LogChecker.get_pattern_list(args.pattern, args.patternfile) 1348 | critical_pattern_list = LogChecker.get_pattern_list( 1349 | args.critical_pattern, args.critical_patternfile) 1350 | negpattern_list = LogChecker.get_pattern_list( 1351 | args.negpattern, args.negpatternfile) 1352 | critical_negpattern_list = LogChecker.get_pattern_list( 1353 | args.critical_negpattern, args.critical_negpatternfile) 1354 | 1355 | # set value of args 1356 | config = { 1357 | "dry_run": args.dry_run, 1358 | "logformat": args.logformat, 1359 | "state_directory": state_directory, 1360 | "pattern_list": pattern_list, 1361 | "critical_pattern_list": critical_pattern_list, 1362 | "negpattern_list": negpattern_list, 1363 | "critical_negpattern_list": critical_negpattern_list, 1364 | "case_insensitive": args.case_insensitive, 1365 | "encoding": args.encoding, 1366 | "warning": args.warning, 1367 | "critical": args.critical, 1368 | "trace_inode": args.trace_inode, 1369 | "multiline": args.multiline, 1370 | "scantime": args.scantime, 1371 | "expiration": args.expiration, 1372 | "cachetime": args.cachetime, 1373 | "lock_timeout": args.lock_timeout, 1374 | "output_header": args.output_header, 1375 | "output_quiet": args.output_quiet 1376 | } 1377 | return config 1378 | 1379 | 1380 | def main(): 1381 | """Run check_log_ng.""" 1382 | parser = _make_parser() 1383 | args = _check_parser_args(parser) 1384 | config = _generate_config(args) 1385 | log = LogChecker(config) 1386 | log.check( 1387 | args.logfile_pattern, seekfile=args.seekfile, 1388 | remove_seekfile=args.remove_seekfile, tag=args.tag) 1389 | state = log.get_state() 1390 | message = log.get_message() 1391 | if args.dry_run: 1392 | LogChecker.print_message("[DRY RUN] {0}".format(message)) 1393 | else: 1394 | LogChecker.print_message(message) 1395 | sys.exit(state) 1396 | 1397 | 1398 | if __name__ == "__main__": 1399 | main() 1400 | 1401 | # vim: set ts=4 sw=4 et: 1402 | -------------------------------------------------------------------------------- /test_check_log_ng.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """Unit test for check_log_ng""" 4 | 5 | from __future__ import absolute_import 6 | from __future__ import division 7 | from __future__ import print_function 8 | from __future__ import unicode_literals 9 | import unittest 10 | import warnings 11 | import os 12 | import glob 13 | import io 14 | import time 15 | import datetime 16 | import subprocess 17 | from check_log_ng import LogChecker 18 | 19 | 20 | class LogCheckerTestCase(unittest.TestCase): 21 | 22 | """Unit test.""" 23 | 24 | # Class constant 25 | MESSAGE_OK = "OK - No matches found." 
26 | MESSAGE_WARNING_ONE = "WARNING: Found 1 lines (limit=1/0): {0} at {1}" 27 | MESSAGE_WARNING_ONE_WITH_QUIET = "WARNING: Found 1 lines (limit=1/0, QUIET): at {0}" 28 | MESSAGE_WARNING_ONE_WITH_HEADER = "WARNING: Found 1 lines (limit=1/0, HEADER): {0} at {1}" 29 | MESSAGE_WARNING_TWO = "WARNING: Found 2 lines (limit=1/0): {0},{1} at {2}" 30 | MESSAGE_WARNING_TWO_WITH_QUIET = "WARNING: Found 2 lines (limit=1/0, QUIET): at {0}" 31 | MESSAGE_WARNING_TWO_WITH_HEADER = "WARNING: Found 2 lines (limit=1/0, HEADER): {0},{1} at {2}" 32 | MESSAGE_WARNING_TWO_IN_TWO_FILES = ( 33 | "WARNING: Found 2 lines (limit=1/0): {0} at {1},{2} at {3}") 34 | MESSAGE_CRITICAL_ONE = "CRITICAL: Critical Found 1 lines: {0} at {1}" 35 | MESSAGE_CRITICAL_ONE_WITH_QUIET = "CRITICAL: Critical Found 1 lines (QUIET): at {0}" 36 | MESSAGE_CRITICAL_ONE_WITH_HEADER = "CRITICAL: Critical Found 1 lines (HEADER): {0} at {1}" 37 | MESSAGE_UNKNOWN_LOCK_TIMEOUT = ( 38 | "UNKNOWN: Lock timeout. Another process is running.") 39 | 40 | # Class variablesex 41 | BASEDIR = None 42 | TESTDIR = None 43 | LOGDIR = None 44 | STATEDIR = None 45 | 46 | @classmethod 47 | def setUpClass(cls): 48 | cls.BASEDIR = os.getcwd() 49 | cls.TESTDIR = os.path.join(cls.BASEDIR, 'test') 50 | cls.LOGDIR = os.path.join(cls.TESTDIR, 'log') 51 | cls.STATEDIR = os.path.join(cls.TESTDIR, 'state') 52 | if not os.path.isdir(cls.TESTDIR): 53 | os.mkdir(cls.TESTDIR) 54 | if not os.path.isdir(cls.LOGDIR): 55 | os.mkdir(cls.LOGDIR) 56 | if not os.path.isdir(cls.STATEDIR): 57 | os.mkdir(cls.STATEDIR) 58 | 59 | @classmethod 60 | def tearDownClass(cls): 61 | if os.path.exists(cls.LOGDIR): 62 | os.removedirs(cls.LOGDIR) 63 | if os.path.exists(cls.STATEDIR): 64 | os.removedirs(cls.STATEDIR) 65 | if os.path.exists(cls.TESTDIR): 66 | os.removedirs(cls.TESTDIR) 67 | 68 | def setUp(self): 69 | # log files 70 | self.logfile = os.path.join(self.LOGDIR, 'testlog') 71 | self.logfile1 = os.path.join(self.LOGDIR, 'testlog.1') 72 | self.logfile2 = os.path.join(self.LOGDIR, 'testlog.2') 73 | self.logfile_pattern = os.path.join(self.LOGDIR, 'testlog*') 74 | 75 | # seek files 76 | self.tag1 = 'tag1' 77 | self.tag2 = 'tag2' 78 | self.seekfile = os.path.join(self.STATEDIR, 'testlog.seek') 79 | 80 | # lock file 81 | self.lockfile = os.path.join(self.STATEDIR, 'check_log_ng.lock') 82 | 83 | # configuration 84 | # Set cachetime to 0 for convenience in testing. 85 | self.config = { 86 | "logformat": LogChecker.FORMAT_SYSLOG, 87 | "state_directory": self.STATEDIR, 88 | "pattern_list": [], 89 | "critical_pattern_list": [], 90 | "negpattern_list": [], 91 | "critical_negpattern_list": [], 92 | "case_insensitive": False, 93 | "encoding": "utf-8", 94 | "warning": 1, 95 | "critical": 0, 96 | "trace_inode": False, 97 | "multiline": False, 98 | "scantime": 86400, 99 | "expiration": 691200, 100 | "cachetime": 0, 101 | "lock_timeout": 3 102 | } 103 | 104 | def tearDown(self): 105 | # remove log files. 106 | for logfile in [self.logfile, self.logfile1, self.logfile2]: 107 | if os.path.exists(logfile): 108 | os.unlink(logfile) 109 | 110 | # remove seek files. 111 | seekfiles = glob.glob( 112 | os.path.join(self.STATEDIR, '*' + LogChecker._SUFFIX_SEEK)) 113 | for seekfile in seekfiles: 114 | if os.path.exists(seekfile): 115 | os.unlink(seekfile) 116 | 117 | # remove a cache file. 118 | cachefiles = glob.glob( 119 | os.path.join(self.STATEDIR, '*' + LogChecker._SUFFIX_CACHE)) 120 | for cachefile in cachefiles: 121 | if os.path.exists(cachefile): 122 | os.unlink(cachefile) 123 | 124 | # remove a lock file. 
125 | lockfiles = glob.glob( 126 | os.path.join(self.STATEDIR, '*' + LogChecker._SUFFIX_LOCK)) 127 | for lockfile in lockfiles: 128 | if os.path.exists(lockfile): 129 | os.unlink(lockfile) 130 | 131 | def test_dry_run(self): 132 | """--dry-run option 133 | """ 134 | self.config["pattern_list"] = ["ERROR"] 135 | log = LogChecker(self.config) 136 | 137 | # create a seek file 138 | # Dec 5 12:34:50 hostname noop: NOOP 139 | line4 = self._make_line(self._get_timestamp(), "noop", "NOOP") 140 | self._write_logfile(self.logfile, line4) 141 | log.clear_state() 142 | log.check(self.logfile) 143 | 144 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 145 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 146 | 147 | # verify a seek file is not updated. 148 | self.config["dry_run"] = True 149 | log = LogChecker(self.config) 150 | 151 | # Dec 5 12:34:50 hostname test: ERROR 152 | line = self._make_line(self._get_timestamp(), "test", "ERROR") 153 | self._write_logfile(self.logfile, line) 154 | 155 | log.clear_state() 156 | log.check(self.logfile) 157 | 158 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 159 | self.assertEqual( 160 | log.get_message(), 161 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 162 | 163 | log.clear_state() 164 | log.check(self.logfile) 165 | 166 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 167 | self.assertEqual( 168 | log.get_message(), 169 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 170 | 171 | def test_format(self): 172 | """--format option 173 | """ 174 | self.config["logformat"] = r"^(\[%a %b %d %T %Y\] \[\S+\] )(.*)$" 175 | self.config["pattern_list"] = ["ERROR"] 176 | log = LogChecker(self.config) 177 | 178 | # [Thu Dec 05 12:34:50 2013] [error] ERROR 179 | line = self._make_customized_line( 180 | self._get_customized_timestamp(), "error", "ERROR") 181 | self._write_customized_logfile(self.logfile, line) 182 | log.check(self.logfile) 183 | 184 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 185 | self.assertEqual( 186 | log.get_message(), 187 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 188 | 189 | def test_pattern(self): 190 | """--pattern option 191 | """ 192 | self.config["pattern_list"] = ["ERROR"] 193 | log = LogChecker(self.config) 194 | 195 | # 1 line matched 196 | # Dec 5 12:34:50 hostname test: ERROR 197 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 198 | self._write_logfile(self.logfile, line1) 199 | log.check(self.logfile) 200 | 201 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 202 | self.assertEqual( 203 | log.get_message(), 204 | self.MESSAGE_WARNING_ONE.format(line1, self.logfile)) 205 | 206 | # 2 lines matched 207 | # Dec 5 12:34:50 hostname test: ERROR1 208 | # Dec 5 12:34:50 hostname test: ERROR2 209 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR1") 210 | line3 = self._make_line(self._get_timestamp(), "test", "ERROR2") 211 | self._write_logfile(self.logfile, [line2, line3]) 212 | log.clear_state() 213 | log.check(self.logfile) 214 | 215 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 216 | self.assertEqual( 217 | log.get_message(), 218 | self.MESSAGE_WARNING_TWO.format(line2, line3, self.logfile)) 219 | 220 | # no line matched 221 | # Dec 5 12:34:50 hostname noop: NOOP 222 | line4 = self._make_line(self._get_timestamp(), "noop", "NOOP") 223 | self._write_logfile(self.logfile, line4) 224 | log.clear_state() 225 | log.check(self.logfile) 226 | 227 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 228 
| self.assertEqual(log.get_message(), self.MESSAGE_OK) 229 | 230 | def test_pattern_with_quiet(self): 231 | """--pattern and --quiet options 232 | """ 233 | self.config["pattern_list"] = ["ERROR"] 234 | self.config['output_quiet'] = True 235 | log = LogChecker(self.config) 236 | 237 | # 1 line matched 238 | # Dec 5 12:34:50 hostname test: ERROR 239 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 240 | self._write_logfile(self.logfile, line1) 241 | log.check(self.logfile) 242 | 243 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 244 | self.assertEqual( 245 | log.get_message(), 246 | self.MESSAGE_WARNING_ONE_WITH_QUIET.format(self.logfile)) 247 | 248 | # 2 lines matched 249 | # Dec 5 12:34:50 hostname test: ERROR1 250 | # Dec 5 12:34:50 hostname test: ERROR2 251 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR1") 252 | line3 = self._make_line(self._get_timestamp(), "test", "ERROR2") 253 | self._write_logfile(self.logfile, [line2, line3]) 254 | log.clear_state() 255 | log.check(self.logfile) 256 | 257 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 258 | self.assertEqual( 259 | log.get_message(), 260 | self.MESSAGE_WARNING_TWO_WITH_QUIET.format(self.logfile)) 261 | 262 | # no line matched 263 | # Dec 5 12:34:50 hostname noop: NOOP 264 | line4 = self._make_line(self._get_timestamp(), "noop", "NOOP") 265 | self._write_logfile(self.logfile, line4) 266 | log.clear_state() 267 | log.check(self.logfile) 268 | 269 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 270 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 271 | 272 | def test_pattern_with_header(self): 273 | """--pattern and --header options 274 | """ 275 | self.config["pattern_list"] = ["ERROR"] 276 | self.config['output_header'] = True 277 | log = LogChecker(self.config) 278 | 279 | # 1 line matched 280 | # Dec 5 12:34:50 hostname test: ERROR 281 | timestamp = self._get_timestamp() 282 | line1 = self._make_line(timestamp, "test", "ERROR") 283 | header1 = self._make_line(timestamp, "test", "") 284 | self._write_logfile(self.logfile, line1) 285 | log.check(self.logfile) 286 | 287 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 288 | self.assertEqual( 289 | log.get_message(), 290 | self.MESSAGE_WARNING_ONE_WITH_HEADER.format(header1, self.logfile)) 291 | 292 | # 2 lines matched 293 | # Dec 5 12:34:50 hostname test: ERROR1 294 | # Dec 5 12:34:50 hostname test: ERROR2 295 | timestamp = self._get_timestamp() 296 | line2 = self._make_line(timestamp, "test", "ERROR1") 297 | line3 = self._make_line(timestamp, "test", "ERROR2") 298 | header2 = self._make_line(timestamp, "test", "") 299 | header3 = self._make_line(timestamp, "test", "") 300 | self._write_logfile(self.logfile, [line2, line3]) 301 | log.clear_state() 302 | log.check(self.logfile) 303 | 304 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 305 | self.assertEqual( 306 | log.get_message(), 307 | self.MESSAGE_WARNING_TWO_WITH_HEADER.format(header2, header3, self.logfile)) 308 | 309 | # no line matched 310 | # Dec 5 12:34:50 hostname noop: NOOP 311 | line4 = self._make_line(self._get_timestamp(), "noop", "NOOP") 312 | self._write_logfile(self.logfile, line4) 313 | log.clear_state() 314 | log.check(self.logfile) 315 | 316 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 317 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 318 | 319 | def test_critical_pattern(self): 320 | """--critical-pattern option 321 | """ 322 | self.config["critical_pattern_list"] = ["FATAL"] 323 | log = 
LogChecker(self.config) 324 | 325 | # Dec 5 12:34:50 hostname test: FATAL 326 | line = self._make_line(self._get_timestamp(), "test", "FATAL") 327 | self._write_logfile(self.logfile, line) 328 | log.check(self.logfile) 329 | 330 | self.assertEqual(log.get_state(), LogChecker.STATE_CRITICAL) 331 | self.assertEqual( 332 | log.get_message(), 333 | self.MESSAGE_CRITICAL_ONE.format(line, self.logfile)) 334 | 335 | def test_critical_pattern_with_quiet(self): 336 | """--critical-pattern and --quiet options 337 | """ 338 | self.config["critical_pattern_list"] = ["FATAL"] 339 | self.config['output_quiet'] = True 340 | log = LogChecker(self.config) 341 | 342 | # Dec 5 12:34:50 hostname test: FATAL 343 | line = self._make_line(self._get_timestamp(), "test", "FATAL") 344 | self._write_logfile(self.logfile, line) 345 | log.check(self.logfile) 346 | 347 | self.assertEqual(log.get_state(), LogChecker.STATE_CRITICAL) 348 | self.assertEqual( 349 | log.get_message(), 350 | self.MESSAGE_CRITICAL_ONE_WITH_QUIET.format(self.logfile)) 351 | 352 | def test_critical_pattern_with_header(self): 353 | """--critical-pattern and --header options 354 | """ 355 | self.config["critical_pattern_list"] = ["FATAL"] 356 | self.config['output_header'] = True 357 | log = LogChecker(self.config) 358 | 359 | # Dec 5 12:34:50 hostname test: FATAL 360 | timestamp = self._get_timestamp() 361 | line = self._make_line(timestamp, "test", "FATAL") 362 | header = self._make_line(timestamp, "test", "") 363 | self._write_logfile(self.logfile, line) 364 | log.check(self.logfile) 365 | 366 | self.assertEqual(log.get_state(), LogChecker.STATE_CRITICAL) 367 | self.assertEqual( 368 | log.get_message(), 369 | self.MESSAGE_CRITICAL_ONE_WITH_HEADER.format(header, self.logfile)) 370 | 371 | def test_negpattern(self): 372 | """--negpattern option 373 | """ 374 | self.config["pattern_list"] = ["ERROR"] 375 | self.config["critical_pattern_list"] = ["FATAL"] 376 | self.config["negpattern_list"] = ["IGNORE"] 377 | log = LogChecker(self.config) 378 | 379 | # check --pattern 380 | # Dec 5 12:34:50 hostname test: ERROR IGNORE 381 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR IGNORE") 382 | self._write_logfile(self.logfile, line1) 383 | log.check(self.logfile) 384 | 385 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 386 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 387 | 388 | # check --critical-pattern 389 | # Dec 5 12:34:50 hostname test: FATAL IGNORE 390 | line2 = self._make_line(self._get_timestamp(), "test", "FATAL IGNORE") 391 | self._write_logfile(self.logfile, line2) 392 | log.clear_state() 393 | log.check(self.logfile) 394 | 395 | self.assertEqual(log.get_state(), LogChecker.STATE_CRITICAL) 396 | self.assertEqual( 397 | log.get_message(), 398 | self.MESSAGE_CRITICAL_ONE.format(line2, self.logfile)) 399 | 400 | def test_critical_negpattern(self): 401 | """--critical-negpattern option 402 | """ 403 | self.config["pattern_list"] = ["ERROR"] 404 | self.config["critical_pattern_list"] = ["FATAL"] 405 | self.config["critical_negpattern_list"] = ["IGNORE"] 406 | log = LogChecker(self.config) 407 | 408 | # check --pattern and --critical-negpattern 409 | # Dec 5 12:34:50 hostname test: ERROR IGNORE 410 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR IGNORE") 411 | self._write_logfile(self.logfile, line1) 412 | log.check(self.logfile) 413 | 414 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 415 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 416 | 417 | # check --critical-pattern and 
--ciritical-negpattern 418 | # Dec 5 12:34:50 hostname test: FATAL IGNORE 419 | line2 = self._make_line(self._get_timestamp(), "test", "FATAL IGNORE") 420 | self._write_logfile(self.logfile, line2) 421 | log.clear_state() 422 | log.check(self.logfile) 423 | 424 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 425 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 426 | 427 | # check --pattern, --critical-pattern and --critical-negpattern 428 | # Dec 5 12:34:50 hostname test: ERROR FATAL IGNORE 429 | line3 = self._make_line( 430 | self._get_timestamp(), "test", "ERROR FATAL IGNORE") 431 | self._write_logfile(self.logfile, line3) 432 | log.clear_state() 433 | log.check(self.logfile) 434 | 435 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 436 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 437 | 438 | def test_case_insensitive(self): 439 | """--case-insensitive option 440 | """ 441 | self.config["pattern_list"] = ["error"] 442 | self.config["critical_pattern_list"] = ["fatal"] 443 | self.config["negpattern_list"] = ["ignore"] 444 | self.config["case_insensitive"] = True 445 | log = LogChecker(self.config) 446 | 447 | # check --pattern 448 | # Dec 5 12:34:50 hostname test: ERROR 449 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 450 | self._write_logfile(self.logfile, line1) 451 | log.clear_state() 452 | log.check(self.logfile) 453 | 454 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 455 | self.assertEqual( 456 | log.get_message(), 457 | self.MESSAGE_WARNING_ONE.format(line1, self.logfile)) 458 | 459 | # check --critical-pattern 460 | # Dec 5 12:34:50 hostname test: FATAL 461 | line2 = self._make_line(self._get_timestamp(), "test", "FATAL") 462 | self._write_logfile(self.logfile, line2) 463 | log.clear_state() 464 | log.check(self.logfile) 465 | 466 | self.assertEqual(log.get_state(), LogChecker.STATE_CRITICAL) 467 | self.assertEqual( 468 | log.get_message(), 469 | self.MESSAGE_CRITICAL_ONE.format(line2, self.logfile)) 470 | 471 | # check --pattern and --negpattern 472 | # Dec 5 12:34:50 hostname test: ERROR ERROR IGNORE 473 | line3 = self._make_line(self._get_timestamp(), "test", "ERROR IGNORE") 474 | self._write_logfile(self.logfile, line3) 475 | log.clear_state() 476 | log.check(self.logfile) 477 | 478 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 479 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 480 | 481 | def test_encoding(self): 482 | """--pattern and --encoding 483 | """ 484 | self.config["pattern_list"] = ["エラー"] 485 | self.config["encoding"] = "EUC-JP" 486 | log = LogChecker(self.config) 487 | 488 | # Dec 5 12:34:50 hostname test: エラー 489 | line = self._make_line(self._get_timestamp(), "test", "エラー") 490 | self._write_logfile(self.logfile, line, encoding='EUC-JP') 491 | log.clear_state() 492 | log.check(self.logfile) 493 | 494 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 495 | self.assertEqual( 496 | log.get_message(), 497 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 498 | 499 | def test_multiline(self): 500 | """--multiline 501 | """ 502 | self.config["pattern_list"] = ["ERROR1.*ERROR2"] 503 | self.config["negpattern_list"] = ["IGNORE"] 504 | self.config["multiline"] = True 505 | log = LogChecker(self.config) 506 | 507 | # check --pattern, --multiline 508 | # Dec 5 12:34:50 hostname test: ERROR1 509 | # Dec 5 12:34:50 hostname test: ERROR2 510 | timestamp = self._get_timestamp() 511 | lines = [] 512 | messages = ["ERROR1", "ERROR2"] 513 | for message in messages: 514 | 
lines.append(self._make_line(timestamp, "test", message)) 515 | self._write_logfile(self.logfile, lines) 516 | log.clear_state() 517 | log.check(self.logfile) 518 | 519 | # detected line: Dec 5 12:34:50 hostname test: ERROR1 ERROR2 520 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 521 | self.assertEqual( 522 | log.get_message(), 523 | self.MESSAGE_WARNING_ONE.format( 524 | lines[0] + " " + messages[1], self.logfile)) 525 | 526 | # check --pattern, --negpattern and --multiline 527 | # Dec 5 12:34:50 hostname test: ERROR 528 | # Dec 5 12:34:50 hostname test: ERROR IGNORE 529 | timestamp = self._get_timestamp() 530 | lines = [] 531 | messages = ["ERROR", "ERROR IGNORE"] 532 | for message in messages: 533 | lines.append(self._make_line(timestamp, "test", message)) 534 | self._write_logfile(self.logfile, lines) 535 | log.clear_state() 536 | log.check(self.logfile) 537 | 538 | # detected line: Dec 5 12:34:50 hostname test: ERROR ERROR IGNORE 539 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 540 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 541 | 542 | def test_logfile(self): 543 | """--logfile option 544 | """ 545 | self.config["pattern_list"] = ["ERROR"] 546 | log = LogChecker(self.config) 547 | 548 | # check -logfile option with wild card '*' 549 | # Dec 5 12:34:50 hostname test: ERROR 550 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 551 | self._write_logfile(self.logfile1, line1) 552 | time.sleep(1) 553 | 554 | # Dec 5 12:34:51 hostname test: ERROR 555 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 556 | self._write_logfile(self.logfile2, line2) 557 | log.clear_state() 558 | log.check(self.logfile_pattern) 559 | 560 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 561 | self.assertEqual( 562 | log.get_message(), 563 | self.MESSAGE_WARNING_TWO_IN_TWO_FILES.format( 564 | line1, self.logfile1, line2, self.logfile2)) 565 | 566 | # --logfile option with multiple filenames 567 | # Dec 5 12:34:51 hostname test: ERROR 568 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 569 | self._write_logfile(self.logfile1, line1) 570 | time.sleep(1) 571 | 572 | # Dec 5 12:34:52 hostname test: ERROR 573 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 574 | self._write_logfile(self.logfile2, line2) 575 | logfile_pattern = "{0} {1}".format(self.logfile1, self.logfile2) 576 | log.clear_state() 577 | log.check(logfile_pattern) 578 | 579 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 580 | self.assertEqual( 581 | log.get_message(), 582 | self.MESSAGE_WARNING_TWO_IN_TWO_FILES.format( 583 | line1, self.logfile1, line2, self.logfile2)) 584 | 585 | def test_trace_inode(self): 586 | """--trace_inode 587 | """ 588 | self.config["pattern_list"] = ["ERROR"] 589 | self.config["trace_inode"] = True 590 | log = LogChecker(self.config) 591 | 592 | # within expiration 593 | # create logfile 594 | # Dec 5 12:34:50 hostname test: ERROR 595 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 596 | self._write_logfile(self.logfile, line1) 597 | 598 | # create seekfile of logfile 599 | log.check(self.logfile_pattern) 600 | seekfile_1 = log._create_seek_filename( 601 | self.logfile_pattern, self.logfile, trace_inode=True) 602 | 603 | # update logfile 604 | # Dec 5 12:34:51 hostname test: ERROR 605 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 606 | self._write_logfile(self.logfile, line2) 607 | 608 | # log rotation 609 | os.rename(self.logfile, self.logfile1) 610 | 
611 | # create a new logfile 612 | # Dec 5 12:34:52 hostname noop: NOOP 613 | line3 = self._make_line(self._get_timestamp(), "noop", "NOOP") 614 | self._write_logfile(self.logfile, line3) 615 | 616 | # create seekfile of logfile 617 | log.clear_state() 618 | log.check(self.logfile_pattern) 619 | seekfile_2 = log._create_seek_filename( 620 | self.logfile_pattern, self.logfile, trace_inode=True) 621 | seekfile1_2 = log._create_seek_filename( 622 | self.logfile_pattern, self.logfile1, trace_inode=True) 623 | 624 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 625 | self.assertEqual( 626 | log.get_message(), 627 | self.MESSAGE_WARNING_ONE.format(line2, self.logfile1)) 628 | self.assertEqual(seekfile_1, seekfile1_2) 629 | self.assertTrue(os.path.exists(seekfile_2)) 630 | self.assertTrue(os.path.exists(seekfile1_2)) 631 | 632 | def test_scantime(self): 633 | """--scantime option 634 | """ 635 | self.config["pattern_list"] = ["ERROR"] 636 | self.config["scantime"] = 2 637 | log = LogChecker(self.config) 638 | 639 | # within scantime. 640 | # Dec 5 12:34:50 hostname test: ERROR 641 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 642 | self._write_logfile(self.logfile1, line1) 643 | log.clear_state() 644 | log.check(self.logfile_pattern) 645 | 646 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 647 | self.assertEqual( 648 | log.get_message(), 649 | self.MESSAGE_WARNING_ONE.format(line1, self.logfile1)) 650 | 651 | # over scantime 652 | # Dec 5 12:34:50 hostname test: ERROR 653 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 654 | self._write_logfile(self.logfile1, line2) 655 | time.sleep(4) 656 | log.clear_state() 657 | log.check(self.logfile_pattern) 658 | 659 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 660 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 661 | 662 | # multiple logfiles. 663 | # Dec 5 12:34:50 hostname test: ERROR 664 | line3 = self._make_line(self._get_timestamp(), "test", "ERROR") 665 | self._write_logfile(self.logfile1, line3) 666 | time.sleep(4) 667 | 668 | # Dec 5 12:34:54 hostname test: ERROR 669 | line4 = self._make_line(self._get_timestamp(), "test", "ERROR") 670 | self._write_logfile(self.logfile2, line4) 671 | 672 | # logfile1 should be older than spantime. Therefore, don't scan it. 673 | log.clear_state() 674 | log.check(self.logfile_pattern) 675 | 676 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 677 | self.assertEqual( 678 | log.get_message(), 679 | self.MESSAGE_WARNING_ONE.format(line4, self.logfile2)) 680 | 681 | def test_remove_seekfile(self): 682 | """--expiration and --remove-seekfile options 683 | """ 684 | self.config["pattern_list"] = ["ERROR"] 685 | self.config["scantime"] = 2 686 | self.config["expiration"] = 4 687 | log = LogChecker(self.config) 688 | 689 | # within expiration 690 | # Dec 5 12:34:50 hostname test: ERROR 691 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 692 | self._write_logfile(self.logfile1, line1) 693 | 694 | log.check(self.logfile_pattern, remove_seekfile=True) 695 | self.seekfile1 = log._create_seek_filename( 696 | self.logfile_pattern, self.logfile1) 697 | time.sleep(2) 698 | 699 | # Dec 5 12:34:54 hostname test: ERROR 700 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 701 | self._write_logfile(self.logfile2, line2) 702 | 703 | # seek file of logfile1 should not be purged. 
704 | log.clear_state() 705 | log.check(self.logfile_pattern, remove_seekfile=True) 706 | self.seekfile2 = log._create_seek_filename( 707 | self.logfile_pattern, self.logfile2) 708 | 709 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 710 | self.assertEqual( 711 | log.get_message(), 712 | self.MESSAGE_WARNING_ONE.format(line2, self.logfile2)) 713 | self.assertTrue(os.path.exists(self.seekfile1)) 714 | self.assertTrue(os.path.exists(self.seekfile2)) 715 | 716 | # over expiration 717 | # Dec 5 12:34:50 hostname test: ERROR 718 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 719 | self._write_logfile(self.logfile1, line1) 720 | 721 | log.check(self.logfile_pattern, remove_seekfile=True) 722 | time.sleep(6) 723 | 724 | # Dec 5 12:34:54 hostname test: ERROR 725 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 726 | self._write_logfile(self.logfile2, line2) 727 | 728 | # seek file of logfile1 should be purged. 729 | log.clear_state() 730 | log.check(self.logfile_pattern, remove_seekfile=True) 731 | 732 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 733 | self.assertEqual( 734 | log.get_message(), 735 | self.MESSAGE_WARNING_ONE.format(line2, self.logfile2)) 736 | self.assertFalse(os.path.exists(self.seekfile1)) 737 | self.assertTrue(os.path.exists(self.seekfile2)) 738 | 739 | def test_remove_seekfile_with_dry_run(self): 740 | """--expiration, --remove-seekfile, and --dry-run options 741 | """ 742 | self.config["pattern_list"] = ["ERROR"] 743 | self.config["scantime"] = 2 744 | self.config["expiration"] = 4 745 | log = LogChecker(self.config) 746 | 747 | # within expiration 748 | # Dec 5 12:34:50 hostname test: ERROR 749 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 750 | self._write_logfile(self.logfile1, line1) 751 | 752 | log.check(self.logfile_pattern, remove_seekfile=True) 753 | self.seekfile1 = log._create_seek_filename( 754 | self.logfile_pattern, self.logfile1) 755 | time.sleep(2) 756 | 757 | # Dec 5 12:34:54 hostname test: ERROR 758 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 759 | self._write_logfile(self.logfile2, line2) 760 | 761 | # seek file of logfile1 should not be purged. 
762 | log.clear_state() 763 | log.check(self.logfile_pattern, remove_seekfile=True) 764 | self.seekfile2 = log._create_seek_filename( 765 | self.logfile_pattern, self.logfile2) 766 | 767 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 768 | self.assertEqual( 769 | log.get_message(), 770 | self.MESSAGE_WARNING_ONE.format(line2, self.logfile2)) 771 | self.assertTrue(os.path.exists(self.seekfile1)) 772 | self.assertTrue(os.path.exists(self.seekfile2)) 773 | 774 | # with dry run 775 | self.config["dry_run"] = True 776 | log = LogChecker(self.config) 777 | 778 | # over expiration 779 | # Dec 5 12:34:50 hostname test: ERROR 780 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 781 | self._write_logfile(self.logfile1, line1) 782 | 783 | log.check(self.logfile_pattern, remove_seekfile=True) 784 | time.sleep(6) 785 | 786 | # Dec 5 12:34:54 hostname test: ERROR 787 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 788 | self._write_logfile(self.logfile2, line2) 789 | 790 | log.clear_state() 791 | log.check(self.logfile_pattern, remove_seekfile=True) 792 | 793 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 794 | self.assertEqual( 795 | log.get_message(), 796 | self.MESSAGE_WARNING_ONE.format(line2, self.logfile2)) 797 | self.assertTrue(os.path.exists(self.seekfile1)) 798 | self.assertTrue(os.path.exists(self.seekfile2)) 799 | 800 | def test_remove_seekfile_inode(self): 801 | """--trace_inode, --expiration and --remove-seekfile options 802 | """ 803 | self.config["pattern_list"] = ["ERROR"] 804 | self.config["trace_inode"] = True 805 | self.config["scantime"] = 2 806 | self.config["expiration"] = 3 807 | log = LogChecker(self.config) 808 | 809 | # create logfile 810 | # Dec 5 12:34:50 hostname test: ERROR 811 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 812 | self._write_logfile(self.logfile, line1) 813 | 814 | # log rotation 815 | os.rename(self.logfile, self.logfile1) 816 | 817 | # create new logfile 818 | # Dec 5 12:34:50 hostname test: ERROR 819 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 820 | self._write_logfile(self.logfile, line2) 821 | 822 | # do check_log_multi, and create seekfile and seekfile1 823 | log.clear_state() 824 | log.check(self.logfile_pattern, remove_seekfile=True) 825 | seekfile_1 = log._create_seek_filename( 826 | self.logfile_pattern, self.logfile, trace_inode=True) 827 | seekfile1_1 = log._create_seek_filename( 828 | self.logfile_pattern, self.logfile1, trace_inode=True) 829 | time.sleep(4) 830 | 831 | # update logfile 832 | # Dec 5 12:34:54 hostname test: ERROR 833 | line3 = self._make_line(self._get_timestamp(), "test", "ERROR") 834 | self._write_logfile(self.logfile, line3) 835 | 836 | # log rotation, purge old logfile2 837 | os.rename(self.logfile1, self.logfile2) 838 | os.rename(self.logfile, self.logfile1) 839 | 840 | # seek file of old logfile1 should be purged. 
841 | log.clear_state() 842 | log.check( 843 | self.logfile_pattern, remove_seekfile=True) 844 | seekfile1_2 = log._create_seek_filename( 845 | self.logfile_pattern, self.logfile1, trace_inode=True) 846 | 847 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 848 | self.assertEqual( 849 | log.get_message(), 850 | self.MESSAGE_WARNING_ONE.format(line3, self.logfile1)) 851 | self.assertEqual(seekfile_1, seekfile1_2) 852 | self.assertFalse(os.path.exists(seekfile1_1)) 853 | self.assertTrue(os.path.exists(seekfile1_2)) 854 | 855 | def test_remove_seekfile_inode_with_dry_run(self): 856 | """--trace_inode, --expiration, --remove-seekfile, and --dry-run options 857 | """ 858 | self.config["pattern_list"] = ["ERROR"] 859 | self.config["trace_inode"] = True 860 | self.config["scantime"] = 2 861 | self.config["expiration"] = 3 862 | log = LogChecker(self.config) 863 | 864 | # create logfile 865 | # Dec 5 12:34:50 hostname test: ERROR 866 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 867 | self._write_logfile(self.logfile, line1) 868 | 869 | # log rotation 870 | os.rename(self.logfile, self.logfile1) 871 | 872 | # create new logfile 873 | # Dec 5 12:34:50 hostname test: ERROR 874 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 875 | self._write_logfile(self.logfile, line2) 876 | 877 | # do check_log_multi, and create seekfile and seekfile1 878 | log.clear_state() 879 | log.check(self.logfile_pattern, remove_seekfile=True) 880 | seekfile_1 = log._create_seek_filename( 881 | self.logfile_pattern, self.logfile, trace_inode=True) 882 | seekfile1_1 = log._create_seek_filename( 883 | self.logfile_pattern, self.logfile1, trace_inode=True) 884 | time.sleep(4) 885 | 886 | # update logfile 887 | # Dec 5 12:34:54 hostname test: ERROR 888 | line3 = self._make_line(self._get_timestamp(), "test", "ERROR") 889 | self._write_logfile(self.logfile, line3) 890 | 891 | # log rotation, purge old logfile2 892 | os.rename(self.logfile1, self.logfile2) 893 | os.rename(self.logfile, self.logfile1) 894 | 895 | # with dry run 896 | self.config["dry_run"] = True 897 | log = LogChecker(self.config) 898 | 899 | log.clear_state() 900 | log.check( 901 | self.logfile_pattern, remove_seekfile=True) 902 | seekfile1_2 = log._create_seek_filename( 903 | self.logfile_pattern, self.logfile1, trace_inode=True) 904 | 905 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 906 | self.assertEqual( 907 | log.get_message(), 908 | self.MESSAGE_WARNING_ONE.format(line3, self.logfile1)) 909 | self.assertEqual(seekfile_1, seekfile1_2) 910 | self.assertTrue(os.path.exists(seekfile1_1)) 911 | self.assertTrue(os.path.exists(seekfile1_2)) 912 | 913 | def test_replace_pipe_symbol(self): 914 | """replace pipe symbol 915 | """ 916 | line = "Dec | 5 12:34:56 hostname test: ERROR" 917 | self.config["pattern_list"] = ["ERROR"] 918 | log = LogChecker(self.config) 919 | 920 | # Dec 5 12:34:50 hostname test: ERROR | 921 | line = self._make_line(self._get_timestamp(), "test", "ERROR |") 922 | self._write_logfile(self.logfile, line) 923 | log.check(self.logfile) 924 | 925 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 926 | self.assertEqual( 927 | log.get_message(), 928 | self.MESSAGE_WARNING_ONE.format( 929 | line.replace("|", "(pipe)"), self.logfile)) 930 | 931 | def test_seekfile(self): 932 | """--seekfile option 933 | """ 934 | self.config["pattern_list"] = ["ERROR"] 935 | log = LogChecker(self.config) 936 | 937 | # 1 line matched 938 | # Dec 5 12:34:50 hostname test: ERROR 939 | line1 
= self._make_line(self._get_timestamp(), "test", "ERROR") 940 | self._write_logfile(self.logfile, line1) 941 | log.check(self.logfile, seekfile=self.seekfile) 942 | 943 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 944 | self.assertEqual( 945 | log.get_message(), 946 | self.MESSAGE_WARNING_ONE.format(line1, self.logfile)) 947 | 948 | # 2 lines matched 949 | # Dec 5 12:34:50 hostname test: ERROR1 950 | # Dec 5 12:34:50 hostname test: ERROR2 951 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR1") 952 | line3 = self._make_line(self._get_timestamp(), "test", "ERROR2") 953 | self._write_logfile(self.logfile, [line2, line3]) 954 | log.clear_state() 955 | log.check(self.logfile, seekfile=self.seekfile) 956 | 957 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 958 | self.assertEqual( 959 | log.get_message(), 960 | self.MESSAGE_WARNING_TWO.format(line2, line3, self.logfile)) 961 | 962 | # no line matched 963 | # Dec 5 12:34:50 hostname noop: NOOP 964 | line4 = self._make_line(self._get_timestamp(), "noop", "NOOP") 965 | self._write_logfile(self.logfile, line4) 966 | log.clear_state() 967 | log.check(self.logfile, seekfile=self.seekfile) 968 | 969 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 970 | self.assertEqual(log.get_message(), self.MESSAGE_OK) 971 | 972 | def test_tag(self): 973 | """--tag 974 | """ 975 | self.config["pattern_list"] = ["ERROR"] 976 | log = LogChecker(self.config) 977 | 978 | # create new logfiles 979 | # Dec 5 12:34:50 hostname test: ERROR 980 | line1 = self._make_line(self._get_timestamp(), "test", "ERROR") 981 | self._write_logfile(self.logfile, line1) 982 | 983 | # Dec 5 12:34:50 hostname test: ERROR 984 | line2 = self._make_line(self._get_timestamp(), "test", "ERROR") 985 | self._write_logfile(self.logfile1, line2) 986 | 987 | # Dec 5 12:34:50 hostname test: ERROR 988 | line3 = self._make_line(self._get_timestamp(), "noop", "NOOP") 989 | self._write_logfile(self.logfile2, line3) 990 | 991 | # create seekfile of logfile 992 | seekfile_1 = log._create_seek_filename( 993 | self.logfile_pattern, self.logfile, tag=self.tag1) 994 | seekfile_2 = log._create_seek_filename( 995 | self.logfile_pattern, self.logfile, tag=self.tag1) 996 | seekfile_3 = log._create_seek_filename( 997 | self.logfile_pattern, self.logfile, tag=self.tag2) 998 | log.check(self.logfile, seekfile=seekfile_3) 999 | log.clear_state() 1000 | log.check( 1001 | self.logfile_pattern, tag=self.tag2) 1002 | 1003 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 1004 | self.assertEqual( 1005 | log.get_message(), 1006 | self.MESSAGE_WARNING_ONE.format(line2, self.logfile1)) 1007 | self.assertEqual(seekfile_1, seekfile_2) 1008 | self.assertNotEqual(seekfile_1, seekfile_3) 1009 | self.assertTrue(seekfile_1.find(self.tag1)) 1010 | self.assertTrue(os.path.exists(seekfile_3)) 1011 | 1012 | def test_cachetime(self): 1013 | """--cachetime 1014 | """ 1015 | self.config["pattern_list"] = ["ERROR"] 1016 | self.config["cachetime"] = 2 1017 | log = LogChecker(self.config) 1018 | 1019 | cachefile = log._create_cache_filename(self.logfile) 1020 | 1021 | # within cachetime 1022 | # Dec 5 12:34:50 hostname test: ERROR 1023 | line = self._make_line(self._get_timestamp(), "test", "ERROR") 1024 | self._write_logfile(self.logfile, line) 1025 | 1026 | # check 1027 | log.clear_state() 1028 | log.check(self.logfile) 1029 | 1030 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 1031 | self.assertEqual( 1032 | log.get_message(), 1033 | 
self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 1034 | 1035 | # check again 1036 | log.clear_state() 1037 | log.check(self.logfile) 1038 | 1039 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 1040 | self.assertEqual( 1041 | log.get_message(), 1042 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 1043 | 1044 | log._remove_cache(cachefile) 1045 | 1046 | # over cachetime 1047 | # Dec 5 12:34:50 hostname test: ERROR 1048 | line = self._make_line(self._get_timestamp(), "test", "ERROR") 1049 | self._write_logfile(self.logfile, line) 1050 | 1051 | log.clear_state() 1052 | log.check(self.logfile) 1053 | 1054 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 1055 | self.assertEqual( 1056 | log.get_message(), 1057 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 1058 | 1059 | # check again 1060 | time.sleep(self.config["cachetime"] + 1) 1061 | log.clear_state() 1062 | log.check(self.logfile) 1063 | 1064 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 1065 | 1066 | log._remove_cache(cachefile) 1067 | 1068 | def test_cachetime_with_dry_run(self): 1069 | """--cachetime and --dry-run 1070 | """ 1071 | self.config["pattern_list"] = ["ERROR"] 1072 | self.config["cachetime"] = 60 1073 | log = LogChecker(self.config) 1074 | 1075 | cachefile = log._create_cache_filename(self.logfile) 1076 | 1077 | # create a cache file 1078 | # Dec 5 12:34:50 hostname test: ERROR 1079 | line = self._make_line(self._get_timestamp(), "test", "ERROR") 1080 | self._write_logfile(self.logfile, line) 1081 | 1082 | log.clear_state() 1083 | log.check(self.logfile) 1084 | 1085 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 1086 | self.assertEqual( 1087 | log.get_message(), 1088 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 1089 | 1090 | # verify it does not read a cache file. 1091 | self.config["dry_run"] = True 1092 | log = LogChecker(self.config) 1093 | 1094 | # Dec 5 12:34:50 hostname test: NOOP 1095 | line = self._make_line(self._get_timestamp(), "test", "NOOP") 1096 | self._write_logfile(self.logfile, line) 1097 | 1098 | log.clear_state() 1099 | log.check(self.logfile) 1100 | 1101 | self.assertEqual(log.get_state(), LogChecker.STATE_OK) 1102 | 1103 | # verify a cache file is not updated. 
1104 | self.config["dry_run"] = False 1105 | log = LogChecker(self.config) 1106 | 1107 | # Dec 5 12:34:50 hostname test: ERROR 1108 | line = self._make_line(self._get_timestamp(), "test", "ERROR") 1109 | self._write_logfile(self.logfile, line) 1110 | 1111 | log.clear_state() 1112 | log.check(self.logfile) 1113 | 1114 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 1115 | self.assertEqual( 1116 | log.get_message(), 1117 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 1118 | 1119 | log._remove_cache(cachefile) 1120 | 1121 | def test_lock_timeout(self): 1122 | """--lock-timeout 1123 | """ 1124 | self.config["pattern_list"] = ["ERROR"] 1125 | self.config["lock_timeout"] = 6 1126 | log = LogChecker(self.config) 1127 | 1128 | lockfile = log._create_lock_filename(self.logfile) 1129 | 1130 | # within lock_timeout 1131 | # |time|sub |main | 1132 | # |----|-----------|-----------| 1133 | # | 0|fork |sleep | 1134 | # | 1|lock OK |sleep | 1135 | # | 1| |check | 1136 | # | 1| |lock fail | 1137 | # | *| |sleep | 1138 | # | 5|unlock OK |sleep | 1139 | # | 5| |lock OK | 1140 | # | 5| |unlock OK | 1141 | # Dec 5 12:34:50 hostname test: ERROR 1142 | line = self._make_line(self._get_timestamp(), "test", "ERROR") 1143 | self._write_logfile(self.logfile, line) 1144 | 1145 | # locked by an another process 1146 | locked_time = 4 1147 | wait_interval = 0.1 1148 | proc = self._run_locked_subprocess(lockfile, locked_time) 1149 | for _ in range(100): 1150 | if os.path.isfile(lockfile): 1151 | break 1152 | time.sleep(wait_interval) 1153 | 1154 | # check 1155 | log.clear_state() 1156 | start_time = time.time() 1157 | log.check(self.logfile) 1158 | elapsed_time = time.time() - start_time 1159 | proc.wait() 1160 | 1161 | self.assertEqual(log.get_state(), LogChecker.STATE_WARNING) 1162 | self.assertEqual( 1163 | log.get_message(), 1164 | self.MESSAGE_WARNING_ONE.format(line, self.logfile)) 1165 | self.assertTrue(elapsed_time < self.config["lock_timeout"]) 1166 | self.assertTrue(elapsed_time > locked_time - wait_interval) 1167 | 1168 | # over lock_timeout 1169 | # |time|sub |main | 1170 | # |----|-----------|-----------| 1171 | # | 0|fork |sleep | 1172 | # | 1|lock OK |sleep | 1173 | # | 1| |check | 1174 | # | 1| |lock fail | 1175 | # | *| |sleep | 1176 | # | 7| |timeout | 1177 | # | 9|unlock OK | | 1178 | # Dec 5 12:34:50 hostname test: ERROR 1179 | line = self._make_line(self._get_timestamp(), "test", "ERROR") 1180 | self._write_logfile(self.logfile, line) 1181 | 1182 | # locked by an another process 1183 | locked_time = 8 1184 | wait_interval = 0.1 1185 | proc = self._run_locked_subprocess(lockfile, locked_time) 1186 | for _ in range(100): 1187 | if os.path.isfile(lockfile): 1188 | break 1189 | time.sleep(wait_interval) 1190 | 1191 | # check 1192 | log.clear_state() 1193 | start_time = time.time() 1194 | log.check(self.logfile) 1195 | elapsed_time = time.time() - start_time 1196 | proc.wait() 1197 | 1198 | self.assertEqual(log.get_state(), LogChecker.STATE_UNKNOWN) 1199 | self.assertEqual(log.get_message(), self.MESSAGE_UNKNOWN_LOCK_TIMEOUT) 1200 | self.assertTrue(elapsed_time > self.config["lock_timeout"]) 1201 | self.assertTrue(elapsed_time < locked_time) 1202 | 1203 | def test_lock(self): 1204 | """LogChecker.lock() 1205 | """ 1206 | # lock succeeded 1207 | lockfileobj = LogChecker.lock(self.lockfile) 1208 | self.assertNotEqual(lockfileobj, None) 1209 | LogChecker.unlock(self.lockfile, lockfileobj) 1210 | 1211 | # locked by an another process 1212 | locked_time = 4 1213 | wait_interval = 0.1 
1214 | proc = self._run_locked_subprocess(self.lockfile, locked_time) 1215 | for _ in range(100): 1216 | if os.path.isfile(self.lockfile): 1217 | break 1218 | time.sleep(wait_interval) 1219 | 1220 | with warnings.catch_warnings(): 1221 | warnings.simplefilter("ignore") 1222 | lockfileobj = LogChecker.lock(self.lockfile) 1223 | proc.wait() 1224 | self.assertEqual(lockfileobj, None) 1225 | 1226 | def test_unlock(self): 1227 | """LogChecker.unlock() 1228 | """ 1229 | lockfileobj = LogChecker.lock(self.lockfile) 1230 | LogChecker.unlock(self.lockfile, lockfileobj) 1231 | self.assertFalse(os.path.exists(self.lockfile)) 1232 | self.assertTrue(lockfileobj.closed) 1233 | 1234 | def _get_timestamp(self): 1235 | # format: Dec 5 12:34:00 1236 | timestamp = LogChecker.to_unicode( 1237 | datetime.datetime.now().strftime("%b %e %T")) 1238 | return timestamp 1239 | 1240 | def _get_customized_timestamp(self): 1241 | # format: Thu Dec 05 12:34:56 2013 1242 | timestamp = LogChecker.to_unicode( 1243 | datetime.datetime.now().strftime("%a %b %d %T %Y")) 1244 | return timestamp 1245 | 1246 | def _make_line(self, timestamp, tag, message): 1247 | # format: Dec 5 12:34:00 hostname noop: NOOP 1248 | line = "{0} hostname {1}: {2}".format(timestamp, tag, message) 1249 | return line 1250 | 1251 | def _make_customized_line(self, timestamp, level, message): 1252 | # format: [Thu Dec 05 12:34:56 2013] [info] NOOP 1253 | line = "[{0}] [{1}] {2}".format(timestamp, level, message) 1254 | return line 1255 | 1256 | def _write_logfile(self, logfile, lines, encoding='utf-8'): 1257 | """Write log file for syslog format.""" 1258 | fileobj = io.open(logfile, mode='a', encoding=encoding) 1259 | fileobj.write(self._make_line(self._get_timestamp(), "noop", "NOOP")) 1260 | fileobj.write("\n") 1261 | if isinstance(lines, list): 1262 | for line in lines: 1263 | fileobj.write(line) 1264 | fileobj.write("\n") 1265 | else: 1266 | fileobj.write(lines) 1267 | fileobj.write("\n") 1268 | fileobj.write(self._make_line(self._get_timestamp(), "noop", "NOOP")) 1269 | fileobj.write("\n") 1270 | fileobj.flush() 1271 | fileobj.close() 1272 | 1273 | def _write_customized_logfile(self, logfile, lines, encoding='utf-8'): 1274 | """Write log file for customized format.""" 1275 | fileobj = io.open(logfile, mode='a', encoding=encoding) 1276 | fileobj.write( 1277 | self._make_customized_line( 1278 | self._get_customized_timestamp(), "info", "NOOP")) 1279 | fileobj.write("\n") 1280 | if isinstance(lines, list): 1281 | for line in lines: 1282 | fileobj.write(line) 1283 | fileobj.write("\n") 1284 | else: 1285 | fileobj.write(lines) 1286 | fileobj.write("\n") 1287 | fileobj.write( 1288 | self._make_customized_line( 1289 | self._get_customized_timestamp(), "info", "NOOP")) 1290 | fileobj.write("\n") 1291 | fileobj.flush() 1292 | fileobj.close() 1293 | 1294 | def _run_locked_subprocess(self, lockfile, sleeptime): 1295 | code = ( 1296 | "import time\n" 1297 | "from check_log_ng import LogChecker\n" 1298 | "lockfile = '{0}'\n" 1299 | "lockfileobj = LogChecker.lock(lockfile)\n" 1300 | "time.sleep({1})\n" 1301 | "LogChecker.unlock(lockfile, lockfileobj)\n" 1302 | ).format(lockfile, LogChecker.to_unicode(str(sleeptime))) 1303 | code = code.replace("\n", ";") 1304 | proc = subprocess.Popen(['python', '-c', code]) 1305 | return proc 1306 | 1307 | 1308 | if __name__ == "__main__": 1309 | unittest.main() 1310 | 1311 | # vim: set ts=4 sw=4 et: 1312 | --------------------------------------------------------------------------------