├── logs └── .gitkeep ├── alert ├── __init__.py ├── alert_base.py ├── alert_util.py ├── alert_factory.py ├── webhook.py ├── guardian_alert.py └── emails.py ├── spark_checker ├── __init__.py ├── checker.py └── streaming_utils.py ├── requirements.txt ├── .gitignore ├── package.sh ├── waterdrop-guardian.spec ├── README.md ├── config.json.template ├── config_api.py ├── LICENSE └── guardian.py /logs/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /alert/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /spark_checker/__init__.py: -------------------------------------------------------------------------------- 1 | from .checker import check 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | requests==2.10.0 2 | pyinstaller==3.3.1 3 | tzlocal==1.4 4 | six==1.10.0 5 | setuptools==19.2 6 | flask==0.12.2 7 | python-dateutil==2.4.2 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # If you need to exclude files such as those generated by an IDE, use 2 | # $GIT_DIR/info/exclude or the core.excludesFile configuration variable as 3 | # described in https://git-scm.com/docs/gitignore 4 | 5 | # Env 6 | VENV 7 | venv 8 | env 9 | dist 10 | build 11 | *.spec 12 | .idea 13 | 14 | ## Python 15 | __pycache__/ 16 | *.py[cod] 17 | *$py.class 18 | -------------------------------------------------------------------------------- /alert/alert_base.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import abc 4 | 
class GuardianAlertBase(object):
    """Abstract base class for Guardian alert channels.

    Concrete channels (e.g. Emails, Webhook) receive the full
    ``alert_manager`` config dict and must implement ``send_alert()``
    and ``check_config()``.
    """
    # BUG FIX: the original plain (old-style) class never declared an ABC
    # metaclass, so @abc.abstractmethod had no effect and incomplete
    # subclasses could be instantiated silently.  Python 2 style:
    __metaclass__ = abc.ABCMeta

    def __init__(self, name, config):
        self.name = name      # alert method name, e.g. "emails" or "webhook"
        self.config = config  # the whole alert_manager config dict

    @abc.abstractmethod
    def send_alert(self, level, subject, objects, content):
        """Deliver one alert message; raise AlertException on failure."""
        pass

    @abc.abstractmethod
    def check_config(self):
        """Return True if this channel's config section is usable."""
        pass

# --------------------------------------------------------------------------
# /alert/alert_util.py:
# --------------------------------------------------------------------------
# encoding: utf-8


def match_alert(routes, level):
    """Return True when *level* is listed in routes['match']['level']."""
    # Idiom fix: return the membership test directly instead of the
    # if/return True/else/return False ladder.
    return level in routes['match']['level']


class AlertException(Exception):
    """Base class for all alert-related errors."""
    pass


class UnsupportedAlertMethod(AlertException):
    """Raised when an unknown alert method name is configured."""
    pass


class IncorrectConfig(AlertException):
    """Raised when an alert method's configuration is incomplete."""
    pass

# --------------------------------------------------------------------------
# /package.sh (head; shell source kept verbatim as comments so this span
# stays well-formed -- the remainder of the script follows on the next line):
# --------------------------------------------------------------------------
# #!/bin/sh
#
# APP_NAME=guardian
# VERSION=1.1.0
# PACKAGE_DIR=$APP_NAME"_"$VERSION
# echo "Building Package $PACKAGE_DIR"
#
# pyinstaller --onefile $APP_NAME.py
# if [ $?
!= 0 ];then 10 | echo "[ERROR] failed to build $APP_NAME module: $APP_NAME" 11 | exit -1 12 | fi 13 | 14 | cd dist 15 | mkdir -p $PACKAGE_DIR/bin 16 | mkdir -p $PACKAGE_DIR/logs 17 | cp $APP_NAME $PACKAGE_DIR/bin 18 | 19 | mkdir -p $PACKAGE_DIR/config 20 | cp ../config.json.template $PACKAGE_DIR/config 21 | 22 | tar zcvf $PACKAGE_DIR.tar.gz $PACKAGE_DIR -------------------------------------------------------------------------------- /waterdrop-guardian.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python -*- 2 | a = Analysis(['waterdrop-guardian.py'], 3 | pathex=['/Users/yixia/IdeaProjects/waterdrop/guardian'], 4 | hiddenimports=[], 5 | hookspath=None, 6 | runtime_hooks=None) 7 | pyz = PYZ(a.pure) 8 | exe = EXE(pyz, 9 | a.scripts, 10 | a.binaries, 11 | a.zipfiles, 12 | a.datas, 13 | name='waterdrop-guardian', 14 | debug=False, 15 | strip=None, 16 | upx=True, 17 | console=True ) 18 | -------------------------------------------------------------------------------- /alert/alert_factory.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | from alert.emails import Emails 4 | from alert.webhook import Webhook 5 | from alert.alert_util import UnsupportedAlertMethod 6 | 7 | 8 | class GuardianAlertFactory: 9 | def __init__(self): 10 | pass 11 | 12 | @staticmethod 13 | def render_alert(name, config): 14 | if name == "emails": 15 | return Emails(name, config) 16 | elif name == "webhook": 17 | return Webhook(name, config) 18 | else: 19 | raise UnsupportedAlertMethod( 20 | "Unsupported alert method: {}".format(name)) 21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## README 2 | 3 | Guardian 是 Waterdrop 的子项目,用于监控 Waterdrop 的运行状态,目前仅支持以下功能: 4 | 5 | * 监控运行于Yarn集群上的 Waterdrop 程序 6 | 7 | * 可监控 Waterdrop 是否存活,并能够根据配置自动拉起 Waterdrop 程序 
8 | 9 | * 可监控 Waterdrop 程序运行时streaming batch是否存在堆积和延迟 10 | 11 | * 以上两项监控如果达到阈值可发送邮件报警 12 | 13 | * 可自定义实现不同的报警方法(Python),如短信报警,微信报警等。 14 | 15 | 16 | --- 17 | 18 | ## 运行环境 19 | 20 | Guardian 虽然是用python开发的,但是它已经被打包为可独立部署的程序包,不依赖任何Python环境及Python依赖包 21 | 22 | --- 23 | 24 | ## 为Guardian的代码做贡献 25 | 26 | > 强烈建议使用Python2.7.x 作为 Guardian 的开发环境 27 | 28 | > 建议使用virtualenv(但不是必须的)作为python运行环境切换的工具。 29 | 30 | > 安装virtualenv方法:pip install virtualenv==1.11.6 31 | 32 | ``` 33 | # 初始化开发环境 34 | virtualenv -p python2.7 VENV 35 | source VENV/bin/activate 36 | 37 | pip install -r requirements.txt 38 | 39 | ``` 40 | 41 | ## 打包为可独立运行的服务 42 | 43 | ``` 44 | ./package.sh 45 | ``` 46 | 47 | 打包完成后,可以在`dist/`找到guardian_.tar.gz, 解压缩后可直接运行 48 | 49 | ## 运行 50 | 51 | ``` 52 | # show help information 53 | cd ./dist/guardian_1.1.0/ 54 | ./bin/guardian checkout config.json.template 55 | ``` 56 | -------------------------------------------------------------------------------- /alert/webhook.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import json 4 | import httplib 5 | from alert.alert_base import GuardianAlertBase 6 | from urlparse import urlparse 7 | 8 | from alert_util import match_alert, AlertException 9 | 10 | 11 | class Webhook(GuardianAlertBase): 12 | 13 | def send_alert(self, level, subject, objects, content): 14 | 15 | alert_config = self.config['webhook'] 16 | url = alert_config['url'] 17 | params = { 18 | 'subject': subject, 19 | 'objects': objects, 20 | 'content': content 21 | } 22 | 23 | headers = { 24 | 'content-type': 'application/json;charset=UTF-8', 25 | 'Accept': 'text/plain' 26 | } 27 | 28 | if match_alert(alert_config['routes'], level): 29 | url_info = urlparse(url) 30 | port = 80 if url_info.port is None else url_info.port 31 | try: 32 | http_client = httplib.HTTPConnection(url_info.hostname, 33 | port, timeout=5) 34 | http_client.request("POST", url_info.path, 35 | json.dumps(params), headers) 36 | except 
Exception as e: 37 | raise AlertException(e) 38 | 39 | def check_config(self): 40 | alert_config = self.config['webhook'] 41 | if 'url' not in alert_config or 'routes' not in alert_config: 42 | return False 43 | else: 44 | return True 45 | -------------------------------------------------------------------------------- /alert/guardian_alert.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import logging 4 | 5 | from alert.alert_factory import GuardianAlertFactory 6 | 7 | from alert.alert_util import (UnsupportedAlertMethod, IncorrectConfig, 8 | AlertException) 9 | 10 | 11 | class GuardianAlert(object): 12 | 13 | def __init__(self, alert_config): 14 | self.alert_config = alert_config 15 | self.alerts = self.create_alert() 16 | self.check_config() 17 | 18 | def create_alert(self): 19 | alerts = [] 20 | for method in self.alert_config: 21 | try: 22 | alerts.append(GuardianAlertFactory.render_alert( 23 | method, 24 | self.alert_config)) 25 | except Exception as e: 26 | logging.error(e) 27 | raise UnsupportedAlertMethod(e) 28 | 29 | return alerts 30 | 31 | def send_alert(self, level, subject, objects, content): 32 | for alert in self.alerts: 33 | try: 34 | alert.send_alert(level, subject, objects, content) 35 | logging.info("Send alert with <{}> succeed.".format( 36 | alert.name)) 37 | except AlertException as e: 38 | logging.error( 39 | 'Failed to send alert, caught exception: ' + repr(e)) 40 | 41 | def check_config(self): 42 | for alert_impl in self.alerts: 43 | if not alert_impl.check_config(): 44 | raise IncorrectConfig("Incorrect Config: " + alert_impl.name) 45 | -------------------------------------------------------------------------------- /alert/emails.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import smtplib 4 | import logging 5 | from alert.alert_base import GuardianAlertBase 6 | from email.mime.text import MIMEText 7 | 
from email.header import Header 8 | 9 | from alert_util import match_alert, AlertException 10 | 11 | 12 | class Emails(GuardianAlertBase): 13 | 14 | def send_alert(self, level, subject, objects, content): 15 | 16 | config = self.config 17 | config = config['emails'] 18 | sender = config['sender'] 19 | receivers = config['receivers'] 20 | smtpserver = config['smtp_server'] 21 | username = config['auth_username'] 22 | password = config['auth_password'] 23 | 24 | if match_alert(config['routes'], level): 25 | 26 | message = MIMEText('{0}: {1}'.format(objects, content), 'plain', 'utf-8') 27 | message['Subject'] = Header(subject, 'utf-8') 28 | message['To'] = ';'.join(config['receivers']) 29 | 30 | try: 31 | smtp = smtplib.SMTP() 32 | smtp.connect(smtpserver) 33 | smtp.login(username, password) 34 | smtp.sendmail(sender, receivers, message.as_string()) 35 | 36 | except Exception as e: 37 | logging.error(str(e)) 38 | raise AlertException(e) 39 | 40 | def check_config(self): 41 | alert_config = self.config['emails'] 42 | arg_list = ['sender', 'receivers', 'smtp_server', 'auth_username', 43 | 'auth_password', 'routes'] 44 | 45 | for arg in arg_list: 46 | if arg not in alert_config: 47 | return False 48 | 49 | return True 50 | -------------------------------------------------------------------------------- /config.json.template: -------------------------------------------------------------------------------- 1 | { 2 | "port": 5000, 3 | "node_name": "node01", 4 | "check_interval": 300, 5 | "yarn": { 6 | "api_hosts": [ 7 | "10.11.10.21:8088", 8 | "10.11.10.22:8088" 9 | ] 10 | }, 11 | "apps": [ 12 | { 13 | "app_name": "waterdrop-app2", 14 | "start_cmd": "test_cmd_not_exist", 15 | "app_num": 1, 16 | "check_type": "spark", 17 | "check_options": {} 18 | }, 19 | { 20 | "app_name": "waterdrop-app3", 21 | "start_cmd": "test_cmd_not_exist", 22 | "app_num": 1, 23 | "check_type": "spark", 24 | "check_options": { 25 | "alert_level": "WARNING", 26 | "max_delayed_batch_num": 10, 27 | 
"max_delayed_time": 600 28 | } 29 | }, 30 | { 31 | "app_name": "not_running_app", 32 | "start_cmd": "test_cmd_not_exist", 33 | "app_num": 0, 34 | "check_type": "spark", 35 | "check_options": {}, 36 | "active": false 37 | } 38 | ], 39 | "alert_manager": { 40 | "emails": { 41 | "auth_username": "username", 42 | "auth_password": "password", 43 | "smtp_server": "smtp.163.com", 44 | "sender": "huochen1994@163.com", 45 | "receivers": ["", ""], 46 | "routes": { 47 | "match": { 48 | "level": ["WARNING", "ERROR"] 49 | } 50 | } 51 | }, 52 | "webhook": { 53 | "url": "http://api.webhook.interestinglab.org/alert", 54 | "routes": { 55 | "match": { 56 | "level": ["ERROR"] 57 | } 58 | } 59 | } 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /spark_checker/checker.py: -------------------------------------------------------------------------------- 1 | # coding:utf-8 2 | 3 | import logging 4 | import streaming_utils 5 | 6 | 7 | def check(apps, config, alert_client): 8 | 9 | for app in apps: 10 | logging.debug("check spark streaming:" + repr(app)) 11 | 12 | _check_impl(app, config, alert_client) 13 | 14 | 15 | def _alert_invalid_config(alert_client): 16 | subject = 'Guardian' 17 | objects = '配置' 18 | content = '监控配置错误' 19 | 20 | alert_client.send_alert('INTERNAL', subject, objects, content) 21 | 22 | 23 | def _check_impl(app, config, alert_client): 24 | 25 | app_config = config['app'] 26 | if 'check_options' not in app_config or not isinstance(app_config['check_options'], dict) or len(app_config['check_options']) == 0: 27 | return 28 | 29 | check_options = app_config['check_options'] 30 | if 'alert_level' not in check_options or ('max_delayed_batch_num' not in check_options and 'max_delayed_time' not in check_options): 31 | _alert_invalid_config(alert_client) 32 | return 33 | 34 | active_rm = config['yarn']['active_rm'] 35 | 36 | try: 37 | batch_stats = streaming_utils.streaming_batch_stats(active_rm, app['id'], status='RUNNING') 38 
| delayed_batch_number = streaming_utils.streaming_batch_delay(batch_stats) 39 | seconds_delayed = streaming_utils.streaming_time_delay(batch_stats) 40 | log_content = 'checked spark streaming, appname: %s, delayed_batch_number: %d, seconds_delayed: %d' % (app['name'], delayed_batch_number, seconds_delayed) 41 | logging.debug(log_content) 42 | except streaming_utils.StreamingUtilsError as e: 43 | logging.error("failed to get streaming batch delay stats, caught exception" + repr(e)) 44 | subject = 'Guardian' 45 | objects = app['name'] 46 | content = "无法获取到流式处理延迟统计" 47 | alert_client.send_alert('FATAL', subject, objects, content) 48 | return 49 | 50 | content = "" 51 | send_alert = False 52 | if 'max_delayed_batch_num' in check_options and delayed_batch_number > int(check_options['max_delayed_batch_num']): 53 | send_alert = True 54 | content = "流式处理延迟batch个数[%d], 超过阈值[%d], " % (delayed_batch_number, int(check_options['max_delayed_batch_num'])) 55 | 56 | if 'max_delayed_time' in check_options and seconds_delayed > int(check_options['max_delayed_time']): 57 | send_alert = True 58 | content += ", 流式处理延迟时间[%d]秒, 超过阈值[%d]秒" % (seconds_delayed, int(check_options['max_delayed_time'])) 59 | 60 | if send_alert: 61 | subject = 'Guardian' 62 | objects = app['name'] 63 | if check_options['alert_level'] == 'FATAL': 64 | alert_client.send_alert('FATAL', subject, objects, content) 65 | elif check_options['alert_level'] == 'ERROR': 66 | alert_client.send_alert('ERROR', subject, objects, content) 67 | elif check_options['alert_level'] == 'WARNING': 68 | alert_client.send_alert('WARNING', subject, objects, content) 69 | -------------------------------------------------------------------------------- /config_api.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, request, jsonify 2 | import json 3 | 4 | 5 | app = Flask(__name__) 6 | 7 | 8 | @app.route('/config/', methods=['GET', 'POST', 'DELETE']) 9 | def 
hello_world(app_name): 10 | 11 | msg = {} 12 | try: 13 | f = open(app.config['config_name'], 'r') 14 | config = json.load(f) 15 | apps = config['apps'] 16 | f.close() 17 | 18 | except IOError: 19 | msg = { 20 | 'status': 2, 21 | 'content': 'File <{}> not found'.format(app.config['config_name']) 22 | } 23 | return jsonify(msg) 24 | 25 | except Exception as e: 26 | msg = { 27 | 'status': 2, 28 | 'content': str(e) 29 | } 30 | return jsonify(msg) 31 | 32 | if request.method == 'GET': 33 | 34 | flag = 0 35 | for spark_app in apps: 36 | if spark_app['app_name'] == app_name: 37 | msg = { 38 | 'status': 0, 39 | 'content': spark_app 40 | 41 | } 42 | flag = 1 43 | 44 | if flag == 0: 45 | msg = { 46 | 'status': 2, 47 | 'content': '{0} not in {1}'.format(app_name, app.config['config_name']) 48 | } 49 | 50 | return jsonify(msg) 51 | 52 | # Supporting insert and update 53 | elif request.method == 'POST': 54 | 55 | try: 56 | body = json.loads(request.get_data()) 57 | except Exception as e: 58 | msg = { 59 | 'status': 2, 60 | 'content': str(e) 61 | } 62 | 63 | return jsonify(msg) 64 | 65 | flag = 0 66 | for spark_app in apps: 67 | if spark_app['app_name'] == app_name: 68 | info = spark_app 69 | for key in body.keys(): 70 | info[key] = body[key] 71 | 72 | flag = 1 73 | 74 | if flag == 0: 75 | body['app_name'] = app_name 76 | apps.append(body) 77 | 78 | f = open(app.config['config_name'], 'w') 79 | f.write(json.dumps(config, indent=4)) 80 | f.close() 81 | 82 | msg = { 83 | "status": 0 84 | } 85 | return jsonify(msg) 86 | 87 | elif request.method == 'DELETE': 88 | 89 | for i in range(len(apps)): 90 | if apps[i]['app_name'] == app_name: 91 | del apps[i] 92 | 93 | msg = { 94 | 'status': 0 95 | } 96 | 97 | f = open(app.config['config_name'], 'w') 98 | f.write(json.dumps(config, indent=4)) 99 | f.close() 100 | 101 | return jsonify(msg) 102 | 103 | msg = { 104 | 'status': '2', 105 | 'content': '{0} not in {1}'.format(app_name, app.config['config_name']) 106 | } 107 | 108 | return 
jsonify(msg) 109 | 110 | else: 111 | msg = { 112 | 'status': 1, 113 | 'content': 'Unsupported Method' 114 | } 115 | 116 | return jsonify(msg) 117 | -------------------------------------------------------------------------------- /spark_checker/streaming_utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import datetime 4 | import re 5 | import pytz 6 | import requests 7 | from tzlocal import get_localzone 8 | 9 | MAX_RETRY = 5 10 | 11 | 12 | class StreamingUtilsError(Exception): 13 | pass 14 | 15 | 16 | def streaming_batch_stats(master_url, application_id, status=None, timeout=20): 17 | stats_url = 'http://' + master_url + '/proxy/' + application_id + \ 18 | '/api/v1/applications/' + application_id + '/jobs' 19 | 20 | if status is not None: 21 | stats_url += "?status=" + status 22 | 23 | retry = 0 24 | stats_json = None 25 | while retry < MAX_RETRY: 26 | retry += 1 27 | 28 | try: 29 | resp = requests.get(stats_url, timeout=timeout) 30 | except requests.exceptions.ConnectTimeout: 31 | continue 32 | except requests.exceptions.ReadTimeout: 33 | continue 34 | 35 | if resp.status_code != 200: 36 | continue 37 | 38 | try: 39 | stats_json = resp.json() 40 | except ValueError as e: 41 | stats_json = None 42 | 43 | break 44 | 45 | if stats_json is None: 46 | raise StreamingUtilsError("cannot get streaming batch utils from yarn") 47 | 48 | batch_regex = re.compile(r'.*id=(\d+).*batch time (\d\d:\d\d:\d\d).*') 49 | 50 | batch_stats = {} 51 | for job in stats_json: 52 | 53 | if not 'description' in job: 54 | continue # job needs a batch start time 55 | 56 | job_stats = {} 57 | 58 | description = job['description'] 59 | matches = batch_regex.match(description).groups() 60 | batch_id = int(matches[0]) 61 | batch_time = datetime.datetime.fromtimestamp(batch_id / 1000) 62 | batch_time = batch_time.replace(tzinfo=get_localzone()) 63 | 64 | if not batch_id in batch_stats: 65 | batch_stats[batch_id] = { 66 | 
'batchStartTime': batch_time, # Scheduled Batch Start Time 67 | 'status': job['status'], 68 | 'jobs': [] # Job Details 69 | } 70 | 71 | if job['status'] == 'RUNNING': 72 | batch_stats[batch_id]['status'] = job['status'] 73 | 74 | job_stats = { 75 | 'status': job['status'], 76 | } 77 | if 'submissionTime' in job: 78 | job_stats['submissionTime'] = datetime.datetime.strptime( 79 | job['submissionTime'], '%Y-%m-%dT%H:%M:%S.%f%Z') 80 | job_stats['submissionTime'] = job_stats['submissionTime'].replace( 81 | tzinfo=pytz.UTC) 82 | # print job['submissionTime'], job_stats['submissionTime'].astimezone(get_localzone()) 83 | 84 | if 'completionTime' in job: 85 | job_stats['completionTime'] = datetime.datetime.strptime( 86 | job['completionTime'], '%Y-%m-%dT%H:%M:%S.%f%Z') 87 | job_stats['completionTime'] = job_stats['completionTime'].replace( 88 | tzinfo=pytz.UTC) 89 | # print job['completionTime'], job_stats['completionTime'].astimezone(get_localzone()) 90 | 91 | batch_stats[batch_id]['jobs'].append(job_stats) 92 | 93 | return batch_stats 94 | 95 | 96 | def streaming_duration(batch_stats): 97 | """get spark streaming duration in seconds 98 | return None if there is only 0 or 1 batch, so duration cannot be calculated 99 | """ 100 | if batch_stats is None: 101 | raise StreamingUtilsError("invalid batch stats") 102 | 103 | if len(batch_stats) < 2: 104 | return None 105 | 106 | batch_list = sorted(batch_stats.iteritems()) 107 | return (batch_list[1][1]['batchStartTime'] - batch_list[0][1][ 108 | 'batchStartTime']).seconds 109 | 110 | 111 | def streaming_batch_delay(batch_stats): 112 | """return number of batches delayed by now 113 | """ 114 | if batch_stats is None: 115 | raise StreamingUtilsError("invalid batch stats") 116 | 117 | running_batches = streaming_running_batch(batch_stats) 118 | if len(running_batches) == 0: 119 | return 0 120 | 121 | now = datetime.datetime.now() 122 | now = now.replace(tzinfo=get_localzone()) 123 | 124 | batch_duration = 
streaming_duration(batch_stats) 125 | if batch_duration is None: 126 | return 0 127 | 128 | running_batches = sorted(running_batches) 129 | 130 | delay = (now - running_batches[-1][ 131 | 'batchStartTime']).seconds / batch_duration 132 | return delay 133 | 134 | 135 | def streaming_time_delay(batch_stats): 136 | """get time delay in seconds of running batch by now 137 | """ 138 | if batch_stats is None: 139 | raise StreamingUtilsError("invalid batch stats") 140 | 141 | running_batches = streaming_running_batch(batch_stats) 142 | if len(running_batches) == 0: 143 | return 0 144 | 145 | batch_duration = streaming_duration(batch_stats) 146 | if batch_duration is None: 147 | return 0 148 | 149 | now = datetime.datetime.now() 150 | now = now.replace(tzinfo=get_localzone()) 151 | 152 | if now - datetime.timedelta(seconds=batch_duration) <= running_batches[0][ 153 | 'batchStartTime']: 154 | return 0 155 | 156 | delay = now - datetime.timedelta(seconds=batch_duration) - \ 157 | running_batches[0]['batchStartTime'] 158 | print "now:", now, "batch_duration:", batch_duration, "batchStartTime:", \ 159 | running_batches[0]['batchStartTime'], "delay:", delay 160 | return delay.seconds 161 | 162 | 163 | def streaming_running_batch(batch_stats): 164 | """get running batch list of given batch stats, order by 'batchStartTime' ascending 165 | """ 166 | running_batches = filter(lambda x: x['status'] == 'RUNNING', 167 | batch_stats.values()) 168 | if len(running_batches) > 1: 169 | running_batches = sorted(running_batches) 170 | return running_batches 171 | 172 | 173 | def main(master_url, application_id, status): 174 | batch_stats = streaming_batch_stats(master_url, application_id, status) 175 | batch_list = sorted(batch_stats.iteritems()) 176 | 177 | batch_duration = streaming_duration(batch_stats) 178 | if batch_duration is None: 179 | print "Spark Streaming Batch Duration: too few batches to calculate duration", 180 | else: 181 | print "Spark Streaming Batch Duration:", 
batch_duration, "seconds" 182 | 183 | for item in batch_list: 184 | 185 | stats = item[1] 186 | jobs = stats['jobs'] 187 | jobs = sorted(jobs, key=lambda x: x['submissionTime']) 188 | 189 | scheduling_delay = jobs[0]['submissionTime'] - stats['batchStartTime'] 190 | 191 | processing_time = None 192 | if stats['status'] != 'RUNNING': 193 | processing_time = jobs[-1]['completionTime'] - jobs[0][ 194 | 'submissionTime'] 195 | 196 | print 'Batch: %10s -- %20s -- %20s -- %20s' % ( 197 | stats['status'], stats['batchStartTime'], processing_time, 198 | scheduling_delay) 199 | 200 | print "Pick up running batches: " 201 | running_batches = streaming_running_batch(batch_stats) 202 | for stats in running_batches: 203 | print 'Batch: %10s -- %20s -- %20s -- %20s' % ( 204 | stats['status'], stats['batchStartTime'], processing_time, 205 | scheduling_delay,) 206 | 207 | print "~" * 30 208 | 209 | num_delayed_batches = streaming_batch_delay(batch_stats) 210 | time_delayed = streaming_time_delay(batch_stats) 211 | print "Detect batch scheduling delay," 212 | print "\tNumber batch delay:", num_delayed_batches 213 | print "\tTime delayed:", time_delayed 214 | 215 | 216 | """ 217 | Get Spark Streaming microbatch statistics: 218 | - Batch start time 219 | - Scheduling delay (in seconds) for each microbatch 220 | - Processing time (in seconds) for each microbatch 221 | 222 | Tested on Spark 2.0.0 running on YARN 2.7.2. 223 | 224 | Time deltas are naive, do not run close to midnight (yet!). 225 | 226 | Example usage: 227 | 228 | python get_spark_streaming_batch_statistics.py \ 229 | --master ec2-52-40-144-150.us-west-2.compute.amazonaws.com \ 230 | --applicationId application_1469205272660_0006 231 | 232 | Output (batch start time, processing time, scheduling delay): 233 | 234 | 18:36:55 3.991 3783.837 235 | 18:36:56 4.001 3786.832 236 | 18:36:57 3.949 3789.862 237 | ... 
238 | """ 239 | 240 | if __name__ == '__main__': 241 | import argparse 242 | 243 | parser = argparse.ArgumentParser() 244 | parser.add_argument('--master', help='YARN ResourceManager URL', 245 | required=True) 246 | parser.add_argument('--applicationId', help='YARN application ID', 247 | required=True) 248 | parser.add_argument('--status', 249 | help='Spark Job Status[RUNNING, SUCCEEDED]', 250 | required=False) 251 | 252 | args = vars(parser.parse_args()) 253 | 254 | master_url = args['master'] 255 | application_id = args['applicationId'] 256 | status = args['status'] 257 | 258 | try: 259 | main(master_url, application_id, status) 260 | except StreamingUtilsError as e: 261 | print "Failure, caught exception:", repr(e) 262 | except KeyboardInterrupt as e: 263 | print "Exiting, bye !" 264 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /guardian.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import json 4 | import sys 5 | import os 6 | import subprocess 7 | import time 8 | import threading 9 | import requests 10 | import logging 11 | from logging import getLogger, Formatter, DEBUG 12 | from logging.handlers import TimedRotatingFileHandler 13 | 14 | import config_api 15 | from alert.guardian_alert import GuardianAlert 16 | from alert.alert_util import AlertException 17 | 18 | # TODO: 19 | # from contacts import contacts 20 | 21 | 22 | import spark_checker 23 | 24 | # TODO: 25 | # start application concurrently 26 | 27 | log = getLogger() 28 | log_file = os.path.abspath("logs/guardian.log") 29 | rotate_handler = TimedRotatingFileHandler(log_file, when='h', interval=24, 30 | backupCount=7) 31 | 32 | stream_handler = logging.StreamHandler() 33 | format_str = ('%(asctime)s 
%(levelname)s ' 34 | '[%(module)s %(filename)s:%(lineno)d] %(message)s') 35 | formatter = Formatter(format_str) 36 | rotate_handler.setFormatter(formatter) 37 | stream_handler.setFormatter(formatter) 38 | 39 | log.addHandler(rotate_handler) 40 | log.setLevel(DEBUG) 41 | 42 | 43 | def set_config_default(config): 44 | if 'node_name' not in config: 45 | config['node_name'] = u'my_guardian' 46 | 47 | 48 | def get_args_check(args): 49 | f = open(args, 'r') 50 | try: 51 | config = json.load(f) 52 | except ValueError as e: 53 | log.error(repr(e)) 54 | raise ValueError('Config file is not a valid json') 55 | 56 | set_config_default(config) 57 | 58 | config = _get_active_app_config(config) 59 | 60 | return config 61 | 62 | 63 | class ThreadCheck(threading.Thread): 64 | def __init__(self, t_name, file_path, alert_client): 65 | self.path = file_path 66 | self.alert_client = alert_client 67 | threading.Thread.__init__(self, name=t_name) 68 | 69 | def run(self): 70 | command_check(self.path, self.alert_client) 71 | 72 | 73 | def command_check(file_path, alert_client): 74 | """ 75 | 76 | :param file_path: Config file path. 
77 | :param alert_client: Member of GuardianAlert 78 | :return: 79 | """ 80 | logging.info("Starting to check applications") 81 | 82 | while True: 83 | config = get_args_check(file_path) 84 | check_impl(config, alert_client) 85 | time.sleep(config['check_interval']) 86 | 87 | 88 | class GuardianError(Exception): 89 | pass 90 | 91 | 92 | class NoAvailableYarnRM(GuardianError): 93 | pass 94 | 95 | 96 | class NoActiveYarnRM(GuardianError): 97 | pass 98 | 99 | 100 | class CannotGetClusterApps(GuardianError): 101 | pass 102 | 103 | 104 | def _get_active_app_config(config): 105 | apps = config['apps'] 106 | del_list = [] 107 | for i in range(len(apps)): 108 | app = apps[i] 109 | if 'active' in app.keys() and app['active'] is False: 110 | del_list.insert(0, i) 111 | 112 | for i in del_list: 113 | del apps[i] 114 | 115 | return config 116 | 117 | 118 | def _get_yarn_active_rm(hosts, timeout=10): 119 | """Find active yarn resource manager. 120 | """ 121 | 122 | active_rm = None 123 | available_hosts = len(hosts) 124 | for host in hosts: 125 | url = 'http://{host}/ws/v1/cluster/info'.format(host=host) 126 | try: 127 | resp = requests.get(url, timeout=timeout) 128 | except requests.exceptions.ConnectTimeout as e: 129 | available_hosts -= 1 130 | continue 131 | 132 | if resp.status_code != 200: 133 | available_hosts -= 1 134 | continue 135 | 136 | cluster_info = resp.json() 137 | 138 | if cluster_info['clusterInfo']['haState'].lower() == "active": 139 | active_rm = host 140 | break 141 | 142 | if available_hosts == 0: 143 | raise NoAvailableYarnRM 144 | 145 | if active_rm is None: 146 | raise NoActiveYarnRM 147 | 148 | logging.debug('Picked up Yarn active resource manager:' + active_rm) 149 | 150 | return active_rm 151 | 152 | 153 | def _request_yarn(hosts, timeout=10): 154 | active_rm = _get_yarn_active_rm(hosts) 155 | 156 | url = 'http://{host}/ws/v1/cluster/apps?states=accepted,running'.format( 157 | host=active_rm) 158 | resp = requests.get(url, timeout=timeout) 159 | if 
resp.status_code != 200: 160 | raise CannotGetClusterApps() 161 | 162 | stats = resp.json() 163 | 164 | if len(stats.keys()) == 0: 165 | raise ValueError('Cannot not get yarn application stats') 166 | 167 | return stats, active_rm 168 | 169 | 170 | def check_impl(args, alert_client): 171 | yarn_active_rm = None 172 | retry = 0 173 | while retry < 3: 174 | try: 175 | j, yarn_active_rm = _request_yarn(args['yarn']['api_hosts']) 176 | break 177 | 178 | except (ValueError, NoAvailableYarnRM, NoActiveYarnRM): 179 | logging.warning("Failed to send request to yarn resource manager, " 180 | "retry") 181 | retry += 1 182 | 183 | if retry >= 3: 184 | logging.error( 185 | "Failed to send request to yarn resource manager, host config: " + 186 | ', '.join(args['yarn']['api_hosts'])) 187 | subject = 'Guardian' 188 | objects = 'Yarn RM' 189 | content = 'Failed to send request to yarn resource manager.' 190 | alert_client.send_alert("ERROR", subject, objects, content) 191 | 192 | return 193 | 194 | if j['apps'] is None: 195 | logging.info("There is no app in yarn.") 196 | running_apps = [] 197 | else: 198 | running_apps = j['apps']['app'] 199 | 200 | app_map = {} 201 | for app in running_apps: 202 | key = app['name'] 203 | if key not in app_map: 204 | app_map[key] = [] 205 | 206 | app_map[key].append(app) 207 | 208 | not_running_apps = [] 209 | for app_config in args['apps']: 210 | app_name = app_config['app_name'] 211 | 212 | apps = None 213 | try: 214 | apps = app_map[app_name] 215 | except KeyError: 216 | apps = [] 217 | 218 | actual_app_num = len(apps) 219 | expected_app_num = 1 220 | try: 221 | expected_app_num = app_config['app_num'] 222 | except KeyError: 223 | pass 224 | 225 | if actual_app_num < expected_app_num: 226 | not_running_apps.append(app_name) 227 | continue 228 | 229 | # app is running but not in expected number 230 | if actual_app_num > expected_app_num: 231 | subject = 'Guardian' 232 | objects = app_name 233 | content = 'Unexpected running app number, 
expected/actual: {expected}/{actual}'.format( 234 | expected=app_config['app_num'], actual=len(apps)) 235 | alert_client.send_alert("ERROR", subject, objects, content) 236 | continue 237 | 238 | # specific type of checker has been set 239 | if 'check_type' in app_config and 'check_options' in app_config: 240 | 241 | config = { 242 | 'app': app_config, 243 | 'yarn': { 244 | 'active_rm': yarn_active_rm 245 | }, 246 | 'node_name': args['node_name'], 247 | } 248 | 249 | if app_config['check_type'] == 'spark': 250 | spark_checker.check(apps, config, alert_client) 251 | 252 | if len(not_running_apps) == 0: 253 | logging.info("There is no application need to be started.") 254 | return 255 | 256 | alert_not_running_apps(not_running_apps, args['apps'], alert_client) 257 | 258 | 259 | def alert_not_running_apps(app_names, app_configs, alert_client): 260 | for app_name in app_names: 261 | 262 | subject = 'Guardian' 263 | objects = app_name 264 | content = ('App is not running or less than expected number of ' 265 | 'running instance, will restart.') 266 | alert_client.send_alert("ERROR", subject, objects, content) 267 | 268 | app_info = filter(lambda x: x['app_name'] == app_name, app_configs) 269 | raw_cmd = app_info[0]['start_cmd'] 270 | cmd = raw_cmd.split() # split by whitespace to comand and arguments for popen 271 | 272 | retry = 0 273 | while retry < 3: 274 | try: 275 | p = subprocess.Popen(cmd) 276 | except OSError: 277 | # probably os cannot find the start command. 278 | log.error("Invalid start command: " + raw_cmd) 279 | retry += 1 280 | continue 281 | 282 | output, err = p.communicate() 283 | 284 | if err is not None: 285 | print err 286 | retry += 1 287 | continue 288 | else: 289 | print output 290 | break 291 | 292 | if retry >= 3: 293 | 294 | logging.info("Alert sms after failed 3 times.") 295 | subject = 'Guardian' 296 | objects = app_name 297 | content = 'Failed to start yarn app after 3 times.' 
298 | alert_client.send_alert("ERROR", subject, objects, content) 299 | 300 | logging.info("Finished checking applications") 301 | 302 | 303 | def get_args_inspect(args): 304 | """ 305 | args: 306 | filter: only support "app_name" 307 | value: only support regular expression 308 | """ 309 | 310 | if len(args) != 3: 311 | raise ValueError("Invalid argument number") 312 | 313 | f = open(args[0], 'r') 314 | try: 315 | config = json.load(f) 316 | except ValueError as e: 317 | log.error(repr(e)) 318 | raise ValueError('Config file is not a valid json') 319 | 320 | if args[1] != 'app_name': 321 | raise ValueError("Invalid Filter, only support \"app_name\"") 322 | 323 | args_map = { 324 | 'config': config, 325 | 'filter': args[1], 326 | 'value': args[2], 327 | } 328 | 329 | return args_map 330 | 331 | 332 | def command_inspect(args): 333 | logging.info("Starting to inspect applications") 334 | 335 | import re 336 | 337 | def match(s): 338 | 339 | pattern = args['value'] 340 | m = re.search(pattern, s) 341 | return True if m is not None else False 342 | 343 | j, active_rm = _request_yarn(args['config']['yarn']['api_hosts']) 344 | 345 | if j['apps'] is None: 346 | logging.info("There's no app in yarn") 347 | return 348 | 349 | running_apps = j['apps']['app'] 350 | 351 | running_app_names = map(lambda x: x['name'], running_apps) 352 | running_app_names = filter(match, running_app_names) 353 | running_app_names = set(running_app_names) 354 | 355 | config = args['config'] 356 | 357 | configured_apps = map(lambda x: x['app_name'], config['apps']) 358 | configured_apps = set(configured_apps) 359 | 360 | for app_name in running_app_names - configured_apps: 361 | config['apps'].append({ 362 | 'app_name': app_name, 363 | 'start_cmd': 'TODO', 364 | 'app_num': 1 365 | }) 366 | 367 | print json.dumps(config, indent=4) 368 | 369 | logging.info("Finished inspecting applications, please check config.") 370 | 371 | 372 | if __name__ == '__main__': 373 | 374 | import ntpath 375 | 376 | 
executable = ntpath.basename(sys.argv[0]) 377 | if len(sys.argv[1:]) < 1: 378 | print "usage:", executable, " " 379 | print " commands:" 380 | print " - check " 381 | print " example:", executable, "check ./config.json" 382 | print "" 383 | print " - inspect " 384 | print " * filter: app_name" 385 | print " * value: any regular expression" 386 | print " example:", executable, "inspect ./config.json app_name waterdrop_" 387 | print "" 388 | sys.exit(-1) 389 | 390 | command = sys.argv[1] 391 | 392 | try: 393 | 394 | if command == 'check': 395 | if len(sys.argv[2:]) != 1: 396 | raise ValueError('Invalid argument number') 397 | 398 | config = get_args_check(sys.argv[2]) 399 | 400 | alert_client = GuardianAlert(config["alert_manager"]) 401 | t = ThreadCheck('check', sys.argv[2], alert_client) 402 | t.setDaemon(True) 403 | t.start() 404 | 405 | port = 5000 406 | if 'port' in config: 407 | port = config['port'] 408 | 409 | app = config_api.app 410 | app.config['config_name'] = sys.argv[2] 411 | 412 | app.run(host='0.0.0.0', port=port) 413 | 414 | elif command == 'inspect': 415 | config = get_args_inspect(sys.argv[2:]) 416 | command_inspect(config) 417 | 418 | else: 419 | raise ValueError("Unsupported Command:" + command) 420 | 421 | except KeyboardInterrupt as e: 422 | logging.info("Exiting. Bye") 423 | sys.exit(0) 424 | --------------------------------------------------------------------------------