├── README.md ├── lambda_code ├── AWS_AmazonSNS.py └── AWS_Lambda.py ├── scripts ├── AWS_Service_Health_Dashboard.py ├── autoscaling_zabbix.py ├── cloudwatch_zabbix.py └── lambda_zabbix.py └── templates ├── 2.2 ├── AWS_Service_Health_Dashboard_template.xml ├── AmazonSNS_AWSLambda_Zabbix_template.xml ├── autoscaling_template.xml ├── awsbilling_template.xml └── cloudwatch_template.xml └── 3.0 ├── AWS_Service_Health_Dashboard_template.xml ├── AmazonSNS_AWSLambda_Zabbix_template.xml ├── autoscaling_template.xml ├── awsbilling_template.xml └── cloudwatch_template.xml /README.md: -------------------------------------------------------------------------------- 1 | # Zabbix AWS Monitoring Templates 2 | 3 | This template collection is for effectively monitoring AWS (Amazon Web Services) with Zabbix. 4 | 5 | # What templates? 6 | 7 | * Amazon CloudWatch Metrics monitoring Template 8 | * AWS Service Health Dashboard monitoring Template 9 | * AWS EC2 auto scaling monitoring Template 10 | * Amazon SNS monitoring Template (coming soon...) 11 | * AWS Lambda function execution action script (coming soon...) 12 | * Other templates are under consideration. 13 | 14 | # Details 15 | 16 | Please check the wiki pages. 17 | 18 | * [Amazon CloudWatch Metrics monitoring Template](https://github.com/tech-sketch/zabbix_aws_template/wiki/Amazon-CloudWatch-Metrics-monitoring-Template) 19 | * [AWS Service Health Dashboard monitoring Template](https://github.com/tech-sketch/zabbix_aws_template/wiki/AWS-Service-Health-Dashboard-monitoring-Template) 20 | * [AWS EC2 AutoScaling monitoring Template](https://github.com/tech-sketch/zabbix_aws_template/wiki/AWS-EC2-AutoScaling-monitoring-Template) 21 | 22 | # License 23 | 24 | Licensed under the Apache License, Version 2.0. 25 | The Apache v2 full text is published at this link (http://www.apache.org/licenses/LICENSE-2.0). 26 | 27 | # Contact 28 | 29 | VINX CORP. 30 | 31 | Technical support service is provided by VINX CORP. 
32 | 33 | https://www.vinx.co.jp/awszabbix/index.html 34 | 35 | --- 36 | Copyright 2016 TIS Inc. 37 | -------------------------------------------------------------------------------- /lambda_code/AWS_AmazonSNS.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import os 4 | import re 5 | import json 6 | import socket 7 | import struct 8 | import time 9 | import calendar 10 | import datetime 11 | import dateutil.parser 12 | 13 | def lambda_handler(event, context): 14 | sns_zabbix = SnsZabbix() 15 | sns_zabbix.make_send_items(event) 16 | sns_zabbix.send_to_zabbix() 17 | 18 | class SnsZabbix: 19 | def __init__(self): 20 | self.zabbix_host = "localhost" 21 | self.zabbix_port = 10051 22 | self.send_items = [] 23 | 24 | def make_send_items(self, event): 25 | for record in event['Records']: 26 | event_type = self.__check_event_type(record) 27 | self.__add_send_item(record, event_type) 28 | 29 | def __add_send_item(self, record, event_type): 30 | send_json_string = '{"host":"", "key":"", "value":"", "clock":""}' 31 | send_item = json.loads(send_json_string) 32 | message = json.loads(record['Sns']['Message']) 33 | if event_type == "AutoScaling": 34 | send_item["host"] = "AutoScaling" 35 | value = [] 36 | value.append("Event : " + message['Event']) 37 | value.append("Service : " + message['Service']) 38 | value.append("Description : " + message['Description']) 39 | value.append("AvailabilityZone : " + message['Details']['Availability Zone']) 40 | value.append("AutoScalingGroupName : " + message['AutoScalingGroupName']) 41 | value.append("Cause : " + message['Cause']) 42 | value.append("StatusCode : " + message['StatusCode']) 43 | value.append("StatusMessage : " + message['StatusMessage']) 44 | send_item["value"] = os.linesep.join(value) 45 | 46 | elif event_type == "RDS": 47 | send_item["host"] = message["Source ID"] 48 | 49 | value = [] 50 | value.append("EventSource : " + message["Event 
Source"]) 51 | value.append("IdentifierLink : " + message["Identifier Link"]) 52 | value.append("SourceId : " + message["Source ID"]) 53 | value.append("EventId : " + message["Event ID"]) 54 | value.append("EventMessage : " + message["Event Message"]) 55 | value.append("TopicArn : "+ record['Sns']['TopicArn']) 56 | send_item["value"] = os.linesep.join(value) 57 | 58 | 59 | elif event_type == "CloudWatch": 60 | send_item["host"] = message['Trigger']['Dimensions'][0]['value'] 61 | value = [] 62 | value.append("NewStatus : " + message['NewStateValue']) 63 | value.append("MetricNamespace : " + message['Trigger']['Namespace']) 64 | value.append("Dimensions : " + message['Trigger']['Dimensions'][0]['name'] + " = " + message['Trigger']['Dimensions'][0]['value']) 65 | value.append("MetricName : " + message['Trigger']['MetricName']) 66 | value.append("NewStateReason : " + message['NewStateReason']) 67 | value.append("Region : " + message['Region']) 68 | value.append("TopicArn : " + record['Sns']['TopicArn']) 69 | send_item["value"] = os.linesep.join(value) 70 | 71 | elif event_type == "EC2RDS": 72 | send_item["host"] = message['Trigger']['Namespace'].replace('AWS/',"") 73 | value = [] 74 | value.append("NewStatus : " + message['NewStateValue']) 75 | value.append("Dimensions : " + json.dumps(message['Trigger']['Dimensions'])) 76 | value.append("MetricName : " + message['Trigger']['MetricName']) 77 | value.append("NewStateReason : " + message['NewStateReason']) 78 | value.append("Region :" + message['Region']) 79 | value.append("TopicArn : " + record['Sns']['TopicArn']) 80 | send_item["value"] = os.linesep.join(value) 81 | 82 | else: 83 | send_item["host"] = "Other" 84 | value = json.loads(record['Sns']['Message']) 85 | 86 | send_item["key"] = "sns.event" 87 | event_timestamp = dateutil.parser.parse(record['Sns']['Timestamp']) 88 | send_item["clock"] = calendar.timegm(event_timestamp.utctimetuple()) 89 | self.send_items.append(send_item) 90 | 91 | def __check_event_type(self, 
record): 92 | message = json.loads(record['Sns']['Message']) 93 | subject = record['Sns']['Subject'] 94 | if subject.find("Auto Scaling") != -1: 95 | return "AutoScaling" 96 | elif subject.find("RDS Notification Message") != -1: 97 | return "RDS" 98 | elif message['Trigger']['Dimensions']: 99 | return "CloudWatch" 100 | elif message['Trigger']['Namespace']: 101 | return "EC2RDS" 102 | else: 103 | return "Other" 104 | 105 | def send_to_zabbix(self): 106 | now = "%.9f" % time.time() 107 | sec = now.split(".")[0] 108 | ns = now.split(".")[1] 109 | send_data = json.loads('{"request":"sender data","data":[],"clock":"%s","ns":"%s" }' % (sec, ns)) 110 | send_data["data"] = self.send_items 111 | send_data_string = json.dumps(send_data) 112 | zbx_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 113 | try: 114 | zbx_client.connect((self.zabbix_host, self.zabbix_port)) 115 | except Exception: 116 | print("Error") 117 | quit() 118 | header = struct.pack('<4sBQ', 'ZBXD', 1, len(send_data_string)) 119 | send_data_string = header + send_data_string 120 | try: 121 | zbx_client.sendall(send_data_string) 122 | except Exception: 123 | print('Data sending failure') 124 | quit() 125 | response = '' 126 | while True: 127 | data = zbx_client.recv(4096) 128 | if not data: 129 | break 130 | response += data 131 | print(response[13:]) 132 | zbx_client.close() 133 | -------------------------------------------------------------------------------- /lambda_code/AWS_Lambda.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import json 4 | import boto3 5 | 6 | print('Loading function') 7 | 8 | def lambda_handler(event, context): 9 | event = json.loads(event) 10 | response = operation_ec2(event) 11 | return response 12 | 13 | def operation_ec2(event): 14 | def checkExecResult(response): 15 | print(response) 16 | if(response["ResponseMetadata"]["HTTPStatusCode"] == 200): 17 | return "Succeed" 18 | else: 19 
| return "Failed" 20 | 21 | def stopEC2(ec2): 22 | execResult = ec2.stop() 23 | return checkExecResult(execResult) 24 | 25 | def startEC2(ec2): 26 | execResult = ec2.start() 27 | return checkExecResult(execResult) 28 | 29 | def checkOperationalInstance(ec2,operation): 30 | if(operation == "start"): 31 | if(ec2.state['Name'] == "stopped"): 32 | return True 33 | elif(operation == "stop"): 34 | if(ec2.state['Name'] == "running"): 35 | return True 36 | return False 37 | 38 | response = { 39 | "message" : "Nothing Operation", 40 | "instance_id": None, 41 | "operation": None 42 | } 43 | 44 | if "instance_id" not in event: 45 | response["message"] = "Not Found InstanceID" 46 | return response 47 | 48 | instanceid = event["instance_id"] 49 | response["instance_id"] = instanceid 50 | print(event) 51 | 52 | if "operation" not in event: 53 | response["message"] = "Not Found Operation" 54 | return response 55 | 56 | print("instance_id: " + instanceid) 57 | ec2 = boto3.resource('ec2').Instance(instanceid) 58 | 59 | if(checkOperationalInstance(ec2,event["operation"])): 60 | if(event["operation"] == "stop"): 61 | print("Stop EC2: " + instanceid) 62 | response["operation"] = "stop" 63 | response["message"] = stopEC2(ec2) 64 | elif(event["operation"] == "start"): 65 | print("Start EC2: " + instanceid) 66 | response["operation"] = "start" 67 | response["message"] = startEC2(ec2) 68 | else: 69 | response["message"] = "Invalid Operation." 70 | else: 71 | response["message"] = "Cannot Operation. Instance State is " + ec2.state['Name'] + "." 
72 | return response -------------------------------------------------------------------------------- /scripts/AWS_Service_Health_Dashboard.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | import re 3 | import argparse 4 | import calendar 5 | import datetime 6 | import dateutil.parser 7 | import feedparser 8 | import threading 9 | import time 10 | import urllib2 11 | import argparse 12 | import json 13 | import socket 14 | import struct 15 | from HTMLParser import HTMLParser 16 | 17 | class AWSSHDParser(HTMLParser): 18 | 19 | 20 | def __init__(self, base_url, block, zabbix_host, zabbix_port): 21 | HTMLParser.__init__(self) 22 | self.block = block 23 | self.check = False 24 | self.base_url = base_url 25 | self.url_list = [] 26 | self.lld_json = json.loads('{"data":[]}') 27 | self.zabbix_host = zabbix_host 28 | self.zabbix_port = zabbix_port 29 | 30 | 31 | def get_rss(self, url): 32 | now = "%.9f" % time.time() 33 | sec = now.split(".")[0] 34 | ns = now.split(".")[1] 35 | send_data = json.loads('{"request":"sender data","data":[],"clock":"%s","ns":"%s" }' % (sec, ns)) 36 | response = feedparser.parse(url) 37 | send_items = [] 38 | 39 | for entry in range(len(response.entries)): 40 | title = response.entries[entry].title 41 | published = response.entries[entry].published 42 | 43 | pub = dateutil.parser.parse(published) 44 | uni = calendar.timegm(pub.utctimetuple()) 45 | now = calendar.timegm(time.gmtime()) 46 | 47 | if now - args.interval < uni: 48 | send_json_string = '{"host":"", "key":"", "value":"", "clock":""}' 49 | send_item = json.loads(send_json_string) 50 | send_item["host"] = self.block 51 | 52 | replace = re.compile(".+/rss/(.*?)(-(ap-[a-z]+-[0-9]|us-[a-z]+-[0-9]|eu-[a-z]+-[0-9]|sa-[a-z]+-[0-9]))*\.rss") 53 | match = replace.match(url) 54 | ServiceName = match.group(1) 55 | Region = match.group(3) 56 | 57 | if Region == None: 58 | send_item["key"] = 'health.status[%s.]' % ServiceName 59 | else: 
60 | send_item["key"] = 'health.status[%s.%s]' % (ServiceName, Region) 61 | 62 | send_item["value"] = title 63 | send_item["clock"] = uni 64 | send_items.append(send_item) 65 | else: 66 | break 67 | send_data["data"].extend(send_items) 68 | self.__send_to_zabbix(send_data) 69 | 70 | 71 | def __send_to_zabbix(self, send_data): 72 | send_data_string = json.dumps(send_data) 73 | zbx_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 74 | try: 75 | zbx_client.connect((self.zabbix_host, self.zabbix_port)) 76 | except Exception: 77 | print "Can't connect to zabbix server" 78 | quit() 79 | 80 | header = struct.pack('<4sBQ', 'ZBXD', 1, len(send_data_string)) 81 | send_data_string = header + send_data_string 82 | try: 83 | zbx_client.sendall(send_data_string) 84 | except Exception: 85 | print 'Data sending failure' 86 | quit() 87 | response = '' 88 | while True: 89 | data = zbx_client.recv(4096) 90 | if not data: 91 | break 92 | response += data 93 | 94 | print response[13:] 95 | zbx_client.close() 96 | 97 | 98 | def handle_starttag(self, tagname, attribute): 99 | if tagname.lower() == "div": 100 | for i in attribute: 101 | if i[1] == self.block + "_block": 102 | self.check = True 103 | if self.check == True and tagname.lower() == "a": 104 | for i in attribute: 105 | if i[0].lower() == "href": 106 | self.url_list.append(self.base_url + i[1][1:]) 107 | lld_json_string = '{"{#SERVICE.NAME}":"", "{#REGION}":""}' 108 | lld_item = json.loads(lld_json_string) 109 | 110 | replace = re.compile(".+/rss/(.*?)(-(ap-[a-z]+-[0-9]|us-[a-z]+-[0-9]|eu-[a-z]+-[0-9]|sa-[a-z]+-[0-9]))*\.rss") 111 | match = replace.match(self.base_url + i[1][1:]) 112 | ServiceName = match.group(1) 113 | Region = match.group(3) 114 | 115 | if Region == None: 116 | lld_item["{#SERVICE.NAME}"] = ServiceName 117 | lld_item["{#REGION}"] = "" 118 | else: 119 | lld_item["{#SERVICE.NAME}"] = ServiceName 120 | lld_item["{#REGION}"] = Region 121 | 122 | self.lld_json["data"].append(lld_item) 123 | 124 | 125 | 
def handle_endtag(self, tagname): 126 | if tagname.lower() == "div": 127 | self.check = False 128 | 129 | 130 | if __name__== "__main__": 131 | parser = argparse.ArgumentParser(description='Get RSS list or Zabbix LLD format output from AWS Service Health Dashboard page.') 132 | parser.add_argument('-b', '--block', default="AP", help='set AWS region block(e.g.:NA or SA or EU or AP)') 133 | parser.add_argument('-i', '--interval', type=int, help='set interval time (seconds)') 134 | parser.add_argument('-m', '--send-mode', default='False', help='set True if you send AWS Service Health Dashboard status information. set False if you want to get lld format service list. (e.g.: True or False)') 135 | parser.add_argument('-p', '--zabbix-port', type=int, default=10051, help='set listening port number for Zabbix server') 136 | parser.add_argument('-z', '--zabbix-host', default='localhost', help='set listening IP address for Zabbix server') 137 | 138 | block_list = ["NA", "SA", "EU", "AP"] 139 | args = parser.parse_args() 140 | 141 | if args.block not in block_list: 142 | print "please set block name. 
:" + " or ".join(map(str, block_list)) 143 | 144 | base_url = "http://status.aws.amazon.com/" 145 | socket.setdefaulttimeout(30) 146 | htmldata = urllib2.urlopen(base_url) 147 | 148 | parser = AWSSHDParser(base_url, args.block, args.zabbix_host, args.zabbix_port) 149 | parser.feed(htmldata.read()) 150 | 151 | if args.send_mode.upper() == "TRUE": 152 | for url in parser.url_list: 153 | get_rss_th = threading.Thread(target=parser.get_rss,name="get_rss_th", args=(url,)) 154 | get_rss_th.start() 155 | 156 | if args.send_mode.upper() == "FALSE": 157 | print json.dumps(parser.lld_json) 158 | 159 | parser.close() 160 | htmldata.close() 161 | -------------------------------------------------------------------------------- /scripts/autoscaling_zabbix.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | 3 | import boto3 4 | import json 5 | import argparse 6 | import os 7 | import socket 8 | import struct 9 | import time 10 | import calendar 11 | from zabbix_api import ZabbixAPI,ZabbixAPIException,Already_Exists 12 | from datetime import datetime 13 | from datetime import timedelta 14 | 15 | class AwsZabbix: 16 | 17 | def __init__(self, region, access_key, secret, pref_if, zbx_url, zbx_user, zbx_pass, set_macro): 18 | self.region = region 19 | self.access_key = access_key 20 | self.secret = secret 21 | self.pref_if = pref_if 22 | self.zbx_url = zbx_url 23 | self.zbx_user = zbx_user 24 | self.zbx_pass = zbx_pass 25 | self.set_macro = set_macro 26 | 27 | self.ec2 = boto3.resource( 28 | 'ec2', 29 | region_name=region, 30 | aws_access_key_id=access_key, 31 | aws_secret_access_key=secret 32 | ) 33 | self.client = boto3.client( 34 | 'autoscaling', 35 | region_name=region, 36 | aws_access_key_id=access_key, 37 | aws_secret_access_key=secret 38 | ) 39 | 40 | self.zapi = ZabbixAPI(server=self.zbx_url) 41 | self.zapi.login(self.zbx_user, self.zbx_pass) 42 | 43 | 44 | def __get_interfaces(self, host, region, key, secret): 45 | 
interfaces = [] 46 | priv_intf = '' 47 | pub_intf = '' 48 | instance = self.ec2.Instance(host) 49 | 50 | priv_intf = ({ 51 | 'type':1, 52 | 'useip':1, 53 | 'main':(1 if self.pref_if == 'Private' else 0), 54 | 'ip':instance.private_ip_address, 55 | 'dns':'', 56 | 'port':'10050' 57 | }) 58 | if instance.public_ip_address: 59 | pub_intf = ({ 60 | 'type':1, 61 | 'useip':1, 62 | 'main':(1 if self.pref_if == 'Public' else 0), 63 | 'ip':instance.public_ip_address, 64 | 'dns':'', 65 | 'port':'10050' 66 | }) 67 | else: 68 | priv_intf['main'] = 1 69 | 70 | if pub_intf: 71 | interfaces = [priv_intf, pub_intf] 72 | else: 73 | interfaces = [priv_intf] 74 | 75 | return interfaces 76 | 77 | 78 | def __get_hostid(self, instanceid): 79 | host = self.zapi.host.get({ 80 | 'filter':{ 81 | 'host':instanceid 82 | } 83 | }) 84 | 85 | return host[0]['hostid'] if host else False 86 | 87 | 88 | def __create_interfaces(self, hostid, interfaces): 89 | for aws_ifname in interfaces: 90 | aws_ifname['hostid'] = hostid 91 | interface = self.zapi.hostinterface.get({ 92 | 'filter':{ 93 | 'hostid':hostid, 94 | 'ip':aws_ifname['ip'] 95 | } 96 | }) 97 | 98 | if interface: 99 | aws_ifname['interfaceid'] = interface[0]['interfaceid'] 100 | 101 | try: 102 | self.zapi.hostinterface.update(aws_ifname) 103 | except ZabbixAPIException, e: 104 | print str(e) 105 | else: 106 | try: 107 | self.zapi.hostinterface.create(aws_ifname) 108 | except ZabbixAPIException, e: 109 | print str(e) 110 | 111 | return 112 | 113 | 114 | def __create_host(self, host, interfaces, template_ids, groupid): 115 | try: 116 | params = { 117 | 'host':host, 118 | 'interfaces':interfaces, 119 | 'groups':[{'groupid':groupid}] 120 | } 121 | if template_ids: 122 | params["templates"] = template_ids 123 | 124 | self.zapi.host.create(params) 125 | except Already_Exists, e: 126 | hostid = self.__get_hostid([host]) 127 | params = { 128 | 'hostid':hostid, 129 | 'groups':[{'groupid':groupid}] 130 | } 131 | if template_ids: 132 | 
params["templates"] = template_ids 133 | 134 | self.zapi.host.update(params) 135 | self.__create_interfaces(hostid, interfaces) 136 | 137 | return 138 | 139 | 140 | def __create_usermacro(self, hostid, macro): 141 | update_macro = {} 142 | 143 | try: 144 | self.zapi.usermacro.create({ 145 | 'hostid':hostid, 146 | 'macro':macro['name'], 147 | 'value':macro['value'] 148 | }) 149 | except Already_Exists, e: 150 | defined_macro = self.zapi.usermacro.get({ 151 | 'filter':{ 152 | 'macro':macro['name'] 153 | }, 154 | 'hostids':hostid 155 | }) 156 | try: 157 | self.zapi.usermacro.update({ 158 | 'hostmacroid':defined_macro[0]['hostmacroid'], 159 | 'value':macro['value'] 160 | }) 161 | except ZabbixAPIException, e: 162 | print str(e) 163 | 164 | return 165 | 166 | 167 | def __set_usermacros(self, hostid): 168 | macros = [{ 169 | 'name':'{$REGION}', 170 | 'value':self.region 171 | }, 172 | { 173 | 'name':'{$KEY}', 174 | 'value':self.access_key 175 | }, 176 | { 177 | 'name':'{$SECRET}', 178 | 'value':self.secret 179 | }] 180 | for macro in macros: 181 | self.__create_usermacro(hostid, macro) 182 | 183 | return 184 | 185 | 186 | def __disable_host(self, hostid): 187 | try: 188 | self.zapi.host.update({'hostid':hostid,'status':1}) 189 | except ZabbixAPIException, e: 190 | print str(e) 191 | 192 | return 193 | 194 | 195 | def send_autoscaling_data_to_zabbix(self): 196 | response = self.client.describe_auto_scaling_groups() 197 | for group in response['AutoScalingGroups']: 198 | groupid = '' 199 | templates = [] 200 | template_ids = [] 201 | hostgroup_hosts = [] 202 | hostids = [] 203 | usermacros = [] 204 | 205 | try: 206 | response = self.zapi.hostgroup.create({'name':group['AutoScalingGroupName']}) 207 | groupid = response['groupids'][0] 208 | except ZabbixAPIException, e: 209 | response = self.zapi.hostgroup.get({ 210 | 'filter':{ 211 | 'name':[group['AutoScalingGroupName']] 212 | }, 213 | 'selectHosts':'extend' 214 | }) 215 | for hostgroup_host in response[0]['hosts']: 216 | 
hostgroup_hosts.append(hostgroup_host['host']) 217 | groupid = response[0]['groupid'] 218 | 219 | for tag in group['Tags']: 220 | if tag['Key'] == 'ZabbixTemplates': 221 | templates = tag['Value'].split(',') 222 | 223 | if templates: 224 | try: 225 | response = self.zapi.template.get({ 226 | 'filter':{ 227 | 'host':templates 228 | } 229 | }) 230 | for template in response: 231 | template_ids.append({'templateid':template['templateid']}) 232 | except ZabbixAPIException, e: 233 | print str(e) 234 | 235 | for instance in group['Instances']: 236 | instanceid = instance['InstanceId'] 237 | if instanceid in hostgroup_hosts: 238 | hostgroup_hosts.remove(instanceid) 239 | interfaces = self.__get_interfaces(instanceid, self.region, self.access_key, self.secret) 240 | 241 | ## Create or update host 242 | self.__create_host(instanceid, interfaces, template_ids, groupid) 243 | 244 | ## Set user macros for CloudWatch 245 | if self.set_macro == 'True': 246 | hostid = self.__get_hostid([instance['InstanceId']]) 247 | self.__set_usermacros(hostid) 248 | 249 | ## host status disable for not exist EC2 instance host 250 | for deleted_host in hostgroup_hosts: 251 | hostid = self.__get_hostid([deleted_host]) 252 | self.__disable_host(hostid) 253 | 254 | 255 | if __name__ == '__main__': 256 | parser = argparse.ArgumentParser(description='Get AWS Auto Scaling Metric list json format, and send Zabbix API.') 257 | 258 | parser.add_argument('-r', '--region', 259 | default=os.getenv('AWS_DEFAULT_REGION'), 260 | help='set AWS region name(e.g.: ap-northeast-1)') 261 | parser.add_argument('-a', '--accesskey', 262 | default=os.getenv('AWS_ACCESS_KEY_ID'), 263 | help='set AWS Access Key ID') 264 | parser.add_argument('-s', '--secret', 265 | default=os.getenv('AWS_SECRET_ACCESS_KEY'), 266 | help='set AWS Secret Access Key') 267 | parser.add_argument('-z', '--url', 268 | default='http://localhost/zabbix', 269 | help='set Zabbix Frontend url') 270 | parser.add_argument('-u', '--user', 271 | 
default='Admin', help='set Zabbix API username') 272 | parser.add_argument('-p', '--password', 273 | default='zabbix', help='set Zabbix API user password') 274 | parser.add_argument('-P', '--preffer-if', 275 | default='Private', choices=['Private', 'Public'], 276 | help='set preffer interface(e.g.: Private or Public)') 277 | parser.add_argument('-m', '--set-macro', 278 | default='False', choices=['False', 'True'], 279 | help='set User macros for CloudWatch(e.g.: False or True)') 280 | 281 | args = parser.parse_args() 282 | aws_zabbix = AwsZabbix(region=args.region, 283 | access_key=args.accesskey, secret=args.secret, 284 | pref_if=args.preffer_if, 285 | zbx_url=args.url, zbx_user=args.user, zbx_pass=args.password, 286 | set_macro=args.set_macro) 287 | aws_zabbix.send_autoscaling_data_to_zabbix() 288 | 289 | -------------------------------------------------------------------------------- /scripts/cloudwatch_zabbix.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | 3 | import boto3 4 | import json 5 | import argparse 6 | import os 7 | import socket 8 | import struct 9 | import time 10 | import calendar 11 | from datetime import datetime 12 | from datetime import timedelta 13 | 14 | class Metric: 15 | def __init__(self, name="", namespace="", unit="", dimensions=[]): 16 | self.name = name 17 | self.namespace = namespace 18 | self.unit = unit 19 | self.dimensions = dimensions 20 | 21 | class AwsZabbix: 22 | 23 | def __init__(self, region, access_key, secret, identity, hostname, service, timerange_min, 24 | zabbix_host, zabbix_port): 25 | self.zabbix_host = zabbix_host 26 | self.zabbix_port = zabbix_port 27 | self.identity = identity 28 | self.hostname = hostname 29 | self.service = service 30 | self.timerange_min = timerange_min 31 | self.id_dimentions = { 32 | 'ec2':'InstanceId', 33 | 'rds':'DBInstanceIdentifier', 34 | 'elb':'LoadBalancerName', 35 | 'ebs':'VolumeId', 36 | 'billing': 'Currency' 37 | } 38 | 
self.client = boto3.client( 39 | 'cloudwatch', 40 | region_name=region, 41 | aws_access_key_id=access_key, 42 | aws_secret_access_key=secret 43 | ) 44 | self.sum_stat_metrics = [ 45 | {'namespace': 'AWS/ELB', 'metricname': 'RequestCount'}, 46 | {'namespace': 'AWS/ELB', 'metricname': 'HTTPCode_Backend_2XX'}, 47 | {'namespace': 'AWS/ELB', 'metricname': 'HTTPCode_Backend_3XX'}, 48 | {'namespace': 'AWS/ELB', 'metricname': 'HTTPCode_Backend_4XX'}, 49 | {'namespace': 'AWS/ELB', 'metricname': 'HTTPCode_Backend_5XX'}, 50 | {'namespace': 'AWS/ELB', 'metricname': 'HTTPCode_ELB_4XX'}, 51 | {'namespace': 'AWS/ELB', 'metricname': 'HTTPCode_ELB_5XX'} 52 | ] 53 | 54 | def __get_metric_list(self): 55 | resp = self.client.list_metrics( 56 | Dimensions = [ 57 | { 58 | 'Name': self.id_dimentions[self.service], 59 | 'Value': ('USD' if self.service == "billing" else self.identity) 60 | } 61 | ] 62 | ) 63 | metric_list = [] 64 | for data in resp["Metrics"]: 65 | metric = Metric(name=data["MetricName"], namespace=data["Namespace"], dimensions=data["Dimensions"]) 66 | if self.service == "elb": 67 | for dimension in data["Dimensions"]: 68 | if dimension["Name"] == "AvailabilityZone": 69 | metric.name = data["MetricName"] + "." 
+ dimension["Value"] 70 | metric_list.append(metric) 71 | return metric_list 72 | 73 | def __get_metric_stats(self, metric_name, metric_namespace, servicename, timerange_min, stat_type="Average", period_sec=300): 74 | if self.service == "billing": 75 | dimensions = [ 76 | { 77 | 'Name': self.id_dimentions[self.service], 78 | 'Value': 'USD' 79 | } 80 | ] 81 | if servicename != "billing": 82 | dimensions.insert(0, 83 | { 84 | 'Name': 'ServiceName', 85 | 'Value': servicename 86 | } 87 | ) 88 | else: 89 | dimensions = [ 90 | { 91 | 'Name': self.id_dimentions[self.service], 92 | 'Value': self.identity 93 | } 94 | ] 95 | if self.service == "elb": 96 | split_metric_name = metric_name.split(".") 97 | if len(split_metric_name) == 2: 98 | metric_name = split_metric_name[0] 99 | dimensions.append( 100 | { 101 | 'Name': 'AvailabilityZone', 102 | 'Value': split_metric_name[1] 103 | } 104 | ) 105 | stats = self.client.get_metric_statistics( 106 | Namespace=metric_namespace, 107 | MetricName=metric_name, 108 | Dimensions=dimensions, 109 | StartTime=datetime.utcnow() - timedelta(minutes=timerange_min), 110 | EndTime=datetime.utcnow(), 111 | Period=period_sec, 112 | Statistics=[stat_type], 113 | ) 114 | return stats 115 | 116 | def __set_unit(self, metric_list): 117 | ret_val = [] 118 | for metric in metric_list: 119 | servicename = self.service 120 | if self.service == "billing": 121 | metric.unit = 'USD' 122 | else: 123 | stats = self.__get_metric_stats(metric.name, metric.namespace, servicename, self.timerange_min) 124 | for datapoint in stats["Datapoints"]: 125 | metric.unit = datapoint["Unit"] 126 | break 127 | ret_val.append(metric) 128 | return ret_val 129 | 130 | def __get_send_items(self, stats, metric): 131 | send_items = [] 132 | datapoints = stats["Datapoints"] 133 | datapoints = sorted(datapoints, key=lambda datapoints: datapoints["Timestamp"], reverse=True) 134 | for datapoint in datapoints: 135 | servicename = '' 136 | send_json_string = '{"host":"", "key":"", 
"value":"", "clock":""}' 137 | send_item = json.loads(send_json_string) 138 | 139 | if self.hostname == "undefined": 140 | send_item["host"] = self.identity 141 | else: 142 | send_item["host"] = self.hostname 143 | 144 | if self.service == "billing": 145 | for dimension in metric.dimensions: 146 | if dimension["Name"] == "ServiceName": 147 | servicename = dimension["Value"] 148 | send_item["key"] = 'cloudwatch.metric[%s.%s]' % (metric.name, servicename) 149 | else: 150 | send_item["key"] = 'cloudwatch.metric[%s]' % metric.name 151 | send_item["value"] = self.__get_datapoint_value_string(datapoint) 152 | send_item["clock"] = calendar.timegm(datapoint["Timestamp"].utctimetuple()) 153 | send_items.append(send_item) 154 | break 155 | return send_items 156 | 157 | def __get_datapoint_value_string(self, datapoint): 158 | if datapoint.has_key("Average"): 159 | return str(datapoint["Average"]) 160 | elif datapoint.has_key("Sum"): 161 | return str(datapoint["Sum"]) 162 | else: 163 | return "" 164 | 165 | def __send_to_zabbix(self, send_data): 166 | send_data_string = json.dumps(send_data) 167 | zbx_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 168 | try: 169 | zbx_client.connect((self.zabbix_host, self.zabbix_port)) 170 | except Exception: 171 | print "Can't connect to zabbix server" 172 | quit() 173 | 174 | header = struct.pack('<4sBQ', 'ZBXD', 1, len(send_data_string)) 175 | send_data_string = header + send_data_string 176 | try: 177 | zbx_client.sendall(send_data_string) 178 | except Exception: 179 | print 'Data sending failure' 180 | quit() 181 | response = '' 182 | while True: 183 | data = zbx_client.recv(4096) 184 | if not data: 185 | break 186 | response += data 187 | 188 | print response[13:] 189 | zbx_client.close() 190 | 191 | 192 | def send_metric_data_to_zabbix(self): 193 | now = "%.9f" % time.time() 194 | sec = now.split(".")[0] 195 | ns = now.split(".")[1] 196 | send_data = json.loads('{"request":"sender data","data":[],"clock":"%s","ns":"%s" }' % 
(sec, ns)) 197 | metric_list = self.__get_metric_list() 198 | all_metric_stats = [] 199 | servicename = self.service 200 | for metric in metric_list: 201 | if self.service == "billing": 202 | for dimension in metric.dimensions: 203 | if dimension["Name"] == "ServiceName": 204 | servicename = dimension["Value"] 205 | target_metric_info = {'namespace': metric.namespace, 'metricname': metric.name} 206 | for sum_stat_metric in self.sum_stat_metrics: # for support each region metrics (RequestCount, RequestCount.ap-northeast-1 etc.) 207 | if metric.name.find(sum_stat_metric['metricname']) == 0: # Only convert when finding the begging of string. 208 | target_metric_info['metricname'] = sum_stat_metric['metricname'] 209 | if target_metric_info in self.sum_stat_metrics: 210 | stats = self.__get_metric_stats(metric.name, metric.namespace, servicename, self.timerange_min, 'Sum') 211 | else: 212 | stats = self.__get_metric_stats(metric.name, metric.namespace, servicename, self.timerange_min) 213 | send_data["data"].extend(self.__get_send_items(stats, metric)) 214 | self.__send_to_zabbix(send_data) 215 | 216 | def show_metriclist_lld(self): 217 | lld_output_json = json.loads('{"data":[]}') 218 | metric_list = self.__get_metric_list() 219 | metric_list = self.__set_unit(metric_list) 220 | for metric in metric_list: 221 | lld_json_string = '{"{#METRIC.NAME}":"", "{#METRIC.UNIT}":"", "{#METRIC.NAMESPACE}":""}' 222 | lld_item = json.loads(lld_json_string) 223 | lld_item["{#METRIC.NAME}"] = metric.name 224 | lld_item["{#METRIC.NAMESPACE}"] = metric.namespace 225 | lld_item["{#METRIC.UNIT}"] = metric.unit 226 | lld_output_json["data"].append(lld_item) 227 | if self.service == "billing": 228 | lld_item["{#METRIC.SERVICENAME}"] = "" 229 | for dimension in metric.dimensions: 230 | if dimension["Name"] == "ServiceName": 231 | lld_item["{#METRIC.SERVICENAME}"] = dimension["Value"] 232 | print json.dumps(lld_output_json) 233 | 234 | if __name__ == '__main__': 235 | parser = 
argparse.ArgumentParser(description='Get AWS CloudWatch Metric list json format.') 236 | 237 | parser.add_argument('-r', '--region', default=os.getenv("AWS_DEFAULT_REGION"), help='set AWS region name(e.g.: ap-northeast-1)') 238 | parser.add_argument('-a', '--accesskey', default=os.getenv("AWS_ACCESS_KEY_ID"), help='set AWS Access Key ID') 239 | parser.add_argument('-s', '--secret', default=os.getenv("AWS_SECRET_ACCESS_KEY"), help='set AWS Secret Access Key') 240 | parser.add_argument('-i', '--identity', required=True, help='set Identity data (ec2: InstanceId, elb: LoadBalancerName, rds: DBInstanceIdentifier, ebs: VolumeId)') 241 | parser.add_argument('-H', '--hostname', default='undefined', help='set string that has to match HOST.HOST. defaults to identity)') 242 | parser.add_argument('-m', '--send-mode', default='False', help='set True if you send statistic data (e.g.: True or False)') 243 | parser.add_argument('-t', '--timerange', type=int, default=10, help='set Timerange min') 244 | parser.add_argument('-p', '--zabbix-port', type=int, default=10051, help='set listening port number for Zabbix server') 245 | parser.add_argument('-z', '--zabbix-host', default='localhost', help='set listening IP address for Zabbix server') 246 | parser.add_argument('service', metavar='service_name', help='set Service name (e.g.: ec2 or elb or rds') 247 | 248 | args = parser.parse_args() 249 | 250 | aws_zabbix = AwsZabbix(region=args.region, access_key=args.accesskey, secret=args.secret, 251 | identity=args.identity, hostname=args.hostname, service=args.service, 252 | timerange_min=args.timerange, zabbix_host=args.zabbix_host, zabbix_port=args.zabbix_port) 253 | 254 | if args.send_mode.upper() == 'TRUE': 255 | aws_zabbix.send_metric_data_to_zabbix() 256 | else: 257 | aws_zabbix.show_metriclist_lld() 258 | -------------------------------------------------------------------------------- /scripts/lambda_zabbix.py: 
#!/bin/env python
"""Invoke an AWS Lambda function and log the result.

Intended to be called from Zabbix (action / external script) to trigger
a Lambda function and surface its response in the Zabbix log.
"""

import argparse
import base64
import json
import logging

import boto3


class AWSLambda:
    """Thin wrapper around the boto3 Lambda client."""

    def __init__(self, region, access_key, secret_key, debug):
        """Create the Lambda client and configure logging.

        region, access_key and secret_key are passed straight to
        boto3.client(); debug is a logging level name such as "info"
        or "DEBUG" (case-insensitive).
        """
        self.client = boto3.client(
            'lambda',
            region_name=region,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
        )
        log_fmt = '%(asctime)s- %(message)s'
        logging.basicConfig(level=debug.upper(), format=log_fmt)

    def invokeLambda(self, funcname, invocationtype, logtype, payload):
        """Invoke the Lambda function and return the raw boto3 response.

        payload may be a JSON document as a string (how argparse hands it
        over) or an already-decoded object; either way the function
        receives a proper JSON object.
        """
        # BUG FIX: argparse delivers --payload as a *string*; the original
        # code ran json.dumps() on it directly, double-encoding it so the
        # Lambda function received a JSON string literal ("{\"k\":1}")
        # instead of an object. Decode string input first.
        if isinstance(payload, str):
            payload = json.loads(payload) if payload else {}
        logging.debug(json.dumps(payload))
        response = self.client.invoke(
            FunctionName=funcname,
            InvocationType=invocationtype,
            LogType=logtype,
            Payload=json.dumps(payload),
        )
        return response

    def dispResult(self, response):
        """Log HTTP status, Lambda log tail and any returned message."""
        # BUG FIX: "LogResult" is only present for synchronous invocations
        # with LogType='Tail'; the original unconditional access raised
        # KeyError for LogType='None' or InvocationType='Event'.
        if "LogResult" in response:
            logging.debug("Exec Result: ")
            logging.debug(base64.b64decode(response["LogResult"]))

        logging.info("ResponseCode: %d",
                     response['ResponseMetadata']['HTTPStatusCode'])
        # Async (Event) invocations return no payload body.
        if "Payload" in response:
            body = response['Payload'].read().decode('utf-8')
            payload = json.loads(body) if body else None
            if payload and "message" in payload:
                logging.info(payload["message"])


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Get Lambda Parameter.')

    parser.add_argument('-r', '--region', required=True,
                        help='Set AWS region name(e.g.: ap-northeast-1)')
    parser.add_argument('-a', '--accesskey', help='Set AWS Access Key ID')
    parser.add_argument('-s', '--secretkey', help='Set AWS Secret Access Key')
    parser.add_argument('-f', '--funcname', required=True,
                        help='Set Function Name of AWS Lambda (e.g: arn:aws:lambda:ap-northeast-1:*******:function:Test')
    parser.add_argument('-i', '--invocationtype', default='RequestResponse',
                        help='Set invocation type: RequestResponse(sync), Event(async) or DryRun(test)')
    parser.add_argument('-l', '--logtype', default='Tail',
                        help='Set log data type. You can set this parameter only if you specify the InvocationType with value RequestResponse. Tail: Returns base64 encoded last 4KB of log. None: No returns log.')
    parser.add_argument('-p', '--payload', default={},
                        help='Set payload if you want to include intanceid, AWS Service and so on. The payload must be json. (e.g. {"instance_id":"xxxxx"}')
    parser.add_argument('-d', '--debuglevel', default="info",
                        help='Debug Level: INFO, WARNING, ERROR')

    args = parser.parse_args()

    awslambda = AWSLambda(region=args.region, access_key=args.accesskey,
                          secret_key=args.secretkey, debug=args.debuglevel)
    response = awslambda.invokeLambda(funcname=args.funcname,
                                      invocationtype=args.invocationtype,
                                      logtype=args.logtype,
                                      payload=args.payload)

    awslambda.dispResult(response)
You can set this parameter only if you specify the InvocationType with value RequestResponse. Tail: Returns base64 encoded last 4KB of log. None: No returns log.') 43 | parser.add_argument('-p', '--payload',default={}, help= 'Set payload if you want to include intanceid, AWS Service and so on. The payload must be json. (e.g. {"instance_id":"xxxxx"}') 44 | parser.add_argument('-d', '--debuglevel',default="info",help='Debug Level: INFO, WARNING, ERROR') 45 | 46 | args = parser.parse_args() 47 | 48 | awslambda = AWSLambda(region=args.region,access_key=args.accesskey,secret_key=args.secretkey,debug=args.debuglevel) 49 | response = awslambda.invokeLambda(funcname=args.funcname,invocationtype = args.invocationtype,logtype=args.logtype,payload=args.payload) 50 | 51 | awslambda.dispResult(response) 52 | -------------------------------------------------------------------------------- /templates/2.2/AWS_Service_Health_Dashboard_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 2.0 4 | 2016-07-07T08:21:29Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 168 | 169 | 170 | -------------------------------------------------------------------------------- /templates/2.2/AmazonSNS_AWSLambda_Zabbix_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 2.0 4 | 2016-08-21T11:19:51Z 5 | 6 | 7 | SNS 8 | 9 | 10 | 11 | 66 | 121 | 176 | 231 | 232 | 233 | 234 | {Template AWS SNS EVENT AutoScaling:sns.event.regexp(INSTANCE_LAUNCH_ERROR)}=1 235 | AutoScaling_Instance_Launch_Error 236 | 237 | 0 238 | 3 239 | 240 | 1 241 | 242 | 243 | 244 | {Template AWS SNS EVENT AutoScaling:sns.event.regexp(INSTANCE_TERMINATE_ERROR)}=1 245 | AutoScaling_Instance_Terminate_Error 246 | 247 | 0 248 | 3 249 | 250 | 1 251 | 252 | 253 | 254 | {Template AWS SNS EVENT EC2:sns.event.regexp(ALARM)}=1 255 | EC2_Alarm 256 | 257 | 0 258 | 3 259 | 260 | 1 261 | 262 | 263 | 264 | {Template AWS SNS EVENT 
EC2:sns.event.regexp(INSUFFICIENT_DATA)}=1 265 | EC2_InsufficientData 266 | 267 | 0 268 | 2 269 | 270 | 1 271 | 272 | 273 | 274 | {Template AWS SNS EVENT Other:sns.event.regexp(ALARM)}=1 275 | Other_Service_Alarm 276 | 277 | 0 278 | 3 279 | 280 | 1 281 | 282 | 283 | 284 | {Template AWS SNS EVENT Other:sns.event.regexp(INSUFFICIENT_DATA)}=1 285 | Other_Service_InsufficientData 286 | 287 | 0 288 | 2 289 | 290 | 1 291 | 292 | 293 | 294 | {Template AWS SNS EVENT RDS:sns.event.regexp(ALARM)}=1 295 | RDS_Alarm 296 | 297 | 0 298 | 3 299 | 300 | 1 301 | 302 | 303 | 304 | {Template AWS SNS EVENT RDS:sns.event.regexp("^EventId.*RDS-EVENT-(0022|0031|0034|0035|0036|0045|0069)")}=1 305 | RDS_Failure_notification 306 | 307 | 0 308 | 4 309 | 310 | 1 311 | 312 | 313 | 314 | {Template AWS SNS EVENT RDS:sns.event.regexp(INSUFFICIENT_DATA)}=1 315 | RDS_InsufficientData 316 | 317 | 0 318 | 2 319 | 320 | 1 321 | 322 | 323 | 324 | 325 | 326 | -------------------------------------------------------------------------------- /templates/2.2/autoscaling_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 2.0 4 | 2016-07-20T02:41:29Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 99 | 100 | 101 | -------------------------------------------------------------------------------- /templates/2.2/awsbilling_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 2.0 4 | 2016-06-17T07:40:30Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 157 | 158 | 159 | -------------------------------------------------------------------------------- /templates/2.2/cloudwatch_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 2.0 4 | 2016-05-25T01:21:50Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 151 | 291 | 431 | 571 | 572 | 573 | 574 | -------------------------------------------------------------------------------- 
/templates/3.0/AWS_Service_Health_Dashboard_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 3.0 4 | 2016-07-07T08:19:08Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 181 | 182 | 183 | -------------------------------------------------------------------------------- /templates/3.0/AmazonSNS_AWSLambda_Zabbix_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 3.0 4 | 2016-08-21T11:06:17Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 80 | 149 | 205 | 261 | 262 | 263 | 264 | {Template AWS SNS EVENT AutoScaling:sns.event.regexp(INSTANCE_LAUNCH_ERROR)}=1 265 | AutoScaling_Instance_Launch_Error 266 | 267 | 0 268 | 3 269 | 270 | 1 271 | 272 | 273 | 274 | {Template AWS SNS EVENT AutoScaling:sns.event.regexp(INSTANCE_TERMINATE_ERROR)}=1 275 | AutoScaling_Instance_Terminate_Error 276 | 277 | 0 278 | 3 279 | 280 | 1 281 | 282 | 283 | 284 | {Template AWS SNS EVENT EC2:sns.event.regexp(ALARM)}=1 285 | EC2_Alarm 286 | 287 | 0 288 | 3 289 | 290 | 1 291 | 292 | 293 | 294 | {Template AWS SNS EVENT EC2:sns.event.regexp(INSUFFICIENT_DATA)}=1 295 | EC2_InsufficientData 296 | 297 | 0 298 | 2 299 | 300 | 1 301 | 302 | 303 | 304 | {Template AWS SNS EVENT Other:sns.event.regexp(ALARM)}=1 305 | Other_Service_Alarm 306 | 307 | 0 308 | 3 309 | 310 | 1 311 | 312 | 313 | 314 | {Template AWS SNS EVENT Other:sns.event.regexp(INSUFFICIENT_DATA)}=1 315 | Other_Service_InsufficientData 316 | 317 | 0 318 | 2 319 | 320 | 1 321 | 322 | 323 | 324 | {Template AWS SNS EVENT RDS:sns.event.regexp(ALARM)}=1 325 | RDS_Alarm 326 | 327 | 0 328 | 3 329 | 330 | 1 331 | 332 | 333 | 334 | {Template AWS SNS EVENT RDS:sns.event.regexp("^EventId.*RDS-EVENT-(0022|0031|0034|0035|0036|0045|0069)")}=1 335 | RDS_Failure_notification 336 | 337 | 0 338 | 4 339 | 340 | 1 341 | 342 | 343 | 344 | {Template AWS SNS EVENT RDS:sns.event.regexp(INSUFFICIENT_DATA)}=1 345 | RDS_InsufficientData 346 | 347 | 0 348 | 2 349 | 350 
| 1 351 | 352 | 353 | 354 | 355 | 356 | -------------------------------------------------------------------------------- /templates/3.0/autoscaling_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 3.0 4 | 2016-07-19T23:56:21Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 100 | 101 | 102 | -------------------------------------------------------------------------------- /templates/3.0/awsbilling_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 3.0 4 | 2016-06-17T07:42:54Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 163 | 164 | 165 | -------------------------------------------------------------------------------- /templates/3.0/cloudwatch_template.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 3.0 4 | 2016-05-21T23:23:55Z 5 | 6 | 7 | TIS Templates 8 | 9 | 10 | 11 | 159 | 307 | 455 | 603 | 604 | 605 | --------------------------------------------------------------------------------