├── .gitignore ├── LICENSE ├── README.md ├── bro.py ├── bro └── intel_refresh.sh ├── common_methods.py ├── crits.py ├── nyx.conf.example ├── nyx.py ├── nyx_soltra.py ├── palo_alto.py ├── plugin_template.py ├── qradar.py ├── requirements.txt ├── soltra.py ├── web_proxy.py └── wise.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # PyInstaller 26 | # Usually these files are written by a python script from a template 27 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 28 | *.manifest 29 | *.spec 30 | 31 | # Installer logs 32 | pip-log.txt 33 | pip-delete-this-directory.txt 34 | 35 | # Unit test / coverage reports 36 | htmlcov/ 37 | .tox/ 38 | .coverage 39 | .cache 40 | nosetests.xml 41 | coverage.xml 42 | 43 | # Translations 44 | *.mo 45 | *.pot 46 | 47 | # Django stuff: 48 | *.log 49 | 50 | # Sphinx documentation 51 | docs/_build/ 52 | 53 | # PyBuilder 54 | target/ 55 | 56 | # excluding configuration files: 57 | *.conf 58 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Paul Poputa-Clean 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | nyx 2 | === 3 | 4 | Threat Intelligence artifact distribution 5 | 6 | The goal of this project is to facilitate distribution of Threat Intelligence artifacts to defensive systems and to enhance the value derrived from both open source and commercial tools. An example usage of this is described on: https://www.sans.org/reading-room/whitepapers/threats/automated-defense-threat-intelligence-augment-35692 7 | 8 | Needless to say, this is an experimental project - use at your own risk. Proper Documentation pending - as of right now it was damn hard to write, it should be damn hard to use ;). 9 | 10 | How to use this: 11 | 12 | 1. 
install dependencies from `requirements.txt` 13 | 14 | 1. Change the configuration file and put in your systems. You will need either CRITs or Soltra to get started. You will need to create a few things in order to get started: 15 | - Set up the maps of high versus medium criticality/confidence sets. 16 | - If you are using CRITs, make sure your intel is properly classified. 17 | - If you are using Soltra, you will need to set up the searches for high versus medium indicators. 18 | 19 | 1. You will also need to set up some of the objects in the various systems: 20 | - reference sets in QRadar 21 | - Palo Alto object groups 22 | - moloch wise configuration 23 | - bro intel framework 24 | - web filter custom categories 25 | 26 | 1. Run it in a test environment, make sure nothing breaks. Take the time to measure how long it takes to run - it might be useful when you set up the crontab job. Figure out how to deploy your intel files for the systems that don't have an API - store them on apache, use a share, or rsync them. 27 | 28 | 1. Run it in production (after you've read all code to know exactly what it does - don't trust me) 29 | 30 | 1. ***Beer*** - you achieved Threat Intelligence distribution automation, are a pioneer in your industry, and probably made a horde of interns less useful. 31 | 32 | The technologies currently coded for are: 33 | - IBM QRadar (https://github.com/ibm-security-intelligence/) 34 | - Palo Alto Networks (https://live.paloaltonetworks.com/twzvq79624/attachments/twzvq79624/documentation_tkb/246/1/XML-API-6.0.pdf) 35 | - CRITs (https://github.com/crits/crits) 36 | - BRO IDS (https://www.bro.org/sphinx/frameworks/intel.html) 37 | - Soltra Edge (https://soltra.com/) 38 | - Moloch (https://github.com/aol/moloch/wiki/WISE) 39 | -------------------------------------------------------------------------------- /bro.py: -------------------------------------------------------------------------------- 1 | from common_methods import * 2 | def alert_bro(indicator, settings): 3 | """ adds indicator to the bro intel framework based on the type of indicator 4 | #fields indicator indicator_type meta.source meta.url meta.do_notice meta.if_in 5 | Intel::ADDR 6 | Intel::URL 7 | Intel::SOFTWARE 8 | Intel::EMAIL 9 | Intel::DOMAIN 10 | Intel::USER_NAME 11 | Intel::FILE_HASH 12 | Intel::FILE_NAME 13 | Intel::CERT_HASH 14 | """ 15 | # for now, we're mapping really basic elements: 16 | indicator_map=settings['indicator_map'] 17 | 18 | if 'type' in indicator.keys() and indicator['type']=='Address - ipv4-addr': 19 | # adding an ip 20 | with open(settings['filename']+'_addr.txt','a+') as bro_file: 21 | bro_file.write('\t'.join([indicator['ip'],indicator_map[indicator['type']],get_sources(indicator),'T'])+'\n') 22 | return True 23 | elif 'type' in indicator.keys() and indicator['type']=='A': 24 | # adding the domain 25 | with open(settings['filename']+'_dom.txt','a+') as bro_file: 26 | bro_file.write('\t'.join([indicator['domain'],indicator_map[indicator['type']],get_sources(indicator),'T'])+'\n') 27 | return True 28 | elif 'md5' in indicator.keys(): 29 | # adding the md5 hash and the filename 30 | with open(settings['filename']+'_file.txt','a+') as bro_file: 31 | if indicator['md5']: 32 | bro_file.write('\t'.join([indicator['md5'],indicator_map['md5'],get_sources(indicator),'T'])+'\n') 33 | if indicator['filename']: 34 | bro_file.write('\t'.join([indicator['filename'],indicator_map['filename'],get_sources(indicator),'T'])+'\n') 35 | return True 36 | elif 'x_mailer' in indicator.keys(): 37 | # 
adding the email address - for now, assuming spearphish, therefore focusing on the <> field 38 | with open(settings['filename']+'_mail.txt','a+') as bro_file: 39 | bro_file.write('\t'.join([indicator['from'],indicator_map['email'],get_sources(indicator),'T'])+'\n') 40 | return True 41 | else: 42 | syslog.syslog('nyx->BRO: I do not know how to handle the following type of observable: %s' % indicator['type']) 43 | return False -------------------------------------------------------------------------------- /bro/intel_refresh.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # add this to the crontbab: 4 | # */5 * * * * /opt/bro/intel/intel_refresh.sh > /var/log/intel.log 5 | 6 | cd /opt/bro/intel/ 7 | rm CRITs.intel* 8 | wget -q http://192.168.11.38/CRITs.intel 9 | -------------------------------------------------------------------------------- /common_methods.py: -------------------------------------------------------------------------------- 1 | import ConfigParser 2 | import syslog 3 | import json 4 | 5 | def get_sources(indicator): 6 | """ appends the sources of an indicator in a string""" 7 | source_arr=[] 8 | if 'source' in indicator.keys(): 9 | for source in indicator['source']: 10 | if not source in source_arr: 11 | source_arr.append(source['name']) 12 | if source_arr: 13 | return source_arr 14 | else: 15 | return "CRITs" 16 | 17 | def get_intel_confidence(indicator): 18 | """ sets the confidence to the highest confidence source. 19 | I am starting the confidence level with the first campaign, then adding some points for each subsequent one. 20 | The idea is that the more distinct campaigns this indicator is a part of, the more certain we can be that 21 | it is not a false positive""" 22 | initial_score = {'low':30, 'medium':50, 'high':75} 23 | add_score={'low':5,'medium':10,'high':25} 24 | # setting the confidence to parrallel the highest-confidence source 25 | processed_campaigns=[indicator[u'campaign'][0]['name']] 26 | confidence=initial_score[indicator[u'campaign'][0]['confidence']] 27 | for campaign in indicator[u'campaign']: 28 | if not campaign['name'] in processed_campaigns: 29 | confidence+=add_score[campaign['confidence']] 30 | processed_campaigns.append(campaign['name']) 31 | if confidence in range(0,50): 32 | return 'low' 33 | elif confidence in range(50,75): 34 | return 'medium' 35 | elif confidence > 74: 36 | return 'high' 37 | else: 38 | syslog.syslog(syslog.LOG_ERR,'something got messed up in trying to gauge the confidence.') 39 | return 'low' 40 | 41 | def read_configs(config_file): 42 | """ read configurations from the config file. 
If a section exists, the corresponding module gets configured.
43 |     As of right now, it is still an open question whether to error out on a bad configuration or to just exclude the module"""
44 |     config = ConfigParser.SafeConfigParser()
45 |     cfg_success = config.read(config_file)
46 |     if not cfg_success:
47 |         syslog.syslog(syslog.LOG_ERR,'Could not read nyx.conf')
48 |         exit(-1)
49 | 
50 |     configs={}
51 |     source=False
52 | 
53 |     if config.has_section('crits'):
54 |         # getting the CRITs configurations
55 |         configs['crits']={}
56 |         if config.has_option('crits','username'):
57 |             configs['crits']['username']=config.get('crits','username')
58 |         else:
59 |             syslog.syslog(syslog.LOG_ERR,'nyx: Please make sure that the [crits] section of the nyx.conf file has a username option')
60 |             exit(-1)
61 | 
62 |         if config.has_option('crits','api_key'):
63 |             configs['crits']['api_key']=config.get('crits', 'api_key')
64 |         else:
65 |             syslog.syslog(syslog.LOG_ERR,'nyx: Please make sure that the [crits] section of the nyx.conf file has an api_key option')
66 |             exit(-1)
67 | 
68 |         if config.has_option('crits','url'):
69 |             configs['crits']['url']=config.get('crits','url')
70 |         else:
71 |             syslog.syslog(syslog.LOG_ERR,'nyx: Please make sure that the [crits] section of the nyx.conf file has a url option')
72 |             exit(-1)
73 | 
74 |         if config.has_option('crits','offset'):
75 |             configs['crits']['offset']=int(config.get('crits','offset'))
76 |         else:
77 |             configs['crits']['offset']=20
78 |         source='crits'
79 | 
80 |     if config.has_section('soltra'):
81 |         # getting the Soltra configurations
82 |         configs['soltra']={}
83 |         if config.has_option('soltra','username'):
84 |             configs['soltra']['username']=config.get('soltra','username')
85 |         else:
86 |             syslog.syslog(syslog.LOG_ERR,'nyx: Please make sure that the [soltra] section of the nyx.conf file has a username option')
87 |             exit(-1)
88 | 
89 |         if config.has_option('soltra','password'):
90 |             # I know, I know, it's not actual password encryption, but it should be better than cleartext
91 |             configs['soltra']['password']=config.get('soltra', 'password').decode('base64').decode('rot13')
92 |         else:
93 |             syslog.syslog(syslog.LOG_ERR,'nyx: Please make sure that the [soltra] section of the nyx.conf file has a password option')
94 |             exit(-1)
95 | 
96 |         if config.has_option('soltra','server'):
97 |             configs['soltra']['server']=config.get('soltra','server')
98 |         else:
99 |             syslog.syslog(syslog.LOG_ERR,'nyx: Please make sure that the [soltra] section of the nyx.conf file has a server option')
100 |             exit(-1)
101 | 
102 |         if config.has_option('soltra','subscriptions'):
103 |             configs['soltra']['subscriptions']=json.loads(config.get('soltra','subscriptions'))
104 |         else:
105 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the Soltra subscriptions from the configuration file.')
106 |             exit(-1)
107 | 
108 |         if config.has_option('soltra','supported_objects'):
109 |             configs['soltra']['supported_objects']=json.loads(config.get('soltra','supported_objects'))
110 |         else:
111 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the Soltra supported_objects from the configuration file.')
112 |             exit(-1)
113 | 
114 |         source='soltra'
115 |     if not source:
116 |         syslog.syslog(syslog.LOG_ERR,'nyx: nyx needs at least one threat intel source to run - please populate either the [soltra] or the [crits] section')
117 |         exit(-1)
118 | 
119 |     if config.has_section('qradar'):
120 |         # getting the QRadar settings
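        # Note: the 'map' below is keyed indicator type -> confidence -> reference set name. The nyx_default_*
        # names are only placeholders; they get overwritten a few lines further down from the high_reference_sets
        # and medium_reference_sets options, and nyx.py later reads config['qradar']['map'][itype][confidence]
        # to decide which reference set an indicator lands in.
121 | 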
configs['qradar']={'map':{'ip':{'medium':'nyx_default_IP_medium','high':'nyx_default_IP_high'},'sample':{'medium':'nyx_default_sample_medium','high':'nyx_default_sample_high'},'email':{'medium':'nyx_default_email_medium','high':'nyx_default_email_high'},'domain':{'medium':'nyx_default_domain_medium','high':'nyx_default_domain_high'}}} 122 | if config.has_option('qradar','console'): 123 | configs['qradar']['base_url']=config.get('qradar','console')+'api/' 124 | else: 125 | syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the QRadar console address from the configuation file.') 126 | exit(-1) 127 | 128 | if config.has_option('qradar','api_key'): 129 | configs['qradar']['SEC']=config.get('qradar','api_key') 130 | else: 131 | syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the QRadar api key from the configuration file') 132 | exit(-1) 133 | 134 | # starting to read and validate the sets: 135 | if config.has_option('qradar','sets_to_validate'): 136 | configs['qradar']['sets_to_validate']=json.loads(config.get('qradar','sets_to_validate')) 137 | else: 138 | syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the QRadar intel reference sets from the configuation file.') 139 | exit(-1) 140 | 141 | set_test=[] 142 | 143 | if config.has_option('qradar','high_reference_sets'): 144 | configs['qradar']['high_reference_sets']=json.loads(config.get('qradar','high_reference_sets')) 145 | set_test+=configs['qradar']['high_reference_sets'].values() 146 | configs['qradar']['map']['ip']['high']=configs['qradar']['high_reference_sets']["Address - ipv4-addr"] 147 | configs['qradar']['map']['domain']['high']=configs['qradar']['high_reference_sets']["A"] 148 | configs['qradar']['map']['sample']['high']=configs['qradar']['high_reference_sets']["md5"] 149 | configs['qradar']['map']['email']['high']=configs['qradar']['high_reference_sets']["email"] 150 | else: 151 | syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the QRadar intel reference sets from the configuation file.') 152 | exit(-1) 153 | 154 | if config.has_option('qradar','medium_reference_sets'): 155 | configs['qradar']['medium_reference_sets']=json.loads(config.get('qradar','medium_reference_sets')) 156 | set_test+=configs['qradar']['medium_reference_sets'].values() 157 | configs['qradar']['map']['ip']['medium']=configs['qradar']['medium_reference_sets']["Address - ipv4-addr"] 158 | configs['qradar']['map']['domain']['medium']=configs['qradar']['medium_reference_sets']["A"] 159 | configs['qradar']['map']['sample']['medium']=configs['qradar']['medium_reference_sets']["md5"] 160 | configs['qradar']['map']['email']['medium']=configs['qradar']['medium_reference_sets']["email"] 161 | else: 162 | syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the QRadar intel reference sets from the configuation file.') 163 | exit(-1) 164 | for qset in set_test: 165 | if not qset in configs['qradar']['sets_to_validate'].keys(): 166 | configs['qradar']['sets_to_validate'][qset]="ALNIC" 167 | syslog.syslog(syslog.LOG_ERR,"nyx: Unable to find metadata about the (%s) set in the configuation file. 
Defaulting to ALNIC" % qset)
168 | 
169 |     if config.has_section('bro'):
170 |         # reading BRO settings
171 |         configs['bro']={}
172 |         if config.has_option('bro','filename'):
173 |             configs['bro']['filename']=config.get('bro','filename')
174 |             # truncating the per-type intel files to ensure no stale indicators are left behind
175 |             for ftype in ['_file.txt','_addr.txt','_dom.txt','_mail.txt']:
176 |                 with open(configs['bro']['filename']+ftype,'w') as bro_file:
177 |                     bro_file.write('#fields\tindicator\tindicator_type\tmeta.source\tmeta.do_notice\n')
178 |         else:
179 |             syslog.syslog(syslog.LOG_ERR,'Unable to get the BRO intel file location from the configuration file.')
180 |             exit(-1)
181 |         if config.has_option('bro','indicator_map'):
182 |             configs['bro']['indicator_map']=json.loads(config.get('bro','indicator_map'))
183 |             set_test+=configs['bro']['indicator_map'].keys()
184 |         else:
185 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to load the BRO indicator map from the configuration file.')
186 |             exit(-1)
187 | 
188 |     if config.has_section('palo_alto'):
189 |         # reading Palo Alto settings
190 |         configs['palo_alto']={'map':{'ip':{'medium':'nyx_default_IP_medium','high':'nyx_default_IP_high'},'domain':{'medium':'nyx_default_domain_medium','high':'nyx_default_domain_high'}}}
191 |         if config.has_option('palo_alto','api_key'):
192 |             configs['palo_alto']['api_key']=config.get('palo_alto','api_key')
193 |         else:
194 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the Palo Alto api key from the configuration file.')
195 |             exit(-1)
196 | 
197 |         if config.has_option('palo_alto','url'):
198 |             configs['palo_alto']['url']=config.get('palo_alto','url')
199 |         else:
200 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the Palo Alto base URL from the configuration file.')
201 |             exit(-1)
202 |         # this needs fixing to be more map-like:
203 |         if config.has_option('palo_alto','url_alert_list'):
204 |             configs['palo_alto']['map']['domain']['medium']=config.get('palo_alto','url_alert_list')
205 |         else:
206 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the Palo Alto URL alert list from the configuration file.')
207 |             exit(-1)
208 | 
209 |         if config.has_option('palo_alto','url_block_list'):
210 |             configs['palo_alto']['map']['domain']['high']=config.get('palo_alto','url_block_list')
211 |         else:
212 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the Palo Alto URL block list from the configuration file.')
213 |             exit(-1)
214 | 
215 |         if config.has_option('palo_alto','ip_alert_list'):
216 |             configs['palo_alto']['map']['ip']['medium']=config.get('palo_alto','ip_alert_list')
217 |         else:
218 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the Palo Alto IP alert list from the configuration file.')
219 |             exit(-1)
220 | 
221 |         if config.has_option('palo_alto','ip_block_list'):
222 |             configs['palo_alto']['map']['ip']['high']=config.get('palo_alto','ip_block_list')
223 |         else:
224 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the Palo Alto IP block list from the configuration file.')
225 |             exit(-1)
226 | 
227 | 
228 |     if config.has_section('web_proxy'):
229 |         # reading web content gateway settings
230 |         configs['web_proxy']={}
231 |         if config.has_option('web_proxy','filename'):
232 |             configs['web_proxy']['filename']=config.get('web_proxy','filename')
233 |             # truncating the file to ensure no stale indicators
234 |             with open(configs['web_proxy']['filename'],'w') as wp_file:
235 |                 wp_file.write('')
236 |         else:
237 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to get the web proxy intel file location from the configuration file.')
238 |             exit(-1)
239 | 
240 |     if config.has_section('wise'):
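        # The wise indicator_map mirrors the bro one: its keys are CRITs-style indicator types and its values are
        # the file suffixes appended to the base filename below, so each indicator type ends up in its own WISE file.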
241 |         # reading wise settings
242 |         configs['wise']={}
243 |         if config.has_option('wise','filename'):
244 |             configs['wise']['filename']=config.get('wise','filename')
245 |         else:
246 |             syslog.syslog(syslog.LOG_ERR,'Unable to get the wise intel file location from the configuration file.')
247 |             exit(-1)
248 |         if config.has_option('wise','indicator_map'):
249 |             configs['wise']['indicator_map']=json.loads(config.get('wise','indicator_map'))
250 |             set_test+=configs['wise']['indicator_map'].keys()
251 |         else:
252 |             syslog.syslog(syslog.LOG_ERR,'nyx: Unable to load the wise indicator map from the configuration file.')
253 |             exit(-1)
254 |         # truncating the per-type intel files to ensure no stale indicators
255 |         for ftype in configs['wise']['indicator_map'].values():
256 |             with open(configs['wise']['filename']+ftype,'w') as wise_file:
257 |                 wise_file.write('')
258 | 
259 |     return configs
260 | 
261 | def address_in_index(address,ip_index):
262 |     """checks to see if an address is in the index of IPs.
263 |     The index should be a key-value pair of address|CIDR : address group|reference set"""
264 |     if address+"/32" in ip_index.keys():
265 |         return ip_index[address+"/32"]
266 |     elif address in ip_index.keys():
267 |         return ip_index[address]
268 |     else:
269 |         return False
270 | 
271 | def url_in_index(url,url_index):
272 |     """ checks whether a url is in the url index.
273 |     The index should be a key-value pair of url : address group|reference set"""
274 | 
275 |     if url in url_index.keys():
276 |         return url_index[url]
277 |     elif '*'+url in url_index.keys():
278 |         return url_index['*'+url]
279 |     else:
280 |         return False
--------------------------------------------------------------------------------
/crits.py:
--------------------------------------------------------------------------------
1 | from common_methods import *
2 | import requests
3 | 
4 | def list_indicators(settings):
5 |     """exports a list of the indicators in CRITs"""
6 |     params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
7 |     url=settings['url']+'indicators/'
8 |     total=settings['offset']
9 |     indicators=[]
10 |     while params['offset'] <= total:
11 |         #print total,params['offset']
12 |         r = requests.get(url, params=params, verify=False)
13 |         if r.status_code == 200:
14 |             res=r.json()
15 |             for potential_result in res['objects']:
16 |                 # only keeping indicators that mean something
17 |                 if potential_result['campaign'] and get_intel_confidence(potential_result) in ['medium','high']:
18 |                     indicators.append(potential_result)
19 |         params['offset']+=settings['offset']
20 |         total=res['meta']['total_count']
21 |     return indicators
22 | 
23 | def list_ips(settings, limit=0):
24 |     """exports a list of the IPs in CRITs, basing the confidence on the campaign confidence"""
25 |     ips=[]
26 |     params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
27 |     url=settings['url']+'ips/'
28 |     #total=settings['offset']
29 |     total=limit
30 |     while params['offset'] <= total:
31 |         #print total,params['offset']
32 |         r = requests.get(url, params=params, verify=False)
33 |         if r.status_code == 200:
34 |             res=r.json()
35 |             for potential_result in res['objects']:
36 |                 # only keeping indicators that mean something - don't care about lows and unknowns
37 |                 if potential_result['campaign'] and get_intel_confidence(potential_result) in ['medium','high']:
38 |                     #print potential_result
39 |                     ips.append(potential_result)
40 |         params['offset']+=settings['offset']
41 |         if not limit:
42 |             total=res['meta']['total_count']
43 |     return ips
44 | 
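# The normalize_* helpers below all funnel raw CRITs objects into the single in-memory shape that nyx.py later
# diffs against the controls: indicators['incoming'][<'ip'|'domain'|'sample'>][<value>] = {'confidence': ..., 'tags': [...]}.
# A minimal illustration (values made up): indicators['incoming']['ip']['203.0.113.7'] = {'confidence': 'high', 'tags': ['CRITs']}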
45 | def normalize_ip(indicator,indicators):
46 |     """ normalizes the ip and adds it to the indicator index if it isn't already there"""
47 |     if indicator['type']=='Address - ipv4-addr':
48 |         ip=indicator['ip']
49 |         tags=get_sources(indicator)
50 |         if ip in indicators['incoming']['ip'].keys():
51 |             for tag in tags:
52 |                 if not tag in indicators['incoming']['ip'][ip]['tags']:
53 |                     indicators['incoming']['ip'][ip]['tags'].append(tag)
54 |         else:
55 |             indicators['incoming']['ip'][ip]={'confidence':get_intel_confidence(indicator),'tags':tags}
56 |     return indicators
57 | 
58 | 
59 | def list_fqdns(settings,limit=0):
60 |     """exports a list of the FQDNs in CRITs, basing the confidence on the campaign confidence"""
61 |     fqdns=[]
62 |     params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
63 |     url=settings['url']+'domains/'
64 |     if limit:
65 |         total=limit
66 |     else:
67 |         total=settings['offset']+1
68 |     while params['offset'] <= total:
69 |         r = requests.get(url, params=params, verify=False)
70 |         if r.status_code == 200:
71 |             res=r.json()
72 |             for potential_result in res['objects']:
73 |                 if potential_result['campaign'] and get_intel_confidence(potential_result) in ['medium','high']:
74 |                     fqdns.append(potential_result)
75 |         params['offset']+=settings['offset']
76 |         if not limit:
77 |             total=res['meta']['total_count']
78 |     return fqdns
79 | 
80 | def normalize_fqdn(indicator,indicators):
81 |     """ normalizes the domain and adds it to the indicator index if it isn't already there"""
82 |     if indicator['type']=='A':
83 |         fqdn=indicator['domain']
84 |         tags=get_sources(indicator)
85 |         if fqdn in indicators['incoming']['domain'].keys():
86 |             for tag in tags:
87 |                 if not tag in indicators['incoming']['domain'][fqdn]['tags']:
88 |                     indicators['incoming']['domain'][fqdn]['tags'].append(tag)
89 |         else:
90 |             indicators['incoming']['domain'][fqdn]={'confidence':get_intel_confidence(indicator),'tags':tags}
91 |     return indicators
92 | 
93 | def list_samples(settings,limit=0):
94 |     """exports a list of the samples in CRITs, basing the confidence on the campaign confidence"""
95 |     samples=[]
96 |     params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
97 |     url=settings['url']+'samples/'
98 |     if limit:
99 |         total=limit
100 |     else:
101 |         total=settings['offset']
102 |     while params['offset'] <= total:
103 |         #print total,params['offset']
104 |         r = requests.get(url, params=params, verify=False)
105 |         if r.status_code == 200:
106 |             res=r.json()
107 |             for potential_result in res['objects']:
108 |                 # only keeping indicators that mean something - don't care about lows and unknowns
109 |                 if potential_result['campaign'] and get_intel_confidence(potential_result) in ['medium','high'] and (potential_result['md5'] or potential_result['filename']):
110 |                     #print potential_result
111 |                     samples.append(potential_result)
112 |         params['offset']+=settings['offset']
113 |         if not limit:
114 |             total=res['meta']['total_count']
115 |     return samples
116 | 
117 | def normalize_sample(indicator,indicators):
118 |     """ normalizes the sample and adds it to the indicator index if it isn't already there"""
119 |     if 'md5' in indicator.keys():
120 |         md5=indicator['md5']
121 |         # still need to check for the other relevant properties here - filename, sha1, sha256, et cetera
122 |         tags=get_sources(indicator)
123 |         if md5 in indicators['incoming']['sample'].keys():
124 |             for tag in tags:
125 |                 if not tag in indicators['incoming']['sample'][md5]['tags']:
126 |                     indicators['incoming']['sample'][md5]['tags'].append(tag)
127 |         else:
128 |             indicators['incoming']['sample'][md5]={'confidence':get_intel_confidence(indicator),'tags':tags}
129 |     return indicators
130 | 
131 | def list_targets(settings):
132 |     """exports a list of the targets in CRITs"""
133 |     targets=[]
134 |     params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
135 |     url=settings['url']+'targets/'
136 |     total=settings['offset']
137 |     while params['offset'] <= total:
138 |         #print total,params['offset']
139 |         r = requests.get(url, params=params, verify=False)
140 |         if r.status_code == 200:
141 |             res=r.json()
142 |             for potential_result in res['objects']:
143 |                 targets.append(potential_result)
144 |         params['offset']+=settings['offset']
145 |         total=res['meta']['total_count']
146 |     return targets
--------------------------------------------------------------------------------
/nyx.conf.example:
--------------------------------------------------------------------------------
1 | [crits]
2 | url = [CRITs API url]
3 | username = [CRITs username]
4 | api_key = [CRITs API key]
5 | offset = [query page size]
6 | 
7 | [qradar]
8 | api_key = [generated QRadar API key]
9 | console = [console address]
10 | high_reference_sets = {"A": "Intel.High.Domains", "Address - ipv4-addr": "Intel.High.IPs", "email": "Intel.High.Emails", "md5": "Intel.High.Hashes"}
11 | medium_reference_sets = {"A": "Intel.Medium.Domains", "Address - ipv4-addr": "Intel.Medium.IPs", "email": "Intel.Medium.Emails", "md5": "Intel.Medium.Hashes"}
12 | sets_to_validate = {"Intel.High.Domains": "ALNIC", "Intel.Medium.Domains": "ALNIC", "Intel.High.Hashes": "ALNIC", "Intel.High.IPs": "IP", "Intel.Medium.Emails": "ALNIC", "Intel.Medium.Hashes": "ALNIC", "Intel.High.Emails": "ALNIC", "Intel.Medium.IPs": "IP"}
13 | 
14 | [palo_alto]
15 | api_key = [Palo Alto API key]
16 | url = [Palo Alto base URL]
17 | url_block_list = [custom URL category used for blocking]
18 | url_alert_list = [custom URL category used for alerting]
19 | ip_block_list = [address object tag used for blocking]
20 | ip_alert_list = [address object tag used for alerting]
21 | 
22 | [bro]
23 | filename = [file_path]
24 | indicator_map = {"A": "Intel::DOMAIN", "Address - ipv4-addr": "Intel::ADDR", "filename": "Intel::FILE_NAME", "email": "Intel::EMAIL", "md5": "Intel::FILE_HASH"}
25 | 
26 | [web_proxy]
27 | filename = [file_path]
--------------------------------------------------------------------------------
/nyx.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | import ConfigParser
3 | import syslog
4 | import json
5 | import requests
6 | import time
7 | from common_methods import *
8 | import crits
9 | import qradar
10 | from bro import *
11 | from web_proxy import *
12 | import palo_alto
13 | 
14 | 
15 | 
16 | 
17 | if __name__ == "__main__":
18 | 
19 |     # reading the settings - upon a successful read, the functionality depends on which sections are present in the configuration file
20 |     config=read_configs('nyx.conf')
21 |     indicator_index={}
22 |     indicators={'incoming':{'ip':{},'domain':{},'sample':{},'email':{},},
23 |                 'outgoing':{'to_add':{'ip':{},'domain':{},'sample':{},'email':{}},
24 |                             'to_remove':{'ip':{},'domain':{},'sample':{},'email':{}},
25 |                             'to_change':{'ip':{},'domain':{},'sample':{},'email':{}}
26 |                             }
27 |                 }
28 |     tag_index=[]
29 |     for tool in config.keys():
30 |         syslog.syslog(syslog.LOG_INFO,"nyx: retrieving indicators from %s" % tool)
31 |         if tool == 'palo_alto':
32 |             indicator_index[tool]={'ip':palo_alto.list_ips(config[tool]),
33 |                                    'domain':palo_alto.list_domains(config[tool])}
34 | 
elif tool == 'qradar': 35 | indicator_index[tool]={'ip':qradar.list_ips(config[tool]), 36 | 'domain':qradar.list_domains(config[tool])} 37 | elif tool == 'crits': 38 | # getting the intel from crits 39 | for crits_ip in crits.list_ips(config['crits']): 40 | indicators=crits.normalize_ip(crits_ip,indicators) 41 | # doing the same for domains 42 | for crits_domain in crits.list_fqdns(config['crits']): 43 | indicators=crits.normalize_fqdn(crits_domain,indicators) 44 | 45 | #for crits_sample in crits.list_samples(settings[tool],100): 46 | # indicators=crits.normalize_sample(crits_sample,indicators) 47 | else: 48 | # getting cached intel from the rest of the controls 49 | syslog.syslog(syslog.LOG_ERR,"nyx: don't quite know what to do about indicators from and to %s. You make your own or tweet @p4ulpc" % tool) 50 | 51 | 52 | for tool in indicator_index.keys(): 53 | for itype in indicator_index[tool].keys(): 54 | for tool_ind in indicator_index[tool][itype]: 55 | if not tool_ind in indicators['incoming'][itype].keys() and indicator_index[tool][itype][tool_ind] in config[tool]['map'][itype].values(): 56 | # only removing indicators in the reference sets we actually care aboot 57 | syslog.syslog(syslog.LOG_INFO,"nyx (to be implemented): %s (%s) is outdated, removing it" % (tool_ind,indicator_index[tool][itype][tool_ind])) 58 | if not tool_ind in indicators['outgoing']['to_remove'][itype].keys(): 59 | indicators['outgoing']['to_remove'][itype][tool_ind]=[] 60 | indicators['outgoing']['to_remove'][itype][tool_ind].append(tool) 61 | 62 | for itype in indicators['incoming'].keys(): 63 | for source_ind in indicators['incoming'][itype]: 64 | for tool in indicator_index.keys(): 65 | if itype in indicator_index[tool].keys(): 66 | if indicators['incoming'][itype][source_ind]['confidence'] in ['medium','high'] and not source_ind in indicator_index[tool][itype].keys(): 67 | if not source_ind in indicators['outgoing']['to_add'][itype].keys(): 68 | indicators['outgoing']['to_add'][itype][source_ind]={} 69 | if not tool in indicators['outgoing']['to_add'][itype][source_ind].keys(): 70 | syslog.syslog(syslog.LOG_INFO, "nyx: should be adding %s to %s" % (source_ind,tool)) 71 | indicators['outgoing']['to_add'][itype][source_ind][tool]={ 72 | 'list':config[tool]['map'][itype][indicators['incoming'][itype][source_ind]['confidence']], 73 | 'tags':indicators['incoming'][itype][source_ind]['tags']} 74 | for tag in indicators['incoming'][itype][source_ind]['tags']: 75 | if not tag in tag_index: 76 | tag_index.append(tag) 77 | else: 78 | syslog.syslog(syslog.LOG_ERR,"nyx: WATCH OUT! indicator type (%s) not supported yet, silly!" % itype) 79 | 80 | syslog.syslog(syslog.LOG_INFO,"nyx: starting prepwork") 81 | # prep work starts here. 
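    # By now indicators['outgoing']['to_add'] is keyed type -> value -> tool, and each tool entry carries the
    # destination list plus the source tags, for example (illustrative values only):
    # indicators['outgoing']['to_add']['ip']['203.0.113.7']['qradar'] = {'list': 'Intel.High.IPs', 'tags': ['CRITs']}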
82 | # dumping indicators locally (you know, just in case) 83 | json.dump(indicators,open('temp_indicators.json','w')) 84 | json.dump(indicator_index,open('temp_index.json','w')) 85 | # for palo alto, making sure we have all the tags in place 86 | if 'palo_alto' in config.keys(): 87 | syslog.syslog(syslog.LOG_INFO,"nyx: syncronizing palo alto tags") 88 | palo_tag_index=palo_alto.list_tags(config['palo_alto']) 89 | for tag in tag_index: 90 | palo_tag="dvn_intel_"+tag.replace(" ","_") 91 | if not palo_tag in palo_tag_index: 92 | palo_alto.add_tag(palo_tag,config['palo_alto']) 93 | # and while we're at it, let's make sure we have the tags for the addresses - just in case we're runnig it the first time: 94 | for tag in config['palo_alto']['map']['ip'].values(): 95 | if not tag in palo_tag_index: 96 | palo_alto.add_tag_to_panorama(tag,config['palo_alto']) 97 | syslog.syslog(syslog.LOG_INFO,"nyx: starting to add IPs") 98 | for add_ip in indicators['outgoing']['to_add']['ip']: 99 | for tool in indicators['outgoing']['to_add']['ip'][add_ip].keys(): 100 | if tool == 'palo_alto': 101 | result=palo_alto.add_ip(add_ip,config['palo_alto'],indicators['outgoing']['to_add']['ip'][add_ip]['palo_alto']['list'],indicators['outgoing']['to_add']['ip'][add_ip]['palo_alto']['tags']) 102 | elif tool == 'qradar': 103 | result=qradar.add_ip(add_ip,config['qradar'],indicators['outgoing']['to_add']['ip'][add_ip]['qradar']['list'],indicators['outgoing']['to_add']['ip'][add_ip]['qradar']['tags']) 104 | syslog.syslog(syslog.LOG_INFO,"nyx: starting to add domains") 105 | for add_domain in indicators['outgoing']['to_add']['domain']: 106 | for tool in indicators['outgoing']['to_add']['domain'][add_domain].keys(): 107 | if tool == 'palo_alto': 108 | result=palo_alto.add_domain(add_domain,config['palo_alto'],indicators['outgoing']['to_add']['domain'][add_domain]['palo_alto']['list']) 109 | elif tool == 'qradar': 110 | result=qradar.add_domain(add_domain,config['qradar'],indicators['outgoing']['to_add']['domain'][add_domain]['qradar']['list'],indicators['outgoing']['to_add']['domain'][add_domain]['qradar']['tags']) 111 | #result=qradar.add_to_reference_set(indicators['outgoing']['to_add']['domain'][add_domain]['qradar']['list'], add_domain, indicators['outgoing']['to_add']['domain'][add_domain]['qradar']['tags'], config['qradar']) 112 | 113 | # !!! NOTE !!! 
make sure to recategorize shit here from high to medium and the other way around 114 | 115 | syslog.syslog(syslog.LOG_INFO,"nyx: starting closeut tasks") 116 | palo_alto.pan_commit(config['palo_alto']) -------------------------------------------------------------------------------- /nyx_soltra.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import json 4 | import requests 5 | import time 6 | from common_methods import * 7 | from crits import * 8 | import soltra 9 | from qradar import * 10 | from bro import * 11 | from web_proxy import * 12 | from palo_alto import * 13 | from wise import * 14 | 15 | 16 | if __name__ == "__main__": 17 | 18 | # reading the settings - upon successful read, the functionality will be dependent on the sections in the configuration file 19 | settings=read_configs('nyx.conf') 20 | obs_index={'A':{'medium':[],'high':[]},'Address - ipv4-addr':{'medium':[],'high':[]},'md5':{'medium':[],'high':[]},'email':{'medium':[],'high':[]},'userid':{'medium':[],'high':[]}} 21 | 22 | intel={} # validating that the sets in the configuration file are in QRadar 23 | if 'qradar' in settings.keys(): 24 | validate_qradar(settings['qradar']) 25 | 26 | if 'soltra' in settings.keys(): 27 | intel['medium']=soltra.poll_feed(settings['soltra'],'medium') 28 | intel['high']=soltra.poll_feed(settings['soltra'],'high') 29 | 30 | for csi,ivalues in intel.iteritems(): 31 | for ip in ivalues['AddressObjectType']: 32 | # creating crits-like objects 33 | observable={"type":"Address - ipv4-addr","source":[{"name":"Soltra-"+csi}],'ip':ip['value']} 34 | obs_index['Address - ipv4-addr'][csi].append(ip['value']) 35 | if 'bro' in settings.keys(): 36 | alert_bro(observable,settings['bro']) 37 | if 'qradar' in settings.keys(): 38 | qradar(observable, settings['qradar'],csi+'_reference_sets') 39 | if 'palo_alto' in settings.keys() and csi == 'high': 40 | palo_alto(observable,settings['palo_alto'],'ip_block_list') 41 | if 'moloch' in settings.keys(): 42 | alert_wise(observable, settings['moloch'],csi) 43 | for domain in ivalues['DomainNameObjectType']: 44 | observable={"type":"A","source":[{"name":"Soltra-"+csi}],'domain':domain['value']} 45 | obs_index['A'][csi].append(domain['value']) 46 | if 'bro' in settings.keys(): 47 | alert_bro(observable,settings['bro']) 48 | if 'qradar' in settings.keys(): 49 | qradar(observable, settings['qradar'],csi+'_reference_sets') 50 | if 'palo_alto' in settings.keys() and csi == 'high': 51 | palo_alto(observable,settings['palo_alto'],'url_block_list') 52 | if 'moloch' in settings.keys(): 53 | alert_wise(observable, settings['moloch'],csi) 54 | for file_obj in ivalues['FileObjectType']: 55 | for file_prop in file_obj: 56 | if 'simple_hash_value' in file_prop.keys(): 57 | # congratulations, it's a Hash! 
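# Each file_prop here is one hash entry from the CybOX FileObjectType; it roughly looks like
# {'type': 'MD5', 'simple_hash_value': {'value': 'd41d8cd98f00b204e9800998ecf8427e'}} (illustrative values),
# so the observable built below is keyed by the lower-cased hash type and indexed in the md5 bucket.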
58 | try: 59 | observable={"type":file_prop['type'],"source":[{"name":"Soltra-"+csi}],str(file_prop['type']).lower():file_prop['simple_hash_value']['value'],'filename':False} 60 | obs_index['md5'][csi].append(file_prop['simple_hash_value']['value']) 61 | if 'bro' in settings.keys(): 62 | alert_bro(observable,settings['bro']) 63 | if 'qradar' in settings.keys(): 64 | qradar(observable, settings['qradar'],csi+'_reference_sets') 65 | if 'moloch' in settings.keys(): 66 | alert_wise(observable, settings['moloch'],csi) 67 | 68 | except: 69 | print {"type":file_prop['type'],"source":[{"name":"Soltra-"+csi}],str(file_prop['type']):file_prop['simple_hash_value']['value'],'filename':False} 70 | if 'qradar' in settings.keys(): 71 | qradar_sets_cleanup(obs_index,settings['qradar']) 72 | -------------------------------------------------------------------------------- /palo_alto.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import syslog 3 | import xmltodict 4 | import re 5 | from common_methods import get_sources 6 | 7 | def add(indicator, settings, plist): 8 | """ adds indicator in the medium-confidence watch list in Palo Alto """ 9 | if 'type' in indicator.keys() and indicator['type']=='Address - ipv4-addr': 10 | # adding an ip 11 | res=add_ip_to_panorama(indicator['ip'],get_sources(indicator),settings,settings[plist]) 12 | bad_indicator=indicator['ip'] 13 | elif 'type' in indicator.keys() and indicator['type']=='A': 14 | # adding the domain 15 | res=add_site_to_panorama(indicator['domain'],settings,settings[plist]) 16 | bad_indicator=indicator['domain'] 17 | else: 18 | syslog.syslog('nyx->PAN: I do not know how to handle the following type of observable: %s' % indicator['type']) 19 | 20 | if res and res.status_code == 200 and 'code="20"' in res.text: 21 | syslog.syslog(syslog.LOG_INFO,'nyx->PAN: successfully added %s to %s '% (bad_indicator, settings[plist])) 22 | return True 23 | else: 24 | syslog.syslog(syslog.LOG_ERR,'nyx->PAN: Palo Alto potential issues: %s' % res.text) 25 | return False 26 | 27 | def add_ip_to_pan(ip,source,settings,pan_list): 28 | """ adds an ip in the address group. Will have to create the object first then the address group""" 29 | # adding the adress object 30 | 31 | url=settings['url']+'?type=config&action=set&key='+settings['api_key']+"&xpath=/config/devices/entry/vsys/entry[@name='vsys1']/address/entry[@name='"+ip+"']&element="+ip+"/32" 32 | r_actor=requests.get(url,verify=False) 33 | if r_actor.status_code==200 and 'code="20"' in r_actor.text: 34 | # if we successfully added the addressobject, adding it to the apropriate address group 35 | url=settings['url']+'?type=config&action=set&key='+settings['api_key']+"&xpath=/config/devices/entry/vsys/entry[@name='vsys1']/address-group/entry[@name='"+pan_list+"']&element="+ip+"" 36 | r=requests.get(url,verify=False) 37 | return r 38 | else: 39 | return r_actor 40 | 41 | def add_ip(ip,settings,pan_list,tags): 42 | """ adds an ip in the address group. 
Will have to create the object first then the address group""" 43 | # adding the adress object 44 | tag_str=""+pan_list+"" 45 | for tag in tags: 46 | clean_tag='dvn_intel_'+tag.replace(" ","_") 47 | tag_str+=""+clean_tag+"" 48 | url=settings['url']+'?type=config&action=set&key='+settings['api_key']+"&xpath=/config/shared/address/entry[@name='"+ip+"']&element="+ip+"/32"+tag_str+"" 49 | r_actor=requests.get(url,verify=False) 50 | if r_actor and r_actor.status_code == 200 and 'code="20"' in r_actor.text: 51 | syslog.syslog(syslog.LOG_INFO,'nyx->PAN: successfully added %s to %s '% (ip, pan_list)) 52 | return True 53 | else: 54 | syslog.syslog(syslog.LOG_ERR,'nyx->PAN: problems adding %s to %s '% (ip, pan_list)) 55 | print r_actor.text 56 | return False 57 | 58 | def remove_ip_from_panorama(ip,settings): 59 | """ removes an IP address from panorama """ 60 | url=settings['url']+'?type=config&action=delete&key='+settings['api_key']+"&xpath=/config/shared/address/entry[@name='"+ip+"']" 61 | r_actor=requests.get(url,verify=False) 62 | if r_actor and r_actor.status_code == 200 and 'code="20"' in r_actor.text: 63 | syslog.syslog(syslog.LOG_INFO,'nyx->PAN: successfully removed %s'% (ip)) 64 | return True 65 | else: 66 | syslog.syslog(syslog.LOG_ERR,'nyx->PAN: problems removing %s'% (ip)) 67 | print r_actor.text 68 | return False 69 | 70 | def add_site_to_pan(site,settings,pan_list): 71 | """ adds a url to Palo Alto custom URL list""" 72 | url=settings['url']+'?type=config&action=set&key='+settings['api_key']+"&xpath=/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/profiles/custom-url-category/entry[@name=%27"+pan_list+'%27]/list&element='+site+'' 73 | r=requests.get(url,verify=False) 74 | return r 75 | 76 | def add_domain(site,settings,pan_list): 77 | """ adds a url to Palo Alto custom URL list""" 78 | url=settings['url']+'?type=config&action=set&key='+settings['api_key']+"&xpath=/config/shared/profiles/custom-url-category/entry[@name=%27"+pan_list+'%27]/list&element='+site+'' 79 | r=requests.get(url,verify=False) 80 | # if this is a domain, adding the * subdomains 81 | if len(site.split('.'))<3: 82 | url=settings['url']+'?type=config&action=set&key='+settings['api_key']+"&xpath=/config/shared/profiles/custom-url-category/entry[@name=%27"+pan_list+'%27]/list&element='+"*."+site+'' 83 | r=requests.get(url,verify=False) 84 | if r and r.status_code == 200 and 'code="20"' in r.text: 85 | syslog.syslog(syslog.LOG_INFO,'nyx->PAN: successfully added %s to %s '% (site, pan_list)) 86 | return True 87 | else: 88 | syslog.syslog(syslog.LOG_ERR,'nyx->PAN: problems adding %s to %s '% (site, pan_list)) 89 | print r.text 90 | return False 91 | 92 | def remove_site_from_panorama(): 93 | """ removes a fqdn from panorama """ 94 | 95 | def check_url(site,settings): 96 | # key may vary depending on the system you're connecting to (see note above) 97 | url = settings['url']+'?type=op&key='+settings['api_key']+'&cmd='+site+'' 98 | r = requests.get(url, verify=False) 99 | return r.text 100 | 101 | 102 | def pan_commit(settings): 103 | """Once we are happy with the settings, we will commit them to PAN 104 | partial commits are not as effective as hoped 105 | """ 106 | url=settings['url']+'?type=commit&cmd=&key='+settings['api_key'] 107 | r=requests.get(url,verify=False) 108 | if r.status_code == 200 and 'code="19"'in r.text: 109 | syslog.syslog(syslog.LOG_INFO,'nyx->PAN: successfully sent commit command') 110 | 111 | 112 | def list_ips(settings): 113 | """ returns a list of all the addresses in 
the Palo Alto config - the diff is then done in memory to speed things up"""
114 |     palo_ip_index={}
115 |     url=settings['url']+'/?type=config&action=get&key='+settings['api_key']+'&xpath=/config/shared/address'
116 |     result = xmltodict.parse(requests.get(url, verify=False).text)
117 |     if result['response']['@status']=='success':
118 |         for address in result['response']['result']['address']['entry']:
119 |             if 'tag' in address.keys():
120 |                 if 'ip-netmask' in address.keys():
121 |                     # i know i am making an assumption that we're only dealing with /32 netmasks here
122 |                     if isinstance(address['ip-netmask'],dict):
123 |                         clean_address = address['ip-netmask']['#text'].split('/')[0]
124 |                     elif isinstance(address['ip-netmask'],unicode):
125 |                         clean_address = address['ip-netmask'].split('/')[0]
126 |                     palo_ip_index[clean_address]=address['tag']['member']
127 |                 elif 'ip-range' in address.keys():
128 |                     syslog.syslog(syslog.LOG_INFO,"nyx->PAN: a range probably means it's an internal object [%s]" % address['@name'])
129 |                 elif 'fqdn' in address.keys():
130 |                     syslog.syslog(syslog.LOG_INFO,"nyx->PAN: an fqdn probably means it's an internal object [%s]" % address['@name'])
131 |             else:
132 |                 syslog.syslog(syslog.LOG_INFO,"nyx->PAN: address object does not have any tags associated with it [%s]" % address['@name'])
133 |     return palo_ip_index
134 | 
135 | def list_domains(settings):
136 |     """ returns a list of all the urls in the two intelligence-based custom url lists from the config file """
137 |     palo_url_index={}
138 |     for url_cat in settings['map']['domain'].values():
139 |         url=settings['url']+"/?type=config&action=get&key="+settings['api_key']+"&xpath=/config/shared/profiles/custom-url-category/entry[@name='"+url_cat+"']/list"
140 |         result = xmltodict.parse(requests.get(url, verify=False).text)
141 |         if result['response']['@status']=='success':
142 |             for member in result['response']['result']['list']['member']:
143 |                 if isinstance(member,dict):
144 |                     if member['#text'][0]=="*":
145 |                         palo_domain=member['#text'][2:]
146 |                     else:
147 |                         palo_domain=member['#text']
148 |                     palo_url_index[palo_domain]=url_cat
149 |                 elif isinstance(member,unicode):
150 |                     if member[0]=="*":
151 |                         palo_domain=member[2:]
152 |                     else:
153 |                         palo_domain=member
154 |                     palo_url_index[palo_domain]=url_cat
155 |     return palo_url_index
156 | 
157 | def list_tags(settings):
158 |     """ lists the tags in panorama"""
159 |     palo_tag_index=[]
160 |     url=settings['url']+"/?type=config&action=get&key="+settings['api_key']+"&xpath=/config/shared/tag"
161 |     result = xmltodict.parse(requests.get(url, verify=False).text)
162 |     if result['response']['@status']=='success':
163 |         for tag in result['response']['result']['tag']['entry']:
164 |             palo_tag_index.append(tag['@name'])
165 |     return palo_tag_index
166 | 
167 | def add_tag(tag,settings):
168 |     """ adds an ip in the address group. 
Will have to create the object first then the address group""" 169 | # adding the adress object 170 | url=settings['url']+'?type=config&action=set&key='+settings['api_key']+"&xpath=/config/shared/tag/entry[@name='"+tag+"']&element=color1" 171 | r_actor=requests.get(url,verify=False) 172 | if r_actor and r_actor.status_code == 200 and 'code="20"' in r_actor.text: 173 | syslog.syslog(syslog.LOG_INFO,'nyx->PAN: successfully added tag: %s '% (tag)) 174 | return True 175 | else: 176 | syslog.syslog(syslog.LOG_ERR,'nyx->PAN: problems adding tag: %s '% (tag)) 177 | return False 178 | -------------------------------------------------------------------------------- /plugin_template.py: -------------------------------------------------------------------------------- 1 | import syslog 2 | 3 | def add_ip(ip,settings,intel_list,tags): 4 | """ adds an IP to the pre-established list. The tags might or might not be supported by the control""" 5 | 6 | # your code here 7 | 8 | if "[condition for confirming a successful addition": 9 | syslog.syslog(syslog.LOG_INFO,'nyx->[this_plugin]: successfully added %s to %s'% (ip,intel_list)) 10 | return True 11 | else: 12 | syslog.syslog(syslog.LOG_ERR,'nyx->[this_plugin]: problems adding %s to %s'% (ip,intel_list)) 13 | return False 14 | 15 | def add_domain(domain,settings,intel_list,tags): 16 | """ adds an domain to the pre-established list. The tags might or might not be supported by the control""" 17 | 18 | # your code here 19 | 20 | if "[condition for confirming a successful addition": 21 | syslog.syslog(syslog.LOG_INFO,'nyx->[this_plugin]: successfully added %s to %s'% (ip,intel_list)) 22 | return True 23 | else: 24 | syslog.syslog(syslog.LOG_ERR,'nyx->[this_plugin]: problems adding %s to %s'% (ip,intel_list)) 25 | return False 26 | 27 | def list_ips(settings): 28 | """ retrieves the IP addresses from the control's specific lists for comparison""" 29 | ip_index={} 30 | # your code here 31 | return ip_index 32 | 33 | def list_domains(settings): 34 | """ retrieves the domains from the control's lists for comparison. 
35 | The index should be structured as a dictionary of {domain:intel_list}""" 36 | domain_index={} 37 | # your code here 38 | return domain_index 39 | 40 | def remove_ip(ip,settings): 41 | """ removes an IP from the control""" 42 | 43 | # your code here 44 | 45 | 46 | if "[conditions for successful removal]": 47 | syslog.syslog(syslog.LOG_INFO,'nyx->[this_plugin]:: successfully removed %s'% (ip)) 48 | return True 49 | else: 50 | syslog.syslog(syslog.LOG_ERR,'nyx->[this_plugin]: problems removing %s'% (ip)) 51 | return False 52 | 53 | def remove_domain(domain,settings): 54 | """ removes a domain from the control""" 55 | 56 | # your code here 57 | 58 | 59 | if "[conditions for successful removal]": 60 | syslog.syslog(syslog.LOG_INFO,'nyx->[this_plugin]:: successfully removed %s'% (ip)) 61 | return True 62 | else: 63 | syslog.syslog(syslog.LOG_ERR,'nyx->[this_plugin]: problems removing %s'% (ip)) 64 | return False -------------------------------------------------------------------------------- /qradar.py: -------------------------------------------------------------------------------- 1 | from common_methods import * 2 | import requests 3 | import json 4 | import syslog 5 | 6 | def add(indicator,settings, reference_sets): 7 | """ places the indicator in a reference set""" 8 | reference_set_map=settings[reference_sets] 9 | if 'type' in indicator.keys() and indicator['type']=='Address - ipv4-addr': 10 | # adding an ip 11 | add_to_reference_set(reference_set_map[indicator['type']], indicator['ip'], get_sources(indicator), settings) 12 | return True 13 | elif 'type' in indicator.keys() and indicator['type']=='A': 14 | # adding the domain 15 | add_to_reference_set(reference_set_map[indicator['type']], indicator['domain'], get_sources(indicator), settings) 16 | return True 17 | elif 'md5' in indicator.keys(): 18 | # adding the md5 hash 19 | if indicator['md5']: 20 | add_to_reference_set(reference_set_map['md5'], indicator['md5'], get_sources(indicator), settings) 21 | return True 22 | elif 'x_mailer' in indicator.keys(): 23 | # adding the email address - for now, assuming spearphish, therefore focusing on the <> field 24 | add_to_reference_set(reference_set_map['email'], indicator['from'], get_sources(indicator), settings) 25 | return True 26 | elif 'organization_id' in indicator.keys() and 'email_address' in indicator.keys(): 27 | # adding a target email 28 | if indicator['email_address']: 29 | add_to_reference_set(reference_set_map['email'], indicator['email_address'], get_sources(indicator), settings) 30 | # adding the userid 31 | if indicator['organization_id']: 32 | add_to_reference_set(reference_set_map['userid'], indicator['organization_id'], get_sources(indicator), settings) 33 | return True 34 | else: 35 | syslog.syslog('nyx->QRadar: I do not know how to handle the following observable: %s' % str(indicator)) 36 | return False 37 | 38 | def add_to_reference_set(qset, value, source, settings): 39 | """ Adding the indicator (value) to the qset Reference Set, while maintaining the source""" 40 | headers = {'Version': '4.0', 'Accept': 'application/json','SEC':settings['SEC']} 41 | parameters={'value':value, 'source':source} 42 | resp=requests.post(settings['base_url']+'reference_data/sets/'+qset,headers=headers,params=parameters,verify=False) 43 | # print parameters, resp.text 44 | if resp.status_code==200 or resp.status_code==201: 45 | syslog.syslog(syslog.LOG_INFO,'nyx->QRadar: successfully added to %s to reference set: %s' % (value,qset)) 46 | return True 47 | else: 48 | 
syslog.syslog(syslog.LOG_ERR,str(resp.status_code)+'nyx->QRadar: Unable to add %s to reference set: %s' % (value,qset)) 49 | return False 50 | def add_ip(ip,settings,intel_list,tags): 51 | """ adds an IP to the pre-established list. The tags might or might not be supported by the control""" 52 | return add_to_reference_set(intel_list, ip, tags, settings) 53 | 54 | 55 | def add_domain(domain,settings,intel_list,tags): 56 | """ adds an domain to the pre-established list. The tags might or might not be supported by the control""" 57 | return add_to_reference_set(intel_list, domain, tags, settings) 58 | 59 | def remove_from_reference_set(qset,value,settings): 60 | """ removes an indicator from the qset reference set """ 61 | headers = {'Version': '2.0', 'Accept': 'application/json','SEC':settings['SEC']} 62 | resp=requests.delete(settings['base_url']+'reference_data/sets/'+qset+'/'+value,headers=headers,verify=False) 63 | if resp.status_code==200 or resp.status_code==201: 64 | syslog.syslog(syslog.LOG_INFO,'nyx->QRadar: deleted %s from reference set: %s' % (value,qset)) 65 | return True 66 | else: 67 | syslog.syslog(syslog.LOG_ERR,str(resp.status_code)+'nyx->QRadar: Unable to delete %s from reference set: %s' % (value,qset)) 68 | return False 69 | 70 | def list_reference_set(qset,settings): 71 | """ retrieves the elements of a reference set """ 72 | headers = {'Version': '4.0', 'Accept': 'application/json','SEC':settings['SEC']} 73 | params={'limit':0} 74 | # getting basic metadata 75 | res=requests.get(settings['base_url']+'reference_data/sets/'+qset,headers=headers,params=params,verify=False) 76 | if res.status_code == 200: 77 | metadata=json.loads(res.text) 78 | # trying to get the whole thing: 79 | if metadata['number_of_elements'] > 0: 80 | params['limit']=metadata["number_of_elements"] 81 | resp=requests.get(settings['base_url']+'reference_data/sets/'+qset,headers=headers,params=params,verify=False) 82 | if resp.status_code == 200: 83 | result=json.loads(resp.text)['data'] 84 | return result 85 | else: 86 | syslog.syslog(syslog.LOG_ERR,str(resp.status_code)+'nyx->QRadar: Unable to read reference set: %s' % qset) 87 | return [] 88 | else: 89 | return [] 90 | else: 91 | syslog.syslog(syslog.LOG_ERR,str(resp.status_code)+'nyx->QRadar: Unable to read reference set: %s' % qset) 92 | return [] 93 | 94 | def list_ips(settings): 95 | """ getting a list of all the ips in the qradar intel-related reference sets""" 96 | qradar_index={} 97 | for ip_cat in settings['map']['ip'].values(): 98 | for q_ip in list_reference_set(ip_cat, settings): 99 | qradar_index[q_ip['value']]=ip_cat 100 | return qradar_index 101 | 102 | def list_domains(settings): 103 | """ getting a list of domains from the qradar intel-related reference sets""" 104 | qradar_index={} 105 | for domain_cat in settings['map']['domain'].values(): 106 | for q_domain in list_reference_set(domain_cat, settings): 107 | qradar_index[q_domain['value']]=domain_cat 108 | return qradar_index 109 | 110 | def qradar_sets_cleanup(obs_index, settings): 111 | """ removes the outdated indicators from various sets """ 112 | reference_sets={} 113 | # Checking IP addresses 114 | #high confidence first: 115 | qset=settings['high_reference_sets']['Address - ipv4-addr'] 116 | for ipaddr in list_reference_set(qset,settings): 117 | if not ipaddr['value'] in obs_index['Address - ipv4-addr']['high']: 118 | # this is an orphan value, needs to be removed 119 | remove_from_reference_set(qset,ipaddr['value'],settings) 120 | 121 | # checking medium confidence IPs 122 
122 | qset=settings['medium_reference_sets']['Address - ipv4-addr']
123 | for ipaddr in list_reference_set(qset,settings):
124 | if not ipaddr['value'] in obs_index['Address - ipv4-addr']['medium']:
125 | # this is an orphan value, needs to be removed
126 | remove_from_reference_set(qset,ipaddr['value'],settings)
127 | 
128 | # checking Domains / URLs
129 | # high confidence domains
130 | qset=settings['high_reference_sets']['A']
131 | for domain in list_reference_set(qset,settings):
132 | if not domain['value'] in obs_index['A']['high']:
133 | # this is an orphan value, needs to be removed
134 | remove_from_reference_set(qset,domain['value'],settings)
135 | 
136 | # medium confidence domains
137 | qset=settings['medium_reference_sets']['A']
138 | for domain in list_reference_set(qset,settings):
139 | if not domain['value'] in obs_index['A']['medium']:
140 | # this is an orphan value, needs to be removed
141 | remove_from_reference_set(qset,domain['value'],settings)
142 | 
143 | # Checking Hashes
144 | # high confidence hashes
145 | qset=settings['high_reference_sets']['md5']
146 | for hash in list_reference_set(qset,settings):
147 | if not hash['value'] in obs_index['md5']['high']:
148 | # this is an orphan value, needs to be removed
149 | remove_from_reference_set(qset,hash['value'],settings)
150 | 
151 | # medium confidence hashes
152 | qset=settings['medium_reference_sets']['md5']
153 | for hash in list_reference_set(qset,settings):
154 | if not hash['value'] in obs_index['md5']['medium']:
155 | # this is an orphan value, needs to be removed
156 | remove_from_reference_set(qset,hash['value'],settings)
157 | 
158 | # checking emails
159 | # high confidence emails
160 | qset=settings['high_reference_sets']['email']
161 | for email in list_reference_set(qset,settings):
162 | if not email['value'] in obs_index['email']['high']:
163 | # this is an orphan value, needs to be removed
164 | remove_from_reference_set(qset,email['value'],settings)
165 | 
166 | # medium confidence emails
167 | qset=settings['medium_reference_sets']['email']
168 | for email in list_reference_set(qset,settings):
169 | if not email['value'] in obs_index['email']['medium']:
170 | # this is an orphan value, needs to be removed
171 | remove_from_reference_set(qset,email['value'],settings)
172 | 
173 | # checking userid
174 | # high confidence user_ids
175 | qset=settings['high_reference_sets']['userid']
176 | for userid in list_reference_set(qset,settings):
177 | if not userid['value'] in obs_index['userid']['high']:
178 | # this is an orphan value, needs to be removed
179 | remove_from_reference_set(qset,userid['value'],settings)
180 | 
181 | def validate_qradar(settings):
182 | """Validates that the required reference sets exist in QRadar, creating any that are missing. Keep in mind the categorization matrix,
183 | and use the indicators in the appropriate buckets.
For this example here, we are using the 184 | following indicator buckets: 185 | -> Intel.High.Hashes, Intel.Medium.Hashes for MD5s 186 | -> Intel.High.IPs, Intel.Medium.IPs for IP addresses (ipv4) 187 | -> Intel.High.Domains, Intel.Medium.Domains for FQDNs""" 188 | headers = {'Version': '2.0', 'Accept': 'application/json','SEC':settings['SEC']} 189 | 190 | resp=requests.get(settings['base_url']+'reference_data/sets',headers=headers, verify=False) 191 | qradar_sets=json.loads(resp.text) 192 | for vset in settings['sets_to_validate'].keys(): 193 | validated=False 194 | for qset in qradar_sets: 195 | if qset['name']==vset: 196 | validated=True 197 | if not validated: 198 | # creating reference sets not already in QRadar 199 | parameters={'name':vset,'element_type':settings['sets_to_validate'][vset]} 200 | resp=requests.post(settings['base_url']+'reference_data/sets',headers=headers,params=parameters,verify=False) 201 | if resp.status_code==201: 202 | syslog.syslog(syslog.LOG_INFO,'nyx->QRadar: Created reference set: %s' % vset) 203 | return True 204 | else: 205 | syslog.syslog(syslog.LOG_ERR,'nyx->QRadar: Unable to create additional reference set: %s' % vset) 206 | exit(-1) -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Nyx 2 | libtaxii>=1.1.107 3 | stix>=1.2.0.1.dev1 4 | requests>=2.4.3 -------------------------------------------------------------------------------- /soltra.py: -------------------------------------------------------------------------------- 1 | from common_methods import * 2 | import libtaxii as t 3 | import libtaxii.clients as tc 4 | import libtaxii.messages_11 as tm11 5 | from libtaxii.constants import * 6 | from stix.core import STIXPackage 7 | import requests 8 | import json 9 | import uuid 10 | 11 | 12 | def poll_feed(settings,subscription): 13 | """ polls a TAXII feed""" 14 | client = tc.HttpClient() 15 | client.set_auth_type(tc.HttpClient.AUTH_BASIC) 16 | client.set_use_https(True) 17 | client.set_auth_credentials({'username': settings['username'], 'password': settings['password']}) 18 | 19 | msg_id=uuid.uuid4().hex 20 | poll_request1 = tm11.PollRequest(message_id=msg_id,collection_name=settings['subscriptions'][subscription]['collection_name'],subscription_id=settings['subscriptions'][subscription]['subscription_id']) 21 | poll_xml=poll_request1.to_xml() 22 | http_resp = client.call_taxii_service2(settings['server'], '/taxii-data/', VID_TAXII_XML_11, poll_xml) 23 | taxii_message = t.get_message_from_http_response(http_resp, poll_request1.message_id) 24 | observables={} 25 | 26 | indicators = json.loads(taxii_message.to_json()) 27 | if 'content_blocks' in indicators.keys(): 28 | for indicator in indicators['content_blocks']: 29 | open('/tmp/indicator.xml','w').write(indicator['content']) 30 | indi=STIXPackage.from_xml('/tmp/indicator.xml').to_dict() 31 | if 'observables' in indi.keys(): 32 | for obs in indi['observables']['observables']: 33 | if 'object' in obs.keys(): 34 | ot=obs['object']['properties']['xsi:type'] 35 | if ot in settings['supported_objects'].keys() and not ot in observables.keys(): 36 | observables[ot]=[] 37 | if ot in settings['supported_objects'].keys() and settings['supported_objects'][ot] in obs['object']['properties'].keys(): 38 | # note, you will only be able to process one property per object type, but you also know there's only one property you can process 39 | try: 40 | 
observables[ot].append(obs['object']['properties'][settings['supported_objects'][ot]])
41 | except KeyError:
42 | print "[-] unable to extract the expected property from this observable:"
43 | print settings['supported_objects'][ot], "not in:", obs['object']
44 | return observables
45 | 
46 | 
47 | def list_indicators(settings):
48 | """exports a list of the indicators in CRITs"""
49 | params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
50 | url=settings['url']+'indicators/'
51 | total=settings['offset']
52 | indicators=[]
53 | while params['offset'] <= total:
54 | #print total,params['offset']
55 | r = requests.get(url, params=params, verify=False)
56 | if r.status_code == 200:
57 | res=r.json()
58 | for potential_result in res['objects']:
59 | # only getting indicators meaning something
60 | if potential_result['campaign'] and get_intel_confidence(potential_result) in ['medium','high']:
61 | indicators.append(potential_result)
62 | params['offset']+=settings['offset']
63 | total=res['meta']['total_count']
64 | return indicators
65 | 
66 | def list_ips(settings, limit=0):
67 | """exports a list of the IPs in CRITs, basing the confidence on the campaign confidence"""
68 | ips=[]
69 | params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
70 | url=settings['url']+'ips/'
71 | #total=settings['offset']
72 | total=limit
73 | while params['offset'] <= total:
74 | #print total,params['offset']
75 | r = requests.get(url, params=params, verify=False)
76 | if r.status_code == 200:
77 | res=r.json()
78 | for potential_result in res['objects']:
79 | # only getting indicators meaning something - don't care about low and unknowns
80 | if potential_result['campaign'] and get_intel_confidence(potential_result) in ['medium','high']:
81 | #print potential_result
82 | ips.append(potential_result)
83 | params['offset']+=settings['offset']
84 | if not limit:
85 | total=res['meta']['total_count']
86 | return ips
87 | 
88 | def list_fqdns(settings,limit=0):
89 | """exports a list of the FQDNs in CRITs, basing the confidence on the campaign confidence"""
90 | fqdns=[]
91 | params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
92 | url=settings['url']+'domains/'
93 | if limit:
94 | total=limit
95 | else:
96 | total=settings['offset']
97 | while params['offset'] <= total:
98 | #print total,params['offset']
99 | r = requests.get(url, params=params, verify=False)
100 | if r.status_code == 200:
101 | res=r.json()
102 | for potential_result in res['objects']:
103 | # only getting indicators meaning something - don't care about lows and unknowns
104 | if potential_result['campaign'] and get_intel_confidence(potential_result) in ['medium','high']:
105 | #print potential_result
106 | fqdns.append(potential_result)
107 | params['offset']+=settings['offset']
108 | if not limit:
109 | total=res['meta']['total_count']
110 | return fqdns
111 | 
112 | def list_samples(settings,limit=0):
113 | """exports a list of the samples in CRITs, basing the confidence on the campaign confidence"""
114 | samples=[]
115 | params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
116 | url=settings['url']+'samples/'
117 | if limit:
118 | total=limit
119 | else:
120 | total=settings['offset']
121 | while params['offset'] <= total:
122 | #print total,params['offset']
123 | r = requests.get(url, params=params, verify=False)
124 | if r.status_code == 200:
125 | res=r.json()
126 | for potential_result in res['objects']:
127 | # only getting indicators meaning something - don't care about lows and unknowns
128 | if potential_result['campaign'] and get_intel_confidence(potential_result) in ['medium','high'] and (potential_result['md5'] or potential_result['filename']):
129 | #print potential_result
130 | samples.append(potential_result)
131 | params['offset']+=settings['offset']
132 | if not limit:
133 | total=res['meta']['total_count']
134 | return samples
135 | 
136 | def list_targets(settings):
137 | """exports a list of the targets in CRITs"""
138 | targets=[]
139 | params={'username':settings['username'],'api_key':settings['api_key'],'limit':settings['offset'],'offset':0}
140 | url=settings['url']+'targets/'
141 | total=settings['offset']
142 | while params['offset'] <= total:
143 | #print total,params['offset']
144 | r = requests.get(url, params=params, verify=False)
145 | if r.status_code == 200:
146 | res=r.json()
147 | for potential_result in res['objects']:
148 | targets.append(potential_result)
149 | params['offset']+=settings['offset']
150 | total=res['meta']['total_count']
151 | return targets
--------------------------------------------------------------------------------
/web_proxy.py:
--------------------------------------------------------------------------------
1 | def add_to_proxy(indicator, settings):
2 | """adds the domain to the blacklist for the proxy (only domains are supported)"""
3 | with open(settings['filename'],'a+') as web_proxy_file:
4 | # making sure that the indicator is acceptable
5 | if 'type' in indicator.keys() and indicator['type']=='A':
6 | web_proxy_file.write(indicator['domain']+'\n')
7 | return True
--------------------------------------------------------------------------------
/wise.py:
--------------------------------------------------------------------------------
1 | from common_methods import *
2 | def alert_wise(indicator, settings,csi):
3 | """ adds indicator to wise service file
4 | # To load local files, set a unique section title starting with file:
5 | # Type should be ip, domain, md5, email
6 | # Files are assumed to be CSV or use format=tagger
7 | [file:blah]
8 | file=/tmp/test.ips
9 | type=ip
10 | tags=TAG1,TAG2
11 | #column=1
12 | """
13 | # for now, we're mapping really basic elements:
14 | 
15 | indicator_map=settings['indicator_map']
16 | 
17 | if 'type' in indicator.keys() and indicator['type']=='Address - ipv4-addr':
18 | # adding an ip
19 | with open(settings['filename']+indicator_map[indicator['type']]+csi+'.txt','a+') as wise_file:
20 | wise_file.write(indicator['ip']+'\n')
21 | return True
22 | elif 'type' in indicator.keys() and indicator['type']=='A':
23 | # adding the domain
24 | with open(settings['filename']+indicator_map[indicator['type']]+csi+'.txt','a+') as wise_file:
25 | wise_file.write(indicator['domain']+'\n')
26 | return True
27 | elif 'md5' in indicator.keys():
28 | # adding the md5 hash
29 | with open(settings['filename']+indicator_map['md5']+csi+'.txt','a+') as wise_file:
30 | if indicator['md5']:
31 | wise_file.write(indicator['md5']+'\n')
32 | return True
33 | elif 'x_mailer' in indicator.keys():
34 | # adding the email address - for now, assuming spearphish, therefore focusing on the <> field
35 | with open(settings['filename']+indicator_map[indicator['type']]+csi+'.txt','a+') as wise_file:
36 | wise_file.write(indicator['from']+'\n')
37 | return True
38 | else:
39 | syslog.syslog('nyx->wise: I do not know how to handle the following type of observable: %s' % indicator.get('type'))
40 | return False --------------------------------------------------------------------------------
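For orientation, here is a minimal driver sketch (not part of the repository) showing how the plugin functions above could be wired together by a scheduled job. The function names and the settings keys ('SEC', 'base_url', 'high_reference_sets', 'indicator_map', ...) come from the modules listed above; everything else - the JSON config loading, the 'high' subscription name, and the 'AddressObjectType' observable key - is an assumption made purely for illustration, so consult nyx.conf.example and nyx.py for the actual wiring.

# hypothetical_driver.py - illustration only; the real entry point is nyx.py
import json

import qradar
import soltra
import wise


def run_once(config_path='nyx.conf'):
    # assumption: the configuration is JSON with one section per plugin
    with open(config_path) as config_file:
        cfg = json.load(config_file)

    # make sure the QRadar reference sets exist before pushing indicators
    qradar.validate_qradar(cfg['qradar'])

    # poll a Soltra/TAXII subscription (assumed here to be named 'high')
    observables = soltra.poll_feed(cfg['soltra'], 'high')

    # fan the IP observables out to the controls
    for ip in observables.get('AddressObjectType', []):
        qradar.add_ip(ip, cfg['qradar'],
                      cfg['qradar']['high_reference_sets']['Address - ipv4-addr'], 'nyx')
        wise.alert_wise({'type': 'Address - ipv4-addr', 'ip': ip}, cfg['wise'], 'high')


if __name__ == '__main__':
    run_once()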