├── .gitignore ├── .npmignore ├── .travis.yml ├── LICENSE ├── Makefile ├── README.md ├── bin └── streamstash ├── examples ├── Dockerfile ├── advanced.js ├── basic.js ├── custom_parser.js ├── project │ ├── config.js │ └── package.json ├── relp_basic.js ├── relp_container.js ├── tcp.js └── telemetry.js ├── index.js ├── lib ├── EventContainer.js ├── Logger.js ├── StreamStash.js ├── Telemetry.js ├── index.js ├── inputs │ ├── BaseInput.js │ ├── HTTPInput.js │ ├── ProxyProtocol.js │ ├── RELPInput.js │ ├── SMTPInput.js │ ├── SocketInput.js │ ├── StaticFileInput.js │ ├── StdInInput.js │ └── index.js ├── outputs │ ├── BlackholeOutput.js │ ├── ElasticSearchOutput.js │ ├── StdOutOutput.js │ └── index.js └── parsers │ ├── goAuditParser.js │ ├── goAuditParserConstants.js │ ├── httpCombinedAccessParser.js │ ├── httpVHostCombinedAccessParser.js │ ├── index.js │ ├── jsonParser.js │ ├── relpSyslogParser.js │ ├── sshdParser.js │ └── sudoParser.js ├── package.json ├── sendmail.js └── test ├── EventContainer.test.js ├── Logger.test.js ├── StreamStash.test.js ├── inputs └── StdIn.test.js ├── outputs └── StdOut.test.js └── parsers ├── goAuditParser.test.js ├── httpCombinedAccessParser.test.js ├── httpVHostCombinedAccessParser.test.js ├── index.test.js ├── jsonParser.test.js ├── relpSyslogParser.test.js ├── sshdParser.test.js ├── sudoParser.test.js └── util.js /.gitignore: -------------------------------------------------------------------------------- 1 | *.dot 2 | coverage 3 | node_modules 4 | .idea 5 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | examples 2 | test 3 | .travis.yml 4 | Makefile 5 | .gitignore 6 | docs 7 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - 8 4 | - node 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | (The MIT License) 2 | 3 | Copyright (c) 2013 Betable, inc 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | 'Software'), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 21 | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 22 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
23 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: test 2 | 3 | test: 4 | NODE_ENV=test node_modules/ppunit/bin/ppunit -T none -R list 5 | 6 | test-cov: 7 | NODE_ENV=test node node_modules/istanbul/lib/cli.js --print=detail cover \ 8 | node_modules/ppunit/bin/ppunit -- -T none -R list 9 | 10 | test-cov-html: 11 | NODE_ENV=test node node_modules/istanbul/lib/cli.js --print=summary cover \ 12 | node_modules/ppunit/bin/ppunit -- -T none -R list 13 | 14 | @echo "" 15 | @echo "****************************************************************************************" 16 | @echo "Results: file://$$PWD/coverage/lcov-report/index.html" 17 | @echo "****************************************************************************************" 18 | 19 | docs: 20 | rm -rf ./docs 21 | node_modules/jsdoc/jsdoc.js -r -d docs ./lib 22 | 23 | .PHONY: all test test-cov test-cov-html docs 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/nbrownus/streamstash.png?branch=master)](https://travis-ci.org/nbrownus/streamstash) 2 | 3 | ### StreamStash 4 | 5 | `streamstash` is a log aggregating, filtering, redirecting service. A lightweight [Node.js](http://nodejs.org/) 6 | alternative to projects like [logstash](http://logstash.net/), [flume](http://flume.apache.org/), 7 | [fluentd](http://fluentd.org/), etc. 8 | 9 | 10 | ### Usage 11 | 12 | I typically setup a separate repo with my `config.js` and `package.json` that lists `streamstash` as a 13 | dependency. Deploy that repo to my servers and run `npm install`. The last step is to run `streamstash` 14 | 15 | /node_modules/streamstash/bin/streamstash /config.js 16 | 17 | An example of this can be found [here](examples/project) 18 | 19 | ### Inputs 20 | 21 | Inputs are things that slurp event data from different places and provides them to `streamstash` for filtering 22 | (by filters) and outputting (by outputs). 23 | 24 | Inputs packaged with `streamstash`: 25 | 26 | - RELP: Provides an easy and reliable integration with rsyslog. Uses [rsyslogs](http://www.rsyslog.com/) Reliable Event 27 | Logging Protocol. For more info see the [relp webpage](http://www.rsyslog.com/doc/relp.html) 28 | - StdIn: Takes data received from standard input and creates events for them 29 | - Socket: A very customizable connection oriented socket input 30 | - StaticFile: Simple static file ingestion, main use case is to run streamstash per file you need to ingest 31 | - SMTP: Turns email received via SMTP into events 32 | - HTTP: Turns http requests into events. 33 | 34 | Example usage can be found in the [examples folder](examples) 35 | 36 | ### Filters 37 | 38 | Filters are javascript functions that allow you to modify event data or control the flow of an event through the system. 39 | 40 | The main reason this project exists was to provide users a "real" scripting language to use when working with event 41 | data. If you have ever tried using logstash you may have gotten irritated with trying to do anything more than basic 42 | data manipulation, this is mainly because you were working in almost ruby but not quite. 43 | 44 | Every event will contain the following properties in the data object: 45 | 46 | - `source`: The input plugin that generated the event. 
47 | - `message`: The event message. 48 | - `timestamp`: The time the event occurred or was received by the input plugin. 49 | 50 | A simple filter example: 51 | 52 | addFilter(function (event) { 53 | // Add a gotHere property to the event data 54 | event.data.gotHere = 'Yay!' 55 | 56 | // Allow the event to progress to the next filter or on to output plugins 57 | event.next() 58 | }) 59 | 60 | A little more advanced, this one is named: 61 | 62 | addFilter('cool', function (event) { 63 | // Drop all events with a 'stupid event' message, these events will never see an output plugin 64 | if (event.data.message == 'stupid event') { 65 | // Be sure to return anytime you may continue processing the event to avoid weird issues 66 | return event.cancel() 67 | } 68 | 69 | // Have any events with a 'high priority' message skip any other filters and go directly to output plugins 70 | if (event.data.message == 'high priority') { 71 | return event.complete() 72 | } 73 | 74 | // All other events get here 75 | event.data.superAwesome = 'sure is' 76 | 77 | // Want to rename a field to have a crazy character? 78 | event.data['@message'] = event.data.message 79 | delete event.data.message 80 | 81 | // Since this is the last thing in the filter there is no need to return 82 | event.next() 83 | }) 84 | 85 | Filters get an integer name by default. If you want better error and telemetry reporting, give them a name. 86 | 87 | Remember, this is all pure Node.js. You can do any crazy exotic thing you want. Just remember that the more you do the 88 | slower each event is processed. 89 | 90 | ### Outputs 91 | 92 | Outputs are exactly what they sound like. The output an event to a place. 93 | 94 | Outputs packaged with `streamstash`: 95 | 96 | - `ElasticSearch`: Outputs event data to your [ElasticSearch](http://www.elasticsearch.org/overview) cluster. 97 | Works great with [kibana](http://www.elasticsearch.org/overview/kibana/) 98 | - `StdOut`: Writes event data to standard output 99 | 100 | Example usage can be found in the [examples folder](examples) 101 | 102 | ### Telemetry 103 | 104 | If enabled, `streamstash` will output interesting stats to [statsite](https://github.com/armon/statsite), 105 | [statsd](https://github.com/etsy/statsd), or any other service that conforms to the `statsd` line protocol. 106 | 107 | General stats 108 | 109 | - `events.processing` A gauge of how many events are currently being processed 110 | - `events.total` A gauge of how many events have been processed since the start of the current process 111 | - `filter.` A timer of how long each event took in each filter. Typically a histogram is created from the 112 | data so you can see p99, p95, mean, max, etc of the time spent in each filter. 113 | 114 | Some plugins may also emit stats. 115 | 116 | `RELP` 117 | 118 | - `inputs..connection` A gauge of the number of current connections being handled. 119 | 120 | Example usage can be found in the [examples folder](examples) 121 | 122 | ### Docker 123 | 124 | Make sure to replace `relp_basic.js` with your own `config.js` in the `Dockerfile` CMD section. 125 | Also, move Dockerfile from the examples folder into home directory or fix file structure in your commands. 126 | 127 | Building and running: 128 | 129 | ``` 130 | docker build -t streamstash . 
131 | docker run -p 9200:9200 -p 9300:9300 -p 5514:5514 streamstash 132 | ``` 133 | 134 | ### TODO 135 | 136 | - Need to think about outputs for special events (send interesting thing to slack, email, etc) 137 | - Add some helpers for things like renaming fields in filters? 138 | -------------------------------------------------------------------------------- /bin/streamstash: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | var StreamStash = require('../'), 4 | util = require('util'), 5 | path = require('path'), 6 | fs = require('fs'), 7 | dashdash = require('dashdash'), 8 | StatsDClient = require('statsd-client'), 9 | plugins = { 10 | inputs: {}, 11 | outputs: {} 12 | } 13 | 14 | //TODO: We had -v[vv] 15 | 16 | //process.stdin.resume() 17 | 18 | var optParser = new dashdash.Parser({ 19 | options: [ 20 | { 21 | names: ['help', 'h'], 22 | type: 'bool', 23 | help: 'Print this help and exit' 24 | }, 25 | { 26 | names: ['kill-timeout', 'k'], 27 | type: 'number', 28 | default: 30, 29 | help: 'The amount of time to wait for a shutdown before forcing an exit, default 30 seconds' 30 | }, 31 | { 32 | names: ['log-level', 'l'], 33 | type: 'string', 34 | help: 'Set the minimum log level to output; debug, info, error, or off. Defaults to info.', 35 | helpArg: 'LEVEL', 36 | default: 'info' 37 | } 38 | ] 39 | }) 40 | 41 | Object.keys(StreamStash.inputs).forEach(function (plugin) { 42 | if (!StreamStash.inputs[plugin].NAME) { 43 | console.error(plugin + " did not have a basename, ignoring") 44 | } 45 | 46 | plugins.inputs[StreamStash.inputs[plugin].NAME.toLowerCase()] = StreamStash.inputs[plugin] 47 | }) 48 | 49 | Object.keys(StreamStash.outputs).forEach(function (plugin) { 50 | if (!StreamStash.outputs[plugin].NAME) { 51 | console.error(plugin + " did not have a basename, ignoring") 52 | } 53 | 54 | plugins.outputs[StreamStash.outputs[plugin].NAME.toLowerCase()] = StreamStash.outputs[plugin] 55 | }) 56 | 57 | try { 58 | var opts = optParser.parse(process.argv) 59 | } catch (error) { 60 | printHelp(error.message) 61 | } 62 | 63 | if (opts.help) { 64 | printHelp() 65 | } 66 | 67 | var levelName = opts['log_level'].toUpperCase() 68 | 69 | if (!StreamStash.Logger.LEVEL.hasOwnProperty(levelName)) { 70 | printHelp('Invalid log level `' + opts['log_level'] + '`') 71 | } 72 | 73 | var logger = new StreamStash.Logger({ level: StreamStash.Logger.LEVEL[levelName] }), 74 | streamStash = new StreamStash({ logger: logger }) 75 | 76 | // Build up our globals, helpers for basic usage of StreamStash 77 | global.StreamStash = StreamStash 78 | global.streamStash = streamStash 79 | global.logger = logger 80 | global.parsers = StreamStash.parsers 81 | 82 | /** 83 | * Adds a filter to the filter chain 84 | * TODO: Document the filter function args 85 | * 86 | * @param {String} [name] Name of the filter, will show in telemetry, among other places. 
87 | * Avoid using `.` unless you want your telemetry to be all screwy 88 | * @param {Function} func A filter function to add 89 | */ 90 | global.addFilter = function (name, func) { 91 | streamStash.addFilter(name, func) 92 | } 93 | 94 | /** 95 | * Adds an input plugin to be used 96 | * 97 | * @param {String|Function} name Either a built in input name, path to an input plugin, or constructor 98 | * @param {Object} [options] Configuration options for the plugin, refer to the plugin for more information 99 | */ 100 | global.addInputPlugin = function (name, options) { 101 | var ctor = loadPlugin('inputs', name), 102 | baseOptions = { 103 | streamStash: streamStash, 104 | EventContainer: StreamStash.EventContainer, 105 | logger: logger 106 | } 107 | 108 | streamStash.addInputPlugin(new ctor(util._extend(baseOptions, options))) 109 | } 110 | 111 | /** 112 | * Adds an output plugin to be used 113 | * 114 | * @param {String|Function} name Either a built in output name, path to an output plugin, or constructor 115 | * @param {Object} [options] Configuration options for the plugin, refer to the plugin for more information 116 | */ 117 | global.addOutputPlugin = function (name, options) { 118 | var ctor = loadPlugin('outputs', name), 119 | baseOptions = { 120 | streamStash: streamStash, 121 | logger: logger 122 | } 123 | 124 | streamStash.addOutputPlugin(new ctor(util._extend(baseOptions, options))) 125 | } 126 | 127 | /** 128 | * Enables telemetry to be sent to a statsd compliant service 129 | * 130 | * @param {String} host The host to send telemetry to, can also be `http://...` to send over http 131 | * @param {Number} port The port to connect to the remote service on 132 | * @param {String} [prefix=streamstash] Prefix to use for all stats 133 | * @param {Boolean} [tcp=false] If true TCP will be used, if false UDP will be used 134 | */ 135 | global.telemetry = function (host, port, prefix, tcp) { 136 | var statsdClient = new StatsDClient({ 137 | host: host, 138 | port: port, 139 | tcp: tcp, 140 | prefix: prefix || 'streamstash' 141 | }) 142 | 143 | streamStash.enableTelemetry(statsdClient) 144 | } 145 | 146 | // Setup config files 147 | var configPaths = opts._args, 148 | files = [], 149 | pathRegex = new RegExp('\\.js$') 150 | 151 | try { 152 | configPaths.forEach(function (testPath) { 153 | files = files.concat(lookupFiles(testPath)) 154 | }) 155 | } catch (error) { 156 | var useError 157 | 158 | if (error.code === 'ENOENT') { 159 | useError = 'No such file or directory: ' + error.path 160 | } else { 161 | useError = error.message 162 | } 163 | 164 | console.error(useError) 165 | process.exit(1) 166 | } 167 | 168 | if (files.length === 0) { 169 | printHelp('No config files or directories were provided') 170 | } 171 | 172 | files.forEach(function (filePath) { 173 | require(path.resolve(filePath)) 174 | }) 175 | 176 | // Set up a signal handler for SIGINT to gracefully shutdown 177 | streamStash.killTimeout = opts['kill_timeout'] 178 | 179 | process.on('SIGINT', function () { 180 | logger.info('StreamStash', 'Got SIGINT, starting shutdown') 181 | streamStash.shutdown('SIGINT after shutdown request') 182 | }) 183 | 184 | process.on('SIGTERM', function () { 185 | logger.info('StreamStash', 'Got SIGTERM, starting shutdown') 186 | streamStash.shutdown('SIGTERM after shutdown request') 187 | }) 188 | 189 | streamStash.once('stopped', function () { 190 | logger.info('StreamStash', 'Exited gracefully') 191 | process.exit(0) 192 | }) 193 | 194 | // Start it up 195 | streamStash.start() 196 | 197 | 
process.on('uncaughtException', function (error) { 198 | logger.error('uncaught_exception', error, { error: error.stack || error }) 199 | streamStash.shutdown('Uncaught exception occurred') 200 | }) 201 | 202 | /** 203 | * Tries to load a plugin 204 | * 205 | * @param {String} type Either `inputs` or `outputs` for whichever type is being loaded 206 | * @param {String|Function} name Either an built in plugin name, a path to a plugin to load, or a constructor 207 | * 208 | * @returns {Function} A constructor for the plugin 209 | */ 210 | function loadPlugin (type, name) { 211 | var ctor, 212 | useName = name.toLowerCase() 213 | 214 | //If we got a function in the first place, use it 215 | if (typeof name === 'function') { 216 | return name 217 | 218 | //Look for our own plugin first 219 | } else if (plugins[type][useName]) { 220 | ctor = plugins[type][useName] 221 | 222 | //See if it's an external module 223 | } else { 224 | try { 225 | ctor = require(name) 226 | } catch (err) { 227 | printHelp('Invalid ' + type.slice(0, -1) + ' plugin "' + name + '"') 228 | } 229 | } 230 | 231 | return ctor 232 | } 233 | 234 | /** 235 | * Looks up files within a path, optionally recursively 236 | * 237 | * @param {String} filePath The file path to search 238 | * 239 | * @returns {Array.} An array of files found within filePath 240 | */ 241 | function lookupFiles (filePath) { 242 | var found = [], 243 | useFilePath = fs.existsSync(filePath) ? filePath : filePath + '.js' 244 | 245 | var stat = fs.statSync(useFilePath) 246 | if (stat.isFile()) { 247 | return [useFilePath] 248 | } 249 | 250 | fs.readdirSync(useFilePath).forEach(function (file) { 251 | var useFile = path.join(useFilePath, file), 252 | stat = fs.statSync(useFile) 253 | 254 | if (stat.isDirectory()) { 255 | found = found.concat(lookupFiles(useFile)) 256 | return 257 | } 258 | 259 | if (!stat.isFile() || !pathRegex.test(useFile) || path.basename(useFile)[0] == '.') { 260 | return 261 | } 262 | 263 | found.push(useFile) 264 | }) 265 | 266 | return found 267 | } 268 | 269 | /** 270 | * Prints the help and optionally an error 271 | * 272 | * @param {String} [error] An error message to print before the help 273 | */ 274 | function printHelp (error) { 275 | var status = 0, 276 | name, 277 | plugin 278 | 279 | console.log() 280 | 281 | if (error) { 282 | console.error('ERROR:', error) 283 | console.log() 284 | status = 1 285 | } 286 | var width = (process.stdout.getWindowSize) ? process.stdout.getWindowSize()[0] : 120 287 | 288 | console.log('Usage: streamstash [OPTIONS] [...]\n') 289 | console.log('Options:') 290 | console.log(optParser.help({ includeEnv: true, maxCol: width }).trimRight()) 291 | 292 | console.log('\nKnown input plugins') 293 | for (name in plugins.inputs) { 294 | plugin = plugins.inputs[name] 295 | console.log(' ' + plugin.NAME + ' - ' + plugin.DESCRIPTION) 296 | } 297 | 298 | console.log('\nKnown output plugins') 299 | for (name in plugins.outputs) { 300 | plugin = plugins.outputs[name] 301 | console.log(' ' + plugin.NAME + ' - ' + plugin.DESCRIPTION) 302 | } 303 | 304 | process.exit(status) 305 | } 306 | -------------------------------------------------------------------------------- /examples/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:4-onbuild 2 | 3 | COPY package.json /src/package.json 4 | RUN cd /src; npm install 5 | 6 | COPY . 
/src 7 | 8 | EXPOSE 9200 9 | EXPOSE 9300 10 | EXPOSE 5514 11 | 12 | # Make sure to replace relp_container.js with your own config.js here - or edit this one. 13 | CMD ["/src/bin/streamstash","/src/examples/relp_container.js"] 14 | -------------------------------------------------------------------------------- /examples/advanced.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Takes input from rsyslog via the omrelp output plugin and outputs event objects to elasticsearch 3 | */ 4 | 5 | var util = require('util') 6 | 7 | addInputPlugin( 8 | 'relp', 9 | { 10 | host: 'localhost', 11 | port: 5514, 12 | // Rename the input, this will show in logs and in telemetry 13 | name: 'my-relp-input' 14 | } 15 | ) 16 | 17 | addOutputPlugin( 18 | 'elasticsearch', 19 | { 20 | typeField: '@type', 21 | timstampField: '@timestamp', 22 | hostname: 'my-es-host.com', 23 | port: '9200', 24 | batchSize: 500, 25 | name: 'main-es' 26 | } 27 | ) 28 | 29 | addFilter(function (event) { 30 | var result = parsers.jsonParser(event) 31 | if (result === false) { 32 | return event.done() 33 | } 34 | 35 | // If the event message is literally 'useless' then cancel the event. The event will not be output anywhere 36 | if (event.data.message === 'useless') { 37 | return event.cancel() 38 | } 39 | 40 | // If the event came from apache2 try and parse combined access logs 41 | if (event.data.service === 'apache2') { 42 | // Every parser has a .raw that provides the result back to you instead of modifying the event directly 43 | var results = parsers.httpCombinedAccessParser.raw(event.data.message) 44 | if (results.error) { 45 | event.data['_type'] = 'http_unparseable' 46 | event.data['parseError'] = results.error 47 | } else { 48 | // Parser succeeded so attach the parsed data to a new property on the event 49 | event.data.http = results.data 50 | } 51 | } 52 | 53 | event.next() 54 | }) 55 | -------------------------------------------------------------------------------- /examples/basic.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Takes input from stdin and outputs event objects to stdout 3 | * 4 | * Try it by running 5 | * 6 | * ./bin/streamstash examples/basic.js 7 | * 8 | * Type a message and hit enter 9 | */ 10 | 11 | //Pause all inputs if the number of in process events exceeds the highWatermark 12 | streamStash.highWaterMark = 1000 13 | 14 | //Unpause all inputs after the number of in process events drops below lowWatermark 15 | streamStash.lowWatermark = 100 16 | 17 | addInputPlugin('stdin') 18 | 19 | addOutputPlugin('stdout') 20 | 21 | addFilter(function (event) { 22 | // parsers is a global object containing all parsers built into StreamStash 23 | // If the parser fails event.data will contain `@type` set to 'unparseable' and `parseError` will be 24 | // the reason the parser failed 25 | // If the parser succeeds the original message will be backed up in event.data.originalMessage 26 | // The third argument controls whether or not the original message is backed up, 27 | // set it to false or don't provide it to disable the feature 28 | if (!parsers.jsonParser(event, '@type', true)) { 29 | return event.done() 30 | } 31 | 32 | event.data.filter1 = true 33 | 34 | event.next() 35 | }) 36 | 37 | addFilter(function (event) { 38 | event.data.filter2 = true 39 | 40 | event.next() 41 | }) 42 | -------------------------------------------------------------------------------- /examples/custom_parser.js: 
-------------------------------------------------------------------------------- 1 | var StreamStash = require('../lib') 2 | 3 | // Create a new parser that appends ' custom' to the event.data.message 4 | // StreamStash.parsers.wrap provides the helper function that modifies 5 | // the event directly with the parser result but makes some assumptions 6 | StreamStash.parsers.my_custom_parser = StreamStash.parsers.wrap( 7 | // This is the actual custom parser function 8 | function (message) { 9 | return { 10 | data: { 11 | message: message + ' custom' 12 | }, 13 | error: void 0 14 | } 15 | } 16 | ) 17 | 18 | var testEvent = new StreamStash.EventContainer({ message: 'hi' }) 19 | StreamStash.parsers.my_custom_parser(testEvent) 20 | // returns true 21 | // testEvent.data is now { message: 'hi custom' } 22 | 23 | StreamStash.parsers.my_custom_parser.raw(testEvent.data.message) 24 | // returns { data: { message: 'hi custom' }, error: undefined } 25 | -------------------------------------------------------------------------------- /examples/project/config.js: -------------------------------------------------------------------------------- 1 | var util = require('util') 2 | 3 | addInputPlugin('stdin') 4 | 5 | addOutputPlugin('stdout') 6 | 7 | addFilter(function (event) { 8 | var data 9 | 10 | try { 11 | data = JSON.parse(event.data.message) 12 | event.data = util._extend(event.data, data) 13 | event.data.message = event.data['@message'] 14 | delete event.data.originalMessage 15 | 16 | } catch (error) { 17 | event.data['@type'] = 'unparsable' 18 | } 19 | 20 | event.data.filter1 = true 21 | 22 | event.next() 23 | }) 24 | -------------------------------------------------------------------------------- /examples/project/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "example-streamstash", 3 | "dependencies": { 4 | "streamstash": "1.x" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /examples/relp_basic.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Takes input from rsyslog via the omrelp output plugin and outputs event objects to stdout 3 | * 4 | * Try it by running 5 | * 6 | * ./bin/streamstash examples/relp_basic.js 7 | * 8 | * Configure your rsyslog instance to talk us and restart 9 | * 10 | * module(load="omrelp") 11 | * action(type="omrelp" name="streamstash_relp" target="localhost" port="5514") 12 | * 13 | * Make sure you have rsyslog-relp installed (ubuntu/debian) 14 | * 15 | * Send some log lines to rsyslog 16 | * 17 | * logger "HI THERE" 18 | */ 19 | 20 | addInputPlugin('relp', { host: 'localhost', port: 5514 }) 21 | addOutputPlugin('stdout') 22 | -------------------------------------------------------------------------------- /examples/relp_container.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Just like `relp_basic.js` but this one is for the docker example 3 | */ 4 | 5 | addInputPlugin('relp', { host: '0.0.0.0', port: 5514 }) 6 | addOutputPlugin('stdout') 7 | -------------------------------------------------------------------------------- /examples/tcp.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Takes input from a tcp socket server and outputs event objects to stdout 3 | * 4 | * Try it by running 5 | * 6 | * ./bin/streamstash examples/tcp.js 7 | * 8 | * Use netcat to send data 9 | * 10 | * echo "Hello there!" 
| nc localhost 19999 11 | */ 12 | var util = require('util') 13 | 14 | addInputPlugin('tcp', { port: 19999 }) 15 | 16 | addOutputPlugin('stdout') 17 | 18 | addFilter(function (event) { 19 | event.next() 20 | }) 21 | -------------------------------------------------------------------------------- /examples/telemetry.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Just like basic.js but with telemetry enabled 3 | * 4 | * Try it by running 5 | * 6 | * ./bin/streamstash examples/telemetry.js 7 | * 8 | * You may want to run a netcat server to see the telemetry output 9 | * 10 | * nc -w0 -kul localhost 8181 11 | */ 12 | var util = require('util') 13 | 14 | telemetry('localhost', 8181) 15 | 16 | addInputPlugin('stdin') 17 | 18 | addOutputPlugin('stdout') 19 | 20 | addFilter(function (event) { 21 | setTimeout(function () { event.next() }, Math.floor(Math.random() * (1000))) 22 | }) 23 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('./lib') -------------------------------------------------------------------------------- /lib/EventContainer.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | EventEmitter = require('events').EventEmitter 3 | 4 | //TODO: Allow for filters to remove outputs by name 5 | 6 | /** 7 | * Wraps an event received from an input plugin and provides helpers for processing 8 | * Each event should at least contain message, source, and timestamp 9 | * 10 | * @param {Object} data The data received from the input 11 | * @param {Object} data.message Required message field 12 | * @param {Object} data.event_source Required details from the input plugin for this event 13 | * Should at least contain the input name under `name` 14 | * @param {Object} data.timestamp Required timestamp field, time of the event 15 | * 16 | * @constructor 17 | */ 18 | var EventContainer = function (data) { 19 | this.data = data 20 | 21 | this.state = EventContainer.STATE.PROCESSING 22 | this.eventId = void 0 23 | 24 | this._emittedDone = false 25 | this._plugins = {} 26 | this._pluginsLeft = 0 27 | } 28 | 29 | util.inherits(EventContainer, EventEmitter) 30 | module.exports = EventContainer 31 | 32 | /** 33 | * States an event can be in 34 | * - FAILED: The event failed to complete processing. 
35 | * If the input source is reliable the event should be retried 36 | * - CANCELED: The event was dropped, on purpose 37 | * - PROCESSING: The event is still being filtered/output 38 | * - COMPLETED: The event has completed all processing 39 | * 40 | * @type {Object} 41 | */ 42 | EventContainer.STATE = { 43 | FAILED: -1, 44 | CANCELED: 0, 45 | PROCESSING: 1, 46 | COMPLETED: 2 47 | } 48 | 49 | /** 50 | * Allows a filter to progress the event to the next filter or to the output plugins 51 | */ 52 | EventContainer.prototype.next = function () { 53 | //Setup within StreamStash for speed 54 | return false 55 | } 56 | 57 | /** 58 | * Allows a filter to progress the event directly to the output plugins, past any other filter in the chain 59 | */ 60 | EventContainer.prototype.done = function () { 61 | //Setup within StreamStash for speed 62 | return false 63 | } 64 | 65 | /** 66 | * Allows a filter to cancel further processing for the event 67 | * The event will be completed and `state` will be canceled 68 | */ 69 | EventContainer.prototype.cancel = function () { 70 | //Setup within StreamStash for speed 71 | return false 72 | } 73 | 74 | /** 75 | * Marks the event as completed 76 | * 77 | * @fires EventContainer#complete For any interesting plugins to do post processing 78 | * 79 | * @returns {boolean} True of the event was marked as complete, false if already completed or invalid 80 | */ 81 | EventContainer.prototype.processingComplete = function () { 82 | if (this._emittedDone) { 83 | return false 84 | } 85 | 86 | this.emit('complete') 87 | this._emittedDone = true 88 | return true 89 | } 90 | -------------------------------------------------------------------------------- /lib/Logger.js: -------------------------------------------------------------------------------- 1 | var util = require('util') 2 | 3 | /** 4 | * Provides a simple logging interface 5 | * 6 | * @param {Object} options Configuration options 7 | * @param {Number} [options.level=Logger.INFO] Adjusts how verbose logging output is {@see Logger.LEVEL} 8 | * @param {Stream} [options.stream=process.stdout] Use a different stream to write logs to 9 | * 10 | * @constructor 11 | */ 12 | var Logger = function (options) { 13 | var useOptions = options || {} 14 | this.level = useOptions.level === void 0 ? 
Logger.LEVEL.INFO : useOptions.level 15 | this.stream = useOptions.stream || process.stdout 16 | } 17 | 18 | module.exports = Logger 19 | 20 | /** 21 | * Log levels 22 | * 23 | * @type {Object} 24 | */ 25 | Logger.LEVEL = { 26 | OFF: -1, 27 | ERROR: 0, 28 | INFO: 1, 29 | DEBUG: 2 30 | } 31 | 32 | /** 33 | * Map of levels to their names 34 | * 35 | * @type {Object} 36 | */ 37 | Logger.NAMES = { 38 | 0: 'ERROR', 39 | 1: 'INFO', 40 | 2: 'DEBUG' 41 | } 42 | 43 | /** 44 | * Writes a critical, possibly fatal, message to the log 45 | * 46 | * @param {String} name Module name that is logging the message 47 | * @param {String} message Simple description of the log event 48 | * @param {Object} [data] An object of extra information to add 49 | * 50 | * @returns {boolean} True if the log was written, false if not 51 | */ 52 | Logger.prototype.error = function (name, message, data) { 53 | return this._output(Logger.LEVEL.ERROR, { name: name, message: message, data: data }) 54 | } 55 | 56 | /** 57 | * Writes an interesting runtime information message to the log 58 | * 59 | * @param {String} name Module name that is logging the message 60 | * @param {String} message Simple description of the log event 61 | * @param {Object} [data] An object of extra information to add 62 | * 63 | * @returns {boolean} True if the log was written, false if not 64 | */ 65 | Logger.prototype.info = function (name, message, data) { 66 | return this._output(Logger.LEVEL.INFO, { name: name, message: message, data: data }) 67 | } 68 | 69 | /** 70 | * Writes message that would assist in debugging the runtime to the log 71 | * 72 | * @param {String} name Module name that is logging the message 73 | * @param {String} message Simple description of the log event 74 | * @param {Object} [data] An object of extra information to add 75 | * 76 | * @returns {boolean} True if the log was written, false if not 77 | */ 78 | Logger.prototype.debug = function (name, message, data) { 79 | return this._output(Logger.LEVEL.DEBUG, { name: name, message: message, data: data }) 80 | } 81 | 82 | /** 83 | * Assembles the log line and writes it 84 | * 85 | * @param {Number} level The log level of the message to write 86 | * @param {Object} event An object containing at least name and message 87 | * 88 | * @returns {boolean} True if the log was written, false if not 89 | * 90 | * @private 91 | */ 92 | Logger.prototype._output = function (level, event) { 93 | if (level > this.level) { 94 | return false 95 | } 96 | 97 | event.timestamp = (new Date()).toISOString() 98 | event.level = Logger.NAMES[level] 99 | this.stream.write(JSON.stringify(event) + '\n') 100 | } 101 | -------------------------------------------------------------------------------- /lib/StreamStash.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | EventContainer = require('./EventContainer'), 3 | EventEmitter = require('events').EventEmitter, 4 | Telemetry = require('./Telemetry'), 5 | uuid = require('uuid/v4') 6 | 7 | var memoryUsage = function () { 8 | var mem = process.memoryUsage() 9 | return { 10 | 'heap_total': mem.heapTotal, 11 | 'heap_used': mem.heapUsed 12 | } 13 | } 14 | 15 | //Try to use the v8 module to get memory stats 16 | try { 17 | var v8 = require('v8') 18 | memoryUsage = function () { 19 | return v8.getHeapStatistics() 20 | } 21 | } catch (e) {} 22 | 23 | //TODO: setInterval on shutdown to give info on how many events/plugins we are waiting on 24 | //TODO: need logs for stoppingInput and stopped plugin 
events 25 | //TODO: need started event to fire after all plugins have started 26 | //TODO: review log levels 27 | //TODO: Option to disable logging timestamp 28 | //TODO: Option to enable json log line format 29 | //TODO: record event telemetry per input/output 30 | //TODO: ES output connection timeout 31 | 32 | /** 33 | * Coordinates events from input plugins to filters and finally output plugins 34 | * 35 | * @param {Object} options Configuration parameters 36 | * @param {Logger} options.logger The logger object to use for outputting logs 37 | * @param {Number} [options.telemetryInterval=5000] Interval in ms to send telemetry 38 | * 39 | * @constructor 40 | */ 41 | var StreamStash = function (options) { 42 | StreamStash.super_.call(this) 43 | 44 | var self = this 45 | 46 | self.setMaxListeners(0) 47 | self.state = StreamStash.STATE.CONFIGURING 48 | 49 | self.logger = options.logger 50 | self.telemetry = new Telemetry() 51 | 52 | self.inputs = [] 53 | self.filters = [] 54 | self.outputs = [] 55 | 56 | self.pausedInputs = false 57 | 58 | self.stats = { 59 | startTime: null, 60 | events: { 61 | processing: 0, 62 | canceled: 0, 63 | failed: 0, 64 | total: 0 65 | }, 66 | plugins: { 67 | started: 0, 68 | stoppedInput: 0, 69 | stopped: 0, 70 | total: 0 71 | } 72 | } 73 | 74 | self._telemetryInterval = setInterval( 75 | function () { 76 | self.telemetry.gauge('events.processing', self.stats.events.processing) 77 | self.telemetry.gauge('events.total', self.stats.events.total) 78 | self.telemetry.gauge('events.canceled', self.stats.events.canceled) 79 | self.telemetry.gauge('events.failed', self.stats.events.failed) 80 | 81 | var mem = memoryUsage() 82 | Object.keys(mem).forEach(function (stat) { 83 | self.telemetry.gauge('process.memory.' + stat, mem[stat]) 84 | }) 85 | }, 86 | options.telemetryInterval || 5000 87 | ) 88 | 89 | /** 90 | * Pause all input plugins if the number of events being processed exceeds this number. Defaults to 100,000.
91 | * Setting this to 0 disables the functionality 92 | * @type {Number} 93 | */ 94 | self.highWatermark = 100000 95 | 96 | /** 97 | * Unpause all input plugins after the number of events being processed drops below this number.
98 | * Only applies if all input plugins have already been paused. Defaults to 1,000
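 *
 * A minimal usage sketch, assuming the `streamStash` global that bin/streamstash exposes to config
 * files; the values below are illustrative only. Both watermarks are plain properties on the
 * instance, so a config file can tune them before start():
 *
 *     streamStash.highWatermark = 50000
 *     streamStash.lowWatermark = 500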
99 | * @type {Number} 100 | */ 101 | self.lowWatermark = 1000 102 | 103 | /** 104 | * The number of seconds to wait for the number of events being processed to drop below lowWatermark before
105 | * forcefully stopping the process 106 | * Defaults to 60 seconds 107 | * @type {Number} 108 | */ 109 | self.pauseTimeout = 60 110 | 111 | /** 112 | * The number of seconds to wait after a shutdown request has been made before we forcefully stop the process
113 | * Setting this to 0 disables the forceful shutdown behavior. Take note that this may hang the process indefinitely if 114 | * the number of outstanding events never drops below the lowWatermark.
115 | * Defaults to 30 seconds 116 | * @type {Number} 117 | */ 118 | self.killTimeout = 30 119 | 120 | /** 121 | * A function that generates a random id. By default this is uuid v4 but can be swapped out for anything that can 122 | * immediately return a unique identifier. Ids should be unique across multiple instances of streamstash to avoid 123 | * collisions in highly scalable environments. 124 | * @type {Function} 125 | */ 126 | self.idGen = uuid 127 | } 128 | 129 | util.inherits(StreamStash, EventEmitter) 130 | module.exports = StreamStash 131 | 132 | /** 133 | * All the states StreamStash can be in
134 | * - CONFIGURING: Instantiated and ready for input/output plugins and filters to be added
135 | * - STARTED: Configured and ready to process events
136 | * - STOPPING_INPUT: Beginning to shut down, all input plugins should no longer be emitting events
137 | * - STOPPING_ALL: All inputs have stopped emitting events, waiting for in flight events and plugins to complete
138 | * - STOPPED: All events and plugins have completed, we are stopped
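 *
 * A rough sketch of the normal lifecycle, as driven by start(), stop(), and _attemptStop() below;
 * a graceful shutdown walks the stopping states in order:
 *
 *     CONFIGURING --start()--> STARTED --stop()--> STOPPING_INPUT --> STOPPING_ALL --> STOPPED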
139 | * 140 | * @type {Object} 141 | */ 142 | StreamStash.STATE = { 143 | CONFIGURING: 0, 144 | STARTED: 1, 145 | STOPPING_INPUT: 2, 146 | STOPPING_ALL: 3, 147 | STOPPED: 4 148 | } 149 | 150 | /** 151 | * Adds an input plugin to provide events 152 | * 153 | * @param {Object} plugin An already instantiated input plugin 154 | * 155 | * @returns {boolean} True if the plugin was added, false if not allowed 156 | */ 157 | StreamStash.prototype.addInputPlugin = function (plugin) { 158 | var self = this 159 | 160 | if (self.state !== StreamStash.STATE.CONFIGURING) { 161 | return false 162 | } 163 | 164 | if (plugin.hasOwnProperty('name') === false) { 165 | throw new Error('Input plugin did not have a name') 166 | } 167 | 168 | if (self._uniqueName(self.inputs, plugin) === false) { 169 | throw new Error('Each input plugin must have a unique name', plugin.name) 170 | } 171 | 172 | plugin.once('started', function () { 173 | self.logger.info(plugin.name, 'started') 174 | self.stats.plugins.started++ 175 | }) 176 | 177 | plugin.on('event', function (eventContainer) { 178 | self._handleInputEvent(plugin, eventContainer) 179 | }) 180 | 181 | plugin.pluginId = self.inputs.push(plugin) - 1 182 | self.stats.plugins.total++ 183 | 184 | return true 185 | } 186 | 187 | /** 188 | * Adds a filter function to use for events 189 | * TODO: Document what is given to the filter functions 190 | * 191 | * @param {String} [name] Name of the filter, will show in telemetry, among other places. 192 | * Avoid using `.` unless you want your telemetry to be all screwy 193 | * @param {Function} func The function to use for this filter 194 | * 195 | * @returns {boolean} True if the filter was added, false if not allowed 196 | */ 197 | StreamStash.prototype.addFilter = function (name, func) { 198 | var useFunc = func, 199 | useName = name 200 | 201 | if (this.state !== StreamStash.STATE.CONFIGURING) { 202 | return false 203 | } 204 | 205 | // Backwards compat for no name filters 206 | if (typeof name === 'function') { 207 | useFunc = name 208 | useName = void 0 209 | } 210 | 211 | // Set a name if one wasn't provided 212 | if (!useName) { 213 | useName = this.filters.length 214 | } 215 | 216 | if (typeof useFunc !== 'function') { 217 | throw new Error('Attempted to add a filter that is not a function') 218 | } 219 | 220 | this.filters.forEach(function (filter) { 221 | if (filter.func === useFunc) { 222 | throw new Error('Attempted to add the same filter more than once') 223 | } 224 | }) 225 | 226 | this.filters.push({ func: useFunc, name: useName }) 227 | return true 228 | } 229 | 230 | /** 231 | * Adds an output plugin to use for events 232 | * 233 | * @param {Object} plugin An already instantiated output plugin 234 | * 235 | * @returns {boolean} True if the plugin was added, false if not allowed 236 | */ 237 | StreamStash.prototype.addOutputPlugin = function (plugin) { 238 | var self = this 239 | 240 | if (self.state !== StreamStash.STATE.CONFIGURING) { 241 | return false 242 | } 243 | 244 | if (plugin.hasOwnProperty('name') === false) { 245 | throw new Error('Input plugin did not have a name') 246 | } 247 | 248 | if (self._uniqueName(self.outputs, plugin) === false) { 249 | throw new Error('Each output plugin must have a unique name', plugin.name) 250 | } 251 | 252 | plugin.once('started', function () { 253 | self.logger.info(plugin.name, 'started') 254 | self.stats.plugins.started++ 255 | }) 256 | 257 | plugin.on('complete', function (eventContainer) { 258 | self._handleOutputComplete(plugin, eventContainer) 259 | }) 260 | 
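    // An output plugin emits 'failed' when it could not write an event; the event is then
    // completed with STATE.FAILED (see _handleOutputComplete) so inputs backed by a reliable
    // source can nack or retry it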
261 | plugin.on('failed', function (eventContainer) { 262 | self._handleOutputComplete(plugin, eventContainer, EventContainer.STATE.FAILED) 263 | }) 264 | 265 | plugin.pluginId = self.outputs.push(plugin) - 1 266 | self.stats.plugins.total++ 267 | 268 | return true 269 | } 270 | 271 | StreamStash.prototype.enableTelemetry = function (statsClient) { 272 | this.telemetry.statsClient = statsClient 273 | } 274 | 275 | /** 276 | * Starts processing events 277 | * At least 1 input and 1 output must be configured 278 | * 279 | * @returns {boolean} True if started, false if already started or shutting down 280 | * 281 | * @fires StreamStash#start 282 | */ 283 | StreamStash.prototype.start = function () { 284 | if (this.state !== StreamStash.STATE.CONFIGURING) { 285 | return false 286 | } 287 | 288 | if (this.inputs.length === 0) { 289 | throw new Error('At least 1 input plugin must be configured') 290 | } 291 | 292 | if (this.outputs.length === 0) { 293 | throw new Error('At least 1 output plugin must be configured') 294 | } 295 | 296 | this.logger.info('StreamStash', 'Starting!') 297 | this.state = StreamStash.STATE.STARTED 298 | this.stats.startTime = new Date() 299 | 300 | this.emit('start') 301 | return true 302 | } 303 | 304 | /** 305 | * Kicks off the stop sequence 306 | * Use this to shutdown gracefully 307 | * 308 | * @returns {boolean} True if the stop has begun, false if not allowed 309 | */ 310 | StreamStash.prototype.stop = function () { 311 | if (this.state !== StreamStash.STATE.STARTED) { 312 | return false 313 | } 314 | 315 | var self = this 316 | 317 | var waitStop = function (plugin) { 318 | plugin.once('stopped', function () { 319 | self.stats.plugins.stopped++ 320 | 321 | if (self.stats.plugins.stopped === self.stats.plugins.total) { 322 | //TODO: need to make sure all in flights have completed 323 | self.logger.info('StreamStash', 'All plugins have completely stopped') 324 | self.state = StreamStash.STATE.STOPPED 325 | clearTimeout(self._killTimer) 326 | self.emit('stopped') 327 | } 328 | }) 329 | } 330 | 331 | self.inputs.forEach(function (input) { 332 | input.once('stoppedInput', function () { 333 | self.stats.plugins.stoppedInput++ 334 | 335 | if (self.stats.plugins.stoppedInput === self.inputs.length) { 336 | self.logger.info('StreamStash', 'All input plugins have stopped emitting') 337 | self.logger.info('StreamStash', 'Waiting for ' + self.stats.events.processing + ' in flight events to complete') 338 | self._attemptStop() 339 | } 340 | }) 341 | 342 | waitStop(input) 343 | }) 344 | 345 | self.outputs.forEach(waitStop) 346 | 347 | self.logger.info('StreamStash', 'Telling all input plugins to stop emitting') 348 | self.state = StreamStash.STATE.STOPPING_INPUT 349 | this.emit('stopInput') 350 | return true 351 | } 352 | 353 | /** 354 | * Starts an event moving through filters after being received from an input plugin 355 | * 356 | * @param {Object} plugin The plugin that provided the event 357 | * @param {EventContainer} eventContainer The newly received event 358 | * 359 | * @returns {boolean} True if the event was taken, false if not ready for events 360 | * 361 | * @private 362 | */ 363 | StreamStash.prototype._handleInputEvent = function (plugin, eventContainer) { 364 | var self = this 365 | 366 | if (self.state !== StreamStash.STATE.STARTED && self.state !== StreamStash.STATE.STOPPING_INPUT) { 367 | self.logger.info('StreamStash', 'Dropping event from ' + eventContainer.data.source) 368 | return false 369 | } 370 | 371 | if 
(!eventContainer.data.hasOwnProperty('event_source')) { 372 | self.logger.error('StreamStash', 'Event received from ' + plugin.name + ' has no event_source property') 373 | } 374 | 375 | if (!eventContainer.data.hasOwnProperty('message')) { 376 | self.logger.error('StreamStash', 'Event received from ' + plugin.name + ' has no message property') 377 | } 378 | 379 | self.stats.events.processing++ 380 | 381 | eventContainer._pluginsLeft = self.outputs.length 382 | eventContainer.eventId = self.idGen() 383 | 384 | self.stats.events.total++ 385 | 386 | if (self.pausedInputs === false && self.stats.events.processing > self.highWatermark && self.highWatermark > 0) { 387 | self.pausedInputs = true 388 | 389 | if (self.pauseTimeout > 0) { 390 | self._pausedTimer = setTimeout( 391 | function () { 392 | self.shutdown('Never got below low watermark of ' + self.lowWatermark) 393 | }, 394 | self.pauseTimeout * 1000 395 | ) 396 | } 397 | 398 | self.logger.info('StreamStash', 'Pausing all input plugins because we are over the high watermark of ' + self.highWatermark) 399 | self.emit('stopInput') 400 | } 401 | 402 | self._doFilter(0, eventContainer) 403 | return true 404 | } 405 | 406 | /** 407 | * Runs an event through the supplied filters 408 | * Sets up EventContainer#next, EventContainer#done, and EventContainer#cancel for each filter 409 | * 410 | * @param {Number} index Index of the filter to call, if it does not exist the event is moved to output plugins 411 | * @param {EventContainer} eventContainer The event to run through the filters 412 | * 413 | * @private 414 | */ 415 | StreamStash.prototype._doFilter = function (index, eventContainer) { 416 | var self = this, 417 | nexted = false, 418 | doned = false, 419 | canceled = false, 420 | filter = self.filters[index] 421 | 422 | //No more filters, go to output 423 | if (!filter) { 424 | return self._handleOutputEvent(eventContainer) 425 | } 426 | 427 | var timer = new Date() 428 | 429 | /** 430 | * Makes sure the event hasn't already progressed out of the current filter 431 | * 432 | * @returns {boolean} False if the filter has already acted, true if not 433 | */ 434 | var canAct = function () { 435 | if (nexted) { 436 | self.logger.error('StreamStash', 'Filter ' + filter.name + ' tried to call `next` on an event that has already progressed', { event: eventContainer }) 437 | return false 438 | } 439 | 440 | if (doned) { 441 | self.logger.error('StreamStash', 'Filter ' + filter.name + ' tried to call `done` on an event that has already progressed', { event: eventContainer }) 442 | return false 443 | } 444 | 445 | 446 | if (canceled) { 447 | self.logger.error('StreamStash', 'Filter ' + filter.name + ' tried to call `cancel` on an event that has already progressed', { event: eventContainer }) 448 | return false 449 | } 450 | 451 | self.telemetry.timing('filter.' 
+ filter.name, timer) 452 | return true 453 | } 454 | 455 | eventContainer.next = function () { 456 | if (!canAct()) { 457 | return 458 | } 459 | 460 | nexted = true 461 | self._doFilter(index + 1, eventContainer) 462 | } 463 | 464 | eventContainer.done = function () { 465 | if (!canAct()) { 466 | return 467 | } 468 | 469 | doned = true 470 | self._handleOutputEvent(eventContainer) 471 | } 472 | 473 | eventContainer.cancel = function () { 474 | if (!canAct()) { 475 | return 476 | } 477 | 478 | canceled = true 479 | eventContainer.state = EventContainer.STATE.CANCELED 480 | self._handleProcessingComplete(eventContainer) 481 | } 482 | 483 | filter.func(eventContainer) 484 | } 485 | 486 | /** 487 | * Sends an event to the output plugins for processing 488 | * 489 | * @param {EventContainer} eventContainer The event to output 490 | * 491 | * @private 492 | */ 493 | StreamStash.prototype._handleOutputEvent = function (eventContainer) { 494 | this.logger.debug('StreamStash', 'Beginning output for event ' + eventContainer.eventId) 495 | this.emit('output', eventContainer) 496 | } 497 | 498 | /** 499 | * Handles an output plugin that completed outputting an event 500 | * 501 | * @param {Object} output The output plugin that completed an event 502 | * @param {EventContainer|EventContainer[]} eventContainers An event or array of events that were output 503 | * @param {Number} [state=EventContainer.STATE.COMPLETED] The state the event should be set to 504 | * 505 | * @private 506 | */ 507 | StreamStash.prototype._handleOutputComplete = function (output, eventContainers, state) { 508 | var self = this, 509 | events = (Array.isArray(eventContainers)) ? eventContainers : [eventContainers], 510 | useState = state || EventContainer.STATE.COMPLETED, 511 | completed = [] 512 | 513 | for (var i = 0, len = events.length; i < len; i++) { 514 | var eventContainer = events[i] 515 | if (eventContainer._plugins[output.pluginId]) { 516 | self.logger.error(output.name, 'Output plugin completed the same event twice') 517 | return 518 | } 519 | 520 | eventContainer._plugins[output.name] = true 521 | eventContainer._pluginsLeft-- 522 | 523 | self.logger.debug('StreamStash', output.name + ' completed output. ' + eventContainer._pluginsLeft + ' plugins left') 524 | if (eventContainer._pluginsLeft === 0 || useState === EventContainer.STATE.FAILED) { 525 | eventContainer.state = useState 526 | completed.push(eventContainer) 527 | } 528 | } 529 | 530 | self._handleProcessingComplete(completed) 531 | } 532 | 533 | /** 534 | * Handles completing an event entirely 535 | * 536 | * @param {EventContainer|EventContainer[]} eventContainers An event or array of events that have finished processing 537 | * 538 | * @private 539 | */ 540 | StreamStash.prototype._handleProcessingComplete = function (eventContainers) { 541 | var self = this, 542 | events = (Array.isArray(eventContainers)) ? 
eventContainers : [eventContainers] 543 | 544 | for (var i = 0, len = events.length; i < len; i++) { 545 | var eventContainer = events[i], 546 | stateStr 547 | 548 | if (!eventContainer.processingComplete()) { 549 | self.logger.error( 550 | 'StreamStash', 551 | 'Could not complete processing for event ' + eventContainer.eventId + ' because it was previously completed' 552 | ) 553 | return 554 | } 555 | 556 | switch (eventContainer.state) { 557 | case EventContainer.STATE.CANCELED: 558 | self.stats.events.canceled++ 559 | stateStr = 'canceled' 560 | break 561 | 562 | case EventContainer.STATE.FAILED: 563 | self.stats.events.failed++ 564 | stateStr = 'failed' 565 | break 566 | 567 | default: 568 | stateStr = 'completed' 569 | } 570 | 571 | self.logger.debug('StreamStash', 'Event ' + eventContainer.eventId + ' processing ' + stateStr) 572 | } 573 | 574 | self.stats.events.processing -= events.length 575 | if (self.pausedInputs && self.stats.events.processing < self.lowWatermark) { 576 | clearTimeout(self._pausedTimer) 577 | self.pausedInputs = false 578 | self.logger.info('StreamStash', 'Resuming all input plugins because we are below the low watermark of ' + self.lowWatermark) 579 | self.emit('start') 580 | } 581 | 582 | //TODO: just make this a timer? 583 | if (self.state === StreamStash.STATE.STOPPING_INPUT) { 584 | self._attemptStop() 585 | } 586 | } 587 | 588 | /** 589 | * Attempts to emit `stop` 590 | * Can only succeed if all in flight events have completed 591 | * 592 | * @private 593 | */ 594 | StreamStash.prototype._attemptStop = function () { 595 | if (this.stats.events.processing === 0) { 596 | this.logger.info('StreamStash', 'All in flight events have completed') 597 | this.logger.info('StreamStash', 'Stopping all ' + this.stats.plugins.total + ' plugins') 598 | this.state = StreamStash.STATE.STOPPING_ALL 599 | this.emit('stop') 600 | } 601 | } 602 | 603 | /** 604 | * Attempts to stop the process. If an attempt has already been made then the process is halted immediately. 605 | * If the process does not stop on its own by the time this.killTimeout time passes the process will be halted. 606 | * 607 | * @param {String} forceReason The reason a force shutdown was performed, if any. 608 | */ 609 | StreamStash.prototype.shutdown = function (forceReason) { 610 | var self = this 611 | 612 | var forceKill = function (reason) { 613 | self.logger.error('StreamStash', 'Forcefully exiting. 
' + reason) 614 | process.exit(1) 615 | } 616 | 617 | if (self._killTimer) { 618 | if (forceReason) { 619 | forceKill(forceReason) 620 | } 621 | return 622 | } 623 | 624 | self._killTimer = setTimeout( 625 | function () { 626 | forceKill('Shutdown timeout after ' + self.killTimeout + ' seconds') 627 | }, 628 | self.killTimeout * 1000 629 | ) 630 | 631 | self.logger.info('StreamStash', 'Forceful exit in ' + self.killTimeout + ' seconds') 632 | 633 | self.stop() 634 | } 635 | 636 | /** 637 | * Checks if a plugin has a unique name 638 | * 639 | * @param {Object} currentPlugins Object containing the plugins to check the name against against 640 | * @param {Object} testPlugin The plugin to test the existing names against 641 | * 642 | * @returns {boolean} False if not unique, true if unique 643 | * 644 | * @private 645 | */ 646 | StreamStash.prototype._uniqueName = function (currentPlugins, testPlugin) { 647 | var unique = true 648 | 649 | currentPlugins.some(function (plugin) { 650 | if (plugin.name.toLowerCase() == testPlugin.name.toLowerCase()) { 651 | unique = false 652 | return true 653 | } 654 | }) 655 | 656 | return unique 657 | } 658 | -------------------------------------------------------------------------------- /lib/Telemetry.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Passthrough to the underlying statsd client. 3 | * If no statsd client is configured this will noop the function calls 4 | * 5 | * @param {Object} statsClient A configured stats client that implements the necessary methods 6 | * 7 | * @constructor 8 | */ 9 | var Telemetry = function (statsClient) { 10 | this.statsClient = statsClient 11 | } 12 | 13 | module.exports = Telemetry 14 | 15 | Telemetry.prototype.increment = function () { 16 | if (this.statsClient) { 17 | this.statsClient.increment.apply(this.statsClient, arguments) 18 | } 19 | } 20 | 21 | Telemetry.prototype.decrement = function () { 22 | if (this.statsClient) { 23 | this.statsClient.decrement.apply(this.statsClient, arguments) 24 | } 25 | } 26 | 27 | Telemetry.prototype.counter = function () { 28 | if (this.statsClient) { 29 | this.statsClient.counter.apply(this.statsClient, arguments) 30 | } 31 | } 32 | 33 | Telemetry.prototype.gauge = function () { 34 | if (this.statsClient) { 35 | this.statsClient.gauge.apply(this.statsClient, arguments) 36 | } 37 | } 38 | 39 | Telemetry.prototype.gaugeDelta = function () { 40 | if (this.statsClient) { 41 | this.statsClient.gaugeDelta.apply(this.statsClient, arguments) 42 | } 43 | } 44 | 45 | Telemetry.prototype.sets = function () { 46 | if (this.statsClient) { 47 | this.statsClient.sets.apply(this.statsClient, arguments) 48 | } 49 | } 50 | 51 | Telemetry.prototype.timing = function () { 52 | if (this.statsClient) { 53 | this.statsClient.timing.apply(this.statsClient, arguments) 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /lib/index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('./StreamStash') 2 | module.exports.inputs = require('./inputs') 3 | module.exports.outputs = require('./outputs') 4 | module.exports.parsers = require('./parsers') 5 | module.exports.EventContainer = require('./EventContainer') 6 | module.exports.Logger = require('./Logger') 7 | 8 | -------------------------------------------------------------------------------- /lib/inputs/BaseInput.js: -------------------------------------------------------------------------------- 1 
| var util = require('util'), 2 | EventEmitter = require('events').EventEmitter, 3 | _ = require('underscore') 4 | 5 | /** 6 | * Base input constructor 7 | *
8 | * Every input should inherit from this object 9 | * 10 | * @param {Object} options Configuration object 11 | * @param {StreamStash} options.streamStash The main stream stash object 12 | * @param {Function} options.EventContainer The event container constructor to use 13 | * @param {Object} options.logger A logger to use for logging 14 | * @param {String} options.name A name to use for the `source` value as well as in logging. Must be unique 15 | * to other input plugins 16 | * @param {Object} [options.fields] Extra fields to add on each event 17 | * @param {Object} [options.parser] Parser to use to decode incoming data 18 | * 19 | * @abstract 20 | * @constructor 21 | */ 22 | var BaseInput = function (options) { 23 | BaseInput.super_.call(this) 24 | 25 | this.streamStash = options.streamStash 26 | this.EventContainer = options.EventContainer 27 | this.logger = options.logger 28 | this.fields = options.fields || {} 29 | this.parser = options.parser 30 | this.name = options.name 31 | this.state = 0 32 | } 33 | 34 | util.inherits(BaseInput, EventEmitter) 35 | module.exports = BaseInput 36 | 37 | /** 38 | * Helper function to create and emit a new EventContainer with the proper fields. 39 | * 40 | * @param {String} message The message for this event 41 | * @param {BaseInput~emitCallback} [callback] A callback to call instead of directly emitting the event 42 | */ 43 | BaseInput.prototype._emitEvent = function (message, callback) { 44 | // We don't inspect the current state because the underlying inputs should pause and may continue to emit while 45 | // consuming existing buffers. 46 | 47 | var self = this, 48 | sourceData = _.extend( 49 | {}, 50 | { name: self.name }, 51 | self.fields 52 | ) 53 | 54 | sourceData.timestamp = new Date() 55 | var event = new self.EventContainer({ event_source: sourceData, message: message }) 56 | 57 | if (callback === void 0) { 58 | callback = function () { 59 | self.emit('event', event) 60 | } 61 | } 62 | 63 | if (self.parser) { 64 | self.parser(event) 65 | callback(event) 66 | } else { 67 | callback(event) 68 | } 69 | } 70 | 71 | /** 72 | * Handles final eventContainer processing and emits the event 73 | * 74 | * @callback BaseInput~emitCallback 75 | * 76 | * @param {EventContainer} event The event container that was created 77 | */ 78 | -------------------------------------------------------------------------------- /lib/inputs/HTTPInput.js: -------------------------------------------------------------------------------- 1 | let util = require('util'), 2 | BaseInput = require('./BaseInput'), 3 | EventContainer = require('../EventContainer') 4 | 5 | /** 6 | * Turns http requests into events 7 | * You must call `listen` after adding this input 8 | * 9 | * const http = require('http') 10 | * let s = http.createServer() 11 | * s.listen(9012) 12 | * addInputPlugin('http', { server: s }) 13 | * 14 | * @param {String} options.name 15 | * @param {Object} options.server A server object or anything that emits a readable stream. You must call listen. 
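 *
 * Each request becomes one event; the parsed request is attached at
 * event.data.http_request (url, headers, trailers, body, version, method, remote).
 * When the event completes, the response is ended with status 500 if the event
 * failed and 200 otherwise.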
16 | * 17 | * @extends BaseInput 18 | * @constructor 19 | */ 20 | let HTTPInput = function (options) { 21 | HTTPInput.super_.call(this, options) 22 | 23 | let self = this 24 | 25 | self.name = options.name || HTTPInput.NAME 26 | self.server = options.server 27 | 28 | self.sockets = {} 29 | self._id = 0 30 | 31 | self.logger.debug(self.name, 'starting up') 32 | 33 | self._wireServer() 34 | 35 | self.streamStash.on('start', function () { 36 | self.state = 1 37 | for (let socketId in self.sockets) { 38 | self.sockets[socketId].resume() 39 | } 40 | 41 | self.emit('started') 42 | }) 43 | 44 | self.streamStash.on('stopInput', function () { 45 | self.state = 0 46 | 47 | for (let socketId in self.sockets) { 48 | self.sockets[socketId].pause() 49 | } 50 | 51 | self.emit('stoppedInput') 52 | }) 53 | 54 | self.streamStash.on('stop', function () { 55 | self.emit('stopped') 56 | }) 57 | } 58 | 59 | HTTPInput.NAME = "HTTP" 60 | HTTPInput.DESCRIPTION = "Listens to an http server and emits requests as events" 61 | 62 | util.inherits(HTTPInput, BaseInput) 63 | module.exports = HTTPInput 64 | 65 | HTTPInput.prototype._wireSocket = function (socket) { 66 | let self = this 67 | 68 | socket._id = self._id++ 69 | self.sockets[socket._id] = socket 70 | 71 | socket.on('error', function (error) { 72 | self.logger.error(self.name, 'Connection # ' + socket.socketId + ' (' + self.remoteAddress + ') had an error', { error: error.stack || error }) 73 | socket.destroy() 74 | }) 75 | 76 | socket.on('close', function () { 77 | delete self.sockets[socket._id] 78 | }) 79 | } 80 | 81 | HTTPInput.prototype._wireServer = function () { 82 | let self = this 83 | 84 | self.server.on('connection', function (socket) { 85 | if (self.state !== 1) { 86 | socket.pause() 87 | } 88 | 89 | self._wireSocket(socket) 90 | }) 91 | 92 | self.server.on('request', (req, res) => { 93 | let body = [] 94 | 95 | req.on('end', () => { 96 | self._emitEvent( 97 | req.method + ' ' + req.url + ' HTTP/' + req.httpVersion, 98 | event => { 99 | event.data.http_request = { 100 | url: req.url, 101 | headers: req.headers, 102 | trailers: req.trailers, 103 | body: Buffer.concat(body), 104 | version: req.httpVersion, 105 | method: req.method, 106 | remote: { 107 | address: req.socket.remoteAddress, 108 | family: req.socket.remoteFamily, 109 | port: req.socket.remotePort, 110 | } 111 | } 112 | 113 | event.on('complete', function () { 114 | if (event.state === EventContainer.STATE.FAILED) { 115 | self.logger.debug(self.name, 'Nacking event', { event_id: event.eventId }) 116 | res.statusCode = 500 117 | 118 | } else { 119 | self.logger.debug(self.name, 'Acking event', { event_id: event.eventId }) 120 | res.statusCode = 200 121 | } 122 | 123 | res.end() 124 | }) 125 | 126 | self.emit('event', event) 127 | } 128 | ) 129 | }) 130 | 131 | req.on('data', (data) => { 132 | body.push(data) 133 | }) 134 | }) 135 | } 136 | -------------------------------------------------------------------------------- /lib/inputs/ProxyProtocol.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Wraps a server object to intercept a proxy protocol header 3 | * Should handle most socket servers, certainly net.Server and tls.Server 4 | * Proxy details are exposed on the socket as socket.proxyDetails 5 | * 6 | * @param server 7 | */ 8 | module.exports = function (server) { 9 | // We need to backup all the connection listeners to delay them until we have had a chance to remove the 10 | // proxy protocol heading 11 | var connListeners = 
server.listeners('connection'), 12 | secConnListeners = server.listeners('secureConnection') 13 | 14 | server.removeAllListeners('connection') 15 | server.removeAllListeners('secureConnection') 16 | 17 | server.on('secureConnection', function (tlsSocket) { 18 | tlsSocket.proxyDetails = tlsSocket._parent.proxyDetails 19 | 20 | for (var i in secConnListeners) { 21 | secConnListeners[i].call(secConnListeners[i], tlsSocket) 22 | } 23 | }) 24 | 25 | server.on('connection', function (socket) { 26 | var emit = socket.emit, 27 | events = [] 28 | 29 | socket.emit = function (event) { 30 | if (event != "data") { 31 | events.push(arguments) 32 | } 33 | 34 | if (event == 'readable') { 35 | var chunk = socket.read() 36 | if (chunk === null) { 37 | chunk = new Buffer(0) 38 | } 39 | 40 | var line = chunk.toString('utf8'), 41 | nextToken = line.indexOf('\r\n') 42 | 43 | if (nextToken < 0) { 44 | throw new Error('Expected \\r\\n but did not find one') 45 | } 46 | 47 | var parts = line.slice(0, nextToken).split(' ') 48 | 49 | if (parts.length < 6) { 50 | throw new Error('Expected 6 parts for PROXY protocol got ' + parts.length) 51 | } 52 | 53 | if (parts[0] != 'PROXY') { 54 | throw new Error('Expect PROXY protocol but got ' + parts[0]) 55 | } 56 | 57 | nextToken += 2 58 | 59 | socket.proxyDetails = { 60 | protocol: parts[1], 61 | clientIp: parts[2], 62 | proxyIp: parts[3], 63 | clientPort: parts[4], 64 | proxyPort: parts[5] 65 | } 66 | 67 | socket.emit = emit 68 | 69 | // Put back any bytes we didn't consume 70 | socket.unshift(chunk.slice(nextToken)) 71 | 72 | for (var i in connListeners) { 73 | connListeners[i].call(connListeners[i], socket) 74 | } 75 | 76 | for (var i in events) { 77 | emit.apply(socket, events[i]) 78 | } 79 | 80 | events = [] 81 | } 82 | } 83 | }) 84 | } 85 | -------------------------------------------------------------------------------- /lib/inputs/RELPInput.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | BaseInput = require('./BaseInput'), 3 | RELPLib = require('relp'), 4 | parsers = require('../parsers'), 5 | EventContainer = require('../EventContainer'), 6 | ProxyProtocolHandler = require('./ProxyProtocol') 7 | 8 | /** 9 | * Turns data received from a RELP client into events 10 | * 11 | * @param {Object} options.host The host to listen on 12 | * @param {Object} options.port The port to listen on 13 | * @param {Boolean} [options.proxyProtocol=false] True enables proxy protocol 14 | * @param {net.Server} [options.server] A custom server instance, mainly for tls 15 | * @param {String} [options.connectionEvent='connection'] Change the event name for new connections. 
16 | * This is especially useful for TLS support since a TLS server emits 'secureConnection` events when 17 | * a socket is ready 18 | * 19 | * @extends BaseInput 20 | * @constructor 21 | */ 22 | var RELPInput = function (options) { 23 | var useOptions = options || {} 24 | 25 | useOptions.parser = options.parser || parsers.relpSyslogParser 26 | useOptions.name = options.name || RELPInput.NAME 27 | 28 | RELPInput.super_.call(this, useOptions) 29 | 30 | var self = this 31 | 32 | self.server = new RELPLib.Server({ 33 | host: useOptions.host, 34 | port: useOptions.port, 35 | connectionEvent: useOptions.connectionEvent, 36 | server: useOptions.server, 37 | logger: self.logger 38 | }) 39 | 40 | if (useOptions.proxyProtocol) { 41 | ProxyProtocolHandler(self.server.server) 42 | } 43 | 44 | //TODO: if a socket dies try to stop outputs, only if it is still processing in filters 45 | self.server.pause() 46 | 47 | self.logger.debug(self.name, 'starting up') 48 | 49 | self.streamStash.on('start', function () { 50 | self.state = 1 51 | self.server.resume() 52 | self.emit('started') 53 | }) 54 | 55 | self.streamStash.on('stopInput', function () { 56 | self.state = 0 57 | self.server.pause() 58 | self.emit('stoppedInput') 59 | }) 60 | 61 | self.streamStash.on('stop', function () { 62 | //TODO: shutdown 63 | self.emit('stopped') 64 | }) 65 | 66 | self.server.on('message', function (message) { 67 | self._handleInput(message) 68 | }) 69 | 70 | setInterval( 71 | function () { 72 | self.streamStash.telemetry.gauge('inputs.' + self.name + '.connections', Object.keys(self.server.sockets).length) 73 | }, 74 | 5000 75 | ) 76 | } 77 | 78 | RELPInput.NAME = 'RELP' 79 | RELPInput.DESCRIPTION = 'Turns data received from a RELP client into events' 80 | 81 | util.inherits(RELPInput, BaseInput) 82 | module.exports = RELPInput 83 | 84 | RELPInput.prototype._handleInput = function (message) { 85 | var self = this 86 | 87 | this._emitEvent(message.body, function (event) { 88 | if (message.hasOwnProperty('proxyDetails') && message.proxyDetails) { 89 | event.data.event_source['proxy_client_ip'] = message.proxyDetails.clientIp 90 | event.data.event_source['proxy_client_port'] = message.proxyDetails.clientPort 91 | event.data.event_source['proxy_ip'] = message.proxyDetails.proxyIp 92 | event.data.event_source['proxy_port'] = message.proxyDetails.proxyPort 93 | } else { 94 | event.data.event_source['remote_address'] = message.remoteAddress 95 | } 96 | 97 | event.on('complete', function () { 98 | self.logger.debug(self.name, 'Acking event', { event_id: event.eventId }) 99 | 100 | if (event.state === EventContainer.STATE.FAILED) { 101 | self.server.nack(message) 102 | } else { 103 | self.server.ack(message) 104 | } 105 | }) 106 | 107 | self.emit('event', event) 108 | }) 109 | } 110 | -------------------------------------------------------------------------------- /lib/inputs/SMTPInput.js: -------------------------------------------------------------------------------- 1 | let util = require('util'), 2 | BaseInput = require('./BaseInput'), 3 | SMTPServer = require('smtp-server').SMTPServer, 4 | EventContainer = require('../EventContainer'), 5 | _ = require('underscore') 6 | 7 | /** 8 | * Turns email received from SMTP client into events 9 | * 10 | * @param {String} options.host The host or ip address to listen on 11 | * @param {Number} options.port The port to listen on 12 | * @param {Object} options.smtpOptions A list of options for the smtp server, see https://nodemailer.com/extras/smtp-server/ 13 | * 14 | * @extends BaseInput 15 
| * @constructor 16 | */ 17 | let SMTPInput = function (options) { 18 | let useOptions = _.extend({ port: 25, host: '0.0.0.0' }, options) 19 | 20 | useOptions.smtpOptions = _.extend( 21 | { 22 | disableReverseLookup: true, 23 | logger: false, 24 | secure: false, 25 | hideSTARTTLS: true, 26 | authOptional: true 27 | }, 28 | useOptions.smtpOptions 29 | ) 30 | 31 | useOptions.name = options.name || SMTPInput.NAME 32 | SMTPInput.super_.call(this, useOptions) 33 | 34 | let self = this 35 | 36 | useOptions.smtpOptions.onData = function (stream, session, callback) { 37 | self._handleInput(stream, session, callback) 38 | } 39 | 40 | try { 41 | self.server = new SMTPServer(useOptions.smtpOptions) 42 | } catch (e) { 43 | console.log(e) 44 | } 45 | 46 | self.server.on('error', err => { 47 | self.logger.error(self.name, 'SMTP Error', { error: err.stack || err }) 48 | }) 49 | 50 | self.logger.debug(self.name, 'starting up') 51 | 52 | //TODO: start on start event and not here 53 | self.server.listen(useOptions.port, useOptions.host) 54 | 55 | self.streamStash.on('start', () => { 56 | self.state = 1 57 | self.emit('started') 58 | }) 59 | 60 | self.streamStash.on('stopInput', () => { 61 | self.state = 0 62 | self.emit('stoppedInput') 63 | //TODO: proper pause 64 | }) 65 | 66 | self.streamStash.on('stop', () => { 67 | self.server.close(() => { 68 | self.emit('stopped') 69 | }) 70 | }) 71 | } 72 | 73 | SMTPInput.NAME = 'SMTP' 74 | SMTPInput.DESCRIPTION = 'Turns email received from SMTP client into events' 75 | 76 | util.inherits(SMTPInput, BaseInput) 77 | module.exports = SMTPInput 78 | 79 | SMTPInput.prototype._handleInput = function (stream, session, callback) { 80 | let self = this, 81 | buf = '' 82 | 83 | stream.on('data', data => { 84 | buf += data.toString() 85 | }) 86 | 87 | stream.on('end', () => { 88 | self._emitEvent(buf, event => { 89 | event.data.smtp = { 90 | from: session.envelope.mailFrom.address, 91 | to: session.envelope.rcptTo.map(addr => addr.address), 92 | remoteAddress: { 93 | host: session.remoteAddress, 94 | port: session.remotePort, 95 | }, 96 | clientHostname: session.hostNameAppearsAs 97 | } 98 | 99 | event.on('complete', () => { 100 | self.logger.debug(self.name, 'Acking email', event.state) 101 | 102 | if (event.state === EventContainer.STATE.FAILED) { 103 | callback(new Error('Failed to save event')) 104 | } else { 105 | callback() 106 | } 107 | }) 108 | 109 | self.emit('event', event) 110 | }) 111 | }) 112 | } 113 | -------------------------------------------------------------------------------- /lib/inputs/SocketInput.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | BaseInput = require('./BaseInput'), 3 | readline = require('readline'), 4 | ProxyProtocolHandler = require('./ProxyProtocol') 5 | 6 | /** 7 | * Turns data received from a socket server into events 8 | * You must call `listen` after adding this input 9 | * 10 | * @param {String} options.name 11 | * @param {Object} options.server A server object or anything that emits a readable stream 12 | * @param {Boolean} [options.proxyProtocol=false] True enables proxy protocol 13 | * @param {string} [options.connectionEvent='connection'] The connection event to listen on for new sockets 14 | * 15 | * @extends BaseInput 16 | * @constructor 17 | */ 18 | var SocketInput = function (options) { 19 | SocketInput.super_.call(this, options) 20 | 21 | var self = this 22 | 23 | self.name = options.name || SocketInput.NAME 24 | self.server = options.server 25 | 
self.connectionEvent = options.connectionEvent || 'connection' 26 | 27 | self.sockets = {} 28 | self._id = 0 29 | 30 | self.logger.debug(self.name, 'starting up') 31 | 32 | self._wireServer() 33 | 34 | if (options.proxyProtocol) { 35 | ProxyProtocolHandler(self.server) 36 | } 37 | 38 | self.streamStash.on('start', function () { 39 | self.state = 1 40 | for (var socketId in self.sockets) { 41 | self.sockets[socketId].resume() 42 | } 43 | self.emit('started') 44 | }) 45 | 46 | self.streamStash.on('stopInput', function () { 47 | self.state = 0 48 | 49 | for (var socketId in self.sockets) { 50 | self.sockets[socketId].pause() 51 | } 52 | 53 | self.emit('stoppedInput') 54 | }) 55 | 56 | self.streamStash.on('stop', function () { 57 | self.emit('stopped') 58 | }) 59 | } 60 | 61 | SocketInput.NAME = "SocketInput" 62 | SocketInput.DESCRIPTION = "Handles objects that emit connection events the can consumed as readable streams" 63 | 64 | util.inherits(SocketInput, BaseInput) 65 | module.exports = SocketInput 66 | 67 | SocketInput.prototype._wireSocket = function (socket) { 68 | //TODO: handle close, error events, etc 69 | var self = this 70 | 71 | socket._id = self._id++ 72 | self.sockets[socket._id] = socket 73 | 74 | socket.on('error', function (error) { 75 | self.logger.error(self.name, 'Connection # ' + socket.socketId + ' (' + self.remoteAddress + ') had an error', { error: error.stack || error }) 76 | socket.destroy() 77 | }) 78 | 79 | socket.on('close', function () { 80 | delete self.sockets[socket._id] 81 | }) 82 | 83 | socket._rl = readline.createInterface({ input: socket }) 84 | 85 | socket._rl.on('line', function (line) { 86 | self._emitEvent(line) 87 | }) 88 | } 89 | 90 | SocketInput.prototype._wireServer = function () { 91 | var self = this 92 | 93 | self.server.on('connection', function (socket) { 94 | if (self.state !== 1) { 95 | socket.pause() 96 | } 97 | self._wireSocket(socket) 98 | }) 99 | } 100 | -------------------------------------------------------------------------------- /lib/inputs/StaticFileInput.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | BaseInput = require('./BaseInput'), 3 | readline = require('readline'), 4 | fs = require('fs') 5 | 6 | /** 7 | * Consumes a single static file 8 | * 9 | * @param {String} options.file Path to the file to consume 10 | * 11 | * @extends BaseInput 12 | * @constructor 13 | */ 14 | var StaticFileInput = function (options) { 15 | var useOptions = options || {} 16 | 17 | useOptions.name = options.name || StaticFileInput.NAME 18 | 19 | StaticFileInput.super_.call(this, useOptions) 20 | 21 | if (!options.file) { 22 | throw new Error('A file path must be provided') 23 | } 24 | 25 | var self = this, 26 | rs = fs.createReadStream(options.file), 27 | rl = readline.createInterface({ input: rs }) 28 | 29 | rs.on('end', function () { 30 | self.logger.info(self.name, 'finished consuming the file', { file: options.file }) 31 | }) 32 | 33 | rs.on('error', function (error) { 34 | self.logger.error(self.name, 'error while consuming the file', { file: options.file, error: error.stack || error }) 35 | }) 36 | 37 | self.logger.debug(self.name, 'starting up') 38 | 39 | self.streamStash.on('start', function () { 40 | self.state = 1 41 | rl.resume() 42 | self.emit('started') 43 | }) 44 | 45 | self.streamStash.on('stopInput', function () { 46 | self.state = 0 47 | rl.pause() 48 | self.emit('stoppedInput') 49 | }) 50 | 51 | self.streamStash.on('stop', function () { 52 | self.state = 0 53 | 
rl.pause() 54 | self.emit('stopped') 55 | }) 56 | 57 | rl.on('line', function (line) { 58 | self._emitEvent(line) 59 | }) 60 | } 61 | 62 | StaticFileInput.NAME = 'StaticFile' 63 | StaticFileInput.DESCRIPTION = 'Consumes a single static file' 64 | 65 | util.inherits(StaticFileInput, BaseInput) 66 | module.exports = StaticFileInput 67 | -------------------------------------------------------------------------------- /lib/inputs/StdInInput.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | BaseInput = require('./BaseInput'), 3 | readline = require('readline') 4 | 5 | /** 6 | * Turns data received from stdin into events 7 | * 8 | * @param {Object} [options.stream=process.stdin] The stream to use for reading data from 9 | * 10 | * @extends BaseInput 11 | * @constructor 12 | */ 13 | var StdInInput = function (options) { 14 | var useOptions = options || {}, 15 | buf = '' 16 | 17 | useOptions.name = options.name || StdInInput.NAME 18 | 19 | StdInInput.super_.call(this, useOptions) 20 | 21 | var self = this, 22 | stream = options.stream || process.stdin 23 | 24 | self.logger.debug(self.name, 'starting up') 25 | 26 | self.streamStash.on('start', function () { 27 | self.state = 1 28 | stream.resume() 29 | self.emit('started') 30 | }) 31 | 32 | self.streamStash.on('stopInput', function () { 33 | self.state = 0 34 | stream.pause() 35 | self.emit('stoppedInput') 36 | }) 37 | 38 | self.streamStash.on('stop', function () { 39 | self.emit('stopped') 40 | }) 41 | 42 | var rl = readline.createInterface({ input: stream }) 43 | 44 | rl.on('line', function (line) { 45 | self._emitEvent(line) 46 | }) 47 | } 48 | 49 | StdInInput.NAME = "StdIn" 50 | StdInInput.DESCRIPTION = "Turns new line delimited data into events" 51 | 52 | util.inherits(StdInInput, BaseInput) 53 | module.exports = StdInInput 54 | -------------------------------------------------------------------------------- /lib/inputs/index.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | RELPInput: require('./RELPInput'), 3 | StdInInput: require('./StdInInput'), 4 | SocketInput: require('./SocketInput'), 5 | StaticFileInput: require('./StaticFileInput'), 6 | SMTPInput: require('./SMTPInput'), 7 | HTTPInput: require('./HTTPInput') 8 | } 9 | -------------------------------------------------------------------------------- /lib/outputs/BlackholeOutput.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | EventEmitter = require('events').EventEmitter 3 | 4 | /** 5 | * Outputs events to nowhere 6 | * 7 | * @param {Object} options Configuration object 8 | * @param {StreamStash} options.streamStash The main stream stash object 9 | * @param {Object} options.logger A logger to use for logging 10 | * @param {String} [options.name='Blackhole'] A name to use for logging, must be unique to other output plugins 11 | * 12 | * @constructor 13 | */ 14 | var BlackholeOutput = function (options) { 15 | BlackholeOutput.super_.call(this) 16 | 17 | var self = this, 18 | streamStash = options.streamStash, 19 | logger = options.logger, 20 | state = 0 21 | 22 | self.name = options.name || 'Blackhole' 23 | 24 | logger.debug(self.name, 'starting up') 25 | 26 | streamStash.once('start', function () { 27 | state = 1 28 | self.emit('started') 29 | }) 30 | 31 | streamStash.once('stop', function () { 32 | state = 0 33 | self.emit('stopped') 34 | }) 35 | 36 | streamStash.on('output', function 
(eventContainer) { 37 | if (state !== 1) { 38 | return 39 | } 40 | 41 | self.emit('complete', eventContainer) 42 | }) 43 | } 44 | 45 | BlackholeOutput.NAME = "Blackhole" 46 | BlackholeOutput.DESCRIPTION = "Outputs events to nowhere" 47 | 48 | util.inherits(BlackholeOutput, EventEmitter) 49 | module.exports = BlackholeOutput 50 | -------------------------------------------------------------------------------- /lib/outputs/ElasticSearchOutput.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | EventEmitter = require('events').EventEmitter, 3 | http = require('http'), 4 | https = require('https'), 5 | _ = require('underscore') 6 | 7 | /** 8 | * Outputs events to ElasticSearch vi REST 9 | * TODO: index prefix or full name 10 | * 11 | * @param {Object} options Configuration object 12 | * @param {StreamStash} options.streamStash The main stream stash object 13 | * @param {Object} options.logger A logger to use for logging 14 | * @param {String} options.hostname The elastic search host to send requests to 15 | * @param {Boolean} [options.tls=false] Whether or not to use tls (true for https, false for http) 16 | * @param {String} [options.auth] A basic auth header to use in the http request, format: `username:password` 17 | * @param {String} [options.indexPrefix='logstash'] The elasticsearch index prefix to use 18 | * @param {object} [options.agent] An http or https agent, useful for providing your own CA, among other things 19 | * @param {Number} [options.batchSize=100] The ideal size for each batch insertion request 20 | * @param {Number} [options.batchTimeout=1000] The time to wait for a batch to reach `options.batchSize`. If the batch size 21 | * isn't met within this time the insertion request is sent with whatever is in the current batch 22 | * @param {Number} [options.port=9200] The port on the host to send requests to 23 | * @param {String} [options.typeField=] The top level field in the event#data object to use for the type value 24 | * @param {String} [options.defaultType='unknown'] The default type value if typeField is undefined 25 | * @param {Object} [options.headers] Headers to use when sending http(s) requests to elasticsearch 26 | * @param {Function} [options.preRequest] A function that will be called before sending the bulk request 27 | * @param {String} [options.timestampField='timestamp'] The top level field in the event#data object to use for the timestamp value 28 | * @param {String} [options.idField=undefined] The top level field in the event#data object to use for the id value 29 | * @param {String} [options.name='ElasticSearch'] A name to use for logging, must be unique to other output plugins 30 | * 31 | * @constructor 32 | */ 33 | let ElasticSearchOutput = function (options) { 34 | ElasticSearchOutput.super_.call(this) 35 | 36 | let self = this, 37 | useOptions = options || {} 38 | 39 | if (!useOptions.streamStash) { 40 | throw new Error('The streamstash object was not provided to the constructor') 41 | } 42 | 43 | if (!useOptions.hostname) { 44 | throw new Error('No hostname was provided') 45 | } 46 | 47 | self.name = useOptions.name || 'ElasticSearch' 48 | self.indexPrefix = useOptions.indexPrefix || 'logstash' 49 | 50 | let streamStash = useOptions.streamStash 51 | 52 | if (useOptions.tls) { 53 | self.agent = useOptions.agent || new https.Agent({ maxSockets: 1000, rejectUnauthorized: true }) 54 | self.driver = https 55 | } else { 56 | self.agent = useOptions.agent || new http.Agent({ maxSockets: 1000 
}) 57 | self.driver = http 58 | } 59 | 60 | self.httpOptions = { 61 | hostname: useOptions.hostname, 62 | port: useOptions.port || 9200, 63 | path: '/_bulk', 64 | method: 'POST', 65 | agent: self.agent 66 | } 67 | 68 | if (useOptions.auth) { 69 | self.httpOptions.auth = useOptions.auth 70 | } 71 | 72 | self.httpOptions.headers = {} 73 | if (useOptions.headers) { 74 | self.httpOptions.headers = useOptions.headers 75 | } 76 | 77 | self.httpOptions.headers['content-type'] = 'application/json' 78 | 79 | self.queueOptions = { 80 | batchSize: useOptions.batchSize || 100, 81 | timeout: useOptions.batchTimeout || 1000 82 | } 83 | 84 | self.stats = { 85 | requests: 0, 86 | pendingRequests: 0 87 | } 88 | 89 | self.preRequest = function (options) { return options } 90 | if (useOptions.preRequest) { 91 | self.preRequest = useOptions.preRequest 92 | } 93 | 94 | self.logger = useOptions.logger 95 | self.typeField = useOptions.typeField 96 | self.defaultType = useOptions.defaultType || 'unknown' 97 | self.timestampField = useOptions.timestampField || 'timestamp' 98 | self.idField = useOptions.idField 99 | 100 | self.queue = { 101 | items: [], 102 | timer: null 103 | } 104 | 105 | self.state = 0 106 | 107 | self.logger.debug(self.name, 'starting up') 108 | 109 | streamStash.once('start', function () { 110 | self.state = 1 111 | self.emit('started') 112 | }) 113 | 114 | streamStash.once('stop', function () { 115 | self.state = 0 116 | 117 | //TODO: make sure agent has completed all requests 118 | 119 | self.emit('stopped') 120 | }) 121 | 122 | streamStash.on('output', function (eventContainer) { 123 | self._handleOutput(eventContainer) 124 | }) 125 | 126 | setInterval( 127 | function () { 128 | //TODO: add how long a request is taking 129 | streamStash.telemetry.gauge('outputs.' + self.name + '.total_requests', self.stats.requests) 130 | streamStash.telemetry.gauge('outputs.' + self.name + '.current_requests', self.stats.pendingRequests) 131 | streamStash.telemetry.gauge('outputs.' 
+ self.name + '.current_batch_size', self.queue.items.length) 132 | }, 133 | 5000 134 | ) 135 | } 136 | 137 | ElasticSearchOutput.NAME = "ElasticSearch" 138 | ElasticSearchOutput.DESCRIPTION = "Outputs events to ElasticSearch vi REST" 139 | 140 | util.inherits(ElasticSearchOutput, EventEmitter) 141 | module.exports = ElasticSearchOutput 142 | 143 | /** 144 | * Prepares and queues an event for insertion into Elasticsearch 145 | * 146 | * @param {EventContainer} eventContainer The event emitted from Streamstash 147 | * 148 | * @private 149 | */ 150 | ElasticSearchOutput.prototype._handleOutput = function (eventContainer) { 151 | if (this.state !== 1) { 152 | return 153 | } 154 | 155 | let self = this 156 | 157 | this.queue.items.push(eventContainer) 158 | 159 | if (!self.queue.timer) { 160 | self.queue.timer = setTimeout( 161 | function () { 162 | self._performPost(true) 163 | }, 164 | self.queueOptions.timeout 165 | ) 166 | } 167 | 168 | self._performPost() 169 | } 170 | 171 | /** 172 | * Attempts to put data into Elasticsearch 173 | * Will only perform the request if the queue has hit the configured batchSize limit or if the action was forced due to 174 | * batchTimeout firing 175 | * 176 | * @param {Boolean} [forced] Whether or not the action is being forced due to batchTimeout firing 177 | * 178 | * @private 179 | */ 180 | ElasticSearchOutput.prototype._performPost = function (forced) { 181 | let self = this 182 | 183 | if (self.queue.items.length < self.queueOptions.batchSize && !forced) { 184 | return 185 | } 186 | 187 | //Copy the queue into here and allow a new queue to fill up 188 | //TODO: Clean this queueing up! 189 | let queue = self.queue 190 | 191 | self.queue = { 192 | items: [], 193 | write: [], 194 | timer: null 195 | } 196 | 197 | clearTimeout(queue.timer) 198 | 199 | let write = '' 200 | 201 | for (let eventId in queue.items) { 202 | //TODO: if event.data[self.timestampField] does not exist we should log a warning 203 | 204 | let event = queue.items[eventId], 205 | type = event.data[self.typeField] || self.defaultType, 206 | timestamp = event.data[self.timestampField] || new Date(), 207 | dateString = timestamp.getFullYear(), 208 | month = timestamp.getMonth() + 1, 209 | day = timestamp.getDate(), 210 | useData = util._extend({}, event.data) 211 | 212 | dateString += '.' + ((String(month).length === 1) ? '0' + month : month) 213 | dateString += '.' + ((String(day).length === 1) ? '0' + day : day) 214 | 215 | let header = { "index": { "_index": self.indexPrefix + '-' + dateString, "_type": type } } 216 | 217 | if (self.idField && event.data.hasOwnProperty(self.idField)) { 218 | header['index']['_id'] = event.data[self.idField] 219 | } else { 220 | header['index']['_id'] = event.eventId 221 | } 222 | 223 | write += JSON.stringify(header) + '\n' 224 | 225 | // Remove fields that elasticseach 2.0 cant have in the source 226 | //TODO: include these fields in the index line above? 
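// The header + document pair written for each event follows the _bulk NDJSON format,
// e.g. (hypothetical values):
//   {"index":{"_index":"logstash-2017.01.31","_type":"syslog","_id":"42"}}
//   {"message":"...","timestamp":"2017-01-31T00:00:00.000Z"}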
227 | delete useData['_type'] 228 | delete useData['_uid'] 229 | delete useData['_id'] 230 | delete useData['_type'] 231 | delete useData['_source'] 232 | delete useData['_all'] 233 | delete useData['_parent'] 234 | delete useData['_field_names'] 235 | delete useData['_routing'] 236 | delete useData['_index'] 237 | delete useData['_size'] 238 | delete useData['_timestamp'] 239 | delete useData['_ttl'] 240 | 241 | write += JSON.stringify(useData) + '\n' 242 | } 243 | 244 | if (write === '') { 245 | // Don't bother sending emtpy requests 246 | return 247 | } 248 | 249 | function handleResponse (response) { 250 | self.stats.pendingRequests-- 251 | 252 | if (response.statusCode === 200) { 253 | let allData = '' 254 | response.on('data', function (data) { 255 | allData += String(data) 256 | }) 257 | 258 | response.on('end', function () { 259 | self._finishRequest(allData, queue) 260 | }) 261 | 262 | return 263 | } 264 | 265 | response.on('data', function (data) { 266 | self.logger.error(self.name, 'Failed to write events', { status_code: response.statusCode, elasticsearch_response: data.toString(), request_body: write }) 267 | self.emit('failed', queue.items) 268 | }) 269 | } 270 | 271 | let request = self.driver.request(self.preRequest(self.httpOptions, write), handleResponse) 272 | 273 | request.on('error', function (error) { 274 | self.stats.pendingRequests-- 275 | self.emit('failed', queue.items) 276 | self.logger.error(self.name, 'Error during request', { error: error.stack }) 277 | }) 278 | 279 | self.stats.pendingRequests++ 280 | self.stats.requests++ 281 | request.write(write) 282 | request.end() 283 | } 284 | 285 | /** 286 | * Deals with consuming the es bulk index response 287 | * 288 | * @param {String} data The es response string 289 | * @param {Object} queue The queue object for this request 290 | * 291 | * @private 292 | */ 293 | ElasticSearchOutput.prototype._finishRequest = function (data, queue) { 294 | let self = this 295 | 296 | // Shortcut to avoid json decoding the whole return blob. 
if there is no `"errors":true` then all was good 297 | if (data.indexOf('errors":t') === -1) { 298 | self.emit('complete', queue.items) 299 | return 300 | } 301 | 302 | let failed = [] 303 | 304 | try { 305 | let res = JSON.parse(data), 306 | badIds = {} 307 | 308 | res.items.forEach(function (esEvent) { 309 | // If the http response code is 300 or above then consider it a failure 310 | if (esEvent['index']['status'] > 299) { 311 | badIds[esEvent['index']['_id']] = esEvent['index']['error'] 312 | } 313 | }) 314 | 315 | for (let i = queue.items.length - 1; i >= 0; i--) { 316 | if (badIds.hasOwnProperty(queue.items[i].eventId) === true) { 317 | self.logger.error( 318 | self.name, 319 | 'Event ' + queue.items[i].eventId + ' could not be inserted into elasticsearch', 320 | { 321 | elasticsearch_error: JSON.stringify(badIds[queue.items[i].eventId]), 322 | event_data: JSON.stringify(queue.items[i].data) 323 | } 324 | ) 325 | failed.push(queue.items[i]) 326 | queue.items.splice(i, 1) 327 | } 328 | } 329 | 330 | if (queue.items.length > 0) { 331 | self.emit('complete', queue.items) 332 | } 333 | 334 | if (failed.length > 0) { 335 | self.emit('failed', failed) 336 | } 337 | 338 | } catch (error) { 339 | self.logger.error(self.name, 'Error consuming elasticsearch response', { error: error.stack, elasticsearch_response: data }) 340 | // We aren't sure where we failed so every event fails 341 | if (queue.items.length > 0) { 342 | self.emit('failed', queue.items) 343 | } 344 | 345 | if (failed.length > 0) { 346 | self.emit('failed', failed) 347 | } 348 | } 349 | } 350 | -------------------------------------------------------------------------------- /lib/outputs/StdOutOutput.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | EventEmitter = require('events').EventEmitter, 3 | _ = require('underscore') 4 | 5 | /** 6 | * Outputs events to stdout 7 | * 8 | * @param {Object} options Configuration object 9 | * @param {StreamStash} options.streamStash The main stream stash object 10 | * @param {Object} options.logger A logger to use for logging 11 | * @param {String} [options.name='StdOut'] A name to use for logging, must be unique to other output plugins 12 | * @param [options.prettyPrint=false] True will enable pretty printing the event 13 | * @param [options.stream=process.stdout] The stream to write the event output to 14 | * @param {Object} [options.fields] Extra fields to add on each event 15 | * 16 | * @constructor 17 | */ 18 | var StdOutOutput = function (options) { 19 | StdOutOutput.super_.call(this) 20 | 21 | var self = this, 22 | streamStash = options.streamStash, 23 | logger = options.logger, 24 | stream = options.stream || process.stdout, 25 | fields = options.fields || {}, 26 | indent = options.prettyPrint ? 
4 : 0, 27 | state = 0 28 | 29 | self.name = options.name || 'StdOut' 30 | 31 | logger.debug(self.name, 'starting up') 32 | 33 | streamStash.once('start', function () { 34 | state = 1 35 | self.emit('started') 36 | }) 37 | 38 | streamStash.once('stop', function () { 39 | state = 0 40 | self.emit('stopped') 41 | }) 42 | 43 | streamStash.on('output', function (eventContainer) { 44 | if (state !== 1) { 45 | return 46 | } 47 | 48 | var output = _.extend({}, fields, eventContainer.data) 49 | stream.write(JSON.stringify(output, void 0, indent) + '\n') 50 | self.emit('complete', eventContainer) 51 | }) 52 | } 53 | 54 | StdOutOutput.NAME = "StdOut" 55 | StdOutOutput.DESCRIPTION = "Outputs events to stdout" 56 | 57 | util.inherits(StdOutOutput, EventEmitter) 58 | module.exports = StdOutOutput 59 | -------------------------------------------------------------------------------- /lib/outputs/index.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | BlackholeOutput: require('./BlackholeOutput'), 3 | ElasticSearchOutput: require('./ElasticSearchOutput'), 4 | StdOutOutput: require('./StdOutOutput') 5 | } 6 | -------------------------------------------------------------------------------- /lib/parsers/goAuditParser.js: -------------------------------------------------------------------------------- 1 | var constants = require('./goAuditParserConstants') 2 | 3 | //TODO: attach unknown fields to the data object so it can be understood what is not being fully parsed 4 | 5 | /** 6 | * Parses go-audit log lines 7 | * Assumes the message has NOT been json decoded already 8 | * 9 | * @param {String} message Message to try and parse 10 | * 11 | * @returns {parserResult} 12 | */ 13 | module.exports = function (message) { 14 | var result = { data: void 0, error: void 0}, 15 | data = {} 16 | 17 | try { 18 | data = JSON.parse(message) 19 | } catch (error) { 20 | result.error = error.toString() 21 | return result 22 | } 23 | 24 | if (data.hasOwnProperty('messages') === false) { 25 | result.error = 'No messages property' 26 | return result 27 | } 28 | 29 | var uidMap = data['uid_map'] || {} 30 | 31 | result.data = { 32 | timestamp: new Date(data.timestamp * 1000), 33 | sequence: data.sequence, 34 | unknown: [] 35 | } 36 | 37 | var groups = gatherTypes(data) 38 | 39 | for (var type in groups) { 40 | var msgs = groups[type] 41 | 42 | switch (type) { 43 | case constants.types.config_change: 44 | //TODO: parseConfigChange(msg) 45 | break 46 | case constants.types.syscall: 47 | parseSyscall(msgs, result, uidMap) 48 | break 49 | case constants.types.execve: 50 | parseExecve(msgs, result) 51 | break 52 | case constants.types.path: 53 | parsePath(msgs, result, uidMap) 54 | break 55 | case constants.types.cwd: 56 | parseCwd(msgs, result) 57 | break 58 | case constants.types.sockaddr: 59 | parseSockaddr(msgs, result) 60 | break 61 | case constants.types.proctitle: 62 | parseProctitle(msgs, result) 63 | break 64 | default: 65 | result.data.unknown.push(msgs) 66 | result.error = 'unknown kauditd type ' + type 67 | } 68 | } 69 | 70 | buildMessage(result) 71 | 72 | return result 73 | } 74 | 75 | module.exports.propertyName = 'go-audit' 76 | 77 | var parseConfigChange = function (msg) { 78 | //audit_pid=14842 old=14842 auid=1000 ses=37 res=1 79 | } 80 | 81 | /** 82 | * Parses types.syscall messages 83 | * 84 | * @param {String[]} msgs List of messages to parse 85 | * @param {Object} result The final result object to modify 86 | * @param {Object} uidMap An object of uid 
mapping to use for mapping uids 87 | */ 88 | var parseSyscall = function (msgs, result, uidMap) { 89 | var msg = msgs.join(' ') 90 | 91 | //TODO: It doesn't seem like we will get much info from these though 92 | // a0=56446febeb60 93 | // a1=56446febeaf8 94 | // a2=56446febeb48 95 | // a3=0 96 | 97 | // items=2 TODO: this appears to be a reference to how many 1302 msgs to expect later 98 | 99 | //TODO: convert to name and id 100 | // gid=0 101 | // egid=0 102 | // sgid=0 103 | // fsgid=0 104 | 105 | //TODO: convert to actual null? 106 | // key=(null) 107 | 108 | //TODO: check if syscall is already defined because that would be an error 109 | //TODO: don't split on space in a quote 110 | result.data.syscall = splitFields(msg) 111 | 112 | mapArch(result.data.syscall) 113 | mapUid('uid', result.data.syscall, uidMap) 114 | mapUid('auid', result.data.syscall, uidMap) 115 | mapUid('euid', result.data.syscall, uidMap) 116 | mapUid('fsuid', result.data.syscall, uidMap) 117 | mapUid('suid', result.data.syscall, uidMap) 118 | 119 | result.data.syscall.key = convertValue(result.data.syscall.key, true) 120 | 121 | // Remap some values 122 | result.data.syscall.id = result.data.syscall.syscall 123 | delete result.data.syscall.syscall 124 | 125 | result.data.syscall.session_id = result.data.syscall.ses 126 | delete result.data.syscall.ses 127 | 128 | if (constants.syscalls.hasOwnProperty(result.data.syscall.arch.name)) { 129 | if (constants.syscalls[result.data.syscall.arch.name].hasOwnProperty(result.data.syscall.id)) { 130 | result.data.syscall.name = constants.syscalls[result.data.syscall.arch.name][result.data.syscall.id] 131 | } 132 | } 133 | 134 | result.data.syscall.command = convertValue(result.data.syscall.comm || '', true) 135 | delete result.data.syscall.comm 136 | 137 | result.data.syscall.executable = convertValue(result.data.syscall.exe || '', true) 138 | delete result.data.syscall.exe 139 | } 140 | 141 | /** 142 | * Parses types.execve messages 143 | * 144 | * @param {String[]} msgs List of messages to parse 145 | * @param {Object} result The final result object to modify 146 | */ 147 | var parseExecve = function (msgs, result) { 148 | var msg = msgs.join(' '), 149 | execve = splitFields(msg) 150 | 151 | if (execve.hasOwnProperty('argc') === false) { 152 | //TODO: error! 153 | return 154 | } 155 | 156 | var argc = parseInt(execve.argc) 157 | 158 | delete execve['argc'] 159 | 160 | execve.command = '' 161 | 162 | for (var i = 0; i < argc; i++) { 163 | var find = 'a' + i 164 | smashArgs(find, execve) 165 | 166 | if (execve.hasOwnProperty(find) === false) { 167 | //TODO: this is an error 168 | continue 169 | } 170 | 171 | var argv = convertValue(execve[find], true) 172 | execve.command += argv + ' ' 173 | delete execve[find] 174 | } 175 | 176 | execve.command = execve.command.trim() 177 | result.data.execve = execve 178 | } 179 | 180 | /** 181 | * Parses types.path messages 182 | * 183 | * @param {String[]} msgs List of messages to parse 184 | * @param {Object} result The final result object to modify 185 | * @param {Object} uidMap An object of uid mapping to use for mapping uids 186 | */ 187 | var parsePath = function (msgs, result, uidMap) { 188 | //TODO: we should know how many of these to parse from result.syscall 189 | var paths = [] 190 | 191 | for (var i in msgs) { 192 | var entries = splitFields(msgs[i]) 193 | delete msgs[i] 194 | 195 | mapUid('ouid', entries, uidMap) 196 | //TODO: ogid is not mapped 197 | //TODO: parse mode? 
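// A kauditd PATH record is a run of key=value pairs, e.g. (hypothetical):
//   item=0 name="/tmp/foo" inode=1234 ouid=0 nametype=CREATE
// `item` gives the record's position in the resulting paths array.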
198 | 199 | entries.name = convertValue(entries.name || '', true) 200 | 201 | //TODO: check for .item and that parseint works 202 | var index = parseInt(entries.item) 203 | delete entries['item'] 204 | paths[index] = entries 205 | } 206 | 207 | result.data.paths = paths 208 | } 209 | 210 | /** 211 | * Parses types.cwd messages 212 | * 213 | * @param {String[]} msgs List of messages to parse 214 | * @param {Object} result The final result object to modify 215 | */ 216 | var parseCwd = function (msgs, result) { 217 | var msg = msgs.join(' '), 218 | data = splitFields(msg) 219 | 220 | result.data.cwd = convertValue(data.cwd || '', true) 221 | } 222 | 223 | var parseSockaddr = function (msgs, result) { 224 | var msg = msgs.join(' '), 225 | data = splitFields(msg) 226 | 227 | result.data.socket_address = parseAddr(data.saddr) 228 | } 229 | 230 | var parseProctitle = function (msgs, result) { 231 | var msg = msgs.join(' '), 232 | data = splitFields(msg) 233 | 234 | result.data.proctitle = convertValue(data.proctitle || '', true) 235 | } 236 | 237 | /** 238 | * Gathers all aN[N] object into a single string. 239 | * kauditd will break apart really large arguments into a sub array. 240 | * 241 | * @param {String} arg The arg name to look for, like `a1` 242 | * @param {Object} data The data object containing the args, modified in place 243 | */ 244 | var smashArgs = function (arg, data) { 245 | if (data.hasOwnProperty(arg + '_len') === false) { 246 | return 247 | } 248 | 249 | var argLen = parseInt(data[arg + '_len']), 250 | val = '' 251 | 252 | delete data[arg + '_len'] 253 | 254 | //TODO: don't bother if arglen doesnt parse 255 | for (var i = 0; i >= 0; i++) { 256 | var subArg = arg + '[' + i + ']' 257 | if (data.hasOwnProperty(subArg) === false) { 258 | //TODO: error! 259 | break 260 | } 261 | 262 | val += data[subArg] 263 | delete data[subArg] 264 | } 265 | 266 | //TODO: check length 267 | data[arg] = val 268 | } 269 | 270 | /** 271 | * Turns a flat array into an object keyed by msg.type 272 | * Each key will be an array of msg.data 273 | * 274 | * @param {Object} data 275 | * 276 | * @returns {{string:string[]}} 277 | */ 278 | var gatherTypes = function (data) { 279 | var groups = {}, 280 | str = '' 281 | 282 | for (var i in data.messages) { 283 | var msg = data.messages[i] 284 | 285 | if (msg.hasOwnProperty('type') === false) { 286 | result.error = 'A message was missing the type property' 287 | return result 288 | } 289 | 290 | if (groups.hasOwnProperty(msg.type) === false) { 291 | groups[msg.type] = [] 292 | } 293 | 294 | groups[msg.type].push(msg.data) 295 | delete data.messages[i] 296 | } 297 | 298 | return groups 299 | } 300 | 301 | /** 302 | * Converts a value to what it should be, for the most part. 303 | * Currently only strips quotes and optionally converts hex strings to actual strings 304 | * 305 | * @param {String} str The string to convert 306 | * @param {boolean} parseHex If true, will treat `str` as a hex string as a last resort. 
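 *
 * For example (hypothetical inputs), convertValue('"/bin/ls"') strips the quotes and
 * returns '/bin/ls', while convertValue('2f62696e2f6c73', true) decodes the hex string
 * to the same value; null (00) bytes become spaces and the result is trimmed.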
307 | * 308 | * @returns {*} The converted string representation of `str` 309 | */ 310 | var convertValue = function (str, parseHex) { 311 | if (str[0] === '"') { 312 | return str.slice(1,-1) 313 | 314 | } else if (parseHex) { 315 | var newStr = ''; 316 | 317 | for (var i = 0; i < str.length; i += 2) { 318 | var v = parseInt(str.substr(i, 2), 16) 319 | if (v) { 320 | newStr += String.fromCharCode(v) 321 | } else { 322 | newStr += ' ' 323 | } 324 | } 325 | 326 | return newStr.trim() 327 | } 328 | 329 | return str 330 | } 331 | 332 | var parseAddr = function (str) { 333 | if (str.length < 2) { 334 | return { unknown: str } 335 | } 336 | 337 | var family = parseInt(str.substr(0, 2), 16) + (256 * parseInt(str.substr(2, 2), 16)) 338 | 339 | if (!constants.addressFamilies.hasOwnProperty(family)) { 340 | return { unknown: str } 341 | } 342 | 343 | var details = { 344 | family: constants.addressFamilies[family], 345 | } 346 | 347 | switch (family) { 348 | case 1: // unix/local/file 349 | parseAddrLocal(str, details) 350 | break 351 | 352 | case 2: // inet 353 | parseAddrInet(str, details) 354 | break 355 | 356 | case 10: // inet6 357 | parseAddrInet6(str, details) 358 | break 359 | 360 | default: 361 | details.unknown = str.substr(4) 362 | } 363 | 364 | return details 365 | } 366 | 367 | var parseAddrInet = function (str, details) { 368 | if (str.length < 16) { 369 | details.unknown = str.substr(2) 370 | return 371 | } 372 | 373 | details.port = (parseInt(str.substr(4, 2), 16) * 256) + parseInt(str.substr(6, 2), 16) 374 | 375 | details.ip = parseInt(str.substr(8, 2), 16) + '.' + 376 | parseInt(str.substr(10, 2), 16) + '.' + 377 | parseInt(str.substr(12, 2), 16) + '.' + 378 | parseInt(str.substr(14, 2), 16) 379 | 380 | if (str.length > 16) { 381 | details.unknown = str.substr(16) 382 | } 383 | } 384 | 385 | var parseAddrInet6 = function (str, details) { 386 | if (str.length < 56) { 387 | details.unknown = str.substr(2) 388 | return 389 | } 390 | 391 | details.port = (parseInt(str.substr(4, 2), 16) * 256) + parseInt(str.substr(6, 2), 16) 392 | //TODO: Figure out translation 393 | details.flow_info = str.substr(8, 8) 394 | 395 | details.ip = str.substr(16, 4).toLowerCase() + ':' + 396 | str.substr(20, 4).toLowerCase() + ':' + 397 | str.substr(24, 4).toLowerCase() + ':' + 398 | str.substr(28, 4).toLowerCase() + ':' + 399 | str.substr(32, 4).toLowerCase() + ':' + 400 | str.substr(36, 4).toLowerCase() + ':' + 401 | str.substr(40, 4).toLowerCase() + ':' + 402 | str.substr(44, 4).toLowerCase() 403 | 404 | //TODO: Figure out translation 405 | details.scope_id = str.substr(48, 8) 406 | 407 | if (str.length > 56) { 408 | details.unknown = str.substr(56) 409 | } 410 | } 411 | 412 | var parseAddrLocal = function (str, details) { 413 | if (str.length < 5) { 414 | details.unknown = str.substr(2) 415 | return 416 | } 417 | 418 | //Find the next null byte 419 | var endPos = str.indexOf('00', 4) - 4 420 | //TODO: sometimes the path is null prefixed! 
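// The saddr hex blob begins with the address family read as a little-endian uint16
// (4 hex chars); for a unix/local socket the path follows, terminated by a 00 byte.
// e.g. (hypothetical) saddr '01002f746d702f736f636b00' decodes to path '/tmp/sock'.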
421 | if (endPos < 0) { 422 | endPos = str.length - 4 423 | } 424 | 425 | details.path = convertValue(str.substr(4, endPos), true) 426 | 427 | if (str.length > endPos + 5) { 428 | details.unknown = str.substr(endPos + 4) 429 | } 430 | } 431 | 432 | /** 433 | * Splits a string containing many `key=value` pairs into an object of key: value pairs 434 | * 435 | * @param {String} str The string to break apart 436 | * 437 | * @returns {Object} An object of key: value pairs 438 | */ 439 | var splitFields = function (str) { 440 | var groups = str.split(" "), 441 | ret = {} 442 | 443 | for (var i in groups) { 444 | var group = groups[i], 445 | splitAt = group.indexOf('=') 446 | 447 | if (splitAt < 1) { 448 | //TODO: error! 449 | continue 450 | } 451 | 452 | //TODO: if it already exists??? 453 | ret[group.slice(0, splitAt)] = group.slice(splitAt + 1) 454 | } 455 | 456 | return ret 457 | } 458 | 459 | /** 460 | * Converts the `arch` property, which should be a hex string, to an object. 461 | * Modifies the data object in place 462 | * 463 | * @param {Object} data The object to map out the `arch` property 464 | */ 465 | var mapArch = function(data) { 466 | if (data.hasOwnProperty('arch') === false) { 467 | return 468 | } 469 | 470 | var tArch = parseInt(data.arch, 16) 471 | 472 | data.arch = { 473 | bits: 'unknown', 474 | endianness: 'unknown', 475 | name: 'unknown' 476 | } 477 | 478 | if ((tArch & constants.arch['64bit']) === 0) { 479 | data.arch.bits = '32' 480 | } else { 481 | tArch ^= constants.arch['64bit'] 482 | data.arch.bits = '64' 483 | } 484 | 485 | if ((tArch & constants.arch.little_endian) === 0) { 486 | data.arch.endianness = 'big' 487 | } else { 488 | tArch ^= constants.arch.little_endian 489 | data.arch.endianness = 'little' 490 | } 491 | 492 | if ((tArch & constants.arch.convention_mips64_n32) !== 0) { 493 | tArch ^= constants.arch.convention_mips64_n32 494 | } 495 | 496 | if (constants.machines.hasOwnProperty(tArch)) { 497 | data.arch.name = constants.machines[tArch] 498 | } else { 499 | //TODO: 500 | console.log('SOMETHING IS WRONG') 501 | } 502 | } 503 | 504 | /** 505 | * Maps a given uid to a name. 506 | * Modifies the data object in place 507 | * 508 | * @param {String} findUid The name of the uid in data 509 | * @param {Object} data The object possibly containing `findUid` 510 | * @param {Object} uidMap Keys are uids, values are usernames. 511 | */ 512 | var mapUid = function (findUid, data, uidMap) { 513 | if (data.hasOwnProperty(findUid)) { 514 | uid = data[findUid] 515 | 516 | // Overflow uint32 is `null` 517 | if (uid === '4294967295') { 518 | data[findUid] = null 519 | return 520 | } 521 | 522 | data[findUid] = { 523 | name: uidMap.hasOwnProperty(uid) ? uidMap[uid] : 'UNKNOWN_USER', 524 | id: uid 525 | } 526 | } 527 | } 528 | 529 | var truncateStr = function (str, len) { 530 | if (str.length > len) { 531 | return str.substring(0, len - 3) + '...' 532 | } 533 | 534 | return str 535 | } 536 | 537 | var buildMessage = function (result) { 538 | var data = result.data, 539 | message = '' 540 | 541 | if (data.hasOwnProperty('syscall')) { 542 | if (data.syscall.hasOwnProperty('auid') && data.syscall.auid && data.syscall.auid.id !== data.syscall.uid.id) { 543 | message += data.syscall.auid.name + ' as ' 544 | } 545 | 546 | // Who did it? 547 | if (data.syscall.hasOwnProperty('uid') && data.syscall.uid) { 548 | message += data.syscall.uid.name + ' ' 549 | } 550 | 551 | // Succeeded or failed? 
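// e.g. with auid=jane, uid=root, success=yes and an execve of `whoami`, the finished
// message would read roughly (hypothetical values):
//   'jane as root succeeded to execve `whoami` via `/usr/bin/whoami`'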
552 | if (data.syscall.hasOwnProperty('success')) { 553 | if (data.syscall.success === 'yes') { 554 | message += 'succeeded to ' 555 | } else { 556 | message += 'failed to ' 557 | } 558 | } 559 | 560 | // To do what? 561 | var created, deleted, file, path, 562 | includeCmd = false 563 | 564 | message += data.syscall.name + ' ' 565 | 566 | if (data.hasOwnProperty('execve') && data.execve.hasOwnProperty('command')) { 567 | path = data.execve.command.substring(0, data.execve.command.indexOf(' ') || 0) 568 | message += '`' + truncateStr(data.execve.command, 25) + '` ' 569 | 570 | } else if (data.syscall.hasOwnProperty('name')) { 571 | switch (data.syscall.name) { 572 | case 'rename': 573 | deleted = findPathType(data.paths, 'DELETE') 574 | created = findPathType(data.paths, 'CREATE') 575 | 576 | message += '`' + getPathName(deleted) + '` to `' + getPathName(created) + '` ' 577 | break 578 | 579 | case 'sendto': 580 | case 'connect': 581 | case 'bind': 582 | includeCmd = true 583 | message += 'to ' 584 | 585 | if (data.hasOwnProperty('socket_address')) { 586 | if (data.socket_address.hasOwnProperty('ip')) { 587 | message += '`' + data.socket_address.ip + ':' + data.socket_address.port + '` ' 588 | } else if (data.socket_address.hasOwnProperty('path')) { 589 | message += '`' + data.socket_address.path + '` ' 590 | } else { 591 | //TODO: put the family in here 592 | message += '`unknown address` ' 593 | } 594 | 595 | } else { 596 | message += '`unknown address` ' 597 | } 598 | 599 | break 600 | 601 | default: 602 | if (created = findPathType(data.paths, 'CREATE')) { 603 | path = getPathName(created) 604 | message += 'and create ' 605 | } else if (file = findPathType(data.paths, 'NORMAL')) { 606 | path = getPathName(file) 607 | } else { 608 | path = 'unknown path' 609 | } 610 | 611 | message += '`' + path + '` ' 612 | break 613 | } 614 | } 615 | 616 | if (data.syscall.executable && data.syscall.executable != path) { 617 | message += 'via `' + data.syscall.executable + '` ' 618 | } 619 | 620 | if (includeCmd && data.syscall.command) { 621 | message += 'as `' + data.syscall.command + '` ' 622 | } 623 | } 624 | 625 | data.message = message.trim() 626 | } 627 | 628 | var getPathName = function (path) { 629 | if (!path) { 630 | return 'unknown path' 631 | } 632 | 633 | return path.name || 'inode: ' + path.inode 634 | } 635 | 636 | var findPathType = function (paths, type) { 637 | for (var i in paths) { 638 | if (paths[i].nametype === type || paths[i].objtype === type) { 639 | return paths[i] 640 | } 641 | } 642 | } 643 | -------------------------------------------------------------------------------- /lib/parsers/goAuditParserConstants.js: -------------------------------------------------------------------------------- 1 | var types = { 2 | syscall : '1300', /* Syscall event */ 3 | path : '1302', /* Filename path information */ 4 | ipc : '1303', /* IPC record */ 5 | socketcall : '1304', /* sys_socketcall arguments */ 6 | config_change : '1305', /* Audit system configuration change */ 7 | sockaddr : '1306', /* sockaddr copied as syscall arg */ 8 | cwd : '1307', /* Current working directory */ 9 | execve : '1309', /* execve arguments */ 10 | ipc_set_perm : '1311', /* IPC new permissions record type */ 11 | mq_open : '1312', /* POSIX MQ open record type */ 12 | mq_sendrecv : '1313', /* POSIX MQ send/receive record type */ 13 | mq_notify : '1314', /* POSIX MQ notify record type */ 14 | mq_getsetattr : '1315', /* POSIX MQ get/set attribute record type */ 15 | kernel_other : '1316', /* For use by 3rd party 
modules */ 16 | fd_pair : '1317', /* audit record for pipe/socketpair */ 17 | obj_pid : '1318', /* ptrace target */ 18 | tty : '1319', /* Input on an administrative TTY */ 19 | eoe : '1320', /* End of multi-record event */ 20 | bprm_fcaps : '1321', /* Information about fcaps increasing perms */ 21 | capset : '1322', /* Record showing argument to sys_capset */ 22 | mmap : '1323', /* Record showing descriptor and flags in mmap */ 23 | netfilter_pkt : '1324', /* Packets traversing netfilter chains */ 24 | netfilter_cfg : '1325', /* Netfilter chain modifications */ 25 | seccomp : '1326', /* Secure Computing event */ 26 | proctitle : '1327', /* Proctitle emit event */ 27 | feature_change : '1328', /* audit log listing feature changes */ 28 | replace : '1329' /* Replace auditd if this packet unanswerd */ 29 | } 30 | 31 | var arch = { 32 | '64bit': 0x80000000, 33 | little_endian: 0x40000000, 34 | convention_mips64_n32: 0x20000000 35 | } 36 | 37 | var machines = { 38 | '0': 'none', /* Unknown machine. */ 39 | '1': 'm32', /* AT&T WE32100. */ 40 | '2': 'sparc', /* Sun SPARC. */ 41 | '3': '386', /* Intel i386. */ 42 | '4': '68k', /* Motorola 68000. */ 43 | '5': '88k', /* Motorola 88000. */ 44 | '7': '860', /* Intel i860. */ 45 | '8': 'mips', /* MIPS R3000 Big-Endian only. */ 46 | '9': 's370', /* IBM System/370. */ 47 | '10': 'mips_rs3_le', /* MIPS R3000 Little-Endian. */ 48 | '15': 'parisc', /* HP PA-RISC. */ 49 | '17': 'vpp500', /* Fujitsu VPP500. */ 50 | '18': 'sparc32plus', /* SPARC v8plus. */ 51 | '19': '960', /* Intel 80960. */ 52 | '20': 'ppc', /* PowerPC 32-bit. */ 53 | '21': 'ppc64', /* PowerPC 64-bit. */ 54 | '22': 's390', /* IBM System/390. */ 55 | '36': 'v800', /* NEC V800. */ 56 | '37': 'fr20', /* Fujitsu FR20. */ 57 | '38': 'rh32', /* TRW RH-32. */ 58 | '39': 'rce', /* Motorola RCE. */ 59 | '40': 'arm', /* ARM. */ 60 | '42': 'sh', /* Hitachi SH. */ 61 | '43': 'sparcv9', /* SPARC v9 64-bit. */ 62 | '44': 'tricore', /* Siemens TriCore embedded processor. */ 63 | '45': 'arc', /* Argonaut RISC Core. */ 64 | '46': 'h8_300', /* Hitachi H8/300. */ 65 | '47': 'h8_300h', /* Hitachi H8/300H. */ 66 | '48': 'h8s', /* Hitachi H8S. */ 67 | '49': 'h8_500', /* Hitachi H8/500. */ 68 | '50': 'ia_64', /* Intel IA-64 Processor. */ 69 | '51': 'mips_x', /* Stanford MIPS-X. */ 70 | '52': 'coldfire', /* Motorola ColdFire. */ 71 | '53': '68hc12', /* Motorola M68HC12. */ 72 | '54': 'mma', /* Fujitsu MMA. */ 73 | '55': 'pcp', /* Siemens PCP. */ 74 | '56': 'ncpu', /* Sony nCPU. */ 75 | '57': 'ndr1', /* Denso NDR1 microprocessor. */ 76 | '58': 'starcore', /* Motorola Star*Core processor. */ 77 | '59': 'me16', /* Toyota ME16 processor. */ 78 | '60': 'st100', /* STMicroelectronics ST100 processor. */ 79 | '61': 'tinyj', /* Advanced Logic Corp. TinyJ processor. 
*/ 80 | '62': 'x86_64', /* Advanced Micro Devices x86-64 */ 81 | '183': 'aarch64' /* ARM 64-bit Architecture (AArch64) */ 82 | } 83 | 84 | var syscalls = { 85 | 'x86_64': { 86 | '0': 'read', 87 | '1': 'write', 88 | '2': 'open', 89 | '3': 'close', 90 | '4': 'stat', 91 | '5': 'fstat', 92 | '6': 'lstat', 93 | '7': 'poll', 94 | '8': 'lseek', 95 | '9': 'mmap', 96 | '10': 'mprotect', 97 | '11': 'munmap', 98 | '12': 'brk', 99 | '13': 'rt_sigaction', 100 | '14': 'rt_sigprocmask', 101 | '15': 'rt_sigreturn', 102 | '16': 'ioctl', 103 | '17': 'pread64', 104 | '18': 'pwrite64', 105 | '19': 'readv', 106 | '20': 'writev', 107 | '21': 'access', 108 | '22': 'pipe', 109 | '23': 'select', 110 | '24': 'sched_yield', 111 | '25': 'mremap', 112 | '26': 'msync', 113 | '27': 'mincore', 114 | '28': 'madvise', 115 | '29': 'shmget', 116 | '30': 'shmat', 117 | '31': 'shmctl', 118 | '32': 'dup', 119 | '33': 'dup2', 120 | '34': 'pause', 121 | '35': 'nanosleep', 122 | '36': 'getitimer', 123 | '37': 'alarm', 124 | '38': 'setitimer', 125 | '39': 'getpid', 126 | '40': 'sendfile', 127 | '41': 'socket', 128 | '42': 'connect', 129 | '43': 'accept', 130 | '44': 'sendto', 131 | '45': 'recvfrom', 132 | '46': 'sendmsg', 133 | '47': 'recvmsg', 134 | '48': 'shutdown', 135 | '49': 'bind', 136 | '50': 'listen', 137 | '51': 'getsockname', 138 | '52': 'getpeername', 139 | '53': 'socketpair', 140 | '54': 'setsockopt', 141 | '55': 'getsockopt', 142 | '56': 'clone', 143 | '57': 'fork', 144 | '58': 'vfork', 145 | '59': 'execve', 146 | '60': 'exit', 147 | '61': 'wait4', 148 | '62': 'kill', 149 | '63': 'uname', 150 | '64': 'semget', 151 | '65': 'semop', 152 | '66': 'semctl', 153 | '67': 'shmdt', 154 | '68': 'msgget', 155 | '69': 'msgsnd', 156 | '70': 'msgrcv', 157 | '71': 'msgctl', 158 | '72': 'fcntl', 159 | '73': 'flock', 160 | '74': 'fsync', 161 | '75': 'fdatasync', 162 | '76': 'truncate', 163 | '77': 'ftruncate', 164 | '78': 'getdents', 165 | '79': 'getcwd', 166 | '80': 'chdir', 167 | '81': 'fchdir', 168 | '82': 'rename', 169 | '83': 'mkdir', 170 | '84': 'rmdir', 171 | '85': 'creat', 172 | '86': 'link', 173 | '87': 'unlink', 174 | '88': 'symlink', 175 | '89': 'readlink', 176 | '90': 'chmod', 177 | '91': 'fchmod', 178 | '92': 'chown', 179 | '93': 'fchown', 180 | '94': 'lchown', 181 | '95': 'umask', 182 | '96': 'gettimeofday', 183 | '97': 'getrlimit', 184 | '98': 'getrusage', 185 | '99': 'sysinfo', 186 | '100': 'times', 187 | '101': 'ptrace', 188 | '102': 'getuid', 189 | '103': 'syslog', 190 | '104': 'getgid', 191 | '105': 'setuid', 192 | '106': 'setgid', 193 | '107': 'geteuid', 194 | '108': 'getegid', 195 | '109': 'setpgid', 196 | '110': 'getppid', 197 | '111': 'getpgrp', 198 | '112': 'setsid', 199 | '113': 'setreuid', 200 | '114': 'setregid', 201 | '115': 'getgroups', 202 | '116': 'setgroups', 203 | '117': 'setresuid', 204 | '118': 'getresuid', 205 | '119': 'setresgid', 206 | '120': 'getresgid', 207 | '121': 'getpgid', 208 | '122': 'setfsuid', 209 | '123': 'setfsgid', 210 | '124': 'getsid', 211 | '125': 'capget', 212 | '126': 'capset', 213 | '127': 'rt_sigpending', 214 | '128': 'rt_sigtimedwait', 215 | '129': 'rt_sigqueueinfo', 216 | '130': 'rt_sigsuspend', 217 | '131': 'sigaltstack', 218 | '132': 'utime', 219 | '133': 'mknod', 220 | '134': 'uselib', 221 | '135': 'personality', 222 | '136': 'ustat', 223 | '137': 'statfs', 224 | '138': 'fstatfs', 225 | '139': 'sysfs', 226 | '140': 'getpriority', 227 | '141': 'setpriority', 228 | '142': 'sched_setparam', 229 | '143': 'sched_getparam', 230 | '144': 'sched_setscheduler', 231 | '145': 
'sched_getscheduler', 232 | '146': 'sched_get_priority_max', 233 | '147': 'sched_get_priority_min', 234 | '148': 'sched_rr_get_interval', 235 | '149': 'mlock', 236 | '150': 'munlock', 237 | '151': 'mlockall', 238 | '152': 'munlockall', 239 | '153': 'vhangup', 240 | '154': 'modify_ldt', 241 | '155': 'pivot_root', 242 | '156': '_sysctl', 243 | '157': 'prctl', 244 | '158': 'arch_prctl', 245 | '159': 'adjtimex', 246 | '160': 'setrlimit', 247 | '161': 'chroot', 248 | '162': 'sync', 249 | '163': 'acct', 250 | '164': 'settimeofday', 251 | '165': 'mount', 252 | '166': 'umount2', 253 | '167': 'swapon', 254 | '168': 'swapoff', 255 | '169': 'reboot', 256 | '170': 'sethostname', 257 | '171': 'setdomainname', 258 | '172': 'iopl', 259 | '173': 'ioperm', 260 | '174': 'create_module ', 261 | '175': 'init_module', 262 | '176': 'delete_module', 263 | '177': 'get_kernel_syms ', 264 | '178': 'query_module ', 265 | '179': 'quotactl', 266 | '180': 'nfsservctl ', 267 | '181': 'getpmsg ', 268 | '182': 'putpmsg ', 269 | '183': 'afs_syscall ', 270 | '184': 'tuxcall ', 271 | '185': 'security ', 272 | '186': 'gettid', 273 | '187': 'readahead', 274 | '188': 'setxattr', 275 | '189': 'lsetxattr', 276 | '190': 'fsetxattr', 277 | '191': 'getxattr', 278 | '192': 'lgetxattr', 279 | '193': 'fgetxattr', 280 | '194': 'listxattr', 281 | '195': 'llistxattr', 282 | '196': 'flistxattr', 283 | '197': 'removexattr', 284 | '198': 'lremovexattr', 285 | '199': 'fremovexattr', 286 | '200': 'tkill', 287 | '201': 'time', 288 | '202': 'futex', 289 | '203': 'sched_setaffinity', 290 | '204': 'sched_getaffinity', 291 | '205': 'set_thread_area ', 292 | '206': 'io_setup', 293 | '207': 'io_destroy', 294 | '208': 'io_getevents', 295 | '209': 'io_submit', 296 | '210': 'io_cancel', 297 | '211': 'get_thread_area ', 298 | '212': 'lookup_dcookie', 299 | '213': 'epoll_create', 300 | '214': 'epoll_ctl_old ', 301 | '215': 'epoll_wait_old ', 302 | '216': 'remap_file_pages', 303 | '217': 'getdents64', 304 | '218': 'set_tid_address', 305 | '219': 'restart_syscall', 306 | '220': 'semtimedop', 307 | '221': 'fadvise64', 308 | '222': 'timer_create', 309 | '223': 'timer_settime', 310 | '224': 'timer_gettime', 311 | '225': 'timer_getoverrun', 312 | '226': 'timer_delete', 313 | '227': 'clock_settime', 314 | '228': 'clock_gettime', 315 | '229': 'clock_getres', 316 | '230': 'clock_nanosleep', 317 | '231': 'exit_group', 318 | '232': 'epoll_wait', 319 | '233': 'epoll_ctl', 320 | '234': 'tgkill', 321 | '235': 'utimes', 322 | '236': 'vserver ', 323 | '237': 'mbind', 324 | '238': 'set_mempolicy', 325 | '239': 'get_mempolicy', 326 | '240': 'mq_open', 327 | '241': 'mq_unlink', 328 | '242': 'mq_timedsend', 329 | '243': 'mq_timedreceive', 330 | '244': 'mq_notify', 331 | '245': 'mq_getsetattr', 332 | '246': 'kexec_load', 333 | '247': 'waitid', 334 | '248': 'add_key', 335 | '249': 'request_key', 336 | '250': 'keyctl', 337 | '251': 'ioprio_set', 338 | '252': 'ioprio_get', 339 | '253': 'inotify_init', 340 | '254': 'inotify_add_watch', 341 | '255': 'inotify_rm_watch', 342 | '256': 'migrate_pages', 343 | '257': 'openat', 344 | '258': 'mkdirat', 345 | '259': 'mknodat', 346 | '260': 'fchownat', 347 | '261': 'futimesat', 348 | '262': 'newfstatat', 349 | '263': 'unlinkat', 350 | '264': 'renameat', 351 | '265': 'linkat', 352 | '266': 'symlinkat', 353 | '267': 'readlinkat', 354 | '268': 'fchmodat', 355 | '269': 'faccessat', 356 | '270': 'pselect6', 357 | '271': 'ppoll', 358 | '272': 'unshare', 359 | '273': 'set_robust_list', 360 | '274': 'get_robust_list', 361 | '275': 'splice', 362 | 
'276': 'tee', 363 | '277': 'sync_file_range', 364 | '278': 'vmsplice', 365 | '279': 'move_pages', 366 | '280': 'utimensat', 367 | '281': 'epoll_pwait', 368 | '282': 'signalfd', 369 | '283': 'timerfd_create', 370 | '284': 'eventfd', 371 | '285': 'fallocate', 372 | '286': 'timerfd_settime', 373 | '287': 'timerfd_gettime', 374 | '288': 'accept4', 375 | '289': 'signalfd4', 376 | '290': 'eventfd2', 377 | '291': 'epoll_create1', 378 | '292': 'dup3', 379 | '293': 'pipe2', 380 | '294': 'inotify_init1', 381 | '295': 'preadv', 382 | '296': 'pwritev', 383 | '297': 'rt_tgsigqueueinfo', 384 | '298': 'perf_event_open', 385 | '299': 'recvmmsg', 386 | '300': 'fanotify_init', 387 | '301': 'fanotify_mark', 388 | '302': 'prlimit64', 389 | '303': 'name_to_handle_at', 390 | '304': 'open_by_handle_at', 391 | '305': 'clock_adjtime', 392 | '306': 'syncfs', 393 | '307': 'sendmmsg', 394 | '308': 'setns', 395 | '309': 'getcpu', 396 | '310': 'process_vm_readv', 397 | '311': 'process_vm_writev', 398 | '312': 'kcmp', 399 | '313': 'finit_module' 400 | } 401 | } 402 | 403 | var addressFamilies = { 404 | '0': 'unspecified', 405 | '1': 'local', 406 | '2': 'inet', 407 | '3': 'ax25', 408 | '4': 'ipx', 409 | '5': 'appletalk', 410 | '6': 'netrom', 411 | '7': 'bridge', 412 | '8': 'atmpvc', 413 | '9': 'x25', 414 | '10': 'inet6', 415 | '11': 'rose', 416 | '12': 'decnet', 417 | '13': 'netbeui', 418 | '14': 'security', 419 | '15': 'key', 420 | '16': 'netlink', 421 | '17': 'packet', 422 | '18': 'ash', 423 | '19': 'econet', 424 | '20': 'atmsvc', 425 | '21': 'rds', 426 | '22': 'sna', 427 | '23': 'irda', 428 | '24': 'pppox', 429 | '25': 'wanpipe', 430 | '26': 'llc', 431 | '27': 'ib', 432 | '28': 'mpls', 433 | '29': 'can', 434 | '30': 'tipc', 435 | '31': 'bluetooth', 436 | '32': 'iucv', 437 | '33': 'rxrpc', 438 | '34': 'isdn', 439 | '35': 'phonet', 440 | '36': 'ieee802154', 441 | '37': 'caif', 442 | '38': 'alg', 443 | '39': 'nfc', 444 | '40': 'vsock', 445 | '41': 'kcm', 446 | '42': 'qipcrtr', 447 | } 448 | 449 | module.exports = { 450 | types: types, 451 | arch: arch, 452 | machines: machines, 453 | syscalls: syscalls, 454 | addressFamilies: addressFamilies 455 | } 456 | -------------------------------------------------------------------------------- /lib/parsers/httpCombinedAccessParser.js: -------------------------------------------------------------------------------- 1 | var regex = /^(\S+) (\S+) (\S+) \[([^\]]+)] "(\S+) (\S+) (\S+)" (\d+) (\d+) "([^"]*)" "([^"]*)"$/, 2 | fields = ['remote_host', 'identd', 'user', 'timestamp', 'method', 'path', 'version', 'status_code', 'bytes', 'referrer', 'user_agent'] 3 | 4 | /** 5 | * Parses the common combined access log lines that an httpd outputs 6 | * 7 | * @param {String} message Message to try and parse 8 | * 9 | * @returns {parserResult} 10 | */ 11 | module.exports = function (message) { 12 | var parts = [], 13 | data = {} 14 | 15 | parts = regex.exec(message) 16 | if (!parts) { 17 | return { data: void 0, error: 'No match' } 18 | } 19 | 20 | fields.forEach(function (name, index) { 21 | var value = parts[index + 1] 22 | 23 | // Skip empty values 24 | if (!value || value == '-') { 25 | return 26 | } 27 | 28 | data[name] = value 29 | }) 30 | 31 | data.message = data.method + ' ' + data.path + ' ' + data.version 32 | 33 | var testDate = parseDate(data['timestamp']) 34 | if (testDate == 'Invalid Date') { 35 | delete data['timestamp'] 36 | return { data: data, error: 'Invalid date' } 37 | } else { 38 | data['timestamp'] = testDate 39 | } 40 | 41 | return { data: data, error: void 0 } 42 | } 43 
| 44 | var parseDate = function (str) { 45 | var parts = str.split('/'), 46 | day = parts[0], 47 | month = parts[1] 48 | 49 | parts = parts[2].split(':') 50 | 51 | var year = parts[0], 52 | hour = parts[1], 53 | min = parts[2] 54 | 55 | parts = parts[3].split(' ') 56 | 57 | var sec = parts[0], 58 | tz = parts[1] 59 | 60 | 61 | return new Date(month + ' ' + day + ', ' + year + ' ' + hour + ':' + min + ':' + sec + ' ' + tz) 62 | } 63 | 64 | module.exports.propertyName = 'http_access' 65 | -------------------------------------------------------------------------------- /lib/parsers/httpVHostCombinedAccessParser.js: -------------------------------------------------------------------------------- 1 | var regex = /^([^:]+:\d+) (\S+) (\S+) (\S+) \[([^\]]+)] "(\S+) (\S+) (\S+)" (\d+) (\d+) "([^"]*)" "([^"]*)"$/, 2 | fields = ['virtual_host', 'remote_host', 'identd', 'user', 'timestamp', 'method', 'path', 'version', 'status_code', 'bytes', 'referrer', 'user_agent'] 3 | 4 | /** 5 | * Parses the common vhost combined access log lines that an httpd outputs 6 | * It's the combined log format with virtual_host:local_port on the front 7 | * 8 | * @param {String} message Message to try and parse 9 | * 10 | * @returns {parserResult} 11 | */ 12 | module.exports = function (message) { 13 | var parts = [], 14 | data = {} 15 | 16 | parts = regex.exec(message) 17 | if (!parts) { 18 | return { data: void 0, error: 'No match' } 19 | } 20 | 21 | fields.forEach(function (name, index) { 22 | var value = parts[index + 1] 23 | 24 | // Skip empty values 25 | if (!value || value == '-') { 26 | return 27 | } 28 | 29 | data[name] = value 30 | }) 31 | 32 | data.message = data.method + ' ' + data.path + ' ' + data.version 33 | 34 | var testDate = parseDate(data['timestamp']) 35 | if (testDate == 'Invalid Date') { 36 | delete data['timestamp'] 37 | return { data: data, error: 'Invalid date' } 38 | } else { 39 | data['timestamp'] = testDate 40 | } 41 | 42 | return { data: data, error: void 0 } 43 | } 44 | 45 | var parseDate = function (str) { 46 | var parts = str.split('/'), 47 | day = parts[0], 48 | month = parts[1] 49 | 50 | parts = parts[2].split(':') 51 | 52 | var year = parts[0], 53 | hour = parts[1], 54 | min = parts[2] 55 | 56 | parts = parts[3].split(' ') 57 | 58 | var sec = parts[0], 59 | tz = parts[1] 60 | 61 | 62 | return new Date(month + ' ' + day + ', ' + year + ' ' + hour + ':' + min + ':' + sec + ' ' + tz) 63 | } 64 | 65 | module.exports.propertyName = 'http_access' 66 | -------------------------------------------------------------------------------- /lib/parsers/index.js: -------------------------------------------------------------------------------- 1 | var util = require('util'), 2 | fs = require('fs') 3 | 4 | /** 5 | * Performs the parser wrapping 6 | * 7 | * @param {Function} parser The original parser to wrap 8 | * 9 | * @returns {Function} A helper function with the actual parser available under .raw 10 | */ 11 | var wrap = function (parser) { 12 | /** 13 | * Wraps a parser and provides some common functionality when using parsers 14 | * The actual parser will attempt to parse event.data.message 15 | * If the parsed result contains a message property the event message property will be replaced 16 | * 17 | * If a parse error occurs the typeField will be set to `unparseable` and the error message recorded in `parse_error` 18 | * If the parser returned data it will be merged into event.data 19 | * 20 | * @param {EventContainer} event The event to attempt to parse 21 | * @param {String} [typeField='_type'] The field to set in event.data on failure 22
| * @param {boolean} [backupMessage=false] If true the original message will be stored in event.data.originalMessage 23 | * @param {String} [propertyName] The property name to put all parsed data under, the default is the parsers 24 | * configured name 25 | * 26 | * @returns {boolean} False on parse error otherwise true 27 | */ 28 | var helper = function (event, typeField, backupMessage, propertyName) { 29 | var result = parser(event.data.message), 30 | usePropertyName = propertyName || parser.propertyName || 'unknown_parser_name' 31 | 32 | if (result.data) { 33 | if (result.data.message) { 34 | if (backupMessage) { 35 | event.data.originalMessage = event.data.message 36 | } 37 | 38 | event.data.message = result.data.message 39 | delete result.data.message 40 | } 41 | 42 | event.data[usePropertyName] = result.data 43 | } 44 | 45 | if (result.error) { 46 | event.data[typeField || '_type'] = 'unparseable' 47 | //TODO: need to handle the case where multiple parse errors occur 48 | event.data['parse_error'] = usePropertyName + ': ' + result.error 49 | return false 50 | 51 | } 52 | 53 | return true 54 | } 55 | 56 | helper.raw = parser 57 | return helper 58 | } 59 | 60 | module.exports = { 61 | wrap: wrap, 62 | goAuditParser: wrap(require('./goAuditParser')), 63 | httpCombinedAccessParser: wrap(require('./httpCombinedAccessParser')), 64 | httpVHostCombinedAccessParser: wrap(require('./httpVHostCombinedAccessParser')), 65 | jsonParser: wrap(require('./jsonParser')), 66 | relpSyslogParser: wrap(require('./relpSyslogParser')), 67 | sshdParser: wrap(require('./sshdParser')), 68 | sudoParser: wrap(require('./sudoParser')), 69 | } 70 | 71 | /** 72 | * The result of parser execution 73 | * 74 | * @typedef {Object} parserResult 75 | * @property {*} data Data parsed from the event, if any 76 | * @property {String} error Description of the parser failure, if one occurred 77 | */ 78 | -------------------------------------------------------------------------------- /lib/parsers/jsonParser.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Parses json formatted log lines 3 | * 4 | * @param {String} message Message to try and parse 5 | * 6 | * @returns {parserResult} 7 | */ 8 | module.exports = function (message) { 9 | var result = { data: void 0, error: void 0 } 10 | 11 | try { 12 | result.data = JSON.parse(message) 13 | } catch (error) { 14 | result.error = error.toString() 15 | } 16 | 17 | return result 18 | } 19 | 20 | module.exports.propertyName = 'json' 21 | -------------------------------------------------------------------------------- /lib/parsers/relpSyslogParser.js: -------------------------------------------------------------------------------- 1 | var facilityNames = [ 2 | 'kernel', 3 | 'user', 4 | 'mail', 5 | 'daemon', 6 | 'security/auth', 7 | 'syslogd', 8 | 'line printer', 9 | 'news', 10 | 'uucp', 11 | 'clock', 12 | 'security/auth', 13 | 'ftp', 14 | 'ntp', 15 | 'audit', 16 | 'alert', 17 | 'clock', 18 | 'local0', 19 | 'local1', 20 | 'local2', 21 | 'local3', 22 | 'local4', 23 | 'local5', 24 | 'local6', 25 | 'local7' 26 | ], 27 | severityNames = [ 28 | 'emergency', 29 | 'alert', 30 | 'critical', 31 | 'error', 32 | 'warning', 33 | 'notice', 34 | 'info', 35 | 'debug' 36 | ] 37 | 38 | /** 39 | * Parses syslog formatted log lines 40 | * 41 | * @param {String} message Message to try and parse 42 | * 43 | * @returns {parserResult} 44 | */ 45 | module.exports = function (message) { 46 | var data = {} 47 | //TODO: more error checking 48 | 49 | if (message[0] 
!== '<') { 50 | return { data: void 0, error: 'Not a syslog message' } 51 | } 52 | 53 | var nextPos = message.indexOf('>') 54 | if (nextPos < 1 || nextPos > 4) { 55 | return { data: void 0, error: 'No priority value' } 56 | } 57 | 58 | data.priority = parseInt(message.substring(1, nextPos)) 59 | if (Number.isNaN(data.priority) || data.priority > 191) { 60 | return { data: void 0, error: 'Invalid priority value' } 61 | } 62 | 63 | //Parse out the priority values 64 | data.facility = parseInt(data.priority / 8, 10) 65 | data.severity = data.priority - (data.facility * 8) 66 | 67 | data.facilityName = facilityNames[data.facility] 68 | data.severityName = severityNames[data.severity] 69 | 70 | //Parse out the date string 71 | var lastPos = nextPos + 1 72 | nextPos = message.indexOf(' ', lastPos) 73 | data.timestamp = new Date(message.substring(lastPos, nextPos)) 74 | if (nextPos < 1 || data.timestamp == 'Invalid Date') { 75 | return { data: void 0, error: 'Invalid date' } 76 | } 77 | 78 | //Parse out the host 79 | lastPos = nextPos + 1 80 | nextPos = message.indexOf(' ', lastPos) 81 | if (nextPos < 1) { 82 | return { data: void 0, error: 'Invalid host' } 83 | } 84 | data.host = message.substring(lastPos, nextPos) 85 | 86 | //Parse out the service 87 | lastPos = nextPos + 1 88 | nextPos = message.indexOf(':', nextPos) 89 | if (nextPos < 1) { 90 | return { data: void 0, error: 'Invalid service' } 91 | } 92 | data.service = message.substring(lastPos, nextPos) 93 | 94 | //Parse out the message 95 | lastPos = nextPos + 2 96 | data.message = message.substring(lastPos) 97 | 98 | return { data: data, error: void 0 } 99 | } 100 | 101 | module.exports.propertyName = 'syslog' 102 | -------------------------------------------------------------------------------- /lib/parsers/sshdParser.js: -------------------------------------------------------------------------------- 1 | //TODO: Starting session: command for root from 10.0.0.0 port 20720 2 | //TODO: Connection from 10.0.0.0 port 34815 on 10.0.0.0 port 22 3 | //TODO: Connection from 10.0.0.0 port 49210 4 | //TODO: Partial publickey for username from 99.99.99.99 port 63259 ssh2: RSA 00:bb:0b:00:b5:88:00:77:5b:a0:00:02:53:b8:d2:2b 5 | //TODO: Starting session: shell on pts/3 for username from 10.0.0.0 port 47627 6 | 7 | var regexes = [ 8 | { 9 | name: 'accepted_connection', 10 | regex: /^Accepted (\S+) for (\S+) from ((?:\d{1,3}\.){3}\d{1,3}) port (\d+) (\w+)(?:: (\S+ \S+))?/, 11 | parts: [ 'auth_type', 'user', 'client_ip', 'client_port', 'protocol', 'fingerprint' ] 12 | }, 13 | { 14 | name: 'closed_connection', 15 | regex: /^Connection closed by ((?:\d{1,3}\.){3}\d{1,3}) (.+)?/, 16 | parts: [ 'client_ip' ] 17 | }, 18 | { 19 | name: 'bad_protocol_version', 20 | regex: /^Bad protocol version identification '(.+)?' from ((?:\d{1,3}\.){3}\d{1,3}) port (\d+)/, 21 | parts: [ 'version', 'client_ip', 'client_port' ] 22 | }, 23 | { 24 | name: 'disconnecting', 25 | regex: /^Disconnecting: (.*)/, 26 | parts: [ 'reason' ] 27 | }, 28 | { 29 | name: 'no_identification', 30 | regex: /^Did not receive identification string from ((?:\d{1,3}\.){3}\d{1,3})/, 31 | parts: [ 'client_ip' ] 32 | }, 33 | { 34 | name: 'error', 35 | regex: /^error: (.*)/, 36 | parts: [ 'error' ] 37 | }, 38 | { 39 | name: 'failed_event', 40 | regex: /^Failed (.+) for '?(\S+?)'? from ((?:\d{1,3}\.){3}\d{1,3})(?: port (\d+) (\S+))?/, 41 | parts: [ 'auth_type', 'user', 'client_ip', 'client_port', 'protocol' ] 42 | }, 43 | { 44 | name: 'successful_event', 45 | regex: /^Successful (.+) for '?(\S+?)'? 
from ((?:\d{1,3}\.){3}\d{1,3})(?: port (\d+) (\S+))?/, 46 | parts: [ 'auth_type', 'user', 'client_ip', 'client_port', 'protocol' ] 47 | }, 48 | { 49 | name: 'fatal', 50 | regex: /^fatal: (.*)/, 51 | parts: [ 'error' ] 52 | }, 53 | { 54 | name: 'invalid_user', 55 | regex: /^Invalid user (\S+) from ((?:\d{1,3}\.){3}\d{1,3})/, 56 | parts: [ 'user', 'client_ip' ] 57 | }, 58 | { 59 | name: 'pam_session', 60 | regex: /^pam_unix\(sshd:session\): session (closed|opened) for user (\S+)/, 61 | parts: [ 'state', 'user' ] 62 | }, 63 | { 64 | name: 'postponed_connection', 65 | regex: /^Postponed (.+) for (\S+) from ((?:\d{1,3}\.){3}\d{1,3}) port (\d+) (\S+)/, 66 | parts: [ 'auth_type', 'user', 'client_ip', 'client_port', 'protocol' ] 67 | }, 68 | { 69 | name: 'received_disconnect', 70 | regex: /^Received disconnect from ((?:\d{1,3}\.){3}\d{1,3}): \d+: (.+)?(:? .+)/, 71 | parts: ['client_ip', 'reason'] 72 | } 73 | ] 74 | 75 | /** 76 | * Parses sshd log lines 77 | * 78 | * @param {String} message Message to try and parse 79 | * 80 | * @returns {parserResult} 81 | */ 82 | module.exports = function (message) { 83 | var parts = [], 84 | matched, 85 | data = {} 86 | 87 | regexes.some(function (group) { 88 | parts = group.regex.exec(message) 89 | if (parts) { 90 | matched = group 91 | return true 92 | } 93 | }) 94 | 95 | if (!matched) { 96 | return { data: void 0, error: 'No matches' } 97 | } 98 | 99 | data.event = matched.name 100 | matched.parts.forEach(function (name, index) { 101 | data[name] = parts[index + 1] 102 | }) 103 | 104 | return { data: data, error: void 0 } 105 | } 106 | 107 | module.exports.propertyName = 'sshd' 108 | -------------------------------------------------------------------------------- /lib/parsers/sudoParser.js: -------------------------------------------------------------------------------- 1 | var regexes = [ 2 | { 3 | name: 'command', 4 | regex: /^(.+) : TTY=(.+) ; PWD=(.+) ; USER=(.+) ; COMMAND=(.*)/, 5 | parts: [ 'user', 'tty', 'pwd', 'as_user', 'command' ] 6 | }, 7 | { 8 | name: 'error', 9 | regex: /^(.+) : (.+?) 
; TTY=(.+) ; PWD=(.+) ; USER=(.+) ; COMMAND=(.*)/, 10 | parts: [ 'user', 'error', 'tty', 'pwd', 'as_user', 'command' ] 11 | }, 12 | ] 13 | 14 | /** 15 | * Parses sudo log lines 16 | * 17 | * @param {String} message Message to try and parse 18 | * 19 | * @returns {parserResult} 20 | */ 21 | module.exports = function (message) { 22 | var parts = [], 23 | matched, 24 | data = {} 25 | 26 | regexes.some(function (group) { 27 | parts = group.regex.exec(message) 28 | if (parts) { 29 | matched = group 30 | return true 31 | } 32 | }) 33 | 34 | if (!matched) { 35 | return { data: void 0, error: 'No matches' } 36 | } 37 | 38 | data.event = matched.name 39 | matched.parts.forEach(function (name, index) { 40 | data[name] = parts[index + 1].trim() 41 | }) 42 | 43 | return { data: data, error: void 0 } 44 | 45 | } 46 | 47 | module.exports.propertyName = 'sudo' 48 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "streamstash", 3 | "version": "1.3.0", 4 | "description": "A log aggregating/filtering/redirecting service", 5 | "keywords": [ 6 | "log", 7 | "flume", 8 | "logstash", 9 | "fluentd", 10 | "logs", 11 | "elasticsearch", 12 | "relp", 13 | "syslog", 14 | "rsyslog" 15 | ], 16 | "author": { 17 | "name": "Nathan Brown", 18 | "email": "nbrown.us@gmail.com" 19 | }, 20 | "repository": { 21 | "type": "git", 22 | "url": "https://github.com/nbrownus/streamstash" 23 | }, 24 | "main": "./index", 25 | "license": "MIT", 26 | "engines": { 27 | "node": ">= 0.10.x" 28 | }, 29 | "scripts": { 30 | "test": "make test" 31 | }, 32 | "dependencies": { 33 | "dashdash": "1.13.0", 34 | "relp": "git://github.com/nbrownus/node-relp", 35 | "smtp-server": "^3.4.7", 36 | "statsd-client": "0.2.1", 37 | "underscore": "1.8.3", 38 | "uuid": "3.0.1" 39 | }, 40 | "devDependencies": { 41 | "istanbul": "0.4.x", 42 | "jsdoc": "3.3.3", 43 | "nodemailer": "^4.6.8", 44 | "ppunit": "x", 45 | "should": "7.1.x" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /sendmail.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nbrownus/streamstash/28338157bfdfe73f2eafddd11d720012c79cf51c/sendmail.js -------------------------------------------------------------------------------- /test/EventContainer.test.js: -------------------------------------------------------------------------------- 1 | var EventContainer = require('../').EventContainer 2 | , should = require('should') 3 | 4 | describe('EventContainer', function () { 5 | 6 | it('Should have constant constants', function () { 7 | EventContainer.STATE.should.eql( 8 | { 9 | FAILED: -1, 10 | CANCELED: 0, 11 | PROCESSING: 1, 12 | COMPLETED: 2 13 | } 14 | , 'States constant object was different' 15 | ) 16 | }) 17 | 18 | it('Should instantiate properly', function () { 19 | var event = new EventContainer({ things: true }) 20 | event.data.should.eql({ things: true}, 'Data object was wrong') 21 | event.state.should.equal(EventContainer.STATE.PROCESSING, 'Initial state was wrong') 22 | should.strictEqual(void 0, event.eventId, 'Event id should be undefined') 23 | }) 24 | 25 | it('Should return true if processingComplete succeeded', function () { 26 | var event = new EventContainer(), 27 | sawComplete = false 28 | 29 | event.on('complete', function () { 30 | sawComplete = true 31 | }) 32 | 33 | event.processingComplete().should.equal(true) 34 | 
sawComplete.should.equal(true) 35 | }) 36 | 37 | 38 | it('Should only mark as complete if not already completed', function () { 39 | var event = new EventContainer() 40 | event.processingComplete().should.equal(true) 41 | event.processingComplete().should.equal(false) 42 | }) 43 | 44 | it('Should just return false for #next by default', function () { 45 | var event = new EventContainer() 46 | event.next().should.equal(false) 47 | }) 48 | 49 | it('Should just return false for #done by default', function () { 50 | var event = new EventContainer() 51 | event.done().should.equal(false) 52 | }) 53 | 54 | it('Should just return false for #cancel by default', function () { 55 | var event = new EventContainer() 56 | event.cancel().should.equal(false) 57 | }) 58 | }) 59 | -------------------------------------------------------------------------------- /test/Logger.test.js: -------------------------------------------------------------------------------- 1 | var Logger = require('../').Logger 2 | 3 | require('should') 4 | 5 | describe('Logger', function () { 6 | 7 | it('Should have constant constants', function () { 8 | Logger.LEVEL.OFF.should.equal(-1, 'Off level was wrong') 9 | Logger.LEVEL.ERROR.should.equal(0, 'Error level was wrong') 10 | Logger.LEVEL.INFO.should.equal(1, 'Info level was wrong') 11 | Logger.LEVEL.DEBUG.should.equal(2, 'Debug level was wrong') 12 | 13 | Logger.NAMES[Logger.LEVEL.ERROR].should.equal('ERROR', 'Error name was wrong') 14 | Logger.NAMES[Logger.LEVEL.INFO].should.equal('INFO', 'Info name was wrong') 15 | Logger.NAMES[Logger.LEVEL.DEBUG].should.equal('DEBUG', 'Debug name was wrong') 16 | }) 17 | 18 | describe('Constructor', function () { 19 | 20 | it('Should set the log level to INFO if level is not provided', function () { 21 | var logger = new Logger() 22 | logger.level.should.equal(Logger.LEVEL.INFO, 'Default log level was wrong') 23 | }) 24 | 25 | it('Should use process.stdout if stream is not provided', function () { 26 | var logger = new Logger() 27 | logger.stream.should.equal(process.stdout, 'Default stream was wrong') 28 | }) 29 | 30 | it('Should allow level to be overridden', function () { 31 | var logger = new Logger({ level: Logger.LEVEL.DEBUG }) 32 | logger.level.should.equal(Logger.LEVEL.DEBUG, 'Log level override was not respected') 33 | }) 34 | 35 | it('Should allow stream to be overridden', function () { 36 | var stream = {} 37 | , logger = new Logger({ stream: stream }) 38 | 39 | logger.stream.should.equal(stream, 'Stream override was not respected') 40 | }) 41 | 42 | }) 43 | 44 | it('Should not log anything if level is OFF', function () { 45 | var stream = { write: function () { throw new Error('Log was written') }} 46 | , logger = new Logger({ level: Logger.LEVEL.OFF, stream: stream }) 47 | 48 | logger.error('error') 49 | logger.info('info') 50 | logger.debug('debug') 51 | }) 52 | 53 | it('Should add timestamp and level to all messages', function () { 54 | var level = Logger.NAMES[Logger.LEVEL.ERROR] 55 | , stream = { 56 | write: function (message) { 57 | var m = JSON.parse(message) 58 | m.should.have.keys('timestamp', 'message', 'name', 'level', 'data') 59 | m.data.should.have.keys('extra') 60 | } 61 | } 62 | , logger = new Logger({ level: Logger.LEVEL.DEBUG, stream: stream }) 63 | 64 | level = Logger.NAMES[Logger.LEVEL.ERROR] 65 | logger.error('test', 'msg', {extra: 'stuff'}) 66 | 67 | level = Logger.NAMES[Logger.LEVEL.INFO] 68 | logger.info('test', 'msg', {extra: 'stuff'}) 69 | 70 | level = Logger.NAMES[Logger.LEVEL.DEBUG] 71 | 
logger.debug('test', 'msg', {extra: 'stuff'}) 72 | }) 73 | 74 | }) 75 | -------------------------------------------------------------------------------- /test/StreamStash.test.js: -------------------------------------------------------------------------------- 1 | var StreamStash = require('../') 2 | , EventEmitter = require('events').EventEmitter 3 | , Logger = StreamStash.Logger 4 | 5 | require('should') 6 | 7 | describe('StreamStash', function () { 8 | 9 | it('Should have constant constants', function () { 10 | StreamStash.STATE.CONFIGURING.should.equal(0, 'CONFIGURING was wrong') 11 | StreamStash.STATE.STARTED.should.equal(1, 'STARTED was wrong') 12 | StreamStash.STATE.STOPPING_INPUT.should.equal(2, 'STOPPING_INPUT was wrong') 13 | StreamStash.STATE.STOPPING_ALL.should.equal(3, 'STOPPING_ALL was wrong') 14 | StreamStash.STATE.STOPPED.should.equal(4, 'STOPPED was wrong') 15 | }) 16 | 17 | describe('Constructor', function () { 18 | 19 | it('Should start off in `configuring`', function () { 20 | var streamStash = new StreamStash({ logger: new Logger() }) 21 | streamStash.state.should.equal(StreamStash.STATE.CONFIGURING, 'Initial state was wrong') 22 | }) 23 | 24 | it('Should set max listeners to 0', function () { 25 | var streamStash = new StreamStash({ logger: new Logger() }) 26 | streamStash._maxListeners.should.equal(0, 'Max listeners was wrong') 27 | }) 28 | 29 | it('Should use the provided logger', function () { 30 | var logger = new Logger() 31 | 32 | var streamStash = new StreamStash({ logger: logger }) 33 | streamStash.logger.should.equal(logger, 'Did not use the provided logger') 34 | }) 35 | 36 | it('Should initialize properties properly', function () { 37 | var streamStash = new StreamStash({ logger: new Logger() }) 38 | 39 | streamStash.inputs.should.eql([], 'inputs was wrong') 40 | streamStash.outputs.should.eql([], 'outputs was wrong') 41 | streamStash.filters.should.eql([], 'filters was wrong') 42 | 43 | streamStash.stats.should.eql( 44 | { 45 | startTime: null, 46 | events: { 47 | processing: 0, 48 | total: 0, 49 | canceled: 0, 50 | failed: 0 51 | }, 52 | plugins: { 53 | started: 0, 54 | stoppedInput: 0, 55 | stopped: 0, 56 | total: 0 57 | } 58 | }, 59 | 'stats was wrong' 60 | ) 61 | }) 62 | 63 | it.skip('Should emit telemetry', function (done) { 64 | //TODO: newer versions of node add zap and malloced stats, this needs to be a less brittle test 65 | var streamStash = new StreamStash({ logger: new Logger(), telemetryInterval: 10 }), 66 | seen = 0, 67 | stats = ['events.processing', 'events.total', 'events.canceled', 'events.failed'] 68 | 69 | try { 70 | var v8 = require('v8') 71 | stats.push( 72 | 'process.memory.heap_size_limit', 73 | 'process.memory.total_available_size', 74 | 'process.memory.total_heap_size', 75 | 'process.memory.total_heap_size_executable', 76 | 'process.memory.total_physical_size', 77 | 'process.memory.used_heap_size', 78 | 'process.memory.malloced_memory', 79 | 'process.memory.peak_malloced_memory', 80 | 'process.memory.does_zap_garbage', 81 | 'process.memory.does_zap_garbage' 82 | 83 | ) 84 | } catch (e) { 85 | stats.push( 86 | 'process.memory.heap_total', 87 | 'process.memory.heap_used' 88 | ) 89 | } 90 | 91 | streamStash.telemetry.gauge = function (metric) { 92 | clearInterval(streamStash._telemetryInterval) 93 | if (stats.indexOf(metric) < 0) { 94 | delete stats.indexOf(metric) 95 | throw new Error('Got a metric we did not expect (possibly again): ' + metric) 96 | } 97 | 98 | seen++ 99 | 100 | //This is brittle and may miss added stats 101 | if 
(seen == stats.length) { 102 | setTimeout(done, 20) 103 | } 104 | } 105 | 106 | }) 107 | 108 | }) 109 | 110 | describe('#addInputPlugin', function () { 111 | 112 | it('Should not add any inputs if not in `configuring` state', function () { 113 | var streamStash = new StreamStash({ logger: new Logger() }) 114 | streamStash.state = 2 115 | streamStash.addInputPlugin().should.equal(false) 116 | }) 117 | 118 | it('Should throw an error if the plugin did not have a name', function () { 119 | var streamStash = new StreamStash({ logger: new Logger() }) 120 | , plugin = new EventEmitter() 121 | 122 | try { 123 | streamStash.addInputPlugin(plugin) 124 | } catch (error) { 125 | error.message.should.equal('Input plugin did not have a name') 126 | return 127 | } 128 | 129 | return new Error('Should have had an error') 130 | }) 131 | 132 | it('Should throw an error if the name is already used', function () { 133 | var streamStash = new StreamStash({ logger: new Logger() }) 134 | , plugin = new EventEmitter() 135 | 136 | plugin.name = '1' 137 | streamStash.addInputPlugin(plugin).should.equal(true) 138 | 139 | try { 140 | streamStash.addInputPlugin(plugin) 141 | } catch (error) { 142 | error.message.should.equal('Each input plugin must have a unique name') 143 | return 144 | } 145 | 146 | return new Error('Should have had an error') 147 | }) 148 | 149 | it('Should give the plugin an id, save it, and update the stats', function () { 150 | var streamStash = new StreamStash({ logger: new Logger() }) 151 | , plugin = new EventEmitter() 152 | 153 | plugin.name = '1' 154 | streamStash.addInputPlugin(plugin).should.equal(true) 155 | plugin.pluginId.should.equal(0, 'Plugin id was wrong') 156 | 157 | streamStash.inputs[0].should.equal(plugin, 'Plugin was not recorded properly') 158 | streamStash.stats.plugins.total.should.equal(1, 'Stats were not updated properly') 159 | 160 | plugin = new EventEmitter() 161 | plugin.name = '2' 162 | streamStash.addInputPlugin(plugin).should.equal(true) 163 | plugin.pluginId.should.equal(1, 'Plugin id was wrong') 164 | 165 | streamStash.inputs[1].should.equal(plugin, 'Plugin was not recorded properly') 166 | streamStash.stats.plugins.total.should.equal(2, 'Stats were not updated properly') 167 | }) 168 | }) 169 | 170 | describe('#addOutputPlugin', function () { 171 | 172 | it('Should not add any outputs if not in `configuring` state', function () { 173 | var streamStash = new StreamStash({ logger: new Logger() }) 174 | streamStash.state = 2 175 | streamStash.addOutputPlugin().should.equal(false) 176 | }) 177 | 178 | it('Should throw an error if the plugin did not have a name', function () { 179 | var streamStash = new StreamStash({ logger: new Logger() }) 180 | , plugin = new EventEmitter() 181 | 182 | try { 183 | streamStash.addOutputPlugin(plugin) 184 | } catch (error) { 185 | error.message.should.equal('Input plugin did not have a name') 186 | return 187 | } 188 | 189 | return new Error('Should have had an error') 190 | }) 191 | 192 | it('Should throw an error if the name is already used', function () { 193 | var streamStash = new StreamStash({ logger: new Logger() }) 194 | , plugin = new EventEmitter() 195 | 196 | plugin.name = '1' 197 | streamStash.addOutputPlugin(plugin).should.equal(true) 198 | 199 | try { 200 | streamStash.addOutputPlugin(plugin) 201 | } catch (error) { 202 | error.message.should.equal('Each output plugin must have a unique name') 203 | return 204 | } 205 | 206 | return new Error('Should have had an error') 207 | }) 208 | 209 | it('Should give the plugin 
an id, save it, and update the stats', function () { 210 | var streamStash = new StreamStash({ logger: new Logger() }) 211 | , plugin = new EventEmitter() 212 | 213 | plugin.name = '1' 214 | streamStash.addOutputPlugin(plugin).should.equal(true) 215 | plugin.pluginId.should.equal(0, 'Plugin id was wrong') 216 | 217 | streamStash.outputs[0].should.equal(plugin, 'Plugin was not recorded properly') 218 | streamStash.stats.plugins.total.should.equal(1, 'Stats were not updated properly') 219 | 220 | plugin = new EventEmitter() 221 | plugin.name = '2' 222 | streamStash.addOutputPlugin(plugin).should.equal(true) 223 | plugin.pluginId.should.equal(1, 'Plugin id was wrong') 224 | 225 | streamStash.outputs[1].should.equal(plugin, 'Plugin was not recorded properly') 226 | streamStash.stats.plugins.total.should.equal(2, 'Stats were not updated properly') 227 | }) 228 | }) 229 | 230 | describe('#addFilter', function () { 231 | 232 | it('Should not add any filters if not in `configuring` state', function () { 233 | var streamStash = new StreamStash({ logger: new Logger() }) 234 | streamStash.state = 2 235 | streamStash.addFilter().should.equal(false) 236 | }) 237 | 238 | it('Should not add non function filters', function () { 239 | var streamStash = new StreamStash({ logger: new Logger() }) 240 | 241 | try { 242 | streamStash.addFilter('asf') 243 | } catch (error) { 244 | error.message.should.equal('Attempted to add a filter that is not a function') 245 | return 246 | } 247 | 248 | return new Error('Should have had an error') 249 | }) 250 | 251 | it('Should not allow the same filter to be added more than once', function () { 252 | var streamStash = new StreamStash({ logger: new Logger() }) 253 | , filter = function () {} 254 | 255 | streamStash.addFilter(filter) 256 | 257 | try { 258 | streamStash.addFilter(filter) 259 | } catch (error) { 260 | error.message.should.equal('Attempted to add the same filter more than once') 261 | return 262 | } 263 | 264 | return new Error('Should have had an error') 265 | }) 266 | 267 | it('Should record filters on successful add', function () { 268 | var streamStash = new StreamStash({ logger: new Logger() }) 269 | 270 | var filter1 = function () {} 271 | streamStash.addFilter(filter1) 272 | streamStash.filters[0].func.should.equal(filter1) 273 | streamStash.filters[0].name.should.eql(0) 274 | 275 | var filter2 = function () {} 276 | streamStash.addFilter(filter2) 277 | streamStash.filters[1].func.should.equal(filter2) 278 | streamStash.filters[1].name.should.eql(1) 279 | 280 | var filter3 = function () {} 281 | streamStash.addFilter('test', filter3) 282 | streamStash.filters[2].func.should.equal(filter3) 283 | streamStash.filters[2].name.should.eql('test') 284 | }) 285 | 286 | }) 287 | 288 | describe('#start', function () { 289 | 290 | it('Should not start if not in `configuring` state', function () { 291 | var streamStash = new StreamStash({ logger: new Logger() }) 292 | streamStash.state = 2 293 | streamStash.start().should.equal(false) 294 | }) 295 | 296 | it('Should throw an error if no inputs were configured', function () { 297 | var streamStash = new StreamStash({ logger: new Logger() }) 298 | 299 | try { 300 | streamStash.start() 301 | } catch (error) { 302 | error.message.should.equal('At least 1 input plugin must be configured') 303 | return 304 | } 305 | 306 | return new Error('Should have had an error') 307 | }) 308 | 309 | it('Should throw an error if no inputs were configured', function () { 310 | var streamStash = new StreamStash({ logger: new Logger() }) 
311 | , plugin = new EventEmitter() 312 | 313 | plugin.name = '1' 314 | streamStash.addInputPlugin(plugin) 315 | 316 | try { 317 | streamStash.start() 318 | } catch (error) { 319 | error.message.should.equal('At least 1 output plugin must be configured') 320 | return 321 | } 322 | 323 | return new Error('Should have had an error') 324 | }) 325 | 326 | it('Should start up properly', function () { 327 | var streamStash = new StreamStash({ logger: new Logger() }) 328 | , plugin = new EventEmitter() 329 | , sawStart = false 330 | 331 | plugin.name = '1' 332 | streamStash.addInputPlugin(plugin) 333 | streamStash.addOutputPlugin(plugin) 334 | 335 | streamStash.on('start', function () { 336 | sawStart = true 337 | }) 338 | 339 | //TODO: info log assert 340 | 341 | streamStash.start().should.equal(true) 342 | streamStash.stats.startTime.should.instanceof(Date) 343 | streamStash.state.should.equal(StreamStash.STATE.STARTED) 344 | sawStart.should.equal(true) 345 | }) 346 | 347 | }) 348 | 349 | describe('#stop', function () { 350 | 351 | it('Should not stop if not in `started` state', function () { 352 | var streamStash = new StreamStash({ logger: new Logger() }) 353 | streamStash.stop().should.equal(false) 354 | }) 355 | 356 | it('Should begin a shutdown properly', function () { 357 | var streamStash = new StreamStash({ logger: new Logger() }) 358 | , sawEvent = false 359 | 360 | streamStash.state = StreamStash.STATE.STARTED 361 | 362 | streamStash.on('stopInput', function () { 363 | sawEvent = true 364 | }) 365 | 366 | streamStash.stop().should.equal(true) 367 | streamStash.state.should.equal(StreamStash.STATE.STOPPING_INPUT) 368 | sawEvent.should.equal(true) 369 | //TODO: info log assert 370 | }) 371 | 372 | it('Should wait for all inputs to stop then stop all plugins') 373 | 374 | it('Should wait for all in flight events to complete before stopping all plugins') 375 | 376 | it('Should properly stop once all plugins have stopped') 377 | 378 | }) 379 | 380 | it('Should emit filter timing stats').skip() 381 | 382 | }) 383 | -------------------------------------------------------------------------------- /test/inputs/StdIn.test.js: -------------------------------------------------------------------------------- 1 | // var StreamStash = require('../../'), 2 | // Readable = require('stream').Readable, 3 | // StdInInput = StreamStash.inputs.StdInInput, 4 | // EventContainer = StreamStash.EventContainer, 5 | // Logger = StreamStash.Logger, 6 | // EventEmitter = require('events').EventEmitter 7 | // 8 | // require('should') 9 | // 10 | // describe('StdInInput', function () { 11 | // 12 | // it('Should use the provided streamStash object', function () { 13 | // var streamStash = new EventEmitter() 14 | // , stdin = new StdInInput({ streamStash: streamStash, logger: new Logger() }) 15 | // 16 | // streamStash.listeners('start').length.should.equal(1) 17 | // }) 18 | // 19 | // it('Should use the provided logger object', function () { 20 | // var streamStash = new EventEmitter() 21 | // , logger = { 22 | // debug: function (message) { 23 | // arguments[0].should.equal('StdIn', 'message was wrong') 24 | // arguments[1].should.equal('starting up', 'message was wrong') 25 | // } 26 | // } 27 | // , stdin = new StdInInput({ streamStash: streamStash, logger: logger }) 28 | // }) 29 | // 30 | // it('Should default the name to `StdIn`', function () { 31 | // var stdin = new StdInInput({ streamStash: new EventEmitter(), logger: new Logger() }) 32 | // stdin.name.should.equal('StdIn', 'Default name was wrong') 33 | 
// }) 34 | // 35 | // it('Should use the provided name', function () { 36 | // var stdin = new StdInInput({ streamStash: new EventEmitter(), logger: new Logger(), name: 'hey' }) 37 | // stdin.name.should.equal('hey', 'name was wrong') 38 | // }) 39 | // 40 | // it('Should not emit events if not told to start', function (done) { 41 | // var stream = new Readable() 42 | // , stdin = new StdInInput({ 43 | // streamStash: new EventEmitter() 44 | // , logger: new Logger() 45 | // , EventContainer: EventContainer 46 | // , stream: stream 47 | // }) 48 | // 49 | // stream.read = function () { 50 | // stream.emit('data', 'a') 51 | // } 52 | // 53 | // stdin.on('event', function () { 54 | // done(new Error('An event was emitted')) 55 | // }) 56 | // 57 | // stream.on('data', function () { 58 | // done() 59 | // }) 60 | // 61 | // stream.emit('data', 'hey') 62 | // }) 63 | // 64 | // it('Should emit `started` after being told to start', function (done) { 65 | // var streamStash = new EventEmitter() 66 | // , stdin = new StdInInput({ 67 | // streamStash: streamStash 68 | // , logger: new Logger() 69 | // , EventContainer: EventContainer 70 | // }) 71 | // 72 | // stdin.on('started', function () { 73 | // done() 74 | // }) 75 | // 76 | // streamStash.emit('start') 77 | // }) 78 | // 79 | // it('Should emit `stoppedInput` after being told to stop input', function (done) { 80 | // var streamStash = new EventEmitter() 81 | // , stdin = new StdInInput({ 82 | // streamStash: streamStash 83 | // , logger: new Logger() 84 | // , EventContainer: EventContainer 85 | // }) 86 | // 87 | // stdin.on('stoppedInput', function () { 88 | // done() 89 | // }) 90 | // 91 | // streamStash.emit('start') 92 | // streamStash.emit('stopInput') 93 | // }) 94 | // 95 | // it('Should emit `stopped` after being told to stop', function (done) { 96 | // var streamStash = new EventEmitter() 97 | // , stdin = new StdInInput({ 98 | // streamStash: streamStash 99 | // , logger: new Logger() 100 | // , EventContainer: EventContainer 101 | // }) 102 | // 103 | // stdin.on('stopped', function () { 104 | // done() 105 | // }) 106 | // 107 | // streamStash.emit('start') 108 | // streamStash.emit('stopInput') 109 | // streamStash.emit('stop') 110 | // }) 111 | // 112 | // it('Should emit events after being told to start', function (done) { 113 | // var streamStash = new EventEmitter(), 114 | // stream = new Readable() 115 | // 116 | // stream.read = function () { 117 | // stream.emit('data', '') 118 | // } 119 | // 120 | // var stdin = new StdInInput({ 121 | // streamStash: streamStash 122 | // , logger: new Logger() 123 | // , EventContainer: EventContainer 124 | // , stream: stream 125 | // }) 126 | // 127 | // 128 | // stdin.on('event', function () { 129 | // done() 130 | // }) 131 | // 132 | // streamStash.emit('start') 133 | // stream.emit('data', 'hey') 134 | // }) 135 | // 136 | // it('Should not emit events after being told to stopInput', function (done) { 137 | // var streamStash = new EventEmitter() 138 | // , stream = new Readable() 139 | // 140 | // stream.read = function () { 141 | // stream.emit('data', '') 142 | // } 143 | // 144 | // var stdin = new StdInInput({ 145 | // streamStash: streamStash 146 | // , logger: new Logger() 147 | // , EventContainer: EventContainer 148 | // , stream: stream 149 | // }) 150 | // 151 | // stdin.on('event', function () { 152 | // done(new Error('Should not have emitted an event')) 153 | // }) 154 | // 155 | // streamStash.emit('start') 156 | // streamStash.emit('stopInput') 157 | // 158 | 
// stream.on('data', function () { 159 | // done() 160 | // }) 161 | // 162 | // stream.emit('data', 'hey') 163 | // }) 164 | // 165 | // it('Should use the provided EventContainer', function (done) { 166 | // var eventContainer = function (data) { 167 | // data.hey = 'yay' 168 | // return { data: data } 169 | // } 170 | // , streamStash = new EventEmitter() 171 | // , stream = new Readable() 172 | // 173 | // stream.read = function () { 174 | // stream.emit('data', '') 175 | // } 176 | // 177 | // var stdin = new StdInInput({ 178 | // streamStash: streamStash 179 | // , logger: new Logger() 180 | // , EventContainer: eventContainer 181 | // , stream: stream 182 | // }) 183 | // 184 | // stdin.on('event', function (event) { 185 | // event.data.hey.should.equal('yay', 'Did not use the provided EventContainer') 186 | // done() 187 | // }) 188 | // 189 | // streamStash.emit('start') 190 | // stream.emit('data', 'hey') 191 | // }) 192 | // 193 | // it('Should emit proper events', function (done) { 194 | // var streamStash = new EventEmitter() 195 | // , stream = new Readable() 196 | // 197 | // stream.read = function () { 198 | // stream.emit('data', '') 199 | // } 200 | // 201 | // var stdin = new StdInInput({ 202 | // streamStash: streamStash 203 | // , logger: new Logger() 204 | // , EventContainer: EventContainer 205 | // , stream: stream 206 | // }) 207 | // 208 | // stdin.on('event', function (event) { 209 | // event.state.should.equal(EventContainer.STATE.PROCESSING, 'Event state was wrong') 210 | // event.data.event_source.should.eql({ name: 'StdIn', timestamp: event.data.event_source.timestamp }, 'Event source was wrong') 211 | // event.data.message.should.equal('hey', 'Event message was wrong') 212 | // done() 213 | // }) 214 | // 215 | // streamStash.emit('start') 216 | // stream.emit('data', 'hey') 217 | // }) 218 | // 219 | // it('Should add provided fields to events', function (done) { 220 | // var streamStash = new EventEmitter() 221 | // , stream = new Readable() 222 | // 223 | // stream.read = function () { 224 | // stream.emit('data', '') 225 | // } 226 | // 227 | // var stdin = new StdInInput({ 228 | // streamStash: streamStash 229 | // , logger: new Logger() 230 | // , EventContainer: EventContainer 231 | // , stream: stream 232 | // , fields: { 233 | // added: 'yup' 234 | // } 235 | // }) 236 | // 237 | // stdin.on('event', function (event) { 238 | // event.data.event_source.should.eql({ added: 'yup', name: 'StdIn', timestamp: event.data.event_source.timestamp }, 'Added field was wrong') 239 | // done() 240 | // }) 241 | // 242 | // streamStash.emit('start') 243 | // stream.emit('data', 'hey') 244 | // }) 245 | // 246 | // it('Should overwrite provided fields with built in events', function (done) { 247 | // var streamStash = new EventEmitter() 248 | // , stream = new Readable() 249 | // 250 | // stream.read = function () { 251 | // stream.emit('data', '') 252 | // } 253 | // 254 | // var stdin = new StdInInput({ 255 | // streamStash: streamStash, 256 | // logger: new Logger(), 257 | // EventContainer: EventContainer, 258 | // stream: stream, 259 | // fields: { 260 | // added: 'yup', 261 | // name: 'overwrite' 262 | // } 263 | // }) 264 | // 265 | // stdin.on('event', function (event) { 266 | // event.data.event_source.should.eql({ name: 'overwrite', added: 'yup', timestamp: event.data.event_source.timestamp }) 267 | // event.data.message.should.equal('hey', 'Event message was wrong') 268 | // done() 269 | // }) 270 | // 271 | // streamStash.emit('start') 272 | // 
stream.emit('data', 'hey') 273 | // }) 274 | // 275 | // }) 276 | -------------------------------------------------------------------------------- /test/outputs/StdOut.test.js: -------------------------------------------------------------------------------- 1 | var StreamStash = require('../../') 2 | , StdOutOutput = StreamStash.outputs.StdOutOutput 3 | , EventContainer = StreamStash.EventContainer 4 | , Logger = StreamStash.Logger 5 | , EventEmitter = require('events').EventEmitter 6 | 7 | require('should') 8 | 9 | describe('StdOutOutput', function () { 10 | it('Should use the provided streamStash object', function () { 11 | var streamStash = new EventEmitter() 12 | , stdout = new StdOutOutput({ streamStash: streamStash, logger: new Logger() }) 13 | 14 | streamStash.listeners('start').length.should.equal(1) 15 | }) 16 | 17 | it('Should use the provided logger object', function () { 18 | var streamStash = new EventEmitter() 19 | , logger = { 20 | debug: function (message) { 21 | arguments[0].should.equal('StdOut', 'message was wrong') 22 | arguments[1].should.equal('starting up', 'message was wrong') 23 | } 24 | } 25 | , stdout = new StdOutOutput({ streamStash: streamStash, logger: logger }) 26 | }) 27 | 28 | it('Should default the name to `StdOut`', function () { 29 | var stdout = new StdOutOutput({ streamStash: new EventEmitter(), logger: new Logger() }) 30 | stdout.name.should.equal('StdOut', 'Default name was wrong') 31 | }) 32 | 33 | it('Should use the provided name', function () { 34 | var stdout = new StdOutOutput({ streamStash: new EventEmitter(), logger: new Logger(), name: 'hey' }) 35 | stdout.name.should.equal('hey', 'name was wrong') 36 | }) 37 | 38 | it('Should not output any events if not told to start', function (done) { 39 | var streamStash = new EventEmitter() 40 | , stream = { 41 | write: function () { 42 | done(new Error('Output an event')) 43 | } 44 | } 45 | , stdout = new StdOutOutput({ streamStash: streamStash, logger: new Logger(), stream: stream }) 46 | 47 | streamStash.emit('output', new EventContainer()) 48 | done() 49 | }) 50 | 51 | it('Should emit `started` after being told to start', function (done) { 52 | var streamStash = new EventEmitter() 53 | , stdout = new StdOutOutput({ streamStash: streamStash, logger: new Logger() }) 54 | 55 | stdout.on('started', function () { 56 | done() 57 | }) 58 | 59 | streamStash.emit('start') 60 | }) 61 | 62 | it('Should emit `stopped` after being told to stop', function (done) { 63 | var streamStash = new EventEmitter() 64 | , stdout = new StdOutOutput({ streamStash: streamStash, logger: new Logger() }) 65 | 66 | stdout.on('stopped', function () { 67 | done() 68 | }) 69 | 70 | 71 | streamStash.emit('start') 72 | streamStash.emit('stop') 73 | }) 74 | 75 | it('Should output events after told to start', function (done) { 76 | var streamStash = new EventEmitter() 77 | , stream = { 78 | write: function () { 79 | done() 80 | } 81 | } 82 | , stdout = new StdOutOutput({ streamStash: streamStash, logger: new Logger(), stream: stream }) 83 | 84 | streamStash.emit('start') 85 | streamStash.emit('output', new EventContainer()) 86 | }) 87 | 88 | it('Should emit `complete` after outputting events', function (done) { 89 | var streamStash = new EventEmitter() 90 | , stream = { write: function () {} } 91 | , stdout = new StdOutOutput({ streamStash: streamStash, logger: new Logger(), stream: stream }) 92 | 93 | stdout.on('complete', function (event) { 94 | event.should.equal('yay') 95 | done() 96 | }) 97 | 98 | streamStash.emit('start') 99 | 
streamStash.emit('output', 'yay') 100 | }) 101 | 102 | it('Should add provided fields to the event', function (done) { 103 | var streamStash = new EventEmitter() 104 | , event = new EventContainer({ message: 'here' }) 105 | , stream = { 106 | write: function (message) { 107 | message.should.equal('{"added":"yup","message":"here"}\n', 'Output message was wrong') 108 | done() 109 | } 110 | } 111 | , stdout = new StdOutOutput({ 112 | streamStash: streamStash 113 | , logger: new Logger() 114 | , stream: stream 115 | , fields: { 116 | added: 'yup' 117 | } 118 | }) 119 | 120 | streamStash.emit('start') 121 | streamStash.emit('output', event) 122 | }) 123 | 124 | it('Should not overwrite event fields with provided fields', function (done) { 125 | var streamStash = new EventEmitter() 126 | , event = new EventContainer({ message: 'here' }) 127 | , stream = { 128 | write: function (message) { 129 | message.should.equal('{"added":"yup","message":"here"}\n', 'Output message was wrong') 130 | done() 131 | } 132 | } 133 | , stdout = new StdOutOutput({ 134 | streamStash: streamStash 135 | , logger: new Logger() 136 | , stream: stream 137 | , fields: { 138 | added: 'yup' 139 | , message: 'overwrite' 140 | } 141 | }) 142 | 143 | streamStash.emit('start') 144 | streamStash.emit('output', event) 145 | }) 146 | 147 | it('Should pretty print events if configured', function (done) { 148 | var streamStash = new EventEmitter() 149 | , event = new EventContainer({ message: 'here' }) 150 | , stream = { 151 | write: function (message) { 152 | message.should.equal('{\n "message": "here"\n}\n', 'Output message was wrong') 153 | done() 154 | } 155 | } 156 | , stdout = new StdOutOutput({ 157 | streamStash: streamStash 158 | , logger: new Logger() 159 | , stream: stream 160 | , prettyPrint: true 161 | }) 162 | 163 | streamStash.emit('start') 164 | streamStash.emit('output', event) 165 | }) 166 | 167 | it('Should stop outputting events when told to stop', function (done) { 168 | var streamStash = new EventEmitter() 169 | , stream = { write: function () {} } 170 | , stdout = new StdOutOutput({ streamStash: streamStash, logger: new Logger(), stream: stream }) 171 | 172 | stdout.on('complete', function () { 173 | done(new Error('Should not have output the event')) 174 | }) 175 | 176 | streamStash.emit('start') 177 | streamStash.emit('stop') 178 | 179 | streamStash.on('output', function () { 180 | done() 181 | }) 182 | 183 | streamStash.emit('output', 'yay') 184 | }) 185 | 186 | }) 187 | -------------------------------------------------------------------------------- /test/parsers/goAuditParser.test.js: -------------------------------------------------------------------------------- 1 | var StreamStash = require('../../'), 2 | assertParserResult = require('./util').assertParserResult 3 | 4 | describe('goAuditParser', function () { 5 | 6 | it.skip('Should parse a single event', function () { 7 | var thing = { 8 | "sequence":1226433, 9 | "timestamp":"1459447820.317", 10 | "messages":[{"type":1305,"data":"audit_pid=14842 old=14842 auid=1000 ses=37 res=1"}], 11 | "uid_map":{"1000":"ubuntu"} 12 | } 13 | 14 | assertParserResult( 15 | StreamStash.parsers.goAuditParser.raw, 16 | JSON.stringify(thing), 17 | {} 18 | ) 19 | }) 20 | 21 | it('Should parse a syscall event', function () { 22 | var data = { 23 | "sequence":1226679, 24 | "timestamp":"1459449216.329", 25 | "messages":[ 26 | {"type":1300,"data":"arch=c000003e syscall=59 success=yes exit=0 a0=7f7242278f28 a1=7f7242278e60 a2=7f7242278e78 a3=7f7241707a10 items=2 ppid=15125 
pid=15126 auid=1000 uid=1000 gid=1000 euid=1000 suid=1000 fsuid=1000 egid=1000 sgid=1000 fsgid=1000 tty=pts0 ses=37 comm=\"curl\" exe=\"/usr/bin/curl\" key=(null)"}, 27 | ], 28 | "uid_map":{"0":"root","1000":"ubuntu"} 29 | } 30 | 31 | assertParserResult( 32 | StreamStash.parsers.goAuditParser.raw, 33 | JSON.stringify(data), 34 | { 35 | syscall: { 36 | arch: { bits: '64', endianness: 'little', name: 'x86_64' }, 37 | id: '59', 38 | name: 'execve', 39 | success: 'yes', 40 | exit: '0', 41 | a0: '7f7242278f28', 42 | a1: '7f7242278e60', 43 | a2: '7f7242278e78', 44 | a3: '7f7241707a10', 45 | items: '2', 46 | ppid: '15125', 47 | pid: '15126', 48 | auid: { name: 'ubuntu', id: '1000' }, 49 | uid: { name: 'ubuntu', id: '1000' }, 50 | gid: '1000', 51 | euid: { name: 'ubuntu', id: '1000' }, 52 | suid: { name: 'ubuntu', id: '1000' }, 53 | fsuid: { name: 'ubuntu', id: '1000' }, 54 | egid: '1000', 55 | sgid: '1000', 56 | fsgid: '1000', 57 | tty: 'pts0', 58 | session_id: '37', 59 | command: 'curl', 60 | executable: '/usr/bin/curl', 61 | key: '' 62 | }, 63 | sequence: 1226679, 64 | unknown: [], 65 | timestamp: new Date('1459449216.329' * 1000), 66 | message: "ubuntu succeeded to execve `unknown path` via `/usr/bin/curl`" 67 | } 68 | ) 69 | }) 70 | 71 | it('Should parse a complex execve event', function () { 72 | var data = { 73 | "sequence":1226679, 74 | "timestamp":'1459449216.329', 75 | "messages":[ 76 | {"type":1309,"data":"argc=2 a0=\"curl\""}, 77 | {"type":1309,"data":" a1_len=52082 a1[0]=68"}, 78 | {"type":1309,"data":" a1[1]=68"}, 79 | {"type":1309,"data":" a1[2]=68"}, 80 | {"type":1309,"data":" a1[3]=68"}, 81 | {"type":1309,"data":" a1[4]=68"}, 82 | {"type":1309,"data":" a1[5]=68"}, 83 | {"type":1309,"data":" a1[6]=68"}, 84 | ], 85 | "uid_map":{"0":"root","1000":"ubuntu"} 86 | } 87 | 88 | assertParserResult( 89 | StreamStash.parsers.goAuditParser.raw, 90 | JSON.stringify(data), 91 | { 92 | execve: { 93 | command: 'curl hhhhhhh' 94 | }, 95 | sequence: 1226679, 96 | unknown: [], 97 | timestamp: new Date('1459449216.329' * 1000), 98 | message: "" 99 | } 100 | ) 101 | }) 102 | 103 | it('Should parse paths', function () { 104 | var data = { 105 | "sequence":1226679, 106 | "timestamp":'1459449216.329', 107 | "messages":[ 108 | {"type":1302,"data":"item=0 name=\"/usr/bin/curl\" inode=638 dev=ca:01 mode=0100755 ouid=0 ogid=0 rdev=00:00 nametype=NORMAL"}, 109 | {"type":1302,"data":"item=1 name=\"/lib64/ld-linux-x86-64.so.2\" inode=396037 dev=ca:01 mode=0100755 ouid=0 ogid=0 rdev=00:00 nametype=NORMAL"} 110 | ], 111 | "uid_map":{"0":"root","1000":"ubuntu"} 112 | } 113 | 114 | assertParserResult( 115 | StreamStash.parsers.goAuditParser.raw, 116 | JSON.stringify(data), 117 | { 118 | paths: [ 119 | { 120 | dev: 'ca:01', 121 | inode: '638', 122 | mode: '0100755', 123 | name: '/usr/bin/curl', 124 | nametype: 'NORMAL', 125 | ogid: '0', 126 | ouid: { id: '0', name: 'root' }, 127 | rdev: '00:00' 128 | }, 129 | { 130 | dev: 'ca:01', 131 | inode: '396037', 132 | mode: '0100755', 133 | name: '/lib64/ld-linux-x86-64.so.2', 134 | nametype: 'NORMAL', 135 | ogid: '0', 136 | ouid: { id: '0', name: 'root' }, 137 | rdev: '00:00' 138 | } 139 | ], 140 | sequence: 1226679, 141 | unknown: [], 142 | timestamp: new Date('1459449216.329' * 1000), 143 | message: "" 144 | } 145 | ) 146 | }) 147 | 148 | it('Should parse cwd', function () { 149 | var data = { 150 | "sequence":1226679, 151 | "timestamp":'1459449216.329', 152 | "messages":[ 153 | {"type":1307,"data":" cwd=2F686F6D652F7562756E74752F74657374207769746820737061636573"} 154 | 
], 155 | "uid_map":{"0":"root","1000":"ubuntu"} 156 | } 157 | 158 | assertParserResult( 159 | StreamStash.parsers.goAuditParser.raw, 160 | JSON.stringify(data), 161 | { 162 | cwd: '/home/ubuntu/test with spaces', 163 | sequence: 1226679, 164 | unknown: [], 165 | timestamp: new Date('1459449216.329' * 1000), 166 | message: "" 167 | } 168 | ) 169 | }) 170 | 171 | it('Normal execve test', function () { 172 | var data = { 173 | "sequence":1226679, 174 | "timestamp":'1459449216.329', 175 | "messages":[ 176 | {"type":1307,"data":" cwd=2F686F6D652F7562756E74752F74657374207769746820737061636573"}, 177 | {"type":1302,"data":"item=0 name=\"/usr/bin/curl\" inode=638 dev=ca:01 mode=0100755 ouid=0 ogid=0 rdev=00:00 nametype=NORMAL"}, 178 | {"type":1302,"data":"item=1 name=\"/lib64/ld-linux-x86-64.so.2\" inode=396037 dev=ca:01 mode=0100755 ouid=0 ogid=0 rdev=00:00 nametype=NORMAL"}, 179 | {"type":1309,"data":"argc=2 a0=\"curl\""}, 180 | {"type":1309,"data":" a1_len=52082 a1[0]=68"}, 181 | {"type":1309,"data":" a1[1]=68"}, 182 | {"type":1309,"data":" a1[2]=68"}, 183 | {"type":1309,"data":" a1[3]=68"}, 184 | {"type":1309,"data":" a1[4]=68"}, 185 | {"type":1309,"data":" a1[5]=68"}, 186 | {"type":1309,"data":" a1[6]=68"}, 187 | {"type":1300,"data":"arch=c000003e syscall=59 success=yes exit=0 a0=7f7242278f28 a1=7f7242278e60 a2=7f7242278e78 a3=7f7241707a10 items=2 ppid=15125 pid=15126 auid=1000 uid=1000 gid=1000 euid=1000 suid=1000 fsuid=1000 egid=1000 sgid=1000 fsgid=1000 tty=pts0 ses=37 comm=\"curl\" exe=\"/usr/bin/curl\" key=(null)"} 188 | ], 189 | "uid_map":{"0":"root","1000":"ubuntu"} 190 | } 191 | 192 | assertParserResult( 193 | StreamStash.parsers.goAuditParser.raw, 194 | JSON.stringify(data), 195 | { 196 | "timestamp":new Date('1459449216.329' * 1000), 197 | "sequence":1226679, 198 | "unknown": [], 199 | "syscall":{ 200 | "arch":{"bits":"64","endianness":"little","name":"x86_64"}, 201 | "success":"yes", 202 | "exit":"0", 203 | "a0":"7f7242278f28", 204 | "a1":"7f7242278e60", 205 | "a2":"7f7242278e78", 206 | "a3":"7f7241707a10", 207 | "items":"2", 208 | "ppid":"15125", 209 | "pid":"15126", 210 | "auid":{"name":"ubuntu","id":"1000"}, 211 | "uid":{"name":"ubuntu","id":"1000"}, 212 | "gid":"1000", 213 | "euid":{"name":"ubuntu","id":"1000"}, 214 | "suid":{"name":"ubuntu","id":"1000"}, 215 | "fsuid":{"name":"ubuntu","id":"1000"}, 216 | "egid":"1000", 217 | "sgid":"1000", 218 | "fsgid":"1000", 219 | "tty":"pts0", 220 | "key":"", 221 | "id":"59", 222 | "session_id":"37", 223 | "name":"execve", 224 | "command":"curl", 225 | "executable":"/usr/bin/curl" 226 | }, 227 | "paths":[ 228 | {"name":"/usr/bin/curl","inode":"638","dev":"ca:01","mode":"0100755","ouid":{"name":"root","id":"0"},"ogid":"0","rdev":"00:00","nametype":"NORMAL"}, 229 | {"name":"/lib64/ld-linux-x86-64.so.2","inode":"396037","dev":"ca:01","mode":"0100755","ouid":{"name":"root","id":"0"},"ogid":"0","rdev":"00:00","nametype":"NORMAL"} 230 | ], 231 | "cwd":"/home/ubuntu/test with spaces", 232 | "execve":{ 233 | "command":"curl hhhhhhh" 234 | }, 235 | "message":"ubuntu succeeded to execve `curl hhhhhhh` via `/usr/bin/curl`" 236 | } 237 | ) 238 | }) 239 | 240 | it('Should handle = in values properly', function () { 241 | var data = {"sequence":10453717,"timestamp":"1462897538.564","messages":[{"type":1309,"data":"argc=1 a0=\"stuff=things\""}]}, 242 | result = StreamStash.parsers.goAuditParser.raw(JSON.stringify(data)) 243 | 244 | result.data.execve.command.should.eql("stuff=things") 245 | 246 | data = 
{"sequence":10453717,"timestamp":"1462897538.564","messages":[{"type":1309,"data":"argc=1 a0=\"stuff=\""}]} 247 | result = StreamStash.parsers.goAuditParser.raw(JSON.stringify(data)) 248 | 249 | result.data.execve.command.should.eql("stuff=") 250 | }) 251 | 252 | it('Should parse a sockaddr', function () { 253 | var data = {"sequence":10453717,"timestamp":"1462897538.564","messages":[{"type":1306,"data":"saddr=0200270F000000000000000000000000"}]}, 254 | result = StreamStash.parsers.goAuditParser.raw(JSON.stringify(data)) 255 | result.data.socket_address.should.eql({"family":"inet","port":9999,"ip":"0.0.0.0","unknown":"0000000000000000"}) 256 | 257 | data = {"sequence":10453717,"timestamp":"1462897538.564","messages":[{"type":1306,"data":"saddr=0A00270F0000000000000000000000000000000000000001000000000000"}]} 258 | result = StreamStash.parsers.goAuditParser.raw(JSON.stringify(data)) 259 | result.data.socket_address.should.eql({"family":"inet6","port":9999,"flow_info":"00000000","ip":"0000:0000:0000:0000:0000:0000:0000:0001","scope_id":"00000000","unknown":"0000"}) 260 | 261 | data = {"sequence":10453717,"timestamp":"1462897538.564","messages":[{"type":1306,"data":"saddr=01002F686F6D652F6E6174652F736F636B65740010"}]} 262 | result = StreamStash.parsers.goAuditParser.raw(JSON.stringify(data)) 263 | result.data.socket_address.should.eql({"family":"local","path":"/home/nate/socket","unknown":"0010"}) 264 | }) 265 | 266 | it('should parse a proctitle', function () { 267 | var data = {"sequence":1188,"timestamp":"1473790050.668","messages":[{"type":1327,"data":"proctitle=6E63002D6C0039393939"}]}, 268 | result = StreamStash.parsers.goAuditParser.raw(JSON.stringify(data)) 269 | 270 | result.data.proctitle.should.eql('nc -l 9999') 271 | }) 272 | 273 | }) 274 | -------------------------------------------------------------------------------- /test/parsers/httpCombinedAccessParser.test.js: -------------------------------------------------------------------------------- 1 | var StreamStash = require('../../'), 2 | assertParserResult = require('./util').assertParserResult, 3 | EventContainer = StreamStash.EventContainer 4 | 5 | describe('httpCombinedAccessParser', function () { 6 | 7 | it('Should parse a regular combined access log line', function () { 8 | assertParserResult( 9 | StreamStash.parsers.httpCombinedAccessParser.raw, 10 | '127.0.0.1 derp user_name [09/Feb/2016:13:43:01 +0800] "GET /path HTTP/1.1" 200 140 "http://referrer" "really long user agent"', 11 | { 12 | bytes: '140', 13 | identd: 'derp', 14 | message: 'GET /path HTTP/1.1', 15 | method: 'GET', 16 | path: '/path', 17 | version: 'HTTP/1.1', 18 | referrer: 'http://referrer', 19 | remote_host: '127.0.0.1', 20 | status_code: '200', 21 | timestamp: new Date('2016-02-08 21:43:01.000 -0800'), 22 | user: 'user_name', 23 | user_agent: 'really long user agent' 24 | } 25 | ) 26 | }) 27 | 28 | it('Should filter out fields that had - in them', function () { 29 | assertParserResult( 30 | StreamStash.parsers.httpCombinedAccessParser.raw, 31 | '- - - [09/Feb/2016:13:43:01 +0800] "GET /path HTTP/1.1" 200 140 "-" "-"', 32 | { 33 | bytes: '140', 34 | message: 'GET /path HTTP/1.1', 35 | method: 'GET', 36 | path: '/path', 37 | version: 'HTTP/1.1', 38 | status_code: '200', 39 | timestamp: new Date('2016-02-08 21:43:01.000 -0800') 40 | } 41 | ) 42 | }) 43 | 44 | it('Should provide both data and error with a bad date', function () { 45 | assertParserResult( 46 | StreamStash.parsers.httpCombinedAccessParser.raw, 47 | '- - - [09/Derp/2016:13:43:01 +0800] "GET /path 
HTTP/1.1" 200 140 "-" "-"', 48 | { 49 | bytes: '140', 50 | message: 'GET /path HTTP/1.1', 51 | method: 'GET', 52 | path: '/path', 53 | version: 'HTTP/1.1', 54 | status_code: '200' 55 | }, 56 | 'Invalid date' 57 | ) 58 | }) 59 | 60 | it('Should have the proper error on no match', function () { 61 | assertParserResult( 62 | StreamStash.parsers.httpCombinedAccessParser.raw, 63 | '- - - [09/Derp/2016:13:43:01 +0800 "GET /path HTTP/1.1" 200 140 "-" "-"', 64 | void 0, 65 | 'No match' 66 | ) 67 | }) 68 | 69 | }) 70 | -------------------------------------------------------------------------------- /test/parsers/httpVHostCombinedAccessParser.test.js: -------------------------------------------------------------------------------- 1 | var StreamStash = require('../../'), 2 | assertParserResult = require('./util').assertParserResult 3 | 4 | describe('httpVHostCombinedAccessParser', function () { 5 | 6 | it('Should parse a regular vhost combined access log line', function () { 7 | assertParserResult( 8 | StreamStash.parsers.httpVHostCombinedAccessParser.raw, 9 | 'thing.der.com:443 127.0.0.1 derp user_name [09/Feb/2016:13:43:01 +0800] "GET /path HTTP/1.1" 200 140 "http://referrer" "really long user agent"', 10 | { 11 | virtual_host: 'thing.der.com:443', 12 | bytes: '140', 13 | identd: 'derp', 14 | message: 'GET /path HTTP/1.1', 15 | method: 'GET', 16 | path: '/path', 17 | version: 'HTTP/1.1', 18 | referrer: 'http://referrer', 19 | remote_host: '127.0.0.1', 20 | status_code: '200', 21 | timestamp: new Date('2016-02-08 21:43:01.000 -0800'), 22 | user: 'user_name', 23 | user_agent: 'really long user agent' 24 | } 25 | ) 26 | }) 27 | 28 | }) 29 | -------------------------------------------------------------------------------- /test/parsers/index.test.js: -------------------------------------------------------------------------------- 1 | var StreamStash = require('../../'), 2 | assertParserResult = require('./util').assertParserResult, 3 | EventContainer = StreamStash.EventContainer 4 | 5 | describe('parser helper wrapper', function () { 6 | 7 | it('Copy the data into event.data', function () { 8 | var event = new EventContainer({ message: '{"derp":"flerp","message":"hi"}' }), 9 | result = StreamStash.parsers.jsonParser(event) 10 | 11 | result.should.eql(true) 12 | event.data.should.eql({ 13 | message: 'hi', 14 | json: { 15 | derp: 'flerp' 16 | } 17 | }) 18 | 19 | }) 20 | 21 | it('Should set parseError and _type on error', function () { 22 | var event = new EventContainer({ message: '"derp":"flerp","message":"hi"}' }), 23 | result = StreamStash.parsers.jsonParser(event) 24 | 25 | result.should.eql(false) 26 | event.data.parse_error.should.containEql( 27 | 'json: SyntaxError: Unexpected token :') 28 | }) 29 | 30 | it('Should set parseError and the specified type property on error', function () { 31 | var event = new EventContainer({ message: '"derp":"flerp","message":"hi"}' }), 32 | result = StreamStash.parsers.jsonParser(event, 'customType') 33 | 34 | result.should.eql(false) 35 | event.data.parse_error.should.containEql( 36 | 'json: SyntaxError: Unexpected token :') 37 | }) 38 | 39 | it('Should not modify event.data if no data was parsed', function () { 40 | var event = new EventContainer({ message: '""' }), 41 | result = StreamStash.parsers.jsonParser(event) 42 | 43 | result.should.eql(true) 44 | event.data.should.eql({ 45 | message: '""' 46 | }) 47 | }) 48 | 49 | }) 50 | -------------------------------------------------------------------------------- /test/parsers/jsonParser.test.js: 
-------------------------------------------------------------------------------- 1 | var StreamStash = require('../../'), 2 | assertParserResult = require('./util').assertParserResult, 3 | assertParserResultContains = require('./util').assertParserResultContains, 4 | EventContainer = StreamStash.EventContainer 5 | 6 | describe('jsonParser', function () { 7 | 8 | it('Should parse json', function () { 9 | assertParserResult( 10 | StreamStash.parsers.jsonParser.raw, 11 | '{"thing":{"here":["hi"]}}', 12 | { thing: { here: [ 'hi' ] } } 13 | ) 14 | }) 15 | 16 | it('Should have the proper error', function () { 17 | assertParserResultContains( 18 | StreamStash.parsers.jsonParser.raw, 19 | 'derp', 20 | void 0, 21 | 'SyntaxError: Unexpected token d' 22 | ) 23 | }) 24 | 25 | }) 26 | -------------------------------------------------------------------------------- /test/parsers/relpSyslogParser.test.js: -------------------------------------------------------------------------------- 1 | var StreamStash = require('../../'), 2 | assertParserResult = require('./util').assertParserResult, 3 | EventContainer = StreamStash.EventContainer 4 | 5 | //TODO: should probably test all facility and severity names 6 | 7 | describe('relpSyslogParser', function () { 8 | 9 | it('Should parse a proper message', function () { 10 | assertParserResult( 11 | StreamStash.parsers.relpSyslogParser.raw, 12 | '<86>2014-01-06T12:10:22.625376-08:00 localhost sshd[1001]: test', 13 | { 14 | facility: 10, 15 | facilityName: 'security/auth', 16 | host: 'localhost', 17 | message: 'test', 18 | priority: 86, 19 | service: 'sshd[1001]', 20 | severity: 6, 21 | severityName: 'info', 22 | timestamp: new Date('2014-01-06 12:10:22.625 -0800') 23 | } 24 | ) 25 | }) 26 | 27 | it('Should provide the proper error if not a syslog string', function () { 28 | assertParserResult( 29 | StreamStash.parsers.relpSyslogParser.raw, 30 | 'nope', 31 | void 0, 32 | 'Not a syslog message' 33 | ) 34 | }) 35 | 36 | it('Should provide the proper error if not a syslog string', function () { 37 | assertParserResult( 38 | StreamStash.parsers.relpSyslogParser.raw, 39 | '', 49 | void 0, 50 | 'Invalid priority value' 51 | ) 52 | 53 | assertParserResult( 54 | StreamStash.parsers.relpSyslogParser.raw, 55 | '< >', 56 | void 0, 57 | 'Invalid priority value' 58 | ) 59 | }) 60 | 61 | it('Should provide the proper error on a bad date value', function () { 62 | assertParserResult( 63 | StreamStash.parsers.relpSyslogParser.raw, 64 | '<191>nope', 65 | void 0, 66 | 'Invalid date' 67 | ) 68 | 69 | assertParserResult( 70 | StreamStash.parsers.relpSyslogParser.raw, 71 | '<191>nope ', 72 | void 0, 73 | 'Invalid date' 74 | ) 75 | }) 76 | 77 | it('Should provide the proper error on a bad host', function () { 78 | assertParserResult( 79 | StreamStash.parsers.relpSyslogParser.raw, 80 | '<86>2014-01-06T12:10:22.625376-08:00 ', 81 | void 0, 82 | 'Invalid host' 83 | ) 84 | }) 85 | 86 | it('Should provide the proper error on a bad service', function () { 87 | assertParserResult( 88 | StreamStash.parsers.relpSyslogParser.raw, 89 | '<86>2014-01-06T12:10:22.625376-08:00 host ', 90 | void 0, 91 | 'Invalid service' 92 | ) 93 | 94 | assertParserResult( 95 | StreamStash.parsers.relpSyslogParser.raw, 96 | '<86>2014-01-06T12:10:22.625376-08:00 host asdfasd', 97 | void 0, 98 | 'Invalid service' 99 | ) 100 | }) 101 | 102 | }) 103 | -------------------------------------------------------------------------------- /test/parsers/sshdParser.test.js: 
-------------------------------------------------------------------------------- 1 | var StreamStash = require('../../'), 2 | assertParserResult = require('./util').assertParserResult, 3 | EventContainer = StreamStash.EventContainer 4 | 5 | describe('sshdParser', function () { 6 | 7 | it('Should parse simple accepted_connection', function () { 8 | assertParserResult( 9 | StreamStash.parsers.sshdParser.raw, 10 | 'Accepted keyboard-interactive/pam for user from 10.0.0.1 port 01810 ssh2', 11 | { 12 | event: 'accepted_connection', 13 | auth_type: 'keyboard-interactive/pam', 14 | user: 'user', 15 | client_ip: '10.0.0.1', 16 | client_port: '01810', 17 | protocol: 'ssh2', 18 | fingerprint: undefined 19 | } 20 | ) 21 | }) 22 | 23 | it('Should parse complex accepted_connection', function () { 24 | assertParserResult( 25 | StreamStash.parsers.sshdParser.raw, 26 | 'Accepted publickey for user from 10.0.0.1 port 01712 ssh2: ED2-519 00:00:00:00:00:00:00:00:00:de', 27 | { 28 | event: 'accepted_connection', 29 | auth_type: 'publickey', 30 | user: 'user', 31 | client_ip: '10.0.0.1', 32 | client_port: '01712', 33 | protocol: 'ssh2', 34 | fingerprint: 'ED2-519 00:00:00:00:00:00:00:00:00:de' 35 | } 36 | ) 37 | }) 38 | 39 | it('Should parse bad_protocol_version', function () { 40 | assertParserResult( 41 | StreamStash.parsers.sshdParser.raw, 42 | 'Bad protocol version identification \'herp derp\' from 10.0.0.1 port 04617', 43 | { 44 | event: 'bad_protocol_version', 45 | version: 'herp derp', 46 | client_ip: '10.0.0.1', 47 | client_port: '04617' 48 | } 49 | ) 50 | }) 51 | 52 | it('Should parse bad_protocol_version without version', function () { 53 | assertParserResult( 54 | StreamStash.parsers.sshdParser.raw, 55 | 'Bad protocol version identification \'\' from 10.0.0.1 port 04617', 56 | { 57 | event: 'bad_protocol_version', 58 | version: undefined, 59 | client_ip: '10.0.0.1', 60 | client_port: '04617' 61 | } 62 | ) 63 | }) 64 | 65 | it('Should parse disconnecting', function () { 66 | assertParserResult( 67 | StreamStash.parsers.sshdParser.raw, 68 | 'Disconnecting: Change of username thing: (flerp,ssh-connection) -> (derp,ssh-connection) [preauth]', 69 | { 70 | event: 'disconnecting', 71 | reason: 'Change of username thing: (flerp,ssh-connection) -> (derp,ssh-connection) [preauth]' 72 | } 73 | ) 74 | }) 75 | 76 | it('Should parse no_identification', function () { 77 | assertParserResult( 78 | StreamStash.parsers.sshdParser.raw, 79 | 'Did not receive identification string from 10.0.0.1', 80 | { 81 | event: 'no_identification', 82 | client_ip: '10.0.0.1' 83 | } 84 | ) 85 | }) 86 | 87 | it('Should parse error', function () { 88 | assertParserResult( 89 | StreamStash.parsers.sshdParser.raw, 90 | 'error: this_happened_here: that thing failed', 91 | { 92 | event: 'error', 93 | error: 'this_happened_here: that thing failed' 94 | } 95 | ) 96 | }) 97 | 98 | it('Should parse simple failed_event', function () { 99 | assertParserResult( 100 | StreamStash.parsers.sshdParser.raw, 101 | 'Failed Custom function for \'user\' from 10.0.0.1', 102 | { 103 | event: 'failed_event', 104 | auth_type: 'Custom function', 105 | user: 'user', 106 | client_ip: '10.0.0.1', 107 | client_port: undefined, 108 | protocol: undefined 109 | } 110 | ) 111 | }) 112 | 113 | it('Should parse complex failed_event', function () { 114 | assertParserResult( 115 | StreamStash.parsers.sshdParser.raw, 116 | 'Failed keyboard-interactive/pam for user from 10.0.0.1 port 06070 ssh2', 117 | { 118 | event: 'failed_event', 119 | auth_type: 
'keyboard-interactive/pam', 120 | user: 'user', 121 | client_ip: '10.0.0.1', 122 | client_port: '06070', 123 | protocol: 'ssh2' 124 | } 125 | ) 126 | }) 127 | 128 | it('Should parse simple successful_event', function () { 129 | assertParserResult( 130 | StreamStash.parsers.sshdParser.raw, 131 | 'Successful Custom function for \'user\' from 10.0.0.1', 132 | { 133 | event: 'successful_event', 134 | auth_type: 'Custom function', 135 | user: 'user', 136 | client_ip: '10.0.0.1', 137 | client_port: undefined, 138 | protocol: undefined 139 | } 140 | ) 141 | }) 142 | 143 | it('Should parse complex successful_event', function () { 144 | assertParserResult( 145 | StreamStash.parsers.sshdParser.raw, 146 | 'Successful keyboard-interactive/pam for user from 10.0.0.1 port 06070 ssh2', 147 | { 148 | event: 'successful_event', 149 | auth_type: 'keyboard-interactive/pam', 150 | user: 'user', 151 | client_ip: '10.0.0.1', 152 | client_port: '06070', 153 | protocol: 'ssh2' 154 | } 155 | ) 156 | }) 157 | 158 | it('Should parse fatal', function () { 159 | assertParserResult( 160 | StreamStash.parsers.sshdParser.raw, 161 | 'fatal: Read from socket failed: Connection reset by peer [preauth]', 162 | { 163 | event: 'fatal', 164 | error: 'Read from socket failed: Connection reset by peer [preauth]' 165 | } 166 | ) 167 | }) 168 | 169 | it('Should parse invalid_user', function () { 170 | assertParserResult( 171 | StreamStash.parsers.sshdParser.raw, 172 | 'Invalid user ftp from 10.0.0.1', 173 | { 174 | event: 'invalid_user', 175 | user: 'ftp', 176 | client_ip: '10.0.0.1' 177 | } 178 | ) 179 | }) 180 | 181 | it('Should parse pam_session opened', function () { 182 | assertParserResult( 183 | StreamStash.parsers.sshdParser.raw, 184 | 'pam_unix(sshd:session): session opened for user person', 185 | { 186 | event: 'pam_session', 187 | user: 'person', 188 | state: 'opened' 189 | } 190 | ) 191 | }) 192 | 193 | it('Should parse pam_session closed', function () { 194 | assertParserResult( 195 | StreamStash.parsers.sshdParser.raw, 196 | 'pam_unix(sshd:session): session closed for user person', 197 | { 198 | event: 'pam_session', 199 | user: 'person', 200 | state: 'closed' 201 | } 202 | ) 203 | }) 204 | 205 | it('Should parse postponed_connection', function () { 206 | assertParserResult( 207 | StreamStash.parsers.sshdParser.raw, 208 | 'Postponed keyboard-interactive/pam for user from 10.0.0.1 port 06070 ssh2 [preauth]', 209 | { 210 | event: 'postponed_connection', 211 | auth_type: 'keyboard-interactive/pam', 212 | user: 'user', 213 | client_ip: '10.0.0.1', 214 | client_port: '06070', 215 | protocol: 'ssh2' 216 | } 217 | ) 218 | }) 219 | 220 | it('Should parse received_disconnect without message', function () { 221 | assertParserResult( 222 | StreamStash.parsers.sshdParser.raw, 223 | 'Received disconnect from 10.0.0.1: 11: [preauth]', 224 | { 225 | event: 'received_disconnect', 226 | client_ip: '10.0.0.1', 227 | reason: undefined 228 | } 229 | ) 230 | }) 231 | 232 | it('Should parse received_disconnect with message', function () { 233 | assertParserResult( 234 | StreamStash.parsers.sshdParser.raw, 235 | 'Received disconnect from 10.0.0.1: 11: things are happening [preauth]', 236 | { 237 | event: 'received_disconnect', 238 | client_ip: '10.0.0.1', 239 | reason: 'things are happening' 240 | } 241 | ) 242 | }) 243 | 244 | it('Should return a proper error on failure to parse', function () { 245 | assertParserResult(StreamStash.parsers.sshdParser.raw, 'This will not match', void 0, 'No matches') 246 | }) 247 | 248 | it('Should parse a 
connection closed event', function () { 249 | assertParserResult( 250 | StreamStash.parsers.sshdParser.raw, 251 | 'Connection closed by 10.0.0.1 [preauth]', 252 | { event: 'closed_connection', client_ip: '10.0.0.1' } 253 | ) 254 | }) 255 | }) 256 | -------------------------------------------------------------------------------- /test/parsers/sudoParser.test.js: -------------------------------------------------------------------------------- 1 | var StreamStash = require('../../'), 2 | assertParserResult = require('./util').assertParserResult 3 | 4 | describe('sudoParser', function () { 5 | 6 | it('Should parse simple command', function () { 7 | assertParserResult( 8 | StreamStash.parsers.sudoParser.raw, 9 | ' nate : TTY=pts/0 ; PWD=/home/nate/hi there/oops; ; USER=root ; COMMAND=/bin/ls ../oops; -l', 10 | { 11 | as_user: 'root', 12 | command: '/bin/ls ../oops; -l', 13 | event: 'command', 14 | pwd: '/home/nate/hi there/oops;', 15 | tty: 'pts/0', 16 | user: 'nate' 17 | } 18 | ) 19 | }) 20 | 21 | it('Should parse a sudo error', function () { 22 | assertParserResult( 23 | StreamStash.parsers.sudoParser.raw, 24 | ' someone : command not allowed ; TTY=pts/40 ; PWD=/home/somewhere ; USER=root ; COMMAND=/bin/something', 25 | { 26 | as_user: 'root', 27 | command: '/bin/something', 28 | error: 'command not allowed', 29 | event: 'error', 30 | pwd: '/home/somewhere', 31 | tty: 'pts/40', 32 | user: 'someone' 33 | } 34 | ) 35 | }) 36 | 37 | }) 38 | -------------------------------------------------------------------------------- /test/parsers/util.js: -------------------------------------------------------------------------------- 1 | module.exports.assertParserResult = function (parserFunc, message, data, error) { 2 | var result = parserFunc(message) 3 | 4 | if (data === void 0) { 5 | if (result.data !== void 0) { 6 | throw new Error('Expected result.data to be undefined') 7 | } 8 | } else if (result.data !== void 0) { 9 | result.data.should.eql(data) 10 | } else { 11 | throw new Error('Did not get any result data back') 12 | } 13 | 14 | if (error === void 0) { 15 | if (result.error !== void 0) { 16 | throw new Error('Expected result.error to be undefined') 17 | } 18 | } else { 19 | result.error.should.eql(error) 20 | } 21 | } 22 | 23 | module.exports.assertParserResultContains = function (parserFunc, message, data, error) { 24 | var result = parserFunc(message) 25 | 26 | if (data === void 0) { 27 | if (result.data !== void 0) { 28 | throw new Error('Expected result.data to be undefined') 29 | } 30 | } else if (result.data !== void 0) { 31 | result.data.should.eql(data) 32 | } else { 33 | throw new Error('Did not get any result data back') 34 | } 35 | 36 | if (error === void 0) { 37 | if (result.error !== void 0) { 38 | throw new Error('Expected result.error to be undefined') 39 | } 40 | } else { 41 | result.error.should.containEql(error) 42 | } 43 | } 44 | --------------------------------------------------------------------------------