├── pkg ├── debian │ ├── debian │ │ ├── docs │ │ ├── compat │ │ ├── logstash.install │ │ ├── README │ │ ├── release.conf │ │ ├── logstash.logrotate │ │ ├── logstash.postrm │ │ ├── changelog │ │ ├── control │ │ ├── dirs │ │ ├── watch.ex │ │ ├── preinst.ex │ │ ├── logstash.default │ │ ├── prerm.ex │ │ ├── copyright │ │ ├── rules │ │ └── manpage.1.ex │ ├── after-install.sh │ ├── before-remove.sh │ ├── before-install.sh │ └── build.sh ├── centos │ ├── after-install.sh │ ├── before-install.sh │ ├── before-remove.sh │ └── sysconfig ├── rpm │ ├── readme.md │ └── SOURCES │ │ ├── logstash.sysconfig │ │ ├── logstash.logrotate │ │ ├── logstash.conf │ │ └── logstash.init ├── logrotate.conf ├── ubuntu │ ├── after-install.sh │ ├── before-remove.sh │ └── before-install.sh ├── logstash-web.upstart.ubuntu ├── logstash.upstart.ubuntu ├── logstash-web.default └── logstash.default ├── lib ├── logstash-event.rb ├── logstash │ ├── config │ │ ├── Makefile │ │ ├── test.conf │ │ ├── registry.rb │ │ └── file.rb │ ├── monkeypatches-for-bugs.rb │ ├── util │ │ ├── socket_peer.rb │ │ ├── prctl.rb │ │ ├── require-helper.rb │ │ ├── password.rb │ │ ├── fieldreference.rb │ │ ├── charset.rb │ │ └── zeromq.rb │ ├── version.rb │ ├── loadlibs.rb │ ├── sized_queue.rb │ ├── errors.rb │ ├── codecs │ │ ├── noop.rb │ │ ├── dots.rb │ │ ├── rubydebug.rb │ │ ├── json_spooler.rb │ │ ├── edn.rb │ │ ├── edn_lines.rb │ │ ├── spool.rb │ │ ├── base.rb │ │ ├── msgpack.rb │ │ ├── compress_spooler.rb │ │ ├── json.rb │ │ ├── fluent.rb │ │ ├── plain.rb │ │ ├── oldlogstashjson.rb │ │ ├── line.rb │ │ └── json_lines.rb │ ├── program.rb │ ├── namespace.rb │ ├── outputs │ │ ├── null.rb │ │ ├── websocket │ │ │ ├── app.rb │ │ │ └── pubsub.rb │ │ ├── udp.rb │ │ ├── exec.rb │ │ ├── elasticsearch-template.json │ │ ├── websocket.rb │ │ ├── lumberjack.rb │ │ ├── stdout.rb │ │ └── stomp.rb │ ├── time_addon.rb │ ├── filters │ │ ├── noop.rb │ │ ├── unique.rb │ │ ├── environment.rb │ │ ├── drop.rb │ │ ├── clone.rb │ │ ├── ruby.rb │ │ ├── i18n.rb │ │ ├── uuid.rb │ │ ├── json_encode.rb │ │ ├── urldecode.rb │ │ ├── gelfify.rb │ │ ├── checksum.rb │ │ ├── metaevent.rb │ │ └── extractnumbers.rb │ ├── inputs │ │ ├── threadable.rb │ │ ├── stdin.rb │ │ ├── varnishlog.rb │ │ ├── graphite.rb │ │ ├── websocket.rb │ │ ├── eventlog │ │ │ └── racob_fix.rb │ │ ├── heroku.rb │ │ ├── drupal_dblog │ │ │ └── jdbcconnection.rb │ │ ├── lumberjack.rb │ │ ├── pipe.rb │ │ ├── wmi.rb │ │ └── udp.rb │ ├── monkeypatches-for-debugging.rb │ ├── JRUBY-6970-openssl.rb │ ├── threadwatchdog.rb │ └── multiqueue.rb └── logstash.rb ├── patterns ├── mcollective ├── redis ├── postgresql ├── java ├── ruby ├── mcollective-patterns └── linux-syslog ├── bin ├── logstash-test └── logstash-web ├── docs ├── tutorials │ ├── media │ │ └── frontend-response-codes.png │ ├── 10-minute-walkthrough │ │ ├── apache_log.2.bz2 │ │ ├── apache_log.1 │ │ ├── hello.conf │ │ ├── hello-search.conf │ │ ├── apache-parse.conf │ │ ├── step-5-output.txt │ │ └── apache-elasticsearch.conf │ └── getting-started-centralized-overview-diagram.png ├── release-test-results.md ├── plugin-synopsis.html.erb ├── generate_index.rb ├── flags.md ├── plugin-milestones.md ├── release-engineering.md ├── learn.md └── logging-tool-comparisons.md ├── etc ├── perfdots.conf ├── perf.conf ├── examples │ ├── agent-stomp.conf │ ├── jsoninput.conf │ ├── agent-twitter.conf │ ├── groktest.conf │ ├── esriver.conf │ ├── agent-nagios.conf │ ├── apache-logs-to-statsd.conf │ ├── jsonfilter.conf │ ├── indexer.conf │ ├── exec-graphite.conf │ └── exec-split.conf 
├── agent.conf.example ├── jira-output.conf.sample └── agent.lgtm.conf ├── misc ├── patterns │ ├── redis │ ├── php5 │ └── mysql ├── screencast │ ├── README │ ├── 000.intro │ ├── 001.config │ ├── 002.webdemo │ └── run.rb ├── rate.sh ├── presentation-description.txt └── pl.rb ├── spec ├── README.md ├── filters │ ├── drop.rb │ ├── i18n.rb │ ├── unique.rb │ ├── json_encode.rb │ ├── date_performance.rb │ ├── environment.rb │ ├── checksum.rb │ ├── useragent.rb │ ├── split.rb │ ├── geoip.rb │ ├── translate.rb │ └── json.rb ├── speed.rb ├── support │ ├── date-http.rb │ ├── pull375.rb │ ├── LOGSTASH-733.rb │ ├── postwait1.rb │ ├── LOGSTASH-820.rb │ └── akamai-grok.rb ├── jar.rb ├── config.rb ├── inputs │ ├── generator.rb │ ├── gelf.rb │ ├── redis.rb │ ├── relp.rb │ └── imap.rb ├── examples │ ├── fail2ban.rb │ ├── graphite-input.rb │ └── syslog.rb ├── codecs │ ├── spool.rb │ ├── plain.rb │ ├── json.rb │ ├── edn.rb │ ├── msgpack.rb │ ├── json_spooler.rb │ ├── oldlogstashjson.rb │ ├── json_lines.rb │ └── edn_lines.rb └── util │ └── fieldeval_spec.rb ├── .tailor ├── .gitignore ├── .travis.yml ├── tools └── Gemfile ├── LICENSE ├── test └── jenkins │ └── create-jobs.rb ├── extract_services.rb ├── pull_release_note.rb ├── logstash-event.gemspec └── gembag.rb /pkg/debian/debian/docs: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pkg/debian/debian/compat: -------------------------------------------------------------------------------- 1 | 5 2 | -------------------------------------------------------------------------------- /pkg/centos/after-install.sh: -------------------------------------------------------------------------------- 1 | /sbin/chkconfig --add logstash 2 | -------------------------------------------------------------------------------- /lib/logstash-event.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/event" 3 | -------------------------------------------------------------------------------- /patterns/mcollective: -------------------------------------------------------------------------------- 1 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: 2 | -------------------------------------------------------------------------------- /pkg/debian/debian/logstash.install: -------------------------------------------------------------------------------- 1 | logstash.jar usr/share/logstash 2 | -------------------------------------------------------------------------------- /pkg/rpm/readme.md: -------------------------------------------------------------------------------- 1 | # RPM build 2 | 3 | spectool -g SPECS/logstash.spec 4 | rpmbuild -bb SPECS/logstash.spec 5 | -------------------------------------------------------------------------------- /bin/logstash-test: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | basedir=$(cd `dirname $0`/..; pwd) 4 | exec $basedir/bin/logstash rspec "$@" 5 | -------------------------------------------------------------------------------- /bin/logstash-web: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | basedir=$(cd `dirname $0`/..; pwd) 4 | exec $basedir/bin/logstash web "$@" 5 | -------------------------------------------------------------------------------- /lib/logstash/config/Makefile: 
-------------------------------------------------------------------------------- 1 | 2 | #ragel -R grammar.rl 3 | grammar.rb: grammar.treetop 4 | tt grammar.treetop 5 | -------------------------------------------------------------------------------- /lib/logstash/monkeypatches-for-bugs.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/JRUBY-6970" if RUBY_PLATFORM == "java" 3 | -------------------------------------------------------------------------------- /lib/logstash.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/agent" 3 | require "logstash/event" 4 | require "logstash/namespace" 5 | -------------------------------------------------------------------------------- /patterns/redis: -------------------------------------------------------------------------------- 1 | REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME} 2 | REDISLOG \[%{POSINT:pid}\] %{REDISTIMESTAMP:timestamp} \* 3 | 4 | -------------------------------------------------------------------------------- /docs/tutorials/media/frontend-response-codes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zen/logstash/master/docs/tutorials/media/frontend-response-codes.png -------------------------------------------------------------------------------- /docs/tutorials/10-minute-walkthrough/apache_log.2.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zen/logstash/master/docs/tutorials/10-minute-walkthrough/apache_log.2.bz2 -------------------------------------------------------------------------------- /patterns/postgresql: -------------------------------------------------------------------------------- 1 | # Default postgresql pg_log format pattern 2 | POSTGRESQL %{DATESTAMP:timestamp} %{TZ} %{DATA:user_id} %{GREEDYDATA:connection_id} %{POSINT:pid} 3 | 4 | -------------------------------------------------------------------------------- /docs/tutorials/getting-started-centralized-overview-diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zen/logstash/master/docs/tutorials/getting-started-centralized-overview-diagram.png -------------------------------------------------------------------------------- /pkg/debian/after-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | chown -R logstash:logstash /opt/logstash 4 | chown logstash /var/log/logstash 5 | chown logstash:logstash /var/lib/logstash 6 | -------------------------------------------------------------------------------- /pkg/logrotate.conf: -------------------------------------------------------------------------------- 1 | /var/log/logstash/*.log { 2 | daily 3 | rotate 7 4 | copytruncate 5 | compress 6 | missingok 7 | notifempty 8 | } 9 | -------------------------------------------------------------------------------- /pkg/rpm/SOURCES/logstash.sysconfig: -------------------------------------------------------------------------------- 1 | #LOGSTASH_LOGFILE=@@@LOGDIR@@@/@@@NAME@@@.log 2 | #LOGSTASH_PATH_CONF=@@@CONFDIR@@@ 3 | #LOGSTASH_JAVA_OPTS="-Djava.io.tmpdir=/opt/@@@NAME@@@/tmp" 4 | -------------------------------------------------------------------------------- /etc/perfdots.conf: 
-------------------------------------------------------------------------------- 1 | input { 2 | generator { 3 | type => gen 4 | } 5 | } 6 | 7 | output { 8 | stdout { 9 | debug => true 10 | debug_format => dots 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /misc/patterns/redis: -------------------------------------------------------------------------------- 1 | REDISLOG_WITH_LEVEL \[%{POSINT:pid}\] %{REDISTIMESTAMP:timestamp} # %{LOGLEVEL:redis_log_level} %{GREEDYDATA} 2 | REDISLOG_FIXED (%{REDISLOG}|%{REDISLOG_WITH_LEVEL}) 3 | -------------------------------------------------------------------------------- /misc/screencast/README: -------------------------------------------------------------------------------- 1 | The code here was used to automatically direct a screencast demonstrating logstash. 2 | 3 | The resulting video is here: http://www.youtube.com/watch?v=Fi7OaiNqPCc 4 | -------------------------------------------------------------------------------- /patterns/java: -------------------------------------------------------------------------------- 1 | JAVACLASS (?:[a-zA-Z0-9-]+\.)+[A-Za-z0-9$]+ 2 | JAVAFILE (?:[A-Za-z0-9_.-]+) 3 | JAVASTACKTRACEPART at %{JAVACLASS:class}\.%{WORD:method}\(%{JAVAFILE:file}:%{NUMBER:line}\) 4 | -------------------------------------------------------------------------------- /pkg/rpm/SOURCES/logstash.logrotate: -------------------------------------------------------------------------------- 1 | /var/log/logstash/*.log { 2 | daily 3 | rotate 7 4 | copytruncate 5 | compress 6 | missingok 7 | notifempty 8 | } 9 | -------------------------------------------------------------------------------- /lib/logstash/util/socket_peer.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | module ::LogStash::Util::SocketPeer 3 | public 4 | def peer 5 | "#{peeraddr[3]}:#{peeraddr[1]}" 6 | end # def peer 7 | end # module SocketPeer 8 | -------------------------------------------------------------------------------- /patterns/ruby: -------------------------------------------------------------------------------- 1 | RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO) 2 | RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\] *%{RUBY_LOGLEVEL:loglevel} -- +%{DATA:progname}: %{GREEDYDATA:message} 3 | -------------------------------------------------------------------------------- /pkg/debian/debian/README: -------------------------------------------------------------------------------- 1 | The Debian Package logstash 2 | ---------------------------- 3 | 4 | Comments regarding the Package 5 | 6 | -- Corey Quinn Sun, 24 Feb 2013 18:24:41 -0800 7 | -------------------------------------------------------------------------------- /pkg/debian/debian/release.conf: -------------------------------------------------------------------------------- 1 | // Used to drive apt-ftparchive 2 | APT::FTPArchive::Release::Origin "logstash.net"; 3 | APT::FTPArchive::Release::Suite "binary"; 4 | APT::FTPArchive::Release::Architectures "all"; 5 | 6 | -------------------------------------------------------------------------------- /patterns/mcollective-patterns: -------------------------------------------------------------------------------- 1 | # Remember, these can be multi-line events. 
2 | MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\]%{SPACE}%{LOGLEVEL:event_level} 3 | 4 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: 5 | -------------------------------------------------------------------------------- /lib/logstash/version.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | # The version of logstash. 3 | LOGSTASH_VERSION = "1.2.3.dev" 4 | 5 | # Note to authors: this should not include dashes because 'gem' barfs if 6 | # you include a dash in the version string. 7 | -------------------------------------------------------------------------------- /lib/logstash/loadlibs.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | jarpath = File.join(File.dirname(__FILE__), "../../vendor/**/*.jar") 3 | Dir[jarpath].each do |jar| 4 | if $DEBUG 5 | puts "Loading #{jar}" 6 | end 7 | require jar 8 | end 9 | 10 | -------------------------------------------------------------------------------- /pkg/debian/debian/logstash.logrotate: -------------------------------------------------------------------------------- 1 | /var/log/logstash/logstash.log { 2 | weekly 3 | copytruncate 4 | missingok 5 | rotate 52 6 | compress 7 | delaycompress 8 | notifempty 9 | } 10 | -------------------------------------------------------------------------------- /pkg/ubuntu/after-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | chown -R logstash:logstash /opt/logstash 4 | chown logstash /var/log/logstash 5 | chown logstash:logstash /var/lib/logstash 6 | 7 | ln -sf /opt/logstash/logstash.jar /var/lib/logstash/logstash.jar 8 | -------------------------------------------------------------------------------- /spec/README.md: -------------------------------------------------------------------------------- 1 | # How to run these tests 2 | 3 | Run one: 4 | 5 | `rspec spec/the/test.rb` 6 | 7 | Run them all: 8 | 9 | `rspec spec/**/*.rb` 10 | 11 | Debug one test: 12 | 13 | `LOGSTASH_DEBUG=y rspec spec/the/test.rb` 14 | 15 | -------------------------------------------------------------------------------- /lib/logstash/sized_queue.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/logging" 4 | 5 | require "thread" # for SizedQueue 6 | class LogStash::SizedQueue < SizedQueue 7 | # TODO(sissel): Soon will implement push/pop stats, etc 8 | end 9 | -------------------------------------------------------------------------------- /etc/perf.conf: -------------------------------------------------------------------------------- 1 | input { 2 | generator { 3 | type => gen 4 | } 5 | } 6 | 7 | filter { 8 | metrics { 9 | meter => "events" 10 | add_tag => "metric" 11 | } 12 | } 13 | 14 | output { 15 | stdout { 16 | tags => metric 17 | debug => true 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /lib/logstash/util/prctl.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module LibC 4 | require "ffi" 5 | extend FFI::Library 6 | ffi_lib 'c' 7 | 8 | # Ok so the 2nd arg isn't really a string... 
but whatever
9 | attach_function :prctl, [:int, :string, :long, :long, :long], :int
10 | end
11 |
12 |
--------------------------------------------------------------------------------
/.tailor:
--------------------------------------------------------------------------------
1 | Tailor.config do |config|
2 | config.file_set '*.rb' do |style|
3 | style.indentation_spaces 2, :level => :off
4 | style.max_line_length 80, :level => :off
5 | style.allow_trailing_line_spaces true, :level => :off
6 | style.spaces_after_comma false, :level => :off
7 | end
8 | end
9 |
--------------------------------------------------------------------------------
/lib/logstash/config/test.conf:
--------------------------------------------------------------------------------
1 | input {
2 | rabbitmq {
3 | port => 12345
4 | tag => [ a, b, c ]
5 | }
6 |
7 | stomp {
8 | port => 12345
9 | tag => [ stomp ]
10 | }
11 | }
12 |
13 | filter {
14 | date {
15 | hello => world
16 | hello => "Hello"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/pkg/ubuntu/before-remove.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | if [ "$1" = "remove" ]; then
4 | stop logstash >/dev/null 2>&1 || true
5 |
6 | if getent passwd logstash >/dev/null ; then
7 | userdel logstash
8 | fi
9 |
10 | if getent group logstash >/dev/null ; then
11 | groupdel logstash
12 | fi
13 | fi
14 |
--------------------------------------------------------------------------------
/etc/examples/agent-stomp.conf:
--------------------------------------------------------------------------------
1 | input {
2 | stdin { }
3 | stomp {
4 | host => "localhost"
5 | destination => "/topic/foo"
6 | }
7 | }
8 |
9 | output {
10 | stdout {
11 | debug => true
12 | }
13 |
14 | stomp {
15 | host => "localhost"
16 | destination => "/topic/foo"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/etc/examples/jsoninput.conf:
--------------------------------------------------------------------------------
1 | # Example config demonstrating the use of message_format
2 |
3 | input {
4 | stdin {
5 | type => test
6 | format => json
7 | message_format => "%{date} | %{user} | %{action} | %{reason}"
8 | }
9 | }
10 |
11 | output {
12 | stdout {
13 | debug => true
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/pkg/centos/before-install.sh:
--------------------------------------------------------------------------------
1 | # create logstash group
2 | if ! getent group logstash >/dev/null; then
3 | groupadd -r logstash
4 | fi
5 |
6 | # create logstash user
7 | if !
getent passwd logstash >/dev/null; then 8 | useradd -r -g logstash -d /opt/logstash \ 9 | -s /sbin/nologin -c "logstash" logstash 10 | fi 11 | -------------------------------------------------------------------------------- /pkg/debian/before-remove.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [ $1 = "remove" ]; then 4 | /etc/init.d/logstash stop >/dev/null 2>&1 || true 5 | 6 | if getent passwd logstash >/dev/null ; then 7 | userdel logstash 8 | fi 9 | 10 | if getent group logstash >/dev/null ; then 11 | groupdel logstash 12 | fi 13 | fi 14 | -------------------------------------------------------------------------------- /lib/logstash/errors.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | module LogStash 3 | class Error < ::StandardError; end 4 | class ConfigurationError < Error; end 5 | class PluginLoadingError < Error; end 6 | class ShutdownSignal < StandardError; end 7 | 8 | class Bug < Error; end 9 | class ThisMethodWasRemoved < Bug; end 10 | end 11 | -------------------------------------------------------------------------------- /pkg/centos/before-remove.sh: -------------------------------------------------------------------------------- 1 | if [ $1 -eq 0 ]; then 2 | /sbin/service logstash stop >/dev/null 2>&1 || true 3 | /sbin/chkconfig --del logstash 4 | if getent passwd logstash >/dev/null ; then 5 | userdel logstash 6 | fi 7 | 8 | if getent group logstash > /dev/null ; then 9 | groupdel logstash 10 | fi 11 | fi 12 | -------------------------------------------------------------------------------- /etc/examples/agent-twitter.conf: -------------------------------------------------------------------------------- 1 | input { 2 | twitter { 3 | type => "twitter" 4 | user => "USER" 5 | password => "PASSWORD" 6 | keywords => ["python", "ruby", "perl", "sysadmiN"] 7 | } 8 | stdin { 9 | type => "testingstdin" 10 | } 11 | } 12 | 13 | output { 14 | stdout { } 15 | elasticsearch { } 16 | } 17 | -------------------------------------------------------------------------------- /docs/tutorials/10-minute-walkthrough/apache_log.1: -------------------------------------------------------------------------------- 1 | 129.92.249.70 - - [18/Aug/2011:06:00:14 -0700] "GET /style2.css HTTP/1.1" 200 1820 "http://www.semicomplete.com/blog/geekery/bypassing-captive-portals.html" "Mozilla/5.0 (iPad; U; CPU OS 4_3_5 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8L1 Safari/6533.18.5" 2 | -------------------------------------------------------------------------------- /etc/examples/groktest.conf: -------------------------------------------------------------------------------- 1 | # Useful config for testing grok expressions (update "pattern" below) 2 | 3 | input { 4 | stdin { 5 | type => test 6 | } 7 | } 8 | 9 | filter { 10 | grok { 11 | type => "test" 12 | pattern => "%{SYSLOGLINE}" 13 | } 14 | } 15 | 16 | output { 17 | stdout { 18 | debug => true 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /lib/logstash/config/registry.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | 4 | # Global config registry. 5 | module LogStash::Config::Registry 6 | @registry = Hash.new 7 | class << self 8 | attr_accessor :registry 9 | 10 | # TODO(sissel): Add some helper methods here. 
11 | end 12 | end # module LogStash::Config::Registry 13 | 14 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .*.swp 2 | *.gem 3 | pkg/*.deb 4 | pkg/*.rpm 5 | *.class 6 | .rbx 7 | Gemfile.lock 8 | .rbx 9 | *.tar.gz 10 | *.jar 11 | .bundle 12 | build 13 | local 14 | test/setup/elasticsearch/elasticsearch-* 15 | vendor 16 | .sass-cache 17 | data 18 | .buildpath 19 | .project 20 | .DS_Store 21 | *.pyc 22 | etc/jira-output.conf 23 | coverage/* 24 | .VERSION.mk 25 | -------------------------------------------------------------------------------- /pkg/ubuntu/before-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # create logstash group 4 | if ! getent group logstash >/dev/null; then 5 | groupadd -r logstash 6 | fi 7 | 8 | # create logstash user 9 | if ! getent passwd logstash >/dev/null; then 10 | useradd -M -r -g logstash -d /var/lib/logstash \ 11 | -s /sbin/nologin -c "LogStash Service User" logstash 12 | fi 13 | -------------------------------------------------------------------------------- /pkg/debian/before-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # create logstash group 4 | if ! getent group logstash >/dev/null; then 5 | groupadd -r logstash 6 | fi 7 | 8 | # create logstash user 9 | if ! getent passwd logstash >/dev/null; then 10 | useradd -M -r -g logstash -d /var/lib/logstash \ 11 | -s /sbin/nologin -c "LogStash Service User" logstash 12 | fi 13 | 14 | -------------------------------------------------------------------------------- /etc/examples/esriver.conf: -------------------------------------------------------------------------------- 1 | input { 2 | file { 3 | type => "syslog" 4 | path => "/var/log/messages" 5 | } 6 | } 7 | 8 | filter { 9 | grok { 10 | type => "syslog" 11 | pattern => ["%{SYSLOGLINE}"] 12 | } 13 | } 14 | 15 | output { 16 | stdout { } 17 | elasticsearch_river { 18 | es_host => "localhost" 19 | rabbitmq_host => "localhost" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /spec/filters/drop.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/drop" 3 | 4 | describe LogStash::Filters::Drop do 5 | extend LogStash::RSpec 6 | 7 | describe "drop the event" do 8 | config <<-CONFIG 9 | filter { 10 | drop { } 11 | } 12 | CONFIG 13 | 14 | sample "hello" do 15 | insist { subject }.nil? 
16 | end 17 | end 18 | 19 | end 20 | -------------------------------------------------------------------------------- /spec/speed.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | 3 | describe "speed tests" do 4 | extend LogStash::RSpec 5 | count = 1000000 6 | 7 | config <<-CONFIG 8 | input { 9 | generator { 10 | type => foo 11 | count => #{count} 12 | } 13 | } 14 | output { null { } } 15 | CONFIG 16 | 17 | agent do 18 | puts "Rate: #{count / @duration}" 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: ruby 2 | rvm: 3 | - jruby-19mode 4 | jdk: 5 | - oraclejdk7 6 | - openjdk7 7 | 8 | script: 9 | - JRUBY_OPTS=--debug COVERAGE=true GEM_HOME=./vendor/bundle/jruby/1.9 GEM_PATH= ./vendor/bundle/jruby/1.9/bin/rspec spec/support/*.rb spec/filters/*.rb spec/examples/*.rb spec/codecs/*.rb spec/conditionals/*.rb spec/event.rb 10 | install: 11 | - ruby gembag.rb 12 | - make vendor-geoip 13 | -------------------------------------------------------------------------------- /lib/logstash/codecs/noop.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | 4 | class LogStash::Codecs::Noop < LogStash::Codecs::Base 5 | config_name "noop" 6 | 7 | milestone 1 8 | 9 | public 10 | def decode(data) 11 | yield data 12 | end # def decode 13 | 14 | public 15 | def encode(data) 16 | @on_event.call data 17 | end # def encode 18 | 19 | end # class LogStash::Codecs::Noop 20 | -------------------------------------------------------------------------------- /docs/release-test-results.md: -------------------------------------------------------------------------------- 1 | # Testing for a release 2 | 3 | * exec + split + stdout 4 | * tcp input (server and client modes) 5 | * tcp output (server and client modes) 6 | * graphite output (tested server failure conditions, netcat receiver) 7 | * statsd output (increment, netcat receiver) 8 | 9 | ## Test Suite 10 | 11 | Finished in 16.826 seconds. 12 | 13 | 29 tests, 119 assertions, 0 failures, 0 errors 14 | 15 | -------------------------------------------------------------------------------- /lib/logstash/codecs/dots.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | 4 | class LogStash::Codecs::Dots < LogStash::Codecs::Base 5 | config_name "dots" 6 | milestone 1 7 | 8 | public 9 | def decode(data) 10 | raise "Not implemented" 11 | end # def decode 12 | 13 | public 14 | def encode(data) 15 | @on_event.call(".") 16 | end # def encode 17 | 18 | end # class LogStash::Codecs::Dots 19 | -------------------------------------------------------------------------------- /lib/logstash/program.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | require "logstash/namespace" 4 | 5 | module LogStash::Program 6 | public 7 | def exit(value) 8 | if RUBY_ENGINE == "jruby" 9 | # Kernel::exit() in jruby just tosses an exception? Let's actually exit. 
10 | Java::java.lang.System.exit(value) 11 | else 12 | Kernel::exit(value) 13 | end 14 | end # def exit 15 | end # module LogStash::Program 16 | -------------------------------------------------------------------------------- /pkg/logstash-web.upstart.ubuntu: -------------------------------------------------------------------------------- 1 | # logstash-web - web server 2 | # 3 | 4 | description "logstash-web web server" 5 | 6 | start on virtual-filesystems 7 | stop on runlevel [06] 8 | 9 | # Respawn it if the process exits 10 | respawn 11 | 12 | setuid logstash 13 | setgid logstash 14 | 15 | # Change into a writable directory 16 | chdir /var/lib/logstash 17 | console log 18 | exec /usr/bin/java -jar /opt/logstash/logstash.jar web 19 | -------------------------------------------------------------------------------- /lib/logstash/namespace.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | #$: << File.join(File.dirname(__FILE__), "..", "..", "vendor", "bundle") 3 | 4 | module LogStash 5 | module Inputs; end 6 | module Outputs; end 7 | module Filters; end 8 | module Search; end 9 | module Config; end 10 | module File; end 11 | module Web; end 12 | module Util; end 13 | module PluginMixins; end 14 | 15 | SHUTDOWN = :shutdown 16 | end # module LogStash 17 | -------------------------------------------------------------------------------- /tools/Gemfile: -------------------------------------------------------------------------------- 1 | source "https://rubygems.org" 2 | #gemspec(:name => "logstash", :path => "../") 3 | 4 | gemspec = File.join(File.dirname(__FILE__), "..", "logstash.gemspec") 5 | spec = Gem::Specification.load(gemspec) 6 | spec.runtime_dependencies.each do |dep| 7 | gem dep.name, dep.requirement.to_s 8 | end 9 | 10 | group :development do 11 | spec.development_dependencies.each do |dep| 12 | gem dep.name, dep.requirement.to_s 13 | end 14 | end 15 | -------------------------------------------------------------------------------- /spec/support/date-http.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | 3 | describe "http dates", :if => RUBY_ENGINE == "jruby" do 4 | extend LogStash::RSpec 5 | 6 | config <<-'CONFIG' 7 | filter { 8 | date { 9 | match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] 10 | } 11 | } 12 | CONFIG 13 | 14 | sample("timestamp" => "25/Mar/2013:20:33:56 +0000") do 15 | insist { subject["@timestamp"] } == Time.iso8601("2013-03-25T20:33:56.000Z") 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /etc/examples/agent-nagios.conf: -------------------------------------------------------------------------------- 1 | input { 2 | stdin { type => "foo" } 3 | } 4 | 5 | filter { 6 | grep { 7 | type => "foo" 8 | match => [ "@message", ".*" ] 9 | add_field => [ 10 | "nagios_host", "%{@source_host}", 11 | "nagios_service", "example service", 12 | "nagios_annotation", "my annotation" 13 | ] 14 | } 15 | } 16 | 17 | output { 18 | stdout { 19 | } 20 | 21 | nagios { 22 | commandfile => "/tmp/cmdfile" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /pkg/centos/sysconfig: -------------------------------------------------------------------------------- 1 | # Set 'START' to true to have logstash start. 2 | START=false 3 | 4 | # The 'ulimit -n' setting for logstash. 
5 | # You may need to increase this if you are using 6 | # the elasticsearch output with 'embedded => true' 7 | #OPEN_FILES=16384 8 | 9 | # How many filter workers to run. 10 | # Note this requires you use thread-safe filters 11 | #FILTER_THREADS=1 12 | 13 | # The flags to provide to java 14 | #LS_JAVA_OPTS="-Xmx256m -Djava.io.tmpdir=$LS_HOME/tmp" 15 | 16 | -------------------------------------------------------------------------------- /lib/logstash/outputs/null.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/outputs/base" 3 | require "logstash/namespace" 4 | 5 | # A null output. This is useful for testing logstash inputs and filters for 6 | # performance. 7 | class LogStash::Outputs::Null < LogStash::Outputs::Base 8 | config_name "null" 9 | milestone 3 10 | 11 | public 12 | def register 13 | end # def register 14 | 15 | public 16 | def receive(event) 17 | end # def event 18 | end # class LogStash::Outputs::Null 19 | -------------------------------------------------------------------------------- /pkg/debian/debian/logstash.postrm: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | case "$1" in 6 | purge) 7 | userdel logstash || true 8 | rm -rf /var/lib/logstash /var/log/logstash \ 9 | /var/run/logstash /var/cache/logstash 10 | ;; 11 | 12 | remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear) 13 | ;; 14 | 15 | *) 16 | echo "postrm called with unknown argument \`$1'" >&2 17 | exit 1 18 | ;; 19 | esac 20 | 21 | #DEBHELPER# 22 | 23 | exit 0 24 | -------------------------------------------------------------------------------- /lib/logstash/time_addon.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | 4 | # Provide our own Time wrapper for ISO8601 support 5 | # Example: 6 | # >> LogStash::Time.now.to_iso8601 7 | # => "2010-10-17 00:25:24.619014-0700" 8 | # 9 | # >> LogStash::Time.now.utc.to_iso8601 10 | # => "2010-10-17 07:25:26.788704Z" 11 | module LogStash::Time 12 | ISO8601_STRFTIME = "%04d-%02d-%02dT%02d:%02d:%02d.%06d%+03d:00".freeze 13 | def self.now 14 | return Time.new.utc 15 | end 16 | end # module LogStash::Time 17 | -------------------------------------------------------------------------------- /docs/tutorials/10-minute-walkthrough/hello.conf: -------------------------------------------------------------------------------- 1 | input { 2 | stdin { 3 | # A type is a label applied to an event. It is used later with filters 4 | # to restrict what filters are run against each event. 5 | type => "human" 6 | } 7 | } 8 | 9 | output { 10 | # Print each event to stdout. 11 | stdout { 12 | # Enabling 'rubydebug' codec on the stdout output will make logstash 13 | # pretty-print the entire event as something similar to a JSON representation. 
14 | codec => rubydebug 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /lib/logstash/codecs/rubydebug.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | 4 | class LogStash::Codecs::RubyDebug < LogStash::Codecs::Base 5 | config_name "rubydebug" 6 | milestone 3 7 | 8 | def register 9 | require "ap" 10 | end 11 | 12 | public 13 | def decode(data) 14 | raise "Not implemented" 15 | end # def decode 16 | 17 | public 18 | def encode(data) 19 | @on_event.call(data.to_hash.awesome_inspect + "\n") 20 | end # def encode 21 | 22 | end # class LogStash::Codecs::Dots 23 | -------------------------------------------------------------------------------- /misc/patterns/php5: -------------------------------------------------------------------------------- 1 | PHP_LOG_CONTENT (.+) 2 | PHP_DATE_TIME %{MONTHDAY}-%{MONTH}-%{YEAR}\s+%{TIME} 3 | PHP_TZ_NAME [A-Z]{3} 4 | PHP_ERROR_LOG \s*+\[%{PHP_DATE_TIME:timestamp} %{PHP_TZ_NAME}\] PHP %{LOGLEVEL:php_log_level} error: %{PHP_LOG_CONTENT:php_log_content} 5 | PHP_FPM_ERROR_LOG \[%{PHP_DATE_TIME:timestamp}\] %{LOGLEVEL:php_log_level}: (\[%{GREEDYDATA:php_fpm_pool}\] child %{POSINT}, %{GREEDYDATA:php_log_content}|%{GREEDYDATA:php_log_content}) 6 | PHP_FPM_SLOW_LOG \[%{GREEDYDATA:stack_addr}\] %{GREEDYDATA:func_name} %{UNIXPATH:script_path} 7 | -------------------------------------------------------------------------------- /etc/agent.conf.example: -------------------------------------------------------------------------------- 1 | input { 2 | file { 3 | path => [ "/var/log/messages", "/var/log/*.log" ] 4 | type => "linux-syslog" 5 | } 6 | } 7 | 8 | filter { 9 | grok { 10 | type => "linux-syslog" 11 | pattern => "%{SYSLOGLINE}" 12 | } 13 | 14 | date { 15 | type => "linux-syslog" 16 | timestamp => "MMM dd HH:mm:ss" 17 | timestamp8601 => ISO8601 18 | } 19 | } 20 | 21 | output { 22 | stdout { 23 | } 24 | 25 | elasticsearch { 26 | index => "logstash" 27 | type => "%{@type}" 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /etc/examples/apache-logs-to-statsd.conf: -------------------------------------------------------------------------------- 1 | input { 2 | file { 3 | path => "/var/log/apache2/access_log" 4 | type => "apache-access" 5 | } 6 | tcp { 7 | type => "apache-access" 8 | port => 3333 9 | } 10 | } 11 | 12 | filter { 13 | grok { 14 | type => "apache-access" 15 | pattern => "%{COMBINEDAPACHELOG}" 16 | } 17 | } 18 | 19 | output { 20 | statsd { 21 | increment => [ "apache.response.%{response}" ] 22 | count => [ "apache.bytes", "%{bytes}" ] 23 | } 24 | #stdout { debug => true } 25 | } 26 | -------------------------------------------------------------------------------- /lib/logstash/util/require-helper.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/logging" 4 | 5 | module LogStash::Util::Require 6 | class << self 7 | attr_accessor :logger 8 | 9 | def require(lib, gemdep, message=nil) 10 | @logger ||= LogStash::Logger.new(STDERR) 11 | begin 12 | require lib 13 | rescue LoadError => e 14 | @logger.error("Failed loading '#{lib}'") 15 | end 16 | end # def require 17 | end # class << self 18 | end # def LogStash::Util::Require 19 | -------------------------------------------------------------------------------- /pkg/logstash.upstart.ubuntu: 
-------------------------------------------------------------------------------- 1 | # logstash - agent instance 2 | # 3 | 4 | description "logstash agent" 5 | 6 | start on virtual-filesystems 7 | stop on runlevel [06] 8 | 9 | # Respawn it if the process exits 10 | respawn 11 | 12 | limit nofile 65550 65550 13 | setuid logstash 14 | setgid logstash 15 | 16 | # You need to chdir somewhere writable because logstash needs to unpack a few 17 | # temporary files on startup. 18 | chdir /var/lib/logstash 19 | console log 20 | exec /usr/bin/java -jar /opt/logstash/logstash.jar agent -f /etc/logstash/conf.d 21 | -------------------------------------------------------------------------------- /pkg/debian/debian/changelog: -------------------------------------------------------------------------------- 1 | logstash (1.1.10.dev~1) unstable; urgency=low 2 | 3 | * Dev builds of master. 4 | 5 | -- Jordan Sissel Tue, 2 Apr 2013 21:55:33 -0700 6 | 7 | logstash (1.1.9~1) unstable; urgency=low 8 | 9 | * Depend on openjdk. Don't start daemon by default. 10 | 11 | -- Rudy Gevaert Wed, 27 Feb 2013 21:54:41 +0100 12 | 13 | logstash (1.1.9) unstable; urgency=low 14 | 15 | * Initial packaging release 16 | 17 | -- Corey Quinn Sun, 24 Feb 2013 18:24:41 -0800 18 | -------------------------------------------------------------------------------- /lib/logstash/filters/noop.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | # No-op filter. This is used generally for internal/dev testing. 6 | class LogStash::Filters::NOOP < LogStash::Filters::Base 7 | config_name "noop" 8 | milestone 2 9 | 10 | public 11 | def register 12 | # Nothing 13 | end # def register 14 | 15 | public 16 | def filter(event) 17 | return unless filter?(event) 18 | # Nothing to do 19 | filter_matched(event) 20 | end # def filter 21 | end # class LogStash::Filters::NOOP 22 | -------------------------------------------------------------------------------- /etc/examples/jsonfilter.conf: -------------------------------------------------------------------------------- 1 | input { 2 | file { 3 | type => syslog 4 | path => "/var/log/messages" 5 | } 6 | } 7 | 8 | filter { 9 | grok { 10 | type => syslog 11 | pattern => ["%{SYSLOGLINE}"] 12 | named_captures_only => true 13 | } 14 | 15 | grep { 16 | type => syslog 17 | match => ["program", "jsontest"] 18 | drop => false 19 | add_tag => json 20 | } 21 | 22 | json { 23 | type => syslog 24 | tags => json 25 | message => data 26 | } 27 | } 28 | 29 | output { 30 | stdout { 31 | debug => true 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /spec/filters/i18n.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "test_utils" 3 | require "logstash/filters/i18n" 4 | 5 | describe LogStash::Filters::I18n do 6 | extend LogStash::RSpec 7 | 8 | describe "transliterate" do 9 | config <<-CONFIG 10 | filter { 11 | i18n { 12 | transliterate => [ "transliterateme" ] 13 | } 14 | } 15 | CONFIG 16 | 17 | event = { 18 | "transliterateme" => [ "Ærøskøbing" ] 19 | } 20 | 21 | sample event do 22 | insist { subject["transliterateme"] } == [ "AEroskobing" ] 23 | end 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /lib/logstash/codecs/json_spooler.rb: -------------------------------------------------------------------------------- 1 | # 
encoding: utf-8
2 | require "logstash/codecs/base"
3 | require "logstash/codecs/spool"
4 |
5 | # A codec for decoding and encoding spooled batches of JSON events.
6 | class LogStash::Codecs::JsonSpooler < LogStash::Codecs::Spool
7 | config_name "json_spooler"
8 | milestone 1
9 |
10 | public
11 | def decode(data)
12 | super(JSON.parse(data.force_encoding("UTF-8"))) do |event|
13 | yield event
14 | end
15 | end # def decode
16 |
17 | public
18 | def encode(data)
19 | super(data)
20 | end # def encode
21 |
22 | end # class LogStash::Codecs::JsonSpooler
23 |
--------------------------------------------------------------------------------
/etc/examples/indexer.conf:
--------------------------------------------------------------------------------
1 | input {
2 | rabbitmq {
3 | host => "127.0.0.1"
4 | user => "guest"
5 | pass => "guest"
6 | exchange => "logstash"
7 | queue => "testing"
8 | type => "all"
9 | }
10 |
11 | tcp {
12 | port => 1234
13 | type => "linux-syslog"
14 | }
15 | }
16 |
17 | filter {
18 | grok {
19 | type => "linux-syslog"
20 | pattern => ["%{SYSLOG_SUDO}", "%{SYSLOG_KERNEL}", "%{SYSLOGLINE}"]
21 | add_tag => "grok"
22 | add_field => ["test_key", "the pid is %{pid}"]
23 | }
24 | }
25 |
26 | output {
27 | stdout {
28 | debug => true
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/pkg/rpm/SOURCES/logstash.conf:
--------------------------------------------------------------------------------
1 | ### THIS IS AN EXAMPLE CONFIG SO LOGSTASH WILL RUN ###
2 | ### PLEASE UPDATE THIS TO WHATEVER YOU WANT TO USE ###
3 |
4 | input {
5 | syslog {
6 | type => syslog
7 | port => 5544
8 | }
9 | }
10 |
11 |
12 | filter {
13 | mutate {
14 | add_field => [ "hostip", "%{host}" ]
15 | }
16 | dns {
17 | reverse => [ "host" ]
18 | action => "replace"
19 | }
20 | }
21 |
22 | output {
23 | elasticsearch {
24 | host => "localhost"
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/spec/support/pull375.rb:
--------------------------------------------------------------------------------
1 | # This spec covers the question here:
2 | # https://github.com/logstash/logstash/pull/375
3 |
4 | require "test_utils"
5 |
6 | describe "pull #375" do
7 | extend LogStash::RSpec
8 | describe "kv after grok" do
9 | config <<-CONFIG
10 | filter {
11 | grok { pattern => "%{URIPATH:mypath}%{URIPARAM:myparams}" }
12 | kv { source => "myparams" field_split => "&?"
} 13 | } 14 | CONFIG 15 | 16 | sample "/some/path?foo=bar&baz=fizz" do 17 | insist { subject["foo"] } == "bar" 18 | insist { subject["baz"] } == "fizz" 19 | end 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /etc/examples/exec-graphite.conf: -------------------------------------------------------------------------------- 1 | input { 2 | exec { 3 | type => "foo" 4 | command => "top -bn1" 5 | interval => 10 6 | } 7 | } 8 | 9 | filter { 10 | split { type => "foo" } 11 | grok { 12 | type => "foo" 13 | pattern => "%{NUMBER:pid} *%{WORD:user} *%{NUMBER:priority} *%{NUMBER:nice} *%{NOTSPACE:vmsize} *%{NOTSPACE:rss} *%{NOTSPACE:sharedmemory} *%{WORD:state} *%{NUMBER:cpu_pct} *%{NUMBER:memory_pct} *%{NOTSPACE:cputime} %{DATA:command}" 14 | } 15 | } 16 | 17 | output { 18 | graphite { 19 | metrics => [ "process.%{pid}.rss", "%{rss}", "process.%{pid}.cpu_pct", "%{cpu_pct}" ] 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /spec/support/LOGSTASH-733.rb: -------------------------------------------------------------------------------- 1 | # This spec covers the question here: 2 | # https://logstash.jira.com/browse/LOGSTASH-733 3 | 4 | require "test_utils" 5 | 6 | describe "LOGSTASH-733" do 7 | extend LogStash::RSpec 8 | describe "pipe-delimited fields" do 9 | config <<-CONFIG 10 | filter { 11 | kv { field_split => "|" } 12 | } 13 | CONFIG 14 | 15 | sample "field1=test|field2=another test|field3=test3" do 16 | insist { subject["field1"] } == "test" 17 | insist { subject["field2"] } == "another test" 18 | insist { subject["field3"] } == "test3" 19 | end 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /spec/jar.rb: -------------------------------------------------------------------------------- 1 | require "insist" 2 | 3 | describe "logstash jar features", :if => (__FILE__ =~ /file:.*~/) do 4 | before :each do 5 | @jar_root = __FILE__.split("!").first + "!" 6 | end 7 | 8 | it "must be only run from a jar" do 9 | insist { __FILE__ } =~ /file:.*!/ 10 | end 11 | 12 | it "must contain GeoLiteCity.dat" do 13 | path = File.join(@jar_root, "GeoLiteCity.dat") 14 | insist { File }.exists?(path) 15 | end 16 | 17 | it "must contain vendor/ua-parser/regexes.yaml" do 18 | path = File.join(@jar_root, "vendor/ua-parser/regexes.yaml") 19 | insist { File }.exists?(path) 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /lib/logstash/util/password.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/util" 4 | 5 | # This class exists to quietly wrap a password string so that, when printed or 6 | # logged, you don't accidentally print the password itself. 7 | class LogStash::Util::Password 8 | attr_reader :value 9 | 10 | public 11 | def initialize(password) 12 | @value = password 13 | end # def initialize 14 | 15 | public 16 | def to_s 17 | return "" 18 | end # def to_s 19 | 20 | public 21 | def inspect 22 | return to_s 23 | end # def inspect 24 | end # class LogStash::Util::Password 25 | 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2009-2013 Jordan Sissel, Pete Fritchman, and contributors. 
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 |
15 |
--------------------------------------------------------------------------------
/lib/logstash/inputs/threadable.rb:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | require "logstash/namespace"
3 | require "logstash/inputs/base"
4 |
5 | # This is the threadable class for logstash inputs.
6 | # Use this class in your inputs if it can support multiple threads
7 | class LogStash::Inputs::Threadable < LogStash::Inputs::Base
8 |
9 | # Set this to the number of threads you want this input to spawn.
10 | # This is the same as declaring the input multiple times
11 | config :threads, :validate => :number, :default => 1
12 |
13 | def initialize(params)
14 | super
15 | @threadable = true
16 | end
17 |
18 | end # class LogStash::Inputs::Threadable
19 |
--------------------------------------------------------------------------------
/spec/filters/unique.rb:
--------------------------------------------------------------------------------
1 | require "test_utils"
2 | require "logstash/filters/unique"
3 |
4 | describe LogStash::Filters::Unique do
5 | extend LogStash::RSpec
6 |
7 | describe "unique when field is array" do
8 | config <<-CONFIG
9 | filter {
10 | unique {
11 | fields => ["noisy_field", "not_an_array"]
12 | }
13 | }
14 | CONFIG
15 |
16 | sample("noisy_field" => %w(cat dog cat cat)) do
17 | insist { subject["noisy_field"] } == %w(cat dog)
18 | end
19 |
20 | sample("not_an_array" => "Hello, world!") do
21 | insist { subject["not_an_array"] } == "Hello, world!"
22 | end
23 |
24 | end
25 | end
26 |
--------------------------------------------------------------------------------
/lib/logstash/codecs/edn.rb:
--------------------------------------------------------------------------------
1 | require "logstash/codecs/base"
2 | require "logstash/codecs/line"
3 |
4 | class LogStash::Codecs::EDN < LogStash::Codecs::Base
5 | config_name "edn"
6 |
7 | milestone 1
8 |
9 | def register
10 | require "edn"
11 | end
12 |
13 | public
14 | def decode(data)
15 | begin
16 | yield LogStash::Event.new(EDN.read(data))
17 | rescue => e
18 | @logger.info("EDN parse failure.
Falling back to plain-text", :error => e, :data => data)
19 | yield LogStash::Event.new("message" => data)
20 | end
21 | end
22 |
23 | public
24 | def encode(data)
25 | @on_event.call(data.to_hash.to_edn)
26 | end
27 |
28 | end
29 |
--------------------------------------------------------------------------------
/etc/jira-output.conf.sample:
--------------------------------------------------------------------------------
1 | input { stdin { type => "stdin-type"}}
2 |
3 | output {
4 | jira {
5 | host => "YOUR HOST NAME"
6 | username => "USERNAME"
7 | password => "PASSWORD"
8 | # project => "LOGSTASH"
9 | projectid => "11203" # would have preferred a project key, https://github.com/jstewart/jiralicious/issues/16
10 | issuetypeid => "7"
11 | summary => "Driven from config file"
12 | reporter => "admin"
13 | assignee => "admin"
14 | priority => "1"
15 | }
16 |
17 | stdout {
18 | debug => true
19 | debug_format => "ruby"
20 | }
21 |
22 | # elasticsearch {
23 | # index => "logstash"
24 | # type => "%{@type}"
25 | # }
26 | }
27 |
--------------------------------------------------------------------------------
/spec/support/postwait1.rb:
--------------------------------------------------------------------------------
1 | require "test_utils"
2 |
3 | describe "some stuff" do
4 | extend LogStash::RSpec
5 |
6 | config <<-'CONFIG'
7 | filter {
8 | grep {
9 | tags => web
10 | drop => false
11 | add_field => [ "application", "web" ]
12 | }
13 |
14 | mutate {
15 | tags => web
16 | #replace => [ "message", "%{request}" ]
17 | add_field => [ "message", "%{request}" ]
18 | }
19 | }
20 | CONFIG
21 |
22 | sample("tags" => [ "web" ], "request" => "hello") do
23 | insist { subject["tags"] }.include?("web")
24 | insist { subject["message"] } == "hello"
25 | end
26 | end
27 |
--------------------------------------------------------------------------------
/pkg/debian/debian/control:
--------------------------------------------------------------------------------
1 | Source: logstash
2 | Section: devel
3 | Priority: extra
4 | Maintainer: Corey Quinn
5 | Build-Depends: debhelper (>= 5)
6 | Standards-Version: 3.7.2
7 | Homepage: http://logstash.net
8 |
9 | Package: logstash
10 | Architecture: all
11 | Depends: ${shlibs:Depends}, ${misc:Depends}, daemon, adduser, psmisc, default-jre
12 | Description: tool for managing events and logs
13 | logstash is a tool for managing events and logs. You can use it to collect logs,
14 | parse them, and store them for later use (like, for searching). Speaking of
15 | searching, logstash comes with a web interface for searching and drilling into
16 | all of your logs.
17 |
--------------------------------------------------------------------------------
/pkg/debian/debian/dirs:
--------------------------------------------------------------------------------
1 | # moved from rules install
2 | usr/share/logstash
3 |
4 | # this is where LS_HOME is stored, so let's leave it as is for now
5 | var/lib/logstash
6 |
7 | # Need to create logstash's own directory to track the pid since the daemon is not run as root
8 | # and doesn't have permission to write the pid to /var/run (which would be the preferred location).
9 | var/run/logstash
10 |
11 | # Store logstash log files in their own directory since they can become rather large and in the future
12 | # rotating logs can be easily added.
13 | var/log/logstash
14 |
15 | # Cache directory for the unpacked logstash.jar file.
16 | var/cache/logstash 17 | 18 | # Temp dir for java 19 | var/logstash/ 20 | -------------------------------------------------------------------------------- /lib/logstash/monkeypatches-for-debugging.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | if $DEBUGLIST.include?("require") 3 | module Kernel 4 | alias_method :require_debug, :require 5 | 6 | def require(path) 7 | result = require_debug(path) 8 | origin = caller[1] 9 | if origin =~ /rubygems\/custom_require/ 10 | origin = caller[3] 11 | end 12 | puts "require(\"#{path}\")" if result 13 | #puts "require(\"#{path}\") => #{result} (from: #{origin})" 14 | #puts caller.map { |c| " => #{c}" }.join("\n") 15 | end 16 | 17 | alias_method :load_debug, :load 18 | 19 | def load(path) 20 | puts "load(\"#{path}\")" 21 | return load_debug(path) 22 | end 23 | end 24 | end 25 | -------------------------------------------------------------------------------- /spec/config.rb: -------------------------------------------------------------------------------- 1 | # config syntax tests 2 | # 3 | 4 | require "logstash/config/grammar" 5 | require "logstash/config/config_ast" 6 | 7 | describe LogStashConfigParser do 8 | it "should permit single-quoted attribute names" do 9 | parser = LogStashConfigParser.new 10 | config = parser.parse(%q( 11 | input { 12 | example { 13 | 'foo' => 'bar' 14 | test => { 'bar' => 'baz' } 15 | } 16 | } 17 | )) 18 | 19 | reject { config }.nil? 20 | end 21 | 22 | it "should permit empty plugin sections" do 23 | parser = LogStashConfigParser.new 24 | config = parser.parse(%q( 25 | filter { 26 | } 27 | )) 28 | 29 | reject { config }.nil? 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /lib/logstash/filters/unique.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | class LogStash::Filters::Unique < LogStash::Filters::Base 6 | 7 | config_name "unique" 8 | milestone 1 9 | 10 | # The fields on which to run the unique filter. 11 | config :fields, :validate => :array, :required => true 12 | 13 | public 14 | def register 15 | # Nothing to do 16 | end # def register 17 | 18 | public 19 | def filter(event) 20 | return unless filter?(event) 21 | 22 | @fields.each do |field| 23 | next unless event[field].class == Array 24 | 25 | event[field] = event[field].uniq 26 | end 27 | end # def filter 28 | 29 | end # class Logstash::Filters::Unique 30 | -------------------------------------------------------------------------------- /etc/examples/exec-split.conf: -------------------------------------------------------------------------------- 1 | input { 2 | exec { 3 | type => "foo" 4 | 5 | # Grab one run of top 6 | command => "top -bn1" 7 | 8 | # Every 5 seconds 9 | interval => 5 10 | } 11 | } 12 | 13 | filter { 14 | split { type => "foo" } 15 | 16 | # Parse fields out of top. 
This assumes the default headers of: 17 | # PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND 18 | grok { 19 | type => "foo" 20 | pattern => "%{NUMBER:pid} *%{WORD:user} *%{NUMBER:priority} *%{NUMBER:nice} *%{NOTSPACE:vmsize} *%{NOTSPACE:rss} *%{NOTSPACE:sharedmemory} *%{WORD:state} *%{NUMBER:cpu_pct} *%{NUMBER:memory_pct} *%{NOTSPACE:cputime} %{DATA:command}" 21 | } 22 | } 23 | 24 | output { stdout { debug => true } } 25 | 26 | -------------------------------------------------------------------------------- /lib/logstash/JRUBY-6970-openssl.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | # TODO(sissel): require "openssl" takes *ages* from the logstash jar 3 | # TODO(sissel): monkeypatch Kernel.require to apply this monkeypatch only after 4 | # a 'require "openssl" has occurred. 5 | class OpenSSL::SSL::SSLContext 6 | alias_method :ca_path_JRUBY_6970=, :ca_path= 7 | alias_method :ca_file_JRUBY_6970=, :ca_file= 8 | 9 | def ca_file=(arg) 10 | if arg =~ /^jar:file:\// 11 | return ca_file_JRUBY_6970=(arg.gsub(/^jar:/, "")) 12 | end 13 | return ca_file_JRUBY_6970=(arg) 14 | end 15 | 16 | def ca_path=(arg) 17 | if arg =~ /^jar:file:\// 18 | return ca_path_JRUBY_6970=(arg.gsub(/^jar:/, "")) 19 | end 20 | return ca_path_JRUBY_6970=(arg) 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /misc/patterns/mysql: -------------------------------------------------------------------------------- 1 | MYSQL_ERROR_TIMESTAMP %{NUMBER:date} %{TIME} 2 | MYSQL_ERORR_LOG_CONTENT_P1 \[%{WORD:mysql_error_log_level}\] %{GREEDYDATA:mysql_error_log_content} 3 | MYSQL_ERORR_LOG_CONTENT_P2 %{GREEDYDATA:mysql_error_log_content} 4 | MYSQL_ERROR_LOG %{MYSQL_ERROR_TIMESTAMP} (%{MYSQL_ERORR_LOG_CONTENT_P1}|%{MYSQL_ERORR_LOG_CONTENT_P2}) 5 | 6 | MYSQL_SLOW_FROM ^# User@Host: %{USER:user}\[[^\]]+\] @ %{HOST:host} \[%{IP:ip_addr}?] 7 | MYSQL_SLOW_STAT ^# Query_time: %{NUMBER:duration:float} \s*Lock_time: %{NUMBER:lock_wait:float} \s*Rows_sent: %{NUMBER:results:int} \s*Rows_examined: %{NUMBER:scanned:int} 8 | MYSQL_SLOW_TIMESTAMP ^SET timestamp=%{NUMBER:timestamp}; 9 | MYSQL_SLOW_DB ^use %{WORD:db_name}; 10 | MYSQL_SLOW_QUERY ^%{WORD:action}%{SPACE}%{GREEDYDATA}; 11 | 12 | -------------------------------------------------------------------------------- /spec/inputs/generator.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | 3 | describe "inputs/generator" do 4 | extend LogStash::RSpec 5 | 6 | describe "generate events" do 7 | event_count = 100000 + rand(50000) 8 | 9 | config <<-CONFIG 10 | input { 11 | generator { 12 | type => "blah" 13 | count => #{event_count} 14 | } 15 | } 16 | CONFIG 17 | 18 | input do |pipeline, queue| 19 | start = Time.now 20 | Thread.new { pipeline.run } 21 | event_count.times do |i| 22 | event = queue.pop 23 | insist { event["sequence"] } == i 24 | end 25 | duration = Time.now - start 26 | puts "Rate: #{event_count / duration}" 27 | pipeline.shutdown 28 | end # input 29 | end 30 | end 31 | -------------------------------------------------------------------------------- /pkg/debian/debian/watch.ex: -------------------------------------------------------------------------------- 1 | # Example watch control file for uscan 2 | # Rename this file to "watch" and then you can run the "uscan" command 3 | # to check for upstream updates and more. 
4 | # See uscan(1) for format 5 | 6 | # Compulsory line, this is a version 3 file 7 | version=3 8 | 9 | # Uncomment to examine a Webpage 10 | # 11 | #http://www.example.com/downloads.php jenkins-(.*)\.tar\.gz 12 | 13 | # Uncomment to examine a Webserver directory 14 | #http://www.example.com/pub/jenkins-(.*)\.tar\.gz 15 | 16 | # Uncommment to examine a FTP server 17 | #ftp://ftp.example.com/pub/jenkins-(.*)\.tar\.gz debian uupdate 18 | 19 | # Uncomment to find new files on sourceforge, for debscripts >= 2.9 20 | # http://sf.net/jenkins/jenkins-(.*)\.tar\.gz 21 | 22 | 23 | -------------------------------------------------------------------------------- /docs/plugin-synopsis.html.erb: -------------------------------------------------------------------------------- 1 | <%= name %> { 2 | <% sorted_attributes.each do |name, config| 3 | next if config[:deprecated] 4 | if config[:validate].is_a?(Array) 5 | annotation = "string, one of #{config[:validate].inspect}" 6 | elsif config[:validate] == :path 7 | annotation = "a valid filesystem path" 8 | else 9 | annotation = "#{config[:validate]}" 10 | end 11 | 12 | if name.is_a?(Regexp) 13 | name = "/" + name.to_s.gsub(/^\(\?-mix:/, "").gsub(/\)$/, "") + "/" 14 | end 15 | if config[:required] 16 | annotation += " (required)" 17 | else 18 | annotation += " (optional)" 19 | end 20 | annotation += ", default: #{config[:default].inspect}" if config.include?(:default) 21 | -%> 22 | <%= name %> => ... # <%= annotation %> 23 | <% end -%> 24 | } 25 | -------------------------------------------------------------------------------- /lib/logstash/filters/environment.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | # Set fields from environment variables 6 | class LogStash::Filters::Environment < LogStash::Filters::Base 7 | config_name "environment" 8 | milestone 1 9 | 10 | # Specify a hash of fields to the environment variable 11 | # A hash of matches of field => environment variable 12 | config :add_field_from_env, :validate => :hash, :default => {} 13 | 14 | public 15 | def register 16 | # Nothing 17 | end # def register 18 | 19 | public 20 | def filter(event) 21 | return unless filter?(event) 22 | @add_field_from_env.each do |field, env| 23 | event[field] = ENV[env] 24 | end 25 | filter_matched(event) 26 | end # def filter 27 | end # class LogStash::Filters::Environment 28 | -------------------------------------------------------------------------------- /lib/logstash/filters/drop.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | # Drop filter. 6 | # 7 | # Drops everything that gets to this filter. 8 | # 9 | # This is best used in combination with conditionals, for example: 10 | # 11 | # filter { 12 | # if [loglevel] == "debug" { 13 | # drop { } 14 | # } 15 | # } 16 | # 17 | # The above will only pass events to the drop filter if the loglevel field is 18 | # "debug". This will cause all events matching to be dropped. 19 | class LogStash::Filters::Drop < LogStash::Filters::Base 20 | config_name "drop" 21 | milestone 3 22 | 23 | public 24 | def register 25 | # nothing to do. 
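    # (Nothing to validate or initialize here: every event that reaches this
    # filter is simply cancelled in `filter` below.)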
26 | end 27 | 28 | public 29 | def filter(event) 30 | event.cancel 31 | end # def filter 32 | end # class LogStash::Filters::Drop 33 | -------------------------------------------------------------------------------- /test/jenkins/create-jobs.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require "erb" 4 | 5 | if ENV["JENKINS_HOME"].nil? 6 | puts "No JENKINS_HOME set." 7 | exit 1 8 | end 9 | 10 | plugindir = File.join(File.dirname(__FILE__), "..", "..", "lib", "logstash") 11 | 12 | plugins = %w(inputs filters outputs).collect { |t| Dir.glob(File.join(plugindir, t, "*.rb")) }.flatten 13 | 14 | template = ERB.new(File.read(File.join(File.dirname(__FILE__), "config.xml.erb"))) 15 | plugins.each do |path| 16 | job = path.gsub(/.*\/([^\/]+)\/([^\/]+)\.rb$/, 'plugin.\1.\2') 17 | plugin_path = path.gsub(/.*\/([^\/]+)\/([^\/]+)$/, '\1/\2') 18 | 19 | jobdir = File.join(ENV["JENKINS_HOME"], "jobs", job) 20 | puts "Writing #{jobdir}/config.xml" 21 | Dir.mkdir(jobdir) if !Dir.exists?(jobdir) 22 | File.write(File.join(jobdir, "config.xml"), template.result(binding)) 23 | end 24 | -------------------------------------------------------------------------------- /lib/logstash/outputs/websocket/app.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/outputs/websocket" 4 | require "sinatra/base" 5 | require "rack/handler/ftw" # from ftw 6 | require "ftw/websocket/rack" # from ftw 7 | 8 | class LogStash::Outputs::WebSocket::App < Sinatra::Base 9 | def initialize(pubsub, logger) 10 | @pubsub = pubsub 11 | @logger = logger 12 | end 13 | 14 | set :reload_templates, false 15 | 16 | get "/" do 17 | # TODO(sissel): Support filters/etc. 
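    # Rough flow of the handler below: wrap the Rack env in an FTW websocket,
    # subscribe to this output's pubsub, and push every published event to the
    # connected client until the stream closes.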
18 | ws = ::FTW::WebSocket::Rack.new(env) 19 | @logger.debug("New websocket client") 20 | stream(:keep_open) do |out| 21 | @pubsub.subscribe do |event| 22 | ws.publish(event) 23 | end # pubsub 24 | end # stream 25 | 26 | ws.rack_response 27 | end # get / 28 | end # class LogStash::Outputs::WebSocket::App 29 | 30 | -------------------------------------------------------------------------------- /misc/rate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/zsh 2 | 3 | if [ "$#" -ne 1 ] ; then 4 | echo "Usage; $0 logfile" 5 | exit 1 6 | fi 7 | logfile="$1" 8 | 9 | pid=$(ps -u $USER -f | awk '/bin.logstash -[f]/ {print $2}') 10 | fileno=$(lsof -nPp $pid | grep -F "$logfile" | awk '{ print int($4) }') 11 | pos=$(awk '/pos:/ {print $2}' /proc/$pid/fdinfo/$fileno) 12 | size=$(ls -ld "$logfile" | awk '{print $5}') 13 | starttime=$(awk '{print $22}' /proc/$pid/stat) 14 | curtime=$(awk '{print $1}' /proc/uptime) 15 | lines=$(dd if="$logfile" bs=$pos count=1 2> /dev/null | wc -l) 16 | percent=$(printf "%.2f%%" $(( ($pos / ($size + 0.0)) * 100 ))) 17 | 18 | duration=$(($curtime - ($starttime / 100.))) 19 | rate=$(( $lines / (0.0 + $duration) )) 20 | 21 | ps --no-header -o "pid user args" -p $pid 22 | echo "Duration: $duration" 23 | echo "Lines: $lines (position: $pos, $percent)" 24 | echo "Rate: $rate" 25 | 26 | -------------------------------------------------------------------------------- /pkg/debian/debian/preinst.ex: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # preinst script for logstash 3 | # 4 | # see: dh_installdeb(1) 5 | 6 | set -e 7 | 8 | # summary of how this script can be called: 9 | # * `install' 10 | # * `install' 11 | # * `upgrade' 12 | # * `abort-upgrade' 13 | # for details, see http://www.debian.org/doc/debian-policy/ or 14 | # the debian-policy package 15 | 16 | 17 | case "$1" in 18 | install|upgrade) 19 | ;; 20 | 21 | abort-upgrade) 22 | ;; 23 | 24 | *) 25 | echo "preinst called with unknown argument \`$1'" >&2 26 | exit 1 27 | ;; 28 | esac 29 | 30 | # dh_installdeb will replace this with shell code automatically 31 | # generated by other debhelper scripts. 32 | 33 | #DEBHELPER# 34 | 35 | exit 0 36 | 37 | 38 | -------------------------------------------------------------------------------- /pkg/debian/debian/logstash.default: -------------------------------------------------------------------------------- 1 | # defaults for logstash 2 | 3 | # Start logstash on boot? 4 | START=no 5 | 6 | # pulled in from the init script; makes things easier. 
7 | NAME=logstash 8 | 9 | # location of java 10 | JAVA=/usr/bin/java 11 | 12 | # arguments to pass to java 13 | LS_JAVA_OPTS="-Xmx256m -Djava.io.tmpdir=/var/lib/logstash/" 14 | 15 | PIDFILE=/var/run/logstash.pid 16 | 17 | # user id to be invoked as 18 | LS_USER=logstash 19 | 20 | # location of the logstas jar file 21 | LS_JAR=/usr/share/logstash/logstash.jar 22 | 23 | # logstash home location 24 | LS_HOME=/var/lib/logstash 25 | 26 | # logstash log directory 27 | LOG_DIR=/var/log/logstash 28 | 29 | # logstash log file 30 | LOG_FILE=$LOG_DIR/$NAME.log 31 | 32 | # logstash configuration directory 33 | CONF_DIR=/etc/logstash/conf.d 34 | 35 | # Open file limit 36 | OPEN_FILES=2048 37 | 38 | # Nice level 39 | NICE=19 40 | -------------------------------------------------------------------------------- /docs/generate_index.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require "erb" 4 | 5 | if ARGV.size != 1 6 | $stderr.puts "No path given to search for plugin docs" 7 | $stderr.puts "Usage: #{$0} plugin_doc_dir" 8 | exit 1 9 | end 10 | 11 | def plugins(glob) 12 | files = Dir.glob(glob) 13 | names = files.collect { |f| File.basename(f).gsub(".html", "") } 14 | return names.sort 15 | end # def plugins 16 | 17 | basedir = ARGV[0] 18 | docs = { 19 | "inputs" => plugins(File.join(basedir, "inputs/*.html")), 20 | "codecs" => plugins(File.join(basedir, "codecs/*.html")), 21 | "filters" => plugins(File.join(basedir, "filters/*.html")), 22 | "outputs" => plugins(File.join(basedir, "outputs/*.html")), 23 | } 24 | 25 | template_path = File.join(File.dirname(__FILE__), "index.html.erb") 26 | template = File.new(template_path).read 27 | erb = ERB.new(template, nil, "-") 28 | puts erb.result(binding) 29 | -------------------------------------------------------------------------------- /lib/logstash/codecs/edn_lines.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/base" 2 | require "logstash/codecs/line" 3 | 4 | class LogStash::Codecs::EDNLines < LogStash::Codecs::Base 5 | config_name "edn_lines" 6 | 7 | milestone 1 8 | 9 | def register 10 | require "edn" 11 | end 12 | 13 | public 14 | def initialize(params={}) 15 | super(params) 16 | @lines = LogStash::Codecs::Line.new 17 | end 18 | 19 | public 20 | def decode(data) 21 | @lines.decode(data) do |event| 22 | begin 23 | yield LogStash::Event.new(EDN.read(event["message"])) 24 | rescue => e 25 | @logger.info("EDN parse failure. Falling back to plain-text", :error => e, :data => data) 26 | yield LogStash::Event.new("message" => data) 27 | end 28 | end 29 | end 30 | 31 | public 32 | def encode(data) 33 | @on_event.call(data.to_hash.to_edn + "\n") 34 | end 35 | 36 | end 37 | -------------------------------------------------------------------------------- /extract_services.rb: -------------------------------------------------------------------------------- 1 | # Extract META-INFO/services/* files from jars 2 | # 3 | require "optparse" 4 | 5 | output = nil 6 | 7 | flags = OptionParser.new do |opts| 8 | opts.on("-o", "--output DIR", 9 | "Where to write the merged META-INF/services/* files") do |dir| 10 | output = dir 11 | end 12 | end 13 | 14 | flags.parse!(ARGV) 15 | 16 | ARGV.each do |jar| 17 | # Find any files matching /META-INF/services/* in any jar given on the 18 | # command line. 19 | # Append all file content to the output directory with the same file name 20 | # as is in the jar. 
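  # A hypothetical invocation (paths are illustrative only):
  #
  #   ruby extract_services.rb -o build/services vendor/jars/*.jar
  #
  # appends each META-INF/services/<name> entry found in those jars to
  # build/services/<name>.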
21 | glob = "file:///#{File.expand_path(jar)}!/META-INF/services/*" 22 | Dir.glob(glob).each do |service| 23 | name = File.basename(service) 24 | File.open(File.join(output, name), "a") do |fd| 25 | puts "Adding #{name} from #{File.basename(jar)}" 26 | fd.write(File.read(service)) 27 | end 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /misc/screencast/000.intro: -------------------------------------------------------------------------------- 1 | %K Escape 2 | 1GdGi 3 | tail -f is nice, but it doesn't scale. 4 | 5 | Plus, the output is just a stream of text. Aren't logs really messages? 6 | 7 | Enter logstash. 8 | 9 | logstash gives you a pipe metaphor similar to the unix model. Stuff goes in; stuff gets modified; stuff goes out. Think: sed. 10 | 11 | Powershell built on the unix pipe model by allowing you to pipe objects instead of just text. (If you haven't seen powershell yet, go check it out, it is awesome) 12 | 13 | Let's take that piped object model and apply it to logs, events, and the network. 14 | 15 | * Input from files, processes, etc. 16 | * Parse it and package it into an object. 17 | * Ship it to anything willing to listen. 18 | 19 | If we provide a framework for doing this, you can easily ship logs to message queues, databases, archive servers, web browsers, etc. 20 | 21 | Let's show a bit of logstash. 22 | -------------------------------------------------------------------------------- /pkg/logstash-web.default: -------------------------------------------------------------------------------- 1 | # defaults for logstash 2 | 3 | # Start logstash on boot? 4 | START=no 5 | 6 | # pulled in from the init script; makes things easier. 7 | NAME=logstash-web 8 | 9 | # location of java 10 | JAVA=/usr/bin/java 11 | 12 | # arguments to pass to java 13 | LS_JAVA_OPTS="-Xmx256m -Djava.io.tmpdir=/var/lib/logstash/" 14 | 15 | PIDFILE=/var/run/logstash-web.pid 16 | 17 | # user id to be invoked as 18 | LS_USER=logstash 19 | 20 | # location of the logstas jar file 21 | LS_JAR=/opt/logstash/logstash.jar 22 | 23 | # logstash home location 24 | LS_HOME=/var/lib/logstash 25 | 26 | # logstash log directory 27 | LOG_DIR=/var/log/logstash 28 | 29 | # logstash log file 30 | LOG_FILE=$LOG_DIR/$NAME.log 31 | 32 | # logstash configuration directory 33 | CONF_DIR=/etc/logstash/conf.d 34 | 35 | # Open file limit 36 | OPEN_FILES=2048 37 | 38 | # Nice level 39 | NICE=19 40 | 41 | HOME=/var/lib/logstash 42 | -------------------------------------------------------------------------------- /spec/examples/fail2ban.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | 3 | describe "fail2ban logs", :if => RUBY_ENGINE == "jruby" do 4 | extend LogStash::RSpec 5 | 6 | # The logstash config goes here. 7 | # At this time, only filters are supported. 
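  # The grok pattern below pulls the ISO8601 timestamp, log level, jail name
  # (captured as 'program'), action, and banned IP out of a fail2ban.actions
  # line; 'date' then sets @timestamp from that timestamp and 'mutate' drops
  # the now-redundant field.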
8 | config <<-CONFIG 9 | filter { 10 | grok { 11 | pattern => "^%{TIMESTAMP_ISO8601:timestamp} fail2ban\.actions: %{WORD:level} \\[%{WORD:program}\\] %{WORD:action} %{IP:ip}" 12 | singles => true 13 | } 14 | date { 15 | match => [ "timestamp", "yyyy-MM-dd HH:mm:ss,SSS" ] 16 | } 17 | mutate { 18 | remove => timestamp 19 | } 20 | } 21 | CONFIG 22 | 23 | sample "2013-06-28 15:10:59,891 fail2ban.actions: WARNING [ssh] Ban 95.78.163.5" do 24 | insist { subject["program"] } == "ssh" 25 | insist { subject["action"] } == "Ban" 26 | insist { subject["ip"] } == "95.78.163.5" 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /pull_release_note.rb: -------------------------------------------------------------------------------- 1 | require "octokit" 2 | 3 | 4 | @repository= "logstash/logstash" 5 | @releaseNote= "releaseNote.html" 6 | 7 | #Last release == last tag 8 | lastReleaseSha = Octokit.tags(@repository).first.commit.sha 9 | 10 | currentReleaseSha ="HEAD" 11 | 12 | #Collect PR Merge in a file 13 | File.open(@releaseNote, "a") do |f| 14 | f.puts "
<h2>Merged pull request</h2>
" 15 | f.puts "" 25 | end -------------------------------------------------------------------------------- /docs/tutorials/10-minute-walkthrough/hello-search.conf: -------------------------------------------------------------------------------- 1 | input { 2 | stdin { 3 | # A type is a label applied to an event. It is used later with filters 4 | # to restrict what filters are run against each event. 5 | type => "human" 6 | } 7 | } 8 | 9 | output { 10 | # Print each event to stdout. 11 | stdout { 12 | # Enabling 'rubydebug' codec on the stdout output will make logstash 13 | # pretty-print the entire event as something similar to a JSON representation. 14 | codec => rubydebug 15 | } 16 | 17 | # You can have multiple outputs. All events generally to all outputs. 18 | # Output events to elasticsearch 19 | elasticsearch { 20 | # Setting 'embedded' will run a real elasticsearch server inside logstash. 21 | # This option below saves you from having to run a separate process just 22 | # for ElasticSearch, so you can get started quicker! 23 | embedded => true 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /spec/codecs/spool.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/spool" 2 | require "logstash/event" 3 | require "insist" 4 | 5 | describe LogStash::Codecs::Spool do 6 | subject do 7 | next LogStash::Codecs::Spool.new 8 | end 9 | 10 | context "#decode" do 11 | it "should return multiple spooled events" do 12 | e1 = LogStash::Event.new 13 | e2 = LogStash::Event.new 14 | e3 = LogStash::Event.new 15 | subject.decode([e1,e2,e3]) do |event| 16 | insist { event.is_a? LogStash::Event } 17 | end 18 | end 19 | end 20 | 21 | context "#encode" do 22 | it "should return a spooled event" do 23 | spool_size = Random.rand(10) 24 | subject.spool_size = spool_size 25 | got_event = false 26 | subject.on_event do |data| 27 | got_event = true 28 | end 29 | spool_size.times do 30 | subject.encode(LogStash::Event.new) 31 | end 32 | insist { got_event } 33 | end 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /lib/logstash/codecs/spool.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | 4 | class LogStash::Codecs::Spool < LogStash::Codecs::Base 5 | config_name 'spool' 6 | milestone 1 7 | config :spool_size, :validate => :number, :default => 50 8 | 9 | attr_reader :buffer 10 | 11 | public 12 | def decode(data) 13 | data.each do |event| 14 | yield event 15 | end 16 | end # def decode 17 | 18 | public 19 | def encode(data) 20 | @buffer = [] if @buffer.nil? 21 | #buffer size is hard coded for now until a 22 | #better way to pass args into codecs is implemented 23 | if @buffer.length >= @spool_size 24 | @on_event.call @buffer 25 | @buffer = [] 26 | else 27 | @buffer << data 28 | end 29 | end # def encode 30 | 31 | public 32 | def teardown 33 | if !@buffer.nil? 
and @buffer.length > 0 34 | @on_event.call @buffer 35 | end 36 | @buffer = [] 37 | end 38 | end # class LogStash::Codecs::Spool 39 | -------------------------------------------------------------------------------- /lib/logstash/outputs/udp.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/outputs/base" 3 | require "logstash/namespace" 4 | require "socket" 5 | 6 | # Send events over UDP 7 | # 8 | # Keep in mind that UDP will lose messages. 9 | class LogStash::Outputs::UDP < LogStash::Outputs::Base 10 | config_name "udp" 11 | milestone 1 12 | 13 | default :codec, "json" 14 | 15 | # The address to send messages to 16 | config :host, :validate => :string, :required => true 17 | 18 | # The port to send messages on 19 | config :port, :validate => :number, :required => true 20 | 21 | public 22 | def register 23 | @socket = UDPSocket.new 24 | @codec.on_event do |payload| 25 | @socket.send(payload, 0, @host, @port) 26 | end 27 | end 28 | 29 | def receive(event) 30 | return unless output?(event) 31 | if event == LogStash::SHUTDOWN 32 | finished 33 | return 34 | end 35 | @codec.encode(event) 36 | end 37 | 38 | end # class LogStash::Outputs::Stdout 39 | -------------------------------------------------------------------------------- /pkg/logstash.default: -------------------------------------------------------------------------------- 1 | # defaults for logstash 2 | 3 | # Start logstash on boot? 4 | START=no 5 | 6 | # pulled in from the init script; makes things easier. 7 | NAME=logstash 8 | 9 | # location of java 10 | JAVA=/usr/bin/java 11 | 12 | # arguments to pass to java 13 | LS_JAVA_OPTS="-Xmx256m -Djava.io.tmpdir=/var/lib/logstash/" 14 | 15 | PIDFILE=/var/run/logstash.pid 16 | 17 | # user id to be invoked as 18 | LS_USER=logstash 19 | 20 | # location of the logstas jar file 21 | LS_JAR=/opt/logstash/logstash.jar 22 | 23 | # logstash home location 24 | LS_HOME=/var/lib/logstash 25 | 26 | # logstash log directory 27 | LOG_DIR=/var/log/logstash 28 | 29 | # logstash log file 30 | LOG_FILE=$LOG_DIR/$NAME.log 31 | 32 | # logstash configuration directory 33 | CONF_DIR=/etc/logstash/conf.d 34 | 35 | # Open file limit 36 | OPEN_FILES=2048 37 | 38 | # Nice level 39 | NICE=19 40 | 41 | # Set LogStash options 42 | LS_OPTS="--log ${LOG_FILE}" 43 | 44 | # Set a home directory 45 | HOME=/var/lib/logstash 46 | -------------------------------------------------------------------------------- /spec/filters/json_encode.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/json_encode" 3 | 4 | describe LogStash::Filters::JSONEncode do 5 | extend LogStash::RSpec 6 | 7 | describe "encode a field as json" do 8 | config <<-CONFIG 9 | filter { 10 | json_encode { 11 | source => "hello" 12 | target => "fancy" 13 | } 14 | } 15 | CONFIG 16 | 17 | hash = { "hello" => { "whoa" => [ 1,2,3 ] } } 18 | sample(hash) do 19 | insist { JSON.parse(subject["fancy"]).to_json } == hash["hello"].to_json 20 | end 21 | end 22 | 23 | describe "encode a field as json and overwrite the original" do 24 | config <<-CONFIG 25 | filter { 26 | json_encode { 27 | source => "hello" 28 | } 29 | } 30 | CONFIG 31 | 32 | hash = { "hello" => { "whoa" => [ 1,2,3 ] } } 33 | sample(hash) do 34 | insist { JSON.parse(subject["hello"]).to_json } == hash["hello"].to_json 35 | end 36 | end 37 | end 38 | -------------------------------------------------------------------------------- 
/patterns/linux-syslog: -------------------------------------------------------------------------------- 1 | SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}: 2 | SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:pam_module}\(%{DATA:pam_caller}\): session %{WORD:pam_session_state} for user %{USERNAME:username}(?: by %{GREEDYDATA:pam_by})? 3 | 4 | CRON_ACTION [A-Z ]+ 5 | CRONLOG %{SYSLOGBASE} \(%{USER:user}\) %{CRON_ACTION:action} \(%{DATA:message}\) 6 | 7 | SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message} 8 | 9 | # IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424) 10 | SYSLOG5424PRI <%{NONNEGINT:syslog5424_pri}> 11 | SYSLOG5424SD \[%{DATA}\]+ 12 | SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:syslog5424_ver} +(?:%{TIMESTAMP_ISO8601:syslog5424_ts}|-) +(?:%{HOSTNAME:syslog5424_host}|-) +(?:%{WORD:syslog5424_app}|-) +(?:%{WORD:syslog5424_proc}|-) +(?:%{WORD:syslog5424_msgid}|-) +(?:%{SYSLOG5424SD:syslog5424_sd}|-|) 13 | 14 | SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:syslog5424_msg} 15 | -------------------------------------------------------------------------------- /docs/tutorials/10-minute-walkthrough/apache-parse.conf: -------------------------------------------------------------------------------- 1 | input { 2 | tcp { 3 | type => "apache" 4 | port => 3333 5 | } 6 | } 7 | 8 | filter { 9 | if [type] == "apache" { 10 | grok { 11 | # See the following URL for a complete list of named patterns 12 | # logstash/grok ships with by default: 13 | # https://github.com/logstash/logstash/tree/master/patterns 14 | # 15 | # The grok filter will use the below pattern and on successful match use 16 | # any captured values as new fields in the event. 17 | match => { "message" => "%{COMBINEDAPACHELOG}" } 18 | } 19 | 20 | date { 21 | # Try to pull the timestamp from the 'timestamp' field (parsed above with 22 | # grok). The apache time format looks like: "18/Aug/2011:05:44:34 -0700" 23 | match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] 24 | } 25 | } 26 | } 27 | 28 | output { 29 | # Use stdout in debug mode again to see what logstash makes of the event. 30 | stdout { 31 | debug => true 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /lib/logstash/filters/clone.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | # The clone filter is for duplicating events. 6 | # A clone will be made for each type in the clone list. 7 | # The original event is left unchanged. 8 | class LogStash::Filters::Clone < LogStash::Filters::Base 9 | 10 | config_name "clone" 11 | milestone 2 12 | 13 | # A new clone will be created with the given type for each type in this list. 
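  #
  # For example (the clone names here are illustrative only):
  #
  #     filter {
  #       clone {
  #         clones => [ "cloned-for-metrics", "cloned-for-archive" ]
  #       }
  #     }
  #
  # would emit two extra copies of every event, one with type
  # "cloned-for-metrics" and one with type "cloned-for-archive", while the
  # original event passes through unchanged.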
14 | config :clones, :validate => :array, :default => [] 15 | 16 | public 17 | def register 18 | # Nothing to do 19 | end 20 | 21 | public 22 | def filter(event) 23 | return unless filter?(event) 24 | @clones.each do |type| 25 | clone = event.clone 26 | clone["type"] = type 27 | filter_matched(clone) 28 | @logger.debug("Cloned event", :clone => clone, :event => event) 29 | 30 | # Push this new event onto the stack at the LogStash::FilterWorker 31 | yield clone 32 | end 33 | end 34 | 35 | end # class LogStash::Filters::Clone 36 | -------------------------------------------------------------------------------- /pkg/debian/debian/prerm.ex: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # prerm script for logstash 3 | # 4 | # see: dh_installdeb(1) 5 | 6 | set -e 7 | 8 | # summary of how this script can be called: 9 | # * `remove' 10 | # * `upgrade' 11 | # * `failed-upgrade' 12 | # * `remove' `in-favour' 13 | # * `deconfigure' `in-favour' 14 | # `removing' 15 | # 16 | # for details, see http://www.debian.org/doc/debian-policy/ or 17 | # the debian-policy package 18 | 19 | 20 | case "$1" in 21 | remove|upgrade|deconfigure) 22 | ;; 23 | 24 | failed-upgrade) 25 | ;; 26 | 27 | *) 28 | echo "prerm called with unknown argument \`$1'" >&2 29 | exit 1 30 | ;; 31 | esac 32 | 33 | # dh_installdeb will replace this with shell code automatically 34 | # generated by other debhelper scripts. 35 | 36 | #DEBHELPER# 37 | 38 | exit 0 39 | 40 | 41 | -------------------------------------------------------------------------------- /lib/logstash/config/file.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/config/grammar" 4 | require "logstash/config/config_ast" 5 | require "logstash/config/registry" 6 | require "logstash/errors" 7 | require "logger" 8 | 9 | class LogStash::Config::File 10 | include Enumerable 11 | attr_accessor :logger 12 | 13 | public 14 | def initialize(text) 15 | @logger = Cabin::Channel.get(LogStash) 16 | @text = text 17 | @config = parse(text) 18 | end # def initialize 19 | 20 | def parse(text) 21 | grammar = LogStashConfigParser.new 22 | result = grammar.parse(text) 23 | if result.nil? 
24 | raise LogStash::ConfigurationError, grammar.failure_reason 25 | end 26 | return result 27 | end # def parse 28 | 29 | def plugin(plugin_type, name, *args) 30 | klass = LogStash::Plugin.lookup(plugin_type, name) 31 | return klass.new(*args) 32 | end 33 | 34 | def each 35 | @config.recursive_select(LogStash::Config::AST::Plugin) 36 | end 37 | end # class LogStash::Config::Parser 38 | 39 | #agent.config(cfg) 40 | -------------------------------------------------------------------------------- /logstash-event.gemspec: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | Gem::Specification.new do |gem| 3 | gem.authors = ["Jordan Sissel"] 4 | gem.email = ["jls@semicomplete.com"] 5 | gem.description = %q{Library that contains the classes required to create LogStash events} 6 | gem.summary = %q{Library that contains the classes required to create LogStash events} 7 | gem.homepage = "https://github.com/logstash/logstash" 8 | gem.license = "Apache License (2.0)" 9 | 10 | gem.files = %w{ 11 | lib/logstash-event.rb 12 | lib/logstash/event.rb 13 | lib/logstash/namespace.rb 14 | lib/logstash/util/fieldreference.rb 15 | lib/logstash/util.rb 16 | spec/event.rb 17 | LICENSE 18 | } 19 | 20 | gem.test_files = [] 21 | gem.name = "logstash-event" 22 | gem.require_paths = ["lib"] 23 | gem.version = "1.2.02" 24 | 25 | gem.add_development_dependency "rspec" 26 | gem.add_development_dependency "guard" 27 | gem.add_development_dependency "guard-rspec" 28 | gem.add_development_dependency "insist", "1.0.0" 29 | end 30 | -------------------------------------------------------------------------------- /spec/codecs/plain.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/plain" 2 | require "logstash/event" 3 | require "insist" 4 | 5 | describe LogStash::Codecs::Plain do 6 | context "#decode" do 7 | it "should return a valid event" do 8 | subject.decode("Testing decoding.") do |event| 9 | insist { event.is_a? LogStash::Event } 10 | end 11 | end 12 | end 13 | 14 | context "#encode" do 15 | it "should return a plain text encoding" do 16 | event = LogStash::Event.new 17 | event["message"] = "Hello World." 
18 | subject.on_event do |data| 19 | insist { data } == event.to_s 20 | end 21 | subject.encode(event) 22 | end 23 | 24 | it "should respect the format setting" do 25 | format = "%{[hello]} %{[something][fancy]}" 26 | codec = LogStash::Codecs::Plain.new("format" => format) 27 | event = LogStash::Event.new("hello" => "world", "something" => { "fancy" => 123 }) 28 | codec.on_event do |data| 29 | insist { data } == event.sprintf(format) 30 | end 31 | codec.encode(event) 32 | end 33 | 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /spec/filters/date_performance.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/date" 3 | 4 | puts "Skipping date tests because this ruby is not jruby" if RUBY_ENGINE != "jruby" 5 | describe LogStash::Filters::Date, :if => RUBY_ENGINE == "jruby" do 6 | extend LogStash::RSpec 7 | 8 | describe "performance test of java syntax parsing", :if => ENV["SPEEDTEST"] do 9 | 10 | event_count = 100000 11 | min_rate = 4000 12 | 13 | max_duration = event_count / min_rate 14 | input = "Nov 24 01:29:01 -0800" 15 | config <<-CONFIG 16 | input { 17 | generator { 18 | add_field => ["mydate", "#{input}"] 19 | count => #{event_count} 20 | type => "generator" 21 | } 22 | } 23 | filter { 24 | date { 25 | match => [ "mydate", "MMM dd HH:mm:ss Z" ] 26 | } 27 | } 28 | output { null { } } 29 | CONFIG 30 | 31 | 2.times do 32 | agent do 33 | puts "date parse rate: #{event_count / @duration}" 34 | insist { @duration } < max_duration 35 | end 36 | end 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /spec/examples/graphite-input.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | 3 | describe "receive graphite input", :if => RUBY_ENGINE == "jruby" do 4 | extend LogStash::RSpec 5 | 6 | # The logstash config goes here. 7 | # At this time, only filters are supported. 
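  # The grok pattern below splits a carbon plaintext line ("name value timestamp")
  # into 'name', a float 'value', and 'ts'; 'date' then reads 'ts' as a UNIX
  # timestamp and 'mutate' removes it once @timestamp is set.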
8 | config <<-CONFIG 9 | # input { 10 | # tcp { 11 | # port => 1234 12 | # mode => server 13 | # type => graphite 14 | # } 15 | # } 16 | filter { 17 | grok { 18 | pattern => "%{DATA:name} %{NUMBER:value:float} %{POSINT:ts}" 19 | singles => true 20 | } 21 | date { 22 | match => ["ts", UNIX] 23 | } 24 | mutate { 25 | remove => ts 26 | } 27 | } 28 | CONFIG 29 | 30 | type "graphite" 31 | 32 | sample "foo.bar.baz 4025.34 1364606522" do 33 | insist { subject }.include?("name") 34 | insist { subject }.include?("value") 35 | 36 | insist { subject["name"] } == "foo.bar.baz" 37 | insist { subject["value"] } == 4025.34 38 | insist { subject["@timestamp"] } == Time.iso8601("2013-03-30T01:22:02.000Z") 39 | 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /docs/tutorials/10-minute-walkthrough/step-5-output.txt: -------------------------------------------------------------------------------- 1 | { 2 | "type" => "apache", 3 | "clientip" => "129.92.249.70", 4 | "ident" => "-", 5 | "auth" => "-", 6 | "timestamp" => "18/Aug/2011:06:00:14 -0700", 7 | "verb" => "GET", 8 | "request" => "/style2.css", 9 | "httpversion" => "1.1", 10 | "response" => "200", 11 | "bytes" => "1820", 12 | "referrer" => "http://www.semicomplete.com/blog/geekery/bypassing-captive-portals.html", 13 | "agent" => "\"Mozilla/5.0 (iPad; U; CPU OS 4_3_5 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8L1 Safari/6533.18.5\"", 14 | "@timestamp" => "2011-08-18T13:00:14.000Z", 15 | "host" => "127.0.0.1", 16 | "message" => "129.92.249.70 - - [18/Aug/2011:06:00:14 -0700] \"GET /style2.css HTTP/1.1\" 200 1820 \"http://www.semicomplete.com/blog/geekery/bypassing-captive-portals.html\" \"Mozilla/5.0 (iPad; U; CPU OS 4_3_5 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8L1 Safari/6533.18.5\"\n" 17 | } 18 | -------------------------------------------------------------------------------- /lib/logstash/outputs/exec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/outputs/base" 4 | 5 | # This output will run a command for any matching event. 6 | # 7 | # Example: 8 | # 9 | # output { 10 | # exec { 11 | # type => abuse 12 | # command => "iptables -A INPUT -s %{clientip} -j DROP" 13 | # } 14 | # } 15 | # 16 | # Run subprocesses via system ruby function 17 | # 18 | # WARNING: if you want it non-blocking you should use & or dtach or other such 19 | # techniques 20 | class LogStash::Outputs::Exec < LogStash::Outputs::Base 21 | 22 | config_name "exec" 23 | milestone 1 24 | 25 | # Command line to execute via subprocess. 
Use dtach or screen to make it non blocking 26 | config :command, :validate => :string, :required => true 27 | 28 | public 29 | def register 30 | @logger.debug("exec output registered", :config => @config) 31 | end # def register 32 | 33 | public 34 | def receive(event) 35 | return unless output?(event) 36 | @logger.debug("running exec command", :command => event.sprintf(@command)) 37 | system(event.sprintf(@command)) 38 | end # def receive 39 | 40 | end 41 | -------------------------------------------------------------------------------- /spec/support/LOGSTASH-820.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | # This spec covers the question here: 3 | # https://logstash.jira.com/browse/LOGSTASH-820 4 | 5 | require "test_utils" 6 | 7 | describe "LOGSTASH-820" do 8 | extend LogStash::RSpec 9 | describe "grok with unicode" do 10 | config <<-CONFIG 11 | filter { 12 | grok { 13 | #pattern => "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" 14 | pattern => "<%{POSINT:syslog_pri}>%{SPACE}%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(:?)(?:\\[%{GREEDYDATA:syslog_pid}\\])?(:?) %{GREEDYDATA:syslog_message}" 15 | } 16 | } 17 | CONFIG 18 | 19 | sample "<22>Jan 4 07:50:46 mailmaster postfix/policy-spf[9454]: : SPF permerror (Junk encountered in record 'v=spf1 mx a:mail.domain.no ip4:192.168.0.4 �all'): Envelope-from: email@domain.no" do 20 | insist { subject["tags"] }.nil? 21 | insist { subject["syslog_pri"] } == "22" 22 | insist { subject["syslog_program"] } == "postfix/policy-spf" 23 | end 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /docs/tutorials/10-minute-walkthrough/apache-elasticsearch.conf: -------------------------------------------------------------------------------- 1 | input { 2 | tcp { 3 | type => "apache" 4 | port => 3333 5 | } 6 | } 7 | 8 | filter { 9 | if [type] == "apache" { 10 | grok { 11 | # See the following URL for a complete list of named patterns 12 | # logstash/grok ships with by default: 13 | # https://github.com/logstash/logstash/tree/master/patterns 14 | # 15 | # The grok filter will use the below pattern and on successful match use 16 | # any captured values as new fields in the event. 17 | match => { "message" => "%{COMBINEDAPACHELOG}" } 18 | } 19 | 20 | date { 21 | # Try to pull the timestamp from the 'timestamp' field (parsed above with 22 | # grok). The apache time format looks like: "18/Aug/2011:05:44:34 -0700" 23 | match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] 24 | } 25 | } 26 | } 27 | 28 | output { 29 | elasticsearch { 30 | # Setting 'embedded' will run a real elasticsearch server inside logstash. 31 | # This option below saves you from having to run a separate process just 32 | # for ElasticSearch, so you can get started quicker! 33 | embedded => true 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /spec/filters/environment.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/environment" 3 | 4 | describe LogStash::Filters::Environment do 5 | extend LogStash::RSpec 6 | 7 | describe "add a field from the environment" do 8 | # The logstash config goes here. 9 | # At this time, only filters are supported. 
10 | config <<-CONFIG 11 | filter { 12 | environment { 13 | add_field_from_env => [ "newfield", "MY_ENV_VAR" ] 14 | } 15 | } 16 | CONFIG 17 | 18 | ENV["MY_ENV_VAR"] = "hello world" 19 | 20 | sample "example" do 21 | insist { subject["newfield"] } == "hello world" 22 | end 23 | end 24 | 25 | describe "does nothing on non-matching events" do 26 | # The logstash config goes here. 27 | # At this time, only filters are supported. 28 | config <<-CONFIG 29 | filter { 30 | environment { 31 | type => "foo" 32 | add_field_from_env => [ "newfield", "MY_ENV_VAR" ] 33 | } 34 | } 35 | CONFIG 36 | 37 | ENV["MY_ENV_VAR"] = "hello world" 38 | 39 | sample("type" => "bar", "message" => "fizz") do 40 | insist { subject["newfield"] }.nil? 41 | end 42 | end 43 | end 44 | -------------------------------------------------------------------------------- /lib/logstash/codecs/base.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/event" 4 | require "logstash/plugin" 5 | require "logstash/logging" 6 | 7 | # This is the base class for logstash codecs. 8 | module LogStash::Codecs; class Base < LogStash::Plugin 9 | include LogStash::Config::Mixin 10 | config_name "codec" 11 | 12 | def initialize(params={}) 13 | super 14 | config_init(params) 15 | register if respond_to?(:register) 16 | end 17 | 18 | public 19 | def decode(data) 20 | raise "#{self.class}#decode must be overidden" 21 | end # def decode 22 | 23 | alias_method :<<, :decode 24 | 25 | public 26 | def encode(data) 27 | raise "#{self.class}#encode must be overidden" 28 | end # def encode 29 | 30 | public 31 | def teardown; end; 32 | 33 | public 34 | def on_event(&block) 35 | @on_event = block 36 | end 37 | 38 | public 39 | def flush(&block) 40 | # does nothing by default. 41 | # if your codec needs a flush method (like you are spooling things) 42 | # you must implement this. 
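    # As a sketch: a buffering codec's flush would typically hand any queued
    # events to the caller (via the block or @on_event) and then clear its
    # buffer, much like the spool codec's teardown shown earlier does.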
43 | end 44 | 45 | public 46 | def clone 47 | return self.class.new(params) 48 | end 49 | end; end # class LogStash::Codecs::Base 50 | -------------------------------------------------------------------------------- /lib/logstash/threadwatchdog.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/logging" 4 | 5 | class LogStash::ThreadWatchdog 6 | attr_accessor :logger 7 | attr_accessor :threads 8 | 9 | class TimeoutError < StandardError; end 10 | 11 | public 12 | def initialize(threads, watchdog_timeout=10) 13 | @threads = threads 14 | @watchdog_timeout = watchdog_timeout 15 | end # def initialize 16 | 17 | public 18 | def watch 19 | while sleep(1) 20 | cutoff = Time.now - @watchdog_timeout 21 | @threads.each do |t| 22 | watchdog = t[:watchdog] 23 | if watchdog and watchdog <= cutoff 24 | age = Time.now - watchdog 25 | @logger.fatal("thread watchdog timeout", 26 | :thread => t, 27 | :backtrace => t.backtrace, 28 | :thread_watchdog => watchdog, 29 | :age => age, 30 | :cutoff => @watchdog_timeout, 31 | :state => t[:watchdog_state]) 32 | raise TimeoutError, "watchdog timeout" 33 | end 34 | end 35 | end 36 | end # def watch 37 | end # class LogStash::ThreadWatchdog 38 | -------------------------------------------------------------------------------- /lib/logstash/multiqueue.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "cabin" 4 | 5 | class LogStash::MultiQueue 6 | attr_accessor :logger 7 | 8 | public 9 | def initialize(*queues) 10 | @logger = Cabin::Channel.get(LogStash) 11 | @mutex = Mutex.new 12 | @queues = queues 13 | end # def initialize 14 | 15 | public 16 | def logger=(_logger) 17 | @logger = _logger 18 | 19 | # Set the logger for all known queues, too. 20 | @queues.each do |q| 21 | q.logger = _logger 22 | end 23 | end # def logger= 24 | 25 | # Push an object to all queues. 26 | public 27 | def push(object) 28 | @queues.each { |q| q.push(object) } 29 | end # def push 30 | alias :<< :push 31 | 32 | alias_method :<<, :push 33 | 34 | # Add a new Queue to this queue. 35 | public 36 | def add_queue(queue) 37 | @mutex.synchronize do 38 | @queues << queue 39 | end 40 | end # def add_queue 41 | 42 | public 43 | def remove_queue(queue) 44 | @mutex.synchronize do 45 | @queues.delete(queue) 46 | end 47 | end # def remove_queue 48 | 49 | public 50 | def size 51 | return @queues.collect { |q| q.size } 52 | end # def size 53 | end # class LogStash::MultiQueue 54 | -------------------------------------------------------------------------------- /misc/presentation-description.txt: -------------------------------------------------------------------------------- 1 | logstash: get awesome with your logs. 2 | 3 | This talk will introduce the free and open source tool, logstash, and cover how 4 | it can be used to debug and analyze problems with your infrastructure and your 5 | business: centralize your event and log collection, analyze data, and correlate 6 | failures. It will also cover some experiences and best practices to help you 7 | get value the most out of your code. 8 | 9 | -- 10 | 11 | Logstash is an open source, free, and scalable tool that can help you get a 12 | grip on your logs and events. 
Search and analyze your infrastructure with ease, 13 | Let logstash be a crystal ball for viewing events in real-time across your 14 | infrastructure and your business, Logstash acts as a pipeline, so you can 15 | easily automate reactions and alerts to create a self-healing and monitored 16 | infrastructure. 17 | 18 | This talk introduces logstash and covers how it can be used to debug and 19 | analyze problems with your infrastructure and your business: centralize your 20 | event and log collection, analyze data, and correlate failures. 21 | 22 | This talk targets folks software engineers, sysadmins, and engineering managers. 23 | 24 | Project site: http://logstash.net/ 25 | -------------------------------------------------------------------------------- /lib/logstash/filters/ruby.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | # Execute ruby code. 6 | # 7 | # For example, to cancel 90% of events, you can do this: 8 | # 9 | # filter { 10 | # ruby { 11 | # # Cancel 90% of events 12 | # code => "event.cancel if rand <= 0.90" 13 | # } 14 | # } 15 | # 16 | class LogStash::Filters::Ruby < LogStash::Filters::Base 17 | config_name "ruby" 18 | milestone 1 19 | 20 | # Any code to execute at logstash startup-time 21 | config :init, :validate => :string 22 | 23 | # The code to execute for every event. 24 | # You will have an 'event' variable available that is the event itself. 25 | config :code, :validate => :string, :required => true 26 | 27 | public 28 | def register 29 | # TODO(sissel): Compile the ruby code 30 | eval(@init, binding, "(ruby filter init)") if @init 31 | eval("@codeblock = lambda { |event| #{@code} }", binding, "(ruby filter code)") 32 | end # def register 33 | 34 | public 35 | def filter(event) 36 | return unless filter?(event) 37 | 38 | @codeblock.call(event) 39 | 40 | filter_matched(event) 41 | end # def filter 42 | end # class LogStash::Filters::Ruby 43 | -------------------------------------------------------------------------------- /spec/filters/checksum.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/checksum" 3 | require 'openssl' 4 | 5 | describe LogStash::Filters::Checksum do 6 | extend LogStash::RSpec 7 | 8 | LogStash::Filters::Checksum::ALGORITHMS.each do |alg| 9 | describe "#{alg} checksum with single field" do 10 | config <<-CONFIG 11 | filter { 12 | checksum { 13 | algorithm => "#{alg}" 14 | keys => ["test"] 15 | } 16 | } 17 | CONFIG 18 | 19 | sample "test" => "foo bar" do 20 | insist { !subject["logstash_checksum"].nil? } 21 | insist { subject["logstash_checksum"] } == OpenSSL::Digest.hexdigest(alg, "|test|foo bar|") 22 | end 23 | end 24 | 25 | describe "#{alg} checksum with multiple keys" do 26 | config <<-CONFIG 27 | filter { 28 | checksum { 29 | algorithm => "#{alg}" 30 | keys => ["test1", "test2"] 31 | } 32 | } 33 | CONFIG 34 | 35 | sample "test1" => "foo", "test2" => "bar" do 36 | insist { !subject["logstash_checksum"].nil? 
} 37 | insist { subject["logstash_checksum"] } == OpenSSL::Digest.hexdigest(alg, "|test1|foo|test2|bar|") 38 | end 39 | end 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /lib/logstash/outputs/websocket/pubsub.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/outputs/websocket" 4 | 5 | class LogStash::Outputs::WebSocket::Pubsub 6 | attr_accessor :logger 7 | 8 | def initialize 9 | @subscribers = [] 10 | @subscribers_lock = Mutex.new 11 | end # def initialize 12 | 13 | def publish(object) 14 | @subscribers_lock.synchronize do 15 | break if @subscribers.size == 0 16 | 17 | failed = [] 18 | @subscribers.each do |subscriber| 19 | begin 20 | subscriber.call(object) 21 | rescue => e 22 | @logger.error("Failed to publish to subscriber", :subscriber => subscriber, :exception => e) 23 | failed << subscriber 24 | end 25 | end 26 | 27 | failed.each do |subscriber| 28 | @subscribers.delete(subscriber) 29 | end 30 | end # @subscribers_lock.synchronize 31 | end # def Pubsub 32 | 33 | def subscribe(&block) 34 | queue = Queue.new 35 | @subscribers_lock.synchronize do 36 | @subscribers << proc do |event| 37 | queue << event 38 | end 39 | end 40 | 41 | while true 42 | block.call(queue.pop) 43 | end 44 | end # def subscribe 45 | end # class LogStash::Outputs::WebSocket::Pubsub 46 | -------------------------------------------------------------------------------- /pkg/debian/debian/copyright: -------------------------------------------------------------------------------- 1 | This package was debianized by Corey Quinn . 2 | 3 | License: 4 | 5 | This package is free software; you can redistribute it and/or modify 6 | it under the terms of the GNU General Public License as published by 7 | the Free Software Foundation; either version 2 of the License, or 8 | (at your option) any later version. 9 | 10 | This package is distributed in the hope that it will be useful, 11 | but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | GNU General Public License for more details. 14 | 15 | You should have received a copy of the GNU General Public License 16 | along with this package; if not, write to the Free Software 17 | Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA 18 | 19 | On Debian systems, the complete text of the GNU General 20 | Public License can be found in `/usr/share/common-licenses/GPL'. 21 | 22 | The Debian packaging is (C) 2013, Corey Quinn and 23 | is licensed under the GPL, see above. 24 | 25 | 26 | # Please also look if there are files or directories which have a 27 | # different copyright/license attached and list them here. 28 | -------------------------------------------------------------------------------- /spec/codecs/json.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/json" 2 | require "logstash/event" 3 | require "insist" 4 | 5 | describe LogStash::Codecs::JSON do 6 | subject do 7 | next LogStash::Codecs::JSON.new 8 | end 9 | 10 | context "#decode" do 11 | it "should return an event from json data" do 12 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 13 | subject.decode(data.to_json) do |event| 14 | insist { event.is_a? 
LogStash::Event } 15 | insist { event["foo"] } == data["foo"] 16 | insist { event["baz"] } == data["baz"] 17 | insist { event["bah"] } == data["bah"] 18 | end 19 | end 20 | end 21 | 22 | context "#encode" do 23 | it "should return json data" do 24 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 25 | event = LogStash::Event.new(data) 26 | got_event = false 27 | subject.on_event do |d| 28 | insist { d.chomp } == LogStash::Event.new(data).to_json 29 | insist { JSON.parse(d)["foo"] } == data["foo"] 30 | insist { JSON.parse(d)["baz"] } == data["baz"] 31 | insist { JSON.parse(d)["bah"] } == data["bah"] 32 | got_event = true 33 | end 34 | subject.encode(event) 35 | insist { got_event } 36 | end 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /spec/filters/useragent.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/useragent" 3 | 4 | describe LogStash::Filters::UserAgent do 5 | extend LogStash::RSpec 6 | 7 | describe "defaults" do 8 | config <<-CONFIG 9 | filter { 10 | useragent { 11 | source => "message" 12 | target => "ua" 13 | } 14 | } 15 | CONFIG 16 | 17 | sample "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31" do 18 | insist { subject }.include?("ua") 19 | insist { subject["ua"]["name"] } == "Chrome" 20 | insist { subject["ua"]["os"] } == "Linux" 21 | insist { subject["ua"]["major"] } == "26" 22 | insist { subject["ua"]["minor"] } == "0" 23 | end 24 | end 25 | 26 | describe "" do 27 | config <<-CONFIG 28 | filter { 29 | useragent { 30 | source => "message" 31 | } 32 | } 33 | CONFIG 34 | 35 | sample "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31" do 36 | insist { subject["name"] } == "Chrome" 37 | insist { subject["os"] } == "Linux" 38 | insist { subject["major"] } == "26" 39 | insist { subject["minor"] } == "0" 40 | end 41 | end 42 | end 43 | -------------------------------------------------------------------------------- /spec/codecs/edn.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/edn" 2 | require "logstash/event" 3 | require "insist" 4 | require "edn" 5 | 6 | describe LogStash::Codecs::EDN do 7 | subject do 8 | next LogStash::Codecs::EDN.new 9 | end 10 | 11 | context "#decode" do 12 | it "should return an event from edn data" do 13 | data = {"foo" => "bar", "baz" => {"bah" => ["a", "b", "c"]}} 14 | subject.decode(data.to_edn) do |event| 15 | insist { event }.is_a?(LogStash::Event) 16 | insist { event["foo"] } == data["foo"] 17 | insist { event["baz"] } == data["baz"] 18 | insist { event["bah"] } == data["bah"] 19 | end 20 | end 21 | end 22 | 23 | context "#encode" do 24 | it "should return edn data" do 25 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 26 | event = LogStash::Event.new(data) 27 | got_event = false 28 | subject.on_event do |d| 29 | insist { d.chomp } == LogStash::Event.new(data).to_hash.to_edn 30 | insist { EDN.read(d)["foo"] } == data["foo"] 31 | insist { EDN.read(d)["baz"] } == data["baz"] 32 | insist { EDN.read(d)["bah"] } == data["bah"] 33 | got_event = true 34 | end 35 | subject.encode(event) 36 | insist { got_event } 37 | end 38 | end 39 | 40 | end 41 | -------------------------------------------------------------------------------- /lib/logstash/codecs/msgpack.rb: 
-------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | 4 | class LogStash::Codecs::Msgpack < LogStash::Codecs::Base 5 | config_name "msgpack" 6 | 7 | milestone 1 8 | 9 | config :format, :validate => :string, :default => nil 10 | 11 | public 12 | def register 13 | require "msgpack" 14 | end 15 | 16 | public 17 | def decode(data) 18 | begin 19 | # Msgpack does not care about UTF-8 20 | event = LogStash::Event.new(MessagePack.unpack(data)) 21 | event["@timestamp"] = Time.at(event["@timestamp"]).utc if event["@timestamp"].is_a? Float 22 | event["tags"] ||= [] 23 | if @format 24 | event["message"] ||= event.sprintf(@format) 25 | end 26 | rescue => e 27 | # Treat as plain text and try to do the best we can with it? 28 | @logger.warn("Trouble parsing msgpack input, falling back to plain text", 29 | :input => data, :exception => e) 30 | event["message"] = data 31 | event["tags"] ||= [] 32 | event["tags"] << "_msgpackparsefailure" 33 | end 34 | yield event 35 | end # def decode 36 | 37 | public 38 | def encode(event) 39 | event["@timestamp"] = event["@timestamp"].to_f 40 | @on_event.call event.to_hash.to_msgpack 41 | end # def encode 42 | 43 | end # class LogStash::Codecs::Msgpack 44 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch-template.json: -------------------------------------------------------------------------------- 1 | { 2 | "template" : "logstash-*", 3 | "settings" : { 4 | "index.refresh_interval" : "5s", 5 | "analysis" : { 6 | "analyzer" : { 7 | "default" : { 8 | "type" : "standard", 9 | "stopwords" : "_none_" 10 | } 11 | } 12 | } 13 | }, 14 | "mappings" : { 15 | "_default_" : { 16 | "_all" : {"enabled" : true}, 17 | "dynamic_templates" : [ { 18 | "string_fields" : { 19 | "match" : "*", 20 | "match_mapping_type" : "string", 21 | "mapping" : { 22 | "type" : "multi_field", 23 | "fields" : { 24 | "{name}" : {"type": "string", "index" : "analyzed", "omit_norms" : true, "index_options" : "docs"}, 25 | "{name}.raw" : {"type": "string", "index" : "not_analyzed", "ignore_above" : 256} 26 | } 27 | } 28 | } 29 | } ], 30 | "properties" : { 31 | "@version": { "type": "string", "index": "not_analyzed" }, 32 | "geoip" : { 33 | "type" : "object", 34 | "dynamic": true, 35 | "path": "full", 36 | "properties" : { 37 | "location" : { "type" : "geo_point" } 38 | } 39 | } 40 | } 41 | } 42 | } 43 | } 44 | 45 | -------------------------------------------------------------------------------- /lib/logstash/inputs/stdin.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/inputs/base" 3 | require "logstash/namespace" 4 | require "socket" # for Socket.gethostname 5 | 6 | # Read events from standard input. 7 | # 8 | # By default, each event is assumed to be one line. If you 9 | # want to join lines, you'll want to use the multiline filter. 10 | class LogStash::Inputs::Stdin < LogStash::Inputs::Base 11 | config_name "stdin" 12 | milestone 3 13 | 14 | default :codec, "line" 15 | 16 | public 17 | def register 18 | @host = Socket.gethostname 19 | end # def register 20 | 21 | def run(queue) 22 | while true 23 | begin 24 | # Based on some testing, there is no way to interrupt an IO.sysread nor 25 | # IO.select call in JRuby. 
Bummer :( 26 | data = $stdin.sysread(16384) 27 | @codec.decode(data) do |event| 28 | decorate(event) 29 | event["host"] = @host 30 | queue << event 31 | end 32 | rescue EOFError, LogStash::ShutdownSignal 33 | # stdin closed or a requested shutdown 34 | break 35 | end 36 | end # while true 37 | finished 38 | end # def run 39 | 40 | public 41 | def teardown 42 | @logger.debug("stdin shutting down.") 43 | $stdin.close rescue nil 44 | finished 45 | end # def teardown 46 | end # class LogStash::Inputs::Stdin 47 | -------------------------------------------------------------------------------- /spec/inputs/gelf.rb: -------------------------------------------------------------------------------- 1 | 2 | require "test_utils" 3 | require "gelf" 4 | describe "inputs/gelf" do 5 | extend LogStash::RSpec 6 | 7 | describe "reads chunked gelf messages " do 8 | port = 12209 9 | host = "127.0.0.1" 10 | chunksize = 1420 11 | gelfclient = GELF::Notifier.new(host,port,chunksize) 12 | 13 | config <<-CONFIG 14 | input { 15 | gelf { 16 | port => "#{port}" 17 | host => "#{host}" 18 | } 19 | } 20 | CONFIG 21 | 22 | input do |pipeline, queue| 23 | Thread.new { pipeline.run } 24 | sleep 0.1 while !pipeline.ready? 25 | 26 | # generate random characters (message is zipped!) from printable ascii ( SPACE till ~ ) 27 | # to trigger gelf chunking 28 | s = StringIO.new 29 | for i in 1..2000 30 | s << 32 + rand(126-32) 31 | end 32 | large_random = s.string 33 | 34 | [ "hello", 35 | "world", 36 | large_random, 37 | "we survived gelf!" 38 | ].each do |m| 39 | gelfclient.notify!( "short_message" => m ) 40 | # poll at most 10 times 41 | waits = 0 42 | while waits < 10 and queue.size == 0 43 | sleep 0.1 44 | waits += 1 45 | end 46 | insist { queue.size } > 0 47 | insist { queue.pop["message"] } == m 48 | end 49 | 50 | end 51 | end 52 | end 53 | -------------------------------------------------------------------------------- /misc/screencast/001.config: -------------------------------------------------------------------------------- 1 | cd ~/projects/logstash 2 | %E rm ~/projects/logstash/etc/logstash-demo.yaml 3 | 4 | vi etc/logstash-demo.yaml 5 | :set paste 6 | 7 | %K control+l 8 | i 9 | # Remember that logstash provides a way to specify inputs, filters, and 10 | # outputs. For this demo, I'll just show inputs + outputs 11 | --- 12 | inputs: 13 | # You can also tag inputs for easier handling later in your pipeline. 14 | linux-syslog: # this is the 'linux-syslog' tag 15 | - /var/log/messages # watch /var/log/messages (uses eventmachine-tail) 16 | - /var/log/kern.log 17 | - /var/log/auth.log 18 | - /var/log/user.log 19 | apache-access: # similar, different tag. 
20 | - /var/log/apache2/access.log 21 | apache-error: 22 | - /var/log/apache2/access.log 23 | #other: 24 | #- amqp://myamqpserver/fanout/rawlogs # an amqp fanout as input 25 | #- amqp://myamqpserver/topic/rawlogs # an amqp topic as input 26 | #- syslog:/// # take input via syslog protocol over the network 27 | outputs: 28 | #- amqp://myamqpserver/topic/logs # broadcast logs to an AMQP topic 29 | #- mongodb://mongoserver/logs # store events in mongodb 30 | #- stdout:/// # send to stdout (like tail -f, but better) 31 | #- syslog://syslogserver/ # send to another syslog server 32 | - websocket:/// # send to websockets 33 | %E sleep 3 34 | 35 | %K Escape 36 | 37 | ZZ 38 | -------------------------------------------------------------------------------- /lib/logstash/inputs/varnishlog.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/inputs/threadable" 3 | require "logstash/namespace" 4 | require "socket" # for Socket.gethostname 5 | 6 | # Read from varnish cache's shared memory log 7 | class LogStash::Inputs::Varnishlog < LogStash::Inputs::Threadable 8 | config_name "varnishlog" 9 | milestone 1 10 | 11 | public 12 | def register 13 | require 'varnish' 14 | @vd = Varnish::VSM.VSM_New 15 | Varnish::VSL.VSL_Setup(@vd) 16 | Varnish::VSL.VSL_Open(@vd, 1) 17 | 18 | end # def register 19 | 20 | def run(queue) 21 | @q = queue 22 | @hostname = Socket.gethostname 23 | Varnish::VSL.VSL_Dispatch(@vd, self.method(:cb).to_proc, FFI::MemoryPointer.new(:pointer)) 24 | end # def run 25 | 26 | private 27 | def cb(priv, tag, fd, len, spec, ptr, bitmap) 28 | begin 29 | str = ptr.read_string(len) 30 | event = LogStash::Event.new("message" => str, "host" => @host) 31 | decorate(event) 32 | event["varnish_tag"] = tag 33 | event["varnish_fd"] = fd 34 | event["varnish_spec"] = spec 35 | event["varnish_bitmap"] = bitmap 36 | @q << event 37 | rescue => e 38 | @logger.warn("varnishlog exception: #{e.inspect}") 39 | ensure 40 | return 0 41 | end 42 | end 43 | 44 | public 45 | def teardown 46 | finished 47 | end # def teardown 48 | end # class LogStash::Inputs::Stdin 49 | -------------------------------------------------------------------------------- /lib/logstash/inputs/graphite.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/inputs/tcp" 3 | require "logstash/namespace" 4 | 5 | # Receive graphite metrics. This plugin understands the text-based graphite 6 | # carbon protocol. Both 'N' and specific-timestamp forms are supported, example: 7 | # 8 | # mysql.slow_query.count 204 N 9 | # haproxy.live_backends 7 1364608909 10 | # 11 | # 'N' means 'now' for a timestamp. This plugin also supports having the time 12 | # specified in the metric payload: 13 | # 14 | # For every metric received from a client, a single event will be emitted with 15 | # the metric name as the field (like 'mysql.slow_query.count') and the metric 16 | # value as the field's value. 17 | class LogStash::Inputs::Graphite < LogStash::Inputs::Tcp 18 | config_name "graphite" 19 | milestone 1 20 | 21 | ISO8601_STRFTIME = "%04d-%02d-%02dT%02d:%02d:%02d.%06d%+03d:00".freeze 22 | 23 | public 24 | def run(output_queue) 25 | @queue = output_queue 26 | super(self) 27 | end 28 | 29 | # This is a silly hack to make the superclass (Tcp) give us a finished event 30 | # so that we can parse it accordingly. 
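  #
  # For example (an illustrative sketch), a carbon line of
  # "haproxy.live_backends 7 1364608909" arriving here would leave the event
  # looking roughly like
  #
  #   event["haproxy.live_backends"]  # => 7.0
  #   event["@timestamp"]             # => Time.at(1364608909).gmtime
  #
  # while a line whose timestamp field is 'N' keeps whatever timestamp the
  # event already carried.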
31 | def <<(event) 32 | name, value, time = event["message"].split(" ") 33 | event[name] = value.to_f 34 | 35 | if time != "N" 36 | event["@timestamp"] = Time.at(time.to_i).gmtime 37 | end 38 | 39 | @queue << event 40 | end 41 | end # class LogStash::Inputs::Graphite 42 | -------------------------------------------------------------------------------- /spec/codecs/msgpack.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/msgpack" 2 | require "logstash/event" 3 | require "insist" 4 | 5 | # Skip msgpack for now since Hash#to_msgpack seems to not be a valid method? 6 | describe LogStash::Codecs::Msgpack, :if => false do 7 | subject do 8 | next LogStash::Codecs::Msgpack.new 9 | end 10 | 11 | context "#decode" do 12 | it "should return an event from msgpack data" do 13 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 14 | subject.decode(data.to_msgpack) do |event| 15 | insist { event.is_a? LogStash::Event } 16 | insist { event["foo"] } == data["foo"] 17 | insist { event["baz"] } == data["baz"] 18 | insist { event["bah"] } == data["bah"] 19 | end 20 | end 21 | end 22 | 23 | context "#encode" do 24 | it "should return msgpack data" do 25 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 26 | event = LogStash::Event.new(data) 27 | got_event = false 28 | subject.on_event do |d| 29 | insist { d } == LogStash::Event.new(data).to_hash.to_msgpack 30 | insist { MessagePack.unpack(d)["foo"] } == data["foo"] 31 | insist { MessagePack.unpack(d)["baz"] } == data["baz"] 32 | insist { MessagePack.unpack(d)["bah"] } == data["bah"] 33 | got_event = true 34 | end 35 | subject.encode(event) 36 | insist { got_event } 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/logstash/util/fieldreference.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/util" 4 | 5 | module LogStash::Util::FieldReference 6 | def compile(str) 7 | if str[0,1] != '[' 8 | return <<-"CODE" 9 | lambda do |e, &block| 10 | return block.call(e, #{str.inspect}) unless block.nil? 11 | return e[#{str.inspect}] 12 | end 13 | CODE 14 | end 15 | 16 | code = "lambda do |e, &block|\n" 17 | selectors = str.scan(/(?<=\[).+?(?=\])/) 18 | selectors.each_with_index do |tok, i| 19 | last = (i == selectors.count() - 1) 20 | code << " # [#{tok}]#{ last ? " (last selector)" : "" }\n" 21 | 22 | if last 23 | code << <<-"CODE" 24 | return block.call(e, #{tok.inspect}) unless block.nil? 25 | CODE 26 | end 27 | 28 | code << <<-"CODE" 29 | if e.is_a?(Array) 30 | e = e[#{tok.to_i}] 31 | else 32 | e = e[#{tok.inspect}] 33 | end 34 | return e if e.nil? 
35 | CODE 36 | 37 | end 38 | code << "return e\nend" 39 | #puts code 40 | return code 41 | end # def compile 42 | 43 | def exec(str, obj, &block) 44 | @__fieldeval_cache ||= {} 45 | @__fieldeval_cache[str] ||= eval(compile(str)) 46 | return @__fieldeval_cache[str].call(obj, &block) 47 | end 48 | 49 | extend self 50 | end # module LogStash::Util::FieldReference 51 | -------------------------------------------------------------------------------- /spec/codecs/json_spooler.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/json_spooler" 2 | require "logstash/event" 3 | require "insist" 4 | 5 | describe LogStash::Codecs::JsonSpooler do 6 | subject do 7 | next LogStash::Codecs::JsonSpooler.new 8 | end 9 | 10 | context "#decode" do 11 | it "should return an event from spooled json data" do 12 | data = {"a" => 1} 13 | events = [LogStash::Event.new(data), LogStash::Event.new(data), 14 | LogStash::Event.new(data)] 15 | subject.decode(events.to_json) do |event| 16 | insist { event.is_a? LogStash::Event } 17 | insist { event["a"] } == data["a"] 18 | end 19 | end 20 | end 21 | 22 | context "#encode" do 23 | it "should return spooled json data" do 24 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 25 | subject.spool_size = 3 26 | got_event = false 27 | subject.on_event do |d| 28 | events = JSON.parse(d) 29 | insist { events.is_a? Array } 30 | insist { events[0].is_a? LogStash::Event } 31 | insist { events[0]["foo"] } == data["foo"] 32 | insist { events[0]["baz"] } == data["baz"] 33 | insist { events[0]["bah"] } == data["bah"] 34 | insist { events.length } == 3 35 | got_event = true 36 | end 37 | 3.times do 38 | subject.encode(LogStash::Event.new(data)) 39 | end 40 | insist { got_event } 41 | end 42 | end 43 | end 44 | -------------------------------------------------------------------------------- /lib/logstash/codecs/compress_spooler.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | 4 | class LogStash::Codecs::CompressSpooler < LogStash::Codecs::Base 5 | config_name 'compress_spooler' 6 | milestone 1 7 | config :spool_size, :validate => :number, :default => 50 8 | config :compress_level, :validate => :number, :default => 6 9 | 10 | public 11 | def register 12 | require "msgpack" 13 | require "zlib" 14 | @buffer = [] 15 | end 16 | 17 | public 18 | def decode(data) 19 | z = Zlib::Inflate.new 20 | data = MessagePack.unpack(z.inflate(data)) 21 | z.finish 22 | z.close 23 | data.each do |event| 24 | event = LogStash::Event.new(event) 25 | event["@timestamp"] = Time.at(event["@timestamp"]).utc if event["@timestamp"].is_a? Float 26 | yield event 27 | end 28 | end # def decode 29 | 30 | public 31 | def encode(data) 32 | if @buffer.length >= @spool_size 33 | z = Zlib::Deflate.new(@compress_level) 34 | @on_event.call z.deflate(MessagePack.pack(@buffer), Zlib::FINISH) 35 | z.close 36 | @buffer.clear 37 | else 38 | data["@timestamp"] = data["@timestamp"].to_f 39 | @buffer << data.to_hash 40 | end 41 | end # def encode 42 | 43 | public 44 | def teardown 45 | if !@buffer.nil? 
and @buffer.length > 0 46 | @on_event.call @buffer 47 | end 48 | @buffer.clear 49 | end 50 | end # class LogStash::Codecs::CompressSpooler 51 | -------------------------------------------------------------------------------- /misc/screencast/002.webdemo: -------------------------------------------------------------------------------- 1 | %K control+a c 2 | %E sleep 2 3 | cd ~/projects/logstash 4 | export RUBYLIB=lib 5 | ruby bin/logstash -f etc/logstash-demo.yaml 6 | 7 | %E sleep 2 8 | # Now let's pop open google chrome (supports WebSockets) and watch 9 | # some logs... 10 | 11 | %E xdotool search --title " - Google Chrome" windowactivate --sync %@ 12 | %K control+l BackSpace 13 | http://snack.home/~jls/ws 14 | 15 | %E logger -p 1 -t demo "This log is coming to you live."; sleep 2 16 | %E logger -p 1 -t demo "Any log being received on a logstash input can be viewed here, or stored in a database, or shipped elsewhere for processing."; sleep 2; 17 | %E logger -p 1 -t demo "Everything is piped input -> filter -> output."; sleep 2; 18 | %E logger -p 1 -t demo "The output of one can be the input of another. Chain by chain. "; sleep 2; 19 | %E logger -p 1 -t demo "The way you deal with logs is about to change."; sleep 2; 20 | 21 | %E xdotool search --onlyvisible gnome-terminal windowsize --usehints 70 7 windowactivate --sync windowmove 3000 0 22 | %K control+minus 23 | %K control+a c 24 | %E sleep 2 25 | # Now we can watch logs in the browser... 26 | curl -o /dev/null http://snack.home/~jls/something 27 | !! 28 | !! 29 | !! 30 | 31 | logger -p 1 -t logging-example 'Hello world!' 32 | logger -p 1 -t logging-example "Welcome to logstash. $RANDOM" 33 | !! 34 | !! 35 | !! 36 | %E sleep 2 37 | 38 | 39 | # It's fast, too. 40 | seq 15 | xargs -n1 logger -p 1 -t fastlogs "real time feeds == :)" 41 | 42 | -------------------------------------------------------------------------------- /lib/logstash/codecs/json.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | require "logstash/codecs/line" 4 | require "json" 5 | 6 | # The codec should be used to decode full json messages. 7 | # If you are streaming JSON messages delimited by '\n' then 8 | # see the json_lines codec. 9 | # Encoding will result in a single json string. 10 | class LogStash::Codecs::JSON < LogStash::Codecs::Base 11 | config_name "json" 12 | 13 | milestone 1 14 | 15 | # The character encoding used in this codec. Examples include "UTF-8" and 16 | # "CP1252" 17 | # 18 | # JSON requires valid UTF-8 strings, but in some cases, software that 19 | # emits JSON does so in another encoding (nxlog, for example). In 20 | # weird cases like this, you can set the charset setting to the 21 | # actual encoding of the text and logstash will convert it for you. 22 | # 23 | # For nxlog users, you'll want to set this to "CP1252" 24 | config :charset, :validate => ::Encoding.name_list, :default => "UTF-8" 25 | 26 | public 27 | def decode(data) 28 | begin 29 | yield LogStash::Event.new(JSON.parse(data)) 30 | rescue JSON::ParserError => e 31 | @logger.info("JSON parse failure. 
Falling back to plain-text", :error => e, :data => data) 32 | yield LogStash::Event.new("message" => data) 33 | end 34 | end # def decode 35 | 36 | public 37 | def encode(data) 38 | @on_event.call(data.to_json) 39 | end # def encode 40 | 41 | end # class LogStash::Codecs::JSON 42 | -------------------------------------------------------------------------------- /spec/codecs/oldlogstashjson.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/oldlogstashjson" 2 | require "logstash/event" 3 | require "insist" 4 | 5 | describe LogStash::Codecs::OldLogStashJSON do 6 | subject do 7 | next LogStash::Codecs::OldLogStashJSON.new 8 | end 9 | 10 | context "#decode" do 11 | it "should return a new (v1) event from old (v0) json data" do 12 | data = {"@message" => "bar", "@source_host" => "localhost", 13 | "@tags" => ["a","b","c"]} 14 | subject.decode(data.to_json) do |event| 15 | insist { event.is_a? LogStash::Event } 16 | insist { event["@timestamp"] } != nil 17 | insist { event["type"] } == data["@type"] 18 | insist { event["message"] } == data["@message"] 19 | insist { event["host"] } == data["@source_host"] 20 | insist { event["tags"] } == data["@tags"] 21 | end 22 | end 23 | end 24 | 25 | context "#encode" do 26 | it "should return old (v0) json data" do 27 | data = {"type" => "t", "message" => "wat!?", 28 | "host" => "localhost", "path" => "/foo", 29 | "tags" => ["a","b","c"]} 30 | event = LogStash::Event.new(data) 31 | got_event = false 32 | subject.on_event do |d| 33 | insist { JSON.parse(d)["@timestamp"] } != nil 34 | insist { JSON.parse(d)["@message"] } == data["message"] 35 | got_event = true 36 | end 37 | subject.encode(event) 38 | insist { got_event } 39 | end 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /pkg/debian/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | # The MIT License 3 | # 4 | # Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Jamie Whitehouse 5 | # Modified in 2013 by Corey Quinn for logstash. 6 | # 7 | # Permission is hereby granted, free of charge, to any person obtaining a copy 8 | # of this software and associated documentation files (the "Software"), to deal 9 | # in the Software without restriction, including without limitation the rights 10 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | # copies of the Software, and to permit persons to whom the Software is 12 | # furnished to do so, subject to the following conditions: 13 | # 14 | # The above copyright notice and this permission notice shall be included in 15 | # all copies or substantial portions of the Software. 16 | # 17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 23 | # THE SOFTWARE. 
24 | 25 | if [ -z "$1" ]; then 26 | echo "Usage: build.sh path/to/logstash.jar" 27 | exit 1 28 | fi 29 | 30 | d=$(dirname $0) 31 | cp "$1" $d/logstash.jar 32 | 33 | cd $d 34 | exec debuild -us -uc -B 35 | -------------------------------------------------------------------------------- /spec/util/fieldeval_spec.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/util/fieldreference" 3 | 4 | describe LogStash::Util::FieldReference, :if => true do 5 | it "should permit simple key names" do 6 | str = "hello" 7 | m = eval(subject.compile(str)) 8 | data = { "hello" => "world" } 9 | insist { m.call(data) } == data[str] 10 | end 11 | 12 | it "should permit [key][access]" do 13 | str = "[hello][world]" 14 | m = eval(subject.compile(str)) 15 | data = { "hello" => { "world" => "foo", "bar" => "baz" } } 16 | insist { m.call(data) } == data["hello"]["world"] 17 | end 18 | it "should permit [key][access]" do 19 | str = "[hello][world]" 20 | m = eval(subject.compile(str)) 21 | data = { "hello" => { "world" => "foo", "bar" => "baz" } } 22 | insist { m.call(data) } == data["hello"]["world"] 23 | end 24 | 25 | it "should permit blocks" do 26 | str = "[hello][world]" 27 | code = subject.compile(str) 28 | m = eval(subject.compile(str)) 29 | data = { "hello" => { "world" => "foo", "bar" => "baz" } } 30 | m.call(data) { |obj, key| obj.delete(key) } 31 | 32 | # Make sure the "world" key is removed. 33 | insist { data["hello"] } == { "bar" => "baz" } 34 | end 35 | 36 | it "should permit blocks #2" do 37 | str = "simple" 38 | code = subject.compile(str) 39 | m = eval(subject.compile(str)) 40 | data = { "simple" => "things" } 41 | m.call(data) { |obj, key| obj.delete(key) } 42 | insist { data }.empty? 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /lib/logstash/filters/i18n.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "i18n" 3 | require "logstash/filters/base" 4 | require "logstash/namespace" 5 | 6 | # The i18n filter allows you to remove special characters from 7 | # from a field 8 | class LogStash::Filters::I18n < LogStash::Filters::Base 9 | config_name "i18n" 10 | milestone 0 11 | 12 | # Replaces non-ASCII characters with an ASCII approximation, or 13 | # if none exists, a replacement character which defaults to “?” 14 | # 15 | # Example: 16 | # 17 | # filter { 18 | # i18n { 19 | # transliterate => ["field1", "field2"] 20 | # } 21 | # } 22 | config :transliterate, :validate => :array 23 | 24 | public 25 | def register 26 | end # def register 27 | 28 | public 29 | def filter(event) 30 | return unless filter?(event) 31 | 32 | transliterate(event) if @transliterate 33 | 34 | filter_matched(event) 35 | end # def filter 36 | 37 | private 38 | def transliterate(event) 39 | @transliterate.each do |field| 40 | if event[field].is_a?(Array) 41 | event[field].map! 
{ |v| I18n.transliterate(v).encode('UTF-8') } 42 | elsif event[field].is_a?(String) 43 | event[field] = I18n.transliterate(event[field].encode('UTF-8')) 44 | else 45 | @logger.debug("Can't transliterate something that isn't a string", 46 | :field => field, :value => event[field]) 47 | end 48 | end 49 | end # def transliterate 50 | 51 | end # class LogStash::Filters::I18n 52 | -------------------------------------------------------------------------------- /docs/flags.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Command-line flags - logstash 3 | layout: content_right 4 | --- 5 | # Command-line flags 6 | 7 | ## Agent 8 | 9 | The logstash agent has the following flags (also try using the '--help' flag) 10 | 11 |
<dl>
12 | <dt> -f, --config CONFIGFILE </dt>
13 | <dd> Load the logstash config from a specific file, directory, or a
14 | wildcard. If given a directory or wildcard, config files will be read
15 | from the directory in alphabetical order. </dd>
16 | <dt> -e CONFIGSTRING </dt>
17 | <dd> Use the given string as the configuration data. Same syntax as the
18 | config file. If no input is specified, 'stdin { type => stdin }' is the
19 | default. If no output is specified, 'stdout { debug => true }' is the
20 | default. </dd>
21 | <dt> -w, --filterworkers COUNT </dt>
22 | <dd> Run COUNT filter workers (default: 1) </dd>
23 | <dt> --watchdog-timeout TIMEOUT </dt>
24 | <dd> Set the watchdog timeout value in seconds. Default is 10. </dd>
25 | <dt> -l, --log FILE </dt>
26 | <dd> Log to the given path. Default is to log to stdout. </dd>
27 | <dt> -v </dt>
28 | <dd> Increase verbosity. There are multiple levels of verbosity available,
29 | with '-vv' currently being the highest. </dd>
30 | <dt> --pluginpath PLUGIN_PATH </dt>
31 | <dd> A colon-delimited path in which to find additional logstash plugins. </dd>
32 | </dl>
33 | 34 |
35 | ## Web
36 |
37 | <dl>
38 | <dt> -a, --address ADDRESS </dt>
39 | <dd> The address on which to start the web server. Default is 0.0.0.0. </dd>
40 | <dt> -p, --port PORT </dt>
41 | <dd> The port on which to start the web server. Default is 9292. </dd>
42 | </dl>
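
For example (an illustrative sketch; adjust the jar name and paths to match
your installation), the agent and the web interface might be started like so:

    java -jar logstash.jar agent -f /etc/logstash/agent.conf -l /var/log/logstash/agent.log
    java -jar logstash.jar web --address 127.0.0.1 --port 9292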
43 | 44 | -------------------------------------------------------------------------------- /lib/logstash/outputs/websocket.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/outputs/base" 4 | 5 | # This output runs a websocket server and publishes any 6 | # messages to all connected websocket clients. 7 | # 8 | # You can connect to it with ws://:/ 9 | # 10 | # If no clients are connected, any messages received are ignored. 11 | class LogStash::Outputs::WebSocket < LogStash::Outputs::Base 12 | config_name "websocket" 13 | milestone 1 14 | 15 | # The address to serve websocket data from 16 | config :host, :validate => :string, :default => "0.0.0.0" 17 | 18 | # The port to serve websocket data from 19 | config :port, :validate => :number, :default => 3232 20 | 21 | public 22 | def register 23 | require "ftw" 24 | require "logstash/outputs/websocket/app" 25 | require "logstash/outputs/websocket/pubsub" 26 | @pubsub = LogStash::Outputs::WebSocket::Pubsub.new 27 | @pubsub.logger = @logger 28 | @server = Thread.new(@pubsub) do |pubsub| 29 | begin 30 | Rack::Handler::FTW.run(LogStash::Outputs::WebSocket::App.new(pubsub, @logger), 31 | :Host => @host, :Port => @port) 32 | rescue => e 33 | @logger.error("websocket server failed", :exception => e) 34 | sleep 1 35 | retry 36 | end 37 | end 38 | end # def register 39 | 40 | public 41 | def receive(event) 42 | return unless output?(event) 43 | @pubsub.publish(event.to_json) 44 | end # def receive 45 | 46 | end # class LogStash::Outputs::Websocket 47 | -------------------------------------------------------------------------------- /etc/agent.lgtm.conf: -------------------------------------------------------------------------------- 1 | input { 2 | file { 3 | path => [ "/var/log/messages", "/var/log/kern.log" ] 4 | type => "linux-syslog" 5 | } 6 | 7 | file { 8 | path => "/var/log/apache2/access.log" 9 | type => "apache-access" 10 | } 11 | 12 | file { 13 | path => "/var/log/apache2/error.log" 14 | type => "apache-error" 15 | } 16 | } 17 | 18 | output { 19 | # This will be your durable shipping mechanism 20 | rabbitmq { 21 | host => "myrabbitmqserver" 22 | exchange_type => "fanout" 23 | exchange => "rawlogs" 24 | } 25 | # This is an optional non-durable shipping mechanism 26 | # With this, you can sniff logs from your own code 27 | rabbitmq { 28 | host => "127.0.0.1" 29 | exchange_type => "topic" 30 | exchange => "logsniff" 31 | durable => false 32 | persistent => false 33 | # The following is optional 34 | # but allows you to consume based on sender 35 | key => "logstash.%{host}" 36 | } 37 | stdout { } 38 | } 39 | 40 | # Filters are applied in the order the appear. 
41 | filter { 42 | multiline { 43 | type => "supervisorlogs" 44 | pattern => "^\s" 45 | what => previous 46 | } 47 | 48 | multiline { 49 | type => "testing" 50 | pattern => "^\s" 51 | what => previous 52 | } 53 | 54 | grok { 55 | type => "linux-syslog" 56 | pattern => ["%{SYSLOG_SUDO}", "%{SYSLOG_KERNEL}", "%{SYSLOGLINE}" ] 57 | } 58 | 59 | grok { 60 | type => "nagios" 61 | pattern => "%{NAGIOSLOGLINE}" 62 | } 63 | 64 | #date { 65 | #" testing" => fizzle 66 | #} 67 | } 68 | -------------------------------------------------------------------------------- /docs/plugin-milestones.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Plugin Milestones - logstash 3 | layout: content_right 4 | --- 5 | # Plugin Milestones 6 | 7 | Plugins (inputs/outputs/filters/codecs) have a milestone label in logstash. 8 | This is to provide an indicator to the end-user as to the kinds of changes 9 | a given plugin could have between logstash releases. 10 | 11 | The desire here is to allow plugin developers to quickly iterate on possible 12 | new plugins while conveying to the end-user a set of expectations about that 13 | plugin. 14 | 15 | ## Milestone 1 16 | 17 | Plugins at this milestone need your feedback to improve! Plugins at this 18 | milestone may change between releases as the community figures out the best way 19 | for the plugin to behave and be configured. 20 | 21 | ## Milestone 2 22 | 23 | Plugins at this milestone are more likely to have backwards-compatibility to 24 | previous releases than do Milestone 1 plugins. This milestone also indicates 25 | a greater level of in-the-wild usage by the community than the previous 26 | milestone. 27 | 28 | ## Milestone 3 29 | 30 | Plugins at this milestone have strong promises towards backwards-compatibility. 31 | This is enforced with automated tests to ensure behavior and configuration are 32 | consistent across releases. 33 | 34 | ## Milestone 0 35 | 36 | This milestone appears at the bottom of the page because it is very 37 | infrequently used. 38 | 39 | This milestone marker is used to generally indicate that a plugin has no 40 | active code maintainer nor does it have support from the community in terms 41 | of getting help. 42 | -------------------------------------------------------------------------------- /lib/logstash/codecs/fluent.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | require "logstash/util/charset" 4 | 5 | # This codec handles fluentd's msgpack schema. 6 | # 7 | # For example, you can receive logs from fluent-logger-ruby with: 8 | # 9 | # input { 10 | # tcp { 11 | # codec => fluent 12 | # port => 4000 13 | # } 14 | # } 15 | # 16 | # And from your ruby code in your own application: 17 | # 18 | # logger = Fluent::Logger::FluentLogger.new(nil, :host => "example.log", :port => 4000) 19 | # logger.post("some_tag", { "your" => "data", "here" => "yay!" }) 20 | # 21 | # Notes: 22 | # 23 | # * the fluent uses a second-precision time for events, so you will never see 24 | # subsecond precision on events processed by this codec. 
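# * as a rough illustration (the tag, time, and record here are made up), a
#   message packed as ["myapp.access", 1370000000, {"agent" => "curl"}]
#   decodes into an event whose "tags" field is "myapp.access", whose "agent"
#   field is "curl", and whose "@timestamp" is Time.at(1370000000).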
25 | # 26 | class LogStash::Codecs::Fluent < LogStash::Codecs::Base 27 | config_name "fluent" 28 | milestone 1 29 | 30 | public 31 | def register 32 | require "msgpack" 33 | @decoder = MessagePack::Unpacker.new 34 | end 35 | 36 | public 37 | def decode(data) 38 | @decoder.feed(data) 39 | @decoder.each do |tag, epochtime, map| 40 | event = LogStash::Event.new(map.merge( 41 | "@timestamp" => Time.at(epochtime), 42 | "tags" => tag 43 | )) 44 | yield event 45 | end 46 | end # def decode 47 | 48 | public 49 | def encode(event) 50 | tag = event["tags"] || "log" 51 | epochtime = event["@timestamp"].to_i 52 | @on_event.call(MessagePack.pack([ tag, epochtime, event.to_hash ])) 53 | end # def encode 54 | 55 | end # class LogStash::Codecs::Fluent 56 | -------------------------------------------------------------------------------- /lib/logstash/util/charset.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/namespace" 3 | require "logstash/util" 4 | 5 | class LogStash::Util::Charset 6 | attr_accessor :logger 7 | def initialize(charset) 8 | @charset = charset 9 | end 10 | 11 | def convert(data) 12 | data.force_encoding(@charset) 13 | if @charset == "UTF-8" 14 | # Some users don't know the charset of their logs or just don't know they 15 | # can set the charset setting. 16 | if !data.valid_encoding? 17 | @logger.warn("Received an event that has a different character encoding than you configured.", :text => data.inspect[1..-2], :expected_charset => @charset) 18 | #if @force_lossy_charset_conversion 19 | ## Janky hack to force ruby to re-encode UTF-8 with replacement chars. 20 | #data.force_encoding("CP65001") 21 | #data = data.encode("UTF-8", :invalid => :replace, :undef => :replace) 22 | #else 23 | #end 24 | 25 | # A silly hack to help convert some of the unknown bytes to 26 | # somewhat-readable escape codes. The [1..-2] is to trim the quotes 27 | # ruby puts on the value. 28 | data = data.inspect[1..-2] 29 | else 30 | # The user has declared the character encoding of this data is 31 | # something other than UTF-8. Let's convert it (as cleanly as possible) 32 | # into UTF-8 so we can use it with JSON, etc. 
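      # For example (a sketch), a CP1252 input such as "caf\xE9" comes out of
      # the encode call below as "café"; bytes that cannot be converted are
      # replaced rather than raising an exception.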
33 | data = data.encode("UTF-8", :invalid => :replace, :undef => :replace) 34 | end 35 | end 36 | return data 37 | end # def convert 38 | end # class LogStash::Util::Charset 39 | 40 | -------------------------------------------------------------------------------- /lib/logstash/outputs/lumberjack.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | class LogStash::Outputs::Lumberjack < LogStash::Outputs::Base 3 | 4 | config_name "lumberjack" 5 | milestone 1 6 | 7 | # list of addresses lumberjack can send to 8 | config :hosts, :validate => :array, :required => true 9 | 10 | # the port to connect to 11 | config :port, :validate => :number, :required => true 12 | 13 | # ssl certificate to use 14 | config :ssl_certificate, :validate => :path, :required => true 15 | 16 | # window size 17 | config :window_size, :validate => :number, :default => 5000 18 | 19 | public 20 | def register 21 | require 'lumberjack/client' 22 | connect 23 | end # def register 24 | 25 | public 26 | def receive(event) 27 | return unless output?(event) 28 | begin 29 | @client.write(event.to_hash) 30 | rescue Exception => e 31 | @logger.error("Client write error", :e => e, :backtrace => e.backtrace) 32 | connect 33 | retry 34 | end 35 | end # def receive 36 | 37 | private 38 | def connect 39 | @logger.info("Connecting to lumberjack server.", :addresses => @hosts, :port => @port, 40 | :ssl_certificate => @ssl_certificate, :window_size => @window_size) 41 | begin 42 | @client = Lumberjack::Client.new(:addresses => @hosts, :port => @port, 43 | :ssl_certificate => @ssl_certificate, :window_size => @window_size) 44 | rescue Exception => e 45 | @logger.error("All hosts unavailable, sleeping", :hosts => @hosts, :e => e, 46 | :backtrace => e.backtrace) 47 | sleep(10) 48 | retry 49 | end 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /lib/logstash/codecs/plain.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | require "logstash/util/charset" 4 | 5 | # The "plain" codec is for plain text with no delimiting between events. 6 | # 7 | # This is mainly useful on inputs and outputs that already have a defined 8 | # framing in their transport protocol (such as zeromq, rabbitmq, redis, etc) 9 | class LogStash::Codecs::Plain < LogStash::Codecs::Base 10 | config_name "plain" 11 | milestone 3 12 | 13 | # Set the message you which to emit for each event. This supports sprintf 14 | # strings. 15 | # 16 | # This setting only affects outputs (encoding of events). 17 | config :format, :validate => :string 18 | 19 | # The character encoding used in this input. Examples include "UTF-8" 20 | # and "cp1252" 21 | # 22 | # This setting is useful if your log files are in Latin-1 (aka cp1252) 23 | # or in another character set other than UTF-8. 24 | # 25 | # This only affects "plain" format logs since json is UTF-8 already. 26 | config :charset, :validate => ::Encoding.name_list, :default => "UTF-8" 27 | 28 | public 29 | def register 30 | @converter = LogStash::Util::Charset.new(@charset) 31 | @converter.logger = @logger 32 | end 33 | 34 | public 35 | def decode(data) 36 | yield LogStash::Event.new("message" => @converter.convert(data)) 37 | end # def decode 38 | 39 | public 40 | def encode(data) 41 | if data.is_a? 
LogStash::Event and @format 42 | @on_event.call(data.sprintf(@format)) 43 | else 44 | @on_event.call(data.to_s) 45 | end 46 | end # def encode 47 | 48 | end # class LogStash::Codecs::Plain 49 | -------------------------------------------------------------------------------- /lib/logstash/filters/uuid.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | require "securerandom" 5 | 6 | # The uuid filter allows you to add a UUID field to messages. 7 | # This is useful to be able to control the _id messages are indexed into Elasticsearch 8 | # with, so that you can insert duplicate messages (i.e. the same message multiple times 9 | # without creating duplicates) - for log pipeline reliability 10 | # 11 | class LogStash::Filters::Uuid < LogStash::Filters::Base 12 | config_name "uuid" 13 | milestone 2 14 | 15 | # Add a UUID to a field. 16 | # 17 | # Example: 18 | # 19 | # filter { 20 | # uuid { 21 | # field => "@uuid" 22 | # } 23 | # } 24 | config :field, :validate => :string 25 | 26 | # If the value in the field currently (if any) should be overridden 27 | # by the generated UUID. Defaults to false (i.e. if the field is 28 | # present, with ANY value, it won't be overridden) 29 | # 30 | # Example: 31 | # 32 | # filter { 33 | # uuid { 34 | # field => "@uuid" 35 | # overwrite => true 36 | # } 37 | # } 38 | config :overwrite, :validate => :boolean, :default => false 39 | 40 | public 41 | def register 42 | end # def register 43 | 44 | public 45 | def filter(event) 46 | return unless filter?(event) 47 | 48 | if overwrite 49 | event[field] = SecureRandom.uuid 50 | else 51 | event[field] ||= SecureRandom.uuid 52 | end 53 | 54 | filter_matched(event) 55 | end # def filter 56 | 57 | end # class LogStash::Filters::Uuid 58 | 59 | -------------------------------------------------------------------------------- /lib/logstash/inputs/websocket.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/inputs/base" 3 | require "logstash/namespace" 4 | require "socket" 5 | 6 | # Read events over the websocket protocol. 7 | class LogStash::Inputs::Websocket < LogStash::Inputs::Base 8 | config_name "websocket" 9 | milestone 1 10 | 11 | default :codec, "json" 12 | 13 | # The url to connect to or serve from 14 | config :url, :validate => :string, :default => "0.0.0.0" 15 | 16 | # Operate as a client or a server. 17 | # 18 | # Client mode causes this plugin to connect as a websocket client 19 | # to the URL given. It expects to receive events as websocket messages. 20 | # 21 | # (NOT IMPLEMENTED YET) Server mode causes this plugin to listen on 22 | # the given URL for websocket clients. It expects to receive events 23 | # as websocket messages from these clients. 24 | config :mode, :validate => [ "server", "client" ], :default => "client" 25 | 26 | def register 27 | require "ftw" 28 | end # def register 29 | 30 | public 31 | def run(output_queue) 32 | # TODO(sissel): Implement server mode. 
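    # Until server mode exists, this plugin only connects out as a client; a
    # minimal configuration might look like this (an illustrative sketch that
    # points at the default port of the websocket output):
    #
    #   input {
    #     websocket {
    #       url => "ws://127.0.0.1:3232/"
    #       mode => "client"
    #     }
    #   }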
33 | agent = FTW::Agent.new 34 | begin 35 | websocket = agent.websocket!(@url) 36 | websocket.each do |payload| 37 | @codec.decode(payload) do |event| 38 | decorate(event) 39 | output_queue << event 40 | end 41 | end 42 | rescue => e 43 | @logger.warn("websocket input client threw exception, restarting", 44 | :exception => e) 45 | sleep(1) 46 | retry 47 | end # begin 48 | end # def run 49 | 50 | end # class LogStash::Inputs::Websocket 51 | -------------------------------------------------------------------------------- /spec/filters/split.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/split" 3 | 4 | describe LogStash::Filters::Split do 5 | extend LogStash::RSpec 6 | 7 | describe "all defaults" do 8 | config <<-CONFIG 9 | filter { 10 | split { } 11 | } 12 | CONFIG 13 | 14 | sample "big\nbird\nsesame street" do 15 | insist { subject.length } == 3 16 | insist { subject[0]["message"] } == "big" 17 | insist { subject[1]["message"] } == "bird" 18 | insist { subject[2]["message"] } == "sesame street" 19 | end 20 | end 21 | 22 | describe "custome terminator" do 23 | config <<-CONFIG 24 | filter { 25 | split { 26 | terminator => "\t" 27 | } 28 | } 29 | CONFIG 30 | 31 | sample "big\tbird\tsesame street" do 32 | insist { subject.length } == 3 33 | insist { subject[0]["message"] } == "big" 34 | insist { subject[1]["message"] } == "bird" 35 | insist { subject[2]["message"] } == "sesame street" 36 | end 37 | end 38 | 39 | describe "custom field" do 40 | config <<-CONFIG 41 | filter { 42 | split { 43 | field => "custom" 44 | } 45 | } 46 | CONFIG 47 | 48 | sample("custom" => "big\nbird\nsesame street", "do_not_touch" => "1\n2\n3") do 49 | insist { subject.length } == 3 50 | subject.each do |s| 51 | insist { s["do_not_touch"] } == "1\n2\n3" 52 | end 53 | insist { subject[0]["custom"] } == "big" 54 | insist { subject[1]["custom"] } == "bird" 55 | insist { subject[2]["custom"] } == "sesame street" 56 | end 57 | end 58 | end 59 | -------------------------------------------------------------------------------- /lib/logstash/inputs/eventlog/racob_fix.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | # The jruby-win32ole gem uses 'java.lang.System.set_property' to 3 | # tell java(?) where to find the racob dll. 4 | # 5 | # However, it fails when it tries to load the racob dll from the jar 6 | # (UnsatisfiedLinkError). 7 | # 8 | # So easy fix, right? Monkeypatch the set_property to do two things: 9 | # - extract the racob dll somewhere 10 | # - set the property to the extracted path 11 | # 12 | 13 | require "fileutils" 14 | require "tmpdir" 15 | 16 | class << java.lang.System 17 | alias_method :set_property_seriously, :set_property 18 | def set_property(key, value) 19 | if key == "racob.dll.path" && value =~ /file:.*\.jar!\// 20 | # Path is set in a jar, we'll need to extract it to a 21 | # temporary location, then load it. 22 | filename = File.basename(value) 23 | extracted_path = File.join(Dir.tmpdir, filename) 24 | # FileUtils.cp (and copy, and copy_file) are broken 25 | # when copying this file. I have not debugged it other 26 | # than to see it doesn't work. racob_x86.dll is 105kb, 27 | # but FileUtils.cp only copies 4kb of it. 28 | 29 | # open both files in 'binary' mode (sets encoding to BINARY aka 30 | # ASCII-8BIT). 
31 | input = File.new(value, "rb") 32 | output = File.new(extracted_path, "wb") 33 | while chunk = input.read(16384) 34 | output.write(chunk) 35 | end 36 | input.close 37 | output.close 38 | 39 | return set_property_seriously(key, extracted_path) 40 | else 41 | return set_property_seriously(key, value) 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /lib/logstash/util/zeromq.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require 'ffi-rzmq' 3 | require "logstash/namespace" 4 | 5 | module LogStash::Util::ZeroMQ 6 | CONTEXT = ZMQ::Context.new 7 | # LOGSTASH-400 8 | # see https://github.com/chuckremes/ffi-rzmq/blob/master/lib/ffi-rzmq/socket.rb#L93-117 9 | STRING_OPTS = %w{IDENTITY SUBSCRIBE UNSUBSCRIBE} 10 | 11 | def context 12 | CONTEXT 13 | end 14 | 15 | def setup(socket, address) 16 | if server? 17 | error_check(socket.bind(address), "binding to #{address}") 18 | else 19 | error_check(socket.connect(address), "connecting to #{address}") 20 | end 21 | @logger.info("0mq: #{server? ? 'connected' : 'bound'}", :address => address) 22 | end 23 | 24 | def error_check(rc, doing) 25 | unless ZMQ::Util.resultcode_ok?(rc) 26 | @logger.error("ZeroMQ error while #{doing}", { :error_code => rc }) 27 | raise "ZeroMQ Error while #{doing}" 28 | end 29 | end # def error_check 30 | 31 | def setopts(socket, options) 32 | options.each do |opt,value| 33 | sockopt = opt.split('::')[1] 34 | option = ZMQ.const_defined?(sockopt) ? ZMQ.const_get(sockopt) : ZMQ.const_missing(sockopt) 35 | unless STRING_OPTS.include?(sockopt) 36 | begin 37 | Float(value) 38 | value = value.to_i 39 | rescue ArgumentError 40 | raise "#{sockopt} requires a numeric value. #{value} is not numeric" 41 | end 42 | end # end unless 43 | error_check(socket.setsockopt(option, value), 44 | "while setting #{opt} == #{value}") 45 | end # end each 46 | end # end setopts 47 | end # module LogStash::Util::ZeroMQ 48 | -------------------------------------------------------------------------------- /lib/logstash/filters/json_encode.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | # JSON encode filter. Takes a field and serializes it into JSON 6 | # 7 | # If no target is specified, the source field is overwritten with the JSON 8 | # text. 9 | # 10 | # For example, if you have a field named 'foo', and you want to store the 11 | # JSON encoded string in 'bar', do this: 12 | # 13 | # filter { 14 | # json_encode { 15 | # source => "foo" 16 | # target => "bar" 17 | # } 18 | # } 19 | class LogStash::Filters::JSONEncode < LogStash::Filters::Base 20 | 21 | config_name "json_encode" 22 | milestone 2 23 | 24 | # The field to convert to JSON. 25 | config :source, :validate => :string, :required => true 26 | 27 | # The field to write the JSON into. If not specified, the source 28 | # field will be overwritten. 29 | config :target, :validate => :string 30 | 31 | public 32 | def register 33 | @target = @source if @target.nil? 
34 | end # def register 35 | 36 | public 37 | def filter(event) 38 | return unless filter?(event) 39 | 40 | @logger.debug("Running JSON encoder", :event => event) 41 | 42 | begin 43 | event[@target] = JSON.pretty_generate(event[@source]) 44 | filter_matched(event) 45 | rescue => e 46 | event.tag "_jsongeneratefailure" 47 | @logger.warn("Trouble encoding JSON", :source => @source, :raw => event[@source].inspect, :exception => e) 48 | end 49 | 50 | @logger.debug? && @logger.debug("Event after JSON encoder", :event => event) 51 | end # def filter 52 | end # class LogStash::Filters::JSONEncode 53 | -------------------------------------------------------------------------------- /lib/logstash/codecs/oldlogstashjson.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | 4 | class LogStash::Codecs::OldLogStashJSON < LogStash::Codecs::Base 5 | config_name "oldlogstashjson" 6 | milestone 1 7 | 8 | public 9 | def decode(data) 10 | obj = JSON.parse(data.force_encoding("UTF-8")) 11 | 12 | h = {} 13 | 14 | # Convert the old logstash schema to the new one. 15 | basics = %w(@message @tags @type) 16 | basics.each do |key| 17 | # Convert '@message' to 'message', etc 18 | h[key[1..-1]] = obj[key] if obj.include?(key) 19 | end 20 | 21 | # fix other mappings 22 | h["host"] = obj["@source_host"] 23 | h["path"] = obj["@source_path"] 24 | # Note: @source is gone and has no similar field. 25 | 26 | h["@timestamp"] = obj["@timestamp"] if obj.include?("@timestamp") 27 | 28 | h.merge!(obj["@fields"]) if obj["@fields"].is_a?(Hash) 29 | yield LogStash::Event.new(h) 30 | end # def decode 31 | 32 | public 33 | def encode(data) 34 | h = {} 35 | 36 | h["@timestamp"] = data["@timestamp"] 37 | h["@message"] = data["message"] if data.include?("message") 38 | h["@source_host"] = data["source_host"] if data.include?("source_host") 39 | # Convert the old logstash schema to the new one. 40 | basics = %w(@timestamp @message @source_host @source_path @source 41 | @tags @type) 42 | basics.each do |key| 43 | h[key] = data[key] if data.include?(key) 44 | end 45 | 46 | h.merge!(data["@fields"]) if data["@fields"].is_a?(Hash) 47 | @on_event.call(h.to_json) 48 | end # def encode 49 | 50 | end # class LogStash::Codecs::OldLogStashJSON 51 | -------------------------------------------------------------------------------- /misc/screencast/run.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # 3 | 4 | require "rubygems" 5 | 6 | #if ENV["DISPLAY"] != ":1" 7 | #puts "$DISPLAY is wrong." 8 | #exit 1 9 | #end 10 | 11 | def type(string) 12 | system("xdotool", "type", "--clearmodifiers", "--delay", "100", string) 13 | puts "Typing: #{string}" 14 | #puts string.inspect 15 | #$stdout.flush 16 | end 17 | 18 | def run(string) 19 | command = string[3..-1].chomp 20 | system(command) 21 | end 22 | 23 | def key(string) 24 | keyseq = string[3..-1].chomp.split(/ +/) 25 | system("xdotool", "key", "--clearmodifiers", *keyseq) 26 | puts keyseq.inspect 27 | #puts string.inspect 28 | #$stdout.flush 29 | end 30 | 31 | handlers = [ 32 | [/^[,]/m, proc { |s| type(s); sleep(0.4) } ], # comma 33 | [/^[.;:?!]+/m, proc { |s| type(s); sleep(1) } ], # punctuation 34 | [/^[\n]{2}/m, proc { |s| type(s); sleep(1) } ], # new paragraph 35 | #[/^[\n](?! 
*[*-])/m, proc { |s| type(" ") } ], # continuation of a paragraph 36 | #[/^[\n](?= *[*-])/m, proc { |s| type("\n") } ], # lists or other itemized things 37 | [/^[\n]/m, proc { |s| type(s) } ], # lists or other itemized things 38 | [/^%E[^\n]*\n/m, proc { |s| run(s) } ], # execute a command 39 | [/^%K[^\n]*\n/m, proc { |s| key(s) } ], # type a specific keystroke 40 | [/^[^,.;:?!\n]+/m, proc { |s| type(s) } ], # otherwise just type it 41 | ] 42 | 43 | data = $stdin.read 44 | while data.length > 0 45 | match, func = handlers.collect { |re, f| [re.match(data), f] }\ 46 | .select { |m,f| m.begin(0) == 0 rescue false }.first 47 | str = match.to_s 48 | func.call(str) 49 | $stdout.flush 50 | #sleep 3 51 | data = data[match.end(0)..-1] 52 | end 53 | -------------------------------------------------------------------------------- /lib/logstash/filters/urldecode.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | require "uri" 5 | 6 | # The urldecode filter is for decoding fields that are urlencoded. 7 | class LogStash::Filters::Urldecode < LogStash::Filters::Base 8 | config_name "urldecode" 9 | milestone 2 10 | 11 | # The field which value is urldecoded 12 | config :field, :validate => :string, :default => "message" 13 | 14 | # Urldecode all fields 15 | config :all_fields, :validate => :boolean, :default => false 16 | 17 | public 18 | def register 19 | # Nothing to do 20 | end #def register 21 | 22 | public 23 | def filter(event) 24 | return unless filter?(event) 25 | 26 | # If all_fields is true then try to decode them all 27 | if @all_fields 28 | event.to_hash.each do |name, value| 29 | event[name] = urldecode(value) 30 | end 31 | # Else decode the specified field 32 | else 33 | event[@field] = urldecode(event[@field]) 34 | end 35 | filter_matched(event) 36 | end # def filter 37 | 38 | # Attempt to handle string, array, and hash values for fields. 39 | # For all other datatypes, just return, URI.unescape doesn't support them. 40 | private 41 | def urldecode(value) 42 | case value 43 | when String 44 | return URI.unescape(value) 45 | when Array 46 | ret_values = [] 47 | value.each { |v| ret_values << urldecode(v) } 48 | return ret_values 49 | when Hash 50 | ret_values = {} 51 | value.each { |k,v| ret_values[k] = urldecode(v) } 52 | return ret_values 53 | else 54 | return value 55 | end 56 | end 57 | end # class LogStash::Filters::Urldecode 58 | -------------------------------------------------------------------------------- /lib/logstash/outputs/stdout.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/outputs/base" 3 | require "logstash/namespace" 4 | 5 | class LogStash::Outputs::Stdout < LogStash::Outputs::Base 6 | begin 7 | require "ap" 8 | rescue LoadError 9 | end 10 | 11 | config_name "stdout" 12 | milestone 3 13 | 14 | default :codec, "line" 15 | 16 | # Enable debugging. Tries to pretty-print the entire event object. 17 | config :debug, :validate => :boolean, :default => false 18 | 19 | # Debug output format: ruby (default), json 20 | config :debug_format, :default => "ruby", :validate => ["ruby", "dots", "json"], :deprecated => true 21 | 22 | # The message to emit to stdout. 23 | config :message, :validate => :string, :deprecated => "You can use the 'line' codec instead. 
For example: output { stdout { codec => line { format => \"%{somefield} your message\" } } }" 24 | 25 | public 26 | def register 27 | if @debug 28 | require "logstash/codecs/rubydebug" 29 | require "logstash/codecs/dots" 30 | require "logstash/codecs/json" 31 | case @debug_format 32 | when "ruby"; @codec = LogStash::Codecs::RubyDebug.new 33 | when "json"; @codec = LogStash::Codecs::JSON.new 34 | when "dots"; @codec = LogStash::Codecs::Dots.new 35 | end 36 | elsif @message 37 | @codec = LogStash::Codecs::Line.new("format" => @message) 38 | end 39 | @codec.on_event do |event| 40 | $stdout.write(event) 41 | end 42 | end 43 | 44 | def receive(event) 45 | return unless output?(event) 46 | if event == LogStash::SHUTDOWN 47 | finished 48 | return 49 | end 50 | @codec.encode(event) 51 | end 52 | 53 | end # class LogStash::Outputs::Stdout 54 | -------------------------------------------------------------------------------- /lib/logstash/inputs/heroku.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/inputs/base" 3 | require "logstash/namespace" 4 | 5 | # Stream events from a heroku app's logs. 6 | # 7 | # This will read events in a manner similar to how the `heroku logs -t` command 8 | # fetches logs. 9 | # 10 | # Recommended filters: 11 | # 12 | # filter { 13 | # grok { 14 | # pattern => "^%{TIMESTAMP_ISO8601:timestamp} %{WORD:component}\[%{WORD:process}(?:\.%{INT:instance:int})?\]: %{DATA:message}$" 15 | # } 16 | # date { timestamp => ISO8601 } 17 | # } 18 | class LogStash::Inputs::Heroku < LogStash::Inputs::Base 19 | config_name "heroku" 20 | milestone 1 21 | 22 | default :codec, "plain" 23 | 24 | # The name of your heroku application. This is usually the first part of the 25 | # the domain name 'my-app-name.herokuapp.com' 26 | config :app, :validate => :string, :required => true 27 | 28 | public 29 | def register 30 | require "heroku" 31 | require "logstash/util/buftok" 32 | end # def register 33 | 34 | public 35 | def run(queue) 36 | client = Heroku::Client.new(Heroku::Auth.user, Heroku::Auth.password) 37 | 38 | # The 'Herok::Client#read_logs' method emits chunks of text not bounded 39 | # by event barriers like newlines. 40 | # tail=1 means to follow logs 41 | # I *think* setting num=1 means we only get 1 historical event. Setting 42 | # this to 0 makes it fetch *all* events, not what I want. 43 | client.read_logs(@app, ["tail=1", "num=1"]) do |chunk| 44 | @codec.decode(chunk) do |event| 45 | decorate(event) 46 | event["app"] = @app 47 | queue << event 48 | end 49 | end 50 | end # def run 51 | end # class LogStash::Inputs::Heroku 52 | -------------------------------------------------------------------------------- /lib/logstash/inputs/drupal_dblog/jdbcconnection.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "java" 3 | require "rubygems" 4 | require "jdbc/mysql" 5 | 6 | java_import "com.mysql.jdbc.Driver" 7 | 8 | # A JDBC mysql connection class. 9 | # The interface is compatible with the mysql2 API. 
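#
# Usage sketch (the host, credentials, and query below are made-up examples):
#
#   conn = LogStash::DrupalDblogJavaMysqlConnection.new(
#     "db.example.com", "drupal", "secret", "drupal_site")
#   conn.query("SELECT wid, type, message FROM watchdog").each do |row|
#     puts "#{row['wid']}: #{row['message']}"
#   end
#   conn.close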
10 | class LogStash::DrupalDblogJavaMysqlConnection 11 | 12 | def initialize(host, username, password, database, port = nil) 13 | port ||= 3306 14 | 15 | address = "jdbc:mysql://#{host}:#{port}/#{database}" 16 | @connection = java.sql.DriverManager.getConnection(address, username, password) 17 | end # def initialize 18 | 19 | def query(sql) 20 | if sql =~ /select/i 21 | return select(sql) 22 | else 23 | return update(sql) 24 | end 25 | end # def query 26 | 27 | def select(sql) 28 | stmt = @connection.createStatement 29 | resultSet = stmt.executeQuery(sql) 30 | 31 | meta = resultSet.getMetaData 32 | column_count = meta.getColumnCount 33 | 34 | rows = [] 35 | 36 | while resultSet.next 37 | res = {} 38 | 39 | (1..column_count).each do |i| 40 | name = meta.getColumnName(i) 41 | case meta.getColumnType(i) 42 | when java.sql.Types::INTEGER 43 | res[name] = resultSet.getInt(name) 44 | else 45 | res[name] = resultSet.getString(name) 46 | end 47 | end 48 | 49 | rows << res 50 | end 51 | 52 | stmt.close 53 | return rows 54 | end # def select 55 | 56 | def update(sql) 57 | stmt = @connection.createStatement 58 | stmt.execute_update(sql) 59 | stmt.close 60 | end # def update 61 | 62 | def close 63 | @connection.close 64 | end # def close 65 | 66 | end # class LogStash::DrupalDblogJavaMysqlConnection 67 | -------------------------------------------------------------------------------- /spec/inputs/redis.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "redis" 3 | 4 | def populate(key, event_count) 5 | require "logstash/event" 6 | redis = Redis.new(:host => "localhost") 7 | event_count.times do |value| 8 | event = LogStash::Event.new("sequence" => value) 9 | Stud::try(10.times) do 10 | redis.rpush(key, event.to_json) 11 | end 12 | end 13 | end 14 | 15 | def process(pipeline, queue, event_count) 16 | sequence = 0 17 | Thread.new { pipeline.run } 18 | event_count.times do |i| 19 | event = queue.pop 20 | insist { event["sequence"] } == i 21 | end 22 | pipeline.shutdown 23 | end # process 24 | 25 | describe "inputs/redis" do 26 | extend LogStash::RSpec 27 | 28 | describe "read events from a list" do 29 | key = 10.times.collect { rand(10).to_s }.join("") 30 | event_count = 1000 + rand(50) 31 | config <<-CONFIG 32 | input { 33 | redis { 34 | type => "blah" 35 | key => "#{key}" 36 | data_type => "list" 37 | } 38 | } 39 | CONFIG 40 | 41 | before(:each) { populate(key, event_count) } 42 | 43 | input { |pipeline, queue| process(pipeline, queue, event_count) } 44 | end 45 | 46 | describe "read events from a list with batch_count=5" do 47 | key = 10.times.collect { rand(10).to_s }.join("") 48 | event_count = 1000 + rand(50) 49 | config <<-CONFIG 50 | input { 51 | redis { 52 | type => "blah" 53 | key => "#{key}" 54 | data_type => "list" 55 | batch_count => #{rand(20)+1} 56 | } 57 | } 58 | CONFIG 59 | 60 | before(:each) { populate(key, event_count) } 61 | input { |pipeline, queue| process(pipeline, queue, event_count) } 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /lib/logstash/inputs/lumberjack.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/inputs/base" 3 | require "logstash/namespace" 4 | 5 | # Receive events using the lumberjack protocol. 
6 | # 7 | # This is mainly to receive events shipped with lumberjack, 8 | # 9 | class LogStash::Inputs::Lumberjack < LogStash::Inputs::Base 10 | 11 | config_name "lumberjack" 12 | milestone 1 13 | 14 | default :codec, "plain" 15 | 16 | # the address to listen on. 17 | config :host, :validate => :string, :default => "0.0.0.0" 18 | 19 | # the port to listen on. 20 | config :port, :validate => :number, :required => true 21 | 22 | # ssl certificate to use 23 | config :ssl_certificate, :validate => :path, :required => true 24 | 25 | # ssl key to use 26 | config :ssl_key, :validate => :path, :required => true 27 | 28 | # ssl key passphrase to use 29 | config :ssl_key_passphrase, :validate => :password 30 | 31 | # TODO(sissel): Add CA to authenticate clients with. 32 | 33 | public 34 | def register 35 | require "lumberjack/server" 36 | 37 | @logger.info("Starting lumberjack input listener", :address => "#{@host}:#{@port}") 38 | @lumberjack = Lumberjack::Server.new(:address => @host, :port => @port, 39 | :ssl_certificate => @ssl_certificate, :ssl_key => @ssl_key, 40 | :ssl_key_passphrase => @ssl_key_passphrase) 41 | end # def register 42 | 43 | public 44 | def run(output_queue) 45 | @lumberjack.run do |l| 46 | @codec.decode(l.delete("line")) do |event| 47 | decorate(event) 48 | l.each { |k,v| event[k] = v; v.force_encoding("UTF-8") } 49 | output_queue << event 50 | end 51 | end 52 | end # def run 53 | end # class LogStash::Inputs::Lumberjack 54 | -------------------------------------------------------------------------------- /lib/logstash/filters/gelfify.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | # The GELFify filter parses RFC3164 severity levels to 6 | # corresponding GELF levels. 7 | class LogStash::Filters::Gelfify < LogStash::Filters::Base 8 | config_name "gelfify" 9 | milestone 2 10 | 11 | SYSLOG_LEVEL_MAP = { 12 | 0 => 3, # Emergency => FATAL 13 | 1 => 5, # Alert => WARN 14 | 2 => 3, # Critical => FATAL 15 | 3 => 4, # Error => ERROR 16 | 4 => 5, # Warning => WARN 17 | 5 => 6, # Notice => INFO 18 | 6 => 6, # Informat. => INFO 19 | 7 => 7 # Debug => DEBUG 20 | } 21 | 22 | public 23 | def register 24 | # nothing 25 | end # def register 26 | 27 | public 28 | def filter(event) 29 | return unless event["type"] == @type 30 | @logger.debug("GELFIFY FILTER: received event of type #{event["type"]}") 31 | 32 | if event.include?("severity") 33 | sev = event["severity"].to_i rescue nil 34 | if sev.to_s != event["severity"].to_s 35 | # severity isn't convertable to an integer. 36 | # "foo".to_i => 0, which would default to EMERG. 37 | @logger.debug("GELFIFY FILTER: existing severity field is not an int") 38 | elsif SYSLOG_LEVEL_MAP[sev] 39 | @logger.debug("GELFIFY FILTER: Severity level successfully mapped") 40 | event["GELF_severity"] = SYSLOG_LEVEL_MAP[sev] 41 | else 42 | @logger.debug("GELFIFY FILTER: unknown severity #{sev}") 43 | end 44 | else 45 | @logger.debug("GELFIFY FILTER: No 'severity' field found") 46 | end 47 | 48 | if !event.cancelled? 
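      # apply the filter's add_tag/add_field decorations only when the event
      # has not been cancelled by an earlier filter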
49 | filter_matched(event) 50 | end 51 | end # def filter 52 | end # class LogStash::Filters::Gelfify 53 | -------------------------------------------------------------------------------- /lib/logstash/inputs/pipe.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/inputs/base" 3 | require "logstash/namespace" 4 | require "socket" # for Socket.gethostname 5 | 6 | # Stream events from a long running command pipe. 7 | # 8 | # By default, each event is assumed to be one line. If you 9 | # want to join lines, you'll want to use the multiline filter. 10 | # 11 | class LogStash::Inputs::Pipe < LogStash::Inputs::Base 12 | config_name "pipe" 13 | milestone 1 14 | 15 | # TODO(sissel): This should switch to use the 'line' codec by default 16 | # once we switch away from doing 'readline' 17 | default :codec, "plain" 18 | 19 | # Command to run and read events from, one line at a time. 20 | # 21 | # Example: 22 | # 23 | # command => "echo hello world" 24 | config :command, :validate => :string, :required => true 25 | 26 | public 27 | def register 28 | @logger.info("Registering pipe input", :command => @command) 29 | end # def register 30 | 31 | public 32 | def run(queue) 33 | begin 34 | @pipe = IO.popen(@command, mode="r") 35 | hostname = Socket.gethostname 36 | 37 | @pipe.each do |line| 38 | line = line.chomp 39 | source = "pipe://#{hostname}/#{@command}" 40 | @logger.debug? && @logger.debug("Received line", :command => @command, :line => line) 41 | @codec.decode(line) do |event| 42 | event["host"] = hostname 43 | event["command"] = @command 44 | decorate(event) 45 | queue << event 46 | end 47 | end 48 | rescue Exception => e 49 | @logger.error("Exception while running command", :e => e, :backtrace => e.backtrace) 50 | sleep(10) 51 | retry 52 | end 53 | end # def run 54 | end # class LogStash::Inputs::Pipe 55 | -------------------------------------------------------------------------------- /lib/logstash/filters/checksum.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | require "yaml" 5 | 6 | # This filter let's you create a checksum based on various parts 7 | # of the logstash event. 8 | # This can be useful for deduplication of messages or simply to provide 9 | # a custom unique identifier. 
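#
# A hypothetical configuration (the key list is illustrative); the resulting
# digest is written to the 'logstash_checksum' field:
#
#     filter {
#       checksum {
#         keys => ["message", "host", "type"]
#         algorithm => "sha256"
#       }
#     }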
10 | # 11 | # This is VERY experimental and is largely a proof-of-concept 12 | class LogStash::Filters::Checksum < LogStash::Filters::Base 13 | 14 | config_name "checksum" 15 | milestone 1 16 | 17 | ALGORITHMS = ["md5", "sha", "sha1", "sha256", "sha384",] 18 | 19 | # A list of keys to use in creating the string to checksum 20 | # Keys will be sorted before building the string 21 | # keys and values will then be concatenated with pipe delimeters 22 | # and checksummed 23 | config :keys, :validate => :array, :default => ["message", "@timestamp", "type"] 24 | 25 | config :algorithm, :validate => ALGORITHMS, :default => "sha256" 26 | 27 | public 28 | def register 29 | require 'openssl' 30 | @to_checksum = "" 31 | end 32 | 33 | public 34 | def filter(event) 35 | return unless filter?(event) 36 | 37 | @logger.debug("Running checksum filter", :event => event) 38 | 39 | @keys.sort.each do |k| 40 | @logger.debug("Adding key to string", :current_key => k) 41 | @to_checksum << "|#{k}|#{event[k]}" 42 | end 43 | @to_checksum << "|" 44 | @logger.debug("Final string built", :to_checksum => @to_checksum) 45 | 46 | digested_string = OpenSSL::Digest.hexdigest(@algorithm, @to_checksum) 47 | @logger.debug("Digested string", :digested_string => digested_string) 48 | event['logstash_checksum'] = digested_string 49 | end 50 | end # class LogStash::Filters::Checksum 51 | -------------------------------------------------------------------------------- /pkg/debian/debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | # -*- makefile -*- 3 | 4 | # Uncomment this to turn on verbose mode. 5 | #export DH_VERBOSE=1 6 | 7 | 8 | configure: configure-stamp 9 | configure-stamp: 10 | dh_testdir 11 | # Add here commands to configure the package. 12 | 13 | touch configure-stamp 14 | 15 | 16 | build: build-stamp 17 | 18 | build-stamp: configure-stamp 19 | dh_testdir 20 | 21 | # Add here commands to compile the package. 22 | 23 | touch $@ 24 | 25 | clean: 26 | dh_testdir 27 | dh_testroot 28 | rm -f build-stamp configure-stamp 29 | 30 | # Add here commands to clean up after the build process. 31 | 32 | dh_clean 33 | 34 | install: build 35 | dh_testdir 36 | dh_testroot 37 | dh_clean -k 38 | dh_installdirs 39 | dh_install 40 | 41 | # Add here commands to install the package into debian/jenkins. 42 | 43 | # Moved creation of dirs to dirs template to be invoked by dh_installdirs 44 | 45 | # Build architecture-independent files here. 46 | binary-indep: build install 47 | # We have nothing to do by default. 48 | 49 | # Build architecture-dependent files here. 
50 | binary-arch: build install 51 | dh_testdir 52 | dh_testroot 53 | dh_installchangelogs 54 | dh_installdocs 55 | dh_installexamples 56 | # dh_installmenu 57 | # dh_installdebconf 58 | dh_installlogrotate 59 | # dh_installemacsen 60 | # dh_installpam 61 | # dh_installmime 62 | # dh_python 63 | dh_installinit 64 | # dh_installcron 65 | # dh_installinfo 66 | dh_installman 67 | dh_link 68 | dh_strip 69 | dh_compress 70 | dh_fixperms 71 | # dh_perl 72 | # dh_makeshlibs 73 | dh_installdeb 74 | dh_shlibdeps 75 | dh_gencontrol 76 | dh_md5sums 77 | dh_builddeb 78 | 79 | binary: binary-indep binary-arch 80 | .PHONY: build clean binary-indep binary-arch binary install configure 81 | -------------------------------------------------------------------------------- /lib/logstash/codecs/line.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | require "logstash/util/charset" 4 | 5 | # Line-oriented text data. 6 | # 7 | # Decoding behavior: Only whole line events will be emitted. 8 | # 9 | # Encoding behavior: Each event will be emitted with a trailing newline. 10 | class LogStash::Codecs::Line < LogStash::Codecs::Base 11 | config_name "line" 12 | milestone 3 13 | 14 | # Set the desired text format for encoding. 15 | config :format, :validate => :string 16 | 17 | # The character encoding used in this input. Examples include "UTF-8" 18 | # and "cp1252" 19 | # 20 | # This setting is useful if your log files are in Latin-1 (aka cp1252) 21 | # or in another character set other than UTF-8. 22 | # 23 | # This only affects "plain" format logs since json is UTF-8 already. 24 | config :charset, :validate => ::Encoding.name_list, :default => "UTF-8" 25 | 26 | public 27 | def register 28 | require "logstash/util/buftok" 29 | @buffer = FileWatch::BufferedTokenizer.new 30 | @converter = LogStash::Util::Charset.new(@charset) 31 | @converter.logger = @logger 32 | end 33 | 34 | public 35 | def decode(data) 36 | @buffer.extract(data).each do |line| 37 | yield LogStash::Event.new("message" => @converter.convert(line)) 38 | end 39 | end # def decode 40 | 41 | public 42 | def flush(&block) 43 | remainder = @buffer.flush 44 | if !remainder.empty? 45 | block.call(LogStash::Event.new({"message" => remainder})) 46 | end 47 | end 48 | 49 | public 50 | def encode(data) 51 | if data.is_a? LogStash::Event and @format 52 | @on_event.call(data.sprintf(@format) + "\n") 53 | else 54 | @on_event.call(data.to_s + "\n") 55 | end 56 | end # def encode 57 | 58 | end # class LogStash::Codecs::Plain 59 | -------------------------------------------------------------------------------- /spec/codecs/json_lines.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/json_lines" 2 | require "logstash/event" 3 | require "insist" 4 | 5 | describe LogStash::Codecs::JSONLines do 6 | subject do 7 | next LogStash::Codecs::JSONLines.new 8 | end 9 | 10 | context "#decode" do 11 | it "should return an event from json data" do 12 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 13 | subject.decode(data.to_json+"\n") do |event| 14 | insist { event.is_a? 
LogStash::Event } 15 | insist { event["foo"] } == data["foo"] 16 | insist { event["baz"] } == data["baz"] 17 | insist { event["bah"] } == data["bah"] 18 | end 19 | end 20 | 21 | it "should return an event from json data when a newline is recieved" do 22 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 23 | subject.decode(data.to_json) do |event| 24 | insist {false} 25 | end 26 | subject.decode("\n") do |event| 27 | insist { event.is_a? LogStash::Event } 28 | insist { event["foo"] } == data["foo"] 29 | insist { event["baz"] } == data["baz"] 30 | insist { event["bah"] } == data["bah"] 31 | end 32 | end 33 | end 34 | 35 | context "#encode" do 36 | it "should return json data" do 37 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 38 | event = LogStash::Event.new(data) 39 | got_event = false 40 | subject.on_event do |d| 41 | insist { d.chomp } == LogStash::Event.new(data).to_json 42 | insist { JSON.parse(d)["foo"] } == data["foo"] 43 | insist { JSON.parse(d)["baz"] } == data["baz"] 44 | insist { JSON.parse(d)["bah"] } == data["bah"] 45 | got_event = true 46 | end 47 | subject.encode(event) 48 | insist { got_event } 49 | end 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /spec/codecs/edn_lines.rb: -------------------------------------------------------------------------------- 1 | require "logstash/codecs/edn_lines" 2 | require "logstash/event" 3 | require "insist" 4 | require "edn" 5 | 6 | describe LogStash::Codecs::EDNLines do 7 | subject do 8 | next LogStash::Codecs::EDNLines.new 9 | end 10 | 11 | context "#decode" do 12 | it "should return an event from edn data" do 13 | data = {"foo" => "bar", "baz" => {"bah" => ["a", "b", "c"]}} 14 | subject.decode(data.to_edn + "\n") do |event| 15 | insist { event }.is_a?(LogStash::Event) 16 | insist { event["foo"] } == data["foo"] 17 | insist { event["baz"] } == data["baz"] 18 | insist { event["bah"] } == data["bah"] 19 | end 20 | end 21 | 22 | it "should return an event from edn data when a newline is recieved" do 23 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 24 | subject.decode(data.to_edn) do |event| 25 | insist {false} 26 | end 27 | subject.decode("\n") do |event| 28 | insist { event.is_a? 
LogStash::Event } 29 | insist { event["foo"] } == data["foo"] 30 | insist { event["baz"] } == data["baz"] 31 | insist { event["bah"] } == data["bah"] 32 | end 33 | end 34 | end 35 | 36 | context "#encode" do 37 | it "should return edn data" do 38 | data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}} 39 | event = LogStash::Event.new(data) 40 | got_event = false 41 | subject.on_event do |d| 42 | insist { d.chomp } == LogStash::Event.new(data).to_hash.to_edn 43 | insist { EDN.read(d)["foo"] } == data["foo"] 44 | insist { EDN.read(d)["baz"] } == data["baz"] 45 | insist { EDN.read(d)["bah"] } == data["bah"] 46 | got_event = true 47 | end 48 | subject.encode(event) 49 | insist { got_event } 50 | end 51 | end 52 | 53 | end 54 | -------------------------------------------------------------------------------- /spec/filters/geoip.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/geoip" 3 | 4 | describe LogStash::Filters::GeoIP do 5 | extend LogStash::RSpec 6 | describe "defaults" do 7 | config <<-CONFIG 8 | filter { 9 | geoip { 10 | source => "ip" 11 | #database => "vendor/geoip/GeoLiteCity.dat" 12 | } 13 | } 14 | CONFIG 15 | 16 | sample("ip" => "8.8.8.8") do 17 | insist { subject }.include?("geoip") 18 | 19 | expected_fields = %w(ip country_code2 country_code3 country_name 20 | continent_code region_name city_name postal_code 21 | latitude longitude dma_code area_code timezone) 22 | expected_fields.each do |f| 23 | insist { subject["geoip"] }.include?(f) 24 | end 25 | end 26 | 27 | sample("ip" => "127.0.0.1") do 28 | # assume geoip fails on localhost lookups 29 | reject { subject }.include?("geoip") 30 | end 31 | end 32 | 33 | describe "Specify the target" do 34 | config <<-CONFIG 35 | filter { 36 | geoip { 37 | source => "ip" 38 | #database => "vendor/geoip/GeoLiteCity.dat" 39 | target => src_ip 40 | } 41 | } 42 | CONFIG 43 | 44 | sample("ip" => "8.8.8.8") do 45 | insist { subject }.include?("src_ip") 46 | 47 | expected_fields = %w(ip country_code2 country_code3 country_name 48 | continent_code region_name city_name postal_code 49 | latitude longitude dma_code area_code timezone) 50 | expected_fields.each do |f| 51 | insist { subject["src_ip"] }.include?(f) 52 | end 53 | end 54 | 55 | sample("ip" => "127.0.0.1") do 56 | # assume geoip fails on localhost lookups 57 | reject { subject }.include?("src_ip") 58 | end 59 | end 60 | 61 | end 62 | -------------------------------------------------------------------------------- /lib/logstash/filters/metaevent.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/filters/base" 3 | require "logstash/namespace" 4 | 5 | class LogStash::Filters::Metaevent < LogStash::Filters::Base 6 | config_name "metaevent" 7 | milestone 1 8 | 9 | # syntax: `followed_by_tags => [ "tag", "tag" ]` 10 | config :followed_by_tags, :validate => :array, :required => true 11 | 12 | # syntax: `period => 60` 13 | config :period, :validate => :number, :default => 5 14 | 15 | def register 16 | @logger.debug("registering") 17 | @metaevents = [] 18 | end 19 | 20 | def filter(event) 21 | if filter?(event) 22 | start_period(event) 23 | elsif within_period(event) 24 | if followed_by_tags_match(event) 25 | trigger(event) 26 | else 27 | @logger.debug(["metaevent", @add_tag, "ignoring (tags don't match)", event]) 28 | end 29 | else 30 | @logger.debug(["metaevent", @add_tag, "ignoring (not in period)", event]) 31 | end 32 | end 
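  # flush is polled periodically by the pipeline; it hands back any meta
  # events generated so far and clears the buffer so each is emitted once.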
33 | 34 | def flush 35 | return if @metaevents.empty? 36 | 37 | new_events = @metaevents 38 | @metaevents = [] 39 | new_events 40 | end 41 | 42 | private 43 | 44 | def start_period(event) 45 | @logger.debug(["metaevent", @add_tag, "start_period", event]) 46 | @start_event = event 47 | end 48 | 49 | def trigger(event) 50 | @logger.debug(["metaevent", @add_tag, "trigger", event]) 51 | 52 | event = LogStash::Event.new 53 | event["source"] = Socket.gethostname 54 | event["tags"] = [@add_tag] 55 | 56 | @metaevents << event 57 | @start_event = nil 58 | end 59 | 60 | def followed_by_tags_match(event) 61 | (event["tags"] & @followed_by_tags).size == @followed_by_tags.size 62 | end 63 | 64 | def within_period(event) 65 | time_delta = event["@timestamp"] - @start_event["@timestamp"] 66 | time_delta >= 0 && time_delta <= @period 67 | end 68 | end 69 | -------------------------------------------------------------------------------- /lib/logstash/codecs/json_lines.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/codecs/base" 3 | require "logstash/codecs/line" 4 | require "json" 5 | 6 | # This codec will decode streamed JSON that is newline delimited. 7 | # For decoding JSON payload in the redis input for example, use the json codec instead. 8 | # Encoding will emit a single JSON string ending in a '\n' 9 | class LogStash::Codecs::JSONLines < LogStash::Codecs::Base 10 | config_name "json_lines" 11 | 12 | milestone 1 13 | 14 | # The character encoding used in this codec. Examples include "UTF-8" and 15 | # "CP1252" 16 | # 17 | # JSON requires valid UTF-8 strings, but in some cases, software that 18 | # emits JSON does so in another encoding (nxlog, for example). In 19 | # weird cases like this, you can set the charset setting to the 20 | # actual encoding of the text and logstash will convert it for you. 21 | # 22 | # For nxlog users, you'll want to set this to "CP1252" 23 | config :charset, :validate => ::Encoding.name_list, :default => "UTF-8" 24 | 25 | public 26 | def initialize(params={}) 27 | super(params) 28 | @lines = LogStash::Codecs::Line.new 29 | @lines.charset = @charset 30 | end 31 | 32 | public 33 | def decode(data) 34 | 35 | @lines.decode(data) do |event| 36 | begin 37 | yield LogStash::Event.new(JSON.parse(event["message"])) 38 | rescue JSON::ParserError => e 39 | @logger.info("JSON parse failure. Falling back to plain-text", :error => e, :data => data) 40 | yield LogStash::Event.new("message" => data) 41 | end 42 | end 43 | end # def decode 44 | 45 | public 46 | def encode(data) 47 | # Tack on a \n for now because previously most of logstash's JSON 48 | # outputs emitted one per line, and whitespace is OK in json. 
49 | @on_event.call(data.to_json + "\n") 50 | end # def encode 51 | 52 | end # class LogStash::Codecs::JSON 53 | -------------------------------------------------------------------------------- /gembag.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require "rbconfig" 4 | 5 | rubyabi = RbConfig::CONFIG["ruby_version"] 6 | target = "#{Dir.pwd}/vendor/bundle" 7 | gemdir = "#{target}/#{RUBY_ENGINE}/#{rubyabi}/" 8 | ENV["GEM_HOME"] = gemdir 9 | ENV["GEM_PATH"] = "" 10 | 11 | require "rubygems/specification" 12 | require "rubygems/commands/install_command" 13 | 14 | def install_gem(name, requirement, target) 15 | puts "Fetching and installing gem: #{name} (#{requirement})" 16 | 17 | installer = Gem::Commands::InstallCommand.new 18 | installer.options[:generate_rdoc] = false 19 | installer.options[:generate_ri] = false 20 | installer.options[:version] = requirement 21 | installer.options[:args] = [name] 22 | installer.options[:install_dir] = target 23 | 24 | # ruby 2.0.0 / rubygems 2.x; disable documentation generation 25 | installer.options[:document] = [] 26 | begin 27 | installer.execute 28 | rescue Gem::SystemExitException => e 29 | if e.exit_code != 0 30 | puts "Installation of #{name} failed" 31 | raise 32 | end 33 | end 34 | end # def install_gem 35 | 36 | # Ensure bundler is available. 37 | begin 38 | gem("bundler", ">=1.3.5") 39 | rescue Gem::LoadError => e 40 | install_gem("bundler", ">= 1.3.5", ENV["GEM_HOME"]) 41 | end 42 | 43 | require "bundler/cli" 44 | 45 | # Monkeypatch bundler to write a .lock file specific to the version of ruby. 46 | # This keeps MRI/JRuby/RBX from conflicting over the Gemfile.lock updates 47 | module Bundler 48 | module SharedHelpers 49 | def default_lockfile 50 | ruby = "#{RUBY_ENGINE}-#{RbConfig::CONFIG["ruby_version"]}" 51 | return Pathname.new("#{default_gemfile}.#{ruby}.lock") 52 | end 53 | end 54 | end 55 | 56 | if RUBY_ENGINE == "rbx" 57 | begin 58 | gem("rubysl") 59 | rescue Gem::LoadError => e 60 | install_gem("rubysl", ">= 0", ENV["GEM_HOME"]) 61 | end 62 | end 63 | 64 | Bundler::CLI.start(["install", "--gemfile=tools/Gemfile", "--path", target, "--clean"]) 65 | -------------------------------------------------------------------------------- /docs/release-engineering.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Release Engineering - logstash 3 | layout: content_right 4 | --- 5 | 6 | # logstash rel-eng. 7 | 8 | The version patterns for logstash are x.y.z 9 | 10 | * In the same x.y release, no backwards-incompatible changes will be made. 11 | * Between x.y.z and x.y.(z+1), deprecations are allowed but should be 12 | functional through the next release. 13 | * Any backwards-incompatible changes should be well-documented and, if 14 | possible, should include tools to help in migrating. 15 | * It is OK to add features, plugins, etc, in minor releases as long as they do 16 | not break existing functionality. 17 | 18 | I do not suspect the 'x' (currently 1) will change frequently. It should only change 19 | if there are major, backwards-incompatible changes made to logstash, and I'm 20 | trying to not make those changes, so logstash should forever be at 1.y,z, 21 | right? ;) 22 | 23 | # building a release. 
24 | 25 | * Make sure all tests pass (make test) 26 | * `ruby bin/logstash test` 27 | * `java -jar logstash-x.y.z-flatjar.jar test` 28 | * Update VERSION.rb 29 | * VERSION=$(ruby -r./VERSION -e 'puts LOGSTASH_VERSION') 30 | * Ensure CHANGELOG is up-to-date 31 | * `git tag v$VERSION; git push origin master; git push --tags` 32 | * Build binaries 33 | * `make jar` 34 | * make docs 35 | * copy build/docs to ../logstash.github.com/docs/$VERSION 36 | * Note: you will need to use C-ruby 1.9.2 for this. 37 | * You'll need 'bluecloth' and 'cabin' rubygems installed. 38 | * cd ../logstash.github.com 39 | * `make clean update VERSION=$VERSION` 40 | * `git add docs/$VERSION docs/latest.html index.html _layouts/*` 41 | * `git commit -m "version $VERSION docs" && git push origin master` 42 | * Publish binaries 43 | * Stage binaries at `carrera.databits.net:/home/jls/s/files/logstash/` 44 | * Update #logstash IRC /topic 45 | * Send announcement email to logstash-users@, include relevant download URLs & 46 | changelog (see past emails for a template) 47 | -------------------------------------------------------------------------------- /pkg/rpm/SOURCES/logstash.init: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # chkconfig: - 80 15 3 | ### BEGIN INIT INFO 4 | # Provides: logstash 5 | # Required-Start: $all 6 | # Required-Stop: $all 7 | # Default-Start: 8 | # Default-Stop: 0 1 6 9 | # Short-Description: Starts logstash 10 | # Description: Logstash agent 11 | ### END INIT INFO 12 | 13 | # Source function library. 14 | . /etc/rc.d/init.d/functions 15 | 16 | NAME=@@@NAME@@@ 17 | 18 | [ -f /etc/sysconfig/$NAME ] && . /etc/sysconfig/$NAME 19 | 20 | LOGSTASH_USER=${NAME} 21 | DAEMON="@@@DAEMON@@@/${NAME}" 22 | SERVICE=agent 23 | 24 | PID_FILE=${PIDFILE:-@@@PIDDIR@@@/${NAME}.pid} 25 | LOCK_FILE=${LOCKFILE:-@@@LOCKFILE@@@} 26 | LOG_FILE=${LOGFILE:-@@@LOGDIR@@@/${NAME}.log} 27 | 28 | LOGSTASH_PATH_CONF=${LOGSTASH_PATH_CONF:-@@@CONFDIR@@@} 29 | LOGSTASH_LOGLEVEL=${LOGSTASH_LOGLEVEL:-"warn"} 30 | 31 | DAEMON_OPTS="\ 32 | -P ${PID_FILE} \ 33 | -l ${LOG_FILE} \ 34 | -f ${LOGSTASH_PATH_CONF} \ 35 | -v $LOGSTASH_LOGLEVEL \ 36 | " 37 | 38 | start() { 39 | echo -n $"Starting ${NAME}: " 40 | export JAVA_OPTS="$JAVA_OPTS $LOGSTASH_JAVA_OPTS" 41 | daemon --pidfile=${PID_FILE} --user $LOGSTASH_USER $DAEMON $SERVICE $DAEMON_OPTS 42 | RETVAL=$? 43 | echo 44 | [ $RETVAL -eq 0 ] && touch $LOCK_FILE 45 | return $RETVAL 46 | } 47 | 48 | stop() { 49 | 50 | echo -n $"Stopping ${NAME}: " 51 | killproc -p ${PID_FILE} -d 10 $DAEMON 52 | RETVAL=$? 53 | echo 54 | [ $RETVAL = 0 ] && rm -f ${LOCK_FILE} ${PID_FILE} 55 | return $RETVAL 56 | } 57 | 58 | case "$1" in 59 | start) 60 | start 61 | ;; 62 | stop) 63 | stop 64 | ;; 65 | status) 66 | status -p ${PID_FILE} $DAEMON 67 | RETVAL=$? 
68 | ;; 69 | restart|force-reload) 70 | stop 71 | start 72 | ;; 73 | *) 74 | N=/etc/init.d/${NAME} 75 | echo "Usage: $N {start|stop|restart|force-reload}" >&2 76 | RETVAL=2 77 | ;; 78 | esac 79 | 80 | exit $RETVAL 81 | -------------------------------------------------------------------------------- /spec/examples/syslog.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | 3 | describe "parse syslog", :if => RUBY_ENGINE == "jruby" do 4 | extend LogStash::RSpec 5 | 6 | config <<-'CONFIG' 7 | filter { 8 | grok { 9 | type => "syslog" 10 | singles => true 11 | pattern => [ "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" ] 12 | add_field => [ "received_at", "%{@timestamp}" ] 13 | add_field => [ "received_from", "%{source_host}" ] 14 | } 15 | syslog_pri { 16 | type => "syslog" 17 | } 18 | date { 19 | type => "syslog" 20 | match => ["syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] 21 | } 22 | mutate { 23 | type => "syslog" 24 | exclude_tags => "_grokparsefailure" 25 | replace => [ "source_host", "%{syslog_hostname}" ] 26 | replace => [ "message", "%{syslog_message}" ] 27 | } 28 | mutate { 29 | type => "syslog" 30 | remove => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ] 31 | } 32 | } 33 | CONFIG 34 | 35 | sample("message" => "<164>Oct 26 15:19:25 1.2.3.4 %ASA-4-106023: Deny udp src DRAC:10.1.2.3/43434 dst outside:192.168.0.1/53 by access-group \"acl_drac\" [0x0, 0x0]", "type" => "syslog") do 36 | insist { subject["type"] } == "syslog" 37 | insist { subject["tags"] }.nil? 38 | insist { subject["syslog_pri"] } == "164" 39 | end 40 | 41 | # Single digit day 42 | sample("message" => "<164>Oct 6 15:19:25 1.2.3.4 %ASA-4-106023: Deny udp src DRAC:10.1.2.3/43434 dst outside:192.168.0.1/53 by access-group \"acl_drac\" [0x0, 0x0]", "type" => "syslog") do 43 | insist { subject["type"] } == "syslog" 44 | insist { subject["tags"] }.nil? 45 | insist { subject["syslog_pri"] } == "164" 46 | #insist { subject.timestamp } == "2012-10-26T15:19:25.000Z" 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /pkg/debian/debian/manpage.1.ex: -------------------------------------------------------------------------------- 1 | .\" Hey, EMACS: -*- nroff -*- 2 | .\" First parameter, NAME, should be all caps 3 | .\" Second parameter, SECTION, should be 1-8, maybe w/ subsection 4 | .\" other parameters are allowed: see man(7), man(1) 5 | .TH LOGSTASH SECTION "April 25, 2008" 6 | .\" Please adjust this date whenever revising the manpage. 7 | .\" 8 | .\" Some roff macros, for reference: 9 | .\" .nh disable hyphenation 10 | .\" .hy enable hyphenation 11 | .\" .ad l left justify 12 | .\" .ad b justify to both left and right margins 13 | .\" .nf disable filling 14 | .\" .fi enable filling 15 | .\" .br insert line break 16 | .\" .sp insert n+1 empty lines 17 | .\" for manpage-specific macros, see man(7) 18 | .SH NAME 19 | logstash \- program to do something 20 | .SH SYNOPSIS 21 | .B logstash 22 | .RI [ options ] " files" ... 23 | .br 24 | .B bar 25 | .RI [ options ] " files" ... 26 | .SH DESCRIPTION 27 | This manual page documents briefly the 28 | .B logstash 29 | and 30 | .B bar 31 | commands. 32 | .PP 33 | .\" TeX users may be more comfortable with the \fB\fP and 34 | .\" \fI\fP escape sequences to invode bold face and italics, 35 | .\" respectively. 
36 | \fBlogstash\fP is a program that... 37 | .SH OPTIONS 38 | These programs follow the usual GNU command line syntax, with long 39 | options starting with two dashes (`-'). 40 | A summary of options is included below. 41 | For a complete description, see the Info files. 42 | .TP 43 | .B \-h, \-\-help 44 | Show summary of options. 45 | .TP 46 | .B \-v, \-\-version 47 | Show version of program. 48 | .SH SEE ALSO 49 | .BR bar (1), 50 | .BR baz (1). 51 | .br 52 | The programs are documented fully by 53 | .IR "The Rise and Fall of a Fooish Bar" , 54 | available via the Info system. 55 | .SH AUTHOR 56 | logstash was written by . 57 | .PP 58 | This manual page was written by Kohsuke Kawaguchi , 59 | for the Debian project (but may be used by others). 60 | -------------------------------------------------------------------------------- /spec/inputs/relp.rb: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | require "test_utils" 3 | require "socket" 4 | require "logstash/util/relp" 5 | 6 | describe "inputs/relp" do 7 | extend LogStash::RSpec 8 | 9 | describe "Single client connection" do 10 | event_count = 10 11 | port = 5511 12 | config <<-CONFIG 13 | input { 14 | relp { 15 | type => "blah" 16 | port => #{port} 17 | } 18 | } 19 | CONFIG 20 | 21 | input do |pipeline, queue| 22 | th = Thread.new { pipeline.run } 23 | sleep 0.1 while !pipeline.ready? 24 | 25 | #Send events from clients 26 | client = RelpClient.new("0.0.0.0", port, ["syslog"]) 27 | event_count.times do |value| 28 | client.syslog_write("Hello #{value}") 29 | end 30 | 31 | events = event_count.times.collect { queue.pop } 32 | event_count.times do |i| 33 | insist { events[i]["message"] } == "Hello #{i}" 34 | end 35 | 36 | pipeline.shutdown 37 | th.join 38 | end # input 39 | end 40 | describe "Two client connection" do 41 | event_count = 100 42 | port = 5512 43 | config <<-CONFIG 44 | input { 45 | relp { 46 | type => "blah" 47 | port => #{port} 48 | } 49 | } 50 | CONFIG 51 | 52 | input do |pipeline, queue| 53 | Thread.new { pipeline.run } 54 | sleep 0.1 while !pipeline.ready? 55 | 56 | #Send events from clients sockets 57 | client = RelpClient.new("0.0.0.0", port, ["syslog"]) 58 | client2 = RelpClient.new("0.0.0.0", port, ["syslog"]) 59 | 60 | event_count.times do |value| 61 | client.syslog_write("Hello from client") 62 | client2.syslog_write("Hello from client 2") 63 | end 64 | 65 | events = (event_count*2).times.collect { queue.pop } 66 | insist { events.select{|event| event["message"]=="Hello from client" }.size } == event_count 67 | insist { events.select{|event| event["message"]=="Hello from client 2" }.size } == event_count 68 | end # input 69 | end 70 | end 71 | -------------------------------------------------------------------------------- /spec/inputs/imap.rb: -------------------------------------------------------------------------------- 1 | require "logstash/inputs/imap" 2 | require "mail" 3 | 4 | describe LogStash::Inputs::IMAP do 5 | user = "logstash" 6 | password = "secret" 7 | msg_time = Time.new 8 | msg_text = "foo\nbar\nbaz" 9 | msg_html = "
<p>a paragraph</p>
\n\n" 10 | 11 | msg = Mail.new do 12 | from "me@example.com" 13 | to "you@example.com" 14 | subject "logstash imap input test" 15 | date msg_time 16 | body msg_text 17 | add_file :filename => "some.html", :content => msg_html 18 | end 19 | 20 | context "with both text and html parts" do 21 | context "when no content-type selected" do 22 | it "should select text/plain part" do 23 | config = {"type" => "imap", "host" => "localhost", 24 | "user" => "#{user}", "password" => "#{password}"} 25 | 26 | input = LogStash::Inputs::IMAP.new config 27 | input.register 28 | event = input.parse_mail(msg) 29 | insist { event["message"] } == msg_text 30 | end 31 | end 32 | 33 | context "when text/html content-type selected" do 34 | it "should select text/html part" do 35 | config = {"type" => "imap", "host" => "localhost", 36 | "user" => "#{user}", "password" => "#{password}", 37 | "content_type" => "text/html"} 38 | 39 | input = LogStash::Inputs::IMAP.new config 40 | input.register 41 | event = input.parse_mail(msg) 42 | insist { event["message"] } == msg_html 43 | end 44 | end 45 | end 46 | 47 | context "when subject is in RFC 2047 encoded-word format" do 48 | it "should be decoded" do 49 | msg.subject = "=?iso-8859-1?Q?foo_:_bar?=" 50 | config = {"type" => "imap", "host" => "localhost", 51 | "user" => "#{user}", "password" => "#{password}"} 52 | 53 | input = LogStash::Inputs::IMAP.new config 54 | input.register 55 | event = input.parse_mail(msg) 56 | insist { event["subject"] } == "foo : bar" 57 | end 58 | end 59 | 60 | end 61 | -------------------------------------------------------------------------------- /docs/learn.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Learn - logstash 3 | layout: content_right 4 | --- 5 | # What is logstash? 6 | 7 | logstash is a tool for managing your logs. 8 | 9 | It helps you take logs and other event data from your systems and move it into 10 | a central place. logstash is open source and completely free. You can find 11 | support on the mailing list and on IRC. 12 | 13 | For an overview of logstash and why you would use it, you should watch the 14 | presentation I gave at CarolinaCon 2011: 15 | [video here](http://carolinacon.blip.tv/file/5105901/). This presentation covers 16 | logstash, how you can use it, some alternatives, logging best practices, 17 | parsing tools, etc. Video also below: 18 | 19 | 25 | 26 | 27 | The slides are available online here: [slides](http://semicomplete.com/presentations/logstash-puppetconf-2012/). 28 | 29 | ## Getting Help 30 | 31 | There's [documentation](.) here on this site. If that isn't sufficient, you can 32 | email the mailing list (logstash-users@googlegroups.com). Further, there is also 33 | an IRC channel - #logstash on irc.freenode.org. 34 | 35 | If you find a bug or have a feature request, file them 36 | on . (Honestly though, if you prefer email or irc 37 | for such things, that works for me, too.) 38 | 39 | ## Download It 40 | 41 | [Download logstash-%VERSION%](https://download.elasticsearch.org/logstash/logstash/logstash-%VERSION%-flatjar.jar) 42 | 43 | ## What's next? 44 | 45 | Try the [standalone logstash guide](tutorials/getting-started-simple) for a simple 46 | real-world example getting started using logstash. 47 | -------------------------------------------------------------------------------- /spec/support/akamai-grok.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | 3 | describe "..." 
do 4 | extend LogStash::RSpec 5 | 6 | config <<-'CONFIG' 7 | filter { 8 | grok { 9 | pattern => "%{COMBINEDAPACHELOG}" 10 | } 11 | 12 | 13 | date { 14 | # Try to pull the timestamp from the 'timestamp' field 15 | match => [ "timestamp", "dd'/'MMM'/'yyyy:HH:mm:ss Z" ] 16 | } 17 | } 18 | CONFIG 19 | 20 | line = '192.168.1.1 - - [25/Mar/2013:20:33:56 +0000] "GET /www.somewebsite.co.uk/dwr/interface/AjaxNewsletter.js HTTP/1.1" 200 794 "http://www.somewebsite.co.uk/money/index.html" "Mozilla/5.0 (Linux; U; Android 2.3.6; en-gb; GT-I8160 Build/GINGERBREAD) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1" "NREUM=s=1364243891214&r=589267&p=101913; __utma=259942479.284548354.1358973919.1363109625.1364243485.15; __utmb=259942479.4.10.1364243485; __utmc=259942479; __utmz=259942479.1359409342.3.3.utmcsr=investing.somewebsite.co.uk|utmccn=(referral)|utmcmd=referral|utmcct=/performance/overview/; asi_segs=D05509_10903|D05509_11337|D05509_11335|D05509_11341|D05509_11125|D05509_11301|D05509_11355|D05509_11508|D05509_11624|D05509_10784|D05509_11003|D05509_10699|D05509_11024|D05509_11096|D05509_11466|D05509_11514|D05509_11598|D05509_11599|D05509_11628|D05509_11681; rsi_segs=D05509_10903|D05509_11337|D05509_11335|D05509_11341|D05509_11125|D05509_11301|D05509_11355|D05509_11508|D05509_11624|D05509_10784|D05509_11003|D05509_10699|D05509_11024|D05509_11096|D05509_11466|D05509_11514|D05509_11598|D05509_11599|D05509_11628|D05509_11681|D05509_11701|D05509_11818|D05509_11850|D05509_11892|D05509_11893|D05509_12074|D05509_12091|D05509_12093|D05509_12095|D05509_12136|D05509_12137|D05509_12156|D05509_0; s_pers=%20s_nr%3D1361998955946%7C1364590955946%3B%20s_pn2%3D/money/home%7C1364245284228%3B%20s_c39%3D/money/home%7C1364245522830%3B%20s_visit%3D1%7C1364245693534%3B; s_sess=%20s_pn%3D/money/home%3B%20s_cc%3Dtrue%3B%20s_sq%3D%3B"' 21 | 22 | sample line do 23 | #puts subject["@timestamp"] 24 | #puts subject["timestamp"] 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /misc/pl.rb: -------------------------------------------------------------------------------- 1 | # pipeline tests 2 | 3 | $: << "lib" 4 | require "logstash/config/file" 5 | config = LogStash::Config::File.new(nil, ARGV[0]) 6 | agent = LogStash::Agent.new 7 | inputs, filters, outputs = agent.instance_eval { parse_config(config) } 8 | 9 | inputs.collect(&:register) 10 | filters.collect(&:register) 11 | outputs.collect(&:register) 12 | 13 | i2f = SizedQueue.new(16) 14 | f2o = SizedQueue.new(16) 15 | i2f = f2o if filters.empty? 16 | 17 | input_threads = inputs.collect do |i| 18 | t = Thread.new do 19 | begin 20 | i.run(i2f) 21 | rescue => e 22 | puts :input => i.class, :exception => e 23 | end 24 | end 25 | t[:name] = i.class 26 | t 27 | end 28 | 29 | #input_supervisor_thread = Thread.new do 30 | #while true 31 | #input_threads.collect(&:join) 32 | #i2f << :shutdown 33 | #end 34 | #end 35 | 36 | filter_thread = Thread.new(filters) do |filters| 37 | if filters.any? 
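    # note: this scratch pipeline services only a single event through the
    # filters; unlike the output worker below, there is no loop here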
38 | event = i2f.pop 39 | filters.each do |filter| 40 | filter.filter(event) 41 | end 42 | f2o << event 43 | end 44 | end 45 | filter_thread[:name] = "filterworker" 46 | 47 | output_thread = Thread.new do 48 | begin 49 | while true 50 | event = f2o.pop 51 | outputs.each do |output| 52 | output.receive(event) 53 | end 54 | end 55 | rescue => e 56 | puts :output_thread => e 57 | end 58 | end 59 | output_thread[:name] = "outputworker" 60 | 61 | def twait(thread) 62 | begin 63 | puts :waiting => thread[:name] 64 | thread.join 65 | puts :donewaiting => thread[:name] 66 | rescue => e 67 | puts thread => e 68 | end 69 | end 70 | 71 | def shutdown(input, filter, output) 72 | input.each do |i| 73 | i.raise("SHUTDOWN") 74 | twait(i) 75 | end 76 | 77 | #filter.raise("SHUTDOWN") 78 | #twait(filter) 79 | output.raise("SHUTDOWN") 80 | twait(output) 81 | end 82 | 83 | trap("INT") do 84 | puts "SIGINT"; shutdown(input_threads, filter_thread, output_thread) 85 | exit 1 86 | end 87 | 88 | #[*input_threads, filter_thread, output_thread].collect(&:join) 89 | sleep 30 90 | 91 | 92 | -------------------------------------------------------------------------------- /spec/filters/translate.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/translate" 3 | 4 | describe LogStash::Filters::Translate do 5 | extend LogStash::RSpec 6 | 7 | describe "exact translation" do 8 | config <<-CONFIG 9 | filter { 10 | translate { 11 | field => "status" 12 | destination => "translation" 13 | dictionary => [ "200", "OK", 14 | "300", "Redirect", 15 | "400", "Client Error", 16 | "500", "Server Error" ] 17 | exact => true 18 | regex => false 19 | } 20 | } 21 | CONFIG 22 | 23 | sample("status" => 200) do 24 | insist { subject["translation"] } == "OK" 25 | end 26 | end 27 | 28 | describe "multi translation" do 29 | config <<-CONFIG 30 | filter { 31 | translate { 32 | field => "status" 33 | destination => "translation" 34 | dictionary => [ "200", "OK", 35 | "300", "Redirect", 36 | "400", "Client Error", 37 | "500", "Server Error" ] 38 | exact => false 39 | regex => false 40 | } 41 | } 42 | CONFIG 43 | 44 | sample("status" => "200 & 500") do 45 | insist { subject["translation"] } == "OK & Server Error" 46 | end 47 | end 48 | 49 | describe "regex translation" do 50 | config <<-CONFIG 51 | filter { 52 | translate { 53 | field => "status" 54 | destination => "translation" 55 | dictionary => [ "^2[0-9][0-9]$", "OK", 56 | "^3[0-9][0-9]$", "Redirect", 57 | "^4[0-9][0-9]$", "Client Error", 58 | "^5[0-9][0-9]$", "Server Error" ] 59 | exact => true 60 | regex => true 61 | } 62 | } 63 | CONFIG 64 | 65 | sample("status" => "200") do 66 | insist { subject["translation"] } == "OK" 67 | end 68 | end 69 | 70 | end 71 | -------------------------------------------------------------------------------- /lib/logstash/outputs/stomp.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/outputs/base" 3 | require "logstash/namespace" 4 | 5 | class LogStash::Outputs::Stomp < LogStash::Outputs::Base 6 | config_name "stomp" 7 | milestone 2 8 | 9 | # The address of the STOMP server. 10 | config :host, :validate => :string, :required => true 11 | 12 | # The port to connect to on your STOMP server. 13 | config :port, :validate => :number, :default => 61613 14 | 15 | # The username to authenticate with. 16 | config :user, :validate => :string, :default => "" 17 | 18 | # The password to authenticate with. 
19 | config :password, :validate => :password, :default => "" 20 | 21 | # The destination to read events from. Supports string expansion, meaning 22 | # %{foo} values will expand to the field value. 23 | # 24 | # Example: "/topic/logstash" 25 | config :destination, :validate => :string, :required => true 26 | 27 | # The vhost to use 28 | config :vhost, :validate => :string, :default => nil 29 | 30 | # Enable debugging output? 31 | config :debug, :validate => :boolean, :default => false 32 | 33 | private 34 | def connect 35 | begin 36 | @client.connect 37 | @logger.debug("Connected to stomp server") if @client.connected? 38 | rescue => e 39 | @logger.debug("Failed to connect to stomp server, will retry", 40 | :exception => e, :backtrace => e.backtrace) 41 | sleep 2 42 | retry 43 | end 44 | end 45 | 46 | 47 | public 48 | def register 49 | require "onstomp" 50 | @client = OnStomp::Client.new("stomp://#{@host}:#{@port}", :login => @user, :passcode => @password.value) 51 | @client.host = @vhost if @vhost 52 | 53 | # Handle disconnects 54 | @client.on_connection_closed { 55 | connect 56 | } 57 | 58 | connect 59 | end # def register 60 | 61 | def receive(event) 62 | return unless output?(event) 63 | 64 | @logger.debug(["stomp sending event", { :host => @host, :event => event }]) 65 | @client.send(event.sprintf(@destination), event.to_json) 66 | end # def receive 67 | end # class LogStash::Outputs::Stomp 68 | -------------------------------------------------------------------------------- /spec/filters/json.rb: -------------------------------------------------------------------------------- 1 | require "test_utils" 2 | require "logstash/filters/json" 3 | 4 | describe LogStash::Filters::Json do 5 | extend LogStash::RSpec 6 | 7 | describe "parse message into the event" do 8 | config <<-CONFIG 9 | filter { 10 | json { 11 | # Parse message as JSON, store the results in the 'data' field' 12 | source => "message" 13 | } 14 | } 15 | CONFIG 16 | 17 | sample '{ "hello": "world", "list": [ 1, 2, 3 ], "hash": { "k": "v" } }' do 18 | insist { subject["hello"] } == "world" 19 | insist { subject["list" ] } == [1,2,3] 20 | insist { subject["hash"] } == { "k" => "v" } 21 | end 22 | end 23 | 24 | describe "parse message into a target field" do 25 | config <<-CONFIG 26 | filter { 27 | json { 28 | # Parse message as JSON, store the results in the 'data' field' 29 | source => "message" 30 | target => "data" 31 | } 32 | } 33 | CONFIG 34 | 35 | sample '{ "hello": "world", "list": [ 1, 2, 3 ], "hash": { "k": "v" } }' do 36 | insist { subject["data"]["hello"] } == "world" 37 | insist { subject["data"]["list" ] } == [1,2,3] 38 | insist { subject["data"]["hash"] } == { "k" => "v" } 39 | end 40 | end 41 | 42 | describe "tag invalid json" do 43 | config <<-CONFIG 44 | filter { 45 | json { 46 | # Parse message as JSON, store the results in the 'data' field' 47 | source => "message" 48 | target => "data" 49 | } 50 | } 51 | CONFIG 52 | 53 | sample "invalid json" do 54 | insist { subject["tags"] }.include?("_jsonparsefailure") 55 | end 56 | end 57 | 58 | describe "fixing @timestamp (#pull 733)" do 59 | config <<-CONFIG 60 | filter { 61 | json { 62 | source => "message" 63 | } 64 | } 65 | CONFIG 66 | 67 | sample "{ \"@timestamp\": \"2013-10-19T00:14:32.996Z\" }" do 68 | insist { subject["@timestamp"] }.is_a?(Time) 69 | insist { subject["@timestamp"].to_json } == "\"2013-10-19T00:14:32.996Z\"" 70 | end 71 | end 72 | end 73 | -------------------------------------------------------------------------------- 
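The spec above drives the json filter through the rspec test harness; for quick experiments it can also help to exercise the filter class directly. A minimal sketch, assuming a checkout with the logstash libraries on the load path (the payload is illustrative):

    require "logstash/filters/json"
    require "logstash/event"

    filter = LogStash::Filters::Json.new("source" => "message", "target" => "data")
    filter.register

    event = LogStash::Event.new("message" => '{ "hello": "world" }')
    filter.filter(event)
    puts event["data"]["hello"]   # => "world"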
/lib/logstash/inputs/wmi.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/inputs/base" 3 | require "logstash/namespace" 4 | require "socket" 5 | 6 | # Collect data from WMI query 7 | # 8 | # This is useful for collecting performance metrics and other data 9 | # which is accessible via WMI on a Windows host 10 | # 11 | # Example: 12 | # 13 | # input { 14 | # wmi { 15 | # query => "select * from Win32_Process" 16 | # interval => 10 17 | # } 18 | # wmi { 19 | # query => "select PercentProcessorTime from Win32_PerfFormattedData_PerfOS_Processor where name = '_Total'" 20 | # } 21 | # } 22 | class LogStash::Inputs::WMI < LogStash::Inputs::Base 23 | 24 | config_name "wmi" 25 | milestone 1 26 | 27 | # WMI query 28 | config :query, :validate => :string, :required => true 29 | # Polling interval 30 | config :interval, :validate => :number, :default => 10 31 | 32 | public 33 | def register 34 | 35 | @host = Socket.gethostname 36 | @logger.info("Registering wmi input", :query => @query) 37 | 38 | if RUBY_PLATFORM == "java" 39 | # make use of the same fix used for the eventlog input 40 | require "logstash/inputs/eventlog/racob_fix" 41 | require "jruby-win32ole" 42 | else 43 | require "win32ole" 44 | end 45 | end # def register 46 | 47 | public 48 | def run(queue) 49 | @wmi = WIN32OLE.connect("winmgmts://") 50 | 51 | begin 52 | @logger.debug("Executing WMI query '#{@query}'") 53 | loop do 54 | @wmi.ExecQuery(@query).each do |wmiobj| 55 | # create a single event for all properties in the collection 56 | event = LogStash::Event.new 57 | event["host"] = @host 58 | decorate(event) 59 | wmiobj.Properties_.each do |prop| 60 | event[prop.name] = prop.value 61 | end 62 | queue << event 63 | end 64 | sleep @interval 65 | end # loop 66 | rescue Exception => ex 67 | @logger.error("WMI query error: #{ex}\n#{ex.backtrace}") 68 | sleep @interval 69 | retry 70 | end # begin/rescue 71 | end # def run 72 | end # class LogStash::Inputs::WMI 73 | -------------------------------------------------------------------------------- /lib/logstash/inputs/udp.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "date" 3 | require "logstash/inputs/base" 4 | require "logstash/namespace" 5 | require "socket" 6 | 7 | # Read messages as events over the network via udp. 8 | # 9 | class LogStash::Inputs::Udp < LogStash::Inputs::Base 10 | config_name "udp" 11 | milestone 2 12 | 13 | default :codec, "plain" 14 | 15 | # The address to listen on 16 | config :host, :validate => :string, :default => "0.0.0.0" 17 | 18 | # The port to listen on. Remember that ports less than 1024 (privileged 19 | # ports) may require root or elevated privileges to use. 20 | config :port, :validate => :number, :required => true 21 | 22 | # Buffer size 23 | config :buffer_size, :validate => :number, :default => 8192 24 | 25 | public 26 | def initialize(params) 27 | super 28 | BasicSocket.do_not_reverse_lookup = true 29 | end # def initialize 30 | 31 | public 32 | def register 33 | @udp = nil 34 | end # def register 35 | 36 | public 37 | def run(output_queue) 38 | begin 39 | # udp server 40 | udp_listener(output_queue) 41 | rescue LogStash::ShutdownSignal 42 | # do nothing, shutdown was requested. 
43 | rescue => e 44 | @logger.warn("UDP listener died", :exception => e, :backtrace => e.backtrace) 45 | sleep(5) 46 | retry 47 | end # begin 48 | end # def run 49 | 50 | private 51 | def udp_listener(output_queue) 52 | @logger.info("Starting UDP listener", :address => "#{@host}:#{@port}") 53 | 54 | if @udp && ! @udp.closed? 55 | @udp.close 56 | end 57 | 58 | @udp = UDPSocket.new(Socket::AF_INET) 59 | @udp.bind(@host, @port) 60 | 61 | loop do 62 | payload, client = @udp.recvfrom(@buffer_size) 63 | @codec.decode(payload) do |event| 64 | decorate(event) 65 | event["host"] = client[3] 66 | output_queue << event 67 | end 68 | end 69 | ensure 70 | if @udp 71 | @udp.close_read rescue nil 72 | @udp.close_write rescue nil 73 | end 74 | end # def udp_listener 75 | 76 | public 77 | def teardown 78 | @udp.close if @udp && !@udp.closed? 79 | end 80 | 81 | end # class LogStash::Inputs::Udp 82 | -------------------------------------------------------------------------------- /docs/logging-tool-comparisons.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Logging tools comparisons - logstash 3 | layout: content_right 4 | --- 5 | # Logging tools comparison 6 | 7 | The information below is provided as "best effort" and is not strictly intended 8 | as a complete source of truth. If the information below is unclear or incorrect, please 9 | email the logstash-users list (or send a pull request with the fix) :) 10 | 11 | Where feasible, this document will also provide information on how you can use 12 | logstash with these other projects. 13 | 14 | # logstash 15 | 16 | Primary goal: Make log/event data and analytics accessible. 17 | 18 | Overview: Where your logs come from, how you store them, or what you do with 19 | them is up to you. Logstash exists to help make such actions easier and faster. 20 | 21 | It provides you a simple event pipeline for taking events and logs from any 22 | input, manipulating them with filters, and sending them to any output. Inputs 23 | can be files, network, message brokers, etc. Filters are date and string 24 | parsers, grep-like, etc. Outputs are data stores (elasticsearch, mongodb, etc), 25 | message systems (rabbitmq, stomp, etc), network (tcp, syslog), etc. 26 | 27 | It also provides a web interface for doing search and analytics on your 28 | logs. 29 | 30 | # graylog2 31 | 32 | [http://graylog2.org/](http://graylog2.org) 33 | 34 | _Overview to be written_ 35 | 36 | You can use graylog2 with logstash by using the 'gelf' output to send logstash 37 | events to a graylog2 server. This gives you logstash's excellent input and 38 | filter features while still being able to use the graylog2 web interface. 39 | 40 | # whoops 41 | 42 | [whoops site](http://www.whoopsapp.com/) 43 | 44 | _Overview to be written_ 45 | 46 | A logstash output to whoops is coming soon - 47 | 48 | # flume 49 | 50 | [flume site](https://github.com/cloudera/flume/wiki) 51 | 52 | Flume is primarily a transport system aimed at reliably copying logs from 53 | application servers to HDFS. 54 | 55 | You can use it with logstash by having a syslog sink configured to shoot logs 56 | at a logstash syslog input. 
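A minimal listener on the logstash side might look like this (the port is just an example; ports below 1024 may require elevated privileges):

    input {
      syslog {
        port => 5514
        type => "flume"
      }
    }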
57 | 58 | # scribe 59 | 60 | _Overview to be written_ 61 | -------------------------------------------------------------------------------- /lib/logstash/filters/extractnumbers.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require 'logstash/namespace' 3 | require 'logstash/filters/base' 4 | 5 | 6 | # This filter automatically extracts all numbers found inside a string 7 | # 8 | # This is useful when you have lines that don't match a grok pattern 9 | # or use json but you still need to extract numbers. 10 | # 11 | # Each numbers is returned in a @fields.intX or @fields.floatX field 12 | # where X indicates the position in the string. 13 | # 14 | # The fields produced by this filter are extra useful used in combination 15 | # with kibana number plotting features. 16 | class LogStash::Filters::ExtractNumbers < LogStash::Filters::Base 17 | config_name 'extractnumbers' 18 | milestone 1 19 | 20 | # The source field for the data. By default is message. 21 | config :source, :validate => :string, :default => 'message' 22 | 23 | public 24 | def register 25 | end 26 | 27 | public 28 | def filter(event) 29 | integers = nil 30 | floats = nil 31 | 32 | msg = event[@source] 33 | 34 | if not msg 35 | return 36 | end 37 | 38 | # If for some reason the field is an array of values, take the first only. 39 | msg = msg.first if msg.is_a?(Array) 40 | 41 | 42 | fields = msg.split 43 | for elem in fields 44 | int = str_as_integer(elem) 45 | if int != nil 46 | if not integers 47 | integers = Array.new 48 | end 49 | integers.push(int) 50 | next 51 | end 52 | f = str_as_float(elem) 53 | if f != nil 54 | if not floats 55 | floats = Array.new 56 | end 57 | floats.push(f) 58 | end 59 | end 60 | 61 | if integers 62 | index = 0 63 | for i in integers 64 | index += 1 65 | event["int" + index.to_s] = i 66 | end 67 | end 68 | if floats 69 | index = 0 70 | for f in floats 71 | index += 1 72 | event["float" + index.to_s] = f 73 | end 74 | end 75 | end 76 | 77 | def str_as_integer(str) 78 | Integer(str) rescue nil 79 | end 80 | 81 | def str_as_float(str) 82 | Float(str) rescue nil 83 | end 84 | end # class LogStash::Filters::ExtractNumbers 85 | --------------------------------------------------------------------------------
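A hypothetical use of the extractnumbers filter above (the sample message is illustrative):

    filter {
      extractnumbers {
        source => "message"
      }
    }

Given a message such as "took 42 ms load 0.75", the filter would add "int1" => 42 and "float1" => 0.75 to the event, ready for plotting in kibana.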