├── .github ├── ISSUE_TEMPLATE.md └── stale.yml ├── .gitignore ├── .rubocop.yml ├── .travis.yml ├── CHANGELOG.md ├── Gemfile ├── LICENSE.txt ├── README.md ├── Rakefile ├── THANKS.md ├── Vagrantfile ├── examples ├── apache-phoenix-hbase-sql.md ├── apache-phoenix-thin-hbase-sql.md ├── cockroachdb.md ├── mariadb.md ├── mysql.md ├── odps.md ├── oracle.md ├── postgres.md ├── sql-server.md └── sqlite.md ├── lib ├── logstash-output-jdbc_jars.rb └── logstash │ └── outputs │ └── jdbc.rb ├── log4j2.xml ├── logstash-output-jdbc.gemspec ├── scripts ├── minutes_to_retries.rb ├── travis-before_script.sh └── travis-variables.sh └── spec ├── jdbc_spec_helper.rb └── outputs ├── jdbc_derby_spec.rb ├── jdbc_mysql_spec.rb ├── jdbc_postgres_spec.rb ├── jdbc_spec.rb └── jdbc_sqlite_spec.rb /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | 16 | 17 | ## Expected & Actual Behavior 18 | 19 | 20 | 21 | ## Your Environment 22 | 23 | * Version of plugin used: [ ] 24 | * Version of Logstash used: [ ] 25 | * Database engine & version you're connecting to: [ ] 26 | * Have you checked you've met the Logstash requirements for Java versions?: [ ] 27 | * Have you checked that the JDBC jar you are using is suitable for your version of Java?: [ ] 28 | * If you are also using logstash-input-jdbc with an older jar please first try logstash-output-jdbc with a different newer jar. 29 | * The logstash-input-jdbc and logstash-output-jdbc plugins work differently and just because it works with the input plugin does not mean the same version jar will necssarily work with the output. 30 | -------------------------------------------------------------------------------- /.github/stale.yml: -------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 60 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 7 5 | # Issues with these labels will never be considered stale 6 | exemptLabels: 7 | - pinned 8 | - security 9 | - no-stale-bot 10 | # Label to use when marking an issue as stale 11 | staleLabel: stale 12 | # Comment to post when marking an issue as stale. Set to `false` to disable 13 | markComment: > 14 | This issue has been automatically marked as stale because it has not had 15 | recent activity. It will be closed if no further activity occurs. Thank you 16 | for your contributions. 17 | # Comment to post when closing a stale issue. Set to `false` to disable 18 | closeComment: false 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.gem 2 | Gemfile.lock 3 | Gemfile.bak 4 | .bundle 5 | .vagrant 6 | .mvn 7 | vendor 8 | lib/**/*.jar 9 | 10 | .DS_Store 11 | *.swp 12 | *.log 13 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | # I don't care for underscores in numbers. 
2 | Style/NumericLiterals: 3 | Enabled: false 4 | 5 | Style/ClassAndModuleChildren: 6 | Enabled: false 7 | 8 | Metrics/AbcSize: 9 | Enabled: false 10 | 11 | Metrics/CyclomaticComplexity: 12 | Max: 9 13 | 14 | Metrics/PerceivedComplexity: 15 | Max: 10 16 | 17 | Metrics/LineLength: 18 | Enabled: false 19 | 20 | Metrics/MethodLength: 21 | Max: 50 22 | 23 | Style/FileName: 24 | Exclude: 25 | - 'lib/logstash-output-jdbc_jars.rb' 26 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | language: ruby 3 | cache: bundler 4 | rvm: 5 | - jruby-1.7.25 6 | jdk: 7 | - oraclejdk8 8 | before_script: 9 | - bundle exec rake vendor 10 | - bundle exec rake install_jars 11 | - ./scripts/travis-before_script.sh 12 | - source ./scripts/travis-variables.sh 13 | script: bundle exec rspec 14 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | All notable changes to this project will be documented in this file, from 0.2.0. 3 | 4 | ## [5.3.0] - 2017-11-08 5 | - Adds configuration options `enable_event_as_json_keyword` and `event_as_json_keyword` 6 | - Adds BigDecimal support 7 | - Adds additional logging for debugging purposes (with thanks to @mlkmhd's work) 8 | 9 | ## [5.2.1] - 2017-04-09 10 | - Adds Array and Hash to_json support for non-sprintf syntax 11 | 12 | ## [5.2.0] - 2017-04-01 13 | - Upgrades HikariCP to latest 14 | - Fixes HikariCP logging integration issues 15 | 16 | ## [5.1.0] - 2016-12-17 17 | - phoenix-thin fixes for issue #60 18 | 19 | ## [5.0.0] - 2016-11-03 20 | - logstash v5 support 21 | 22 | ## [0.3.1] - 2016-08-28 23 | - Adds connection_test configuration option, to prevent the connection test from occuring, allowing the error to be suppressed. 24 | Useful for cockroachdb deployments. https://github.com/theangryangel/logstash-output-jdbc/issues/53 25 | 26 | ## [0.3.0] - 2016-07-24 27 | - Brings tests from v5 branch, providing greater coverage 28 | - Removes bulk update support, due to inconsistent behaviour 29 | - Plugin now marked as threadsafe, meaning only 1 instance per-Logstash 30 | - Raises default max_pool_size to match the default number of workers (1 connection per worker) 31 | 32 | ## [0.2.10] - 2016-07-07 33 | - Support non-string entries in statement array 34 | - Adds backtrace to exception logging 35 | 36 | ## [0.2.9] - 2016-06-29 37 | - Fix NameError exception. 
38 | - Moved log_jdbc_exception calls 39 | 40 | ## [0.2.7] - 2016-05-29 41 | - Backport retry exception logic from v5 branch 42 | - Backport improved timestamp compatibility from v5 branch 43 | 44 | ## [0.2.6] - 2016-05-02 45 | - Fix for exception infinite loop 46 | 47 | ## [0.2.5] - 2016-04-11 48 | ### Added 49 | - Basic tests running against DerbyDB 50 | - Fix for converting Logstash::Timestamp to iso8601 from @hordijk 51 | 52 | ## [0.2.4] - 2016-04-07 53 | - Documentation fixes from @hordijk 54 | 55 | ## [0.2.3] - 2016-02-16 56 | - Bug fixes 57 | 58 | ## [0.2.2] - 2015-12-30 59 | - Bug fixes 60 | 61 | ## [0.2.1] - 2015-12-22 62 | - Support for connection pooling support added through HikariCP 63 | - Support for unsafe statement handling (allowing dynamic queries) 64 | - Altered exception handling to now count sequential flushes with exceptions thrown 65 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | gemspec 4 | 5 | logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash" 6 | use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1" 7 | 8 | if Dir.exist?(logstash_path) && use_logstash_source 9 | gem 'logstash-core', :path => "#{logstash_path}/logstash-core" 10 | gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api" 11 | end 12 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # logstash-output-jdbc 2 | 3 | [![Build Status](https://travis-ci.org/theangryangel/logstash-output-jdbc.svg?branch=master)](https://travis-ci.org/theangryangel/logstash-output-jdbc) [![Flattr this git repo](http://api.flattr.com/button/flattr-badge-large.png)](https://flattr.com/submit/auto?user_id=the_angry_angel&url=https://github.com/the_angry_angel/logstash-output-jdbc&title=logstash-output-jdbc&language=&tags=github&category=software) 4 | 5 | This plugin is provided as an external plugin and is not part of the Logstash project. 
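At its simplest, the plugin takes Logstash events and writes them to a table via a JDBC prepared statement. A minimal, hypothetical output block looks like the following (the jar path, connection string, and table name here are placeholders; tested configurations for specific databases live in the examples directory):
```
output {
  jdbc {
    driver_jar_path => "/opt/postgresql-42.1.4.jar"
    connection_string => "jdbc:postgresql://localhost:5432/logstash?user=logstash&password=logstash"
    statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST (? AS timestamp), ?)", "host", "@timestamp", "message" ]
  }
}
```
Each `?` in the statement is bound, in order, from the remaining entries of the `statement` array.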
6 | 7 | This plugin allows you to output to SQL databases, using JDBC adapters. 8 | See below for tested adapters, and example configurations. 9 | 10 | This has not yet been extensively tested with all JDBC drivers and may not yet work for you. 11 | 12 | If you do find this works for a JDBC driver without an example, let me know and provide a small example configuration if you can. 13 | 14 | This plugin does not bundle any JDBC jar files, and expects them to be in a 15 | particular location. Please ensure you read the installation instructions below. 16 | 17 | ## Support & release schedule 18 | I no longer have time at work to maintain this plugin in step with Logstash's releases, and I am not completely immersed in the Logstash ecosystem. If something is broken for you I will do my best to help, but I cannot guarantee timeframes. 19 | 20 | Pull requests are always welcome. 21 | 22 | If you want to act as an additional maintainer, please reach out to discuss. 23 | 24 | ## Changelog 25 | See CHANGELOG.md 26 | 27 | ## Versions 28 | Released versions are available via rubygems, and are typically tagged. 29 | 30 | For development: 31 | - See master branch for logstash v5 & v6 :warning: This is untested under Logstash 6.3 at this time, and there has been 1 unverified report of an issue. Please use at your own risk until I can find the time to evaluate and test 6.3. 32 | - See v2.x branch for logstash v2 33 | - See v1.5 branch for logstash v1.5 34 | - See v1.4 branch for logstash 1.4 35 | 36 | ## Installation 37 | - Run `bin/logstash-plugin install logstash-output-jdbc` in your logstash installation directory 38 | - Now either: 39 | - Use driver_jar_path in your configuration to specify a path to your jar file 40 | - Or: 41 | - Create the directory vendor/jar/jdbc in your logstash installation (`mkdir -p vendor/jar/jdbc/`) 42 | - Add JDBC jar files to vendor/jar/jdbc in your logstash installation 43 | - And then configure (examples can be found in the examples directory) 44 | 45 | ## Configuration options 46 | 47 | | Option | Type | Description | Required? | Default | 48 | | ------ | ---- | ----------- | --------- | ------- | 49 | | driver_class | String | Specify a driver class if autoloading fails | No | | 50 | | driver_auto_commit | Boolean | If the driver does not support auto commit, you should set this to false | No | True | 51 | | driver_jar_path | String | File path to the jar file containing your JDBC driver. This is optional, and all JDBC jars may be placed in $LOGSTASH_HOME/vendor/jar/jdbc instead. | No | | 52 | | connection_string | String | JDBC connection URL | Yes | | 53 | | connection_test | Boolean | Run a JDBC connection test. Some drivers do not function correctly, and you may need to disable the connection test to suppress an error. Cockroach with the postgres JDBC driver is such an example. | No | Yes | 54 | | connection_test_query | String | Connection test and init query string, required for some JDBC drivers that don't support isValid(). Typically you'd set this to "SELECT 1" | No | | 55 | | username | String | JDBC username - this is optional as it may be included in the connection string for many drivers | No | | 56 | | password | String | JDBC password - this is optional as it may be included in the connection string for many drivers | No | | 57 | | statement | Array | An array of strings representing the SQL statement to run. Index 0 is the SQL statement that is prepared; all other array entries are passed in as parameters (in order).
A parameter may either be a property of the event (e.g. "@timestamp" or "host") or a formatted string (e.g. "%{host} - %{message}" or "%{message}"). If a key is passed then it will be automatically converted as required for insertion into SQL. If it's a formatted string then it will be passed in verbatim. | Yes | | 58 | | unsafe_statement | Boolean | If yes, the statement is evaluated for event fields - this allows you to use dynamic table names, etc. **This is highly dangerous** and you should **not** use this unless you are 100% sure that the field(s) you are passing in are 100% safe. Failure to do so may result in SQL injection. Example statement: [ "insert into %{table_name_field} (column) values(?)", "fieldname" ] | No | False | 59 | | max_pool_size | Number | Maximum number of connections to open to the SQL server at any one time | No | 5 | 60 | | connection_timeout | Number | Number of milliseconds before a SQL connection is closed | No | 10000 | 61 | | flush_size | Number | Maximum number of entries to buffer before sending to SQL | No | 1000 | 62 | | max_flush_exceptions | Number | Number of sequential flushes which cause an exception, before the set of events is discarded. Set to a value less than 1 if you never want it to stop. This should be carefully configured with respect to retry_initial_interval and retry_max_interval, if your SQL server is not highly available | No | 10 | 63 | | retry_initial_interval | Number | Number of seconds before the initial retry in the event of a failure. On each failure it will be doubled until it reaches retry_max_interval | No | 2 | 64 | | retry_max_interval | Number | Maximum number of seconds between each retry | No | 128 | 65 | | retry_sql_states | Array of strings | An array of custom SQL state codes you wish to retry until `max_flush_exceptions`. Useful if you're using a JDBC driver which returns retryable, but non-standard, SQL state codes in its exceptions. | No | [] | 66 | | event_as_json_keyword | String | The magic keyword that the plugin looks for to convert the entire event into a JSON object. As Logstash does not support this out of the box with its `sprintf` implementation, you can use whatever this field is set to in the statement parameters | No | @event | 67 | | enable_event_as_json_keyword | Boolean | Enables the magic keyword set in the configuration option `event_as_json_keyword`. Without this enabled, the plugin will not convert the `event_as_json_keyword` into a JSON encoding of the entire event. | No | False | 68 | 69 | ## Example configurations 70 | Example logstash configurations can now be found in the examples directory. Where possible we try to link every configuration with a tested jar. 71 | 72 | If you have a working sample configuration for a DB that's not listed, pull requests are welcome. 73 | 74 | ## Development and Running tests 75 | For development, it is recommended to run the tests inside a virtual machine (a Vagrantfile is included in the repo), as they require 76 | access to various database engines and could completely destroy any data in a live system. 77 | 78 | If you have vagrant available (this is temporary whilst I'm hacking on v5 support.
I'll make this more streamlined later): 79 | - `vagrant up` 80 | - `vagrant ssh` 81 | - `cd /vagrant` 82 | - `gem install bundler` 83 | - `cd /vagrant && bundle install && bundle exec rake vendor && bundle exec rake install_jars` 84 | - `./scripts/travis-before_script.sh && source ./scripts/travis-variables.sh` 85 | - `bundle exec rspec` 86 | 87 | ## Releasing 88 | - Update Changelog 89 | - Bump version in gemspec 90 | - Commit 91 | - Create tag `git tag v` 92 | - `bundle exec rake install_jars` 93 | - `bundle exec rake pre_release_checks` 94 | - `gem build logstash-output-jdbc.gemspec` 95 | - `gem push` 96 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require 'logstash/devutils/rake' 3 | require 'jars/installer' 4 | require 'rubygems' 5 | 6 | desc 'Fetch any jars required for this plugin' 7 | task :install_jars do 8 | ENV['JARS_HOME'] = Dir.pwd + '/vendor/jar-dependencies/runtime-jars' 9 | ENV['JARS_VENDOR'] = 'false' 10 | Jars::Installer.new.vendor_jars!(false) 11 | end 12 | -------------------------------------------------------------------------------- /THANKS.md: -------------------------------------------------------------------------------- 1 | logstash-output-jdbc is a project originally created by Karl Southern 2 | (the_angry_angel), but there are a number of people that have contributed 3 | or implemented key features over time. We do our best to keep this list 4 | up-to-date, but you can also have a look at the nice contributor graphs 5 | produced by GitHub: https://github.com/theangryangel/logstash-output-jdbc/graphs/contributors 6 | 7 | * [hordijk](https://github.com/hordijk) 8 | * [dmitryakadiamond](https://github.com/dmitryakadiamond) 9 | * [MassimoSporchia](https://github.com/MassimoSporchia) 10 | * [ebuildy](https://github.com/ebuildy) 11 | * [kushtrimjunuzi](https://github.com/kushtrimjunuzi) 12 | * [josemazo](https://github.com/josemazo) 13 | * [aceoliver](https://github.com/aceoliver) 14 | * [roflmao](https://github.com/roflmao) 15 | * [onesuper](https://github.com/onesuper) 16 | * [phr0gz](https://github.com/phr0gz) 17 | * [jMonsinjon](https://github.com/jMonsinjon) 18 | * [mlkmhd](https://github.com/mlkmhd) 19 | -------------------------------------------------------------------------------- /Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | JRUBY_VERSION = "jruby-1.7" 5 | 6 | Vagrant.configure(2) do |config| 7 | 8 | config.vm.define "debian" do |deb| 9 | deb.vm.box = 'debian/stretch64' 10 | deb.vm.synced_folder '.', '/vagrant', type: :virtualbox 11 | 12 | deb.vm.provision 'shell', inline: <<-EOP 13 | apt-get update 14 | apt-get install openjdk-8-jre ca-certificates-java git curl -y -q 15 | curl -sSL https://rvm.io/mpapis.asc | sudo gpg --import - 16 | curl -sSL https://get.rvm.io | bash -s stable --ruby=#{JRUBY_VERSION} 17 | usermod -a -G rvm vagrant 18 | EOP 19 | end 20 | 21 | config.vm.define "centos" do |centos| 22 | centos.vm.box = 'centos/7' 23 | centos.ssh.insert_key = false # https://github.com/mitchellh/vagrant/issues/7610 24 | centos.vm.synced_folder '.', '/vagrant', type: :virtualbox 25 | 26 | centos.vm.provision 'shell', inline: <<-EOP 27 | yum update 28 | yum install java-1.7.0-openjdk 29 | curl -sSL https://rvm.io/mpapis.asc | sudo gpg --import - 30 | curl -sSL https://get.rvm.io | bash -s stable 
--ruby=#{JRUBY_VERSION} 31 | usermod -a -G rvm vagrant 32 | EOP 33 | end 34 | 35 | end 36 | -------------------------------------------------------------------------------- /examples/apache-phoenix-hbase-sql.md: -------------------------------------------------------------------------------- 1 | # Example: Apache Phoenix (HBase SQL) 2 | * Tested with Ubuntu 14.04.03 / Logstash 2.1 / Apache Phoenix 4.6 3 | * HBase and Zookeeper must be both accessible from logstash machine 4 | * Please see apache-phoenix-thin-hbase-sql for phoenix-thin. The examples are different. 5 | ``` 6 | input 7 | { 8 | stdin { } 9 | } 10 | output { 11 | jdbc { 12 | connection_string => "jdbc:phoenix:ZOOKEEPER_HOSTNAME" 13 | statement => [ "UPSERT INTO EVENTS log (host, timestamp, message) VALUES(?, ?, ?)", "host", "@timestamp", "message" ] 14 | } 15 | 16 | } 17 | ``` 18 | -------------------------------------------------------------------------------- /examples/apache-phoenix-thin-hbase-sql.md: -------------------------------------------------------------------------------- 1 | # Example: Apache Phoenix-Thin (HBase SQL) 2 | 3 | **There are special instructions for phoenix-thin. Please read carefully!** 4 | 5 | * Tested with Logstash 5.1.1 / Apache Phoenix 4.9 6 | * HBase and Zookeeper must be both accessible from logstash machine 7 | * At time of writing phoenix-client does not include all the required jars (see https://issues.apache.org/jira/browse/PHOENIX-3476), therefore you must *not* use the driver_jar_path configuration option and instead: 8 | - `mkdir -p vendor/jar/jdbc` in your logstash installation path 9 | - copy `phoenix-queryserver-client-4.9.0-HBase-1.2.jar` from the phoenix distribution into this folder 10 | - download the calcite jar from https://mvnrepository.com/artifact/org.apache.calcite/calcite-avatica/1.6.0 and place it into your `vendor/jar/jdbc` directory 11 | * Use the following configuration as a base. The connection_test => false and connection_test_query are very important and should not be omitted. Phoenix-thin does not appear to support isValid and these are necessary for the connection to be added to the pool and be available. 12 | 13 | ``` 14 | input 15 | { 16 | stdin { } 17 | } 18 | output { 19 | jdbc { 20 | connection_test => false 21 | connection_test_query => "select 1" 22 | driver_class => "org.apache.phoenix.queryserver.client.Driver" 23 | connection_string => "jdbc:phoenix:thin:url=http://localhost:8765;serialization=PROTOBUF" 24 | statement => [ "UPSERT INTO log (host, timestamp, message) VALUES(?, ?, ?)", "host", "@timestamp", "message" ] 25 | } 26 | 27 | } 28 | ``` 29 | -------------------------------------------------------------------------------- /examples/cockroachdb.md: -------------------------------------------------------------------------------- 1 | # Example: CockroachDB 2 | - Tested using postgresql-9.4.1209.jre6.jar 3 | - **Warning** cockroach is known to throw a warning on connection test (at time of writing), thus the connection test is explicitly disabled. 4 | 5 | ``` 6 | input 7 | { 8 | stdin { } 9 | } 10 | output { 11 | jdbc { 12 | driver_jar_path => '/opt/postgresql-9.4.1209.jre6.jar' 13 | connection_test => false 14 | connection_string => 'jdbc:postgresql://127.0.0.1:26257/test?user=root' 15 | statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST (? 
AS timestamp), ?)", "host", "@timestamp", "message" ] 16 | } 17 | } 18 | ``` 19 | -------------------------------------------------------------------------------- /examples/mariadb.md: -------------------------------------------------------------------------------- 1 | # Example: MariaDB 2 | * Tested with Ubuntu 14.04.3 LTS, Server version: 10.1.9-MariaDB-1~trusty-log mariadb.org binary distribution 3 | * Tested using https://downloads.mariadb.com/enterprise/tqge-whfa/connectors/java/connector-java-1.3.2/mariadb-java-client-1.3.2.jar (mariadb-java-client-1.3.2.jar) 4 | ``` 5 | input 6 | { 7 | stdin { } 8 | } 9 | output { 10 | jdbc { 11 | connection_string => "jdbc:mariadb://HOSTNAME/DATABASE?user=USER&password=PASSWORD" 12 | statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, ?, ?)", "host", "@timestamp", "message" ] 13 | } 14 | 15 | } 16 | ``` 17 | -------------------------------------------------------------------------------- /examples/mysql.md: -------------------------------------------------------------------------------- 1 | # Example: Mysql 2 | With thanks to [@jMonsinjon](https://github.com/jMonsinjon) 3 | * Tested with Version 14.14 Distrib 5.5.43, for debian-linux-gnu (x86_64) 4 | * Tested using http://dev.mysql.com/downloads/file.php?id=457911 (mysql-connector-java-5.1.36-bin.jar) 5 | ``` 6 | input 7 | { 8 | stdin { } 9 | } 10 | output { 11 | jdbc { 12 | driver_class => "com.mysql.jdbc.Driver" 13 | connection_string => "jdbc:mysql://HOSTNAME/DATABASE?user=USER&password=PASSWORD" 14 | statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST(? AS timestamp), ?)", "host", "@timestamp", "message" ] 15 | } 16 | } 17 | ``` 18 | -------------------------------------------------------------------------------- /examples/odps.md: -------------------------------------------------------------------------------- 1 | # Example: ODPS 2 | With thanks to [@onesuper](https://github.com/onesuper) 3 | ``` 4 | input 5 | { 6 | stdin { } 7 | } 8 | output { 9 | jdbc { 10 | driver_class => "com.aliyun.odps.jdbc.OdpsDriver" 11 | driver_auto_commit => false 12 | connection_string => "jdbc:odps:http://service.odps.aliyun.com/api?project=meta_dev&loglevel=DEBUG" 13 | username => "abcd" 14 | password => "1234" 15 | max_pool_size => 5 16 | flush_size => 10 17 | statement => [ "INSERT INTO test_logstash VALUES(?, ?, ?);", "host", "@timestamp", "message" ] 18 | } 19 | } 20 | ``` 21 | -------------------------------------------------------------------------------- /examples/oracle.md: -------------------------------------------------------------------------------- 1 | # Example: Oracle 2 | With thanks to [@josemazo](https://github.com/josemazo) 3 | * Tested with Express Edition 11g Release 2 4 | * Tested using http://www.oracle.com/technetwork/database/enterprise-edition/jdbc-112010-090769.html (ojdbc6.jar) 5 | ``` 6 | input 7 | { 8 | stdin { } 9 | } 10 | output { 11 | jdbc { 12 | connection_string => "jdbc:oracle:thin:USER/PASS@HOST:PORT:SID" 13 | statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST (? 
AS timestamp), ?)", "host", "@timestamp", "message" ] 14 | } 15 | } 16 | ``` 17 | -------------------------------------------------------------------------------- /examples/postgres.md: -------------------------------------------------------------------------------- 1 | # Example 1: Postgres 2 | With thanks to [@roflmao](https://github.com/roflmao) 3 | ``` 4 | input 5 | { 6 | stdin { } 7 | } 8 | output { 9 | jdbc { 10 | connection_string => 'jdbc:postgresql://hostname:5432/database?user=username&password=password' 11 | statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST (? AS timestamp), ?)", "host", "@timestamp", "message" ] 12 | } 13 | } 14 | ``` 15 | 16 | # Example 2: If the previous example doesn't work (i.e. connection errors) 17 | 18 | > Tested with https://jdbc.postgresql.org/download/postgresql-42.1.4.jre7.jar saved to /opt/logstash/vendor/jar/jdbc/ 19 | 20 | ``` 21 | input 22 | { 23 | stdin { } 24 | } 25 | output { 26 | jdbc { 27 | connection_string => 'jdbc:postgresql://hostname:5432/database' 28 | username => 'username' 29 | password => 'password' 30 | statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST (? AS timestamp), ?)", "host", "@timestamp", "message" ] 31 | } 32 | } 33 | ``` 34 | -------------------------------------------------------------------------------- /examples/sql-server.md: -------------------------------------------------------------------------------- 1 | # Example: SQL Server 2 | * Tested using http://msdn.microsoft.com/en-gb/sqlserver/aa937724.aspx 3 | * Known to be working with Microsoft SQL Server Always-On Cluster (see https://github.com/theangryangel/logstash-output-jdbc/issues/37). With thanks to [@phr0gz](https://github.com/phr0gz) 4 | ``` 5 | input 6 | { 7 | stdin { } 8 | } 9 | output { 10 | jdbc { 11 | driver_jar_path => '/opt/sqljdbc42.jar' 12 | connection_string => "jdbc:sqlserver://server:1433;databaseName=databasename;user=username;password=password" 13 | statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, ?, ?)", "host", "@timestamp", "message" ] 14 | } 15 | 16 | } 17 | ``` 18 | 19 | Another example, with mixed static strings and parameters, with thanks to [@MassimoSporchia](https://github.com/MassimoSporchia) 20 | ``` 21 | input 22 | { 23 | stdin { } 24 | } 25 | output { 26 | jdbc { 27 | driver_jar_path => '/opt/sqljdbc42.jar' 28 | connection_string => "jdbc:sqlserver://server:1433;databaseName=databasename;user=username;password=password" 29 | statement => [ "INSERT INTO log (host, timestamp, message, comment) VALUES(?, ?, ?, 'static string')", "host", "@timestamp", "message" ] 30 | } 31 | } 32 | ``` 33 | 34 | Note: Windows users need to use windows paths (e.g. `C:\lib\mssql-jdbc-6.4.0.jre8.jar`). Paths with forward slashes will not work. 
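Expressed as configuration, using the hypothetical path above:
```
driver_jar_path => 'C:\lib\mssql-jdbc-6.4.0.jre8.jar'
```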
35 | -------------------------------------------------------------------------------- /examples/sqlite.md: -------------------------------------------------------------------------------- 1 | # Example: SQLite3 2 | * Tested using https://bitbucket.org/xerial/sqlite-jdbc 3 | * SQLite setup - `echo "CREATE table log (host text, timestamp datetime, message text);" | sqlite3 test.db` 4 | ``` 5 | input 6 | { 7 | stdin { } 8 | } 9 | output { 10 | stdout { } 11 | 12 | jdbc { 13 | driver_class => "org.sqlite.JDBC" 14 | connection_string => 'jdbc:sqlite:test.db' 15 | statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, ?, ?)", "host", "@timestamp", "message" ] 16 | } 17 | } 18 | ``` 19 | -------------------------------------------------------------------------------- /lib/logstash-output-jdbc_jars.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require 'logstash/environment' 3 | 4 | root_dir = File.expand_path(File.join(File.dirname(__FILE__), '..')) 5 | LogStash::Environment.load_runtime_jars! File.join(root_dir, 'vendor') 6 | -------------------------------------------------------------------------------- /lib/logstash/outputs/jdbc.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require 'logstash/outputs/base' 3 | require 'logstash/namespace' 4 | require 'concurrent' 5 | require 'stud/interval' 6 | require 'java' 7 | require 'logstash-output-jdbc_jars' 8 | require 'json' 9 | require 'bigdecimal' 10 | 11 | # Write events to a SQL engine, using JDBC. 12 | # 13 | # It is upto the user of the plugin to correctly configure the plugin. This 14 | # includes correctly crafting the SQL statement, and matching the number of 15 | # parameters correctly. 16 | class LogStash::Outputs::Jdbc < LogStash::Outputs::Base 17 | concurrency :shared 18 | 19 | STRFTIME_FMT = '%Y-%m-%d %T.%L'.freeze 20 | 21 | RETRYABLE_SQLSTATE_CLASSES = [ 22 | # Classes of retryable SQLSTATE codes 23 | # Not all in the class will be retryable. However, this is the best that 24 | # we've got right now. 25 | # If a custom state code is required, set it in retry_sql_states. 26 | '08', # Connection Exception 27 | '24', # Invalid Cursor State (Maybe retry-able in some circumstances) 28 | '25', # Invalid Transaction State 29 | '40', # Transaction Rollback 30 | '53', # Insufficient Resources 31 | '54', # Program Limit Exceeded (MAYBE) 32 | '55', # Object Not In Prerequisite State 33 | '57', # Operator Intervention 34 | '58', # System Error 35 | ].freeze 36 | 37 | config_name 'jdbc' 38 | 39 | # Driver class - Reintroduced for https://github.com/theangryangel/logstash-output-jdbc/issues/26 40 | config :driver_class, validate: :string 41 | 42 | # Does the JDBC driver support autocommit? 
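# This value is passed straight to HikariCP's setAutoCommit (see setup_and_test_pool!).
# Set it to false for drivers that do not support auto commit (the ODPS example does this).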
43 | config :driver_auto_commit, validate: :boolean, default: true, required: true 44 | 45 | # Where to find the jar 46 | # Defaults to not required, and to the original behaviour 47 | config :driver_jar_path, validate: :string, required: false 48 | 49 | # jdbc connection string 50 | config :connection_string, validate: :string, required: true 51 | 52 | # jdbc username - optional, maybe in the connection string 53 | config :username, validate: :string, required: false 54 | 55 | # jdbc password - optional, maybe in the connection string 56 | config :password, validate: :string, required: false 57 | 58 | # [ "insert into table (message) values(?)", "%{message}" ] 59 | config :statement, validate: :array, required: true 60 | 61 | # If this is an unsafe statement, use event.sprintf 62 | # This also has potential performance penalties due to having to create a 63 | # new statement for each event, rather than adding to the batch and issuing 64 | # multiple inserts in 1 go 65 | config :unsafe_statement, validate: :boolean, default: false 66 | 67 | # Number of connections in the pool to maintain 68 | config :max_pool_size, validate: :number, default: 5 69 | 70 | # Connection timeout 71 | config :connection_timeout, validate: :number, default: 10000 72 | 73 | # We buffer a certain number of events before flushing that out to SQL. 74 | # This setting controls how many events will be buffered before sending a 75 | # batch of events. 76 | config :flush_size, validate: :number, default: 1000 77 | 78 | # Set initial interval in seconds between retries. Doubled on each retry up to `retry_max_interval` 79 | config :retry_initial_interval, validate: :number, default: 2 80 | 81 | # Maximum time between retries, in seconds 82 | config :retry_max_interval, validate: :number, default: 128 83 | 84 | # Any additional custom, retryable SQL state codes. 85 | # Suitable for configuring retryable custom JDBC SQL state codes. 86 | config :retry_sql_states, validate: :array, default: [] 87 | 88 | # Run a connection test on start. 89 | config :connection_test, validate: :boolean, default: true 90 | 91 | # Connection test and init string, required for some JDBC endpoints 92 | # notable phoenix-thin - see logstash-output-jdbc issue #60 93 | config :connection_test_query, validate: :string, required: false 94 | 95 | # Maximum number of sequential failed attempts, before we stop retrying. 96 | # If set to < 1, then it will infinitely retry. 97 | # At the default values this is a little over 10 minutes 98 | config :max_flush_exceptions, validate: :number, default: 10 99 | 100 | config :max_repeat_exceptions, obsolete: 'This has been replaced by max_flush_exceptions - which behaves slightly differently. Please check the documentation.' 101 | config :max_repeat_exceptions_time, obsolete: 'This is no longer required' 102 | config :idle_flush_time, obsolete: 'No longer necessary under Logstash v5' 103 | 104 | # Allows the whole event to be converted to JSON 105 | config :enable_event_as_json_keyword, validate: :boolean, default: false 106 | 107 | # The magic key used to convert the whole event to JSON. If you need this, and you have the default in your events, you can use this to change your magic keyword. 108 | config :event_as_json_keyword, validate: :string, default: '@event' 109 | 110 | def register 111 | @logger.info('JDBC - Starting up') 112 | 113 | load_jar_files! 
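# @stopping is flipped in close(), and lets the retry loop in retrying_submit
# bail out early via Stud.stoppable_sleep when Logstash is shutting down.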
114 | 115 | @stopping = Concurrent::AtomicBoolean.new(false) 116 | 117 | @logger.warn('JDBC - Flush size is set to > 1000') if @flush_size > 1000 118 | 119 | if @statement.empty? 120 | @logger.error('JDBC - No statement provided. Configuration error.') 121 | end 122 | 123 | if !@unsafe_statement && @statement.length < 2 124 | @logger.error("JDBC - Statement has no parameters. No events will be inserted into SQL as you're not passing any event data. Likely configuration error.") 125 | end 126 | 127 | setup_and_test_pool! 128 | end 129 | 130 | def multi_receive(events) 131 | events.each_slice(@flush_size) do |slice| 132 | retrying_submit(slice) 133 | end 134 | end 135 | 136 | def close 137 | @stopping.make_true 138 | @pool.close 139 | super 140 | end 141 | 142 | private 143 | 144 | def setup_and_test_pool! 145 | # Setup pool 146 | @pool = Java::ComZaxxerHikari::HikariDataSource.new 147 | 148 | @pool.setAutoCommit(@driver_auto_commit) 149 | @pool.setDriverClassName(@driver_class) if @driver_class 150 | 151 | @pool.setJdbcUrl(@connection_string) 152 | 153 | @pool.setUsername(@username) if @username 154 | @pool.setPassword(@password) if @password 155 | 156 | @pool.setMaximumPoolSize(@max_pool_size) 157 | @pool.setConnectionTimeout(@connection_timeout) 158 | 159 | validate_connection_timeout = (@connection_timeout / 1000) / 2 160 | 161 | if !@connection_test_query.nil? and @connection_test_query.length > 1 162 | @pool.setConnectionTestQuery(@connection_test_query) 163 | @pool.setConnectionInitSql(@connection_test_query) 164 | end 165 | 166 | return unless @connection_test 167 | 168 | # Test connection 169 | test_connection = @pool.getConnection 170 | unless test_connection.isValid(validate_connection_timeout) 171 | @logger.warn('JDBC - Connection is not reporting as validate. Either connection is invalid, or driver is not getting the appropriate response.') 172 | end 173 | test_connection.close 174 | end 175 | 176 | def load_jar_files! 177 | # Load jar from driver path 178 | unless @driver_jar_path.nil? 179 | raise LogStash::ConfigurationError, 'JDBC - Could not find jar file at given path. Check config.' unless File.exist? @driver_jar_path 180 | require @driver_jar_path 181 | return 182 | end 183 | 184 | # Revert original behaviour of loading from vendor directory 185 | # if no path given 186 | jarpath = if ENV['LOGSTASH_HOME'] 187 | File.join(ENV['LOGSTASH_HOME'], '/vendor/jar/jdbc/*.jar') 188 | else 189 | File.join(File.dirname(__FILE__), '../../../vendor/jar/jdbc/*.jar') 190 | end 191 | 192 | @logger.trace('JDBC - jarpath', path: jarpath) 193 | 194 | jars = Dir[jarpath] 195 | raise LogStash::ConfigurationError, 'JDBC - No jars found. Have you read the README?' if jars.empty? 196 | 197 | jars.each do |jar| 198 | @logger.trace('JDBC - Loaded jar', jar: jar) 199 | require jar 200 | end 201 | end 202 | 203 | def submit(events) 204 | connection = nil 205 | statement = nil 206 | events_to_retry = [] 207 | 208 | begin 209 | connection = @pool.getConnection 210 | rescue => e 211 | log_jdbc_exception(e, true, nil) 212 | # If a connection is not available, then the server has gone away 213 | # We're not counting that towards our retry count. 214 | return events, false 215 | end 216 | 217 | events.each do |event| 218 | begin 219 | statement = connection.prepareStatement( 220 | (@unsafe_statement == true) ? 
event.sprintf(@statement[0]) : @statement[0] 221 | ) 222 | statement = add_statement_event_params(statement, event) if @statement.length > 1 223 | statement.execute 224 | rescue => e 225 | if retry_exception?(e, event.to_json()) 226 | events_to_retry.push(event) 227 | end 228 | ensure 229 | statement.close unless statement.nil? 230 | end 231 | end 232 | 233 | connection.close unless connection.nil? 234 | 235 | return events_to_retry, true 236 | end 237 | 238 | def retrying_submit(actions) 239 | # Initially we submit the full list of actions 240 | submit_actions = actions 241 | count_as_attempt = true 242 | 243 | attempts = 1 244 | 245 | sleep_interval = @retry_initial_interval 246 | while @stopping.false? and (submit_actions and !submit_actions.empty?) 247 | return if !submit_actions || submit_actions.empty? # If everything's a success we move along 248 | # We retry whatever didn't succeed 249 | submit_actions, count_as_attempt = submit(submit_actions) 250 | 251 | # Everything was a success! 252 | break if !submit_actions || submit_actions.empty? 253 | 254 | if @max_flush_exceptions > 0 and count_as_attempt == true 255 | attempts += 1 256 | 257 | if attempts > @max_flush_exceptions 258 | @logger.error("JDBC - max_flush_exceptions has been reached. #{submit_actions.length} events have been unable to be sent to SQL and are being dropped. See previously logged exceptions for details.") 259 | break 260 | end 261 | end 262 | 263 | # If we're retrying the action sleep for the recommended interval 264 | # Double the interval for the next time through to achieve exponential backoff 265 | Stud.stoppable_sleep(sleep_interval) { @stopping.true? } 266 | sleep_interval = next_sleep_interval(sleep_interval) 267 | end 268 | end 269 | 270 | def add_statement_event_params(statement, event) 271 | @statement[1..-1].each_with_index do |i, idx| 272 | if @enable_event_as_json_keyword == true and i.is_a? String and i == @event_as_json_keyword 273 | value = event.to_json 274 | elsif i.is_a? String 275 | value = event.get(i) 276 | if value.nil? and i =~ /%\{/ 277 | value = event.sprintf(i) 278 | end 279 | else 280 | value = i 281 | end 282 | 283 | case value 284 | when Time 285 | # See LogStash::Timestamp, below, for the why behind strftime. 286 | statement.setString(idx + 1, value.strftime(STRFTIME_FMT)) 287 | when LogStash::Timestamp 288 | # XXX: Using setString as opposed to setTimestamp, because setTimestamp 289 | # doesn't behave correctly in some drivers (Known: sqlite) 290 | # 291 | # Additionally this does not use `to_iso8601`, since some SQL databases 292 | # choke on the 'T' in the string (Known: Derby). 293 | # 294 | # strftime appears to be the most reliable across drivers. 295 | statement.setString(idx + 1, value.time.strftime(STRFTIME_FMT)) 296 | when Fixnum, Integer 297 | if value > 2147483647 or value < -2147483648 298 | statement.setLong(idx + 1, value) 299 | else 300 | statement.setInt(idx + 1, value) 301 | end 302 | when BigDecimal 303 | statement.setBigDecimal(idx + 1, value.to_java) 304 | when Float 305 | statement.setFloat(idx + 1, value) 306 | when String 307 | statement.setString(idx + 1, value) 308 | when Array, Hash 309 | statement.setString(idx + 1, value.to_json) 310 | when true, false 311 | statement.setBoolean(idx + 1, value) 312 | else 313 | statement.setString(idx + 1, nil) 314 | end 315 | end 316 | 317 | statement 318 | end 319 | 320 | def retry_exception?(exception, event) 321 | retrying = (exception.respond_to? 
'getSQLState' and (RETRYABLE_SQLSTATE_CLASSES.include?(exception.getSQLState.to_s[0,2]) or @retry_sql_states.include?(exception.getSQLState))) 322 | log_jdbc_exception(exception, retrying, event) 323 | 324 | retrying 325 | end 326 | 327 | def log_jdbc_exception(exception, retrying, event) 328 | current_exception = exception 329 | log_text = 'JDBC - Exception. ' + (retrying ? 'Retrying' : 'Not retrying') 330 | 331 | log_method = (retrying ? 'warn' : 'error') 332 | 333 | loop do 334 | # TODO reformat event output so that it only shows the fields necessary. 335 | 336 | @logger.send(log_method, log_text, :exception => current_exception, :statement => @statement[0], :event => event) 337 | 338 | if current_exception.respond_to? 'getNextException' 339 | current_exception = current_exception.getNextException() 340 | else 341 | current_exception = nil 342 | end 343 | 344 | break if current_exception == nil 345 | end 346 | end 347 | 348 | def next_sleep_interval(current_interval) 349 | doubled = current_interval * 2 350 | doubled > @retry_max_interval ? @retry_max_interval : doubled 351 | end 352 | end # class LogStash::Outputs::jdbc 353 | -------------------------------------------------------------------------------- /log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /logstash-output-jdbc.gemspec: -------------------------------------------------------------------------------- 1 | Gem::Specification.new do |s| 2 | s.name = 'logstash-output-jdbc' 3 | s.version = '5.4.0' 4 | s.licenses = ['Apache License (2.0)'] 5 | s.summary = 'This plugin allows you to output to SQL, via JDBC' 6 | s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install 'logstash-output-jdbc'. 
This gem is not a stand-alone program" 7 | s.authors = ['the_angry_angel'] 8 | s.email = 'karl+github@theangryangel.co.uk' 9 | s.homepage = 'https://github.com/theangryangel/logstash-output-jdbc' 10 | s.require_paths = ['lib'] 11 | 12 | # Files 13 | s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT'] 14 | # Tests 15 | s.test_files = s.files.grep(%r{^(test|spec|features)/}) 16 | 17 | # Special flag to let us know this is actually a logstash plugin 18 | s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'output' } 19 | 20 | # Gem dependencies 21 | # 22 | s.add_runtime_dependency 'logstash-core-plugin-api', ">= 1.60", "<= 2.99" 23 | s.add_runtime_dependency 'logstash-codec-plain' 24 | s.add_development_dependency 'logstash-devutils' 25 | 26 | s.requirements << "jar 'com.zaxxer:HikariCP', '2.7.2'" 27 | s.requirements << "jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.6.2'" 28 | 29 | s.add_development_dependency 'jar-dependencies' 30 | s.add_development_dependency 'ruby-maven', '~> 3.3' 31 | s.add_development_dependency 'rubocop', '0.41.2' 32 | end 33 | -------------------------------------------------------------------------------- /scripts/minutes_to_retries.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby -w 2 | 3 | seconds_to_reach = 10 * 60 4 | retry_max_interval = 128 5 | 6 | current_interval = 2 7 | total_interval = 0 8 | exceptions_count = 1 9 | 10 | loop do 11 | break if total_interval > seconds_to_reach 12 | exceptions_count += 1 13 | 14 | current_interval = current_interval * 2 > retry_max_interval ? retry_max_interval : current_interval * 2 15 | 16 | total_interval += current_interval 17 | end 18 | 19 | puts exceptions_count 20 | -------------------------------------------------------------------------------- /scripts/travis-before_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | wget http://search.maven.org/remotecontent?filepath=org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar -O /tmp/derby.jar 3 | 4 | sudo apt-get install mysql-server postgresql-client postgresql -qq -y 5 | echo "create database logstash; grant all privileges on logstash.* to 'logstash'@'localhost' identified by 'logstash'; flush privileges;" | sudo -u root mysql 6 | echo "create user logstash PASSWORD 'logstash'; create database logstash; grant all privileges on database logstash to logstash;" | sudo -u postgres psql 7 | 8 | wget http://search.maven.org/remotecontent?filepath=mysql/mysql-connector-java/5.1.38/mysql-connector-java-5.1.38.jar -O /tmp/mysql.jar 9 | wget http://search.maven.org/remotecontent?filepath=org/xerial/sqlite-jdbc/3.8.11.2/sqlite-jdbc-3.8.11.2.jar -O /tmp/sqlite.jar 10 | wget http://central.maven.org/maven2/org/postgresql/postgresql/42.1.4/postgresql-42.1.4.jar -O /tmp/postgres.jar 11 | -------------------------------------------------------------------------------- /scripts/travis-variables.sh: -------------------------------------------------------------------------------- 1 | export JDBC_DERBY_JAR=/tmp/derby.jar 2 | export JDBC_MYSQL_JAR=/tmp/mysql.jar 3 | export JDBC_SQLITE_JAR=/tmp/sqlite.jar 4 | export JDBC_POSTGRES_JAR=/tmp/postgres.jar 5 | 6 | -------------------------------------------------------------------------------- /spec/jdbc_spec_helper.rb: -------------------------------------------------------------------------------- 1 | require 
'logstash/devutils/rspec/spec_helper' 2 | require 'logstash/outputs/jdbc' 3 | require 'stud/temporary' 4 | require 'java' 5 | require 'securerandom' 6 | 7 | RSpec::Support::ObjectFormatter.default_instance.max_formatted_output_length = 80000 8 | 9 | RSpec.configure do |c| 10 | 11 | def start_service(name) 12 | cmd = "sudo /etc/init.d/#{name}* start" 13 | 14 | `which systemctl` 15 | if $?.success? 16 | cmd = "sudo systemctl start #{name}" 17 | end 18 | 19 | `#{cmd}` 20 | end 21 | 22 | def stop_service(name) 23 | cmd = "sudo /etc/init.d/#{name}* stop" 24 | 25 | `which systemctl` 26 | if $?.success? 27 | cmd = "sudo systemctl stop #{name}" 28 | end 29 | 30 | `#{cmd}` 31 | end 32 | 33 | end 34 | 35 | RSpec.shared_context 'rspec setup' do 36 | it 'ensure jar is available' do 37 | expect(ENV[jdbc_jar_env]).not_to be_nil, "#{jdbc_jar_env} not defined, required to run tests" 38 | expect(File.exist?(ENV[jdbc_jar_env])).to eq(true), "#{jdbc_jar_env} defined, but not valid" 39 | end 40 | end 41 | 42 | RSpec.shared_context 'when initializing' do 43 | it 'shouldn\'t register with a missing jar file' do 44 | jdbc_settings['driver_jar_path'] = nil 45 | plugin = LogStash::Plugin.lookup('output', 'jdbc').new(jdbc_settings) 46 | expect { plugin.register }.to raise_error(LogStash::ConfigurationError) 47 | end 48 | end 49 | 50 | RSpec.shared_context 'when outputting messages' do 51 | let(:logger) { 52 | double("logger") 53 | } 54 | 55 | let(:jdbc_test_table) do 56 | 'logstash_output_jdbc_test' 57 | end 58 | 59 | let(:jdbc_drop_table) do 60 | "DROP TABLE #{jdbc_test_table}" 61 | end 62 | 63 | let(:jdbc_statement_fields) do 64 | [ 65 | {db_field: "created_at", db_type: "datetime", db_value: '?', event_field: '@timestamp'}, 66 | {db_field: "message", db_type: "varchar(512)", db_value: '?', event_field: 'message'}, 67 | {db_field: "message_sprintf", db_type: "varchar(512)", db_value: '?', event_field: 'sprintf-%{message}'}, 68 | {db_field: "static_int", db_type: "int", db_value: '?', event_field: 'int'}, 69 | {db_field: "static_bigint", db_type: "bigint", db_value: '?', event_field: 'bigint'}, 70 | {db_field: "static_float", db_type: "float", db_value: '?', event_field: 'float'}, 71 | {db_field: "static_bool", db_type: "boolean", db_value: '?', event_field: 'bool'}, 72 | {db_field: "static_bigdec", db_type: "decimal", db_value: '?', event_field: 'bigdec'} 73 | ] 74 | end 75 | 76 | let(:jdbc_create_table) do 77 | fields = jdbc_statement_fields.collect { |entry| "#{entry[:db_field]} #{entry[:db_type]} not null" }.join(", ") 78 | 79 | "CREATE table #{jdbc_test_table} (#{fields})" 80 | end 81 | 82 | let(:jdbc_drop_table) do 83 | "DROP table #{jdbc_test_table}" 84 | end 85 | 86 | let(:jdbc_statement) do 87 | fields = jdbc_statement_fields.collect { |entry| "#{entry[:db_field]}" }.join(", ") 88 | values = jdbc_statement_fields.collect { |entry| "#{entry[:db_value]}" }.join(", ") 89 | statement = jdbc_statement_fields.collect { |entry| entry[:event_field] } 90 | 91 | statement.insert(0, "insert into #{jdbc_test_table} (#{fields}) values(#{values})") 92 | end 93 | 94 | let(:systemd_database_service) do 95 | nil 96 | end 97 | 98 | let(:event) do 99 | # TODO: Auto generate fields from jdbc_statement_fields 100 | LogStash::Event.new({ 101 | message: "test-message #{SecureRandom.uuid}", 102 | float: 12.1, 103 | bigint: 4000881632477184, 104 | bool: true, 105 | int: 1, 106 | bigdec: BigDecimal.new("123.123") 107 | }) 108 | end 109 | 110 | let(:plugin) do 111 | # Setup logger 112 | allow(LogStash::Outputs::Jdbc).to 
receive(:logger).and_return(logger) 113 | 114 | # XXX: Suppress reflection logging. There has to be a better way around this. 115 | allow(logger).to receive(:debug).with(/config LogStash::/) 116 | 117 | # Suppress beta warnings. 118 | allow(logger).to receive(:info).with(/Please let us know if you find bugs or have suggestions on how to improve this plugin./) 119 | 120 | # Suppress start up messages. 121 | expect(logger).to receive(:info).once.with(/JDBC - Starting up/) 122 | 123 | # Setup plugin 124 | output = LogStash::Plugin.lookup('output', 'jdbc').new(jdbc_settings) 125 | output.register 126 | 127 | output 128 | end 129 | 130 | before :each do 131 | # Setup table 132 | c = plugin.instance_variable_get(:@pool).getConnection 133 | 134 | # Derby doesn't support IF EXISTS. 135 | # Seems like the quickest solution. Bleurgh. 136 | begin 137 | stmt = c.createStatement 138 | stmt.executeUpdate(jdbc_drop_table) 139 | rescue 140 | # noop 141 | ensure 142 | stmt.close 143 | 144 | stmt = c.createStatement 145 | stmt.executeUpdate(jdbc_create_table) 146 | stmt.close 147 | c.close 148 | end 149 | end 150 | 151 | # Delete table after each 152 | after :each do 153 | c = plugin.instance_variable_get(:@pool).getConnection 154 | 155 | stmt = c.createStatement 156 | stmt.executeUpdate(jdbc_drop_table) 157 | stmt.close 158 | c.close 159 | end 160 | 161 | it 'should save a event' do 162 | expect { plugin.multi_receive([event]) }.to_not raise_error 163 | 164 | # Verify the number of items in the output table 165 | c = plugin.instance_variable_get(:@pool).getConnection 166 | 167 | # TODO replace this simple count with a check of the actual contents 168 | 169 | stmt = c.prepareStatement("select count(*) as total from #{jdbc_test_table} where message = ?") 170 | stmt.setString(1, event.get('message')) 171 | rs = stmt.executeQuery 172 | count = 0 173 | count = rs.getInt('total') while rs.next 174 | stmt.close 175 | c.close 176 | 177 | expect(count).to eq(1) 178 | end 179 | 180 | it 'should not save event, and log an unretryable exception' do 181 | e = event 182 | original_event = e.get('message') 183 | e.set('message', nil) 184 | 185 | expect(logger).to receive(:error).once.with(/JDBC - Exception. Not retrying/, Hash) 186 | expect { plugin.multi_receive([event]) }.to_not raise_error 187 | 188 | e.set('message', original_event) 189 | end 190 | 191 | it 'it should retry after a connection loss, and log a warning' do 192 | skip "does not run as a service, or known issue with test" if systemd_database_service.nil? 193 | 194 | p = plugin 195 | 196 | # Check that everything is fine right now 197 | expect { p.multi_receive([event]) }.not_to raise_error 198 | 199 | stop_service(systemd_database_service) 200 | 201 | # Start a thread to restart the service after the fact. 202 | t = Thread.new(systemd_database_service) { |systemd_database_service| 203 | sleep 20 204 | 205 | start_service(systemd_database_service) 206 | } 207 | 208 | t.run 209 | 210 | expect(logger).to receive(:warn).at_least(:once).with(/JDBC - Exception. 
Retrying/, Hash) 211 | expect { p.multi_receive([event]) }.to_not raise_error 212 | 213 | # Wait for the thread to finish 214 | t.join 215 | end 216 | end 217 | -------------------------------------------------------------------------------- /spec/outputs/jdbc_derby_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative '../jdbc_spec_helper' 2 | 3 | describe 'logstash-output-jdbc: derby', if: ENV['JDBC_DERBY_JAR'] do 4 | include_context 'rspec setup' 5 | include_context 'when outputting messages' 6 | 7 | let(:jdbc_jar_env) do 8 | 'JDBC_DERBY_JAR' 9 | end 10 | 11 | let(:jdbc_statement_fields) do 12 | [ 13 | {db_field: "created_at", db_type: "timestamp", db_value: 'CAST(? as timestamp)', event_field: '@timestamp'}, 14 | {db_field: "message", db_type: "varchar(512)", db_value: '?', event_field: 'message'}, 15 | {db_field: "message_sprintf", db_type: "varchar(512)", db_value: '?', event_field: 'sprintf-%{message}'}, 16 | {db_field: "static_int", db_type: "int", db_value: '?', event_field: 'int'}, 17 | {db_field: "static_bigint", db_type: "bigint", db_value: '?', event_field: 'bigint'}, 18 | {db_field: "static_float", db_type: "float", db_value: '?', event_field: 'float'}, 19 | {db_field: "static_bool", db_type: "boolean", db_value: '?', event_field: 'bool'}, 20 | {db_field: "static_bigdec", db_type: "decimal", db_value: '?', event_field: 'bigdec'} 21 | ] 22 | end 23 | 24 | let(:jdbc_settings) do 25 | { 26 | 'driver_class' => 'org.apache.derby.jdbc.EmbeddedDriver', 27 | 'connection_string' => 'jdbc:derby:memory:testdb;create=true', 28 | 'driver_jar_path' => ENV[jdbc_jar_env], 29 | 'statement' => jdbc_statement, 30 | 'max_flush_exceptions' => 1 31 | } 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /spec/outputs/jdbc_mysql_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative '../jdbc_spec_helper' 2 | 3 | describe 'logstash-output-jdbc: mysql', if: ENV['JDBC_MYSQL_JAR'] do 4 | include_context 'rspec setup' 5 | include_context 'when outputting messages' 6 | 7 | let(:jdbc_jar_env) do 8 | 'JDBC_MYSQL_JAR' 9 | end 10 | 11 | let(:systemd_database_service) do 12 | 'mysql' 13 | end 14 | 15 | let(:jdbc_settings) do 16 | { 17 | 'driver_class' => 'com.mysql.jdbc.Driver', 18 | 'connection_string' => 'jdbc:mysql://localhost/logstash?user=logstash&password=logstash', 19 | 'driver_jar_path' => ENV[jdbc_jar_env], 20 | 'statement' => jdbc_statement, 21 | 'max_flush_exceptions' => 1 22 | } 23 | end 24 | end 25 | -------------------------------------------------------------------------------- /spec/outputs/jdbc_postgres_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative '../jdbc_spec_helper' 2 | 3 | describe 'logstash-output-jdbc: postgres', if: ENV['JDBC_POSTGRES_JAR'] do 4 | include_context 'rspec setup' 5 | include_context 'when outputting messages' 6 | 7 | let(:jdbc_jar_env) do 8 | 'JDBC_POSTGRES_JAR' 9 | end 10 | 11 | # TODO: Postgres doesnt kill connections fast enough for the test to pass 12 | # Investigate options. 13 | 14 | #let(:systemd_database_service) do 15 | # 'postgresql' 16 | #end 17 | 18 | let(:jdbc_statement_fields) do 19 | [ 20 | {db_field: "created_at", db_type: "timestamp", db_value: 'CAST(? 
as timestamp)', event_field: '@timestamp'}, 21 | {db_field: "message", db_type: "varchar(512)", db_value: '?', event_field: 'message'}, 22 | {db_field: "message_sprintf", db_type: "varchar(512)", db_value: '?', event_field: 'sprintf-%{message}'}, 23 | {db_field: "static_int", db_type: "int", db_value: '?', event_field: 'int'}, 24 | {db_field: "static_bigint", db_type: "bigint", db_value: '?', event_field: 'bigint'}, 25 | {db_field: "static_float", db_type: "float", db_value: '?', event_field: 'float'}, 26 | {db_field: "static_bool", db_type: "boolean", db_value: '?', event_field: 'bool'}, 27 | {db_field: "static_bigdec", db_type: "decimal", db_value: '?', event_field: 'bigdec'} 28 | 29 | ] 30 | end 31 | 32 | let(:jdbc_settings) do 33 | { 34 | 'driver_class' => 'org.postgresql.Driver', 35 | 'connection_string' => 'jdbc:postgresql://localhost/logstash?user=logstash&password=logstash', 36 | 'driver_jar_path' => ENV[jdbc_jar_env], 37 | 'statement' => jdbc_statement, 38 | 'max_flush_exceptions' => 1 39 | } 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /spec/outputs/jdbc_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative '../jdbc_spec_helper' 2 | 3 | describe LogStash::Outputs::Jdbc do 4 | context 'when initializing' do 5 | it 'shouldn\'t register without a config' do 6 | expect do 7 | LogStash::Plugin.lookup('output', 'jdbc').new 8 | end.to raise_error(LogStash::ConfigurationError) 9 | end 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /spec/outputs/jdbc_sqlite_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative '../jdbc_spec_helper' 2 | 3 | describe 'logstash-output-jdbc: sqlite', if: ENV['JDBC_SQLITE_JAR'] do 4 | JDBC_SQLITE_FILE = '/tmp/logstash_output_jdbc_test.db'.freeze 5 | 6 | before(:context) do 7 | File.delete(JDBC_SQLITE_FILE) if File.exist? JDBC_SQLITE_FILE 8 | end 9 | 10 | include_context 'rspec setup' 11 | include_context 'when outputting messages' 12 | 13 | let(:jdbc_jar_env) do 14 | 'JDBC_SQLITE_JAR' 15 | end 16 | 17 | let(:jdbc_settings) do 18 | { 19 | 'driver_class' => 'org.sqlite.JDBC', 20 | 'connection_string' => "jdbc:sqlite:#{JDBC_SQLITE_FILE}", 21 | 'driver_jar_path' => ENV[jdbc_jar_env], 22 | 'statement' => jdbc_statement, 23 | 'max_flush_exceptions' => 1 24 | } 25 | end 26 | end 27 | --------------------------------------------------------------------------------