├── .gitignore ├── LICENSE.txt ├── README.md ├── Rakefile ├── lib └── logstash │ └── outputs │ └── hdfs.rb └── logstash-output-hdfs.gemspec /.gitignore: -------------------------------------------------------------------------------- 1 | *.gem 2 | *.rbc 3 | .bundle 4 | .config 5 | coverage 6 | InstalledFiles 7 | lib/bundler/man 8 | pkg 9 | rdoc 10 | spec/reports 11 | test/tmp 12 | test/version_tmp 13 | tmp 14 | 15 | # YARD artifacts 16 | .yardoc 17 | _yardoc 18 | doc/ 19 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 
30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 
62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Logstash HDFS plugin 2 | 3 | An HDFS plugin for [Logstash](http://logstash.net). This plugin is provided as an external plugin (see Usage below) and is not part of the Logstash project. 4 | 5 | # Usage 6 | 7 | ## Logstash 1.4.x 8 | 9 | Run logstash with the `--pluginpath` (`-p`) command line argument to let logstash know where the plugin is. Also, you need to let Java know where your Hadoop JARs are, so set the `CLASSPATH` variable correctly. 
10 | On Logstash 1.4.x use the following command (adjusting paths as necessary of course): 11 | 12 | LD_LIBRARY_PATH="/usr/lib/hadoop/lib/native" GEM_HOME=./logstash-1.4.2/vendor/bundle/jruby/1.9 CLASSPATH=$(find ./logstash-1.4.2/vendor/jar -type f -name '*.jar'|tr '\n' ':'):$(find /usr/lib/hadoop-hdfs -type f -name '*.jar' | tr '\n' ':'):$(find /usr/lib/hadoop -type f -name '*.jar' | tr '\n' ':'):/etc/hadoop/conf java org.jruby.Main -I./logstash-1.4.2/lib ./logstash-1.4.2/lib/logstash/runner.rb agent -f logstash.conf -p ./logstash-hdfs/lib 13 | 14 | Note that logstash is not executed with `java -jar` because executable jars ignore external classpath. Instead we put the logstash jar on the class path and call the runner class. 15 | Important: the Hadoop configuration dir containing `hdfs-site.xml` must be on the classpath. 16 | 17 | ## Logstash 1.5.x 18 | 19 | Logstash 1.5.x supports distribution of plugins as rubygems which makes life a lot easier. To install the plugin from the version on rubygems: 20 | 21 | $LOGSTASH_DIR/bin/plugin install logstash-output-hdfs 22 | 23 | Or from source (after checking out the source, run in checkout directory): 24 | 25 | $LOGSTASH_DIR/bin/plugin build logstash-output-hdfs.gemspec 26 | 27 | Then run logstash with the following command (change jar versions and locations as per your hadoop installation): 28 | 29 | LD_LIBRARY_PATH="$HADOOP_DIR/lib/native" 
CLASSPATH=$HADOOP_DIR/share/hadoop/common/lib/htrace-core-3.0.4.jar:$HADOOP_DIR/share/hadoop/common/lib/protobuf-java-2.5.0.jar:$HADOOP_DIR/share/hadoop/common/lib/commons-cli-1.2.jar:$HADOOP_DIR/share/hadoop/common/lib/slf4j-api-1.7.5.jar:$HADOOP_DIR/share/hadoop/common/lib/hadoop-auth-2.6.0.jar:$HADOOP_DIR/share/hadoop/common/lib/commons-lang-2.6.jar:$HADOOP_DIR/share/hadoop/common/lib/commons-configuration-1.6.jar:$HADOOP_DIR/share/hadoop/common/lib/commons-collections-3.2.1.jar:$HADOOP_DIR/share/hadoop/common/lib/guava-11.0.2.jar:$HADOOP_DIR/share/hadoop/common/lib/commons-logging-1.1.3.jar:$HADOOP_DIR/share/hadoop/hdfs/hadoop-hdfs-2.6.0.jar:$HADOOP_DIR/share/hadoop/common/hadoop-common-2.6.0.jar:$HADOOP_DIR/conf $LOGSTASH_DIR/bin/logstash agent -f logstash.conf 30 | 31 | Hadoop paths may need adjustments depending on the distribution and version you are using. The important thing is to have `hadoop-hdfs`, `hadoop-common` and the dependencies from `common/lib` on the classpath. 32 | 33 | The following command line will work on most distributions (but will take a little longer to load since it loads many unnecessary jars) - note there may be conflicts with other plugins (e.g. elasticsearch) if you load all the jars: 34 | 35 | LD_LIBRARY_PATH="/usr/lib/hadoop/lib/native" CLASSPATH=$(hadoop classpath) $LOGSTASH_DIR/bin/logstash agent -f logstash.conf 36 | 37 | 38 | # HDFS Configuration 39 | 40 | By default, the plugin will load Hadoop's configuration from the classpath. However, a logstash configuration option named 'hadoop_config_resources' has 41 | been added that will allow the user to pass in multiple configuration locations to override this default configuration. 
42 | 43 | output { 44 | hdfs { 45 | path => "/path/to/output_file.log" 46 | hadoop_config_resources => ['path/to/configuration/on/classpath/hdfs-site.xml', 'path/to/configuration/on/classpath/core-site.xml'] 47 | } 48 | } 49 | 50 | 51 | # HDFS Append and rewriting files 52 | 53 | Please note, HDFS versions prior to 2.x do not properly support append. See [HADOOP-8230](https://issues.apache.org/jira/browse/HADOOP-8230) for reference. 54 | To enable append on HDFS, set _dfs.support.append_ in hdfs-site.xml (2.x) or _dfs.support.broken.append_ on 1.x, and use the *enable_append* config option: 55 | 56 | output { 57 | hdfs { 58 | path => "/path/to/output_file.log" 59 | enable_append => true 60 | } 61 | } 62 | 63 | If append is not supported and the file already exists, the plugin will cowardly refuse to reopen the file for writing unless *enable_reopen* is set to true. 64 | This is probably a very bad idea, you have been warned! 65 | 66 | # HDFS Flush 67 | 68 | Flush and sync don't actually work as promised on HDFS (see [HDFS-536](https://issues.apache.org/jira/browse/HDFS-536)). 69 | In Hadoop 2.x, `hflush` provides flush-like functionality and the plugin will use `hflush` if it is available. 70 | Nevertheless, flushing code has been left in the plugin in case `flush` and `sync` will work on some HDFS implementation. 71 | 72 | # License 73 | 74 | The plugin is released under the Apache V2 license. 
75 | 76 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | @files=[] 2 | 3 | task :default do 4 | system("rake -T") 5 | end 6 | 7 | require "logstash/devutils/rake" 8 | -------------------------------------------------------------------------------- /lib/logstash/outputs/hdfs.rb: -------------------------------------------------------------------------------- 1 | require "logstash/namespace" 2 | require "logstash/outputs/base" 3 | 4 | # HDFS output. 5 | # 6 | # Write events to files to HDFS. You can use fields from the 7 | # event as parts of the filename. 8 | class LogStash::Outputs::HDFS < LogStash::Outputs::Base 9 | 10 | config_name "hdfs" 11 | 12 | # The path to the file to write. Event fields can be used here, 13 | # like "/var/log/logstash/%{@source_host}/%{application}" 14 | config :path, :validate => :string, :required => true 15 | 16 | # The format to use when writing events to the file. This value 17 | # supports any string and can include %{name} and other dynamic 18 | # strings. 19 | # 20 | # If this setting is omitted, the full json representation of the 21 | # event will be written as a single line. 22 | config :message_format, :validate => :string 23 | 24 | # Flush interval for flushing writes to log files. 0 will flush on every message 25 | # Flush doesn't actually work on most Hadoop 1.x versions. if you really care about flush, use 2.x 26 | config :flush_interval, :validate => :number, :default => 60 27 | 28 | # Enable the use of append. This only works with Hadoop 2.x dfs.support.append or 1.x with dfs.support.broken.append 29 | config :enable_append, :validate => :boolean, :default => false 30 | 31 | # Enable re-opening files. This is a really a bad idea because HDFS will truncate files. 
Only use if you know what you're doing 32 | config :enable_reopen, :validate => :boolean, :default => false 33 | 34 | # The classpath resource locations of the hadoop configuration 35 | config :hadoop_config_resources, :validate => :array 36 | 37 | public 38 | def register 39 | require "java" 40 | java_import "org.apache.hadoop.fs.Path" 41 | java_import "org.apache.hadoop.fs.FileSystem" 42 | java_import "org.apache.hadoop.conf.Configuration" 43 | 44 | @files = {} 45 | now = Time.now 46 | @last_flush_cycle = now 47 | @last_stale_cleanup_cycle = now 48 | flush_interval = @flush_interval.to_i 49 | @stale_cleanup_interval = 10 50 | conf = Configuration.new 51 | 52 | if @hadoop_config_resources 53 | @hadoop_config_resources.each { |resource| 54 | conf.addResource(resource) 55 | } 56 | end 57 | 58 | @logger.info "Using Hadoop configuration: #{conf.get("fs.defaultFS")}" 59 | @hdfs = FileSystem.get(conf) 60 | end # def register 61 | 62 | public 63 | def receive(event) 64 | return unless output?(event) 65 | out = get_output_stream(event.sprintf(@path)) 66 | 67 | if @message_format 68 | output = event.sprintf(@message_format) 69 | else 70 | output = event.to_json 71 | end 72 | output += "\n" unless output.end_with? 
"\n" 73 | 74 | out.write(output) 75 | 76 | flush(out) 77 | close_stale_files 78 | end # def receive 79 | 80 | def teardown 81 | @logger.debug("Teardown: closing files") 82 | @files.each do |path, fd| 83 | begin 84 | fd.close 85 | @logger.debug("Closed file #{path}", :fd => fd) 86 | rescue Exception => e 87 | @logger.error("Excpetion while flushing and closing files.", :exception => e) 88 | end 89 | end 90 | finished 91 | end 92 | 93 | private 94 | def get_output_stream(path_string) 95 | return @files[path_string] if @files.has_key?(path_string) 96 | path = Path.new(path_string) 97 | if @hdfs.exists(path) 98 | if enable_append 99 | begin 100 | dfs_data_output_stream = @hdfs.append(path) 101 | rescue java.io.IOException => e 102 | logger.error("Error opening path for append, trying to recover lease", :exception => e) 103 | recover_lease(path) 104 | retry 105 | end 106 | elsif enable_reopen 107 | logger.warn "Overwritting HDFS file", :path => path_string 108 | dfs_data_output_stream = @hdfs.create(path, true) 109 | else 110 | raise IOError, "Cowardly refusing to open pre existing file (#{path_string}) because HDFS will truncate the file!" 
111 | end 112 | else 113 | dfs_data_output_stream = @hdfs.create(path) 114 | end 115 | @files[path_string] = DFSOutputStreamWrapper.new(dfs_data_output_stream) 116 | end 117 | 118 | def flush(fd) 119 | if flush_interval > 0 120 | flush_pending_files 121 | else 122 | fd.flush 123 | end 124 | end 125 | 126 | # every flush_interval seconds or so (triggered by events, but if there are no events there's no point flushing files anyway) 127 | def flush_pending_files 128 | return unless Time.now - @last_flush_cycle >= flush_interval 129 | @logger.debug("Starting flush cycle") 130 | @files.each do |path, fd| 131 | @logger.debug("Flushing file", :path => path, :fd => fd) 132 | fd.flush 133 | end 134 | @last_flush_cycle = Time.now 135 | end 136 | 137 | # every 10 seconds or so (triggered by events, but if there are no events there's no point closing files anyway) 138 | def close_stale_files 139 | now = Time.now 140 | return unless now - @last_stale_cleanup_cycle >= @stale_cleanup_interval 141 | @logger.info("Starting stale files cleanup cycle", :files => @files) 142 | inactive_files = @files.select { |path, file| not file.active } 143 | @logger.debug("%d stale files found" % inactive_files.count, :inactive_files => inactive_files) 144 | inactive_files.each do |path, file| 145 | @logger.info("Closing file %s" % path) 146 | file.close 147 | @files.delete(path) 148 | end 149 | # mark all files as inactive, a call to write will mark them as active again 150 | @files.each { |path, fd| fd.active = false } 151 | @last_stale_cleanup_cycle = now 152 | end 153 | 154 | def recover_lease(path) 155 | is_file_closed_available = @hdfs.respond_to? :isFileClosed 156 | 157 | # Not all Hadoop file systems support recover lease (e.g. LocalFileSystem) 158 | return true unless @hdfs.respond_to? 
:recoverLease 159 | 160 | start = Time.now 161 | first_retry = true 162 | 163 | until start - Time.now > 900 # 15 minutes timeout 164 | recovered = @hdfs.recoverLease(path) 165 | return true if recovered 166 | # first retry is fast 167 | if first_retry 168 | sleep 4 169 | first_retry = false 170 | next 171 | end 172 | 173 | # on further retries we backoff and spin on isFileClosed in hopes of catching an early break 174 | 61.times do 175 | return if is_file_closed_available and @hdfs.isFileClosed(path) 176 | sleep 1 177 | end 178 | end 179 | false 180 | end 181 | 182 | class DFSOutputStreamWrapper 183 | # reflection locks java objects, so only do this once 184 | if org.apache.hadoop.fs.FSDataOutputStream.instance_methods.include? :hflush 185 | # hadoop 2.x uses hflush 186 | FLUSH_METHOD = :hflush 187 | else 188 | FLUSH_METHOD = :flush 189 | end 190 | attr_accessor :active 191 | def initialize(output_stream) 192 | @output_stream = output_stream 193 | end 194 | def close 195 | @output_stream.close 196 | rescue IOException => e 197 | logger.error("Failed to close file", :exception => e) 198 | end 199 | def flush 200 | if FLUSH_METHOD == :hflush 201 | @output_stream.hflush 202 | else 203 | @output_stream.flush 204 | @output_stream.sync 205 | end 206 | rescue 207 | 208 | end 209 | def write(str) 210 | bytes = str.to_java_bytes 211 | @output_stream.write(bytes, 0, bytes.length) 212 | @active = true 213 | end 214 | end 215 | end # class LogStash::Outputs::File 216 | 217 | -------------------------------------------------------------------------------- /logstash-output-hdfs.gemspec: -------------------------------------------------------------------------------- 1 | Gem::Specification.new do |s| 2 | 3 | s.name = 'logstash-output-hdfs' 4 | s.version = '0.2.3' 5 | s.licenses = ['Apache License (2.0)'] 6 | s.summary = "$summary" 7 | s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install 
gemname. This gem is not a stand-alone program" 8 | s.authors = ["Avishai Ish-Shalom"] 9 | s.email = 'avishai@fewbytes.com' 10 | s.homepage = "https://github.com/avishai-ish-shalom/logstash-hdfs" 11 | s.require_paths = ["lib"] 12 | 13 | # Files 14 | s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*') 15 | 16 | # Tests 17 | s.test_files = s.files.grep(%r{^(test|spec|features)/}) 18 | 19 | # Special flag to let us know this is actually a logstash plugin 20 | s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" } 21 | 22 | # Gem dependencies 23 | s.add_runtime_dependency 'logstash-core', '>= 1.4.0', '< 2.0.0' 24 | 25 | s.add_development_dependency 'logstash-devutils' 26 | end 27 | --------------------------------------------------------------------------------