├── .github ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE.md └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── CONTRIBUTORS ├── Gemfile ├── JAR_VERSION ├── LICENSE ├── NOTICE.TXT ├── README.md ├── Rakefile ├── build.gradle ├── docs └── index.asciidoc ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── lib ├── filewatch │ ├── bootstrap.rb │ ├── discoverer.rb │ ├── helper.rb │ ├── observing_base.rb │ ├── observing_read.rb │ ├── observing_tail.rb │ ├── processor.rb │ ├── read_mode │ │ ├── handlers │ │ │ ├── base.rb │ │ │ ├── read_file.rb │ │ │ └── read_zip_file.rb │ │ └── processor.rb │ ├── settings.rb │ ├── sincedb_collection.rb │ ├── sincedb_record_serializer.rb │ ├── sincedb_value.rb │ ├── stat │ │ ├── generic.rb │ │ └── windows_path.rb │ ├── tail_mode │ │ ├── handlers │ │ │ ├── base.rb │ │ │ ├── create.rb │ │ │ ├── create_initial.rb │ │ │ ├── delete.rb │ │ │ ├── grow.rb │ │ │ ├── shrink.rb │ │ │ ├── timeout.rb │ │ │ └── unignore.rb │ │ └── processor.rb │ ├── watch.rb │ ├── watched_file.rb │ ├── watched_files_collection.rb │ └── winhelper.rb └── logstash │ └── inputs │ ├── delete_completed_file_handler.rb │ ├── file.rb │ ├── file │ └── patch.rb │ ├── file_listener.rb │ ├── friendly_durations.rb │ └── log_completed_file_handler.rb ├── logstash-input-file.gemspec ├── run_until_fail.sh ├── settings.gradle ├── spec ├── file_ext │ └── file_ext_windows_spec.rb ├── filewatch │ ├── buftok_spec.rb │ ├── read_mode_handlers_read_file_spec.rb │ ├── reading_spec.rb │ ├── rotate_spec.rb │ ├── settings_spec.rb │ ├── sincedb_record_serializer_spec.rb │ ├── spec_helper.rb │ ├── tailing_spec.rb │ ├── watched_file_spec.rb │ ├── watched_files_collection_spec.rb │ └── winhelper_spec.rb ├── fixtures │ ├── compressed.log.gz │ ├── compressed.log.gzip │ ├── invalid_utf8.gbk.log │ ├── no-final-newline.log │ └── uncompressed.log ├── helpers │ ├── logging_level_helper.rb │ ├── rspec_wait_handler_helper.rb │ 
└── spec_helper.rb └── inputs │ ├── file_read_spec.rb │ ├── file_tail_spec.rb │ └── friendly_durations_spec.rb └── src └── main └── java ├── JrubyFileWatchService.java ├── jnr └── posix │ └── windows │ └── WindowsFileInformationByHandle.java └── org └── logstash └── filewatch ├── JrubyFileWatchLibrary.java ├── RubyWinIO.java └── WatchedFilesCollection.java /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Logstash 2 | 3 | All contributions are welcome: ideas, patches, documentation, bug reports, 4 | complaints, etc! 5 | 6 | Programming is not a required skill, and there are many ways to help out! 7 | It is more important to us that you are able to contribute. 8 | 9 | That said, some basic guidelines, which you are free to ignore :) 10 | 11 | ## Want to learn? 12 | 13 | Want to lurk about and see what others are doing with Logstash? 14 | 15 | * The irc channel (#logstash on irc.freenode.org) is a good place for this 16 | * The [forum](https://discuss.elastic.co/c/logstash) is also 17 | great for learning from others. 18 | 19 | ## Got Questions? 20 | 21 | Have a problem you want Logstash to solve for you? 22 | 23 | * You can ask a question in the [forum](https://discuss.elastic.co/c/logstash) 24 | * Alternately, you are welcome to join the IRC channel #logstash on 25 | irc.freenode.org and ask for help there! 26 | 27 | ## Have an Idea or Feature Request? 28 | 29 | * File a ticket on [GitHub](https://github.com/elastic/logstash/issues). Please remember that GitHub is used only for issues and feature requests. If you have a general question, the [forum](https://discuss.elastic.co/c/logstash) or IRC would be the best place to ask. 30 | 31 | ## Something Not Working? Found a Bug? 32 | 33 | If you think you found a bug, it probably is a bug. 
34 | 35 | * If it is a general Logstash or a pipeline issue, file it in [Logstash GitHub](https://github.com/elasticsearch/logstash/issues) 36 | * If it is specific to a plugin, please file it in the respective repository under [logstash-plugins](https://github.com/logstash-plugins) 37 | * or ask the [forum](https://discuss.elastic.co/c/logstash). 38 | 39 | # Contributing Documentation and Code Changes 40 | 41 | If you have a bugfix or new feature that you would like to contribute to 42 | logstash, and you think it will take more than a few minutes to produce the fix 43 | (ie; write code), it is worth discussing the change with the Logstash users and developers first! You can reach us via [GitHub](https://github.com/elastic/logstash/issues), the [forum](https://discuss.elastic.co/c/logstash), or via IRC (#logstash on freenode irc) 44 | Please note that Pull Requests without tests will not be merged. If you would like to contribute but do not have experience with writing tests, please ping us on IRC/forum or create a PR and ask our help. 45 | 46 | ## Contributing to plugins 47 | 48 | Check our [documentation](https://www.elastic.co/guide/en/logstash/current/contributing-to-logstash.html) on how to contribute to plugins or write your own! It is super easy! 49 | 50 | ## Contribution Steps 51 | 52 | 1. Test your changes! [Run](https://github.com/elastic/logstash#testing) the test suite 53 | 2. Please make sure you have signed our [Contributor License 54 | Agreement](https://www.elastic.co/contributor-agreement/). We are not 55 | asking you to assign copyright to us, but to give us the right to distribute 56 | your code without restriction. We ask this of all contributors in order to 57 | assure our users of the origin and continuing existence of the code. You 58 | only need to sign the CLA once. 59 | 3. Send a pull request! Push your changes to your fork of the repository and 60 | [submit a pull 61 | request](https://help.github.com/articles/using-pull-requests). 
In the pull 62 | request, describe what your changes do and mention any bugs/issues related 63 | to the pull request. 64 | 65 | 66 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Please post all product and debugging questions on our [forum](https://discuss.elastic.co/c/logstash). Your questions will reach our wider community members there, and if we confirm that there is a bug, then we can open a new issue here. 2 | 3 | For all general issues, please provide the following details for fast resolution: 4 | 5 | - Version: 6 | - Operating System: 7 | - Config File (if you have sensitive info, please remove it): 8 | - Sample Data: 9 | - Steps to Reproduce: 10 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Thanks for contributing to Logstash! 
If you haven't already signed our CLA, here's a handy link: https://www.elastic.co/contributor-agreement/ 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.gem 2 | Gemfile.lock 3 | .bundle 4 | vendor 5 | lib/jars 6 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | import: 2 | - logstash-plugins/.ci:travis/travis.yml@1.x -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## 4.4.6 2 | - Change read mode to immediately stop consuming buffered lines when shutdown is requested [#322](https://github.com/logstash-plugins/logstash-input-file/pull/322) 3 | 4 | ## 4.4.5 5 | - Handle EOF when checking archive validity [#321](https://github.com/logstash-plugins/logstash-input-file/pull/321) 6 | 7 | ## 4.4.4 8 | - Fixes gzip file handling in read mode when run on JDK12+, including JDK17 that is bundled with Logstash 8.4+ [#312](https://github.com/logstash-plugins/logstash-input-file/pull/312) 9 | 10 | ## 4.4.3 11 | - Fixes read mode to restart the read from reference stored in sincedb in case the file wasn't completely consumed. 
[#307](https://github.com/logstash-plugins/logstash-input-file/pull/307) 12 | 13 | ## 4.4.2 14 | - Doc: Fix attribute by removing extra character [#310](https://github.com/logstash-plugins/logstash-input-file/pull/310) 15 | 16 | ## 4.4.1 17 | - Fix: update to Gradle 7 [#305](https://github.com/logstash-plugins/logstash-input-file/pull/305) 18 | - [DOC] Add version attributes to doc source file [#308](https://github.com/logstash-plugins/logstash-input-file/pull/308) 19 | 20 | ## 4.4.0 21 | - Add support for ECS v8 [#301](https://github.com/logstash-plugins/logstash-input-file/pull/301) 22 | 23 | ## 4.3.1 24 | - Add extra safety to `chown` call in `atomic_write`, avoiding plugin crashes and falling back to a 25 | `non_atomic_write` in the event of failure [#295](https://github.com/logstash-plugins/logstash-input-file/pull/295) 26 | - Refactor: unify event updates to happen in one place [#297](https://github.com/logstash-plugins/logstash-input-file/pull/297) 27 | - Test: Actually retry tests on `RSpec::Expectations::ExpectationNotMetError` and retry instead of relying on timeout 28 | [#297](https://github.com/logstash-plugins/logstash-input-file/pull/297) 29 | 30 | ## 4.3.0 31 | - Add ECS Compatibility Mode [#291](https://github.com/logstash-plugins/logstash-input-file/pull/291) 32 | 33 | ## 4.2.4 34 | - Fix: sincedb_write issue on Windows machines [#283](https://github.com/logstash-plugins/logstash-input-file/pull/283) 35 | 36 | ## 4.2.3 37 | - Refactor: improve debug logging (log catched exceptions) [#280](https://github.com/logstash-plugins/logstash-input-file/pull/280) 38 | 39 | ## 4.2.2 40 | - Fix: sincedb_clean_after not being respected [#276](https://github.com/logstash-plugins/logstash-input-file/pull/276) 41 | 42 | ## 4.2.1 43 | - Fix: skip sincedb eviction if read mode completion deletes file during flush [#273](https://github.com/logstash-plugins/logstash-input-file/pull/273) 44 | 45 | ## 4.2.0 46 | - Fix: watched files performance with huge filesets 
[#268](https://github.com/logstash-plugins/logstash-input-file/pull/268) 47 | - Updated logging to include full traces in debug (and trace) levels 48 | 49 | ## 4.1.18 50 | - Fix: release watched files on completion (in read-mode) [#271](https://github.com/logstash-plugins/logstash-input-file/pull/271) 51 | 52 | ## 4.1.17 53 | - Added configuration setting `check_archive_validity` to enable 54 | gzipped files verification, issue 55 | [#261](https://github.com/logstash-plugins/logstash-input-file/issues/261) 56 | - [DOC] Added clarification for settings available with `read` mode [#235](https://github.com/logstash-plugins/logstash-input-file/pull/235) 57 | - [DOC] Rearranged text and fixed formatting for `mode` setting [#266](https://github.com/logstash-plugins/logstash-input-file/pull/266) 58 | 59 | ## 4.1.16 60 | - Added configuration setting exit_after_read to read to EOF and terminate 61 | the input [#240](https://github.com/logstash-plugins/logstash-input-file/pull/240) 62 | 63 | ## 4.1.15 64 | - Fixed bug in conversion of sincedb_clean_after setting [#257](https://github.com/logstash-plugins/logstash-input-file/pull/257) 65 | 66 | ## 4.1.14 67 | - Fixed bug in delete of multiple watched files [#254](https://github.com/logstash-plugins/logstash-input-file/pull/254) 68 | 69 | ## 4.1.13 70 | - Fixed sinceDB to work with filenames containing spaces [#249](https://github.com/logstash-plugins/logstash-input-file/pull/249) 71 | 72 | ## 4.1.12 73 | - Fix regression in `exclude` handling. Patterns are matched against the filename, not full path. 74 | [Issue #237](https://github.com/logstash-plugins/logstash-input-file/issues/237) 75 | 76 | ## 4.1.11 77 | - Fixed link to FAQ [#247](https://github.com/logstash-plugins/logstash-input-file/pull/247) 78 | 79 | ## 4.1.10 80 | - Fixed problem in Windows where some paths would fail to return an identifier ("inode"). Make path into a C style String before encoding to UTF-16LE. 
[#232](https://github.com/logstash-plugins/logstash-input-file/issues/232) 81 | 82 | ## 4.1.9 83 | - Fixed issue where logs were being spammed with needless error messages [#224](https://github.com/logstash-plugins/logstash-input-file/pull/224) 84 | 85 | ## 4.1.8 86 | - Fixed problem in tail and read modes where the read loop could get stuck if an IO error occurs in the loop. 87 | The file appears to be being read but it is not, suspected with file truncation schemes. 88 | [Issue #205](https://github.com/logstash-plugins/logstash-input-file/issues/205) 89 | 90 | ## 4.1.7 91 | - Fixed problem in rotation handling where the target file being rotated was 92 | subjected to the start_position setting when it must always start from the beginning. 93 | [Issue #214](https://github.com/logstash-plugins/logstash-input-file/issues/214) 94 | 95 | ## 4.1.6 96 | - Fixed Errno::ENOENT exception in Discoverer. [Issue #204](https://github.com/logstash-plugins/logstash-input-file/issues/204) 97 | 98 | ## 4.1.5 99 | - Fixed text anchor by changing it from hardcoded to asciidoc reference to 100 | work in versioned plugin reference 101 | 102 | ## 4.1.4 103 | - Fixed a regression where files discovered after first discovery were not 104 | always read from the beginning. Applies to tail mode only. 105 | [#198](https://github.com/logstash-plugins/logstash-input-file/issues/198) 106 | - Added much better support for file rotation schemes of copy/truncate and 107 | rename cascading. Applies to tail mode only. 108 | - Added support for processing files over remote mounts e.g. NFS. Before, it 109 | was possible to read into memory allocated but not filled with data resulting 110 | in ASCII NUL (0) bytes in the message field. Now, files are read up to the 111 | size as given by the remote filesystem client. Applies to tail and read modes. 
112 | 113 | ## 4.1.3 114 | - Fixed `read` mode of regular files sincedb write is requested in each read loop 115 | iteration rather than waiting for the end-of-file to be reached. Note: for gz files, 116 | the sincedb entry can only be updated at the end of the file as it is not possible 117 | to seek into a compressed file and begin reading from that position. 118 | [#196](https://github.com/logstash-plugins/logstash-input-file/pull/196) 119 | - Added support for String Durations in some settings e.g. `stat_interval => "750 ms"` 120 | [#194](https://github.com/logstash-plugins/logstash-input-file/pull/194) 121 | 122 | ## 4.1.2 123 | - Fix `require winhelper` error in WINDOWS. 124 | [Issue #184](https://github.com/logstash-plugins/logstash-input-file/issues/184) 125 | - Fix when no delimiter is found in a chunk, the chunk is reread - no forward progress 126 | is made in the file. 127 | [Issue #185](https://github.com/logstash-plugins/logstash-input-file/issues/185) 128 | 129 | ## 4.1.1 130 | - Fix JAR_VERSION read problem, prevented Logstash from starting. 131 | [Issue #180](https://github.com/logstash-plugins/logstash-input-file/issues/180) 132 | - Fix sincedb write error when using /dev/null, repeatedly causes a plugin restart. 133 | [Issue #182](https://github.com/logstash-plugins/logstash-input-file/issues/182) 134 | 135 | ## 4.1.0 136 | - Move Filewatch code into the plugin folder, rework Filewatch code to use 137 | Logstash facilities like logging and environment. 138 | - New feature: `mode` setting. Introduces two modes, `tail` mode is the 139 | existing behaviour for tailing, `read` mode is new behaviour that is 140 | optimized for the read complete content scenario. Please read the docs to 141 | fully appreciate the benefits of `read` mode. 142 | - New feature: File completion actions. Settings `file_completed_action` 143 | and `file_completed_log_path` control what actions to do after a file is 144 | completely read. Applicable: `read` mode only. 
145 | - New feature: in `read` mode, compressed files can be processed, GZIP only. 146 | - New feature: Files are sorted after being discovered. Settings `file_sort_by` 147 | and `file_sort_direction` control the sort order. Applicable: any mode. 148 | - New feature: Banded or striped file processing. Settings: `file_chunk_size` 149 | and `file_chunk_count` control banded or striped processing. Applicable: any mode. 150 | - New feature: `sincedb_clean_after` setting. Introduces expiry of sincedb 151 | records. The default is 14 days. If, after `sincedb_clean_after` days, no 152 | activity has been detected on a file (inode) the record expires and is not 153 | written to disk. The persisted record now includes the "last activity seen" 154 | timestamp. Applicable: any mode. 155 | - Docs: extensive additions to introduce the new features. 156 | 157 | ## 4.0.5 158 | - Docs: Set the default_codec doc attribute. 159 | 160 | ## 4.0.4 161 | - Update gemspec summary 162 | 163 | ## 4.0.3 164 | - Fix some documentation issues 165 | 166 | ## 4.0.1 167 | - Docs: Fix the description with the logstash documentation generator 168 | - Fix an issue with the rspec suite not finding log4j 169 | 170 | ## 4.0.0 171 | - Breaking: `ignore_older` settings is disabled by default. Previously if the file was older than 172 | 24 hours (the default for ignore_older), it would be ignored. This confused new users a lot, specially 173 | when they were reading new files with Logstash (with `start_position => beginning`). This setting also 174 | makes it consistent with Filebeat. 175 | 176 | ## 3.1.2 177 | - Adjust a few log call levels 178 | 179 | ## 3.1.1 180 | - Add host to @metadata 181 | 182 | ## 3.1.0 183 | - Breaking: Use native `--path.data` for Logstash 5.0 for sincedb files. 
184 | 185 | ## 3.0.3 186 | - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99 187 | 188 | ## 3.0.2 189 | - relax constraints of `logstash-devutils` see https://github.com/elastic/logstash-devutils/issues/48 190 | 191 | ## 3.0.1 192 | - Republish all the gems under jruby. 193 | 194 | ## 3.0.0 195 | - Update the plugin to the version 2.0 of the plugin api, this change is required for Logstash 5.0 compatibility. See https://github.com/elastic/logstash/issues/5141 196 | 197 | ## 2.2.5 198 | - Depend on logstash-core-plugin-api instead of logstash-core, removing the need to mass update plugins on major releases of logstash 199 | 200 | ## 2.2.3 201 | - New dependency requirements for logstash-core for the 5.0 release 202 | 203 | ## 2.2.2 204 | - Fix for: Filewatch library complains if HOME or SINCEDB_PATH variables are unset. 205 | - [Issue #101](https://github.com/logstash-plugins/logstash-input-file/issues/101) 206 | - [PR, filewatch 78](https://github.com/jordansissel/ruby-filewatch/pull/78) introduces the fix 207 | - [Issue, filewatch 76](https://github.com/jordansissel/ruby-filewatch/issues/76) 208 | - Improve documentation on ignore_older and close_older options [#104](https://github.com/logstash-plugins/logstash-input-file/issues/104) Documentation 209 | 210 | ## 2.2.1 211 | - Fix spec failures on CI Linux builds (not seen on local OSX and Linux) 212 | 213 | ## 2.2.0 214 | - Use ruby-filewatch 0.8.0, major rework of filewatch. See [Pull Request 74](https://github.com/jordansissel/ruby-filewatch/pull/74) 215 | - add max_open_files config option, defaults to 4095, the input will process much more than this but have this number of files open at any time - files are closed based on the close_older setting, thereby making others openable. 216 | - Changes the close_older logic to measure the time since the file was last read internally rather than using the file stat modified time. 
217 | - Use logstash-codec-multiline 2.0.7, fixes a bug with auto_flush deadlocking when multiple file inputs are defined in the LS config. 218 | 219 | ## 2.1.3 220 | - Use ruby-filewatch 0.7.1, re-enable close after file is modified again 221 | 222 | ## 2.1.2 223 | - Isolate test helper classes in their own namespace 224 | 225 | ## 2.1.1 226 | - Correct LS core dependency version 227 | 228 | ## 2.1.0 229 | - Implement new config options: ignore_older and close_older. When close_older is set, any buffered data will be flushed. 230 | - Fixes [#81](https://github.com/logstash-plugins/logstash-input-file/issues/81) 231 | - Fixes [#89](https://github.com/logstash-plugins/logstash-input-file/issues/89) 232 | - Fixes [#90](https://github.com/logstash-plugins/logstash-input-file/issues/90) 233 | 234 | ## 2.0.3 235 | - Implement Stream Identity mapping of codecs: distinct codecs will collect input per stream identity (filename) 236 | 237 | ## 2.0.2 238 | - Change LS core dependency version 239 | - Add CI badge 240 | 241 | ## 2.0.1 242 | - Change LS core dependency version 243 | 244 | ## 2.0.0 245 | - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully, 246 | instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895 247 | - Dependency on logstash-core update to 2.0 248 | 249 | ## 1.0.1 250 | - Force dependency on filewatch >= 0.6.5 that fixes a sincedb bug 251 | - Better documentation and error handling regarding the "sincedb_path" parameter 252 | -------------------------------------------------------------------------------- /CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | The following is a list of people who have contributed ideas, code, bug 2 | reports, or in general have helped logstash along its way. 
3 | 4 | Contributors: 5 | * Colin Surprenant (colinsurprenant) 6 | * Davi Alexandre (davialexandre) 7 | * James Turnbull (jamtur01) 8 | * John E. Vincent (lusis) 9 | * Jordan Sissel (jordansissel) 10 | * Kurt Hurtado (kurtado) 11 | * Nick Ethier (nickethier) 12 | * Pete Fritchman (fetep) 13 | * Philippe Weber (wiibaa) 14 | * Pier-Hugues Pellerin (ph) 15 | * Richard Pijnenburg (electrical) 16 | * Suyog Rao (suyograo) 17 | * Tejay Cardon (tejaycar) 18 | * elliot moore (em295) 19 | * yjpa7145 20 | * Guy Boertje (guyboertje) 21 | * Aaron Mildenstein (untergeek) 22 | 23 | Note: If you've sent us patches, bug reports, or otherwise contributed to 24 | Logstash, and you aren't on the list above and want to be, please let us know 25 | and we'll make sure you're here. Contributions from folks like you are what make 26 | open source awesome. 27 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | gemspec 4 | 5 | logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash" 6 | use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1" 7 | 8 | if Dir.exist?(logstash_path) && use_logstash_source 9 | gem 'logstash-core', :path => "#{logstash_path}/logstash-core" 10 | gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api" 11 | end 12 | -------------------------------------------------------------------------------- /JAR_VERSION: -------------------------------------------------------------------------------- 1 | 1.0.1 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. 
For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2020 Elastic and contributors 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /NOTICE.TXT: -------------------------------------------------------------------------------- 1 | Elasticsearch 2 | Copyright 2012-2015 Elasticsearch 3 | 4 | This product includes software developed by The Apache Software 5 | Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Logstash Plugin 2 | Travis Build 3 | [![Travis Build Status](https://travis-ci.com/logstash-plugins/logstash-input-file.svg)](https://travis-ci.com/logstash-plugins/logstash-input-file) 4 | 5 | This is a plugin for [Logstash](https://github.com/elastic/logstash). 6 | 7 | It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way. 8 | 9 | ## Documentation 10 | 11 | Logstash provides infrastructure to automatically generate documentation for this plugin. 
We use the asciidoc format to write documentation so any comments in the source code will be first converted into asciidoc and then into html. All plugin documentation is placed under one [central location](http://www.elastic.co/guide/en/logstash/current/). 12 | 13 | - For formatting code or config example, you can use the asciidoc `[source,ruby]` directive 14 | - For more asciidoc formatting tips, see the excellent reference here https://github.com/elastic/docs#asciidoc-guide 15 | 16 | ## Need Help? 17 | 18 | Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/logstash discussion forum. 19 | 20 | ## Developing 21 | 22 | ### 1. Plugin Development and Testing 23 | 24 | #### Code 25 | - To get started, you'll need JRuby with the Bundler gem installed. 26 | 27 | - Create a new plugin or clone an existing one from the GitHub [logstash-plugins](https://github.com/logstash-plugins) organization. We also provide [example plugins](https://github.com/logstash-plugins?query=example). 28 | 29 | - Install dependencies 30 | ```sh 31 | bundle install 32 | ``` 33 | 34 | #### Test 35 | 36 | - Update your dependencies 37 | 38 | ```sh 39 | bundle install 40 | ``` 41 | 42 | - Build the jar library used for watching files 43 | ```bash 44 | ./gradlew build 45 | ``` 46 | 47 | - Run tests 48 | 49 | ```sh 50 | bundle exec rspec 51 | ``` 52 | 53 | ### 2. 
Running your unpublished Plugin in Logstash 54 | 55 | #### 2.1 Run in a local Logstash clone 56 | 57 | - Edit Logstash `Gemfile` and add the local plugin path, for example: 58 | ```ruby 59 | gem "logstash-filter-awesome", :path => "/your/local/logstash-filter-awesome" 60 | ``` 61 | - Install plugin 62 | ```sh 63 | # Logstash 2.3 and higher 64 | bin/logstash-plugin install --no-verify 65 | 66 | # Prior to Logstash 2.3 67 | bin/plugin install --no-verify 68 | 69 | ``` 70 | - Run Logstash with your plugin 71 | ```sh 72 | bin/logstash -e 'filter {awesome {}}' 73 | ``` 74 | At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash. 75 | 76 | #### 2.2 Run in an installed Logstash 77 | 78 | You can use the same **2.1** method to run your plugin in an installed Logstash by editing its `Gemfile` and pointing the `:path` to your local plugin development directory or you can build the gem and install it using: 79 | 80 | - Build your plugin gem 81 | ```sh 82 | gem build logstash-filter-awesome.gemspec 83 | ``` 84 | - Install the plugin from the Logstash home 85 | ```sh 86 | # Logstash 2.3 and higher 87 | bin/logstash-plugin install --no-verify 88 | 89 | # Prior to Logstash 2.3 90 | bin/plugin install --no-verify 91 | 92 | ``` 93 | - Start Logstash and proceed to test the plugin 94 | 95 | ## Contributing 96 | 97 | All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin. 98 | 99 | Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here. 100 | 101 | It is more important to the community that you are able to contribute. 102 | 103 | For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file. 
104 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | @files=[] 2 | 3 | task :default do 4 | system("rake -T") 5 | end 6 | 7 | require "logstash/devutils/rake" 8 | 9 | desc "Compile and put filewatch jar into lib/jars" 10 | task :vendor do 11 | exit(1) unless system './gradlew --no-daemon clean jar' 12 | puts "-------------------> built filewatch jar via rake" 13 | end 14 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elasticsearch under one or more contributor 3 | * license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright 5 | * ownership. Elasticsearch licenses this file to you under 6 | * the Apache License, Version 2.0 (the "License"); you may 7 | * not use this file except in compliance with the License. 8 | * You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | import java.nio.file.Files 21 | import static java.nio.file.StandardCopyOption.REPLACE_EXISTING 22 | 23 | plugins { 24 | id 'java' 25 | id 'distribution' 26 | id 'idea' 27 | } 28 | 29 | group = 'org.logstash.filewatch' 30 | version file("JAR_VERSION").text.replaceAll("\\s","") 31 | 32 | repositories { 33 | mavenCentral() 34 | } 35 | 36 | java { 37 | sourceCompatibility = JavaVersion.VERSION_1_8 38 | targetCompatibility = JavaVersion.VERSION_1_8 39 | withSourcesJar() 40 | withJavadocJar() 41 | } 42 | 43 | dependencies { 44 | compileOnly group: 'org.jruby', name: 'jruby-complete', version: "9.1.13.0" 45 | } 46 | 47 | task copyGemjar(type: Copy, dependsOn: sourcesJar) { 48 | from project.jar 49 | into project.file('lib/jars/') 50 | } 51 | 52 | task cleanGemjar { 53 | delete fileTree(project.file('lib/jars/')) { 54 | include '*.jar' 55 | } 56 | } 57 | 58 | clean.dependsOn(cleanGemjar) 59 | jar.finalizedBy(copyGemjar) 60 | 61 | 62 | task generateGemJarRequiresFile { 63 | doLast { 64 | File jars_file = file('lib/logstash-input-file_jars.rb') 65 | jars_file.newWriter().withWriter { w -> 66 | w << "# AUTOGENERATED BY THE GRADLE SCRIPT. 
DO NOT EDIT.\n\n" 67 | w << "require \'jar_dependencies\'\n" 68 | configurations.runtimeClasspath.allDependencies.each { 69 | w << "require_jar(\'${it.group}\', \'${it.name}\', \'${it.version}\')\n" 70 | } 71 | w << "\nrequire_jar(\'${project.group}\', \'${project.name}\', \'${project.version}\')\n" 72 | } 73 | } 74 | } 75 | 76 | task vendor { 77 | doLast { 78 | String vendorPathPrefix = "vendor/jar-dependencies" 79 | configurations.runtimeClasspath.allDependencies.each { dep -> 80 | File f = configurations.runtimeClasspath.filter { it.absolutePath.contains("${dep.group}/${dep.name}/${dep.version}") }.singleFile 81 | String groupPath = dep.group.replaceAll('\\.', '/') 82 | File newJarFile = file("${vendorPathPrefix}/${groupPath}/${dep.name}/${dep.version}/${dep.name}-${dep.version}.jar") 83 | newJarFile.mkdirs() 84 | Files.copy(f.toPath(), newJarFile.toPath(), REPLACE_EXISTING) 85 | } 86 | String projectGroupPath = project.group.replaceAll('\\.', '/') 87 | File projectJarFile = file("${vendorPathPrefix}/${projectGroupPath}/${project.name}/${project.version}/${project.name}-${project.version}.jar") 88 | projectJarFile.mkdirs() 89 | Files.copy(file("$buildDir/libs/${project.name}-${project.version}.jar").toPath(), projectJarFile.toPath(), REPLACE_EXISTING) 90 | } 91 | } 92 | 93 | vendor.dependsOn(jar, generateGemJarRequiresFile) -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/logstash-plugins/logstash-input-file/55a4a7099f05f29351672417036c1342850c7adc/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | 
distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 
39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 
71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | # This is normally unused 84 | # shellcheck disable=SC2034 85 | APP_BASE_NAME=${0##*/} 86 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 87 | APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit 88 | 89 | # Use the maximum available, or set MAX_FD != -1 to use that value. 90 | MAX_FD=maximum 91 | 92 | warn () { 93 | echo "$*" 94 | } >&2 95 | 96 | die () { 97 | echo 98 | echo "$*" 99 | echo 100 | exit 1 101 | } >&2 102 | 103 | # OS specific support (must be 'true' or 'false'). 104 | cygwin=false 105 | msys=false 106 | darwin=false 107 | nonstop=false 108 | case "$( uname )" in #( 109 | CYGWIN* ) cygwin=true ;; #( 110 | Darwin* ) darwin=true ;; #( 111 | MSYS* | MINGW* ) msys=true ;; #( 112 | NONSTOP* ) nonstop=true ;; 113 | esac 114 | 115 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 116 | 117 | 118 | # Determine the Java command to use to start the JVM. 119 | if [ -n "$JAVA_HOME" ] ; then 120 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 121 | # IBM's JDK on AIX uses strange locations for the executables 122 | JAVACMD=$JAVA_HOME/jre/sh/java 123 | else 124 | JAVACMD=$JAVA_HOME/bin/java 125 | fi 126 | if [ ! -x "$JAVACMD" ] ; then 127 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 128 | 129 | Please set the JAVA_HOME variable in your environment to match the 130 | location of your Java installation." 131 | fi 132 | else 133 | JAVACMD=java 134 | if ! command -v java >/dev/null 2>&1 135 | then 136 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | fi 142 | 143 | # Increase the maximum file descriptors if we can. 144 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 145 | case $MAX_FD in #( 146 | max*) 147 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 148 | # shellcheck disable=SC2039,SC3045 149 | MAX_FD=$( ulimit -H -n ) || 150 | warn "Could not query maximum file descriptor limit" 151 | esac 152 | case $MAX_FD in #( 153 | '' | soft) :;; #( 154 | *) 155 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 156 | # shellcheck disable=SC2039,SC3045 157 | ulimit -n "$MAX_FD" || 158 | warn "Could not set maximum file descriptor limit to $MAX_FD" 159 | esac 160 | fi 161 | 162 | # Collect all arguments for the java command, stacking in reverse order: 163 | # * args from the command line 164 | # * the main class name 165 | # * -classpath 166 | # * -D...appname settings 167 | # * --module-path (only if needed) 168 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
169 | 170 | # For Cygwin or MSYS, switch paths to Windows format before running java 171 | if "$cygwin" || "$msys" ; then 172 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 173 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 174 | 175 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 176 | 177 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 178 | for arg do 179 | if 180 | case $arg in #( 181 | -*) false ;; # don't mess with options #( 182 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 183 | [ -e "$t" ] ;; #( 184 | *) false ;; 185 | esac 186 | then 187 | arg=$( cygpath --path --ignore --mixed "$arg" ) 188 | fi 189 | # Roll the args list around exactly as many times as the number of 190 | # args, so each arg winds up back in the position where it started, but 191 | # possibly modified. 192 | # 193 | # NB: a `for` loop captures its iteration list before it begins, so 194 | # changing the positional parameters here affects neither the number of 195 | # iterations, nor the values presented in `arg`. 196 | shift # remove old arg 197 | set -- "$@" "$arg" # push replacement arg 198 | done 199 | fi 200 | 201 | 202 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 203 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 204 | 205 | # Collect all arguments for the java command: 206 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 207 | # and any embedded shellness will be escaped. 208 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 209 | # treated as '${Hostname}' itself on the command line. 210 | 211 | set -- \ 212 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 213 | -classpath "$CLASSPATH" \ 214 | org.gradle.wrapper.GradleWrapperMain \ 215 | "$@" 216 | 217 | # Stop when "xargs" is not available. 218 | if ! 
command -v xargs >/dev/null 2>&1 219 | then 220 | die "xargs is not available" 221 | fi 222 | 223 | # Use "xargs" to parse quoted args. 224 | # 225 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 226 | # 227 | # In Bash we could simply go: 228 | # 229 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 230 | # set -- "${ARGS[@]}" "$@" 231 | # 232 | # but POSIX shell has neither arrays nor command substitution, so instead we 233 | # post-process each arg (as a line of input to sed) to backslash-escape any 234 | # character that might be a shell metacharacter, then use eval to reverse 235 | # that process (while maintaining the separation between arguments), and wrap 236 | # the whole thing up as a single "set" statement. 237 | # 238 | # This will of course break if any of these variables contains a newline or 239 | # an unmatched quote. 240 | # 241 | 242 | eval "set -- $( 243 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 244 | xargs -n1 | 245 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 246 | tr '\n' ' ' 247 | )" '"$@"' 248 | 249 | exec "$JAVACMD" "$@" 250 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 
15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | @rem This is normally unused 30 | set APP_BASE_NAME=%~n0 31 | set APP_HOME=%DIRNAME% 32 | 33 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 34 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 35 | 36 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 37 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 38 | 39 | @rem Find java.exe 40 | if defined JAVA_HOME goto findJavaFromJavaHome 41 | 42 | set JAVA_EXE=java.exe 43 | %JAVA_EXE% -version >NUL 2>&1 44 | if %ERRORLEVEL% equ 0 goto execute 45 | 46 | echo. 1>&2 47 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 48 | echo. 1>&2 49 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 50 | echo location of your Java installation. 1>&2 51 | 52 | goto fail 53 | 54 | :findJavaFromJavaHome 55 | set JAVA_HOME=%JAVA_HOME:"=% 56 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 57 | 58 | if exist "%JAVA_EXE%" goto execute 59 | 60 | echo. 1>&2 61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 62 | echo. 1>&2 63 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 64 | echo location of your Java installation. 
1>&2 65 | 66 | goto fail 67 | 68 | :execute 69 | @rem Setup the command line 70 | 71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 72 | 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if %ERRORLEVEL% equ 0 goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | set EXIT_CODE=%ERRORLEVEL% 85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 87 | exit /b %EXIT_CODE% 88 | 89 | :mainEnd 90 | if "%OS%"=="Windows_NT" endlocal 91 | 92 | :omega 93 | -------------------------------------------------------------------------------- /lib/filewatch/bootstrap.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "pathname" 3 | 4 | ## Common setup 5 | # all the required constants and files 6 | # defined in one place 7 | module FileWatch 8 | # the number of bytes read from a file during the read phase 9 | FILE_READ_SIZE = 32768 10 | # the largest fixnum in ruby 11 | # this is used in the read loop e.g. 12 | # @opts[:file_chunk_count].times do 13 | # where file_chunk_count defaults to this constant 14 | MAX_ITERATIONS = (2**(0.size * 8 - 2) - 2) / 32768 15 | 16 | require_relative "helper" 17 | 18 | gem_root_dir = Pathname.new(__FILE__).dirname.join("../../").realpath 19 | jar_version = gem_root_dir.join("JAR_VERSION").read.strip 20 | fullpath = gem_root_dir.join("lib/jars/filewatch-#{jar_version}.jar").expand_path.to_path 21 | require "java" 22 | require fullpath 23 | require "jruby_file_watch" 24 | 25 | if LogStash::Environment.windows? 
26 | require_relative "winhelper" 27 | require_relative "stat/windows_path" 28 | PathStatClass = Stat::WindowsPath 29 | FileOpener = FileExt 30 | else 31 | require_relative "stat/generic" 32 | PathStatClass = Stat::Generic 33 | FileOpener = ::File 34 | end 35 | 36 | # Structs can be used as hash keys because they compare by value 37 | # this is used as the key for values in the sincedb hash 38 | InodeStruct = Struct.new(:inode, :maj, :min) do 39 | def to_s 40 | to_a.join(" ") 41 | end 42 | end 43 | 44 | BufferExtractResult = Struct.new(:lines, :warning, :additional) 45 | 46 | class LoopControlResult 47 | attr_reader :count, :size, :more 48 | 49 | def initialize(count, size, more) 50 | @count, @size, @more = count, size, more 51 | @read_error_detected = false 52 | end 53 | 54 | def flag_read_error 55 | @read_error_detected = true 56 | end 57 | 58 | def keep_looping? 59 | !@read_error_detected && @more 60 | end 61 | end 62 | 63 | class NoSinceDBPathGiven < StandardError; end 64 | 65 | # how often (in seconds) we logger.warn a failed file open, per path. 
66 | OPEN_WARN_INTERVAL = ENV.fetch("FILEWATCH_OPEN_WARN_INTERVAL", 300).to_i 67 | MAX_FILES_WARN_INTERVAL = ENV.fetch("FILEWATCH_MAX_FILES_WARN_INTERVAL", 20).to_i 68 | 69 | require "logstash/util/buftok" 70 | require_relative "settings" 71 | require_relative "sincedb_value" 72 | require_relative "sincedb_record_serializer" 73 | require_relative "watched_files_collection" 74 | require_relative "sincedb_collection" 75 | require_relative "watch" 76 | require_relative "watched_file" 77 | require_relative "discoverer" 78 | require_relative "observing_base" 79 | require_relative "observing_tail" 80 | require_relative "observing_read" 81 | end 82 | -------------------------------------------------------------------------------- /lib/filewatch/discoverer.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/util/loggable" 3 | 4 | module FileWatch 5 | class Discoverer 6 | # given a path or glob will prepare for and discover files to watch 7 | # if they are not excluded or ignorable 8 | # they are added to the watched_files collection and 9 | # associated with a sincedb entry if one can be found 10 | include LogStash::Util::Loggable 11 | 12 | attr_reader :watched_files_collection 13 | 14 | def initialize(watched_files_collection, sincedb_collection, settings) 15 | @watching = Concurrent::Array.new 16 | @exclude = Concurrent::Array.new 17 | @watched_files_collection = watched_files_collection 18 | @sincedb_collection = sincedb_collection 19 | @settings = settings 20 | @settings.exclude.each { |p| @exclude << p } 21 | end 22 | 23 | def add_path(path) 24 | return if @watching.member?(path) 25 | @watching << path 26 | discover_files_new_path(path) 27 | self 28 | end 29 | 30 | def discover 31 | @watching.each do |path| 32 | discover_files_ongoing(path) 33 | end 34 | end 35 | 36 | private 37 | 38 | def can_exclude?(watched_file, new_discovery) 39 | @exclude.each do |pattern| 40 | if 
watched_file.pathname.basename.fnmatch?(pattern) 41 | if new_discovery 42 | logger.trace("skipping file because it matches exclude", :path => watched_file.path, :pattern => pattern) 43 | end 44 | watched_file.unwatch 45 | return true 46 | end 47 | end 48 | false 49 | end 50 | 51 | def discover_files_new_path(path) 52 | discover_any_files(path, false) 53 | end 54 | 55 | def discover_files_ongoing(path) 56 | discover_any_files(path, true) 57 | end 58 | 59 | def discover_any_files(path, ongoing) 60 | fileset = Dir.glob(path).select { |f| File.file?(f) } 61 | logger.trace("discover_files", :count => fileset.size) 62 | fileset.each do |file| 63 | new_discovery = false 64 | watched_file = @watched_files_collection.get(file) 65 | if watched_file.nil? 66 | pathname = Pathname.new(file) 67 | begin 68 | path_stat = PathStatClass.new(pathname) 69 | rescue Errno::ENOENT 70 | next 71 | end 72 | watched_file = WatchedFile.new(pathname, path_stat, @settings) 73 | new_discovery = true 74 | end 75 | # if it already unwatched or its excluded then we can skip 76 | next if watched_file.unwatched? || can_exclude?(watched_file, new_discovery) 77 | 78 | logger.trace? && logger.trace("handling:", :new_discovery => new_discovery, :watched_file => watched_file.details) 79 | 80 | if new_discovery 81 | watched_file.initial_completed if ongoing 82 | # initially when the sincedb collection is filled with records from the persistence file 83 | # each value is not associated with a watched file 84 | # a sincedb_value can be: 85 | # unassociated 86 | # associated with this watched_file 87 | # associated with a different watched_file 88 | if @sincedb_collection.associate(watched_file) 89 | if watched_file.file_ignorable? 
90 | logger.trace("skipping file because it was last modified more than #{@settings.ignore_older} seconds ago", :path => file) 91 | # on discovery ignorable watched_files are put into the ignored state and that 92 | # updates the size from the internal stat 93 | # so the existing contents are not read. 94 | # because, normally, a newly discovered file will 95 | # have a watched_file size of zero 96 | # they are still added to the collection so we know they are there for the next periodic discovery 97 | watched_file.ignore_as_unread 98 | end 99 | # now add the discovered file to the watched_files collection and adjust the sincedb collections 100 | @watched_files_collection.add(watched_file) 101 | end 102 | end 103 | # at this point the watched file is created, is in the db but not yet opened or being processed 104 | end 105 | end 106 | end 107 | end 108 | -------------------------------------------------------------------------------- /lib/filewatch/helper.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | # code downloaded from Ruby on Rails 4.2.1 3 | # https://raw.githubusercontent.com/rails/rails/v4.2.1/activesupport/lib/active_support/core_ext/file/atomic.rb 4 | # change method name to avoid borking active_support and vice versa 5 | require 'fileutils' 6 | 7 | module FileHelper 8 | extend self 9 | # Write to a file atomically. Useful for situations where you don't 10 | # want other processes or threads to see half-written files. 11 | # 12 | # File.write_atomically('important.file') do |file| 13 | # file.write('hello') 14 | # end 15 | def write_atomically(file_name) 16 | 17 | if File.exist?(file_name) 18 | # Get original file permissions 19 | old_stat = File.stat(file_name) 20 | else 21 | # If not possible, probe which are the default permissions in the 22 | # destination directory. 23 | old_stat = probe_stat_in(File.dirname(file_name)) 24 | end 25 | 26 | mode = old_stat ? 
old_stat.mode : nil 27 | 28 | # Create temporary file with identical permissions 29 | temp_file = File.new(rand_filename(file_name), "w", mode) 30 | temp_file.binmode 31 | return_val = yield temp_file 32 | temp_file.close 33 | new_stat = File.stat(temp_file) 34 | 35 | # Overwrite original file with temp file 36 | File.rename(temp_file.path, file_name) 37 | 38 | # Unable to get permissions of the original file => return 39 | return return_val if old_stat.nil? 40 | 41 | # Set correct uid/gid on new file if ownership is different. 42 | if old_stat && (old_stat.gid != new_stat.gid || old_stat.uid != new_stat.uid) 43 | File.chown(old_stat.uid, old_stat.gid, file_name) if old_stat 44 | end 45 | 46 | return_val 47 | end 48 | 49 | def device?(file_name) 50 | File.chardev?(file_name) || File.blockdev?(file_name) 51 | end 52 | 53 | # Private utility method. 54 | def probe_stat_in(dir) #:nodoc: 55 | basename = rand_filename(".permissions_check") 56 | file_name = File.join(dir, basename) 57 | FileUtils.touch(file_name) 58 | File.stat(file_name) 59 | rescue 60 | # ... 61 | ensure 62 | FileUtils.rm_f(file_name) if File.exist?(file_name) 63 | end 64 | 65 | def rand_filename(prefix) 66 | [ prefix, Thread.current.object_id, Process.pid, rand(1000000) ].join('.') 67 | end 68 | end 69 | -------------------------------------------------------------------------------- /lib/filewatch/observing_base.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | ## Interface API topology 4 | # ObservingBase module (this file) 5 | # is a module mixin proving common constructor and external API for File Input Plugin interaction 6 | # calls build_specific_processor on ObservingRead or ObservingTail 7 | # ObservingRead and ObservingTail 8 | # provides the External API method subscribe(observer = NullObserver.new) 9 | # build_specific_processor(settings) - provide a Tail or Read specific Processor. 
## Interface API topology
# ObservingBase (this module) supplies the shared constructor and the external
# API the File input plugin interacts with; ObservingRead / ObservingTail mix
# it in and implement build_specific_processor to return a ReadMode::Processor
# or TailMode::Processor. A processor's initialize_handlers builds the
# mode-specific Handler instances (TailMode::Handlers / ReadMode::Handlers)
# that react to file lifecycle "events"; each Handler subclass overrides
# handle_specifically (and, for tail mode, update_existing_specifically).

module FileWatch
  module ObservingBase
    attr_reader :watch, :sincedb_collection, :settings

    # Merge user options over the defaults, build Settings and the watch graph.
    # Raises NoSinceDBPathGiven when :sincedb_path is absent - the plugin layer
    # is expected to always supply it.
    def initialize(opts = {})
      defaults = {
        :sincedb_write_interval => 10,
        :stat_interval => 1,
        :discover_interval => 5,
        :exclude => [],
        :start_new_files_at => :end,
        :delimiter => "\n",
        :file_chunk_count => MAX_ITERATIONS,
        :file_chunk_size => FILE_READ_SIZE,
        :file_sort_by => "last_modified",
        :file_sort_direction => "asc",
      }
      options = defaults.merge(opts)
      unless options.include?(:sincedb_path)
        raise NoSinceDBPathGiven.new("No sincedb_path set in options. This should have been added in the main LogStash::Inputs::File class")
      end
      @settings = Settings.from_options(options)
      build_watch_and_dependencies
    end

    # Wire up: watched-files collection + sincedb -> Discoverer -> Watch.
    def build_watch_and_dependencies
      logger.info("START, creating Discoverer, Watch with file and sincedb collections")
      watched_files_collection = WatchedFilesCollection.new(@settings)
      @sincedb_collection = SincedbCollection.new(@settings)
      @sincedb_collection.open
      discoverer = Discoverer.new(watched_files_collection, @sincedb_collection, @settings)
      @watch = Watch.new(discoverer, build_specific_processor(@settings), @settings)
    end

    # Register a path (glob) to be watched.
    def watch_this(path)
      @watch.watch(path)
    end

    # Flush the sincedb to disk; can be invoked from the file input.
    def sincedb_write(reason = nil)
      @sincedb_collection.write(reason)
    end

    # quit is a sort-of finalizer; it should be called for clean up
    # before the instance is disposed of.
    def quit
      logger.info("QUIT - closing all files and shutting down.")
      @watch.quit # <-- should close all the files
    end
  end
end
80 | def quit 81 | logger.info("QUIT - closing all files and shutting down.") 82 | @watch.quit # <-- should close all the files 83 | # sincedb_write("shutting down") 84 | end 85 | 86 | end 87 | end 88 | -------------------------------------------------------------------------------- /lib/filewatch/observing_read.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/util/loggable" 3 | require_relative "read_mode/processor" 4 | 5 | module FileWatch 6 | class ObservingRead 7 | include LogStash::Util::Loggable 8 | include ObservingBase 9 | 10 | def subscribe(observer) 11 | # observer here is the file input 12 | watch.subscribe(observer, sincedb_collection) 13 | sincedb_collection.write("read mode subscribe complete - shutting down") 14 | end 15 | 16 | private 17 | 18 | def build_specific_processor(settings) 19 | ReadMode::Processor.new(settings) 20 | end 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/filewatch/observing_tail.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/util/loggable" 3 | require_relative 'tail_mode/processor' 4 | 5 | module FileWatch 6 | class ObservingTail 7 | include LogStash::Util::Loggable 8 | include ObservingBase 9 | 10 | def subscribe(observer) 11 | # observer here is the file input 12 | watch.subscribe(observer, sincedb_collection) 13 | sincedb_collection.write("tail mode subscribe complete - shutting down") 14 | end 15 | 16 | private 17 | 18 | def build_specific_processor(settings) 19 | TailMode::Processor.new(settings) 20 | end 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/filewatch/processor.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/util/loggable" 3 | require 
'concurrent/atomic/atomic_reference' 4 | 5 | module FileWatch 6 | class Processor 7 | include LogStash::Util::Loggable 8 | 9 | attr_reader :watch 10 | 11 | def initialize(settings) 12 | @settings = settings 13 | @deletable_paths = Concurrent::AtomicReference.new [] 14 | end 15 | 16 | def add_watch(watch) 17 | @watch = watch 18 | self 19 | end 20 | 21 | def clear_deletable_paths 22 | @deletable_paths.get_and_set [] 23 | end 24 | 25 | def add_deletable_path(path) 26 | @deletable_paths.get << path 27 | end 28 | 29 | def restat(watched_file) 30 | changed = watched_file.restat! 31 | if changed 32 | # the collection (when sorted by modified_at) needs to re-sort every time watched-file is modified, 33 | # we can perform these update operation while processing files (stat interval) instead of having to 34 | # re-sort the whole collection every time an entry is accessed 35 | @watch.watched_files_collection.update(watched_file) 36 | end 37 | end 38 | 39 | private 40 | 41 | def error_details(error, watched_file) 42 | details = { :path => watched_file.path, 43 | :exception => error.class, 44 | :message => error.message, 45 | :backtrace => error.backtrace } 46 | if logger.debug? 
# encoding: utf-8
require "logstash/util/loggable"

module FileWatch module ReadMode module Handlers
  # Common behaviour for read-mode handlers: listener wiring, guarded file
  # opening (with warn throttling) and sincedb bookkeeping.
  class Base
    include LogStash::Util::Loggable

    attr_reader :sincedb_collection

    def initialize(processor, sincedb_collection, observer, settings)
      @settings = settings
      @processor = processor
      @sincedb_collection = sincedb_collection
      @observer = observer
    end

    # Delegates to the watch so a (depth first) read loop can see the quit flag.
    def quit?
      @processor.watch.quit?
    end

    # Ensure the watched file has a listener, then run the subclass logic.
    def handle(watched_file)
      logger.trace? && logger.trace("handling:", :path => watched_file.path)
      watched_file.set_listener(@observer) unless watched_file.has_listener?
      handle_specifically(watched_file)
    end

    # some handlers don't need to define this method
    def handle_specifically(watched_file)
    end

    private

    # Try to open the file; on failure warn (throttled to OPEN_WARN_INTERVAL)
    # and put it back in the watch state for a later retry.
    # Returns true when the file ends up open.
    def open_file(watched_file)
      return true if watched_file.file_open?
      logger.trace? && logger.trace("opening", :path => watched_file.path)
      begin
        watched_file.open
      rescue => e
        # don't emit this message too often. if a file that we can't
        # read is changing a lot, we'll try to open it more often, and spam the logs.
        now = Time.now.to_i
        logger.trace? && logger.trace("opening OPEN_WARN_INTERVAL is '#{OPEN_WARN_INTERVAL}'")
        last_warned = watched_file.last_open_warning_at
        if last_warned.nil? || now - last_warned > OPEN_WARN_INTERVAL
          backtrace = e.backtrace
          backtrace = backtrace.take(3) if backtrace && !logger.debug?
          logger.warn("failed to open", :path => watched_file.path, :exception => e.class, :message => e.message, :backtrace => backtrace)
          watched_file.last_open_warning_at = now
        else
          logger.trace? && logger.trace("suppressed warning (failed to open)", :path => watched_file.path, :exception => e.class, :message => e.message)
        end
        watched_file.watch # set it back to watch so we can try it again
      end
      if watched_file.file_open?
        watched_file.listener.opened
        true
      else
        false
      end
    end

    # Find-or-create the sincedb entry for this file. A found entry already
    # bound to another watched_file means the file was renamed: the inode is
    # switched over to this watched file.
    def add_or_update_sincedb_collection(watched_file)
      sincedb_value = @sincedb_collection.find(watched_file)
      if sincedb_value.nil?
        add_new_value_sincedb_collection(watched_file)
      elsif sincedb_value.watched_file == watched_file
        update_existing_sincedb_collection_value(watched_file, sincedb_value)
      else
        logger.trace? && logger.trace("add_or_update_sincedb_collection: the found sincedb_value has a watched_file - this is a rename, switching inode to this watched file")
        existing_watched_file = sincedb_value.watched_file
        # both branches re-bind the sincedb value to this watched file first
        sincedb_value.set_watched_file(watched_file)
        if existing_watched_file.nil?
          logger.trace("add_or_update_sincedb_collection: switching as new file")
          watched_file.rotate_as_file
          watched_file.update_bytes_read(sincedb_value.position)
        else
          logger.trace? && logger.trace("add_or_update_sincedb_collection: switching from", :watched_file => watched_file.details)
          watched_file.rotate_from(existing_watched_file)
        end
      end
      watched_file.initial_completed
    end

    # sincedb_value is the source of truth for the read position
    def update_existing_sincedb_collection_value(watched_file, sincedb_value)
      logger.trace? && logger.trace("update_existing_sincedb_collection_value: #{watched_file.path}, last value #{sincedb_value.position}, cur size #{watched_file.last_stat_size}")
      watched_file.update_bytes_read(sincedb_value.position)
    end

    # Seed a brand new sincedb entry (position 0) bound to this watched file.
    def add_new_value_sincedb_collection(watched_file)
      sincedb_value = SincedbValue.new(0)
      sincedb_value.set_watched_file(watched_file)
      logger.trace? && logger.trace("add_new_value_sincedb_collection:", :path => watched_file.path, :position => sincedb_value.position)
      sincedb_collection.set(watched_file.sincedb_key, sincedb_value)
    end
  end
end end end
# encoding: utf-8

module FileWatch module ReadMode module Handlers
  # Reads a plain (uncompressed) file from the persisted position to EOF,
  # emitting lines to the listener and advancing the sincedb as it goes.
  class ReadFile < Base

    # seek file to which ever is furthest: either current bytes read or sincedb position
    private
    def seek_to_furthest_position(watched_file)
      previous_pos = sincedb_collection.find(watched_file).position
      watched_file.file_seek([watched_file.bytes_read, previous_pos].max)
    end

    public
    def handle_specifically(watched_file)
      return unless open_file(watched_file)
      add_or_update_sincedb_collection(watched_file) unless sincedb_collection.member?(watched_file.sincedb_key)
      seek_to_furthest_position(watched_file)
      until quit?
        loop_control = watched_file.loop_control_adjusted_for_stat_size
        controlled_read(watched_file, loop_control)
        sincedb_collection.request_disk_flush
        break unless loop_control.keep_looping?
      end
      return unless watched_file.all_read?
      # flush the buffer now in case there is no final delimiter
      line = watched_file.buffer.flush
      watched_file.listener.accept(line) unless line.empty?
      watched_file.listener.eof
      watched_file.file_close
      key = watched_file.sincedb_key
      if sincedb_collection.get(key)
        sincedb_collection.reading_completed(key)
        sincedb_collection.clear_watched_file(key)
      end
      watched_file.listener.deleted
      # NOTE: on top of un-watching we should also remove from the watched files collection
      # if the file is getting deleted (on completion), that part currently resides in
      # DeleteCompletedFileHandler - triggered above using `watched_file.listener.deleted`
      watched_file.unwatch
    end

    # Perform up to loop_control.count chunked reads of loop_control.size
    # bytes, forwarding extracted lines and bumping the sincedb per delimiter.
    def controlled_read(watched_file, loop_control)
      logger.trace? && logger.trace("reading...", :filename => watched_file.filename, :iterations => loop_control.count, :amount => loop_control.size)
      loop_control.count.times do
        break if quit?
        begin
          result = watched_file.read_extract_lines(loop_control.size) # expect BufferExtractResult
          logger.info(result.warning, result.additional) unless result.warning.empty?
          result.lines.each do |line|
            watched_file.listener.accept(line)
            # sincedb position is independent from the watched_file bytes_read
            delta = line.bytesize + @settings.delimiter_byte_size
            sincedb_collection.increment(watched_file.sincedb_key, delta)
            break if quit?
          end
        rescue EOFError => e
          # EOF is expected at the end of a file; no listener notification
          abort_read(watched_file, loop_control, e, "controlled_read: eof error reading file", false)
          break
        rescue Errno::EWOULDBLOCK, Errno::EINTR => e
          abort_read(watched_file, loop_control, e, "controlled_read: block or interrupt error reading file")
          break
        rescue => e
          abort_read(watched_file, loop_control, e, "controlled_read: general error reading file")
          break
        end
      end
    end

    # Log the failure, optionally notify the listener, and flag the loop
    # control so the caller stops reading this file for now.
    def abort_read(watched_file, loop_control, error, msg, notify_listener = true)
      log_error(msg, watched_file, error)
      watched_file.listener.error if notify_listener
      loop_control.flag_read_error
    end

    # Build and emit an error log entry; full backtrace (and the file object
    # itself) only at debug level.
    def log_error(msg, watched_file, error)
      details = {
        :path => watched_file.path,
        :exception => error.class,
        :message => error.message,
        :backtrace => error.backtrace
      }
      if logger.debug?
        details[:file] = watched_file
      else
        details[:backtrace] = details[:backtrace].take(8) if details[:backtrace]
      end
      logger.error(msg, details)
    end
  end
end end end
# encoding: utf-8
require 'java'

module FileWatch module ReadMode module Handlers

  java_import java.io.InputStream
  java_import java.io.InputStreamReader
  java_import java.io.FileInputStream
  java_import java.io.BufferedReader
  java_import java.util.zip.GZIPInputStream
  java_import java.util.zip.ZipException

  # Reads a gzip-compressed file in a single pass (no striped reads possible),
  # optionally pre-validating the archive, and records completion in the sincedb.
  class ReadZipFile < Base
    def handle_specifically(watched_file)
      add_or_update_sincedb_collection(watched_file) unless sincedb_collection.member?(watched_file.sincedb_key)
      # can't really stripe read a zip file, its all or nothing.
      watched_file.listener.opened
      # open question: when quit arrives mid-archive (e.g. pipeline reloading)
      # should we track lines read in the sincedb and fast forward through the
      # lines until we reach unseen content, so quitting mid zip file is safe?
      key = watched_file.sincedb_key

      if @settings.check_archive_validity && corrupted?(watched_file)
        watched_file.unwatch
      else
        file_stream = gzip_stream = decoder = buffered = nil
        begin
          file_stream = FileInputStream.new(watched_file.path)
          gzip_stream = GZIPInputStream.new(file_stream)
          decoder = InputStreamReader.new(gzip_stream, "UTF-8")
          buffered = BufferedReader.new(decoder)
          while (line = buffered.readLine())
            watched_file.listener.accept(line)
            # can't quit here: stopping early would incorrectly write a
            # 'completed' sincedb entry for a partially read archive
          end
          watched_file.listener.eof
        rescue ZipException => e
          logger.error("Cannot decompress the gzip file at path: #{watched_file.path}", :exception => e.class,
                       :message => e.message, :backtrace => e.backtrace)
          watched_file.listener.error
        else
          sincedb_collection.store_last_read(key, watched_file.last_stat_size)
          sincedb_collection.request_disk_flush
          watched_file.listener.deleted
          watched_file.unwatch
        ensure
          # rescue each close individually so all close attempts are tried
          close_and_ignore_ioexception(buffered) unless buffered.nil?
          close_and_ignore_ioexception(decoder) unless decoder.nil?
          close_and_ignore_ioexception(gzip_stream) unless gzip_stream.nil?
          close_and_ignore_ioexception(file_stream) unless file_stream.nil?
        end
      end
      sincedb_collection.clear_watched_file(key)
    end

    private

    # Close +closeable+, logging and swallowing any error.
    def close_and_ignore_ioexception(closeable)
      closeable.close
    rescue Exception => e # IOException can be thrown by any of the Java classes that implement the Closable interface.
      logger.warn("Ignoring an IOException when closing an instance of #{closeable.class.name}",
                  :exception => e.class, :message => e.message, :backtrace => e.backtrace)
    end

    # Decompress the whole archive into a scratch buffer; true when the gzip
    # data is truncated or corrupt (ZipException / EOFException).
    def corrupted?(watched_file)
      start = Time.new
      file_stream = gzip_stream = nil
      begin
        file_stream = FileInputStream.new(watched_file.path)
        gzip_stream = GZIPInputStream.new(file_stream)
        buffer = Java::byte[8192].new
        until gzip_stream.read(buffer) == -1
        end
        false
      rescue ZipException, Java::JavaIo::EOFException => e
        duration = Time.now - start
        logger.warn("Detected corrupted archive #{watched_file.path} file won't be processed", :message => e.message,
                    :duration => duration.round(3))
        true
      ensure
        close_and_ignore_ioexception(gzip_stream) unless gzip_stream.nil?
        close_and_ignore_ioexception(file_stream) unless file_stream.nil?
      end
    end
  end
end end end
# encoding: utf-8
require 'filewatch/processor'
require_relative "handlers/base"
require_relative "handlers/read_file"
require_relative "handlers/read_zip_file"

module FileWatch module ReadMode
  # Must handle
  #   :read_file
  #   :read_zip_file
  class Processor < FileWatch::Processor

    # we deviate from the tail mode handler initialization here by adding a
    # reference to self so the quit flag is readable during a (depth first) read loop
    def initialize_handlers(sincedb_collection, observer)
      @read_file = Handlers::ReadFile.new(self, sincedb_collection, observer, @settings)
      @read_zip_file = Handlers::ReadZipFile.new(self, sincedb_collection, observer, @settings)
    end

    def read_file(watched_file)
      @read_file.handle(watched_file)
    end

    def read_zip_file(watched_file)
      @read_zip_file.handle(watched_file)
    end

    def process_all_states(watched_files)
      process_watched(watched_files)
      return if watch.quit?
      process_active(watched_files)
    end

    private

    # Promote watched -> active for as many files as the max_active window
    # allows (files should never have been active before); warn (rate limited
    # to MAX_FILES_WARN_INTERVAL) when the window is exhausted.
    def process_watched(watched_files)
      logger.trace(__method__.to_s)
      active_count = watched_files.count { |wf| wf.active? }
      to_take = @settings.max_active - active_count
      if to_take > 0
        watched_files.select(&:watched?).take(to_take).each do |watched_file|
          begin
            restat(watched_file)
            watched_file.activate
          rescue Errno::ENOENT
            common_deleted_reaction(watched_file, __method__)
            next
          rescue => e
            common_error_reaction(watched_file, e, __method__)
            next
          end
          break if watch.quit?
        end
      else
        now = Time.now.to_i
        if (now - watch.lastwarn_max_files) > MAX_FILES_WARN_INTERVAL
          waiting = watched_files.size - @settings.max_active
          logger.warn("#{@settings.max_warn_msg}, files yet to open: #{waiting}")
          watch.lastwarn_max_files = now
        end
      end
    end

    ## TODO add process_rotation_in_progress

    # Re-stat and fully read every file in the active state; the handlers
    # take care of closing and unwatching.
    def process_active(watched_files)
      logger.trace(__method__.to_s)
      watched_files.each do |watched_file|
        next unless watched_file.active?

        begin
          restat(watched_file)
        rescue Errno::ENOENT
          common_deleted_reaction(watched_file, __method__)
          next
        rescue => e
          common_error_reaction(watched_file, e, __method__)
          next
        end
        break if watch.quit?

        watched_file.compressed? ? read_zip_file(watched_file) : read_file(watched_file)

        common_detach_when_allread(watched_file) if @settings.exit_after_read
      end
    end

    def common_detach_when_allread(watched_file)
      watched_file.unwatch
      watched_file.listener.reading_completed
      add_deletable_path watched_file.path
      logger.trace? && logger.trace("whole file read, removing from collection", :path => watched_file.path)
    end

    # file has gone away or we can't read it anymore.
    def common_deleted_reaction(watched_file, action)
      watched_file.unwatch
      add_deletable_path watched_file.path
      logger.trace? && logger.trace("#{action} - stat failed, removing from collection", :path => watched_file.path)
    end

    def common_error_reaction(watched_file, error, action)
      logger.error("#{action} - other error", error_details(error, watched_file))
    end
  end
end end
# encoding: utf-8

module FileWatch
  # Typed view over the option hash shared by watch/discover/process code.
  class Settings
    attr_reader :delimiter, :close_older, :ignore_older, :delimiter_byte_size
    attr_reader :max_active, :max_warn_msg, :lastwarn_max_files
    attr_reader :sincedb_write_interval, :stat_interval, :discover_interval
    attr_reader :exclude, :start_new_files_at, :file_chunk_count, :file_chunk_size
    attr_reader :sincedb_path, :sincedb_expiry_duration
    attr_reader :file_sort_by, :file_sort_direction
    attr_reader :exit_after_read
    attr_reader :check_archive_validity

    def self.from_options(opts)
      new.add_options(opts)
    end

    # Start from the built-in defaults; callers layer their options on top
    # via add_options.
    def initialize
      @opts = {}
      @lastwarn_max_files = 0
      add_options(
        :delimiter => "\n",
        :file_chunk_size => FILE_READ_SIZE,
        :max_open_files => 4095,
        :file_chunk_count => MAX_ITERATIONS,
        :sincedb_clean_after => 14,
        :exclude => [],
        :stat_interval => 1,
        :discover_interval => 5,
        :file_sort_by => "last_modified",
        :file_sort_direction => "asc"
      )
    end

    # Merge +opts+ over anything set so far and refresh all derived readers.
    def add_options(opts)
      @opts.update(opts)
      self.max_open_files = @opts[:max_open_files]
      @delimiter = @opts[:delimiter]
      @delimiter_byte_size = @delimiter.bytesize
      @file_chunk_size = @opts[:file_chunk_size]
      @close_older = @opts[:close_older]
      @ignore_older = @opts[:ignore_older]
      @stat_interval = @opts[:stat_interval]
      @discover_interval = @opts[:discover_interval]
      @exclude = Array(@opts[:exclude])
      @start_new_files_at = @opts[:start_new_files_at]
      @file_chunk_count = @opts[:file_chunk_count]
      @sincedb_path = @opts[:sincedb_path]
      @sincedb_write_interval = @opts[:sincedb_write_interval]
      @sincedb_expiry_duration = @opts.fetch(:sincedb_clean_after)
      @file_sort_by = @opts[:file_sort_by]
      @file_sort_direction = @opts[:file_sort_direction]
      @exit_after_read = @opts[:exit_after_read]
      @check_archive_validity = @opts[:check_archive_validity]
      self
    end

    # Guard against nil/non-positive values, falling back to the 4095 default.
    def max_open_files=(value)
      val = value.to_i
      val = 4095 if value.nil? || val <= 0
      @max_warn_msg = "Reached open files limit: #{val}, set by the 'max_open_files' option or default"
      @max_active = val
    end
  end
end
# encoding: utf-8
require "logstash/util/loggable"

module FileWatch
  # this KV collection has a watched_file storage_key (an InodeStruct) as the key
  # and a SincedbValue as the value.
  # the SincedbValues are built by reading the sincedb file.
  class SincedbCollection
    include LogStash::Util::Loggable

    attr_reader :path
    attr_writer :serializer

    def initialize(settings)
      @settings = settings
      @sincedb_last_write = 0
      @sincedb = {}
      @serializer = SincedbRecordSerializer.new(@settings.sincedb_expiry_duration)
      @path = Pathname.new(@settings.sincedb_path)
      # temp-file + rename is not possible on Windows or onto char/block devices
      @write_method = LogStash::Environment.windows? || @path.chardev? || @path.blockdev? ? method(:non_atomic_write) : method(:atomic_write)
      @full_path = @path.to_path
      FileUtils.touch(@full_path)
      @write_requested = false
    end

    def write_requested?
      @write_requested
    end

    def request_disk_flush
      @write_requested = true
      flush_at_interval
    end

    def write_if_requested
      flush_at_interval if write_requested?
    end

    # Unconditional write; +reason+ is only for the trace log.
    def write(reason = nil)
      logger.trace("caller requested sincedb write (#{reason})")
      sincedb_write
    end

    # Load persisted records; expired ones are skipped, a missing/unreadable
    # file is treated as an empty sincedb.
    def open
      @time_sdb_opened = Time.now.to_f
      begin
        path.open do |file|
          logger.debug("open: reading from #{path}")
          @serializer.deserialize(file) do |key, value|
            logger.trace? && logger.trace("open: importing #{key.inspect} => #{value.inspect}")
            set_key_value(key, value)
          end
        end
        logger.trace("open: count of keys read: #{@sincedb.keys.size}")
      rescue => e
        #No existing sincedb to load
        logger.debug("open: error opening #{path}", :exception => e.class, :message => e.message)
      end
    end

    # Try to bind +watched_file+ to its persisted record. Returns true when
    # the caller may proceed to add the file to the watched collection.
    def associate(watched_file)
      logger.trace? && logger.trace("associate: finding", :path => watched_file.path, :inode => watched_file.sincedb_key.inode)
      sincedb_value = find(watched_file)
      if sincedb_value.nil?
        # sincedb has no record of this inode
        # and due to the window handling of many files
        # this file may not be opened in this session.
        # a new value will be added when the file is opened
        logger.trace("associate: unmatched", :filename => watched_file.filename)
        return true
      end
      logger.trace? && logger.trace("associate: found sincedb record", :filename => watched_file.filename,
                                    :sincedb_key => watched_file.sincedb_key, :sincedb_value => sincedb_value)
      if sincedb_value.watched_file.nil? # not associated
        if sincedb_value.path_in_sincedb.nil?
          handle_association(sincedb_value, watched_file)
          logger.trace? && logger.trace("associate: inode matched but no path in sincedb", :filename => watched_file.filename)
          return true
        end
        if sincedb_value.path_in_sincedb == watched_file.path
          # the path on disk is the same as discovered path and the inode is the same.
          handle_association(sincedb_value, watched_file)
          logger.trace? && logger.trace("associate: inode and path matched", :filename => watched_file.filename)
          return true
        end
        # the path on disk is different from discovered unassociated path but they have the same key (inode)
        # treat as a new file, a new value will be added when the file is opened
        sincedb_value.clear_watched_file
        delete(watched_file.sincedb_key)
        logger.trace? && logger.trace("associate: matched but allocated to another", :filename => watched_file.filename)
        return true
      end
      if sincedb_value.watched_file.equal?(watched_file) # pointer equals
        logger.trace? && logger.trace("associate: already associated", :filename => watched_file.filename)
        return true
      end
      # sincedb_value.watched_file is not this discovered watched_file but they have the same key (inode)
      # this means that the filename path was changed during this session.
      # renamed file can be discovered...
      #   before the original is detected as deleted: state is `active`
      #   after the original is detected as deleted but before it is actually deleted: state is `delayed_delete`
      #   after the original is deleted
      # are not yet in the delete phase, let this play out
      existing_watched_file = sincedb_value.watched_file
      logger.trace? && logger.trace("associate: found sincedb_value has a watched_file - this is a rename",
                                    :this_watched_file => watched_file.details, :existing_watched_file => existing_watched_file.details)
      watched_file.rotation_in_progress
      true
    end

    def find(watched_file)
      get(watched_file.sincedb_key)
    end

    def member?(key)
      @sincedb.member?(key)
    end

    def get(key)
      @sincedb[key]
    end

    def set(key, value)
      @sincedb[key] = value
      value
    end

    def delete(key)
      @sincedb.delete(key)
    end

    def last_read(key)
      @sincedb[key].position
    end

    def rewind(key)
      @sincedb[key].update_position(0)
    end

    def increment(key, amount)
      @sincedb[key].increment_position(amount)
    end

    def set_watched_file(key, watched_file)
      @sincedb[key].set_watched_file(watched_file)
    end

    def watched_file_deleted(watched_file)
      value = @sincedb[watched_file.sincedb_key]
      value.unset_watched_file if value
    end

    def store_last_read(key, pos)
      @sincedb[key].update_position(pos)
    end

    def clear_watched_file(key)
      @sincedb[key].clear_watched_file
    end

    def reading_completed(key)
      @sincedb[key].reading_completed
    end

    def clear
      @sincedb.clear
    end

    def keys
      @sincedb.keys
    end

    # True when +key+ exists but is not bound to any watched file.
    def watched_file_unset?(key)
      return false unless member?(key)
      get(key).watched_file.nil?
    end

    # Write only when the configured sincedb_write_interval has elapsed.
    def flush_at_interval
      now = Time.now
      delta = now.to_i - @sincedb_last_write
      if delta >= @settings.sincedb_write_interval
        logger.debug("writing sincedb (delta since last write = #{delta})")
        sincedb_write(now)
      end
    end

    private

    # Bind value<->file, restore the read position and ignore fully-read files.
    def handle_association(sincedb_value, watched_file)
      watched_file.update_bytes_read(sincedb_value.position)
      sincedb_value.set_watched_file(watched_file)
      watched_file.initial_completed
      if watched_file.all_read?
        watched_file.ignore
        logger.trace? && logger.trace("handle_association fully read, ignoring", :watched_file => watched_file.details, :sincedb_value => sincedb_value)
      end
    end

    # Import a deserialized record unless it expired before we opened the db.
    def set_key_value(key, value)
      if @time_sdb_opened < value.last_changed_at_expires(@settings.sincedb_expiry_duration)
        set(key, value)
      else
        logger.debug("set_key_value: record has expired, skipping: #{key.inspect} => #{value.inspect}")
      end
    end

    # Serialize via the platform-appropriate write method and evict the keys
    # the serializer reported as expired.
    def sincedb_write(time = Time.now)
      logger.trace? && logger.trace("sincedb_write: #{path} (time = #{time})")
      begin
        expired_keys = @write_method.call(time)
        expired_keys.each do |key|
          @sincedb[key].unset_watched_file
          delete(key)
          logger.trace? && logger.trace("sincedb_write: cleaned", :key => key)
        end
        @sincedb_last_write = time.to_i
        @write_requested = false
      rescue Errno::EACCES => e
        # no file handles free perhaps - maybe it will work next time
        logger.debug("sincedb_write: #{path} error:", :exception => e.class, :message => e.message)
      end
    end

    # @return expired keys
    def atomic_write(time)
      # fix: trace label previously said "non_atomic_write: " (copy-paste),
      # mislabeling which write path was taken
      logger.trace? && logger.trace("atomic_write: ", :time => time)
      begin
        FileHelper.write_atomically(@full_path) do |io|
          @serializer.serialize(@sincedb, io, time.to_f)
        end
      rescue Errno::EPERM, Errno::EACCES => e
        logger.warn("sincedb_write: unable to write atomically due to permissions error, falling back to non-atomic write: #{path} error:", :exception => e.class, :message => e.message)
        @write_method = method(:non_atomic_write)
        non_atomic_write(time)
      rescue => e
        logger.warn("sincedb_write: unable to write atomically, attempting non-atomic write: #{path} error:", :exception => e.class, :message => e.message)
        non_atomic_write(time)
      end
    end

    # @return expired keys
    def non_atomic_write(time)
      logger.trace? && logger.trace("non_atomic_write: ", :time => time)
      File.open(@full_path, "w+") do |io|
        @serializer.serialize(@sincedb, io, time.to_f)
      end
    end
  end
end
serialize_record(k, v) 34 | "#{k} #{v}\n" # effectively InodeStruct#to_s SincedbValue#to_s 35 | end 36 | 37 | def deserialize_record(record) 38 | return [] if record.nil? || record.empty? 39 | parts = record.split(" ") 40 | parse_line_v2(parts) || parse_line_v1(parts) 41 | end 42 | 43 | private 44 | 45 | def parse_line_v2(parts) 46 | # new format e.g. 2977152 1 4 94 1519319662.852678 'path/to/file' 47 | # do we want to store the last known state of the watched file too? 48 | return false if parts.size < 5 49 | inode_struct = prepare_inode_struct(parts) 50 | pos = parts.shift.to_i 51 | expires_at = Float(parts.shift) # this is like Time.now.to_f 52 | path_in_sincedb = parts.join(" ") 53 | value = SincedbValue.new(pos, expires_at).add_path_in_sincedb(path_in_sincedb) 54 | [inode_struct, value] 55 | end 56 | 57 | def parse_line_v1(parts) 58 | # old inode based e.g. 2977152 1 4 94 59 | inode_struct = prepare_inode_struct(parts) 60 | pos = parts.shift.to_i 61 | [inode_struct, SincedbValue.new(pos)] 62 | end 63 | 64 | def prepare_inode_struct(parts) 65 | InodeStruct.new(parts.shift, *parts.shift(2).map(&:to_i)) 66 | end 67 | end 68 | end 69 | -------------------------------------------------------------------------------- /lib/filewatch/sincedb_value.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch 4 | # Tracks the position and expiry of the offset of a file-of-interest 5 | # NOTE: the `watched_file.bytes_read` and this `sincedb_value.position` can diverge 6 | # At any given moment IF the `watched_file.bytes_read` is greater than `sincedb_value.position` 7 | # then it is larger to account for bytes held in the `watched_file.buffer` 8 | # in Tail mode if we quit the buffer is not flushed and we restart from 9 | # the `sincedb_value.position` (end of the last line read). 10 | # in Read mode the buffer is flushed as a line and both values should be the same. 
11 | class SincedbValue 12 | attr_reader :last_changed_at, :watched_file, :path_in_sincedb, :position 13 | 14 | def initialize(position, last_changed_at = nil, watched_file = nil) 15 | @position = position # this is the value read from disk 16 | @last_changed_at = last_changed_at 17 | @watched_file = watched_file 18 | touch if @last_changed_at.nil? || @last_changed_at.zero? 19 | end 20 | 21 | def add_path_in_sincedb(path) 22 | @path_in_sincedb = path # can be nil 23 | self 24 | end 25 | 26 | def last_changed_at_expires(duration) 27 | @last_changed_at + duration 28 | end 29 | 30 | def update_position(pos) 31 | # called when we reset the position to bof or eof on shrink or file read complete 32 | touch 33 | @position = pos 34 | @watched_file.update_bytes_read(pos) unless @watched_file.nil? 35 | end 36 | 37 | def increment_position(pos) 38 | # called when actual lines are sent to the observer listener 39 | # this gets serialized as its a more true indication of position than 40 | # chunk read size 41 | touch 42 | @position += pos 43 | end 44 | 45 | def set_watched_file(watched_file) 46 | touch 47 | @watched_file = watched_file 48 | end 49 | 50 | def touch 51 | @last_changed_at = Time.now.to_f 52 | end 53 | 54 | def to_s 55 | # consider serializing the watched_file state as well 56 | "#{position} #{last_changed_at}".tap do |s| 57 | if @watched_file.nil? 58 | s.concat(" ").concat(@path_in_sincedb) unless @path_in_sincedb.nil? 59 | else 60 | s.concat(" ").concat(@watched_file.path) 61 | end 62 | end 63 | end 64 | 65 | def clear_watched_file 66 | @watched_file = nil 67 | end 68 | 69 | def reading_completed 70 | touch 71 | @path_in_sincedb = @watched_file.path 72 | @position = @watched_file.bytes_read 73 | end 74 | 75 | def unset_watched_file 76 | # called in read mode only because we flushed any remaining bytes as a final line. 77 | # cache the position 78 | # we don't cache the path here because we know we are done with this file. 
79 | # either due via the `delete` handling 80 | # or when read mode is done with a file. 81 | # in the case of `delete` if the file was renamed then @watched_file is the 82 | # watched_file of the previous path and the new path will be discovered and 83 | # it should have the same inode as before. 84 | # The key from the new watched_file should then locate this entry and we 85 | # can resume from the cached position 86 | return if @watched_file.nil? 87 | wf = @watched_file 88 | @watched_file = nil 89 | @position = wf.bytes_read 90 | end 91 | end 92 | end 93 | -------------------------------------------------------------------------------- /lib/filewatch/stat/generic.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module Stat 4 | class Generic 5 | 6 | attr_reader :inode, :modified_at, :size, :inode_struct 7 | 8 | def initialize(source) 9 | @source = source # Pathname 10 | restat 11 | end 12 | 13 | def restat 14 | stat = @source.stat 15 | @inode = stat.ino.to_s 16 | @modified_at = stat.mtime.to_f 17 | @size = stat.size 18 | @inode_struct = InodeStruct.new(@inode, stat.dev_major, stat.dev_minor) 19 | end 20 | 21 | def windows? 
22 | false 23 | end 24 | 25 | def inspect 26 | "<#{self.class.name} size=#{@size}, modified_at=#{@modified_at}, inode='#{@inode}', inode_struct=#{@inode_struct}>" 27 | end 28 | end 29 | end end 30 | -------------------------------------------------------------------------------- /lib/filewatch/stat/windows_path.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module Stat 4 | class WindowsPath 5 | 6 | attr_reader :inode, :modified_at, :size, :inode_struct 7 | 8 | def initialize(source) 9 | @source = source # Pathname 10 | @inode = Winhelper.identifier_from_path(@source.to_path) 11 | # in windows the dev hi and low are in the identifier 12 | @inode_struct = InodeStruct.new(@inode, 0, 0) 13 | restat 14 | end 15 | 16 | def restat 17 | stat = @source.stat 18 | @modified_at = stat.mtime.to_f 19 | @size = stat.size 20 | end 21 | 22 | def windows? 23 | true 24 | end 25 | 26 | def inspect 27 | "<#{self.class.name} size=#{@size}, modified_at=#{@modified_at}, inode=#{@inode}, inode_struct=#{@inode_struct}>" 28 | end 29 | end 30 | end end 31 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/handlers/base.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "logstash/util/loggable" 3 | 4 | module FileWatch module TailMode module Handlers 5 | class Base 6 | include LogStash::Util::Loggable 7 | attr_reader :sincedb_collection 8 | 9 | def initialize(processor, sincedb_collection, observer, settings) 10 | @settings = settings 11 | @processor = processor 12 | @sincedb_collection = sincedb_collection 13 | @observer = observer 14 | end 15 | 16 | def quit? 17 | @processor.watch.quit? 18 | end 19 | 20 | def handle(watched_file) 21 | logger.trace? && logger.trace("handling:", :path => watched_file.path) 22 | unless watched_file.has_listener? 
23 | watched_file.set_listener(@observer) 24 | end 25 | handle_specifically(watched_file) 26 | end 27 | 28 | def handle_specifically(watched_file) 29 | # some handlers don't need to define this method 30 | end 31 | 32 | def update_existing_specifically(watched_file, sincedb_value) 33 | # when a handler subclass does not implement this then do nothing 34 | end 35 | 36 | private 37 | 38 | def controlled_read(watched_file, loop_control) 39 | changed = false 40 | logger.trace? && logger.trace(__method__.to_s, :iterations => loop_control.count, :amount => loop_control.size, :filename => watched_file.filename) 41 | # from a real config (has 102 file inputs) 42 | # -- This cfg creates a file input for every log file to create a dedicated file pointer and read all file simultaneously 43 | # -- If we put all log files in one file input glob we will have indexing delay, because Logstash waits until the first file becomes EOF 44 | # by allowing the user to specify a combo of `file_chunk_count` X `file_chunk_size`... 45 | # we enable the pseudo parallel processing of each file. 46 | # user also has the option to specify a low `stat_interval` and a very high `discover_interval`to respond 47 | # quicker to changing files and not allowing too much content to build up before reading it. 48 | loop_control.count.times do 49 | break if quit? 50 | begin 51 | logger.debug? && logger.debug("#{__method__} get chunk") 52 | result = watched_file.read_extract_lines(loop_control.size) # expect BufferExtractResult 53 | logger.trace(result.warning, result.additional) unless result.warning.empty? 
54 | changed = true 55 | result.lines.each do |line| 56 | watched_file.listener.accept(line) 57 | # sincedb position is now independent from the watched_file bytes_read 58 | sincedb_collection.increment(watched_file.sincedb_key, line.bytesize + @settings.delimiter_byte_size) 59 | end 60 | rescue EOFError => e 61 | # it only makes sense to signal EOF in "read" mode not "tail" 62 | logger.debug(__method__.to_s, exception_details(watched_file.path, e, false)) 63 | loop_control.flag_read_error 64 | break 65 | rescue Errno::EWOULDBLOCK, Errno::EINTR => e 66 | logger.debug(__method__.to_s, exception_details(watched_file.path, e, false)) 67 | watched_file.listener.error 68 | loop_control.flag_read_error 69 | break 70 | rescue => e 71 | logger.error("#{__method__} general error reading", exception_details(watched_file.path, e)) 72 | watched_file.listener.error 73 | loop_control.flag_read_error 74 | break 75 | end 76 | end 77 | logger.debug("#{__method__} stopped loop due quit") if quit? 78 | sincedb_collection.request_disk_flush if changed 79 | end 80 | 81 | def open_file(watched_file) 82 | return true if watched_file.file_open? 83 | logger.trace? && logger.trace("open_file", :filename => watched_file.filename) 84 | begin 85 | watched_file.open 86 | rescue => e 87 | # don't emit this message too often. if a file that we can't 88 | # read is changing a lot, we'll try to open it more often, and spam the logs. 89 | now = Time.now.to_i 90 | logger.trace? && logger.trace("open_file OPEN_WARN_INTERVAL is '#{OPEN_WARN_INTERVAL}'") 91 | if watched_file.last_open_warning_at.nil? 
|| now - watched_file.last_open_warning_at > OPEN_WARN_INTERVAL 92 | logger.warn("failed to open file", exception_details(watched_file.path, e)) 93 | watched_file.last_open_warning_at = now 94 | else 95 | logger.debug("open_file suppressed warning `failed to open file`", exception_details(watched_file.path, e, false)) 96 | end 97 | watched_file.watch # set it back to watch so we can try it again 98 | else 99 | watched_file.listener.opened 100 | end 101 | watched_file.file_open? 102 | end 103 | 104 | def add_or_update_sincedb_collection(watched_file) 105 | sincedb_value = @sincedb_collection.find(watched_file) 106 | if sincedb_value.nil? 107 | sincedb_value = add_new_value_sincedb_collection(watched_file) 108 | watched_file.initial_completed 109 | elsif sincedb_value.watched_file == watched_file 110 | update_existing_sincedb_collection_value(watched_file, sincedb_value) 111 | watched_file.initial_completed 112 | else 113 | logger.trace? && logger.trace("add_or_update_sincedb_collection: found sincedb record", 114 | :sincedb_key => watched_file.sincedb_key, :sincedb_value => sincedb_value) 115 | # detected a rotation, Discoverer can't handle this because this watched file is not a new discovery. 116 | # we must handle it here, by transferring state and have the sincedb value track this watched file 117 | # rotate_as_file and rotate_from will switch the sincedb key to the inode that the path is now pointing to 118 | # and pickup the sincedb_value from before. 119 | logger.debug("add_or_update_sincedb_collection: the found sincedb_value has a watched_file - this is a rename, switching inode to this watched file") 120 | existing_watched_file = sincedb_value.watched_file 121 | if existing_watched_file.nil? 122 | sincedb_value.set_watched_file(watched_file) 123 | logger.trace? 
&& logger.trace("add_or_update_sincedb_collection: switching as new file") 124 | watched_file.rotate_as_file 125 | watched_file.update_bytes_read(sincedb_value.position) 126 | else 127 | sincedb_value.set_watched_file(watched_file) 128 | logger.trace? && logger.trace("add_or_update_sincedb_collection: switching from:", :watched_file => watched_file.details) 129 | watched_file.rotate_from(existing_watched_file) 130 | end 131 | end 132 | sincedb_value 133 | end 134 | 135 | def update_existing_sincedb_collection_value(watched_file, sincedb_value) 136 | logger.trace? && logger.trace("update_existing_sincedb_collection_value", :position => sincedb_value.position, 137 | :filename => watched_file.filename, :last_stat_size => watched_file.last_stat_size) 138 | update_existing_specifically(watched_file, sincedb_value) 139 | end 140 | 141 | def add_new_value_sincedb_collection(watched_file) 142 | sincedb_value = get_new_value_specifically(watched_file) 143 | logger.trace? && logger.trace("add_new_value_sincedb_collection", :position => sincedb_value.position, 144 | :watched_file => watched_file.details) 145 | sincedb_collection.set(watched_file.sincedb_key, sincedb_value) 146 | sincedb_value 147 | end 148 | 149 | def get_new_value_specifically(watched_file) 150 | position = watched_file.position_for_new_sincedb_value 151 | value = SincedbValue.new(position) 152 | value.set_watched_file(watched_file) 153 | watched_file.update_bytes_read(position) 154 | value 155 | end 156 | 157 | private 158 | 159 | def exception_details(path, e, trace = true) 160 | details = { :path => path, :exception => e.class, :message => e.message } 161 | details[:backtrace] = e.backtrace if trace && logger.debug? 
162 | details 163 | end 164 | 165 | end 166 | end end end 167 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/handlers/create.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module TailMode module Handlers 4 | class Create < Base 5 | def handle_specifically(watched_file) 6 | if open_file(watched_file) 7 | add_or_update_sincedb_collection(watched_file) unless sincedb_collection.member?(watched_file.sincedb_key) 8 | end 9 | end 10 | 11 | def update_existing_specifically(watched_file, sincedb_value) 12 | # sincedb_value is the source of truth 13 | watched_file.update_bytes_read(sincedb_value.position) 14 | end 15 | end 16 | end end end 17 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/handlers/create_initial.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module TailMode module Handlers 4 | class CreateInitial < Base 5 | def handle_specifically(watched_file) 6 | if open_file(watched_file) 7 | logger.trace("handle_specifically opened file handle: #{watched_file.file.fileno}, path: #{watched_file.filename}") 8 | add_or_update_sincedb_collection(watched_file) 9 | end 10 | end 11 | 12 | def update_existing_specifically(watched_file, sincedb_value) 13 | position = watched_file.last_stat_size 14 | if @settings.start_new_files_at == :beginning 15 | position = 0 16 | end 17 | logger.trace("update_existing_specifically - #{watched_file.path}: seeking to #{position}") 18 | watched_file.update_bytes_read(position) 19 | sincedb_value.update_position(position) 20 | end 21 | end 22 | end end end 23 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/handlers/delete.rb: 
-------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module TailMode module Handlers 4 | class Delete < Base 5 | DATA_LOSS_WARNING = "watched file path was deleted or rotated before all content was read, if the file is found again it will be read from the last position" 6 | def handle_specifically(watched_file) 7 | # TODO consider trying to find the renamed file - it will have the same inode. 8 | # Needs a rotate scheme rename hint from user e.g. "-YYYY-MM-DD-N." or "..N" 9 | # send the found content to the same listener (stream identity) 10 | logger.trace? && logger.trace(__method__.to_s, :path => watched_file.path, :watched_file => watched_file.details) 11 | if watched_file.bytes_unread > 0 12 | logger.warn(DATA_LOSS_WARNING, :path => watched_file.path, :unread_bytes => watched_file.bytes_unread) 13 | end 14 | watched_file.listener.deleted 15 | # no need to worry about data in the buffer 16 | # if found it will be associated by inode and read from last position 17 | sincedb_collection.watched_file_deleted(watched_file) 18 | watched_file.file_close 19 | end 20 | end 21 | end end end 22 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/handlers/grow.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module TailMode module Handlers 4 | class Grow < Base 5 | def handle_specifically(watched_file) 6 | watched_file.file_seek(watched_file.bytes_read) 7 | loop do 8 | break if quit? 9 | loop_control = watched_file.loop_control_adjusted_for_stat_size 10 | controlled_read(watched_file, loop_control) 11 | break unless loop_control.keep_looping? 
12 | end 13 | end 14 | end 15 | end end end 16 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/handlers/shrink.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module TailMode module Handlers 4 | class Shrink < Base 5 | def handle_specifically(watched_file) 6 | add_or_update_sincedb_collection(watched_file) 7 | watched_file.file_seek(watched_file.bytes_read) 8 | loop do 9 | break if quit? 10 | loop_control = watched_file.loop_control_adjusted_for_stat_size 11 | controlled_read(watched_file, loop_control) 12 | break unless loop_control.keep_looping? 13 | end 14 | end 15 | 16 | def update_existing_specifically(watched_file, sincedb_value) 17 | # we have a match but size is smaller - set all to zero 18 | watched_file.reset_bytes_unread 19 | sincedb_value.update_position(0) 20 | logger.trace? && logger.trace("update_existing_specifically: was truncated seeking to beginning", :watched_file => watched_file.details, :sincedb_value => sincedb_value) 21 | end 22 | end 23 | end end end 24 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/handlers/timeout.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module TailMode module Handlers 4 | class Timeout < Base 5 | def handle_specifically(watched_file) 6 | watched_file.listener.timed_out 7 | watched_file.file_close 8 | end 9 | end 10 | end end end 11 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/handlers/unignore.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module FileWatch module TailMode module Handlers 4 | class Unignore < Base 5 | # a watched file can be put straight into the ignored state 6 | # before any other 
handling has been done 7 | # at a minimum we create or associate a sincedb value 8 | def handle_specifically(watched_file) 9 | add_or_update_sincedb_collection(watched_file) 10 | end 11 | 12 | def get_new_value_specifically(watched_file) 13 | # for file initially ignored their bytes_read was set to stat.size 14 | # use this value not the `start_new_files_at` for the position 15 | # logger.trace("get_new_value_specifically", "watched_file" => watched_file.inspect) 16 | SincedbValue.new(watched_file.bytes_read).tap do |sincedb_value| 17 | sincedb_value.set_watched_file(watched_file) 18 | logger.trace? && logger.trace("get_new_value_specifically: unignore", :watched_file => watched_file.details, :sincedb_value => sincedb_value) 19 | end 20 | end 21 | 22 | def update_existing_specifically(watched_file, sincedb_value) 23 | # when this watched_file was ignored it had it bytes_read set to eof 24 | # now the file has changed (watched_file.size_changed?) 25 | # it has been put into the watched state so when it becomes active 26 | # we will handle grow or shrink 27 | # for now we seek to where we were before the file got ignored (grow) 28 | # or to the start (shrink) 29 | logger.trace? && logger.trace("update_existing_specifically: unignore", :watched_file => watched_file.details, :sincedb_value => sincedb_value) 30 | position = 0 31 | if watched_file.shrunk? 
32 | watched_file.update_bytes_read(0) 33 | else 34 | position = watched_file.bytes_read 35 | end 36 | sincedb_value.update_position(position) 37 | end 38 | end 39 | end end end 40 | -------------------------------------------------------------------------------- /lib/filewatch/tail_mode/processor.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require 'filewatch/processor' 3 | require_relative "handlers/base" 4 | require_relative "handlers/create_initial" 5 | require_relative "handlers/create" 6 | require_relative "handlers/delete" 7 | require_relative "handlers/grow" 8 | require_relative "handlers/shrink" 9 | require_relative "handlers/timeout" 10 | require_relative "handlers/unignore" 11 | 12 | module FileWatch module TailMode 13 | # Must handle 14 | # :create_initial - file is discovered and we have no record of it in the sincedb 15 | # :create - file is discovered and we have seen it before in the sincedb 16 | # :grow - file has more content 17 | # :shrink - file has less content 18 | # :delete - file can't be read 19 | # :timeout - file is closable 20 | # :unignore - file was ignored, but have now received new content 21 | class Processor < FileWatch::Processor 22 | 23 | def initialize_handlers(sincedb_collection, observer) 24 | @sincedb_collection = sincedb_collection 25 | @create_initial = Handlers::CreateInitial.new(self, sincedb_collection, observer, @settings) 26 | @create = Handlers::Create.new(self, sincedb_collection, observer, @settings) 27 | @grow = Handlers::Grow.new(self, sincedb_collection, observer, @settings) 28 | @shrink = Handlers::Shrink.new(self, sincedb_collection, observer, @settings) 29 | @delete = Handlers::Delete.new(self, sincedb_collection, observer, @settings) 30 | @timeout = Handlers::Timeout.new(self, sincedb_collection, observer, @settings) 31 | @unignore = Handlers::Unignore.new(self, sincedb_collection, observer, @settings) 32 | end 33 | 34 | def 
create(watched_file) 35 | @create.handle(watched_file) 36 | end 37 | 38 | def create_initial(watched_file) 39 | @create_initial.handle(watched_file) 40 | end 41 | 42 | def grow(watched_file) 43 | @grow.handle(watched_file) 44 | end 45 | 46 | def shrink(watched_file) 47 | @shrink.handle(watched_file) 48 | end 49 | 50 | def delete(watched_file) 51 | @delete.handle(watched_file) 52 | end 53 | 54 | def timeout(watched_file) 55 | @timeout.handle(watched_file) 56 | end 57 | 58 | def unignore(watched_file) 59 | @unignore.handle(watched_file) 60 | end 61 | 62 | def process_all_states(watched_files) 63 | process_closed(watched_files) 64 | return if watch.quit? 65 | process_ignored(watched_files) 66 | return if watch.quit? 67 | process_delayed_delete(watched_files) 68 | return if watch.quit? 69 | process_restat_for_watched_and_active(watched_files) 70 | return if watch.quit? 71 | process_rotation_in_progress(watched_files) 72 | return if watch.quit? 73 | process_watched(watched_files) 74 | return if watch.quit? 75 | process_active(watched_files) 76 | end 77 | 78 | private 79 | 80 | def process_closed(watched_files) 81 | logger.trace(__method__.to_s) 82 | # Handles watched_files in the closed state. 83 | # if its size changed it is put into the watched state 84 | watched_files.each do |watched_file| 85 | next unless watched_file.closed? 86 | common_restat_with_delay(watched_file, __method__) do 87 | # it won't do this if rotation is detected 88 | if watched_file.size_changed? 89 | # if the closed file changed, move it to the watched state 90 | # not to active state because we want to respect the active files window. 91 | watched_file.watch 92 | end 93 | end 94 | break if watch.quit? 95 | end 96 | end 97 | 98 | def process_ignored(watched_files) 99 | logger.trace(__method__.to_s) 100 | # Handles watched_files in the ignored state. 
101 | # if its size changed: 102 | # put it in the watched state 103 | # invoke unignore 104 | watched_files.each do |watched_file| 105 | next unless watched_file.ignored? 106 | common_restat_with_delay(watched_file, __method__) do 107 | # it won't do this if rotation is detected 108 | if watched_file.size_changed? 109 | watched_file.watch 110 | unignore(watched_file) 111 | end 112 | end 113 | break if watch.quit? 114 | end 115 | end 116 | 117 | def process_delayed_delete(watched_files) 118 | # defer the delete to one loop later to ensure that the stat really really can't find a renamed file 119 | # because a `stat` can be called right in the middle of the rotation rename cascade 120 | logger.trace(__method__.to_s) 121 | watched_files.each do |watched_file| 122 | next unless watched_file.delayed_delete? 123 | logger.trace(">>> Delayed Delete", :path => watched_file.path) 124 | common_restat_without_delay(watched_file, __method__) do 125 | logger.trace(">>> Delayed Delete: file at path found again", :watched_file => watched_file.details) 126 | watched_file.file_at_path_found_again 127 | end 128 | end 129 | end 130 | 131 | def process_restat_for_watched_and_active(watched_files) 132 | # do restat on all watched and active states once now. closed and ignored have been handled already 133 | logger.trace(__method__.to_s) 134 | watched_files.each do |watched_file| 135 | next if !watched_file.watched? && !watched_file.active? 136 | common_restat_with_delay(watched_file, __method__) 137 | end 138 | end 139 | 140 | def process_rotation_in_progress(watched_files) 141 | logger.trace(__method__.to_s) 142 | watched_files.each do |watched_file| 143 | next unless watched_file.rotation_in_progress? 144 | if !watched_file.all_read? 145 | if watched_file.file_open? 146 | # rotated file but original opened file is not fully read 147 | # we need to keep reading the open file, if we close it we lose it because the path is now pointing at a different file. 
            logger.trace(">>> Rotation In Progress - inode change detected and original content is not fully read, reading all", :watched_file => watched_file.details)
            # need to fully read open file while we can
            watched_file.set_maximum_read_loop
            grow(watched_file)
            watched_file.set_standard_read_loop
          else
            logger.warn(">>> Rotation In Progress - inode change detected and original content is not fully read, file is closed and path points to new content", :watched_file => watched_file.details)
          end
        end
        # current_key: sincedb key from the last known stat of this watched_file;
        # potential_key: key derived from the fresh stat (the rotated-in inode)
        current_key = watched_file.sincedb_key
        sdb_value = @sincedb_collection.get(current_key)
        potential_key = watched_file.stat_sincedb_key
        potential_sdb_value = @sincedb_collection.get(potential_key)
        logger.trace(">>> Rotation In Progress", :watched_file => watched_file.details, :found_sdb_value => sdb_value, :potential_key => potential_key, :potential_sdb_value => potential_sdb_value)
        if potential_sdb_value.nil?
          # the new inode has never been seen: treat as a brand new file and
          # create a fresh sincedb entry starting at position 0
          logger.trace("---------- >>>> Rotation In Progress: rotating as existing file")
          watched_file.rotate_as_file
          trace_message = "---------- >>>> Rotation In Progress: no potential sincedb value "
          if sdb_value.nil?
            trace_message.concat("AND no found sincedb value")
          else
            trace_message.concat("BUT found sincedb value")
            # detach the old sincedb entry from this watched_file
            sdb_value.clear_watched_file
          end
          logger.trace(trace_message)
          new_sdb_value = SincedbValue.new(0)
          new_sdb_value.set_watched_file(watched_file)
          @sincedb_collection.set(potential_key, new_sdb_value)
        else
          # the new inode already has a sincedb entry; rebind this watched_file to it
          other_watched_file = potential_sdb_value.watched_file
          if other_watched_file.nil?
            logger.trace("---------- >>>> Rotation In Progress: rotating as existing file WITH potential sincedb value that does not have a watched file reference !!!!!!!!!!!!!!!!!")
            # resume from the position recorded for the new inode
            watched_file.rotate_as_file(potential_sdb_value.position)
            sdb_value.clear_watched_file unless sdb_value.nil?
            potential_sdb_value.set_watched_file(watched_file)
          else
            logger.trace("---------- >>>> Rotation In Progress: rotating from...", :this_watched_file => watched_file.details, :other_watched_file => other_watched_file.details)
            watched_file.rotate_from(other_watched_file)
            sdb_value.clear_watched_file unless sdb_value.nil?
            potential_sdb_value.set_watched_file(watched_file)
          end
        end
        logger.trace("---------- >>>> Rotation In Progress: after handling rotation", :this_watched_file => watched_file.details, :sincedb_value => (potential_sdb_value || sdb_value))
      end
    end

    # Promotes a limited slice of watched files to the active state, bounded by
    # the max_active setting; warns (rate-limited) when the window is full.
    def process_watched(watched_files)
      # Handles watched_files in the watched state.
      # for a slice of them:
      #   move to the active state
      #   and we allow the block to open the file and create a sincedb collection record if needed
      # some have never been active and some have
      # those that were active before but are watched now were closed under constraint
      logger.trace(__method__.to_s)
      # how much of the max active window is available
      to_take = @settings.max_active - watched_files.count(&:active?)
      if to_take > 0
        watched_files.select(&:watched?).take(to_take).each do |watched_file|
          watched_file.activate
          # initial? distinguishes files present at startup from later discoveries
          if watched_file.initial?
            create_initial(watched_file)
          else
            create(watched_file)
          end
          break if watch.quit?
        end
      else
        # active window is full: warn at most once per MAX_FILES_WARN_INTERVAL
        now = Time.now.to_i
        if (now - watch.lastwarn_max_files) > MAX_FILES_WARN_INTERVAL
          waiting = watched_files.size - @settings.max_active
          logger.warn("#{@settings.max_warn_msg}, files yet to open: #{waiting}")
          watch.lastwarn_max_files = now
        end
      end
    end

    # Reads from files in the active state: grow, shrink (truncate) or no-op,
    # then closes any file that has become closable.
    def process_active(watched_files)
      logger.trace(__method__.to_s)
      # Handles watched_files in the active state.
      # files have been opened at this point
      watched_files.each do |watched_file|
        next unless watched_file.active?
        break if watch.quit?
        path = watched_file.filename
        if watched_file.grown?
          logger.trace("#{__method__} file grew: new size is #{watched_file.last_stat_size}, bytes read #{watched_file.bytes_read}", :path => path)
          grow(watched_file)
        elsif watched_file.shrunk?
          # a shrink with unread bytes means content vanished before we read it
          if watched_file.bytes_unread > 0
            logger.warn("potential data loss, file truncate detected with #{watched_file.bytes_unread} unread bytes", :path => path)
          end
          # we don't update the size here, its updated when we actually read
          logger.trace("#{__method__} file shrunk: new size is #{watched_file.last_stat_size}, old size #{watched_file.bytes_read}", :path => path)
          shrink(watched_file)
        else
          # same size, do nothing
          logger.trace("#{__method__} no change", :path => path)
        end
        # can any active files be closed to make way for waiting files?
        if watched_file.file_closable?
          logger.trace("#{__method__} file expired", :path => path)
          timeout(watched_file)
          watched_file.close
        end
      end
    end

    # Restat wrapper: on a stat failure the delete decision is deferred to the
    # next loop (delay_delete) instead of removing the file immediately.
    def common_restat_with_delay(watched_file, action, &block)
      common_restat(watched_file, action, true, &block)
    end

    # Restat wrapper: on a stat failure the file is removed from the collection
    # right away (used once the delay has already elapsed).
    def common_restat_without_delay(watched_file, action, &block)
      common_restat(watched_file, action, false, &block)
    end

    # Restats watched_file and yields to the optional block unless the restat
    # detected a rotation. Returns true when no stat error occurred.
    # action: calling method name (Symbol), used only for log context.
    # delay:  on Errno::ENOENT, true -> mark delay_delete; false -> delete now.
    def common_restat(watched_file, action, delay, &block)
      all_ok = true
      begin
        restat(watched_file)
        if watched_file.rotation_in_progress?
          logger.trace("-------------------- >>>>> restat - rotation_detected", :watched_file => watched_file.details, :new_sincedb_key => watched_file.stat_sincedb_key)
          # don't yield to closed and ignore processing
        else
          yield if block_given?
        end
      rescue Errno::ENOENT
        if delay
          # first failure: defer; the rename cascade may still be in flight
          logger.trace("#{action} - delaying the stat fail on", :filename => watched_file.filename)
          watched_file.delay_delete
        else
          # file has gone away or we can't read it anymore.
          logger.trace("#{action} - after a delay, really can't find this file", :path => watched_file.path)
          watched_file.unwatch
          logger.trace("#{action} - removing from collection", :filename => watched_file.filename)
          delete(watched_file)
          add_deletable_path watched_file.path
          all_ok = false
        end
      rescue => e
        # any other stat error is logged but does not abort the processing loop
        logger.error("#{action} - other error", error_details(e, watched_file))
        all_ok = false
      end
      all_ok
    end
  end
end end
-------------------------------------------------------------------------------- /lib/filewatch/watch.rb: --------------------------------------------------------------------------------
# encoding: utf-8
require "logstash/util/loggable"
require "concurrent/atomic/atomic_boolean"

module FileWatch
  # Drives the discover/process loop: delegates path discovery to a Discoverer
  # and per-file state handling to a (tail or read mode) Processor.
  class Watch
    include LogStash::Util::Loggable

    attr_accessor :lastwarn_max_files
    attr_reader :discoverer, :processor, :watched_files_collection

    # discoverer: finds files for watched globs and owns the files collection.
    # processor:  mode-specific state machine; gets a back-reference to us.
    def initialize(discoverer, processor, settings)
      @discoverer = discoverer
      @watched_files_collection = discoverer.watched_files_collection
      @settings = settings

      # we need to be threadsafe about the quit mutation
      @quit = Concurrent::AtomicBoolean.new(false)
      @lastwarn_max_files = 0

      @processor = processor
      @processor.add_watch(self)
    end

    # Registers a path (glob) for discovery. Always returns true.
    def watch(path)
      @discoverer.add_path(path)
      # don't return whatever @discoverer.add_path returns
      return true
    end

    # Runs one discovery pass. Always returns true.
    def discover
      @discoverer.discover
      # don't return whatever @discoverer.discover returns
      return true
    end

    def subscribe(observer, sincedb_collection)
    @processor.initialize_handlers(sincedb_collection, observer)

    # glob counts loop iterations between discovery passes
    glob = 0
    interval = @settings.discover_interval
    reset_quit
    until quit?
      iterate_on_state
      # Don't discover new files when files to read are known at the beginning
      break if quit?
      sincedb_collection.write_if_requested
      glob += 1
      # discover new files every `discover_interval` iterations, unless
      # exit_after_read mode fixed the file set up front
      if glob == interval && !@settings.exit_after_read
        discover
        glob = 0
      end
      break if quit?
      # NOTE: maybe the plugin should validate stat_interval <= sincedb_write_interval <= sincedb_clean_after
      sleep(@settings.stat_interval)
      # we need to check potential expired keys (sincedb_clean_after) periodically
      sincedb_collection.flush_at_interval
    end
    sincedb_collection.write_if_requested # does nothing if no requests to write were lodged.
    @watched_files_collection.close_all
  end # def subscribe

  # Read mode processor will handle watched_files in the closed, ignored, watched and active state
  # differently from Tail mode - see the ReadMode::Processor and TailMode::Processor
  def iterate_on_state
    return if @watched_files_collection.empty?
    begin
      # creates this snapshot of watched_file values just once
      watched_files = @watched_files_collection.values
      @processor.process_all_states(watched_files)
    ensure
      # always purge paths the processor marked deletable, even on error
      @watched_files_collection.remove_paths(@processor.clear_deletable_paths)
    end
  end

  # Requests loop shutdown; safe to call from another thread (atomic flag).
  def quit
    @quit.make_true
  end

  # True when quit was requested, or when exit_after_read mode has drained
  # every watched file.
  def quit?
    @quit.true? || (@settings.exit_after_read && @watched_files_collection.empty?)
82 | end 83 | 84 | private 85 | 86 | def reset_quit 87 | @quit.make_false 88 | end 89 | end 90 | end 91 | -------------------------------------------------------------------------------- /lib/filewatch/watched_files_collection.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | require 'java' 4 | 5 | module FileWatch 6 | # @see `org.logstash.filewatch.WatchedFilesCollection` 7 | class WatchedFilesCollection 8 | 9 | # Closes all managed watched files. 10 | # @see FileWatch::WatchedFile#file_close 11 | def close_all 12 | each_file(&:file_close) # synchronized 13 | end 14 | 15 | # @return [Enumerable] managed path keys (snapshot) 16 | alias keys paths 17 | 18 | # @return [Enumerable] managed files (snapshot) 19 | alias values files 20 | 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/filewatch/winhelper.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "ffi" 3 | 4 | module Winhelper 5 | extend FFI::Library 6 | 7 | ffi_lib 'kernel32' 8 | ffi_convention :stdcall 9 | 10 | class FileTime < FFI::Struct 11 | layout :lowDateTime, :uint, :highDateTime, :uint 12 | end 13 | 14 | #http://msdn.microsoft.com/en-us/library/windows/desktop/aa363788(v=vs.85).aspx 15 | class FileInformation < FFI::Struct 16 | layout :fileAttributes, :uint, #DWORD dwFileAttributes; 17 | :createTime, FileTime, # FILETIME ftCreationTime; 18 | :lastAccessTime, FileTime, # FILETIME ftLastAccessTime; 19 | :lastWriteTime, FileTime, # FILETIME ftLastWriteTime; 20 | :volumeSerialNumber, :uint, # DWORD dwVolumeSerialNumber; 21 | :fileSizeHigh, :uint, # DWORD nFileSizeHigh; 22 | :fileSizeLow, :uint, # DWORD nFileSizeLow; 23 | :numberOfLinks, :uint, # DWORD nNumberOfLinks; 24 | :fileIndexHigh, :uint, # DWORD nFileIndexHigh; 25 | :fileIndexLow, :uint # DWORD nFileIndexLow; 26 | end 27 | 28 | # 
https://msdn.microsoft.com/en-us/library/windows/desktop/hh965605(v=vs.85).aspx 29 | class FileId128 < FFI::Struct 30 | layout :lowPart, :ulong_long, :highPart, :ulong_long 31 | end 32 | 33 | # https://msdn.microsoft.com/en-us/library/windows/desktop/hh802691(v=vs.85).aspx 34 | class FileIdInfo < FFI::Struct 35 | layout :volumeSerialNumber, :ulong_long, :fileId, FileId128 36 | # ULONGLONG VolumeSerialNumber; 37 | # FILE_ID_128 FileId; 38 | end 39 | 40 | FileInfoEnum = enum( 41 | :FileBasicInfo, 42 | :FileStandardInfo, 43 | :FileNameInfo, 44 | :FileRenameInfo, 45 | :FileDispositionInfo, 46 | :FileAllocationInfo, 47 | :FileEndOfFileInfo, 48 | :FileStreamInfo, 49 | :FileCompressionInfo, 50 | :FileAttributeTagInfo, 51 | :FileIdBothDirectoryInfo, 52 | :FileIdBothDirectoryRestartInfo, 53 | :FileIoPriorityHintInfo, 54 | :FileRemoteProtocolInfo, 55 | :FileFullDirectoryInfo, 56 | :FileFullDirectoryRestartInfo, 57 | :FileStorageInfo, 58 | :FileAlignmentInfo, 59 | :FileIdInfo, 60 | :FileIdExtdDirectoryInfo, 61 | :FileIdExtdDirectoryRestartInfo 62 | ) 63 | 64 | #http://msdn.microsoft.com/en-us/library/windows/desktop/aa363858(v=vs.85).aspx 65 | #HANDLE WINAPI CreateFile( 66 | # _In_ LPCTSTR lpFileName, 67 | # _In_ DWORD dwDesiredAccess, 68 | # _In_ DWORD dwShareMode, 69 | # _In_opt_ LPSECURITY_ATTRIBUTES lpSecurityAttributes, 70 | # _In_ DWORD dwCreationDisposition, 71 | # _In_ DWORD dwFlagsAndAttributes, _In_opt_ HANDLE hTemplateFile); 72 | attach_function :CreateFileA, [:pointer, :uint, :uint, :pointer, :uint, :uint, :pointer], :pointer 73 | attach_function :CreateFileW, [:pointer, :uint, :uint, :pointer, :uint, :uint, :pointer], :pointer 74 | 75 | #http://msdn.microsoft.com/en-us/library/windows/desktop/aa364952(v=vs.85).aspx 76 | #BOOL WINAPI GetFileInformationByHandle( 77 | # _In_ HANDLE hFile, 78 | # _Out_ LPBY_HANDLE_FILE_INFORMATION lpFileInformation); 79 | attach_function :GetFileInformationByHandle, [:pointer, :pointer], :int 80 | 81 | 
#https://msdn.microsoft.com/en-us/library/windows/desktop/aa364953(v=vs.85).aspx 82 | #BOOL WINAPI GetFileInformationByHandleEx( 83 | # _In_ HANDLE hFile, 84 | # _In_ FILE_INFO_BY_HANDLE_CLASS FileInformationClass, 85 | # _Out_ LPVOID lpFileInformation, 86 | # _In_ DWORD dwBufferSize ); 87 | attach_function :GetFileInformationByHandleEx, [:pointer, FileInfoEnum, :pointer, :uint], :uint 88 | 89 | attach_function :CloseHandle, [:pointer], :int 90 | 91 | #https://msdn.microsoft.com/en-us/library/windows/desktop/aa964920(v=vs.85).aspx 92 | #BOOL WINAPI GetVolumeInformationByHandleW( 93 | # _In_ HANDLE hFile, 94 | # _Out_opt_ LPWSTR lpVolumeNameBuffer, 95 | # _In_ DWORD nVolumeNameSize, 96 | # _Out_opt_ LPDWORD lpVolumeSerialNumber, 97 | # _Out_opt_ LPDWORD lpMaximumComponentLength, 98 | # _Out_opt_ LPDWORD lpFileSystemFlags, 99 | # _Out_opt_ LPWSTR lpFileSystemNameBuffer, 100 | # _In_ DWORD nFileSystemNameSize); 101 | attach_function :GetVolumeInformationByHandleW, [:pointer, :pointer, :uint, :pointer, :pointer, :pointer, :pointer, :uint], :int 102 | 103 | def self.file_system_type_from_path(path) 104 | file_system_type_from_handle(open_handle_from_path(path)) 105 | end 106 | 107 | def self.file_system_type_from_io(io) 108 | FileWatch::FileExt.io_handle(io) do |pointer| 109 | file_system_type_from_handle(pointer, false) 110 | end 111 | end 112 | 113 | def self.file_system_type_from_handle(handle, close_handle = true) 114 | out = FFI::MemoryPointer.new(:char, 256, true) 115 | if GetVolumeInformationByHandleW(handle, nil, 0, nil, nil, nil, out, 256) > 0 116 | char_pointer_to_ruby_string(out) 117 | else 118 | "unknown" 119 | end 120 | ensure 121 | CloseHandle(handle) if close_handle 122 | end 123 | 124 | def self.identifier_from_io(io) 125 | FileWatch::FileExt.io_handle(io) do |pointer| 126 | identifier_from_handle(pointer, false) 127 | end 128 | end 129 | 130 | def self.identifier_from_path(path) 131 | identifier_from_handle(open_handle_from_path(path)) 132 | end 133 | 
134 | def self.identifier_from_path_ex(path) 135 | identifier_from_handle_ex(open_handle_from_path(path)) 136 | end 137 | 138 | def self.identifier_from_io_ex(io) 139 | FileWatch::FileExt.io_handle(io) do |pointer| 140 | identifier_from_handle_ex(pointer, false) 141 | end 142 | end 143 | 144 | def self.identifier_from_handle_ex(handle, close_handle = true) 145 | fileIdInfo = Winhelper::FileIdInfo.new 146 | success = GetFileInformationByHandleEx(handle, :FileIdInfo, fileIdInfo, fileIdInfo.size) 147 | if success > 0 148 | vsn = fileIdInfo[:volumeSerialNumber] 149 | lpfid = fileIdInfo[:fileId][:lowPart] 150 | hpfid = fileIdInfo[:fileId][:highPart] 151 | return "#{vsn}-#{lpfid}-#{hpfid}" 152 | else 153 | return 'unknown' 154 | end 155 | ensure 156 | CloseHandle(handle) if close_handle 157 | end 158 | 159 | def self.identifier_from_handle(handle, close_handle = true) 160 | fileInfo = Winhelper::FileInformation.new 161 | success = GetFileInformationByHandle(handle, fileInfo) 162 | if success > 0 163 | #args = [ 164 | # fileInfo[:fileAttributes], fileInfo[:volumeSerialNumber], fileInfo[:fileSizeHigh], fileInfo[:fileSizeLow], 165 | # fileInfo[:numberOfLinks], fileInfo[:fileIndexHigh], fileInfo[:fileIndexLow] 166 | # ] 167 | #p "Information: %u %u %u %u %u %u %u " % args 168 | #this is only guaranteed on NTFS, for ReFS on windows 2012, GetFileInformationByHandleEx should be used with FILE_ID_INFO, which returns a 128 bit identifier 169 | return "#{fileInfo[:volumeSerialNumber]}-#{fileInfo[:fileIndexLow]}-#{fileInfo[:fileIndexHigh]}" 170 | else 171 | return 'unknown' 172 | end 173 | ensure 174 | CloseHandle(handle) if close_handle 175 | end 176 | 177 | private 178 | 179 | def self.open_handle_from_path(path) 180 | CreateFileW(utf16le(path), 0, 7, nil, 3, 128, nil) 181 | end 182 | 183 | def self.char_pointer_to_ruby_string(char_pointer, length = 256) 184 | bytes = char_pointer.get_array_of_uchar(0, length) 185 | ignore = bytes.reverse.index{|b| b != 0} - 1 186 | our_bytes = 
bytes[0, bytes.length - ignore] 187 | our_bytes.pack("C*").force_encoding("UTF-16LE").encode("UTF-8") 188 | end 189 | 190 | def self.utf16le(string) 191 | to_cstring(string).encode("UTF-16LE") 192 | end 193 | 194 | def self.to_cstring(rubystring) 195 | rubystring + 0.chr 196 | end 197 | 198 | def self.win1252(string) 199 | string.encode("Windows-1252") 200 | end 201 | end 202 | 203 | 204 | #fileId = Winhelper.GetWindowsUniqueFileIdentifier('C:\inetpub\logs\LogFiles\W3SVC1\u_ex1fdsadfsadfasdf30612.log') 205 | #p "FileId: " + fileId 206 | #p "outside function, sleeping" 207 | #sleep(10) 208 | -------------------------------------------------------------------------------- /lib/logstash/inputs/delete_completed_file_handler.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module LogStash module Inputs 4 | class DeleteCompletedFileHandler 5 | def initialize(watch) 6 | @watch = watch 7 | end 8 | 9 | def handle(path) 10 | Pathname.new(path).unlink rescue nil 11 | @watch.watched_files_collection.remove_paths([path]) 12 | end 13 | end 14 | end end 15 | -------------------------------------------------------------------------------- /lib/logstash/inputs/file/patch.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | class LogStash::Codecs::Base 3 | # TODO - move this to core 4 | if !method_defined?(:accept) 5 | def accept(listener) 6 | decode(listener.data) do |event| 7 | listener.process_event(event) 8 | end 9 | end 10 | end 11 | if !method_defined?(:auto_flush) 12 | def auto_flush(*) 13 | end 14 | end 15 | end 16 | 17 | -------------------------------------------------------------------------------- /lib/logstash/inputs/file_listener.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module LogStash module Inputs 4 | # As and when a new WatchedFile is processed FileWatch asks for an 
instance of this class for the 5 | # file path of that WatchedFile. All subsequent callbacks are sent via this listener instance. 6 | # The file is essentially a stream and the path is the identity of that stream. 7 | class FileListener 8 | attr_reader :input, :path, :data 9 | # construct with link back to the input plugin instance. 10 | def initialize(path, input, data = nil) 11 | @path, @input = path, input 12 | @data = data 13 | end 14 | 15 | def opened 16 | end 17 | 18 | def eof 19 | end 20 | 21 | def error 22 | end 23 | 24 | def reading_completed 25 | end 26 | 27 | def timed_out 28 | input.codec.evict(path) 29 | end 30 | 31 | def deleted 32 | input.codec.evict(path) 33 | input.handle_deletable_path(path) 34 | end 35 | 36 | def accept(data) 37 | # and push transient data filled dup listener downstream 38 | input.log_line_received(path, data) 39 | input.codec.accept(self.class.new(path, input, data)) 40 | end 41 | 42 | def process_event(event) 43 | input.post_process_this(event, path) 44 | end 45 | 46 | end 47 | 48 | class FlushableListener < FileListener 49 | attr_writer :path 50 | end 51 | end end 52 | -------------------------------------------------------------------------------- /lib/logstash/inputs/friendly_durations.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module LogStash module Inputs 4 | module FriendlyDurations 5 | NUMBERS_RE = /^(?\d+(\.\d+)?)\s?(?s((ec)?(ond)?)(s)?|m((in)?(ute)?)(s)?|h(our)?(s)?|d(ay)?(s)?|w(eek)?(s)?|us(ec)?(s)?|ms(ec)?(s)?)?$/ 6 | HOURS = 3600 7 | DAYS = 24 * HOURS 8 | MEGA = 10**6 9 | KILO = 10**3 10 | 11 | ValidatedStruct = Struct.new(:value, :error_message) do 12 | def to_a 13 | error_message.nil? ? [true, value] : [false, error_message] 14 | end 15 | end 16 | 17 | def self.call(value, unit = "sec") 18 | # coerce into seconds 19 | val_string = value.to_s.strip 20 | matched = NUMBERS_RE.match(val_string) 21 | if matched.nil? 
22 | failed_message = "Value '#{val_string}' is not a valid duration string e.g. 200 usec, 250ms, 60 sec, 18h, 21.5d, 1 day, 2w, 6 weeks" 23 | return ValidatedStruct.new(nil, failed_message) 24 | end 25 | multiplier = matched[:units] || unit 26 | numeric = matched[:number].to_f 27 | case multiplier 28 | when "m","min","mins","minute","minutes" 29 | ValidatedStruct.new(numeric * 60, nil) 30 | when "h","hour","hours" 31 | ValidatedStruct.new(numeric * HOURS, nil) 32 | when "d","day","days" 33 | ValidatedStruct.new(numeric * DAYS, nil) 34 | when "w","week","weeks" 35 | ValidatedStruct.new(numeric * 7 * DAYS, nil) 36 | when "ms","msec","msecs" 37 | ValidatedStruct.new(numeric / KILO, nil) 38 | when "us","usec","usecs" 39 | ValidatedStruct.new(numeric / MEGA, nil) 40 | else 41 | ValidatedStruct.new(numeric, nil) 42 | end 43 | end 44 | end 45 | end end 46 | -------------------------------------------------------------------------------- /lib/logstash/inputs/log_completed_file_handler.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | module LogStash module Inputs 4 | class LogCompletedFileHandler 5 | def initialize(log_completed_file_path) 6 | @log_completed_file_path = Pathname.new(log_completed_file_path) 7 | end 8 | 9 | def handle(path) 10 | @log_completed_file_path.open("a") { |fd| fd.puts(path) } 11 | end 12 | end 13 | end end 14 | -------------------------------------------------------------------------------- /logstash-input-file.gemspec: -------------------------------------------------------------------------------- 1 | Gem::Specification.new do |s| 2 | 3 | s.name = 'logstash-input-file' 4 | s.version = '4.4.6' 5 | s.licenses = ['Apache-2.0'] 6 | s.summary = "Streams events from files" 7 | s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. 
This gem is not a stand-alone program" 8 | s.authors = ["Elastic"] 9 | s.email = 'info@elastic.co' 10 | s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html" 11 | s.require_paths = ["lib"] 12 | 13 | # Files 14 | s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "JAR_VERSION", "docs/**/*"] 15 | 16 | # Tests 17 | s.test_files = s.files.grep(%r{^(test|spec|features)/}) 18 | 19 | # Special flag to let us know this is actually a logstash plugin 20 | s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" } 21 | 22 | # Gem dependencies 23 | s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99" 24 | 25 | s.add_runtime_dependency 'logstash-codec-plain' 26 | 27 | if RUBY_VERSION.start_with?("1") 28 | s.add_runtime_dependency 'rake', '~> 12.2.0' 29 | s.add_runtime_dependency 'addressable', '~> 2.4.0' 30 | else 31 | s.add_runtime_dependency 'addressable' 32 | end 33 | 34 | s.add_runtime_dependency 'concurrent-ruby', '~> 1.0' 35 | s.add_runtime_dependency 'logstash-codec-multiline', ['~> 3.0'] 36 | s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~>1.3' 37 | 38 | s.add_development_dependency 'stud', ['~> 0.0.19'] 39 | s.add_development_dependency 'logstash-devutils' 40 | s.add_development_dependency 'logstash-codec-json' 41 | s.add_development_dependency 'rspec-sequencing' 42 | s.add_development_dependency "rspec-wait" 43 | s.add_development_dependency 'timecop' 44 | end 45 | -------------------------------------------------------------------------------- /run_until_fail.sh: -------------------------------------------------------------------------------- 1 | while true 2 | do 3 | LOG_AT=ERROR bundle exec rspec -fd --fail-fast --tag ~lsof ./spec || break 4 | done 5 | -------------------------------------------------------------------------------- /settings.gradle: 
-------------------------------------------------------------------------------- 1 | rootProject.name = 'filewatch' -------------------------------------------------------------------------------- /spec/file_ext/file_ext_windows_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | require_relative '../filewatch/spec_helper' 4 | 5 | if LogStash::Environment.windows? 6 | describe "basic ops" do 7 | let(:fixture_dir) { Pathname.new(FileWatch::FIXTURE_DIR).expand_path } 8 | let(:file_path) { fixture_dir.join('uncompressed.log') } 9 | it "path works" do 10 | path = file_path.to_path 11 | identifier = Winhelper.identifier_from_path(path) 12 | STDOUT.puts("--- >>", identifier, "------") 13 | expect(identifier.count('-')).to eq(2) 14 | fs_name = Winhelper.file_system_type_from_path(path) 15 | STDOUT.puts("--- >>", fs_name, "------") 16 | expect(fs_name).to eq("NTFS") 17 | # identifier = Winhelper.identifier_from_path_ex(path) 18 | # STDOUT.puts("--- >>", identifier, "------") 19 | # expect(identifier.count('-')).to eq(2) 20 | end 21 | 22 | it "io works" do 23 | file = FileWatch::FileOpener.open(file_path.to_path) 24 | identifier = Winhelper.identifier_from_io(file) 25 | file.close 26 | STDOUT.puts("--- >>", identifier, "------") 27 | expect(identifier.count('-')).to eq(2) 28 | # fs_name = Winhelper.file_system_type_from_io(file) 29 | # STDOUT.puts("--- >>", fs_name, "------") 30 | # expect(fs_name).to eq("NTFS") 31 | # identifier = Winhelper.identifier_from_path_ex(path) 32 | # STDOUT.puts("--- >>", identifier, "------") 33 | # expect(identifier.count('-')).to eq(2) 34 | end 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /spec/filewatch/buftok_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require_relative 'spec_helper' 3 | 4 | describe FileWatch::BufferedTokenizer do 5 | 6 | context 
"when using the default delimiter" do 7 | it "splits the lines correctly" do 8 | expect(subject.extract("hello\nworld\n")).to eq ["hello", "world"] 9 | end 10 | 11 | it "holds partial lines back until a token is found" do 12 | buffer = described_class.new 13 | expect(buffer.extract("hello\nwor")).to eq ["hello"] 14 | expect(buffer.extract("ld\n")).to eq ["world"] 15 | end 16 | end 17 | 18 | context "when passing a custom delimiter" do 19 | subject { FileWatch::BufferedTokenizer.new("\r\n") } 20 | 21 | it "splits the lines correctly" do 22 | expect(subject.extract("hello\r\nworld\r\n")).to eq ["hello", "world"] 23 | end 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /spec/filewatch/read_mode_handlers_read_file_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require_relative 'spec_helper' 3 | 4 | module FileWatch 5 | describe ReadMode::Handlers::ReadFile do 6 | let(:settings) do 7 | Settings.from_options( 8 | :sincedb_write_interval => 0, 9 | :sincedb_path => File::NULL 10 | ) 11 | end 12 | let(:sdb_collection) { SincedbCollection.new(settings) } 13 | let(:directory) { Pathname.new(FIXTURE_DIR) } 14 | let(:pathname) { directory.join('uncompressed.log') } 15 | let(:watched_file) { WatchedFile.new(pathname, PathStatClass.new(pathname), settings) } 16 | let(:processor) { ReadMode::Processor.new(settings).add_watch(watch) } 17 | let(:file) { DummyFileReader.new(settings.file_chunk_size, 2) } 18 | 19 | context "simulate reading a 64KB file with a default chunk size of 32KB and a zero sincedb write interval" do 20 | let(:watch) { double("watch", :quit? 
=> false) } 21 | it "calls 'sincedb_write' exactly 2 times" do 22 | allow(FileOpener).to receive(:open).with(watched_file.path).and_return(file) 23 | expect(sdb_collection).to receive(:sincedb_write).exactly(1).times 24 | watched_file.activate 25 | processor.initialize_handlers(sdb_collection, TestObserver.new) 26 | processor.read_file(watched_file) 27 | end 28 | end 29 | 30 | context "simulate reading a 64KB file with a default chunk size of 32KB and a zero sincedb write interval" do 31 | let(:watch) { double("watch", :quit? => true) } 32 | it "calls 'sincedb_write' exactly 0 times as shutdown is in progress" do 33 | expect(sdb_collection).to receive(:sincedb_write).exactly(0).times 34 | watched_file.activate 35 | processor.initialize_handlers(sdb_collection, TestObserver.new) 36 | processor.read_file(watched_file) 37 | end 38 | end 39 | 40 | context "when restart from existing sincedb" do 41 | let(:settings) do 42 | Settings.from_options( 43 | :sincedb_write_interval => 0, 44 | :sincedb_path => File::NULL, 45 | :file_chunk_size => 10 46 | ) 47 | end 48 | 49 | let(:processor) { double("fake processor") } 50 | let(:observer) { TestObserver.new } 51 | let(:watch) { double("watch") } 52 | 53 | before(:each) { 54 | allow(watch).to receive(:quit?).and_return(false)#.and_return(false).and_return(true) 55 | allow(processor).to receive(:watch).and_return(watch) 56 | } 57 | 58 | it "read from where it left" do 59 | listener = observer.listener_for(Pathname.new(pathname).to_path) 60 | sut = ReadMode::Handlers::ReadFile.new(processor, sdb_collection, observer, settings) 61 | 62 | # simulate a previous partial read of the file 63 | sincedb_value = SincedbValue.new(0) 64 | sincedb_value.set_watched_file(watched_file) 65 | sdb_collection.set(watched_file.sincedb_key, sincedb_value) 66 | 67 | 68 | # simulate a consumption of first line, (size + newline) bytes 69 | sdb_collection.increment(watched_file.sincedb_key, File.readlines(pathname)[0].size + 2) 70 | 71 | # exercise 72 | 
sut.handle(watched_file) 73 | 74 | # verify 75 | expect(listener.lines.size).to eq(1) 76 | expect(listener.lines[0]).to start_with("2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK") 77 | end 78 | end 79 | end 80 | end 81 | -------------------------------------------------------------------------------- /spec/filewatch/reading_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require 'stud/temporary' 3 | require_relative 'spec_helper' 4 | require 'filewatch/observing_read' 5 | 6 | module FileWatch 7 | describe Watch do 8 | before(:all) do 9 | @thread_abort = Thread.abort_on_exception 10 | Thread.abort_on_exception = true 11 | end 12 | 13 | after(:all) do 14 | Thread.abort_on_exception = @thread_abort 15 | end 16 | 17 | let(:directory) { Stud::Temporary.directory } 18 | let(:watch_dir) { ::File.join(directory, "*.log") } 19 | let(:file_path) { ::File.join(directory, "1.log") } 20 | let(:sincedb_path) { ::File.join(Stud::Temporary.directory, "reading.sdb") } 21 | let(:stat_interval) { 0.1 } 22 | let(:discover_interval) { 4 } 23 | let(:start_new_files_at) { :end } # should be irrelevant for read mode 24 | let(:opts) do 25 | { 26 | :stat_interval => stat_interval, 27 | :start_new_files_at => start_new_files_at, 28 | :delimiter => "\n", 29 | :discover_interval => discover_interval, 30 | :ignore_older => 3600, 31 | :sincedb_path => sincedb_path 32 | } 33 | end 34 | let(:observer) { TestObserver.new } 35 | let(:reading) { ObservingRead.new(opts) } 36 | let(:listener1) { observer.listener_for(file_path) } 37 | 38 | after do 39 | FileUtils.rm_rf(directory) unless directory =~ /fixture/ 40 | end 41 | 42 | context "when watching a directory with files" do 43 | let(:actions) do 44 | RSpec::Sequencing.run("quit after a short time") do 45 | File.open(file_path, "wb") { |file| file.write("line1\nline2\n") } 46 | end 47 | .then("watch") do 48 | reading.watch_this(watch_dir) 49 | end 50 | .then("wait") do 51 | 
wait(2).for{listener1.calls.last}.to eq(:delete) 52 | end 53 | .then("quit") do 54 | reading.quit 55 | end 56 | end 57 | it "the file is read" do 58 | actions.activate_quietly 59 | reading.subscribe(observer) 60 | actions.assert_no_errors 61 | expect(listener1.calls).to eq([:open, :accept, :accept, :eof, :delete]) 62 | expect(listener1.lines).to eq(["line1", "line2"]) 63 | end 64 | end 65 | 66 | context "when watching a directory with files and sincedb_path is /dev/null or NUL" do 67 | let(:sincedb_path) { File::NULL } 68 | let(:actions) do 69 | RSpec::Sequencing.run("quit after a short time") do 70 | File.open(file_path, "wb") { |file| file.write("line1\nline2\n") } 71 | end 72 | .then("watch") do 73 | reading.watch_this(watch_dir) 74 | end 75 | .then("wait") do 76 | wait(2).for{listener1.calls.last}.to eq(:delete) 77 | end 78 | .then("quit") do 79 | reading.quit 80 | end 81 | end 82 | it "the file is read" do 83 | actions.activate_quietly 84 | reading.subscribe(observer) 85 | actions.assert_no_errors 86 | expect(listener1.calls).to eq([:open, :accept, :accept, :eof, :delete]) 87 | expect(listener1.lines).to eq(["line1", "line2"]) 88 | end 89 | end 90 | 91 | context "when watching a directory with files using striped reading" do 92 | let(:file_path2) { ::File.join(directory, "2.log") } 93 | # use a chunk size that does not align with the line boundaries 94 | let(:opts) { super().merge(:file_chunk_size => 10, :file_chunk_count => 1, :file_sort_by => "path")} 95 | let(:lines) { [] } 96 | let(:observer) { TestObserver.new(lines) } 97 | let(:listener2) { observer.listener_for(file_path2) } 98 | let(:actions) do 99 | RSpec::Sequencing.run("create file") do 100 | File.open(file_path, "w") { |file| file.write("string1\nstring2") } 101 | File.open(file_path2, "w") { |file| file.write("stringA\nstringB") } 102 | end 103 | .then("watch") do 104 | reading.watch_this(watch_dir) 105 | end 106 | .then("wait") do 107 | wait(2).for{listener1.calls.last == :delete && 
# read mode with a delimiter that never occurs in the content: at EOF the whole
# file is emitted as a single line, so the sincedb position advances to the file size (11).
149 | expect(sincedb_record_fields[position_field_index]).to eq("11") 150 | end 151 | end 152 | 153 | context "when watching directory with files and adding a new file" do 154 | let(:file_path2) { ::File.join(directory, "2.log") } 155 | let(:file_path3) { ::File.join(directory, "3.log") } 156 | 157 | let(:opts) { super().merge(:file_sort_by => "last_modified") } 158 | let(:lines) { [] } 159 | let(:observer) { TestObserver.new(lines) } 160 | 161 | 162 | let(:listener2) { observer.listener_for(file_path2) } 163 | let(:listener3) { observer.listener_for(file_path3) } 164 | 165 | let(:actions) do 166 | RSpec::Sequencing.run("create12") do 167 | File.open(file_path, "w") { |file| file.write("string11\nstring12") } 168 | File.open(file_path2, "w") { |file| file.write("string21\nstring22") } 169 | end 170 | .then("watch") do 171 | reading.watch_this(watch_dir) 172 | end 173 | .then("wait12") do 174 | wait(2).for { listener1.calls.last == :delete && listener2.calls.last == :delete }.to eq(true) 175 | end 176 | .then_after(2, "create3") do 177 | File.open(file_path3, "w") { |file| file.write("string31\nstring32") } 178 | end 179 | .then("wait3") do 180 | wait(2).for { listener3.calls.last == :delete }.to eq(true) 181 | end 182 | .then("quit") do 183 | reading.quit 184 | end 185 | end 186 | 187 | it "reads all (3) files" do 188 | actions.activate_quietly 189 | reading.subscribe(observer) 190 | actions.assert_no_errors 191 | expect(lines.last).to eq 'string32' 192 | expect(lines.sort).to eq %w(string11 string12 string21 string22 string31 string32) 193 | expect( reading.watch.watched_files_collection.paths ).to eq [ file_path, file_path2, file_path3 ] 194 | end 195 | end 196 | 197 | context "when watching a directory with files using exit_after_read" do 198 | let(:opts) { super().merge(:exit_after_read => true, :max_open_files => 2) } 199 | let(:file_path3) { ::File.join(directory, "3.log") } 200 | let(:file_path4) { ::File.join(directory, "4.log") } 201 | let(:file_path5) { 
it "file is marked as reading_completed" do
expect(sincedb_record_fields[position_field_index]).to eq("12") 246 | end 247 | 248 | it "does not include new files added after start" do 249 | File.open(file_path3, "w") { |file| file.write("line1\nline2\n") } 250 | reading.watch_this(watch_dir) 251 | reading.subscribe(observer) 252 | File.open(file_path6, "w") { |file| file.write("foob\nbar\n") } 253 | expect(listener3.lines).to eq(["line1", "line2"]) 254 | expect(listener3.calls).to eq([:open, :accept, :accept, :eof, :delete, :reading_completed]) 255 | expect(listener6.calls).to eq([]) 256 | end 257 | 258 | end 259 | 260 | describe "reading fixtures" do 261 | let(:directory) { FIXTURE_DIR } 262 | let(:actions) do 263 | RSpec::Sequencing.run("watch") do 264 | reading.watch_this(watch_dir) 265 | end 266 | .then("wait") do 267 | wait(1).for{listener1.calls.last}.to eq(:delete) 268 | end 269 | .then("quit") do 270 | reading.quit 271 | end 272 | end 273 | context "for an uncompressed file" do 274 | let(:watch_dir) { ::File.join(directory, "unc*.log") } 275 | let(:file_path) { ::File.join(directory, 'uncompressed.log') } 276 | 277 | it "the file is read" do 278 | FileWatch.make_fixture_current(file_path) 279 | actions.activate_quietly 280 | reading.subscribe(observer) 281 | actions.assert_no_errors 282 | expect(listener1.calls).to eq([:open, :accept, :accept, :eof, :delete]) 283 | expect(listener1.lines.size).to eq(2) 284 | end 285 | end 286 | 287 | context "for another uncompressed file" do 288 | let(:watch_dir) { ::File.join(directory, "invalid*.log") } 289 | let(:file_path) { ::File.join(directory, 'invalid_utf8.gbk.log') } 290 | 291 | it "the file is read" do 292 | FileWatch.make_fixture_current(file_path) 293 | actions.activate_quietly 294 | reading.subscribe(observer) 295 | actions.assert_no_errors 296 | expect(listener1.calls).to eq([:open, :accept, :accept, :eof, :delete]) 297 | expect(listener1.lines.size).to eq(2) 298 | end 299 | end 300 | 301 | context "for a compressed file" do 302 | let(:watch_dir) { 
::File.join(directory, "compressed.*.gz") } 303 | let(:file_path) { ::File.join(directory, 'compressed.log.gz') } 304 | 305 | it "the file is read" do 306 | FileWatch.make_fixture_current(file_path) 307 | actions.activate_quietly 308 | reading.subscribe(observer) 309 | actions.assert_no_errors 310 | expect(listener1.calls).to eq([:open, :accept, :accept, :eof, :delete]) 311 | expect(listener1.lines.size).to eq(2) 312 | end 313 | end 314 | end 315 | end 316 | end 317 | -------------------------------------------------------------------------------- /spec/filewatch/settings_spec.rb: -------------------------------------------------------------------------------- 1 | require 'logstash/devutils/rspec/spec_helper' 2 | require 'logstash/inputs/friendly_durations' 3 | 4 | describe FileWatch::Settings do 5 | 6 | context "when create from options" do 7 | it "doesn't convert sincedb_clean_after to seconds" do 8 | res = FileWatch::Settings.from_options({:sincedb_clean_after => LogStash::Inputs::FriendlyDurations.call(1, "days").value}) 9 | 10 | expect(res.sincedb_expiry_duration).to eq 1 * 24 * 3600 11 | end 12 | end 13 | 14 | end 15 | -------------------------------------------------------------------------------- /spec/filewatch/sincedb_record_serializer_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require_relative 'spec_helper' 3 | require 'filewatch/settings' 4 | require 'filewatch/sincedb_record_serializer' 5 | 6 | module FileWatch 7 | describe SincedbRecordSerializer do 8 | let(:opts) { Hash.new } 9 | let(:io) { StringIO.new } 10 | let(:db) { Hash.new } 11 | 12 | let(:sincedb_value_expiry) { SincedbRecordSerializer.days_to_seconds(14) } 13 | 14 | subject { SincedbRecordSerializer.new(sincedb_value_expiry) } 15 | 16 | context "deserialize from IO" do 17 | it 'reads V1 records' do 18 | io.write("5391297 1 4 12\n") 19 | io.rewind 20 | rows = 0 21 | subject.deserialize(io) do |inode_struct, 
sincedb_value| 22 | expect(inode_struct.inode).to eq("5391297") 23 | expect(inode_struct.maj).to eq(1) 24 | expect(inode_struct.min).to eq(4) 25 | expect(sincedb_value.position).to eq(12) 26 | rows += 1 27 | end 28 | expect(rows).to be > 0 29 | end 30 | 31 | it 'reads V2 records from an IO object' do 32 | now = Time.now.to_f 33 | io.write("5391298 1 4 12 #{now} /a/path/to/1.log\n") 34 | io.rewind 35 | rows = 0 36 | subject.deserialize(io) do |inode_struct, sincedb_value| 37 | expect(inode_struct.inode).to eq("5391298") 38 | expect(inode_struct.maj).to eq(1) 39 | expect(inode_struct.min).to eq(4) 40 | expect(sincedb_value.position).to eq(12) 41 | expect(sincedb_value.last_changed_at).to eq(now) 42 | expect(sincedb_value.path_in_sincedb).to eq("/a/path/to/1.log") 43 | rows += 1 44 | end 45 | expect(rows).to be > 0 46 | end 47 | 48 | it 'properly handles spaces in a filename' do 49 | now = Time.now.to_f 50 | io.write("53912987 1 4 12 #{now} /a/path/to/log log.log\n") 51 | io.rewind 52 | rows = 0 53 | subject.deserialize(io) do |inode_struct, sincedb_value| 54 | expect(inode_struct.inode).to eq("53912987") 55 | expect(inode_struct.maj).to eq(1) 56 | expect(inode_struct.min).to eq(4) 57 | expect(sincedb_value.position).to eq(12) 58 | expect(sincedb_value.last_changed_at).to eq(now) 59 | expect(sincedb_value.path_in_sincedb).to eq("/a/path/to/log log.log") 60 | rows += 1 61 | end 62 | expect(rows).to be > 0 63 | end 64 | end 65 | 66 | context "serialize to IO" do 67 | it "writes db entries" do 68 | now = Time.now.to_f 69 | inode_struct = InodeStruct.new("42424242", 2, 5) 70 | sincedb_value = SincedbValue.new(42, now) 71 | db[inode_struct] = sincedb_value 72 | subject.serialize(db, io) 73 | expect(io.string).to eq("42424242 2 5 42 #{now}\n") 74 | end 75 | 76 | it "does not write expired db entries to an IO object" do 77 | twelve_days_ago = Time.now.to_f - (12.0*24*3600) 78 | sixteen_days_ago = twelve_days_ago - (4.0*24*3600) 79 | db[InodeStruct.new("42424242", 2, 5)] = 
SincedbValue.new(42, twelve_days_ago) 80 | db[InodeStruct.new("18181818", 1, 6)] = SincedbValue.new(99, sixteen_days_ago) 81 | subject.serialize(db, io) 82 | expect(io.string).to eq("42424242 2 5 42 #{twelve_days_ago}\n") 83 | end 84 | end 85 | 86 | context "given a non default `sincedb_clean_after`" do 87 | 88 | let(:sincedb_value_expiry) { SincedbRecordSerializer.days_to_seconds(2) } 89 | 90 | it "does not write expired db entries to an IO object" do 91 | one_day_ago = Time.now.to_f - (1.0*24*3600) 92 | three_days_ago = one_day_ago - (2.0*24*3600) 93 | db[InodeStruct.new("42424242", 2, 5)] = SincedbValue.new(42, one_day_ago) 94 | db[InodeStruct.new("18181818", 1, 6)] = SincedbValue.new(99, three_days_ago) 95 | subject.serialize(db, io) 96 | expect(io.string).to eq("42424242 2 5 42 #{one_day_ago}\n") 97 | end 98 | end 99 | end 100 | end -------------------------------------------------------------------------------- /spec/filewatch/spec_helper.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "rspec_sequencing" 3 | require 'rspec/wait' 4 | require "logstash/devutils/rspec/spec_helper" 5 | require "concurrent" 6 | require "timecop" 7 | 8 | def formatted_puts(text) 9 | cfg = RSpec.configuration 10 | return unless cfg.formatters.first.is_a?( 11 | RSpec::Core::Formatters::DocumentationFormatter) 12 | txt = cfg.format_docstrings_block.call(text) 13 | cfg.output_stream.puts " #{txt}" 14 | end 15 | 16 | unless RSpec::Matchers.method_defined?(:receive_call_and_args) 17 | RSpec::Matchers.define(:receive_call_and_args) do |m, args| 18 | match do |actual| 19 | actual.trace_for(m) == args 20 | end 21 | 22 | failure_message do 23 | "Expecting method #{m} to receive: #{args} but got: #{actual.trace_for(m)}" 24 | end 25 | end 26 | end 27 | 28 | require_relative "../helpers/rspec_wait_handler_helper" unless defined? 
RSPEC_WAIT_HANDLER_PATCHED 29 | require_relative "../helpers/logging_level_helper" unless defined? LOG_AT_HANDLED 30 | 31 | require 'filewatch/bootstrap' 32 | 33 | module FileWatch 34 | class DummyIO 35 | def stat 36 | self 37 | end 38 | def ino 39 | 23456 40 | end 41 | def size 42 | 65535 43 | end 44 | def mtime 45 | Time.now 46 | end 47 | def dev_major 48 | 1 49 | end 50 | def dev_minor 51 | 5 52 | end 53 | end 54 | 55 | class DummyFileReader 56 | def initialize(read_size, iterations) 57 | @read_size = read_size 58 | @iterations = iterations 59 | @closed = false 60 | @accumulated = 0 61 | @io = DummyIO.new 62 | end 63 | def file_seek(*) 64 | end 65 | def close() 66 | @closed = true 67 | end 68 | def closed? 69 | @closed 70 | end 71 | def to_io 72 | @io 73 | end 74 | def sysread(amount) 75 | @accumulated += amount 76 | if @accumulated > @read_size * @iterations 77 | raise EOFError.new 78 | end 79 | string = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcde\n" 80 | multiplier = amount / string.length 81 | string * multiplier 82 | end 83 | def sysseek(offset, whence) 84 | end 85 | end 86 | 87 | FIXTURE_DIR = File.join('spec', 'fixtures') 88 | 89 | def self.make_file_older(path, seconds) 90 | time = Time.now.to_f - seconds 91 | ::File.utime(time, time, path) 92 | end 93 | 94 | def self.make_fixture_current(path, time = Time.now) 95 | ::File.utime(time, time, path) 96 | end 97 | 98 | class TracerBase 99 | def initialize 100 | @tracer = Concurrent::Array.new 101 | end 102 | 103 | def trace_for(symbol) 104 | params = @tracer.map {|k,v| k == symbol ? v : nil}.compact 105 | params.empty? ? 
false : params 106 | end 107 | 108 | def clear 109 | @tracer.clear 110 | end 111 | end 112 | 113 | module NullCallable 114 | def self.call 115 | end 116 | end 117 | 118 | class TestObserver 119 | class Listener 120 | attr_reader :path, :lines, :calls 121 | 122 | def initialize(path, lines) 123 | @path = path 124 | @lines = lines || Concurrent::Array.new 125 | @calls = Concurrent::Array.new 126 | end 127 | 128 | def accept(line) 129 | @lines << line 130 | @calls << :accept 131 | end 132 | 133 | def deleted 134 | @calls << :delete 135 | end 136 | 137 | def opened 138 | @calls << :open 139 | end 140 | 141 | def error 142 | @calls << :error 143 | end 144 | 145 | def eof 146 | @calls << :eof 147 | end 148 | 149 | def timed_out 150 | @calls << :timed_out 151 | end 152 | 153 | def reading_completed 154 | @calls << :reading_completed 155 | end 156 | end 157 | 158 | attr_reader :listeners 159 | 160 | def initialize(combined_lines = nil) 161 | @listeners = Concurrent::Hash.new { |hash, key| hash[key] = new_listener(key, combined_lines) } 162 | end 163 | 164 | def listener_for(path) 165 | @listeners[path] 166 | end 167 | 168 | def clear 169 | @listeners.clear 170 | end 171 | 172 | private 173 | 174 | def new_listener(path, lines = nil) 175 | Listener.new(path, lines) 176 | end 177 | 178 | end 179 | end 180 | -------------------------------------------------------------------------------- /spec/filewatch/watched_file_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require 'stud/temporary' 3 | require_relative 'spec_helper' 4 | 5 | module FileWatch 6 | describe WatchedFile do 7 | let(:pathname) { Pathname.new(__FILE__) } 8 | 9 | context 'Given two instances of the same file' do 10 | it 'their sincedb_keys should equate' do 11 | wf_key1 = WatchedFile.new(pathname, PathStatClass.new(pathname), Settings.new).sincedb_key 12 | hash_db = { wf_key1 => 42 } 13 | wf_key2 = WatchedFile.new(pathname, 
PathStatClass.new(pathname), Settings.new).sincedb_key 14 | expect(wf_key1).to eq(wf_key2) 15 | expect(wf_key1).to eql(wf_key2) 16 | expect(wf_key1.hash).to eq(wf_key2.hash) 17 | expect(hash_db[wf_key2]).to eq(42) 18 | end 19 | end 20 | 21 | context 'Given a barrage of state changes' do 22 | it 'only the previous N state changes are remembered' do 23 | watched_file = WatchedFile.new(pathname, PathStatClass.new(pathname), Settings.new) 24 | watched_file.ignore 25 | watched_file.watch 26 | watched_file.activate 27 | watched_file.watch 28 | watched_file.close 29 | watched_file.watch 30 | watched_file.activate 31 | watched_file.unwatch 32 | watched_file.activate 33 | watched_file.close 34 | expect(watched_file.closed?).to be_truthy 35 | expect(watched_file.recent_states).to eq([:watched, :active, :watched, :closed, :watched, :active, :unwatched, :active]) 36 | end 37 | end 38 | 39 | context 'restat' do 40 | 41 | let(:directory) { Stud::Temporary.directory } 42 | let(:file_path) { ::File.join(directory, "restat.file.txt") } 43 | let(:pathname) { Pathname.new(file_path) } 44 | 45 | before { FileUtils.touch file_path, :mtime => Time.now - 300 } 46 | 47 | it 'reports false value when no changes' do 48 | file = WatchedFile.new(pathname, PathStatClass.new(pathname), Settings.new) 49 | mtime = file.modified_at 50 | expect( file.modified_at_changed? ).to be false 51 | expect( file.restat! ).to be_falsy 52 | expect( file.modified_at_changed? ).to be false 53 | expect( file.modified_at ).to eql mtime 54 | expect( file.modified_at(true) ).to eql mtime 55 | end 56 | 57 | it 'reports truthy when changes detected' do 58 | file = WatchedFile.new(pathname, PathStatClass.new(pathname), Settings.new) 59 | mtime = file.modified_at 60 | expect( file.modified_at_changed? ).to be false 61 | FileUtils.touch file_path 62 | expect( file.restat! ).to be_truthy 63 | expect( file.modified_at_changed? 
).to be true 64 | expect( file.modified_at ).to eql mtime # until updated 65 | expect( file.modified_at(true) ).to be > mtime 66 | end 67 | end 68 | end 69 | end 70 | -------------------------------------------------------------------------------- /spec/filewatch/watched_files_collection_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require_relative 'spec_helper' 3 | 4 | module FileWatch 5 | describe WatchedFilesCollection do 6 | let(:time) { Time.now } 7 | let(:filepath1) { "/var/log/z.log" } 8 | let(:filepath2) { "/var/log/m.log" } 9 | let(:filepath3) { "/var/log/a.log" } 10 | let(:filepath4) { "/var/log/b.log" } 11 | let(:stat1) { double("stat1", :size => 98, :modified_at => time - 30, :inode => 234567, :inode_struct => InodeStruct.new("234567", 3, 2)) } 12 | let(:stat2) { double("stat2", :size => 99, :modified_at => time - 20, :inode => 234568, :inode_struct => InodeStruct.new("234568", 3, 2)) } 13 | let(:stat3) { double("stat3", :size => 100, :modified_at => time, :inode => 234569, :inode_struct => InodeStruct.new("234569", 3, 2)) } 14 | let(:stat4) { double("stat4", :size => 99, :modified_at => time, :inode => 234570, :inode_struct => InodeStruct.new("234570", 3, 2)) } 15 | let(:wf1) { WatchedFile.new(filepath1, stat1, Settings.new) } 16 | let(:wf2) { WatchedFile.new(filepath2, stat2, Settings.new) } 17 | let(:wf3) { WatchedFile.new(filepath3, stat3, Settings.new) } 18 | let(:wf4) { WatchedFile.new(filepath4, stat4, Settings.new) } 19 | 20 | context "sort by last_modified in ascending order" do 21 | let(:sort_by) { "last_modified" } 22 | let(:sort_direction) { "asc" } 23 | 24 | it "sorts earliest modified first" do 25 | collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction)) 26 | expect(collection.empty?).to be true 27 | collection.add(wf2) 28 | expect(collection.empty?).to be false 29 | expect(collection.values).to eq([wf2]) 
30 | collection.add(wf3) 31 | expect(collection.values).to eq([wf2, wf3]) 32 | collection.add(wf1) 33 | expect(collection.values).to eq([wf1, wf2, wf3]) 34 | expect(collection.keys.size).to eq 3 35 | end 36 | 37 | it "sorts by path when mtime is same" do 38 | collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction)) 39 | expect(collection.size).to eq 0 40 | collection.add(wf2) 41 | collection.add(wf4) 42 | collection.add(wf1) 43 | expect(collection.size).to eq 3 44 | expect(collection.values).to eq([wf1, wf2, wf4]) 45 | collection.add(wf3) 46 | expect(collection.size).to eq 4 47 | expect(collection.values).to eq([wf1, wf2, wf3, wf4]) 48 | expect(collection.keys.size).to eq 4 49 | end 50 | end 51 | 52 | context "sort by path in ascending order" do 53 | let(:sort_by) { "path" } 54 | let(:sort_direction) { "asc" } 55 | 56 | it "sorts path A-Z" do 57 | collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction)) 58 | collection.add(wf2) 59 | expect(collection.values).to eq([wf2]) 60 | collection.add(wf1) 61 | expect(collection.values).to eq([wf2, wf1]) 62 | collection.add(wf3) 63 | expect(collection.values).to eq([wf3, wf2, wf1]) 64 | end 65 | end 66 | 67 | context "sort by last_modified in descending order" do 68 | let(:sort_by) { "last_modified" } 69 | let(:sort_direction) { "desc" } 70 | 71 | it "sorts latest modified first" do 72 | collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction)) 73 | collection.add(wf2) 74 | expect(collection.values).to eq([wf2]) 75 | collection.add(wf1) 76 | expect(collection.values).to eq([wf2, wf1]) 77 | collection.add(wf3) 78 | expect(collection.values).to eq([wf3, wf2, wf1]) 79 | end 80 | end 81 | 82 | context "sort by path in descending order" do 83 | let(:sort_by) { "path" } 84 | let(:sort_direction) { "desc" } 85 | 86 | it "sorts path Z-A" do 87 
| collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction)) 88 | collection.add(wf2) 89 | expect(collection.values).to eq([wf2]) 90 | collection.add(wf1) 91 | expect(collection.values).to eq([wf1, wf2]) 92 | collection.add(wf3) 93 | expect(collection.values).to eq([wf1, wf2, wf3]) 94 | end 95 | end 96 | 97 | context "remove_paths" do 98 | let(:sort_by) { "path" } 99 | let(:sort_direction) { "desc" } 100 | 101 | it "is able to delete multiple files at once" do 102 | collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction)) 103 | collection.add(wf1) 104 | collection.add(wf2) 105 | collection.add(wf3) 106 | expect(collection.keys).to eq([filepath1, filepath2, filepath3]) 107 | 108 | ret = collection.remove_paths([filepath2, filepath3]) 109 | expect(ret).to eq 2 110 | expect(collection.keys).to eq([filepath1]) 111 | expect(collection.values.size).to eq 1 112 | 113 | ret = collection.remove_paths([filepath2]) 114 | expect(ret).to eq 0 115 | end 116 | end 117 | 118 | context "update" do 119 | let(:sort_by) { "last_modified" } 120 | let(:sort_direction) { "asc" } 121 | 122 | let(:re_stat1) { double("restat1", :size => 99, :modified_at => time, :inode => 234567, :inode_struct => InodeStruct.new("234567", 3, 2)) } 123 | let(:re_stat2) { double("restat2", :size => 99, :modified_at => time, :inode => 234568, :inode_struct => InodeStruct.new("234568", 3, 2)) } 124 | 125 | it "updates entry with changed mtime" do 126 | collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction)) 127 | collection.add(wf1) 128 | collection.add(wf2) 129 | collection.add(wf3) 130 | expect(collection.files).to eq([wf1, wf2, wf3]) 131 | 132 | wf2.send(:set_stat, re_stat2) 133 | expect( wf2.modified_at_changed? 
).to be_truthy 134 | 135 | collection.update wf2 136 | expect(collection.files).to eq([wf1, wf3, wf2]) 137 | 138 | wf1.send(:set_stat, re_stat1) 139 | expect( wf1.modified_at_changed? ).to be_truthy 140 | collection.update wf1 141 | expect(collection.files).to eq([wf3, wf2, wf1]) 142 | 143 | collection.add(wf4) 144 | expect(collection.files).to eq([wf3, wf4, wf2, wf1]) 145 | end 146 | end 147 | 148 | end 149 | end 150 | -------------------------------------------------------------------------------- /spec/filewatch/winhelper_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "stud/temporary" 3 | require "fileutils" 4 | 5 | if Gem.win_platform? 6 | require "filewatch/winhelper" 7 | 8 | describe Winhelper do 9 | let(:path) { Stud::Temporary.file.path } 10 | 11 | after do 12 | FileUtils.rm_rf(path) 13 | end 14 | 15 | it "return a unique file identifier" do 16 | identifier = Winhelper.identifier_from_path(path) 17 | 18 | expect(identifier).not_to eq("unknown") 19 | expect(identifier.count("-")).to eq(2) 20 | end 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /spec/fixtures/compressed.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/logstash-plugins/logstash-input-file/55a4a7099f05f29351672417036c1342850c7adc/spec/fixtures/compressed.log.gz -------------------------------------------------------------------------------- /spec/fixtures/compressed.log.gzip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/logstash-plugins/logstash-input-file/55a4a7099f05f29351672417036c1342850c7adc/spec/fixtures/compressed.log.gzip -------------------------------------------------------------------------------- /spec/fixtures/invalid_utf8.gbk.log: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/logstash-plugins/logstash-input-file/55a4a7099f05f29351672417036c1342850c7adc/spec/fixtures/invalid_utf8.gbk.log -------------------------------------------------------------------------------- /spec/fixtures/no-final-newline.log: -------------------------------------------------------------------------------- 1 | 2010-03-12 23:51:20 SEA4 192.0.2.147 connect 2014 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st​ key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 - - - - 2 | 2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st​ key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 myvideo p=2&q=4 flv 1 -------------------------------------------------------------------------------- /spec/fixtures/uncompressed.log: -------------------------------------------------------------------------------- 1 | 2010-03-12 23:51:20 SEA4 192.0.2.147 connect 2014 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st​ key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 - - - - 2 | 2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st​ key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 myvideo p=2&q=4 flv 1 3 | -------------------------------------------------------------------------------- /spec/helpers/logging_level_helper.rb: -------------------------------------------------------------------------------- 1 | # 
# there is a similar patch in the rspec-wait repo since Nov 19, 2017
  end
end

    # From: https://github.com/rspec/rspec-expectations/blob/v3.0.0/lib/rspec/expectations/handler.rb#L44-L63
    class PositiveHandler < RSpec::Expectations::PositiveExpectationHandler
      extend Handler
    end

    # From: https://github.com/rspec/rspec-expectations/blob/v3.0.0/lib/rspec/expectations/handler.rb#L66-L93
    class NegativeHandler < RSpec::Expectations::NegativeExpectationHandler
      extend Handler
    end
  end
end

RSPEC_WAIT_HANDLER_PATCHED = true
--------------------------------------------------------------------------------
/spec/helpers/spec_helper.rb:
--------------------------------------------------------------------------------
# encoding: utf-8

require "logstash/devutils/rspec/spec_helper"
require "rspec/wait"
require "rspec_sequencing"

# Shared helpers for the file-input specs: fixture locations, mtime
# manipulation, gzip-damaging helpers and codec call tracers.
module FileInput

  FIXTURE_DIR = File.join('spec', 'fixtures')

  # Rewind a file's atime/mtime by `seconds` so specs can simulate aged files.
  def self.make_file_older(path, seconds)
    time = Time.now.to_f - seconds
    ::File.utime(time, time, path)
  end

  # Set a fixture's atime/mtime to `time` (defaults to now).
  def self.make_fixture_current(path, time = Time.now)
    ::File.utime(time, time, path)
  end

  # Damage a gzip fixture by writing a garbage string at offset 12.
  # NOTE(review): mode "w" truncates the file before the seek, so the bytes
  # before offset 12 become NUL padding and everything after the string is
  # lost — confirm this is the intended form of corruption for these specs.
  def self.corrupt_gzip(file_path)
    f = File.open(file_path, "w")
    f.seek(12)
    f.puts 'corrupting_string'
    f.close()
  end

  # Chop a gzip fixture down to 100 bytes: header stays valid, stream is cut.
  def self.truncate_gzip(file_path)
    f = File.open(file_path, "ab")
    f.truncate(100)
    f.close()
  end

  # Records [symbol, payload] pairs pushed by subclasses.
  # Uses Concurrent::Array because codec callbacks may fire on other threads.
  class TracerBase
    def initialize
      @tracer = Concurrent::Array.new
    end

    # Returns false if `symbol` was never traced; otherwise the AND-merge of
    # every payload recorded under that symbol.
    def trace_for(symbol)
      params = @tracer.map {|k,v| k == symbol ? v : nil}.compact
      if params.empty?
        false
      else
        # merge all params with same key
        # there could be multiple instances of same call, e.g.
        # [[:accept, true], [:auto_flush, true], [:close, true], [:auto_flush, true]]
        params.reduce {|b1, b2| b1 and b2}
      end
    end

    # Drop all recorded calls.
    def clear
      @tracer.clear
    end
  end

  # Stand-in codec recording which codec API methods the input invoked.
  class CodecTracer < TracerBase
    def decode_accept(ctx, data, listener)
      @tracer.push [:decode_accept, [ctx, data]]
      listener.process(ctx, {"message" => data})
    end
    def accept(listener)
      @tracer.push [:accept, true]
    end
    def auto_flush(*)
      @tracer.push [:auto_flush, true]
    end
    def flush(*)
      @tracer.push [:flush, true]
    end
    def close
      @tracer.push [:close, true]
    end
    # The input clones the codec per file; returning self keeps one tracer.
    def clone
      self
    end
  end
end

require_relative "rspec_wait_handler_helper" unless defined? RSPEC_WAIT_HANDLER_PATCHED
require_relative "logging_level_helper" unless defined? LOG_AT_HANDLED

# Matcher asserting a tracer recorded method `m` with exactly `args`.
unless RSpec::Matchers.method_defined?(:receive_call_and_args)
  RSpec::Matchers.define(:receive_call_and_args) do |m, args|
    match do |actual|
      actual.trace_for(m) == args
    end

    failure_message do
      "Expecting method #{m} to receive: #{args} but got: #{actual.trace_for(m)}"
    end
  end
end

# Allow `LOG_AT=debug rspec ...` to raise/lower plugin logging for a run.
ENV["LOG_AT"].tap do |level|
  LogStash::Logging::Logger::configure_logging(level) unless level.nil?
end
--------------------------------------------------------------------------------
/spec/inputs/friendly_durations_spec.rb:
--------------------------------------------------------------------------------
# encoding: utf-8

require "helpers/spec_helper"
require "logstash/inputs/friendly_durations"

describe "FriendlyDurations module function call" do
  context "unacceptable strings" do
    it "gives an error message for 'foobar'" do
      result = LogStash::Inputs::FriendlyDurations.call("foobar","sec")
      expect(result.error_message).to start_with("Value 'foobar' is not a valid duration string e.g. 200 usec")
    end
    it "gives an error message for '5 5 days'" do
      result = LogStash::Inputs::FriendlyDurations.call("5 5 days","sec")
      expect(result.error_message).to start_with("Value '5 5 days' is not a valid duration string e.g. 200 usec")
    end
  end

  context "when a unit is not specified, a unit override will affect the result" do
    it "coerces 14 to 1209600.0s as days" do
      result = LogStash::Inputs::FriendlyDurations.call(14,"d")
      expect(result.error_message).to eq(nil)
      expect(result.value).to eq(1209600.0)
    end
    it "coerces '30' to 1800.0s as minutes" do
      result = LogStash::Inputs::FriendlyDurations.call("30","minutes")
      expect(result.to_a).to eq([true, 1800.0])
    end
  end

  context "acceptable strings" do
    # [input string, expected coercion to seconds (Float)]
    [
      ["10", 10.0],
      ["10.5 s", 10.5],
      ["10.75 secs", 10.75],
      ["11 second", 11.0],
      ["10 seconds", 10.0],
      ["500 ms", 0.5],
      ["750.9 msec", 0.7509],
      ["750.9 msecs", 0.7509],
      ["750.9 us", 0.0007509],
      ["750.9 usec", 0.0007509],
      ["750.9 usecs", 0.0007509],
      ["1.5m", 90.0],
      ["2.5 m", 150.0],
      ["1.25 min", 75.0],
      ["1 minute", 60.0],
      ["2.5 minutes", 150.0],
      ["2h", 7200.0],
      ["2 h", 7200.0],
      ["1 hour", 3600.0],
      ["1hour", 3600.0],
      ["3 hours", 10800.0],
      ["0.5d", 43200.0],
      ["1day", 86400.0],
      ["1 day", 86400.0],
      ["2days", 172800.0],
      ["14 days", 1209600.0],
      ["1w", 604800.0],
      ["1 w", 604800.0],
      ["1 week", 604800.0],
      ["2weeks", 1209600.0],
      ["2 weeks", 1209600.0],
      ["1.5 weeks", 907200.0],
    ].each do |input, coerced|
      it "coerces #{input.inspect.rjust(16)} to #{coerced.inspect}" do
        result = LogStash::Inputs::FriendlyDurations.call(input,"sec")
        expect(result.to_a).to eq([true, coerced])
      end
    end
  end
end
--------------------------------------------------------------------------------
/src/main/java/JrubyFileWatchService.java: -------------------------------------------------------------------------------- 1 | import org.jruby.Ruby; 2 | import org.jruby.runtime.load.BasicLibraryService; 3 | import org.logstash.filewatch.JrubyFileWatchLibrary; 4 | 5 | public class JrubyFileWatchService implements BasicLibraryService { 6 | @Override 7 | public final boolean basicLoad(final Ruby runtime) { 8 | new JrubyFileWatchLibrary().load(runtime, false); 9 | return true; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/main/java/jnr/posix/windows/WindowsFileInformationByHandle.java: -------------------------------------------------------------------------------- 1 | package jnr.posix.windows; 2 | /* 3 | This, sadly can't be used. 4 | See JrubyFileWatchLibrary class 5 | The jnr jar is loaded by a different class loader than our jar (in rspec anyway) 6 | Even though the package is the same, Java restricts access to `dwVolumeSerialNumber` in the super class 7 | We have to continue to use FFI in Ruby. 
8 | */ 9 | 10 | public class WindowsFileInformationByHandle extends WindowsByHandleFileInformation { 11 | public WindowsFileInformationByHandle(jnr.ffi.Runtime runtime) { 12 | super(runtime); 13 | } 14 | 15 | public java.lang.String getIdentifier() { 16 | StringBuilder builder = new StringBuilder(); 17 | builder.append(dwVolumeSerialNumber.intValue()); 18 | builder.append("-"); 19 | builder.append(nFileIndexHigh.intValue()); 20 | builder.append("-"); 21 | builder.append(nFileIndexLow.intValue()); 22 | return builder.toString(); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/org/logstash/filewatch/RubyWinIO.java: -------------------------------------------------------------------------------- 1 | package org.logstash.filewatch; 2 | 3 | import jnr.posix.HANDLE; 4 | import jnr.posix.JavaLibCHelper; 5 | import org.jruby.Ruby; 6 | import org.jruby.RubyBoolean; 7 | import org.jruby.RubyIO; 8 | import org.jruby.anno.JRubyClass; 9 | import org.jruby.anno.JRubyMethod; 10 | import org.jruby.runtime.ThreadContext; 11 | import org.jruby.runtime.builtin.IRubyObject; 12 | import org.jruby.util.io.OpenFile; 13 | 14 | import java.nio.channels.Channel; 15 | 16 | @JRubyClass(name = "WinIO") 17 | public class RubyWinIO extends RubyIO { 18 | private boolean valid; 19 | private boolean direct; 20 | private long address; 21 | 22 | public RubyWinIO(Ruby runtime, Channel channel) { 23 | super(runtime, channel); 24 | final OpenFile fptr = getOpenFileChecked(); 25 | final boolean locked = fptr.lock(); 26 | try { 27 | fptr.checkClosed(); 28 | final HANDLE handle = JavaLibCHelper.gethandle(JavaLibCHelper.getDescriptorFromChannel(fptr.fd().chFile)); 29 | if (handle.isValid()) { 30 | direct = handle.toPointer().isDirect(); 31 | address = handle.toPointer().address(); 32 | valid = true; 33 | } else { 34 | direct = false; 35 | address = 0L; 36 | valid = false; 37 | } 38 | } finally { 39 | if (locked) { 40 | fptr.unlock(); 41 | } 
42 | } 43 | } 44 | 45 | @JRubyMethod(name = "valid?") 46 | public RubyBoolean valid_p(ThreadContext context) { 47 | return context.runtime.newBoolean(valid); 48 | } 49 | 50 | @Override 51 | @JRubyMethod 52 | public IRubyObject close() { 53 | direct = false; 54 | address = 0L; 55 | return super.close(); 56 | } 57 | 58 | final public boolean isDirect() { 59 | return direct; 60 | } 61 | 62 | final public long getAddress() { 63 | return address; 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/main/java/org/logstash/filewatch/WatchedFilesCollection.java: -------------------------------------------------------------------------------- 1 | package org.logstash.filewatch; 2 | 3 | import org.jruby.Ruby; 4 | import org.jruby.RubyArray; 5 | import org.jruby.RubyBoolean; 6 | import org.jruby.RubyClass; 7 | import org.jruby.RubyFloat; 8 | import org.jruby.RubyHash; 9 | import org.jruby.RubyObject; 10 | import org.jruby.RubyString; 11 | import org.jruby.anno.JRubyMethod; 12 | import org.jruby.runtime.Block; 13 | import org.jruby.runtime.ThreadContext; 14 | import org.jruby.runtime.Visibility; 15 | import org.jruby.runtime.builtin.IRubyObject; 16 | import org.jruby.runtime.callsite.CachingCallSite; 17 | import org.jruby.runtime.callsite.FunctionalCachingCallSite; 18 | 19 | import java.util.Comparator; 20 | import java.util.SortedMap; 21 | import java.util.TreeMap; 22 | 23 | /** 24 | * FileWatch::WatchedFilesCollection for managing paths mapped to (watched) files. 25 | * 26 | * Implemented in native to avoid Ruby->Java type casting (which JRuby provides no control of as of 9.2). 27 | * The collection already has a noticeable footprint when 10_000s of files are being watched at once, having 28 | * the implementation in Java reduces 1000s of String conversions on every watch re-stat tick. 
29 | */ 30 | public class WatchedFilesCollection extends RubyObject { 31 | 32 | // we could have used Ruby's SortedSet but it does not provide support for custom comparators 33 | private SortedMap files; // FileWatch::WatchedFile -> String 34 | private RubyHash filesInverse; // String -> FileWatch::WatchedFile 35 | private String sortBy; 36 | 37 | public WatchedFilesCollection(Ruby runtime, RubyClass metaClass) { 38 | super(runtime, metaClass); 39 | } 40 | 41 | static void load(Ruby runtime) { 42 | runtime.getOrCreateModule("FileWatch") 43 | .defineClassUnder("WatchedFilesCollection", runtime.getObject(), WatchedFilesCollection::new) 44 | .defineAnnotatedMethods(WatchedFilesCollection.class); 45 | } 46 | 47 | @JRubyMethod 48 | public IRubyObject initialize(final ThreadContext context, IRubyObject settings) { 49 | final String sort_by = settings.callMethod(context, "file_sort_by").asJavaString(); 50 | final String sort_direction = settings.callMethod(context, "file_sort_direction").asJavaString(); 51 | 52 | Comparator comparator; 53 | switch (sort_by) { 54 | case "last_modified" : 55 | sortBy = "modified_at"; 56 | comparator = (file1, file2) -> { 57 | if (file1 == file2) return 0; // fast shortcut 58 | RubyFloat mtime1 = modified_at(context, file1); 59 | RubyFloat mtime2 = modified_at(context, file2); 60 | int cmp = Double.compare(mtime1.getDoubleValue(), mtime2.getDoubleValue()); 61 | // if mtime same (rare unless file1 == file2) - order consistently 62 | if (cmp == 0) return path(context, file1).op_cmp(path(context, file2)); 63 | return cmp; 64 | }; 65 | break; 66 | case "path" : 67 | sortBy = "path"; 68 | comparator = (file1, file2) -> path(context, file1).op_cmp(path(context, file2)); 69 | break; 70 | default : 71 | throw context.runtime.newArgumentError("sort_by: '" + sort_by + "' not supported"); 72 | } 73 | switch (sort_direction) { 74 | case "asc" : 75 | // all good - comparator uses ascending order 76 | break; 77 | case "desc" : 78 | comparator = 
comparator.reversed(); 79 | break; 80 | default : 81 | throw context.runtime.newArgumentError("sort_direction: '" + sort_direction + "' not supported"); 82 | } 83 | 84 | this.files = new TreeMap<>(comparator); 85 | this.filesInverse = RubyHash.newHash(context.runtime); 86 | 87 | // variableTableStore("@files", JavaUtil.convertJavaToRuby(context.runtime, this.files)); 88 | // variableTableStore("@files_inverse", this.filesInverse); 89 | 90 | return this; 91 | } 92 | 93 | @JRubyMethod 94 | public IRubyObject add(ThreadContext context, IRubyObject file) { 95 | RubyString path = getFilePath(context, file); 96 | synchronized (this) { 97 | RubyString prev_path = this.files.put(file, path); 98 | assert prev_path == null || path.equals(prev_path); // file's path should not change! 99 | this.filesInverse.op_aset(context, path, file); 100 | } 101 | return path; 102 | } 103 | 104 | private static RubyString getFilePath(ThreadContext context, IRubyObject file) { 105 | IRubyObject path = file.callMethod(context, "path"); 106 | if (!(path instanceof RubyString)) { 107 | throw context.runtime.newTypeError("expected file.path to return String but did not file: " + file.inspect()); 108 | } 109 | if (!path.isFrozen()) path = ((RubyString) path).dupFrozen(); // path = path.dup.freeze 110 | return (RubyString) path; 111 | } 112 | 113 | @JRubyMethod 114 | public IRubyObject remove_paths(ThreadContext context, IRubyObject arg) { 115 | IRubyObject[] paths; 116 | if (arg instanceof RubyArray) { 117 | paths = ((RubyArray) arg).toJavaArray(); 118 | } else { 119 | paths = new IRubyObject[] { arg }; 120 | } 121 | 122 | int removedCount = 0; 123 | synchronized (this) { 124 | for (final IRubyObject path : paths) { 125 | if (removePath(context, path.convertToString())) removedCount++; 126 | } 127 | } 128 | return context.runtime.newFixnum(removedCount); 129 | } 130 | 131 | private boolean removePath(ThreadContext context, RubyString path) { 132 | IRubyObject file = 
this.filesInverse.delete(context, path, Block.NULL_BLOCK); 133 | if (file.isNil()) return false; 134 | return this.files.remove(file) != null; 135 | } 136 | 137 | @JRubyMethod // synchronize { @files_inverse[path] } 138 | public synchronized IRubyObject get(ThreadContext context, IRubyObject path) { 139 | return this.filesInverse.op_aref(context, path); 140 | } 141 | 142 | @JRubyMethod // synchronize { @files.size } 143 | public synchronized IRubyObject size(ThreadContext context) { 144 | return context.runtime.newFixnum(this.files.size()); 145 | } 146 | 147 | @JRubyMethod(name = "empty?") // synchronize { @files.empty? } 148 | public synchronized IRubyObject empty_p(ThreadContext context) { 149 | return context.runtime.newBoolean(this.files.isEmpty()); 150 | } 151 | 152 | @JRubyMethod 153 | public synchronized IRubyObject each_file(ThreadContext context, Block block) { 154 | for (IRubyObject watched_file : this.files.keySet()) { 155 | block.yield(context, watched_file); 156 | } 157 | return context.nil; 158 | } 159 | 160 | @JRubyMethod // synchronize { @files.values.to_a } 161 | public IRubyObject paths(ThreadContext context) { 162 | IRubyObject[] values; 163 | synchronized (this) { 164 | values = this.files.values().stream().toArray(IRubyObject[]::new); 165 | } 166 | return context.runtime.newArrayNoCopy(values); 167 | } 168 | 169 | // NOTE: needs to return properly ordered files (can not use @files_inverse) 170 | @JRubyMethod // synchronize { @files.key_set.to_a } 171 | public IRubyObject files(ThreadContext context) { 172 | IRubyObject[] keys; 173 | synchronized (this) { 174 | keys = this.files.keySet().stream().toArray(IRubyObject[]::new); 175 | } 176 | return context.runtime.newArrayNoCopy(keys); 177 | } 178 | 179 | 180 | @JRubyMethod 181 | public IRubyObject update(ThreadContext context, IRubyObject file) { 182 | // NOTE: modified_at might change on restat - to cope with that we need to potentially 183 | // update the sorted collection, on such changes (when 
file_sort_by: last_modified) : 184 | if (!"modified_at".equals(sortBy)) return context.nil; 185 | 186 | RubyString path = getFilePath(context, file); 187 | synchronized (this) { 188 | this.files.remove(file); // we need to "re-sort" changed file -> remove and add it back 189 | modified_at(context, file, context.tru); // file.modified_at(update: true) 190 | RubyString prev_path = this.files.put(file, path); 191 | assert prev_path == null; 192 | } 193 | return context.tru; 194 | } 195 | 196 | @JRubyMethod(required = 1, visibility = Visibility.PRIVATE) 197 | @Override 198 | public IRubyObject initialize_copy(IRubyObject original) { 199 | final Ruby runtime = getRuntime(); 200 | if (!(original instanceof WatchedFilesCollection)) { 201 | throw runtime.newTypeError("Expecting an instance of class WatchedFilesCollection"); 202 | } 203 | 204 | WatchedFilesCollection proto = (WatchedFilesCollection) original; 205 | 206 | this.files = new TreeMap<>(proto.files.comparator()); 207 | synchronized (proto) { 208 | this.files.putAll(proto.files); 209 | this.filesInverse = (RubyHash) proto.filesInverse.dup(runtime.getCurrentContext()); 210 | } 211 | 212 | return this; 213 | } 214 | 215 | @Override 216 | public IRubyObject inspect() { 217 | return getRuntime().newString("#<" + metaClass.getRealClass().getName() + ": size=" + this.files.size() + ">"); 218 | } 219 | 220 | private static final CachingCallSite modified_at_site = new FunctionalCachingCallSite("modified_at"); 221 | private static final CachingCallSite path_site = new FunctionalCachingCallSite("path"); 222 | 223 | private static RubyString path(ThreadContext context, IRubyObject watched_file) { 224 | return path_site.call(context, watched_file, watched_file).convertToString(); 225 | } 226 | 227 | private static RubyFloat modified_at(ThreadContext context, IRubyObject watched_file) { 228 | return modified_at_site.call(context, watched_file, watched_file).convertToFloat(); 229 | } 230 | 231 | private static RubyFloat 
modified_at(ThreadContext context, IRubyObject watched_file, RubyBoolean update) { 232 | return modified_at_site.call(context, watched_file, watched_file, update).convertToFloat(); 233 | } 234 | 235 | } 236 | --------------------------------------------------------------------------------