├── .ci ├── Dockerfile.elasticsearch ├── docker-compose.override.yml ├── docker-run.sh ├── docker-setup.sh ├── elasticsearch-run.sh └── logstash-run.sh ├── .github ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE.md └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── CONTRIBUTORS ├── Gemfile ├── LICENSE ├── NOTICE.TXT ├── README.md ├── Rakefile ├── docs └── index.asciidoc ├── lib └── logstash │ ├── outputs │ ├── elasticsearch.rb │ └── elasticsearch │ │ ├── data_stream_support.rb │ │ ├── default-ilm-policy.json │ │ ├── http_client.rb │ │ ├── http_client │ │ ├── manticore_adapter.rb │ │ └── pool.rb │ │ ├── http_client_builder.rb │ │ ├── ilm.rb │ │ ├── license_checker.rb │ │ ├── template_manager.rb │ │ └── templates │ │ └── ecs-disabled │ │ ├── elasticsearch-7x.json │ │ ├── elasticsearch-8x.json │ │ └── elasticsearch-9x.json │ └── plugin_mixins │ └── elasticsearch │ ├── api_configs.rb │ ├── common.rb │ └── noop_license_checker.rb ├── logstash-output-elasticsearch.gemspec └── spec ├── es_spec_helper.rb ├── fixtures ├── _nodes │ └── 7x.json ├── htpasswd ├── license_check │ ├── active.json │ └── inactive.json ├── nginx_reverse_proxy.conf ├── scripts │ └── painless │ │ ├── scripted_update.painless │ │ ├── scripted_update_nested.painless │ │ └── scripted_upsert.painless ├── template-with-policy-es7x.json ├── template-with-policy-es8x.json └── test_certs │ ├── GENERATED_AT │ ├── ca.crt │ ├── ca.der.sha256 │ ├── ca.key │ ├── renew.sh │ ├── test.crt │ ├── test.der.sha256 │ ├── test.key │ ├── test.p12 │ ├── test_invalid.crt │ ├── test_invalid.key │ ├── test_invalid.p12 │ ├── test_self_signed.crt │ ├── test_self_signed.key │ └── test_self_signed.p12 ├── integration └── outputs │ ├── compressed_indexing_spec.rb │ ├── create_spec.rb │ ├── data_stream_spec.rb │ ├── delete_spec.rb │ ├── ilm_spec.rb │ ├── index_spec.rb │ ├── index_version_spec.rb │ ├── ingest_pipeline_spec.rb │ ├── metrics_spec.rb │ ├── no_es_on_startup_spec.rb │ ├── painless_update_spec.rb │ ├── 
parent_spec.rb │ ├── retry_spec.rb │ ├── routing_spec.rb │ ├── sniffer_spec.rb │ ├── templates_spec.rb │ ├── unsupported_actions_spec.rb │ └── update_spec.rb ├── spec_helper.rb ├── support └── elasticsearch │ └── api │ └── actions │ ├── delete_ilm_policy.rb │ ├── get_ilm_policy.rb │ └── put_ilm_policy.rb └── unit ├── http_client_builder_spec.rb └── outputs ├── elasticsearch ├── data_stream_support_spec.rb ├── http_client │ ├── manticore_adapter_spec.rb │ └── pool_spec.rb ├── http_client_spec.rb └── template_manager_spec.rb ├── elasticsearch_proxy_spec.rb ├── elasticsearch_spec.rb ├── elasticsearch_ssl_spec.rb ├── error_whitelist_spec.rb └── license_check_spec.rb /.ci/Dockerfile.elasticsearch: -------------------------------------------------------------------------------- 1 | ARG ELASTIC_STACK_VERSION 2 | FROM docker.elastic.co/elasticsearch/elasticsearch:$ELASTIC_STACK_VERSION 3 | 4 | ARG plugin_path=/usr/share/plugins/plugin 5 | ARG es_path=/usr/share/elasticsearch 6 | ARG es_yml=$es_path/config/elasticsearch.yml 7 | ARG SECURE_INTEGRATION 8 | ARG ES_SSL_KEY_INVALID 9 | ARG ES_SSL_SUPPORTED_PROTOCOLS 10 | 11 | RUN rm -f $es_path/config/scripts 12 | 13 | COPY --chown=elasticsearch:elasticsearch spec/fixtures/test_certs/* $es_path/config/test_certs/ 14 | COPY --chown=elasticsearch:elasticsearch .ci/elasticsearch-run.sh $es_path/ 15 | 16 | RUN if [ "$SECURE_INTEGRATION" != "true" ] ; then echo "xpack.security.enabled: false" >> $es_yml; fi 17 | 18 | RUN if [ "$SECURE_INTEGRATION" = "true" ] ; then echo "xpack.security.http.ssl.enabled: $SECURE_INTEGRATION" >> $es_yml; fi 19 | RUN if [ "$SECURE_INTEGRATION" = "true" ] ; then \ 20 | if [ "$ES_SSL_KEY_INVALID" = "true" ] ; then \ 21 | echo "xpack.security.http.ssl.key: $es_path/config/test_certs/test_invalid.key" >> $es_yml; \ 22 | echo "xpack.security.http.ssl.certificate: $es_path/config/test_certs/test_invalid.crt" >> $es_yml; \ 23 | else \ 24 | echo "xpack.security.http.ssl.key: $es_path/config/test_certs/test.key" 
>> $es_yml; \ 25 | echo "xpack.security.http.ssl.certificate: $es_path/config/test_certs/test.crt" >> $es_yml; \ 26 | fi \ 27 | fi 28 | RUN if [ "$SECURE_INTEGRATION" = "true" ] ; then echo "xpack.security.http.ssl.certificate_authorities: [ '$es_path/config/test_certs/ca.crt' ]" >> $es_yml; fi 29 | RUN if [ "$SECURE_INTEGRATION" = "true" ] && [ ! -z "$ES_SSL_SUPPORTED_PROTOCOLS" ] ; then echo "xpack.security.http.ssl.supported_protocols: ${ES_SSL_SUPPORTED_PROTOCOLS}" >> $es_yml; fi 30 | 31 | RUN cat $es_yml 32 | 33 | RUN if [ "$SECURE_INTEGRATION" = "true" ] ; then $es_path/bin/elasticsearch-users useradd admin -p elastic -r superuser; fi 34 | RUN if [ "$SECURE_INTEGRATION" = "true" ] ; then $es_path/bin/elasticsearch-users useradd simpleuser -p abc123 -r superuser; fi 35 | RUN if [ "$SECURE_INTEGRATION" = "true" ] ; then $es_path/bin/elasticsearch-users useradd 'f@ncyuser' -p 'ab%12#' -r superuser; fi 36 | -------------------------------------------------------------------------------- /.ci/docker-compose.override.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | services: 4 | 5 | logstash: 6 | command: /usr/share/plugins/plugin/.ci/logstash-run.sh 7 | build: 8 | args: 9 | - ELASTIC_STACK_VERSION=$ELASTIC_STACK_VERSION 10 | environment: 11 | - ELASTIC_STACK_VERSION=$ELASTIC_STACK_VERSION 12 | - INTEGRATION=${INTEGRATION:-false} 13 | - SECURE_INTEGRATION=${SECURE_INTEGRATION:-false} 14 | - ES_SSL_KEY_INVALID=${ES_SSL_KEY_INVALID:-false} 15 | - ES_SSL_SUPPORTED_PROTOCOLS=$ES_SSL_SUPPORTED_PROTOCOLS 16 | 17 | elasticsearch: 18 | build: 19 | context: ../ 20 | dockerfile: .ci/Dockerfile.elasticsearch 21 | args: 22 | - ELASTIC_STACK_VERSION=$ELASTIC_STACK_VERSION 23 | - INTEGRATION=${INTEGRATION:-false} 24 | - SECURE_INTEGRATION=${SECURE_INTEGRATION:-false} 25 | - ES_SSL_KEY_INVALID=${ES_SSL_KEY_INVALID:-false} 26 | - ES_SSL_SUPPORTED_PROTOCOLS=$ES_SSL_SUPPORTED_PROTOCOLS 27 | environment: 28 | - 
ES_JAVA_OPTS=-Xms640m -Xmx640m 29 | command: /usr/share/elasticsearch/elasticsearch-run.sh 30 | tty: true 31 | ports: 32 | - "9200:9200" 33 | user: elasticsearch 34 | 35 | -------------------------------------------------------------------------------- /.ci/docker-run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This is intended to be run inside the docker container as the command of the docker-compose. 4 | set -ex 5 | 6 | cd .ci 7 | 8 | if [ "$INTEGRATION" == "true" ]; then 9 | # remove the `--attach logstash` if you want to see all logs including elasticsearch container logs 10 | docker compose up --exit-code-from logstash --attach logstash 11 | else 12 | docker compose up --exit-code-from logstash logstash 13 | fi 14 | -------------------------------------------------------------------------------- /.ci/docker-setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This is intended to be run the plugin's root directory. `ci/unit/docker-test.sh` 4 | # Ensure you have Docker installed locally and set the ELASTIC_STACK_VERSION environment variable. 
5 | set -e 6 | 7 | pull_docker_snapshot() { 8 | project="${1?project name required}" 9 | stack_version_alias="${2?stack version alias required}" 10 | local docker_image="docker.elastic.co/${project}/${project}${DISTRIBUTION_SUFFIX}:${ELASTIC_STACK_VERSION}" 11 | echo "Pulling $docker_image" 12 | if docker pull "$docker_image" ; then 13 | echo "docker pull successful" 14 | else 15 | case "$ELASTIC_STACK_VERSION_ARG" in 16 | "8.previous"|"8.current"|"8.next") 17 | exit 1 18 | ;; 19 | *) 20 | exit 2 21 | ;; 22 | esac 23 | fi 24 | } 25 | 26 | VERSION_URL="https://raw.githubusercontent.com/elastic/logstash/main/ci/logstash_releases.json" 27 | 28 | if [ -z "${ELASTIC_STACK_VERSION}" ]; then 29 | echo "Please set the ELASTIC_STACK_VERSION environment variable" 30 | echo "For example: export ELASTIC_STACK_VERSION=7.x" 31 | exit 1 32 | fi 33 | 34 | # The ELASTIC_STACK_VERSION may be an alias, save the original before translating it 35 | ELASTIC_STACK_VERSION_ALIAS="$ELASTIC_STACK_VERSION" 36 | 37 | echo "Fetching versions from $VERSION_URL" 38 | VERSIONS=$(curl -s $VERSION_URL) 39 | 40 | if [[ "$SNAPSHOT" = "true" ]]; then 41 | ELASTIC_STACK_RETRIEVED_VERSION=$(echo $VERSIONS | jq '.snapshots."'"$ELASTIC_STACK_VERSION"'"') 42 | echo $ELASTIC_STACK_RETRIEVED_VERSION 43 | else 44 | ELASTIC_STACK_RETRIEVED_VERSION=$(echo $VERSIONS | jq '.releases."'"$ELASTIC_STACK_VERSION"'"') 45 | fi 46 | 47 | if [[ "$ELASTIC_STACK_RETRIEVED_VERSION" != "null" ]]; then 48 | # remove starting and trailing double quotes 49 | ELASTIC_STACK_RETRIEVED_VERSION="${ELASTIC_STACK_RETRIEVED_VERSION%\"}" 50 | ELASTIC_STACK_RETRIEVED_VERSION="${ELASTIC_STACK_RETRIEVED_VERSION#\"}" 51 | echo "Translated $ELASTIC_STACK_VERSION to ${ELASTIC_STACK_RETRIEVED_VERSION}" 52 | export ELASTIC_STACK_VERSION=$ELASTIC_STACK_RETRIEVED_VERSION 53 | elif [[ "$ELASTIC_STACK_VERSION" == "8.next" ]]; then 54 | # we know "8.next" only exists between FF and GA of a minor 55 | # exit 1 so the build is skipped 56 | exit 1 57 | 
fi 58 | 59 | case "${DISTRIBUTION}" in 60 | default) DISTRIBUTION_SUFFIX="" ;; # empty string when explicit "default" is given 61 | *) DISTRIBUTION_SUFFIX="${DISTRIBUTION/*/-}${DISTRIBUTION}" ;; 62 | esac 63 | export DISTRIBUTION_SUFFIX 64 | 65 | echo "Testing against version: $ELASTIC_STACK_VERSION (distribution: ${DISTRIBUTION:-"default"})" 66 | 67 | if [[ "$ELASTIC_STACK_VERSION" = *"-SNAPSHOT" ]]; then 68 | pull_docker_snapshot "logstash" $ELASTIC_STACK_VERSION_ALIAS 69 | if [ "$INTEGRATION" == "true" ]; then 70 | pull_docker_snapshot "elasticsearch" $ELASTIC_STACK_VERSION_ALIAS 71 | fi 72 | fi 73 | 74 | if [ -f Gemfile.lock ]; then 75 | rm Gemfile.lock 76 | fi 77 | 78 | CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") 79 | 80 | cd .ci 81 | 82 | export BUILDKIT_PROGRESS=plain 83 | if [ "$INTEGRATION" == "true" ]; then 84 | docker compose down 85 | docker compose build 86 | else 87 | docker compose down 88 | docker compose build logstash 89 | fi 90 | -------------------------------------------------------------------------------- /.ci/elasticsearch-run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | 4 | /usr/share/elasticsearch/bin/elasticsearch -Ediscovery.type=single-node -Eaction.destructive_requires_name=false 5 | -------------------------------------------------------------------------------- /.ci/logstash-run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | env 4 | 5 | set -ex 6 | 7 | export PATH=$BUILD_DIR/gradle/bin:$PATH 8 | 9 | if [[ "$SECURE_INTEGRATION" == "true" ]]; then 10 | ES_URL="https://elasticsearch:9200" 11 | else 12 | ES_URL="http://elasticsearch:9200" 13 | fi 14 | 15 | # CentOS 7 using curl defaults does not enable TLSv1.3 16 | CURL_OPTS="-s -u admin:elastic -k --tlsv1.2 --tls-max 1.3" 17 | 18 | wait_for_es() { 19 | count=120 20 | while ! 
curl $CURL_OPTS $ES_URL >/dev/null && [[ $count -ne 0 ]]; do 21 | count=$(( $count - 1 )) 22 | [[ $count -eq 0 ]] && exit 1 23 | sleep 1 24 | done 25 | echo $(curl $CURL_OPTS $ES_URL | jq -r .version.number) 26 | } 27 | 28 | if [[ "$INTEGRATION" != "true" ]]; then 29 | bundle exec rspec --format=documentation spec/unit --tag ~integration --tag ~secure_integration 30 | else 31 | 32 | if [[ "$SECURE_INTEGRATION" == "true" ]]; then 33 | extra_tag_args="--tag secure_integration" 34 | else 35 | extra_tag_args="--tag ~secure_integration --tag integration" 36 | fi 37 | 38 | echo "Waiting for elasticsearch to respond..." 39 | ES_VERSION=$(wait_for_es) 40 | echo "Elasticsearch $ES_VERSION is Up!" 41 | bundle exec rspec --format=documentation $extra_tag_args --tag update_tests:painless --tag es_version:$ES_VERSION spec/integration 42 | fi 43 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Logstash 2 | 3 | All contributions are welcome: ideas, patches, documentation, bug reports, 4 | complaints, etc! 5 | 6 | Programming is not a required skill, and there are many ways to help out! 7 | It is more important to us that you are able to contribute. 8 | 9 | That said, some basic guidelines, which you are free to ignore :) 10 | 11 | ## Want to learn? 12 | 13 | Want to lurk about and see what others are doing with Logstash? 14 | 15 | * The irc channel (#logstash on irc.freenode.org) is a good place for this 16 | * The [forum](https://discuss.elastic.co/c/logstash) is also 17 | great for learning from others. 18 | 19 | ## Got Questions? 20 | 21 | Have a problem you want Logstash to solve for you? 22 | 23 | * You can ask a question in the [forum](https://discuss.elastic.co/c/logstash) 24 | * Alternately, you are welcome to join the IRC channel #logstash on 25 | irc.freenode.org and ask for help there! 
26 | 27 | ## Have an Idea or Feature Request? 28 | 29 | * File a ticket on [GitHub](https://github.com/elastic/logstash/issues). Please remember that GitHub is used only for issues and feature requests. If you have a general question, the [forum](https://discuss.elastic.co/c/logstash) or IRC would be the best place to ask. 30 | 31 | ## Something Not Working? Found a Bug? 32 | 33 | If you think you found a bug, it probably is a bug. 34 | 35 | * If it is a general Logstash or a pipeline issue, file it in [Logstash GitHub](https://github.com/elasticsearch/logstash/issues) 36 | * If it is specific to a plugin, please file it in the respective repository under [logstash-plugins](https://github.com/logstash-plugins) 37 | * or ask the [forum](https://discuss.elastic.co/c/logstash). 38 | 39 | # Contributing Documentation and Code Changes 40 | 41 | If you have a bugfix or new feature that you would like to contribute to 42 | logstash, and you think it will take more than a few minutes to produce the fix 43 | (ie; write code), it is worth discussing the change with the Logstash users and developers first! You can reach us via [GitHub](https://github.com/elastic/logstash/issues), the [forum](https://discuss.elastic.co/c/logstash), or via IRC (#logstash on freenode irc) 44 | Please note that Pull Requests without tests will not be merged. If you would like to contribute but do not have experience with writing tests, please ping us on IRC/forum or create a PR and ask our help. 45 | 46 | ## Contributing to plugins 47 | 48 | Check our [documentation](https://www.elastic.co/guide/en/logstash/current/contributing-to-logstash.html) on how to contribute to plugins or write your own! It is super easy! 49 | 50 | ## Contribution Steps 51 | 52 | 1. Test your changes! [Run](https://github.com/elastic/logstash#testing) the test suite 53 | 2. Please make sure you have signed our [Contributor License 54 | Agreement](https://www.elastic.co/contributor-agreement/). 
We are not 55 | asking you to assign copyright to us, but to give us the right to distribute 56 | your code without restriction. We ask this of all contributors in order to 57 | assure our users of the origin and continuing existence of the code. You 58 | only need to sign the CLA once. 59 | 3. Send a pull request! Push your changes to your fork of the repository and 60 | [submit a pull 61 | request](https://help.github.com/articles/using-pull-requests). In the pull 62 | request, describe what your changes do and mention any bugs/issues related 63 | to the pull request. 64 | 65 | 66 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Please post all product and debugging questions on our [forum](https://discuss.elastic.co/c/logstash). Your questions will reach our wider community members there, and if we confirm that there is a bug, then we can open a new issue here. 2 | 3 | For all general issues, please provide the following details for fast resolution: 4 | 5 | - Version: 6 | - Operating System: 7 | - Config File (if you have sensitive info, please remove it): 8 | - Sample Data: 9 | - Steps to Reproduce: 10 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Thanks for contributing to Logstash! 
If you haven't already signed our CLA, here's a handy link: https://www.elastic.co/contributor-agreement/ 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.swp 2 | *.gem 3 | Gemfile.lock 4 | .bundle 5 | .idea 6 | *~ 7 | .ruby-version 8 | elasticsearch.tar.gz 9 | /elasticsearch/* 10 | lumberjack.key 11 | target/ 12 | vendor/ 13 | /spec/fixtures/server.key 14 | /spec/fixtures/server.crt 15 | /lib/logstash/outputs/elasticsearch/templates/ecs-v* 16 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | import: 2 | - logstash-plugins/.ci:travis/travis.yml@1.x 3 | 4 | jobs: 5 | include: 6 | - stage: "Integration Tests" 7 | env: INTEGRATION=true SNAPSHOT=true LOG_LEVEL=info ELASTIC_STACK_VERSION=7.current 8 | - env: INTEGRATION=true SNAPSHOT=true LOG_LEVEL=info ELASTIC_STACK_VERSION=8.previous 9 | - env: INTEGRATION=true SNAPSHOT=true LOG_LEVEL=info ELASTIC_STACK_VERSION=8.current 10 | - env: INTEGRATION=true SNAPSHOT=true LOG_LEVEL=info ELASTIC_STACK_VERSION=8.next 11 | - env: INTEGRATION=true SNAPSHOT=true LOG_LEVEL=info ELASTIC_STACK_VERSION=8.future 12 | - env: INTEGRATION=true SNAPSHOT=true LOG_LEVEL=info ELASTIC_STACK_VERSION=main 13 | - stage: "Secure Integration Tests" 14 | env: SECURE_INTEGRATION=true INTEGRATION=true LOG_LEVEL=info ELASTIC_STACK_VERSION=8.current SNAPSHOT=true 15 | - env: SECURE_INTEGRATION=true INTEGRATION=true LOG_LEVEL=info ELASTIC_STACK_VERSION=7.current 16 | - env: SECURE_INTEGRATION=true INTEGRATION=true LOG_LEVEL=info ELASTIC_STACK_VERSION=7.current ES_SSL_KEY_INVALID=true 17 | - env: SECURE_INTEGRATION=true INTEGRATION=true LOG_LEVEL=info ELASTIC_STACK_VERSION=7.current ES_SSL_SUPPORTED_PROTOCOLS=TLSv1.3 18 | 
-------------------------------------------------------------------------------- /CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | The following is a list of people who have contributed ideas, code, bug 2 | reports, or in general have helped logstash along its way. 3 | 4 | Contributors: 5 | * Aaron Mildenstein (untergeek) 6 | * Bob Corsaro (dokipen) 7 | * Colin Surprenant (colinsurprenant) 8 | * Dmitry Koprov (dkoprov) 9 | * Graham Bleach (bleach) 10 | * Hao Chen (haoch) 11 | * Ivan Babrou (bobrik) 12 | * James Turnbull (jamtur01) 13 | * John E. Vincent (lusis) 14 | * Jordan Sissel (jordansissel) 15 | * João Duarte (jsvd) 16 | * Kurt Hurtado (kurtado) 17 | * Miah Johnson (miah) 18 | * Pere Urbón (purbon) 19 | * Pete Fritchman (fetep) 20 | * Pier-Hugues Pellerin (ph) 21 | * Raymond Feng (raymondfeng) 22 | * Richard Pijnenburg (electrical) 23 | * Spenser Jones (SpenserJ) 24 | * Suyog Rao (suyograo) 25 | * Tal Levy (talevy) 26 | * Tom Hodder (tolland) 27 | * jimmyjones2 28 | * Gabriel Moskovicz (gmoskovicz) 29 | * Luca Belluccini (lucabelluccini) 30 | 31 | Note: If you've sent us patches, bug reports, or otherwise contributed to 32 | Logstash, and you aren't on the list above and want to be, please let us know 33 | and we'll make sure you're here. Contributions from folks like you are what make 34 | open source awesome. 
35 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | gemspec 4 | 5 | logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash" 6 | use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1" 7 | 8 | if Dir.exist?(logstash_path) && use_logstash_source 9 | gem 'logstash-core', :path => "#{logstash_path}/logstash-core" 10 | gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api" 11 | end 12 | 13 | if RUBY_VERSION == "1.9.3" 14 | gem 'rake', '12.2.1' 15 | end 16 | 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 
26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. 
You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. 
(Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2020 Elastic and contributors 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /NOTICE.TXT: -------------------------------------------------------------------------------- 1 | Elasticsearch 2 | Copyright 2012-2015 Elasticsearch 3 | 4 | This product includes software developed by The Apache Software 5 | Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Logstash Plugin 2 | 3 | [![Travis Build Status](https://travis-ci.com/logstash-plugins/logstash-output-elasticsearch.svg)](https://travis-ci.com/logstash-plugins/logstash-output-elasticsearch) 4 | 5 | This is a plugin for [Logstash](https://github.com/elastic/logstash). 6 | 7 | It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way. 
8 | 9 | ## Documentation 10 | 11 | Logstash provides infrastructure to automatically generate documentation for this plugin. We use the asciidoc format to write documentation so any comments in the source code will be first converted into asciidoc and then into html. All plugin documentation is placed under one [central location](http://www.elastic.co/guide/en/logstash/current/). 12 | 13 | - For formatting code or config example, you can use the asciidoc `[source,ruby]` directive 14 | - For more asciidoc formatting tips, see the excellent reference here https://github.com/elastic/docs#asciidoc-guide 15 | 16 | ## Need Help? 17 | 18 | Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/logstash discussion forum. 19 | 20 | ## Developing 21 | 22 | ### 1. Plugin Development and Testing 23 | 24 | #### Code 25 | - To get started, you'll need JRuby with the Bundler gem installed. 26 | 27 | - Create a new plugin or clone an existing one from the GitHub [logstash-plugins](https://github.com/logstash-plugins) organization. We also provide [example plugins](https://github.com/logstash-plugins?query=example). 28 | 29 | - Install dependencies 30 | ```sh 31 | bundle install 32 | ``` 33 | 34 | #### Test 35 | 36 | - Update your dependencies 37 | 38 | ```sh 39 | bundle install 40 | ``` 41 | 42 | - Run unit tests 43 | 44 | ```sh 45 | bundle exec rspec 46 | ``` 47 | 48 | - Run integration tests 49 | 50 | ```sh 51 | export INTEGRATION=true 52 | export ES_VERSION=5.1.1 53 | ./travis-run.sh 54 | ``` 55 | 56 | ### 2.
Running your unpublished Plugin in Logstash 57 | 58 | #### 2.1 Run in a local Logstash clone 59 | 60 | - Edit Logstash `Gemfile` and add the local plugin path, for example: 61 | ```ruby 62 | gem "logstash-filter-awesome", :path => "/your/local/logstash-filter-awesome" 63 | ``` 64 | - Install plugin 65 | ```sh 66 | # Logstash 2.3 and higher 67 | bin/logstash-plugin install --no-verify 68 | 69 | # Prior to Logstash 2.3 70 | bin/plugin install --no-verify 71 | 72 | ``` 73 | - Run Logstash with your plugin 74 | ```sh 75 | bin/logstash -e 'filter {awesome {}}' 76 | ``` 77 | At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash. 78 | 79 | #### 2.2 Run in an installed Logstash 80 | 81 | You can use the same **2.1** method to run your plugin in an installed Logstash by editing its `Gemfile` and pointing the `:path` to your local plugin development directory or you can build the gem and install it using: 82 | 83 | - Build your plugin gem 84 | ```sh 85 | gem build logstash-filter-awesome.gemspec 86 | ``` 87 | - Install the plugin from the Logstash home 88 | ```sh 89 | # Logstash 2.3 and higher 90 | bin/logstash-plugin install --no-verify 91 | 92 | # Prior to Logstash 2.3 93 | bin/plugin install --no-verify 94 | 95 | ``` 96 | - Start Logstash and proceed to test the plugin 97 | 98 | ## Contributing 99 | 100 | All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin. 101 | 102 | Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here. 103 | 104 | It is more important to the community that you are able to contribute. 105 | 106 | For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file. 
107 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | require "logstash/devutils/rake" 2 | 3 | task :'vendor-ecs-schemata' do 4 | download_ecs_schemata(:v1, elasticsearch_major: 7, ecs_release_tag: 'v1.12.1') 5 | download_ecs_schemata(:v1, elasticsearch_major: 8, ecs_release_tag: 'v1.12.1', generated_for: 7) 6 | download_ecs_schemata(:v1, elasticsearch_major: 9, ecs_release_tag: 'v1.12.1', generated_for: 7) 7 | 8 | # PRERELEASE: 8.0 branch 9 | # when pinning to released tag, remove BETA warning. 10 | download_ecs_schemata(:v8, elasticsearch_major: 7, ecs_release_tag: '8.0') 11 | download_ecs_schemata(:v8, elasticsearch_major: 8, ecs_release_tag: '8.0') 12 | download_ecs_schemata(:v8, elasticsearch_major: 9, ecs_release_tag: '8.0') 13 | end 14 | 15 | task :vendor => :'vendor-ecs-schemata' 16 | 17 | 18 | ECS_LOGSTASH_INDEX_PATTERNS = %w( 19 | ecs-logstash-* 20 | ).freeze 21 | 22 | def download_ecs_schemata(ecs_major, elasticsearch_major:, ecs_release_tag:, generated_for: elasticsearch_major) 23 | # when talking with ES >= 8, this plugin uses the v2 _index_template API and needs 24 | # the generated monolith legacy index template to be transformed into a v2 index template 25 | transform = Proc.new { |template| transform_legacy_template_to_v2!(template) if elasticsearch_major >= 8 } 26 | 27 | return download_ecs_v1(elasticsearch_major: elasticsearch_major, ecs_release_tag: ecs_release_tag, generated_for: generated_for, &transform) if ecs_major == :v1 28 | 29 | fail(ArgumentError, "Stack-aligned #{ecs_major} does not support `generated_for`") if generated_for != elasticsearch_major 30 | 31 | download_ecs_aligned(ecs_major, elasticsearch_major: elasticsearch_major, ecs_release_tag: ecs_release_tag, &transform) 32 | end 33 | 34 | def download_ecs_v1(elasticsearch_major:, ecs_release_tag:, generated_for: elasticsearch_major, &transform) 35 | 
$stderr.puts("Vendoring v1 ECS template (#{ecs_release_tag}) for Elasticsearch #{elasticsearch_major}"+(elasticsearch_major==generated_for ? '': " (transformed from templates pre-generated for ES #{generated_for})")) 36 | 37 | source_url = "/elastic/ecs/#{ecs_release_tag}/generated/elasticsearch/#{generated_for}/template.json" 38 | download_and_transform(source_url: source_url, ecs_major: :v1, es_major: elasticsearch_major, &transform) 39 | end 40 | 41 | def download_ecs_aligned(ecs_major, elasticsearch_major:, ecs_release_tag:, &transform) 42 | $stderr.puts("Vendoring Stack-aligned ECS template (#{ecs_release_tag}) for Elasticsearch #{elasticsearch_major}") 43 | 44 | source_url = "/elastic/ecs/#{ecs_release_tag}/generated/elasticsearch/legacy/template.json" 45 | download_and_transform(source_url: source_url, ecs_major: ecs_major, es_major: elasticsearch_major, &transform) 46 | end 47 | 48 | def download_and_transform(source_url:, ecs_major:, es_major:) 49 | require 'net/http' 50 | require 'json' 51 | Net::HTTP.start('raw.githubusercontent.com', :use_ssl => true) do |http| 52 | response = http.get(source_url) 53 | fail "#{response.code} #{response.message}" unless (200...300).cover?(response.code.to_i) 54 | template_directory = File.expand_path("../lib/logstash/outputs/elasticsearch/templates/ecs-#{ecs_major}", __FILE__) 55 | Dir.mkdir(template_directory) unless File.exist?(template_directory) 56 | File.open(File.join(template_directory, "/elasticsearch-#{es_major}x.json"), "w") do |handle| 57 | template = JSON.load(response.body) 58 | replace_index_patterns!(template, ECS_LOGSTASH_INDEX_PATTERNS) 59 | yield(template) if block_given?
60 | handle.write(JSON.pretty_generate template) 61 | end 62 | end 63 | end 64 | 65 | # destructively replaces the index pattern with the provided replacements 66 | def replace_index_patterns!(template, replacement_index_patterns) 67 | template.update('index_patterns' => replacement_index_patterns) 68 | end 69 | 70 | # destructively transforms an ES7-style legacy template into an ES8-compatible index_template. 71 | def transform_legacy_template_to_v2!(template) 72 | # `settings` and `mappings` are now nested under top-level `template` 73 | template["template"] = { 74 | "settings" => template.delete("settings"), 75 | "mappings" => template.delete("mappings"), 76 | } 77 | 78 | # `order` is gone, replaced with `priority`. 79 | template.delete('order') 80 | template["priority"] = 200 #arbitrary 81 | 82 | # a new free-form `_meta` exists, so let's add a note about where the template came from 83 | template["_meta"] = { "description" => "ECS index template for logstash-output-elasticsearch" } 84 | 85 | nil 86 | end 87 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/data_stream_support.rb: -------------------------------------------------------------------------------- 1 | module LogStash module Outputs class ElasticSearch 2 | # DS specific behavior/configuration. 3 | module DataStreamSupport 4 | 5 | # @api private 6 | ENABLING_ECS_GUIDANCE = <<~END.tr("\n", " ") 7 | Elasticsearch data streams require that events adhere to the Elastic Common Schema. 8 | While `ecs_compatibility` can be set for this individual Elasticsearch output plugin, doing so will not fix schema conflicts caused by upstream plugins in your pipeline. 9 | To avoid mapping conflicts, you will need to use ECS-compatible field names and datatypes throughout your pipeline. 10 | Many plugins support an `ecs_compatibility` mode, and the `pipeline.ecs_compatibility` setting can be used to opt-in for all plugins in a pipeline. 
11 | END 12 | private_constant :ENABLING_ECS_GUIDANCE 13 | 14 | def self.included(base) 15 | # Defines whether data will be indexed into an Elasticsearch data stream, 16 | # `data_stream_*` settings will only be used if this setting is enabled! 17 | # This setting supports values `true`, `false`, and `auto`. 18 | # Defaults to `false` in Logstash 7.x and `auto` starting in Logstash 8.0. 19 | base.config :data_stream, :validate => ['true', 'false', 'auto'] 20 | 21 | base.config :data_stream_type, :validate => ['logs', 'metrics', 'synthetics', 'traces'], :default => 'logs' 22 | base.config :data_stream_dataset, :validate => :dataset_identifier, :default => 'generic' 23 | base.config :data_stream_namespace, :validate => :namespace_identifier, :default => 'default' 24 | 25 | base.config :data_stream_sync_fields, :validate => :boolean, :default => true 26 | base.config :data_stream_auto_routing, :validate => :boolean, :default => true 27 | 28 | base.extend(Validator) 29 | end 30 | 31 | def data_stream_config? 32 | @data_stream_config.nil? ? @data_stream_config = check_data_stream_config! : @data_stream_config 33 | end 34 | 35 | private 36 | 37 | def data_stream_name(event) 38 | data_stream = event.get('data_stream') 39 | return @index if !data_stream_auto_routing || !data_stream.is_a?(Hash) 40 | 41 | type = data_stream['type'] || data_stream_type 42 | dataset = data_stream['dataset'] || data_stream_dataset 43 | namespace = data_stream['namespace'] || data_stream_namespace 44 | "#{type}-#{dataset}-#{namespace}" 45 | end 46 | 47 | DATA_STREAMS_AND_ECS_ENABLED_BY_DEFAULT_LS_VERSION = '8.0.0' 48 | 49 | # @param params the user configuration for the ES output 50 | # @note LS initialized configuration (with filled defaults) won't detect as data-stream 51 | # compatible, only explicit (`original_params`) config should be tested. 
52 | # @return [Boolean] whether given configuration is data-stream compatible 53 | def check_data_stream_config!(params = original_params) 54 | case data_stream_explicit_value 55 | when false 56 | check_disabled_data_stream_config!(params) 57 | return false 58 | when true 59 | check_enabled_data_stream_config!(params) 60 | return true 61 | else # data_stream => auto or not set 62 | use_data_stream = data_stream_default(params) 63 | 64 | check_disabled_data_stream_config!(params) unless use_data_stream 65 | 66 | @logger.info("Data streams auto configuration (`data_stream => auto` or unset) resolved to `#{use_data_stream}`") 67 | return use_data_stream 68 | end 69 | end 70 | 71 | def check_enabled_data_stream_config!(params) 72 | invalid_data_stream_params = invalid_data_stream_params(params) 73 | 74 | if invalid_data_stream_params.any? 75 | @logger.error "Invalid data stream configuration, the following parameters are not supported:", invalid_data_stream_params 76 | raise LogStash::ConfigurationError, "Invalid data stream configuration: #{invalid_data_stream_params.keys}" 77 | end 78 | 79 | if ecs_compatibility == :disabled 80 | if ecs_compatibility_required? 81 | @logger.error "Invalid data stream configuration; `ecs_compatibility` must not be `disabled`. " + ENABLING_ECS_GUIDANCE 82 | raise LogStash::ConfigurationError, "Invalid data stream configuration: `ecs_compatibility => disabled`" 83 | end 84 | 85 | @deprecation_logger.deprecated "In a future release of Logstash, the Elasticsearch output plugin's `data_stream => true` will require the plugin to be run in ECS compatibility mode. " + ENABLING_ECS_GUIDANCE 86 | end 87 | end 88 | 89 | def check_disabled_data_stream_config!(params) 90 | data_stream_params = data_stream_params(params) 91 | 92 | if data_stream_params.any? 
93 | @logger.error "Ambiguous configuration; data stream settings must not be present when data streams are disabled (caused by `data_stream => false`, `data_stream => auto` or unset resolved to false). " \ 94 | "You can either manually set `data_stream => true` or remove the following specific data stream settings: ", data_stream_params 95 | 96 | raise LogStash::ConfigurationError, 97 | "Ambiguous configuration; data stream settings must not be present when data streams are disabled: #{data_stream_params.keys}" 98 | end 99 | end 100 | 101 | def data_stream_params(params) 102 | params.select { |name, _| name.start_with?('data_stream_') } 103 | end 104 | 105 | def data_stream_explicit_value 106 | case @data_stream 107 | when 'true' 108 | return true 109 | when 'false' 110 | return false 111 | else 112 | return nil # 'auto' or not set by user 113 | end 114 | end 115 | 116 | def invalid_data_stream_params(params) 117 | shared_params = LogStash::PluginMixins::ElasticSearch::APIConfigs::CONFIG_PARAMS.keys.map(&:to_s) 118 | params.reject do |name, value| 119 | # NOTE: intentionally do not support explicit DS configuration like: 120 | # - `index => ...` identifier provided by data_stream_xxx settings 121 | case name 122 | when 'action' 123 | value == 'create' 124 | when 'routing', 'pipeline' 125 | true 126 | when 'data_stream' 127 | value.to_s == 'true' 128 | when 'manage_template' 129 | value.to_s == 'false' 130 | else 131 | name.start_with?('data_stream_') || 132 | shared_params.include?(name) || 133 | inherited_internal_config_param?(name) # 'id', 'enabled_metric' etc 134 | end 135 | end 136 | end 137 | 138 | def inherited_internal_config_param?(name) 139 | self.class.superclass.get_config.key?(name.to_s) # superclass -> LogStash::Outputs::Base 140 | end 141 | 142 | DATA_STREAMS_ORIGIN_ES_VERSION = '7.9.0' 143 | 144 | # @note assumes to be running AFTER {after_successful_connection} completed, due ES version checks 145 | # @return [Gem::Version] if ES supports DS nil 
(or raise) otherwise 146 | def assert_es_version_supports_data_streams 147 | raise LogStash::ConfigurationError, 'no last_es_version' unless last_es_version # assert - should not happen 148 | es_version = ::Gem::Version.create(last_es_version) 149 | if es_version < ::Gem::Version.create(DATA_STREAMS_ORIGIN_ES_VERSION) 150 | @logger.error "Elasticsearch version does not support data streams, Logstash might end up writing to an index", es_version: es_version.version 151 | # NOTE: when switching to synchronous check from register, this should be a ConfigurationError 152 | raise LogStash::ConfigurationError, "A data_stream configuration is only supported since Elasticsearch #{DATA_STREAMS_ORIGIN_ES_VERSION} " + 153 | "(detected version #{es_version.version}), please upgrade your cluster" 154 | end 155 | es_version # return truthy 156 | end 157 | 158 | # when data_stream => is either 'auto' or not set 159 | def data_stream_default(params) 160 | if ecs_compatibility == :disabled 161 | @logger.info("Not eligible for data streams because ecs_compatibility is not enabled. " + ENABLING_ECS_GUIDANCE) 162 | return false 163 | end 164 | 165 | invalid_data_stream_params = invalid_data_stream_params(params) 166 | 167 | if data_stream_and_ecs_enabled_by_default? 168 | if invalid_data_stream_params.any? 169 | @logger.info("Not eligible for data streams because config contains one or more settings that are not compatible with data streams: #{invalid_data_stream_params.inspect}") 170 | return false 171 | end 172 | 173 | return true 174 | end 175 | 176 | # LS 7.x 177 | if !invalid_data_stream_params.any? && !data_stream_params(params).any? 178 | @logger.warn "Configuration is data stream compliant but due backwards compatibility Logstash 7.x will not assume " + 179 | "writing to a data-stream, default behavior will change on Logstash 8.0 " + 180 | "(set `data_stream => true/false` to disable this warning)" 181 | end 182 | false 183 | end 184 | 185 | def ecs_compatibility_required?
186 | data_stream_and_ecs_enabled_by_default? 187 | end 188 | 189 | def data_stream_and_ecs_enabled_by_default? 190 | ::Gem::Version.create(LOGSTASH_VERSION) >= ::Gem::Version.create(DATA_STREAMS_AND_ECS_ENABLED_BY_DEFAULT_LS_VERSION) 191 | end 192 | 193 | # an {event_action_tuple} replacement when a data-stream configuration is detected 194 | def data_stream_event_action_tuple(event) 195 | event_data = event.to_hash 196 | data_stream_event_sync(event_data) if data_stream_sync_fields 197 | EventActionTuple.new('create', common_event_params(event), event, event_data) 198 | end 199 | 200 | DATA_STREAM_SYNC_FIELDS = [ 'type', 'dataset', 'namespace' ].freeze 201 | 202 | def data_stream_event_sync(event_data) 203 | data_stream = event_data['data_stream'] 204 | if data_stream.is_a?(Hash) 205 | unless data_stream_auto_routing 206 | sync_fields = DATA_STREAM_SYNC_FIELDS.select { |name| data_stream.key?(name) && data_stream[name] != send(:"data_stream_#{name}") } 207 | if sync_fields.any? # these fields will need to be overwritten 208 | info = sync_fields.inject({}) { |info, name| info[name] = data_stream[name]; info } 209 | info[:event] = event_data 210 | @logger.warn "Some data_stream fields are out of sync, these will be updated to reflect data-stream name", info 211 | 212 | # NOTE: we work directly with event.to_hash data thus fine to mutate the 'data_stream' hash 213 | sync_fields.each { |name| data_stream[name] = nil } # fallback to ||= below 214 | end 215 | end 216 | else 217 | unless data_stream.nil?
218 | @logger.warn "Invalid 'data_stream' field type, due fields sync will overwrite", value: data_stream, event: event_data 219 | end 220 | event_data['data_stream'] = data_stream = Hash.new 221 | end 222 | 223 | data_stream['type'] ||= data_stream_type 224 | data_stream['dataset'] ||= data_stream_dataset 225 | data_stream['namespace'] ||= data_stream_namespace 226 | 227 | event_data 228 | end 229 | 230 | module Validator 231 | 232 | # @override {LogStash::Config::Mixin::validate_value} to handle custom validators 233 | # @param value [Array] 234 | # @param validator [nil,Array,Symbol] 235 | # @return [Array(true,Object)]: if validation is a success, a tuple containing `true` and the coerced value 236 | # @return [Array(false,String)]: if validation is a failure, a tuple containing `false` and the failure reason. 237 | def validate_value(value, validator) 238 | case validator 239 | when :dataset_identifier then validate_dataset_identifier(value) 240 | when :namespace_identifier then validate_namespace_identifier(value) 241 | else super 242 | end 243 | end 244 | 245 | private 246 | 247 | def validate_dataset_identifier(value) 248 | valid, value = validate_value(value, :string) 249 | return false, value unless valid 250 | 251 | validate_identifier(value) 252 | end 253 | 254 | def validate_namespace_identifier(value) 255 | valid, value = validate_value(value, :string) 256 | return false, value unless valid 257 | 258 | validate_identifier(value) 259 | end 260 | 261 | def validate_identifier(value, max_size = 100) 262 | if value.empty? 263 | return false, "Invalid identifier - empty string" 264 | end 265 | if value.bytesize > max_size 266 | return false, "Invalid identifier - too long (#{value.bytesize} bytes)" 267 | end 268 | # cannot include \, /, *, ?, ", <, >, |, ' ' (space char), ',', #, : 269 | if value.match? 
Regexp.union(INVALID_IDENTIFIER_CHARS) 270 | return false, "Invalid characters detected #{INVALID_IDENTIFIER_CHARS.inspect} are not allowed" 271 | end 272 | return true, value 273 | end 274 | 275 | INVALID_IDENTIFIER_CHARS = [ '\\', '/', '*', '?', '"', '<', '>', '|', ' ', ',', '#', ':' ] 276 | private_constant :INVALID_IDENTIFIER_CHARS 277 | 278 | end 279 | 280 | end 281 | end end end 282 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/default-ilm-policy.json: -------------------------------------------------------------------------------- 1 | { 2 | "policy" : { 3 | "phases": { 4 | "hot" : { 5 | "actions" : { 6 | "rollover" : { 7 | "max_size" : "50gb", 8 | "max_age":"30d" 9 | } 10 | } 11 | } 12 | } 13 | } 14 | } -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb: -------------------------------------------------------------------------------- 1 | require 'manticore' 2 | require 'cgi' 3 | 4 | module LogStash; module Outputs; class ElasticSearch; class HttpClient; 5 | DEFAULT_HEADERS = { "Content-Type" => "application/json", 'x-elastic-product-origin' => 'logstash-output-elasticsearch' } 6 | 7 | class ManticoreAdapter 8 | attr_reader :manticore, :logger 9 | 10 | def initialize(logger, options) 11 | @logger = logger 12 | options = options.dup 13 | options[:ssl] = options[:ssl] || {} 14 | 15 | # We manage our own retries directly, so let's disable them here 16 | options[:automatic_retries] = 0 17 | # We definitely don't need cookies 18 | options[:cookies] = false 19 | 20 | @client_params = {:headers => DEFAULT_HEADERS.merge(options[:headers] || {})} 21 | 22 | if options[:proxy] 23 | options[:proxy] = manticore_proxy_hash(options[:proxy]) 24 | end 25 | 26 | @manticore = ::Manticore::Client.new(options) 27 | end 28 | 29 | # Transform the proxy option to a hash. 
Manticore's support for non-hash 30 | # proxy options is broken. This was fixed in https://github.com/cheald/manticore/commit/34a00cee57a56148629ed0a47c329181e7319af5 31 | # but this is not yet released 32 | def manticore_proxy_hash(proxy_uri) 33 | [:scheme, :port, :user, :password, :path].reduce(:host => proxy_uri.host) do |acc,opt| 34 | value = proxy_uri.send(opt) 35 | acc[opt] = value unless value.nil? || (value.is_a?(String) && value.empty?) 36 | acc 37 | end 38 | end 39 | 40 | def client 41 | @manticore 42 | end 43 | 44 | # Performs the request by invoking {Transport::Base#perform_request} with a block. 45 | # 46 | # @return [Response] 47 | # @see Transport::Base#perform_request 48 | # 49 | def perform_request(url, method, path, params={}, body=nil) 50 | # Perform 2-level deep merge on the params, so if the passed params and client params will both have hashes stored on a key they 51 | # will be merged as well, instead of choosing just one of the values 52 | params = (params || {}).merge(@client_params) { |key, oldval, newval| 53 | (oldval.is_a?(Hash) && newval.is_a?(Hash)) ? 
oldval.merge(newval) : newval 54 | } 55 | params[:body] = body if body 56 | 57 | if url.user 58 | params[:auth] = { 59 | :user => CGI.unescape(url.user), 60 | # We have to unescape the password here since manticore won't do it 61 | # for us unless its part of the URL 62 | :password => CGI.unescape(url.password), 63 | :eager => true 64 | } 65 | end 66 | 67 | request_uri = format_url(url, path) 68 | request_uri_as_string = remove_double_escaping(request_uri.to_s) 69 | begin 70 | resp = @manticore.send(method.downcase, request_uri_as_string, params) 71 | # Manticore returns lazy responses by default 72 | # We want to block for our usage, this will wait for the response to finish 73 | resp.call 74 | rescue ::Manticore::ManticoreException => e 75 | log_request_error(e) 76 | raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError.new(e, request_uri_as_string) 77 | end 78 | 79 | code = resp.code 80 | if code < 200 || code > 299 # assume anything not 2xx is an error that the layer above needs to interpret 81 | raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(code, request_uri, body, resp.body) 82 | end 83 | 84 | resp 85 | end 86 | 87 | def log_request_error(e) 88 | details = { message: e.message, exception: e.class } 89 | details[:cause] = e.cause if e.respond_to?(:cause) 90 | details[:backtrace] = e.backtrace if @logger.debug? 91 | 92 | level = case e 93 | when ::Manticore::Timeout 94 | :debug 95 | when ::Manticore::UnknownException 96 | :warn 97 | else 98 | :info 99 | end 100 | 101 | @logger.send level, "Failed to perform request", details 102 | log_java_exception(details[:cause], :debug) if details[:cause] && @logger.debug? 
103 | end 104 | 105 | def log_java_exception(e, level = :debug) 106 | return unless e.is_a?(java.lang.Exception) 107 | # @logger.name using the same convention as LS does 108 | logger = self.class.name.gsub('::', '.').downcase 109 | logger = org.apache.logging.log4j.LogManager.getLogger(logger) 110 | logger.send(level, '', e) # logger.error('', e) - prints nested causes 111 | end 112 | 113 | # Returned urls from this method should be checked for double escaping. 114 | def format_url(url, path_and_query=nil) 115 | request_uri = url.clone 116 | 117 | # We excise auth info from the URL in case manticore itself tries to stick 118 | # sensitive data in a thrown exception or log data 119 | request_uri.user = nil 120 | request_uri.password = nil 121 | 122 | return request_uri.to_s if path_and_query.nil? 123 | 124 | parsed_path_and_query = java.net.URI.new(path_and_query) 125 | 126 | new_query_parts = [request_uri.query, parsed_path_and_query.query].select do |part| 127 | part && !part.empty? # Skip empty nil and "" 128 | end 129 | 130 | request_uri.query = new_query_parts.join("&") unless new_query_parts.empty? 131 | 132 | # use `raw_path`` as `path` will unescape any escaped '/' in the path 133 | request_uri.path = "#{request_uri.path}/#{parsed_path_and_query.raw_path}".gsub(/\/{2,}/, "/") 134 | request_uri 135 | end 136 | 137 | # Later versions of SafeURI will also escape the '%' sign in an already escaped URI. 
138 | # (If the path variable is used, it constructs a new java.net.URI object using the multi-arg constructor, 139 | # which will escape any '%' characters in the path, as opposed to the single-arg constructor which requires illegal 140 | # characters to be already escaped, and will throw otherwise) 141 | # The URI needs to have been previously escaped, as it does not play nice with an escaped '/' in the 142 | # middle of a URI, as required by date math, treating it as a path separator 143 | def remove_double_escaping(url) 144 | url.gsub(/%25([0-9A-F]{2})/i, '%\1') 145 | end 146 | 147 | def close 148 | @manticore.close 149 | end 150 | 151 | end 152 | end; end; end; end 153 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/http_client_builder.rb: -------------------------------------------------------------------------------- 1 | require 'cgi' 2 | require "base64" 3 | 4 | module LogStash; module Outputs; class ElasticSearch; 5 | module HttpClientBuilder 6 | def self.build(logger, hosts, params) 7 | client_settings = { 8 | :pool_max => params["pool_max"], 9 | :pool_max_per_route => params["pool_max_per_route"], 10 | :check_connection_timeout => params["validate_after_inactivity"], 11 | :compression_level => params["compression_level"], 12 | :headers => params["custom_headers"] || {} 13 | } 14 | 15 | client_settings[:proxy] = params["proxy"] if params["proxy"] 16 | 17 | common_options = { 18 | :license_checker => params["license_checker"], 19 | :client_settings => client_settings, 20 | :metric => params["metric"], 21 | :resurrect_delay => params["resurrect_delay"] 22 | } 23 | 24 | if params["sniffing"] 25 | common_options[:sniffing] = true 26 | common_options[:sniffer_delay] = params["sniffing_delay"] 27 | end 28 | 29 | common_options[:timeout] = params["timeout"] if params["timeout"] 30 | 31 | if params["path"] 32 | client_settings[:path] = dedup_slashes("/#{params["path"]}/") 33 | end 34 | 35 | 
common_options[:bulk_path] = if params["bulk_path"] 36 | resolve_filter_path(dedup_slashes("/#{params["bulk_path"]}")) 37 | else 38 | resolve_filter_path(dedup_slashes("/#{params["path"]}/_bulk")) 39 | end 40 | 41 | common_options[:sniffing_path] = if params["sniffing_path"] 42 | dedup_slashes("/#{params["sniffing_path"]}") 43 | else 44 | dedup_slashes("/#{params["path"]}/_nodes/http") 45 | end 46 | 47 | common_options[:healthcheck_path] = if params["healthcheck_path"] 48 | dedup_slashes("/#{params["healthcheck_path"]}") 49 | else 50 | dedup_slashes("/#{params["path"]}") 51 | end 52 | 53 | if params["parameters"] 54 | client_settings[:parameters] = params["parameters"] 55 | end 56 | 57 | logger.debug? && logger.debug("Normalizing http path", :path => params["path"], :normalized => client_settings[:path]) 58 | 59 | client_settings.merge! setup_ssl(logger, params) 60 | common_options.merge! setup_basic_auth(logger, params) 61 | client_settings[:headers].merge! setup_api_key(logger, params) 62 | 63 | external_version_types = ["external", "external_gt", "external_gte"] 64 | # External Version validation 65 | raise( 66 | LogStash::ConfigurationError, 67 | "External versioning requires the presence of a version number." 68 | ) if external_version_types.include?(params.fetch('version_type', '')) and params.fetch("version", nil) == nil 69 | 70 | 71 | # Create API setup 72 | raise( 73 | LogStash::ConfigurationError, 74 | "External versioning is not supported by the create action." 75 | ) if params['action'] == 'create' and external_version_types.include?(params.fetch('version_type', '')) 76 | 77 | # Update API setup 78 | raise( LogStash::ConfigurationError, 79 | "doc_as_upsert and scripted_upsert are mutually exclusive." 80 | ) if params["doc_as_upsert"] and params["scripted_upsert"] 81 | 82 | raise( 83 | LogStash::ConfigurationError, 84 | "Specifying action => 'update' needs a document_id." 
85 | ) if params['action'] == 'update' and params.fetch('document_id', '') == '' 86 | 87 | raise( 88 | LogStash::ConfigurationError, 89 | "External versioning is not supported by the update action. See https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html." 90 | ) if params['action'] == 'update' and external_version_types.include?(params.fetch('version_type', '')) 91 | 92 | # Update API setup 93 | update_options = { 94 | :doc_as_upsert => params["doc_as_upsert"], 95 | :script_var_name => params["script_var_name"], 96 | :script_type => params["script_type"], 97 | :script_lang => params["script_lang"], 98 | :scripted_upsert => params["scripted_upsert"] 99 | } 100 | common_options.merge! update_options if params["action"] == 'update' 101 | 102 | create_http_client(common_options.merge(:hosts => hosts, :logger => logger)) 103 | end 104 | 105 | def self.create_http_client(options) 106 | LogStash::Outputs::ElasticSearch::HttpClient.new(options) 107 | end 108 | 109 | def self.setup_ssl(logger, params) 110 | params["ssl_enabled"] = true if params["hosts"].any? {|h| h.scheme == "https" } 111 | return {} if params["ssl_enabled"].nil? 112 | 113 | return {:ssl => {:enabled => false}} if params["ssl_enabled"] == false 114 | 115 | ssl_certificate_authorities, ssl_truststore_path, ssl_certificate, ssl_keystore_path = params.values_at('ssl_certificate_authorities', 'ssl_truststore_path', 'ssl_certificate', 'ssl_keystore_path') 116 | 117 | if ssl_certificate_authorities && ssl_truststore_path 118 | raise LogStash::ConfigurationError, 'Use either "ssl_certificate_authorities/cacert" or "ssl_truststore_path/truststore" when configuring the CA certificate' 119 | end 120 | 121 | if ssl_certificate && ssl_keystore_path 122 | raise LogStash::ConfigurationError, 'Use either "ssl_certificate" or "ssl_keystore_path/keystore" when configuring client certificates' 123 | end 124 | 125 | ssl_options = {:enabled => true} 126 | 127 | if ssl_certificate_authorities&.any? 
128 | raise LogStash::ConfigurationError, 'Multiple values on "ssl_certificate_authorities" are not supported by this plugin' if ssl_certificate_authorities.size > 1 129 | ssl_options[:ca_file] = ssl_certificate_authorities.first 130 | end 131 | 132 | setup_ssl_store(ssl_options, 'truststore', params) 133 | setup_ssl_store(ssl_options, 'keystore', params) 134 | 135 | ssl_key = params["ssl_key"] 136 | if ssl_certificate 137 | raise LogStash::ConfigurationError, 'Using an "ssl_certificate" requires an "ssl_key"' unless ssl_key 138 | ssl_options[:client_cert] = ssl_certificate 139 | ssl_options[:client_key] = ssl_key 140 | elsif !ssl_key.nil? 141 | raise LogStash::ConfigurationError, 'An "ssl_certificate" is required when using an "ssl_key"' 142 | end 143 | 144 | ssl_verification_mode = params["ssl_verification_mode"] 145 | unless ssl_verification_mode.nil? 146 | case ssl_verification_mode 147 | when 'none' 148 | logger.warn "You have enabled encryption but DISABLED certificate verification, " + 149 | "to make sure your data is secure set `ssl_verification_mode => full`" 150 | ssl_options[:verify] = :disable 151 | else 152 | # Manticore's :default maps to Apache HTTP Client's DefaultHostnameVerifier, 153 | # which is the modern STRICT verifier that replaces the deprecated StrictHostnameVerifier 154 | ssl_options[:verify] = :default 155 | end 156 | end 157 | 158 | ssl_options[:cipher_suites] = params["ssl_cipher_suites"] if params.include?("ssl_cipher_suites") 159 | ssl_options[:trust_strategy] = params["ssl_trust_strategy"] if params.include?("ssl_trust_strategy") 160 | 161 | protocols = params['ssl_supported_protocols'] 162 | ssl_options[:protocols] = protocols if protocols && protocols.any? 
163 | 164 | { ssl: ssl_options } 165 | end 166 | 167 | # @param kind is a string [truststore|keystore] 168 | def self.setup_ssl_store(ssl_options, kind, params) 169 | store_path = params["ssl_#{kind}_path"] 170 | if store_path 171 | ssl_options[kind.to_sym] = store_path 172 | ssl_options["#{kind}_type".to_sym] = params["ssl_#{kind}_type"] if params.include?("ssl_#{kind}_type") 173 | ssl_options["#{kind}_password".to_sym] = params["ssl_#{kind}_password"].value if params.include?("ssl_#{kind}_password") 174 | end 175 | end 176 | 177 | def self.setup_basic_auth(logger, params) 178 | user, password = params["user"], params["password"] 179 | 180 | return {} unless user && password && password.value 181 | 182 | { 183 | :user => CGI.escape(user), 184 | :password => CGI.escape(password.value) 185 | } 186 | end 187 | 188 | def self.setup_api_key(logger, params) 189 | api_key = params["api_key"] 190 | 191 | return {} unless (api_key && api_key.value) 192 | 193 | { "Authorization" => "ApiKey " + Base64.strict_encode64(api_key.value) } 194 | end 195 | 196 | private 197 | def self.dedup_slashes(url) 198 | url.gsub(/\/+/, "/") 199 | end 200 | 201 | # Set a `filter_path` query parameter if it is not already set to be 202 | # `filter_path=errors,items.*.error,items.*.status` to reduce the payload between Logstash and Elasticsearch 203 | def self.resolve_filter_path(url) 204 | return url if url.match?(/(?:[&|?])filter_path=/) 205 | ("#{url}#{query_param_separator(url)}filter_path=errors,items.*.error,items.*.status") 206 | end 207 | 208 | def self.query_param_separator(url) 209 | url.match?(/\?[^\s#]+/) ? '&' : '?' 
210 | end 211 | end 212 | end; end; end 213 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/ilm.rb: -------------------------------------------------------------------------------- 1 | module LogStash; module Outputs; class ElasticSearch 2 | module Ilm 3 | 4 | ILM_POLICY_PATH = "default-ilm-policy.json" 5 | 6 | def setup_ilm 7 | logger.warn("Overwriting supplied index #{@index} with rollover alias #{@ilm_rollover_alias}") unless default_index?(@index) 8 | @index = @ilm_rollover_alias 9 | maybe_create_rollover_alias 10 | maybe_create_ilm_policy 11 | end 12 | 13 | def ilm_in_use? 14 | return @ilm_actually_enabled if defined?(@ilm_actually_enabled) 15 | @ilm_actually_enabled = 16 | begin 17 | if serverless? 18 | raise LogStash::ConfigurationError, "Invalid ILM configuration `ilm_enabled => true`. " + 19 | "Serverless Elasticsearch cluster does not support Index Lifecycle Management." if @ilm_enabled.to_s == 'true' 20 | @logger.info("ILM auto configuration (`ilm_enabled => auto` or unset) resolved to `false`. "\ 21 | "Serverless Elasticsearch cluster does not support Index Lifecycle Management.") if @ilm_enabled == 'auto' 22 | false 23 | elsif @ilm_enabled == 'auto' 24 | ilm_alias_set? 25 | elsif @ilm_enabled.to_s == 'true' 26 | ilm_alias_set? 27 | else 28 | false 29 | end 30 | end 31 | end 32 | 33 | private 34 | 35 | def ilm_alias_set? 36 | default_index?(@index) || !default_rollover_alias?(@ilm_rollover_alias) 37 | end 38 | 39 | def default_index?(index) 40 | index == @default_index 41 | end 42 | 43 | def default_rollover_alias?(rollover_alias) 44 | rollover_alias == default_ilm_rollover_alias 45 | end 46 | 47 | def ilm_policy_default? 48 | ilm_policy == LogStash::Outputs::ElasticSearch::DEFAULT_POLICY 49 | end 50 | 51 | def maybe_create_ilm_policy 52 | if ilm_policy_default? 
53 | client.ilm_policy_put(ilm_policy, policy_payload) unless client.ilm_policy_exists?(ilm_policy) 54 | else 55 | raise LogStash::ConfigurationError, "The specified ILM policy #{ilm_policy} does not exist on your Elasticsearch instance" unless client.ilm_policy_exists?(ilm_policy) 56 | end 57 | end 58 | 59 | def maybe_create_rollover_alias 60 | client.rollover_alias_put(rollover_alias_target, rollover_alias_payload) unless client.rollover_alias_exists?(ilm_rollover_alias) 61 | end 62 | 63 | def rollover_alias_target 64 | "<#{ilm_rollover_alias}-#{ilm_pattern}>" 65 | end 66 | 67 | def rollover_alias_payload 68 | { 69 | 'aliases' => { 70 | ilm_rollover_alias =>{ 71 | 'is_write_index' => true 72 | } 73 | } 74 | } 75 | end 76 | 77 | def policy_payload 78 | policy_path = ::File.expand_path(ILM_POLICY_PATH, ::File.dirname(__FILE__)) 79 | LogStash::Json.load(::IO.read(policy_path)) 80 | end 81 | end 82 | end; end; end 83 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/license_checker.rb: -------------------------------------------------------------------------------- 1 | module LogStash; module Outputs; class ElasticSearch 2 | class LicenseChecker 3 | 4 | def initialize(logger) 5 | @logger = logger 6 | end 7 | 8 | # Figure out if the provided license is appropriate or not 9 | # The appropriate_license? method is the method called from LogStash::Outputs::ElasticSearch::HttpClient::Pool#healthcheck! 10 | # @param pool 11 | # @param url [LogStash::Util::SafeURI] ES node URL 12 | # @return [Boolean] true if provided license is deemed appropriate 13 | def appropriate_license?(pool, url) 14 | return true if pool.serverless? 
15 | 16 | license = extract_license(pool.get_license(url)) 17 | case license_status(license) 18 | when 'active' 19 | true 20 | when nil 21 | warn_no_license(url) 22 | false 23 | else # 'invalid', 'expired' 24 | warn_invalid_license(url, license) 25 | true 26 | end 27 | end 28 | 29 | NO_LICENSE = {}.freeze 30 | private_constant :NO_LICENSE 31 | 32 | def extract_license(license) 33 | license.fetch("license", NO_LICENSE) 34 | end 35 | 36 | def license_status(license) 37 | license.fetch("status", nil) 38 | end 39 | 40 | private 41 | 42 | def warn_no_license(url) 43 | @logger.error("Could not connect to a compatible version of Elasticsearch", url: url.sanitized.to_s) 44 | end 45 | 46 | def warn_invalid_license(url, license) 47 | @logger.warn("Elasticsearch license is not active, please check Elasticsearch’s licensing information", 48 | url: url.sanitized.to_s, license: license) 49 | end 50 | 51 | end 52 | end; end; end 53 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/template_manager.rb: -------------------------------------------------------------------------------- 1 | module LogStash; module Outputs; class ElasticSearch 2 | class TemplateManager 3 | LEGACY_TEMPLATE_ENDPOINT = '_template'.freeze 4 | INDEX_TEMPLATE_ENDPOINT = '_index_template'.freeze 5 | 6 | # To be mixed into the elasticsearch plugin base 7 | def self.install_template(plugin) 8 | return unless plugin.manage_template 9 | 10 | if plugin.maximum_seen_major_version < 8 && plugin.template_api == 'auto' 11 | plugin.logger.warn("`template_api => auto` resolved to `legacy` since we are connected to " + "Elasticsearch #{plugin.maximum_seen_major_version}, " + 12 | "but will resolve to `composable` the first time it connects to Elasticsearch 8+. 
" + 13 | "We recommend either setting `template_api => legacy` to continue providing legacy-style templates, " + 14 | "or migrating your template to the composable style and setting `template_api => composable`. " + 15 | "The legacy template API is slated for removal in Elasticsearch 9.") 16 | elsif plugin.template_api == 'legacy' && plugin.serverless? 17 | raise LogStash::ConfigurationError, "Invalid template configuration `template_api => legacy`. Serverless Elasticsearch does not support legacy template API." 18 | end 19 | 20 | 21 | if plugin.template 22 | plugin.logger.info("Using mapping template from", :path => plugin.template) 23 | template = read_template_file(plugin.template) 24 | else 25 | plugin.logger.info("Using a default mapping template", :es_version => plugin.maximum_seen_major_version, 26 | :ecs_compatibility => plugin.ecs_compatibility) 27 | template = load_default_template(plugin.maximum_seen_major_version, plugin.ecs_compatibility) 28 | end 29 | 30 | add_ilm_settings_to_template(plugin, template) if plugin.ilm_in_use? 31 | plugin.logger.debug("Attempting to install template", template: template) 32 | install(plugin.client, template_endpoint(plugin), template_name(plugin), template, plugin.template_overwrite) 33 | end 34 | 35 | private 36 | def self.load_default_template(es_major_version, ecs_compatibility) 37 | template_path = default_template_path(es_major_version, ecs_compatibility) 38 | read_template_file(template_path) 39 | rescue => e 40 | raise LogStash::ConfigurationError, "Failed to load default template for Elasticsearch v#{es_major_version} with ECS #{ecs_compatibility}; caused by: #{e.inspect}" 41 | end 42 | 43 | def self.install(client, template_endpoint, template_name, template, template_overwrite) 44 | client.template_install(template_endpoint, template_name, template, template_overwrite) 45 | end 46 | 47 | def self.add_ilm_settings_to_template(plugin, template) 48 | # Overwrite any index patterns, and use the rollover alias. 
Use 'index_patterns' rather than 'template' for pattern 49 | # definition - remove any existing definition of 'template' 50 | template.delete('template') if template.include?('template') if plugin.maximum_seen_major_version == 7 51 | template['index_patterns'] = "#{plugin.ilm_rollover_alias}-*" 52 | settings = resolve_template_settings(plugin, template) 53 | if settings && (settings['index.lifecycle.name'] || settings['index.lifecycle.rollover_alias']) 54 | plugin.logger.info("Overwriting index lifecycle name and rollover alias as ILM is enabled") 55 | end 56 | settings.update({ 'index.lifecycle.name' => plugin.ilm_policy, 'index.lifecycle.rollover_alias' => plugin.ilm_rollover_alias}) 57 | end 58 | 59 | def self.resolve_template_settings(plugin, template) 60 | if template.key?('template') 61 | plugin.logger.trace("Resolving ILM template settings: under 'template' key", :template => template, :template_api => plugin.template_api, :es_version => plugin.maximum_seen_major_version) 62 | composable_index_template_settings(template) 63 | elsif template.key?('settings') 64 | plugin.logger.trace("Resolving ILM template settings: under 'settings' key", :template => template, :template_api => plugin.template_api, :es_version => plugin.maximum_seen_major_version) 65 | legacy_index_template_settings(template) 66 | else 67 | use_index_template_api = index_template_api?(plugin) 68 | plugin.logger.trace("Resolving ILM template settings: template doesn't have 'settings' or 'template' fields, falling back to auto detection", :template => template, :template_api => plugin.template_api, :es_version => plugin.maximum_seen_major_version, :index_template_api => use_index_template_api) 69 | if use_index_template_api 70 | composable_index_template_settings(template) 71 | else 72 | legacy_index_template_settings(template) 73 | end 74 | end 75 | end 76 | 77 | # Sets ['settings'] field to be compatible with _template API structure 78 | def self.legacy_index_template_settings(template) 79 | 
template['settings'] ||= {} 80 | end 81 | 82 | # Sets the ['template']['settings'] fields, if they do not exist, to be compatible with the _index_template API structure 83 | def self.composable_index_template_settings(template) 84 | template['template'] ||= {} 85 | template['template']['settings'] ||= {} 86 | end 87 | 88 | # Template name - if template_name set, use it 89 | # if not and ILM is enabled, use the rollover alias 90 | # else use the default value of template_name 91 | def self.template_name(plugin) 92 | plugin.ilm_in_use? && !plugin.original_params.key?('template_name') ? plugin.ilm_rollover_alias : plugin.template_name 93 | end 94 | 95 | def self.default_template_path(es_major_version, ecs_compatibility=:disabled) 96 | template_version = es_major_version 97 | default_template_name = "templates/ecs-#{ecs_compatibility}/elasticsearch-#{template_version}x.json" 98 | ::File.expand_path(default_template_name, ::File.dirname(__FILE__)) 99 | end 100 | 101 | def self.read_template_file(template_path) 102 | raise LogStash::ConfigurationError, "Template file '#{template_path}' could not be found" unless ::File.exists?(template_path) 103 | template_data = ::IO.read(template_path) 104 | LogStash::Json.load(template_data) 105 | rescue => e 106 | raise LogStash::ConfigurationError, "Failed to load template file '#{template_path}': #{e.message}" 107 | end 108 | 109 | def self.template_endpoint(plugin) 110 | index_template_api?(plugin) ? INDEX_TEMPLATE_ENDPOINT : LEGACY_TEMPLATE_ENDPOINT 111 | end 112 | 113 | def self.index_template_api?(plugin) 114 | case plugin.serverless? 
115 | when true 116 | true 117 | else 118 | case plugin.template_api 119 | when 'auto' 120 | plugin.maximum_seen_major_version >= 8 121 | when 'composable' 122 | true 123 | when 'legacy' 124 | false 125 | else 126 | plugin.logger.warn("Invalid template_api value #{plugin.template_api}") 127 | true 128 | end 129 | end 130 | end 131 | 132 | end 133 | end end end 134 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-7x.json: -------------------------------------------------------------------------------- 1 | { 2 | "index_patterns" : "logstash-*", 3 | "version" : 60001, 4 | "settings" : { 5 | "index.refresh_interval" : "5s", 6 | "number_of_shards": 1 7 | }, 8 | "mappings" : { 9 | "dynamic_templates" : [ { 10 | "message_field" : { 11 | "path_match" : "message", 12 | "match_mapping_type" : "string", 13 | "mapping" : { 14 | "type" : "text", 15 | "norms" : false 16 | } 17 | } 18 | }, { 19 | "string_fields" : { 20 | "match" : "*", 21 | "match_mapping_type" : "string", 22 | "mapping" : { 23 | "type" : "text", "norms" : false, 24 | "fields" : { 25 | "keyword" : { "type": "keyword", "ignore_above": 256 } 26 | } 27 | } 28 | } 29 | } ], 30 | "properties" : { 31 | "@timestamp": { "type": "date"}, 32 | "@version": { "type": "keyword"}, 33 | "geoip" : { 34 | "dynamic": true, 35 | "properties" : { 36 | "ip": { "type": "ip" }, 37 | "location" : { "type" : "geo_point" }, 38 | "latitude" : { "type" : "half_float" }, 39 | "longitude" : { "type" : "half_float" } 40 | } 41 | } 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-8x.json: -------------------------------------------------------------------------------- 1 | { 2 | "index_patterns" : "logstash-*", 3 | "version" : 80001, 4 | "template" : { 5 | "settings" : { 6 | "index.refresh_interval" : "5s", 7 | 
"number_of_shards": 1 8 | }, 9 | "mappings" : { 10 | "dynamic_templates" : [ { 11 | "message_field" : { 12 | "path_match" : "message", 13 | "match_mapping_type" : "string", 14 | "mapping" : { 15 | "type" : "text", 16 | "norms" : false 17 | } 18 | } 19 | }, { 20 | "string_fields" : { 21 | "match" : "*", 22 | "match_mapping_type" : "string", 23 | "mapping" : { 24 | "type" : "text", "norms" : false, 25 | "fields" : { 26 | "keyword" : { "type": "keyword", "ignore_above": 256 } 27 | } 28 | } 29 | } 30 | } ], 31 | "properties" : { 32 | "@timestamp": { "type": "date" }, 33 | "@version": { "type": "keyword" }, 34 | "geoip" : { 35 | "dynamic": true, 36 | "properties" : { 37 | "ip": { "type": "ip" }, 38 | "location" : { "type" : "geo_point" }, 39 | "latitude" : { "type" : "half_float" }, 40 | "longitude" : { "type" : "half_float" } 41 | } 42 | } 43 | } 44 | } 45 | }, 46 | "priority": 200, 47 | "_meta" : { 48 | "description": "index template for logstash-output-elasticsearch" 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-9x.json: -------------------------------------------------------------------------------- 1 | { 2 | "index_patterns" : "logstash-*", 3 | "version" : 80001, 4 | "template" : { 5 | "settings" : { 6 | "index.refresh_interval" : "5s", 7 | "number_of_shards": 1 8 | }, 9 | "mappings" : { 10 | "dynamic_templates" : [ { 11 | "message_field" : { 12 | "path_match" : "message", 13 | "match_mapping_type" : "string", 14 | "mapping" : { 15 | "type" : "text", 16 | "norms" : false 17 | } 18 | } 19 | }, { 20 | "string_fields" : { 21 | "match" : "*", 22 | "match_mapping_type" : "string", 23 | "mapping" : { 24 | "type" : "text", "norms" : false, 25 | "fields" : { 26 | "keyword" : { "type": "keyword", "ignore_above": 256 } 27 | } 28 | } 29 | } 30 | } ], 31 | "properties" : { 32 | "@timestamp": { "type": "date" }, 33 | "@version": { "type": "keyword" }, 
34 | "geoip" : { 35 | "dynamic": true, 36 | "properties" : { 37 | "ip": { "type": "ip" }, 38 | "location" : { "type" : "geo_point" }, 39 | "latitude" : { "type" : "half_float" }, 40 | "longitude" : { "type" : "half_float" } 41 | } 42 | } 43 | } 44 | } 45 | }, 46 | "priority": 200, 47 | "_meta" : { 48 | "description": "index template for logstash-output-elasticsearch" 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /lib/logstash/plugin_mixins/elasticsearch/noop_license_checker.rb: -------------------------------------------------------------------------------- 1 | module LogStash; module PluginMixins; module ElasticSearch 2 | class NoopLicenseChecker 3 | INSTANCE = self.new 4 | 5 | def appropriate_license?(pool, url) 6 | true 7 | end 8 | end 9 | end; end; end 10 | -------------------------------------------------------------------------------- /logstash-output-elasticsearch.gemspec: -------------------------------------------------------------------------------- 1 | Gem::Specification.new do |s| 2 | s.name = 'logstash-output-elasticsearch' 3 | s.version = '12.0.3' 4 | s.licenses = ['apache-2.0'] 5 | s.summary = "Stores logs in Elasticsearch" 6 | s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. 
This gem is not a stand-alone program" 7 | s.authors = ["Elastic"] 8 | s.email = 'info@elastic.co' 9 | s.homepage = "https://www.elastic.co/guide/en/logstash/current/index.html" 10 | s.require_paths = ["lib"] 11 | 12 | s.platform = RUBY_PLATFORM 13 | 14 | # Files 15 | s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"] 16 | 17 | # Tests 18 | s.test_files = s.files.grep(%r{^(test|spec|features)/}) 19 | 20 | # Special flag to let us know this is actually a logstash plugin 21 | s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" } 22 | 23 | s.add_runtime_dependency "manticore", '>= 0.8.0', '< 1.0.0' 24 | s.add_runtime_dependency 'stud', ['>= 0.0.17', '~> 0.0'] 25 | s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99" 26 | s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~>1.0' 27 | s.add_runtime_dependency 'logstash-mixin-deprecation_logger_support', '~>1.0' 28 | s.add_runtime_dependency 'logstash-mixin-ca_trusted_fingerprint_support', '~>1.0' 29 | s.add_runtime_dependency 'logstash-mixin-normalize_config_support', '~>1.0' 30 | 31 | s.add_development_dependency 'logstash-codec-plain' 32 | s.add_development_dependency 'logstash-devutils' 33 | s.add_development_dependency 'flores' 34 | s.add_development_dependency 'cabin', ['~> 0.6'] 35 | s.add_development_dependency 'webrick' 36 | s.add_development_dependency 'webmock' 37 | s.add_development_dependency 'rspec-collection_matchers' 38 | # Still used in some specs, we should remove this ASAP 39 | s.add_development_dependency 'elasticsearch' 40 | end 41 | -------------------------------------------------------------------------------- /spec/es_spec_helper.rb: -------------------------------------------------------------------------------- 1 | require_relative './spec_helper' 2 | 3 | require 'elasticsearch' 4 | 5 | 
require 'json' 6 | require 'cabin' 7 | 8 | # remove this condition and support package once plugin starts consuming elasticsearch-ruby v8 client 9 | # in elasticsearch-ruby v7, ILM APIs were in a separate xpack gem, now directly available 10 | unless elastic_ruby_v8_client_available? 11 | require_relative "support/elasticsearch/api/actions/delete_ilm_policy" 12 | require_relative "support/elasticsearch/api/actions/get_ilm_policy" 13 | require_relative "support/elasticsearch/api/actions/put_ilm_policy" 14 | end 15 | 16 | module ESHelper 17 | def get_host_port 18 | if ENV["INTEGRATION"] == "true" 19 | "elasticsearch:9200" 20 | else 21 | "localhost:9200" 22 | end 23 | end 24 | 25 | def get_client 26 | if elastic_ruby_v8_client_available? 27 | Elasticsearch::Client.new(:hosts => [get_host_port]) 28 | else 29 | Elasticsearch::Client.new(:hosts => [get_host_port]).tap do |client| 30 | allow(client).to receive(:verify_elasticsearch).and_return(true) # bypass client side version checking 31 | end 32 | end 33 | end 34 | 35 | def doc_type 36 | if ESHelper.es_version_satisfies?(">=8") 37 | nil 38 | elsif ESHelper.es_version_satisfies?(">=7") 39 | "_doc" 40 | end 41 | end 42 | 43 | def self.action_for_version(action) 44 | action_params = action[1] 45 | if ESHelper.es_version_satisfies?(">=8") 46 | action_params.delete(:_type) 47 | end 48 | action[1] = action_params 49 | action 50 | end 51 | 52 | def todays_date 53 | Time.now.strftime("%Y.%m.%d") 54 | end 55 | 56 | def field_properties_from_template(template_name, field) 57 | template = get_template(@es, template_name) 58 | mappings = get_template_mappings(template) 59 | mappings["properties"][field]["properties"] 60 | end 61 | 62 | def routing_field_name 63 | :routing 64 | end 65 | 66 | def self.es_version 67 | { 68 | "number" => [ 69 | nilify(RSpec.configuration.filter[:es_version]), 70 | nilify(ENV['ES_VERSION']), 71 | nilify(ENV['ELASTIC_STACK_VERSION']), 72 | ].compact.first, 73 | "build_flavor" => 'default' 74 | } 75 | 
end 76 | 77 | RSpec::Matchers.define :have_hits do |expected| 78 | hits_count_path = %w(hits total value) 79 | 80 | match do |actual| 81 | @actual_hits_count = actual&.dig(*hits_count_path) 82 | values_match? expected, @actual_hits_count 83 | end 84 | failure_message do |actual| 85 | "expected that #{actual} with #{@actual_hits_count || "UNKNOWN" } hits would have #{expected} hits" 86 | end 87 | end 88 | 89 | RSpec::Matchers.define :have_index_pattern do |expected| 90 | match do |actual| 91 | @actual_index_pattterns = Array(actual['index_patterns'].nil? ? actual['template'] : actual['index_patterns']) 92 | @actual_index_pattterns.any? { |v| values_match? expected, v } 93 | end 94 | failure_message do |actual| 95 | "expected that #{actual} with index patterns #{@actual_index_pattterns} would have included `#{expected}`" 96 | end 97 | end 98 | 99 | def self.es_version_satisfies?(*requirement) 100 | es_version = nilify(RSpec.configuration.filter[:es_version]) || nilify(ENV['ES_VERSION']) || nilify(ENV['ELASTIC_STACK_VERSION']) 101 | if es_version.nil? 102 | puts "Info: ES_VERSION, ELASTIC_STACK_VERSION or 'es_version' tag wasn't set. Returning false to all `es_version_satisfies?` call." 103 | return false 104 | end 105 | es_release_version = Gem::Version.new(es_version).release 106 | Gem::Requirement.new(requirement).satisfied_by?(es_release_version) 107 | end 108 | 109 | private 110 | def self.nilify(str) 111 | if str.nil? 112 | return str 113 | end 114 | str.empty? ? nil : str 115 | end 116 | 117 | public 118 | def clean(client) 119 | client.indices.delete_template(:name => "*") 120 | client.indices.delete_index_template(:name => "logstash*") rescue nil 121 | # This can fail if there are no indexes, ignore failure. 
122 | client.indices.delete(:index => "*") rescue nil 123 | clean_ilm(client) if supports_ilm?(client) 124 | end 125 | 126 | def set_cluster_settings(client, cluster_settings) 127 | client.cluster.put_settings(body: cluster_settings) 128 | get_cluster_settings(client) 129 | end 130 | 131 | def get_cluster_settings(client) 132 | client.cluster.get_settings 133 | end 134 | 135 | def get_policy(client, policy_name) 136 | if elastic_ruby_v8_client_available? 137 | client.index_lifecycle_management.get_lifecycle(policy: policy_name) 138 | else 139 | client.get_ilm_policy(name: policy_name) 140 | end 141 | end 142 | 143 | def put_policy(client, policy_name, policy) 144 | if elastic_ruby_v8_client_available? 145 | client.index_lifecycle_management.put_lifecycle({:policy => policy_name, :body=> policy}) 146 | else 147 | client.put_ilm_policy({:name => policy_name, :body=> policy}) 148 | end 149 | end 150 | 151 | def clean_ilm(client) 152 | if elastic_ruby_v8_client_available? 153 | client.index_lifecycle_management.get_lifecycle.each_key { |key| client.index_lifecycle_management.delete_lifecycle(policy: key) if key =~ /logstash-policy/ } 154 | else 155 | client.get_ilm_policy.each_key { |key| client.delete_ilm_policy(name: key) if key =~ /logstash-policy/ } 156 | end 157 | end 158 | 159 | def supports_ilm?(client) 160 | begin 161 | if elastic_ruby_v8_client_available? 
162 | client.index_lifecycle_management.get_lifecycle 163 | else 164 | client.get_ilm_policy 165 | end 166 | true 167 | rescue 168 | false 169 | end 170 | end 171 | 172 | def max_docs_policy(max_docs) 173 | { 174 | "policy" => { 175 | "phases"=> { 176 | "hot" => { 177 | "actions" => { 178 | "rollover" => { 179 | "max_docs" => max_docs 180 | } 181 | } 182 | } 183 | } 184 | } 185 | } 186 | end 187 | 188 | def max_age_policy(max_age) 189 | { 190 | "policy" => { 191 | "phases"=> { 192 | "hot" => { 193 | "actions" => { 194 | "rollover" => { 195 | "max_age" => max_age 196 | } 197 | } 198 | } 199 | } 200 | } 201 | } 202 | end 203 | 204 | def get_template(client, name) 205 | if ESHelper.es_version_satisfies?(">=8") 206 | t = client.indices.get_index_template(name: name) 207 | t['index_templates'][0]['index_template'] 208 | else 209 | t = client.indices.get_template(name: name) 210 | t[name] 211 | end 212 | end 213 | 214 | def get_template_settings(template) 215 | if ESHelper.es_version_satisfies?(">=8") 216 | template['template']['settings'] 217 | else 218 | template['settings'] 219 | end 220 | end 221 | 222 | def get_template_mappings(template) 223 | if ESHelper.es_version_satisfies?(">=8") 224 | template['template']['mappings'] 225 | elsif ESHelper.es_version_satisfies?(">=7") 226 | template['mappings'] 227 | end 228 | end 229 | end 230 | 231 | RSpec.configure do |config| 232 | config.include ESHelper 233 | end 234 | -------------------------------------------------------------------------------- /spec/fixtures/_nodes/7x.json: -------------------------------------------------------------------------------- 1 | { 2 | "_nodes" : { 3 | "total" : 3, 4 | "successful" : 3, 5 | "failed" : 0 6 | }, 7 | "cluster_name" : "elasticsearch", 8 | "nodes" : { 9 | "kVPTh7ZvSgWmTRMy-4YExQ" : { 10 | "name" : "kVPTh7Z", 11 | "transport_address" : "127.0.0.1:9300", 12 | "host" : "dev-master", 13 | "ip" : "127.0.0.1", 14 | "version" : "7.0.0", 15 | "build_flavor" : "default", 16 | 
"build_type" : "tar", 17 | "build_hash" : "b0e7036", 18 | "roles" : [ 19 | "master" 20 | ], 21 | "attributes" : { 22 | "ml.machine_memory" : "17179869184", 23 | "xpack.installed" : "true", 24 | "ml.max_open_jobs" : "20", 25 | "ml.enabled" : "true" 26 | }, 27 | "http" : { 28 | "bound_address" : [ 29 | "127.0.0.1:9200", 30 | "[::1]:9200" 31 | ], 32 | "publish_address" : "dev-master/127.0.0.1:9200", 33 | "max_content_length_in_bytes" : 104857600 34 | } 35 | }, 36 | "J47OFlfpSHGFwRJSF2hbcg" : { 37 | "name" : "J47OFlf", 38 | "transport_address" : "127.0.0.1:9301", 39 | "host" : "dev-masterdata", 40 | "ip" : "127.0.0.1", 41 | "version" : "7.0.0", 42 | "build_flavor" : "default", 43 | "build_type" : "tar", 44 | "build_hash" : "b0e7036", 45 | "roles" : [ 46 | "master", 47 | "data" 48 | ], 49 | "attributes" : { 50 | "ml.machine_memory" : "17179869184", 51 | "ml.max_open_jobs" : "20", 52 | "xpack.installed" : "true", 53 | "ml.enabled" : "true" 54 | }, 55 | "http" : { 56 | "bound_address" : [ 57 | "127.0.0.1:9201", 58 | "[::1]:9201" 59 | ], 60 | "publish_address" : "dev-masterdata/127.0.0.1:9201", 61 | "max_content_length_in_bytes" : 104857600 62 | } 63 | }, 64 | "pDYE99f0QmutVb8gvsf-yw" : { 65 | "name" : "pDYE99f", 66 | "transport_address" : "127.0.0.1:9302", 67 | "host" : "dev-data", 68 | "ip" : "127.0.0.1", 69 | "version" : "7.0.0", 70 | "build_flavor" : "default", 71 | "build_type" : "tar", 72 | "build_hash" : "b0e7036", 73 | "roles" : [ 74 | "data" 75 | ], 76 | "attributes" : { 77 | "ml.machine_memory" : "17179869184", 78 | "ml.max_open_jobs" : "20", 79 | "xpack.installed" : "true", 80 | "ml.enabled" : "true" 81 | }, 82 | "http" : { 83 | "bound_address" : [ 84 | "127.0.0.1:9202", 85 | "[::1]:9202" 86 | ], 87 | "publish_address" : "dev-data/127.0.0.1:9202", 88 | "max_content_length_in_bytes" : 104857600 89 | } 90 | } 91 | } 92 | } -------------------------------------------------------------------------------- /spec/fixtures/htpasswd: 
-------------------------------------------------------------------------------- 1 | fancyuser:$apr1$Eq3/Qh40$MRzg6mccKUVmx8HJvlqkK1 2 | simpleuser:$apr1$hQQ4QWmo$ECyA1DFO3iCRs07zVXqAq1 3 | -------------------------------------------------------------------------------- /spec/fixtures/license_check/active.json: -------------------------------------------------------------------------------- 1 | { 2 | "license": { 3 | "status": "active", 4 | "uid": "d85d2c6a-b96d-3cc6-96db-5571a789b156", 5 | "type": "enterprise", 6 | "issue_date": "1970-01-01T00:00:00.000Z", 7 | "issue_date_in_millis": 0, 8 | "expiry_date": "2100-01-01T00:00:00.000Z", 9 | "expiry_date_in_millis": 4102444800000, 10 | "max_nodes": null, 11 | "max_resource_units": 100000, 12 | "issued_to": "Elastic Cloud", 13 | "issuer": "API", 14 | "start_date_in_millis": 0 15 | } 16 | } -------------------------------------------------------------------------------- /spec/fixtures/license_check/inactive.json: -------------------------------------------------------------------------------- 1 | { 2 | "license": { 3 | "status": "inactive" 4 | } 5 | } -------------------------------------------------------------------------------- /spec/fixtures/nginx_reverse_proxy.conf: -------------------------------------------------------------------------------- 1 | worker_processes 1; 2 | daemon off; # run in foreground 3 | 4 | events { 5 | worker_connections 1024; 6 | } 7 | 8 | http { 9 | server { 10 | listen 9900 default_server; 11 | ssl on; 12 | ssl_certificate server.crt; 13 | ssl_certificate_key server.key; 14 | client_max_body_size 200m; 15 | 16 | location / { 17 | proxy_pass http://localhost:9200; 18 | auth_basic "Restricted Content"; 19 | auth_basic_user_file htpasswd; 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /spec/fixtures/scripts/painless/scripted_update.painless: -------------------------------------------------------------------------------- 1 | 
ctx._source.counter += params.event.count 2 | 3 | -------------------------------------------------------------------------------- /spec/fixtures/scripts/painless/scripted_update_nested.painless: -------------------------------------------------------------------------------- 1 | ctx._source.counter += params.event.data.count 2 | -------------------------------------------------------------------------------- /spec/fixtures/scripts/painless/scripted_upsert.painless: -------------------------------------------------------------------------------- 1 | ctx._source.counter = params.event.counter 2 | -------------------------------------------------------------------------------- /spec/fixtures/template-with-policy-es7x.json: -------------------------------------------------------------------------------- 1 | { 2 | "index_patterns" : "overwrite-*", 3 | "version" : 60001, 4 | "settings" : { 5 | "index.refresh_interval" : "1s", 6 | "number_of_shards": 1 7 | }, 8 | "mappings" : { 9 | "dynamic_templates" : [ { 10 | "message_field" : { 11 | "path_match" : "message", 12 | "match_mapping_type" : "string", 13 | "mapping" : { 14 | "type" : "text", 15 | "norms" : false 16 | } 17 | } 18 | }, { 19 | "string_fields" : { 20 | "match" : "*", 21 | "match_mapping_type" : "string", 22 | "mapping" : { 23 | "type" : "text", "norms" : false, 24 | "fields" : { 25 | "keyword" : { "type": "keyword", "ignore_above": 256 } 26 | } 27 | } 28 | } 29 | } ], 30 | "properties" : { 31 | "@timestamp": { "type": "date"}, 32 | "@version": { "type": "keyword"}, 33 | "geoip" : { 34 | "dynamic": true, 35 | "properties" : { 36 | "ip": { "type": "ip" }, 37 | "location" : { "type" : "geo_point" }, 38 | "latitude" : { "type" : "half_float" }, 39 | "longitude" : { "type" : "half_float" } 40 | } 41 | } 42 | } 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /spec/fixtures/template-with-policy-es8x.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "index_patterns" : "overwrite-*", 3 | "version" : 80001, 4 | "template" : { 5 | "settings" : { 6 | "index.refresh_interval" : "1s", 7 | "number_of_shards": 1 8 | }, 9 | "mappings" : { 10 | "dynamic_templates" : [ { 11 | "message_field" : { 12 | "path_match" : "message", 13 | "match_mapping_type" : "string", 14 | "mapping" : { 15 | "type" : "text", 16 | "norms" : false 17 | } 18 | } 19 | }, { 20 | "string_fields" : { 21 | "match" : "*", 22 | "match_mapping_type" : "string", 23 | "mapping" : { 24 | "type" : "text", "norms" : false, 25 | "fields" : { 26 | "keyword" : { "type": "keyword", "ignore_above": 256 } 27 | } 28 | } 29 | } 30 | } ], 31 | "properties" : { 32 | "@timestamp": { "type": "date" }, 33 | "@version": { "type": "keyword" }, 34 | "geoip" : { 35 | "dynamic": true, 36 | "properties" : { 37 | "ip": { "type": "ip" }, 38 | "location" : { "type" : "geo_point" }, 39 | "latitude" : { "type" : "half_float" }, 40 | "longitude" : { "type" : "half_float" } 41 | } 42 | } 43 | } 44 | } 45 | }, 46 | "priority": 200, 47 | "_meta" : { 48 | "description": "index template for logstash-output-elasticsearch" 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/GENERATED_AT: -------------------------------------------------------------------------------- 1 | 2024-06-25T21:50:58+01:00 2 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/ca.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIFDDCCAvQCAQEwDQYJKoZIhvcNAQELBQAwTDELMAkGA1UEBhMCUFQxCzAJBgNV 3 | BAgMAk5BMQ8wDQYDVQQHDAZMaXNib24xDjAMBgNVBAoMBU15TGFiMQ8wDQYDVQQD 4 | DAZSb290Q0EwHhcNMjQwNjI1MjA1MDU4WhcNMjUwNjI1MjA1MDU4WjBMMQswCQYD 5 | VQQGEwJQVDELMAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UECgwF 6 | 
TXlMYWIxDzANBgNVBAMMBlJvb3RDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC 7 | AgoCggIBAMtTMqAWuH17b9XqPa5L3HNqgnZ958+gvcOt7Q/sOEvcDQJgkzZ+Gywh 8 | 5er5JF2iomYOHiD5JncYr4YmRQKuYfD6B1WI5FuQthD/OlA1/RHqtbY27J33SaO6 9 | 6ro8gal7vjHrXKQkefVYRwdfO6DqqbhV6L4sMiy8FzQ55TMpoM35cWuvoAMxvSQq 10 | GZ4pYYKnfNSGhzHvssfNS1xu/Lwb7Vju4jPhp+43BkGwEimI5km7jNC1nwjiHtxD 11 | sY/s93AKa/vLktXKUK5nA3jjJOhAbRTVnbOAgxFt0YbX98xW/aUqscgBUVs9J/My 12 | TRMwVKJ7Vsmth1PdJQksUASuzESlSPl09dMjTQ+MXzJDt0JvX8SIJPmbBng78MSa 13 | CUhpOZiii1l2mBfPWejx20I/SMCUNmzbwm2w9JD50Jv2iX4l4ge4H1CIK1/orW1p 14 | dY9xPL0uKYm6ADsDC0B8sGgNMBXeB6aLojY1/ITwmmfpfk9c/yWPfC7stHgCYRAv 15 | 5MfGAsmv0/ya5VrWQGBJkFiYy1pon6nxUjCbgn0RABojRoGdhhY3QDipgwmSgFZx 16 | r064RFr1bt/Ml3MJmPf535mSwPdk/j/zw4IZTvlmwKW3FyMDhwYL/zX7J0c6MzMP 17 | LEdi73Qjzmr3ENIrir4O86wNz81YRfYkg9ZX8yKJK9LBAUrYCjJ3AgMBAAEwDQYJ 18 | KoZIhvcNAQELBQADggIBABym9LMyS9W9lvpcH4OK1YLfBPJwrhZ+4keiriY4zWOo 19 | pB+v2Q35neMMXSlTDpeIwPdMkqsh8VZprOWURF80JGvpJ6fBfi05rCDWp/ol1ZKi 20 | snCA+dE2zDK7Z3+F0MbakT5oBi5WgkXSvRvlJEJ/gBD7WC1wq0kxCMK+M5w2RPAT 21 | nnV/iozNBkwExxyJA7BpS6F/v0XjwK7fm5Kpql7zKlh5piZ2IVU0B60Sqskcb2mU 22 | 90+1r9T06ekIW/Iz1jd5RWYziu0nbmDeKeKvGAICNU+evYXW+/5kKecMLuEvDCgS 23 | ssbt/Hb510uLHhxfhN4SbvBl2zADsLC+2arf2ATIwD8ZXDDs04ayBsejV0ZwVrTZ 24 | ExKqAys+B3tuIHGRqL5VukdmH6g6oJziYueohPBCOuSOzDd0FhppF4uXZS8DReSg 25 | KieO2ZYfiA1gVRiY6jPx+r7J9I5kSS1gwr/e3zHJHa79ijMB1SSIswQUmgSMkwGh 26 | sNyDNI9ZxgJan3v7kVargMt2LiNcXvVyTzPSYSXcY7SoebfpMprVIG7vZ9TZf+Uu 27 | FQeOfxdLFuGTnpFrYmvOD3OIKfODlY5t+TNICg7A3eTUXeJPcdBBnuVCiQU6TCB5 28 | H+69K5w54Q6a70sHZU1IWsGT8XtbUizPNQky+LAFsE/5oUnCqtypeEu4srcZK53x 29 | -----END CERTIFICATE----- 30 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/ca.der.sha256: -------------------------------------------------------------------------------- 1 | 8b23238088af65cbae6ee9c23821068d896ec1dad081e2a1035ff70866943247 2 | -------------------------------------------------------------------------------- 
/spec/fixtures/test_certs/ca.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIJKAIBAAKCAgEAy1MyoBa4fXtv1eo9rkvcc2qCdn3nz6C9w63tD+w4S9wNAmCT 3 | Nn4bLCHl6vkkXaKiZg4eIPkmdxivhiZFAq5h8PoHVYjkW5C2EP86UDX9Eeq1tjbs 4 | nfdJo7rqujyBqXu+MetcpCR59VhHB187oOqpuFXoviwyLLwXNDnlMymgzflxa6+g 5 | AzG9JCoZnilhgqd81IaHMe+yx81LXG78vBvtWO7iM+Gn7jcGQbASKYjmSbuM0LWf 6 | COIe3EOxj+z3cApr+8uS1cpQrmcDeOMk6EBtFNWds4CDEW3Rhtf3zFb9pSqxyAFR 7 | Wz0n8zJNEzBUontWya2HU90lCSxQBK7MRKVI+XT10yNND4xfMkO3Qm9fxIgk+ZsG 8 | eDvwxJoJSGk5mKKLWXaYF89Z6PHbQj9IwJQ2bNvCbbD0kPnQm/aJfiXiB7gfUIgr 9 | X+itbWl1j3E8vS4piboAOwMLQHywaA0wFd4HpouiNjX8hPCaZ+l+T1z/JY98Luy0 10 | eAJhEC/kx8YCya/T/JrlWtZAYEmQWJjLWmifqfFSMJuCfREAGiNGgZ2GFjdAOKmD 11 | CZKAVnGvTrhEWvVu38yXcwmY9/nfmZLA92T+P/PDghlO+WbApbcXIwOHBgv/Nfsn 12 | RzozMw8sR2LvdCPOavcQ0iuKvg7zrA3PzVhF9iSD1lfzIokr0sEBStgKMncCAwEA 13 | AQKCAgBVVGU6qk5i2xrkO5oHO+8YwOpfsBdJG7yIEsYamJhHveH3zW/6vpHIw7Eq 14 | G8UXRtnA2svqKqXp9YI0Wns71NNlvoi1bO3pP6IpH/PpFb9PdaEtB3/mC5HsFNXN 15 | svb3mecILC6E9In6XUHx5hWwQstXgTZcGVA1VfqnAGUgQ6goyTbAasRMkuM9+i0m 16 | I1e47XGF/69dVatCDvZBpJKMn2vMlvR3sYw4fP8zMiFtLPb4mq1OKerEX6Fz7zTl 17 | oh119+m5koXdEzso9jKO2UTz85XT2JKGcriO5/e3D4v/RcLNPk2+Ek+CavgJKGMQ 18 | WogqaHjTyu+wUm7omqA6VuGDLZqh1r0xYR+EXVMAudLjy7/NtAaE4MVOqVRs4WVd 19 | sGccyirkTosxlvK3/vTfsp0VQtreBbxO1maqR5od0aa36MrP4Sk5O07yB9GAthp8 20 | 5qlqtiYaO2Hcq2KJjKPUGwXlAWFZtENQe+G/jy+gYVDwKRInK7f7HubZlAMwsq2S 21 | LSjtgvhqayAMsa7HoeevSVPLVdFb1IVkIw2jgMhXRgxmKa8WzbAUs124f9ey9z81 22 | si7w+qpZHq9LGChBjweTbd0abCianyRGHZIlDBE43XEcs3easxuHM6eOoJz0B7aj 23 | oCXBCo/6Zd0om4ll5jva2+VOH2wTkZk7OhPiGU2f4g7kTJNAAQKCAQEA7YT3UdjN 24 | HybAD3c/a5Kh17R4zCvymQkUWBs80ZaV9LlCYgie6aWlY6e+9m6vpDhY8dJZd+Zm 25 | hlAF3VitRLw3SQUEImEC1caS1q99o1eQxMGu+mk9OiibF9PzZwoPE6zt5EZ0I/Ha 26 | ifmf0Jn3xLyWF4qOKvO3gbWLknirDKffzNnWtFr4SQlEwtOR4m7IFDEz7e7RoGlv 27 | K1qEFyK1PCfR8GeVHXWkJ3udHJRIZlEtNNLkHzU4nCRRYTvQ4l67rD9Tj7CoLbH1 28 | 
2OGSSvAkg+1lTBBs6RXZvhVvLrJVtQTXR7Oi8Z3mi3iJu9oWYa/OFaJl4lAN9xTe 29 | QY0u0J1+AS5qAQKCAQEA2yUhO3rC1A7qHgbY4dAsx8f8yy9D0yCrI9OLnPQNF3ws 30 | 4mC1fUS60+07u0FYkgU3zIDwdLj5nsxWjB4ciY4hCgwz7pNJWlowurrfTvQNlqvC 31 | m+Jrt1HYoaV+73mSj+rGv89HXWBW2I/1ED37BRoNB/YIMd/MUL8h0ubt3LIVaRow 32 | 41DT3dM969zuw3Avpx1uXQdnijJ1kA3oHpJ756YLHri/Nv6K0hJmGAbMrHPRxuhY 33 | hYrxPJPVlp5mWUIjNkKoaNl3du3a6iVSbf7W15LxhAHmkKozxnhqoMldI6C8R548 34 | IKGyW4wo3GQvcEGPhgGnz2lswmvtx/6cWMv81b7sdwKCAQAXiC3sqPshk/hBUAIz 35 | iTKJqXKyX8RITzL5y7EJ3s56kYQ3LD16TpQFPJBZ3/t83KxLQRjmHSiZNAJwvKFm 36 | BvO/Q0T2lv/t6B+SL47WCQ3pwHqyioyrX2yGPtSFp+R4gZCMJkLJcOPC+b1QsIBw 37 | uHJyYKLXNJBAxJjQaS4hMdylcguokL66lbV/S/DPK0SdY4aOkzOnneqKtAwUMrcb 38 | /6H4HHsUkRwzYTbepv5JTM+axS4evWofZiW96Ww3kUUsupVvzgPLiy8dTrPswsAL 39 | ZhC8KYBw015gS8VZLgf5yEH/85c4MvmtZcLXnrLK+N0FHbLUajQH/8RJYFB8EK50 40 | NYIBAoIBAQCNO8/AIqz/uCEAew858U168BOm62492lcRvtvCqrLpSNkwiH1PH4V8 41 | 4e7WDxZC/WPpw8u0niYaRr0cjqd7q4L1k8nAX2It/eRb4+XZX4aGbWn9xx3+xpvk 42 | CeHV+rcPU5MFuVPYBSfTaGvbLObjcdemItVDN2XZQGVPJA92ZbtQwlVxmv0AgIzu 43 | vrOOd3XusRQYlpYPRdfooF3RnjyGncea5BwwGDpliwALSg6MshQnqkSqxFIW5XwJ 44 | F0sFCY/h/3HEKStKFZ85uhX21/+kbYqDtinfYCIALkkiGMSTGptdWMiNi0iEob8P 45 | 0u2T3lzeU6DQFrTGVIKpmxkuTUFEjEifAoIBAH4nyu4ei4u7sGUhSZ79egUbqBZR 46 | pjYblM8NB5UOAVmmbaswCWozsnsaBdIgymeokQXDPqIOwadP8IgGrgIxS5phdgvE 47 | CNepxQDoVTXYEecPjc0LL4Kb+urmJL7HEP9BteIkc+7l8b9USDhNlJeCXICoJKBu 48 | bNxgm60ZuoObx7h5APq9wC4x8Xj7AxQKu57Ied/tRFPCHW4UbhZhJfrnS2xTTk0u 49 | z71AS7akI/NPfm3nLviISZeDzTgYs6vLYr/j4JUlcw1z6UpX4DvNm/MULi7ItXP5 50 | yV2H8jpCdjAe+OoC3OHTuQ8FQR29y758HUY7iF8ruuqUSWxF7pfL/a27EMw= 51 | -----END RSA PRIVATE KEY----- 52 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/renew.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | cd "$(dirname "$0")" 5 | 6 | openssl x509 -x509toreq -in ca.crt -signkey ca.key -out ca.csr 7 | openssl x509 -req 
-days 365 -in ca.csr -set_serial 0x01 -signkey ca.key -out ca.crt && rm ca.csr 8 | openssl x509 -in ca.crt -outform der | sha256sum | awk '{print $1}' > ca.der.sha256 9 | 10 | openssl x509 -x509toreq -in test.crt -signkey test.key -out test.csr 11 | openssl x509 -req -days 365 -in test.csr -set_serial 0x01 -CA ca.crt -CAkey ca.key -out test.crt && rm test.csr 12 | openssl x509 -in test.crt -outform der | sha256sum | awk '{print $1}' > test.der.sha256 13 | openssl pkcs12 -export -inkey test.key -in test.crt -passout "pass:1234567890" -out test.p12 14 | 15 | # output ISO8601 timestamp to file 16 | date -Iseconds > GENERATED_AT -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIFEzCCAvsCAQEwDQYJKoZIhvcNAQELBQAwTDELMAkGA1UEBhMCUFQxCzAJBgNV 3 | BAgMAk5BMQ8wDQYDVQQHDAZMaXNib24xDjAMBgNVBAoMBU15TGFiMQ8wDQYDVQQD 4 | DAZSb290Q0EwHhcNMjQwNjI1MjA1MDU4WhcNMjUwNjI1MjA1MDU4WjBTMQswCQYD 5 | VQQGEwJQVDELMAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UECgwF 6 | TXlMYWIxFjAUBgNVBAMMDWVsYXN0aWNzZWFyY2gwggIiMA0GCSqGSIb3DQEBAQUA 7 | A4ICDwAwggIKAoICAQDGIT9szzhN5HvZ2nivnCDzVfdYbbqBhgEbPppWPyFcV0r2 8 | rtmWfeK5EEdsTS/Ey4owTceOplPpAp4svF+a/i1/bHhqnQYYU4f7Qic4fDAszLdi 9 | SIo0o1csNvIogm/P+uvSzE6eZRZUSmo49dY5SKSJt6Pjh6lM2MHEjsPKIKdAN57w 10 | EN90q4IZv6AHE9rphqxcmF1k+j5xmhCUS1EJ+y7hyZ0S7Hghdgp/0cxSu/7YlVYy 11 | JpkIlQd3RPXzEf6VSYjtr9Ajp1rhvv2611q0CB5NALg/KR3OiMPYmTg5HAKOdweN 12 | am76nG3VxTeV3y+LW/pZAbi4qAl+4/c0eOGsL7o/YSn7qhThU1AWS9kY1WxTCrKR 13 | h58rUGRfmvpnOR99xvR4jz942RNiY61pTmsvo+iJspTII3GZhwIGlHtxE9Rn50lW 14 | QcDuDDHfObWhzb4rS55BERIwDUqD1LgCRd0ikRxPSvI1AM4cl35b4DTaDLcnM6EO 15 | fy+QTYsgNoftU1PI1onDQ7ZdfgrTrIBFQQRwOqfyB4bB2zWVj62LSDvZoYYicNUe 16 | cqyE1542WNKzmyE8Mrf3uknN2J6EH7EhmiyRBtGg3NEQCwIYM4/kWPNPOtkSjsn3 17 | cNbMNUZiSnQn/nTs4T8g6b2rrwsay/FGUE83AbPqqcTlp2RUVnjbC8KA5+iV1wID 18 | 
AQABMA0GCSqGSIb3DQEBCwUAA4ICAQAlB7YFw7e1pzYz55Kqa91juTWP5XiCn59n 19 | J0YKM++vdHqy224HZb9jGtJjZ+0Wod4cwiOVWm+5hLs4IrzfGuXFZEFx/VWP3SDq 20 | 4F3IJJXQkc7jSNrL6IR92xRDSB+yFZZI6FFsnaKMT2fZELndPVFnH+oclc8ZZoyz 21 | 2H/r1CT4yYx7YclAWUqq8Ci3J82qUeeM8Xj9fzGFKy6oCoRsApQb4qb4DoQ1TbZC 22 | b8gWxHj8l4izul1MtTzSkoMb0Ot50vMoT69m1hDz5H4wF6KuAZUAgM9LQWNHJCkt 23 | hlOXvqFTHF+y+bvK+hGs976xViq3HA45M3+5Psv0+fdoHgYQJvd23yt8CM0rGfv3 24 | P+34HlLCW+FdWiazmo+tl5YmtGs6pYuAEp2z5pmUO2l2CutFmv4xBOvXF+rZOzxY 25 | Q0ackJtflnDC/Tlq2qAldY3Oa8nyI3UIaMUcqHemwm5KpDjc0XF2J1qCoSrMxD8+ 26 | L8HdvUYlh3DIFgJIG1DlTtfQO+RwrVi9+NBBGAsforla9HJDO/POiv7O9hED71u+ 27 | pev8flmULeisMeYqeiL55jyS/+45VaF7t36FMyiP3zXANwbHZMvzVobEsXAuzPOt 28 | pVNo/EpszrdBe9JWt1GrFLY9c14FmWG8cAWpcwRH0ofhJPPvEB7usFVWCSduOAbA 29 | Zytzb+8iSw== 30 | -----END CERTIFICATE----- 31 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test.der.sha256: -------------------------------------------------------------------------------- 1 | 80329a197063dea8cf7905d10d221648bbdbc05b8fb1d4c2e384b831bc6590df 2 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIJKQIBAAKCAgEAxiE/bM84TeR72dp4r5wg81X3WG26gYYBGz6aVj8hXFdK9q7Z 3 | ln3iuRBHbE0vxMuKME3HjqZT6QKeLLxfmv4tf2x4ap0GGFOH+0InOHwwLMy3YkiK 4 | NKNXLDbyKIJvz/rr0sxOnmUWVEpqOPXWOUikibej44epTNjBxI7DyiCnQDee8BDf 5 | dKuCGb+gBxPa6YasXJhdZPo+cZoQlEtRCfsu4cmdEux4IXYKf9HMUrv+2JVWMiaZ 6 | CJUHd0T18xH+lUmI7a/QI6da4b79utdatAgeTQC4PykdzojD2Jk4ORwCjncHjWpu 7 | +pxt1cU3ld8vi1v6WQG4uKgJfuP3NHjhrC+6P2Ep+6oU4VNQFkvZGNVsUwqykYef 8 | K1BkX5r6Zzkffcb0eI8/eNkTYmOtaU5rL6PoibKUyCNxmYcCBpR7cRPUZ+dJVkHA 9 | 7gwx3zm1oc2+K0ueQRESMA1Kg9S4AkXdIpEcT0ryNQDOHJd+W+A02gy3JzOhDn8v 10 | kE2LIDaH7VNTyNaJw0O2XX4K06yARUEEcDqn8geGwds1lY+ti0g72aGGInDVHnKs 11 | 
hNeeNljSs5shPDK397pJzdiehB+xIZoskQbRoNzREAsCGDOP5FjzTzrZEo7J93DW 12 | zDVGYkp0J/507OE/IOm9q68LGsvxRlBPNwGz6qnE5adkVFZ42wvCgOfoldcCAwEA 13 | AQKCAgA1FkOATCWx+T6WKMudgh/yE16q+vu2KMmzGxsPcOrnaxxS7JawlBpjq9D3 14 | W9coy8DDIJQPzNE+5cyr/+0+Akz+j3nUVy6C5h7RW/BWWjAuUMvyMa2WXQ3GcxJ/ 15 | eDOtbnYxjTyjhEJvY2EC0hwMTUKJBAONu5PJW2rP19DuH8Gwmzai7GJzSGEbtRST 16 | 0OYfHE6ioNCldce1eKpokaWtHvh41ySXJXUqwg4eIYC1ylmGfr0RwvXOLuBJPNkJ 17 | wBCOv51I0oragsT/J8Wkgn9zLZmw2DiF8+ZgqJSRPLyr0K1+rrX/Vj1WOQPU+3rh 18 | VWPP211A7A0qrRuePEbIcHtHP6KPUCepABL44K33zyyOydmnJ7vg3dsW7AN7+Y6O 19 | H4B24d1ogn4TJwzpZCfRvqJJVu2wsnzleng9PcpXyHhldB6S9h2fPpNqDUBvfxMv 20 | w/fGZ2ZpOeUKRfQ7VVR3XIWwFq/eDhzLicHipaoM+6gKeOZdJPAc0Ew5jvOXQSBD 21 | CYCM12a8gnEYd55NLo/fF3wX6Wdq/X6EbWW97gwtmmEqnhcZMxLdeMuKyli22JyX 22 | Ik7QIDsmPSWhCkI2JvQ+CAZZp6oMEKuSb7UqqfACQreIuxCUmPTZq/pAEUGSCZGP 23 | wnWqOk5jwxJ4d5TQm7g2RgPC6lTd7as1m4+JB8H1cNVpS2d0AQKCAQEA5tL9WjIK 24 | u6x1h4a4KPmOQ9B34GxmGM+P9/bQkkJsWYf5eG1MlYPAigAiN0PMYPHDweeROsOO 25 | rvmJiWXtmPMVFFSalHVotN6LMj400HhHObg7dADDYTBBGX6QuljxBX9jgUiKSOKO 26 | 66ngXEyRdUmTAbral3UuSCFGcqG8Khd3taULO2q5JQLEioFT+Lr0UzHywVSJq06O 27 | k37aC3zpiQj4S/bJG4GOadYDIENq+gRCIU7Hn4pS1qtxLYMyhtNDzK0pVriYNj9T 28 | ydHUxSKZO6ogM4423wVKnKOa2Cj4rgKBDHGpJJ9R0ZhrTbAQOa8LgDy1P6aMlvH/ 29 | t9bG/HClmuHrFwKCAQEA271gZpIeCFG/XYIbxO8Uh9wG2cHrt7idw0iFTW4PpOBk 30 | CGExq7WlkR29BkzxTDEXz5bSgDa8Q83cDf9EjSHVJYVGSYnodz7ZV8jZd2PUdCV1 31 | dL1kHV7vqz/KKxuCp7icCwzG9rQ1CjsTv8gBM3cN6DrZgw/2F+HjQpCkeyxc6KBR 32 | Q+167yaLvOv3W0BHdSywtiNDU48oSSBpEK2anh7ShjG3BaEr/gAqGsTvxjsl7zDg 33 | 6MZFegeYPFIEH4ftvLZugPgd3NBg00CfsNRkjVWsH51y4gBy2ZL8d31Q2D2FI94s 34 | he57Trvs8t8Y9QHGTMCuUk9IwRBpgapmW+c6G65jQQKCAQB0IPquAjc8LIwE31aP 35 | 5t4YaC2nj2HeYa8BBf/xVdHFSY/Ncs/w+RRJWb/dJhMRQxFF4QdEFVcX2RvFMK7V 36 | IJceX2JWBqvjM4O8h2dy6tCKzZG7zxZ9MxXNicuutUith2W8iY5pFPLqxdDHHw6f 37 | f6CiYivPv3jFeVhEG/LbCmuDy8FW5162rCnNtTtWDFkX8i077xhEQ4Wf11ZEKVgl 38 | RYoGTeboG8pWnQF9ne2YU8Qhlc0BC0qaDi8mwrcM9gVKWGRP6RdLU5kIFLWDaODH 39 | 
D9Sbm5UnpzXekME6t4JFqaTbaeO7NRyo4pI5x7aiDtsubVyS5WweFSqeh0QdhV8M 40 | CVWJAoIBAQCJ7OSFfVG8hxNG7lPf2PFaFZF3PXFncEoWOX7yixLmurIPUHKNBQdX 41 | fwMW4VTULjxz4IdgUvr41E47uu6cZ5ASbQUhQ57WumxR3ZAikFbaUGjZTcd5aA2n 42 | v/J1/F6WSBoqFWsNjy97rHlI95FJbIEeM1I0IeCmPPMY5RFY/w+SNI7NxFJVqiwr 43 | +TDZ5g70TYjdymSIHmN7AwzvmwhiF5atBKeMsQ2b8R58jwCxvI6jBFsnwMv7PKkh 44 | s5lC8V8YBKp36UVVRLaB4x5ZL/etfwj7Dyj9EqsERm6R0ebc1ECtstbfekGLugmQ 45 | qNhRcTu3EXpZz8oq5NJUwVBef1TJ6zwBAoIBAQC7Oq5AdmLzYOX0AYUuT3Cjgl0u 46 | 3Tq1c1uqlVaxQGjA3oqua4SR0+kvmRZbZGLpLAVnnwOjUEfUFBBYgP/5Mo/OiCkQ 47 | C8eWkSQKXy6OFy5wh4mbL5oJttKjM4ZoB0gpF31+tGOmrfJwacqEPnyZLKzkgBdG 48 | djVVKP+HH4XUdB2VXst8tvcif+VTrUsD1nuhGMOgbiHZkdx3eug05wfhnYvWljA+ 49 | r/4xcq7TmKSoJqkb0OcOkhqjeZWleA6xRtEvHPGRzbEM67FVsVTxr/N9BX5tS9zu 50 | YLCNI3tTNsDV0Ac+4rl46rghQ/n2TNSEpwvA/pjytsdPXLOpoapWirrsEiXf 51 | -----END RSA PRIVATE KEY----- 52 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test.p12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/logstash-plugins/logstash-output-elasticsearch/f3b08942f463954395fb1cb718e023465bf86cf4/spec/fixtures/test_certs/test.p12 -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test_invalid.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIGVzCCBD+gAwIBAgIBAzANBgkqhkiG9w0BAQsFADBMMQswCQYDVQQGEwJQVDEL 3 | MAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UECgwFTXlMYWIxDzAN 4 | BgNVBAMMBlJvb3RDQTAeFw0yMjAxMTgwNTA0MDZaFw0yNTAxMTgwNTA0MDZaMFIx 5 | CzAJBgNVBAYTAkxTMQswCQYDVQQIDAJOQTESMBAGA1UEBwwJRVMgT3V0cHV0MREw 6 | DwYDVQQKDAhMb2dzdGFzaDEPMA0GA1UEAwwGc2VydmVyMIICIjANBgkqhkiG9w0B 7 | AQEFAAOCAg8AMIICCgKCAgEAunKYkXLoAC14bdIveIZlImtrp2rkQ5Ugz/vGVWFJ 8 | YatJh9x5dmqJQeO1gpGayNedbwDI/50CTDoqwCz4aunMKpgQdKHryjeuR1zqElhC 9 | 
QLZtAkzM/pCkTnMYvXDfJrBZcSWP+ddlkH8ffmN0Fxf3LsKln+K5A4hASrVGSKYw 10 | +eNDV7yIxdhthqD0xNRuw/j3lSLxwBbwKOs7Mh+xmdMa4vs3AKJvG9LdTm7xdHtD 11 | 4rkQAA7TRQOR5pl+eDICRnNkGVzgPMdf2kM94ZU7TI1zUMqV1uPNE05Vps14kuWq 12 | Z84r8ecExCo6mQxrQ1M7Y2UBGa5NM9kb+UP1famANadEOlS5kAGEtcpHnh1WK+98 13 | 4mxtMQJHIOOASde/coA3mZ4Oa8Z0Hzy3fsNvD9ieLo2V8yQN9UAQshbB1BEuOaRr 14 | 9wQKT4jlnCinZ4UU6FpEJ7NIiZ9wBKqNbN8iySPYmRihOj6BDuEQB/W/K54KQB5n 15 | ctT6MsoTyTFgqe3Zn0owMrGCsSDJsvUcaAz7ZsnesoiFtKsYyRZAWJVcLO5R0FxZ 16 | YjCGk9stspLT1cqmJ8VbMLhiW7T4ZP+sZj3B+aSZWnS5r78I4G9sB9swDMJMjNRP 17 | O15zvQKIRluULhm9WhPET78Iy9Qg1zoiGdnzxBSZ9Sh8+yb57cCRD4da7eLne4Sb 18 | uU8CAwEAAaOCATwwggE4MAkGA1UdEwQCMAAwEQYJYIZIAYb4QgEBBAQDAgZAMDMG 19 | CWCGSAGG+EIBDQQmFiRPcGVuU1NMIEdlbmVyYXRlZCBTZXJ2ZXIgQ2VydGlmaWNh 20 | dGUwHQYDVR0OBBYEFPqrHNv/8Io82JmzcNWBdaRmftvdMIGIBgNVHSMEgYAwfoAU 21 | oVp0nHjq6mJ/UGuFhnSK7yjbPyuhUKROMEwxCzAJBgNVBAYTAlBUMQswCQYDVQQI 22 | DAJOQTEPMA0GA1UEBwwGTGlzYm9uMQ4wDAYDVQQKDAVNeUxhYjEPMA0GA1UEAwwG 23 | Um9vdENBghRT5Ucn3UmwssDUYlgL5RyE1/DeETAOBgNVHQ8BAf8EBAMCBaAwEwYD 24 | VR0lBAwwCgYIKwYBBQUHAwEwFAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3 25 | DQEBCwUAA4ICAQDHt/3rR3rzESrZNNLIU+jbW6Vsfcr22mghhbdoU+pnkZB8Spzp 26 | AUqgmSlOOuz/3OMLPP64sj+gm9kiU177uBoqJUsnaAaV0LNvp+EIWIdskw0OGU2X 27 | uOslZK5EdYGqGgRlElnohPRXcFXwsh//QJYDmNnnC3Fk+ZjZQKjH4q4Ur8ECPqit 28 | wVnRXqlKfLRjWWzvTgoPTAN42KHP2R7xxIHdV+cXH6riLLvtkWcGJbfoQaSuNOvc 29 | GAIoQc2YmIiVsGZ82n9Ww2zO9ByqF+KfGgIFDTCp1CzpfLKLLhzMv/p4n2zf/BOb 30 | MCJJfljOewqmzMo48Wj2vk/46IAGl5uA6PnDwa0LNgomA9c6loDcYEOsCr69xL+D 31 | 7GL/Jzm9HaTE97lRGVwoKBG0hKabFEfOueKB6Oab8bVTuY99kbbRaFZs7I9QYVQy 32 | eY38YJv5kN3yAjFclO39R8cAngqecbQDZ7xTl7dF7CvrpAoNI8olL01Kjy/+vfX4 33 | WAMO9YONnVPwwB05voRZfKErVKi9iwAWa5m9DdtE5QdfjcsXZkITW2CF2skSgujw 34 | BS8P8Z+HccKa+qEPQM+eBWjrlzlx/XC2iXKE1w4zZL0wRgY7+W4dIMgJmPmV/Gue 35 | 5wSnZtMueBdyKy+xFrJcszoakKg/JfSesKfnVLC+60EL2FQqbrGCEiTp2A== 36 | -----END CERTIFICATE----- 37 | -------------------------------------------------------------------------------- 
/spec/fixtures/test_certs/test_invalid.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIJKgIBAAKCAgEAunKYkXLoAC14bdIveIZlImtrp2rkQ5Ugz/vGVWFJYatJh9x5 3 | dmqJQeO1gpGayNedbwDI/50CTDoqwCz4aunMKpgQdKHryjeuR1zqElhCQLZtAkzM 4 | /pCkTnMYvXDfJrBZcSWP+ddlkH8ffmN0Fxf3LsKln+K5A4hASrVGSKYw+eNDV7yI 5 | xdhthqD0xNRuw/j3lSLxwBbwKOs7Mh+xmdMa4vs3AKJvG9LdTm7xdHtD4rkQAA7T 6 | RQOR5pl+eDICRnNkGVzgPMdf2kM94ZU7TI1zUMqV1uPNE05Vps14kuWqZ84r8ecE 7 | xCo6mQxrQ1M7Y2UBGa5NM9kb+UP1famANadEOlS5kAGEtcpHnh1WK+984mxtMQJH 8 | IOOASde/coA3mZ4Oa8Z0Hzy3fsNvD9ieLo2V8yQN9UAQshbB1BEuOaRr9wQKT4jl 9 | nCinZ4UU6FpEJ7NIiZ9wBKqNbN8iySPYmRihOj6BDuEQB/W/K54KQB5nctT6MsoT 10 | yTFgqe3Zn0owMrGCsSDJsvUcaAz7ZsnesoiFtKsYyRZAWJVcLO5R0FxZYjCGk9st 11 | spLT1cqmJ8VbMLhiW7T4ZP+sZj3B+aSZWnS5r78I4G9sB9swDMJMjNRPO15zvQKI 12 | RluULhm9WhPET78Iy9Qg1zoiGdnzxBSZ9Sh8+yb57cCRD4da7eLne4SbuU8CAwEA 13 | AQKCAgEAjd1oLTOrR4dRnO5S5HgON9RYg3iX2rx7zQUb8hcmSMSWHGQrn2iMq6/h 14 | iknT5oH47l9AkQ4ck3em07bkBiWDDd3Kl5Qk3ybi2rXd/LtfHfIwm5FoYUIZYv+8 15 | kVhy0vNX+vnDxSyZwQqZIgf2ayP3FoNQlolikUd4899ebSTXGyxLL1TrFO8K12dd 16 | Fu80oLStXbLLE4fgkKH71rW64vp3+MGBJmBx/k/ByE4uNp0cGEUL4Z9mLZr7xuSA 17 | EGCszoI6Zfn/PP/O9To4uY5dInB2j8C1pl6KyefO0C6DUfKiaH29fkBzpfcjwxRq 18 | AT4xb/2IisvPPmYyHMYS4ty4CxsMk7Cu5q+DlmRMa7ShOOdSAw+1k7yUl3rodXuu 19 | 48k806s1cpoowo1aiNYMlZ6scQ0GtYU9/KfeWyfzO3QzgNguyAXifZNepU6qumUw 20 | S3670/1O/7KfpUCd8zgGmbHXlm6lhGj6ZHRDVDvp9/RKkqySbTGuWAXRgK3iLvEg 21 | 26KTBtqW6WMx0XsZf7T2CDfXqdL+XzDrYNl9GB5rqgCjBUmsaK9QdwDGw4oFY3+k 22 | 9KzBYj7wE7/W9vjBNBoWg/821McHJyYDaFCdOyC0YkppZKNENMcUkNE1noXQB7w5 23 | ib0NPWum//H+DT+F2iKOzP7N5f4XpHUSRnpYQ3C3d3Jt7n9NRoECggEBAObPgZd/ 24 | ZnrgZtt++tMAsG8wEfjPS4GM/vbAdNJFruT1zamwIQv4ba/7/m2UO2nIpS1T6vBR 25 | MeVjsaueNe2K/9iRbXWKidTj38EnWCeEQQek9+bCjXqW01WjF+fomXYGSOj2RUWx 26 | 5z7CcsfFe2N4yQxZEv9ynf2PZsA90F5FGcm7RvputhcY79Oyq8/baQmT2JpJMbHT 27 | X3J1usCQyIBjaGRErh0rSwXKcNL09Z1P5t4TDkZ+bctzO3s/0qdgS0E3keilyE4L 28 | 
cKe+hoGl8CsCaUBSmzY5evuLVChoZi6Pwlk84+CxFm5O9l505Bmphb22vZXII3ZS 29 | k7zDIJEBRzN+Sy8CggEBAM7LqE0yy56c55itsWQOKAVYyObn7HdwylQvd3OaEDhG 30 | L5MFFdif/GeLHcNzv2eRnd2n7cyjDfHR7znw9H0/xlLlloxonGiGR3XXCGexQYBY 31 | lJPkeCQRPAqNWXxUKsSPaChXJROsXRy6G2dgcdruM88z9iKnyeu8Ky++y8r6DLrm 32 | niTcRdqzz2HiGbnUyYdH7mg3IVT5LZsXxNyLAN+t4r0LmeZkijCC5xUd1brOxEAM 33 | h36qJZ1XXAGmhPgYW31VAqyDZ4oL9PGXEU/MCrxp8xHflbBOWBLm80gYmbZzxvvK 34 | 4aGdXYWRmlEI3MS+HsvtcGXh4WOE1nLSU3cvDDIBa+ECggEBAKO+hvORoIR/+rix 35 | hwR2srTO55EajzijbKZltvsOEJvCfltp5qf1YOu+3Kukw9myTOyxYjWHhNx2M6/L 36 | F/sj54oe8ga3eD0eRLllTjcKro/byztcvr4/jkJs6CLQcz62CrerL03YfnOZw5BS 37 | W80f/ZHTB5VOHSOrvnuX3uFiKH9ja2FzdZ9BQ7NuSFG4GPaAeuRKFQVRLZ+oQgsn 38 | K/dZjs/Dobpz4k+DZTNkMXOfIexenHwKaZ4ya/puNuYjfIASCmOAaXBk0VFP62DC 39 | 9nWsyjql4BNCCCu4lsXr+sIBnyFr/0aCm6U6Q7KTPtet2oHSyQEf8XiZ8NGzpMD6 40 | pSa07GUCggEBAJc0JCAmJuoX0eM6BT2ieDLIo0TqiWUf7GC3wECfgoKTFxAJpNqa 41 | yCQxfRa/WFFzEJnUwrRg+L47AQ89lpbJ/cn5IyYRC2QF4tRP4U5oNfuRSToF0K6W 42 | h28zwR1+MTM9pCvy5CJJYl+x2H2y8CzjBLDZTnwycRrToEQt3rbQNGSoYTOUd+Mc 43 | nGL2vla5No2a08ARp2aJN4ZyT7fuTzo9207c36+tDbnAzRQMl40ayDYIsz2zTQOJ 44 | r7VpO0poDOVMNkNLZXZznarUCY1uJN91HIySDdI2xoEzquipTCMy7miHBIl2Fb6n 45 | Is1jjICyfrQfLZLhITryExcroGnB1cnubiECggEAdOeyL1Q0MC59nbZ3E0OwCSMe 46 | HAEbcEp8J6nyyxJ+VVSVSmh5SIEajSh5PPkrzzqKkVo6LSXvHeiwvx5R302RBIew 47 | GZ41oUJA5ApPWAiZo/pusRR0nnClBXPSqFOb7uYuDTXuma2rWcN3xt4L1zGbSMS7 48 | de3Wsp6kVv3q2uF/2fv78BwpwaRQaDcmw269iSHjMAtVbBm2QIkN4FI+4CCUdn7H 49 | plUAW5CXWtAHRKFJwjF/I8PYAlyvGb4i8ll/RK5hworj1VrPgnKIg2JP8LcAm1a4 50 | 9sk2DRryU+7PZBWgqaXeTfoDUzdcKmWx/jbGPP7Wlvh3Ao6ityQ0pauWZKzSFg== 51 | -----END RSA PRIVATE KEY----- 52 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test_invalid.p12: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/logstash-plugins/logstash-output-elasticsearch/f3b08942f463954395fb1cb718e023465bf86cf4/spec/fixtures/test_certs/test_invalid.p12 -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test_self_signed.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIFdzCCA1+gAwIBAgIUNtmhsOlqXXLvFvsvImAvs9ZrnFkwDQYJKoZIhvcNAQEL 3 | BQAwSzELMAkGA1UEBhMCTFMxCzAJBgNVBAgMAk5BMQswCQYDVQQHDAJFUzERMA8G 4 | A1UECgwITG9nc3Rhc2gxDzANBgNVBAMMBmNsaWVudDAeFw0yMjAxMTgxMzIwMDNa 5 | Fw0yNTAxMTgxMzIwMDNaMEsxCzAJBgNVBAYTAkxTMQswCQYDVQQIDAJOQTELMAkG 6 | A1UEBwwCRVMxETAPBgNVBAoMCExvZ3N0YXNoMQ8wDQYDVQQDDAZjbGllbnQwggIi 7 | MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDT+NYvZzi7y2RHvklPQ7BYV478 8 | A+nN/ncakJ0JjmdekwDhUjb8VO/YxN68FyLargI89hsa9Aw20GRZrtibUYfl2qZy 9 | CW5Gydl/t2evCbO2mIcObImsagp/wNbPfopX8efU1aK3KPSlES0qDX6oB2hl9Afx 10 | oeJY+NBlg1xhVWC8/WAKxG/me9XPYhsOSVYR8UiA2RkPnhSND6dqqR+KGUdhxcZH 11 | rB2Y/fHyy2uAgErWguNSvVRudy7yZn5eAIhV2cI+710SOGPIXPbMPzdJBiNFNhaK 12 | QSCze+lC7xJg2lYhR+2H3DofhftemPMLPtFauVUe1xm5rP/hrVSbZuNN8DEffpAO 13 | whhgsDDmkAh8oY+OTSWCdjKHrir+Dbqo/OfmMK5tkC12LP2e45nvGKLrJGsHtBeP 14 | +l3edtYLy0lvKVXvF/3krujTQYrA04Gqb9JKsvUSFarlhiY3dmJ1+na3kCNRu5Ei 15 | oFlvr8uWJLeEFTll6ahTGkCnWZ5bHP1NNYD4JRwTNtujArtWxEEomoiyjgw3RfHE 16 | pGlSaljnCvP9OX72zfrpfbQJx937CEt22jVTyKls4TPvkX+bKXHUVNG0xDlMcUK/ 17 | +DdTDLxD+j0gBJofSD/uWeWd8VhwSG7jY5PimUhQxwrHouyKbktF3useuElH/3wn 18 | pDy0hPKbgZmATChOBQIDAQABo1MwUTAdBgNVHQ4EFgQUKj3kW3xSP4/7D9djf8DM 19 | XdAG0e8wHwYDVR0jBBgwFoAUKj3kW3xSP4/7D9djf8DMXdAG0e8wDwYDVR0TAQH/ 20 | BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAbNsBp/QUTXNKblQUPsjK97BDHVLs 21 | pdcHAabLd5Cwveb+0RyqUoMGz+XmtOiCS4bX5uLnNbjiFQK1zMyen6RBmdfZ4+nv 22 | /Uh5XMQx3oZCCrmXbTEsFjlSgQuQlTGlaJBZXKwN7KmBgqvsNEPRwteDPOLuQ4DP 23 | 4zc/6euJDeHcrqTYEF4sYB5srkkL4A0Kw3ZaukopKMpdEaAzgqeQdQ5AqbKlfiiI 24 | 
/XJxomGLQ7LWY9KWYeZZ3RhdN/mEREFYHZ+OkptPcKqjQimIq6JulHVQJar3A3ec 25 | zaVsyT4UkDDn0nf8N/D9mjbCuO3Pd/8EHgqRzxMvglkCkDElrucrXjECK6SBpmgu 26 | nCIvEIJxlHqLicBAQxdv6/N+UILZ37Wg1MyoIkzQA1j6lkw8kMLi0OIStZ6COOVA 27 | 6x00SDH7GwkWEWJCGN7v8xOTd4ftN3Srhsai0wMyNtA39Wi3GiAWo8XwtfrtkWwk 28 | 7zmWYZu/Q98WAKSv7V5UYkCCzI1R9L2ZRBYtl94kQyfCzwVAyw7+x+z2mFm9hKIo 29 | g4Gc2UVOgMdYeghNwst7GUmCOJGo1hAr36UtZgDV4PnM1V41GAFNff+XArdZTpgu 30 | dEEhFp2ITlM/+9c68EvFMUhoi8M0GBv7IPwOE70f4blkBvTnNLQqe0bRfwB4FUcI 31 | 8X4SIgxrmMAlKIc= 32 | -----END CERTIFICATE----- 33 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test_self_signed.key: -------------------------------------------------------------------------------- 1 | -----BEGIN ENCRYPTED PRIVATE KEY----- 2 | MIIJnDBOBgkqhkiG9w0BBQ0wQTApBgkqhkiG9w0BBQwwHAQI+GmeuhQKe3oCAggA 3 | MAwGCCqGSIb3DQIJBQAwFAYIKoZIhvcNAwcECBMDAiAm75L9BIIJSAuO3VTZO5tn 4 | F16ZpsL/9CW03G78wIGfiDE3Z4cWGfV/b+0eLUiPEPTWXwsT2dKOK1ih7yWecOLY 5 | Bb1jE/RJGd6SdAvE7Ur8xyEQhDgVbybcn0hubjQj4koP/k7HwcVVyR4cyXUCKcsr 6 | 105LS4lagX3RI6zQCpK47dKbBj3YpZKN8CZPM03SEnmFh+QMDoXAegP2941hIdvL 7 | UO+VY5UpCe1VweBaMAXS6xWSk+LCQ/rlvR7vOTFkXqqC9KybkmrwN8hP2DPi218P 8 | XdQ720tP5gfhYLZtWoEQGrSgoJ7P4we3YyTMtCMmBZLuk5zk/OfwvX7Z+RlxX2rN 9 | TQxn2DlOc5anEG23r3Opv/ETm4awPgpdYYm7by3MJEY0lnOTZRuze8LcW6NjvyVd 10 | oi7geDPmVdvs5AQTV4aSBNfd9xvaq5NWiFPTbtnK17FqWSnWYlmrGJLaWvmSSBdZ 11 | 4BLKElaU9v5uOmCCExaTpx4r+RcNZIdfAggCpcThwJuLtS1Kt0VhDvW5vkUbEq7h 12 | lhObPkUQfCEOgcNgo97ww1E4GxJEth6TN1hP78pcDYg7/Lb4p4DzrBTHH2IFYVmW 13 | cd195YeJIHOt96LWQuslv1HK24Of4+8luR/aLEfT1pGjyvm+5ShsgklEv21YVLJZ 14 | a4NKw8VNL5Y0ErlgN5RXd0n4MAdQHfpHSSUmcW3WNkC607OiS5f4DOAwoyWu8Roa 15 | 9RaHpvJcQEtT1g0qCmLPoaqXOXqXJL+/ayEPXtaTv9wfUYgYpQdaB1b88/USneCx 16 | yUpSnI1Sn4HiZiaNzh+ZpHf0qBDdwPslRXFjtBefoOfJidklTTMQ4kEx4j6Uh2lM 17 | u2EEq0/kldydIUKhqpNdvOBmsylAzOoeWX/PQrVneRXZwe18aZb4QbEtPdB+Lobf 18 | oqRSObI98o9B7joXDJ891GnlsiNg+JvKBgUR9hce14EV8wU25/DiuFr6Mm8yUv3f 19 | 
UaGf8Cz6H8poj8djBoSF5vrVxu9Ucxkt4thN6J7/OXCnCCOU0vV+FUQqE23FPadh 20 | VK9MMA3cQKPiv+d4HNJriThLEgj2s+5xxZ2QXMl1gGNmMEHclIOA2BOSqoq8VkKS 21 | BpLQEZHMyfkRHzB+bclJnkAFLb2Vh/y3SkOgFggJFIRq0kL7g5PYrOwS6qRgHnNZ 22 | tw+hdTMWBLMwLNgMs1rWkEvTxvT//mDRKFmOV/rlG3xozb6OKB+7O60Tx1N5o3oP 23 | KH7qD3bzJ8JyIg85oJJHqOvvMlkoFk4qhL0l8N79EQy/+bOKpQeOxhWDuIpd0sCA 24 | FJjVEBFlYVI25ZO0pjFYvFQKd8IwfA2DpnXX7DRLZmzUvNG7mfkhmzKzqjwAw3GY 25 | RKfRdYF3OMo6/QcKDE3xl/x7XyepAnFlTEQzHHD6o+uhWEvX0+7McY2YAmPrXUjf 26 | bC+au6vp6945FoBe1AcqbQb2ZdJqQq9F1bgi0QwWhh/JlQfvRMz3PqcYct/92Wvr 27 | Fq6P+awwEq0V2XvOU376f5qC2TPOoEyErCHj+m5zUTezP6rmeO6G1txf1qdVfvcS 28 | yqU5iOyQZnf++ObJCsV3HILu3FyOVKrjplFYF7VzLwcNw/ulUK4d2LaZX99WTTJA 29 | H18OG3x0Y0OqLyWKkO9Pl5WkCW8v8IvocDwVl3KsZI6m6JE+92t+IDI2p27hiN8+ 30 | PzoOQ51EGY/nmtDevaxAy4HcOxXYQJV3gZrAOOlBe+7KwPw1mhR/BW64y7JZaCsj 31 | m7CTWbu/tt+xio+PGJ4woj5K5zKDKkP2O46shJRS5/03r3EDrfySgYvfT1M2y9Aw 32 | tGdFQhF8tMSRUYWVPaj8dBH/cRoFvOSI4ARek/TbbJO0XHYPV+rf76MA8VpE6EFK 33 | BskLXLsgxti3sm5p/6D6tg6iC7efBkEebtKmjMvLK38Td1h8aVRZ1tVtJj7K/hYw 34 | Zp2WQaEwpZskgVGXvN+fMN33VUNTYqNS6jTYwPY9OyN1lwoaxw4yAhJolLKp9qJZ 35 | SF7CdYFbao6pgBj5/pzTwANPeRtm5M2gzrNrLgPHjM5w43R3mC6L2qG47Apw8DnI 36 | HhyfzWbnS4UNP64yZoeIY7QpykGcOsR3wO5qjg9hH3WFOgWMciOzOKn3LnhcA05I 37 | C8a4W+xYEcWYfRG7oTBpMKgswWz+++Ho09MuDbkDO3WSwmaKw+dU+ACJc0L6Rov2 38 | wEe0vE4vNugvUbS15ST9Z3zKZuEnMVDw8u5qFLDRz9tEhF/wgi4O9W4k/Qy6Ib42 39 | SUJxbLxPRC8w/CnzRa6xHPxzDcfYQbwnNG+hq5PPgn8xyoIjNJatfigo9o3THRbv 40 | wwDzJEWEzhew9MVYz0Re09KZrCi4BqCNCMwkAM8nOdPu38+MO+0DylqgIY+0u/zt 41 | HbfKl+cbImCuRWzUZFzb6lo3uEJeUZa0UjrxykYCupzMNJ15ezXYxSsWgH58ah/0 42 | 6uqw0I8XwJM6mWY7BQvhixUXmeOgRBFBaNaLBe/bvvEideoEAXgLWw5ID40PZvh4 43 | 4nRtMSOdEJFt6EGLnNl+WCC/f9B/NXHKx2yTzXam52UEe0SDnDcDca3v4z54U2MY 44 | v3qozFf8GSM1EFAcEHRDfCLj+/gXgOHK2DFcMc3YwGJUfiSbz2LfGzFG34nezJDA 45 | TtRFdRlW4ctqUKA4CkGbbZlJ6v9WFAg72BD5OognB035vtpdvfLOiUfUeflskMTt 46 | Uyci5s5sxw8TWRIpaXwk1pnLcivXC0TSVnzi1HhD6pNsejkBXBeQPxB6qMbWUhQb 47 | 
RDzRqbe4z0/IjG6uAXhacXw087iPjPN7+xtZJWwCaQjWGuNn2Fs76CJRrWlt/DF9 48 | PgaBlpul8EuNyMqZWKaehln0zBvH5Y177BXlrEeTpuUo5/kPWj2jEu51jfe+xIW8 49 | 3RejLknCS39KjOV794ImLw8B1WlCwrfajVnXwgga8fCY6KGz7u3Prqq9irhRfycO 50 | pAl7Rja/fb/1yBuQHrUU1lgYIXqb262lebMhLlDHntc+J8Vjk++UuP2WjPNeztp2 51 | H4VF5NxMKJU8gScaN67FUtFYst5cz2aJuDHxbitFHKgoxNFYHVMjY26X/kmZVnUb 52 | aNCsaa8YbPPUDmHf2rJjIlK+PZLvLlENFbWDxaRC7/ab39Za9DTD9zH4aut6gw3Y 53 | q/eu/hnuc3qa580zbpgcAg== 54 | -----END ENCRYPTED PRIVATE KEY----- 55 | -------------------------------------------------------------------------------- /spec/fixtures/test_certs/test_self_signed.p12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/logstash-plugins/logstash-output-elasticsearch/f3b08942f463954395fb1cb718e023465bf86cf4/spec/fixtures/test_certs/test_self_signed.p12 -------------------------------------------------------------------------------- /spec/integration/outputs/compressed_indexing_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | require "logstash/outputs/elasticsearch" 3 | require "stringio" 4 | 5 | RSpec::Matchers.define :a_valid_gzip_encoded_string do 6 | match { |data| 7 | expect { Zlib::GzipReader.new(StringIO.new(data)).read }.not_to raise_error 8 | } 9 | end 10 | 11 | [ {"http_compression" => true}, {"compression_level" => 1} ].each do |compression_config| 12 | describe "indexing with http_compression turned on", :integration => true do 13 | let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) } 14 | let(:event_with_invalid_utf_8_bytes) { LogStash::Event.new("message" => "Message from spacecraft which contains \xAC invalid \xD7 byte sequences.", "type" => type) } 15 | 16 | let(:index) { 10.times.collect { rand(10).to_s }.join("") } 17 | let(:type) { "_doc" } 18 | let(:event_count) { 10000 + rand(500) } 19 | # mix the events with valid and 
invalid UTF-8 payloads 20 | let(:events) { event_count.times.map { |i| i%3 == 0 ? event : event_with_invalid_utf_8_bytes }.to_a } 21 | let(:config) { 22 | { 23 | "hosts" => get_host_port, 24 | "index" => index 25 | } 26 | } 27 | subject { LogStash::Outputs::ElasticSearch.new(config.merge(compression_config)) } 28 | 29 | let(:es_url) { "http://#{get_host_port}" } 30 | let(:index_url) {"#{es_url}/#{index}"} 31 | let(:http_client_options) { {} } 32 | let(:http_client) do 33 | Manticore::Client.new(http_client_options) 34 | end 35 | let(:expected_headers) { 36 | { 37 | "Content-Encoding" => "gzip", 38 | "Content-Type" => "application/json", 39 | 'x-elastic-product-origin' => 'logstash-output-elasticsearch' 40 | } 41 | } 42 | 43 | before do 44 | subject.register 45 | subject.multi_receive([]) 46 | end 47 | 48 | shared_examples "an indexer" do 49 | it "ships events" do 50 | subject.multi_receive(events) 51 | 52 | http_client.post("#{es_url}/_refresh").call 53 | 54 | response = http_client.get("#{index_url}/_count?q=*") 55 | result = LogStash::Json.load(response.body) 56 | cur_count = result["count"] 57 | expect(cur_count).to eq(event_count) 58 | 59 | response = http_client.get("#{index_url}/_search?q=*&size=1000") 60 | result = LogStash::Json.load(response.body) 61 | result["hits"]["hits"].each do |doc| 62 | if ESHelper.es_version_satisfies?(">= 8") 63 | expect(doc).not_to include("_type") 64 | else 65 | expect(doc["_type"]).to eq(type) 66 | end 67 | expect(doc["_index"]).to eq(index) 68 | end 69 | end 70 | end 71 | 72 | it "sets the correct content-encoding header and body is compressed" do 73 | expect(subject.client.pool.adapter.client).to receive(:send). 74 | with(anything, anything, {:headers=> expected_headers, :body => a_valid_gzip_encoded_string}). 
75 | and_call_original 76 | subject.multi_receive(events) 77 | end 78 | 79 | it_behaves_like("an indexer") 80 | end 81 | end 82 | -------------------------------------------------------------------------------- /spec/integration/outputs/create_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | 3 | describe "client create actions", :integration => true do 4 | require "logstash/outputs/elasticsearch" 5 | 6 | def get_es_output(action, id, version=nil, version_type=nil) 7 | settings = { 8 | "manage_template" => true, 9 | "index" => "logstash-create", 10 | "template_overwrite" => true, 11 | "hosts" => get_host_port(), 12 | "action" => action 13 | } 14 | settings['document_id'] = id 15 | settings['version'] = version if version 16 | settings['version_type'] = version_type if version_type 17 | LogStash::Outputs::ElasticSearch.new(settings) 18 | end 19 | 20 | before :each do 21 | @es = get_client 22 | # Delete all templates first. 23 | # Clean ES of data before we start. 24 | @es.indices.delete_template(:name => "*") 25 | # This can fail if there are no indexes, ignore failure. 26 | @es.indices.delete(:index => "*") rescue nil 27 | end 28 | 29 | context "when action => create" do 30 | it "should create new documents with or without id" do 31 | subject = get_es_output("create", "id123") 32 | subject.register 33 | subject.multi_receive([LogStash::Event.new("message" => "sample message here")]) 34 | @es.indices.refresh 35 | # Wait or fail until everything's indexed. 
36 | Stud::try(3.times) do 37 | r = @es.search(index: 'logstash-*') 38 | expect(r).to have_hits(1) 39 | end 40 | end 41 | 42 | it "should allow default (internal) version" do 43 | subject = get_es_output("create", "id123", 43) 44 | subject.register 45 | end 46 | 47 | it "should allow internal version" do 48 | subject = get_es_output("create", "id123", 43, "internal") 49 | subject.register 50 | end 51 | 52 | it "should not allow external version" do 53 | subject = get_es_output("create", "id123", 43, "external") 54 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 55 | end 56 | 57 | it "should not allow external_gt version" do 58 | subject = get_es_output("create", "id123", 43, "external_gt") 59 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 60 | end 61 | 62 | it "should not allow external_gte version" do 63 | subject = get_es_output("create", "id123", 43, "external_gte") 64 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 65 | end 66 | end 67 | end 68 | -------------------------------------------------------------------------------- /spec/integration/outputs/data_stream_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | require "logstash/outputs/elasticsearch" 3 | 4 | describe "data streams", :integration => true do 5 | 6 | let(:ds_name) { "logs-#{ds_dataset}-default" } 7 | let(:ds_dataset) { 'integration_test' } 8 | 9 | let(:options) do 10 | { "data_stream" => 'true', "data_stream_dataset" => ds_dataset, "hosts" => get_host_port() } 11 | end 12 | 13 | subject { LogStash::Outputs::ElasticSearch.new(options) } 14 | 15 | # All data-streams features require that the plugin be run in a non-disabled ECS compatibility mode. 16 | # We run the plugin in ECS by default, and add test scenarios specifically for it being disabled. 
17 | let(:ecs_compatibility) { :v1 } 18 | before(:each) do 19 | allow( subject ).to receive(:ecs_compatibility).and_return(ecs_compatibility) 20 | end 21 | 22 | before :each do 23 | @es = get_client 24 | @es.delete_by_query(index: ".ds-#{ds_name}-*", expand_wildcards: :all, body: { query: { match_all: {} } }) rescue nil 25 | 26 | es_version = @es.info['version']['number'] 27 | if Gem::Version.create(es_version) < Gem::Version.create('7.9.0') 28 | skip "ES version #{es_version} does not support data-streams" 29 | end 30 | end 31 | 32 | it "creates a new document" do 33 | subject.register 34 | subject.multi_receive([LogStash::Event.new("message" => "MSG 111")]) 35 | 36 | @es.indices.refresh 37 | 38 | Stud::try(3.times) do 39 | r = @es.search(index: ds_name) 40 | 41 | expect( r['hits']['total']['value'] ).to eq 1 42 | doc = r['hits']['hits'].first 43 | expect( doc['_source'] ).to include "message"=>"MSG 111" 44 | expect( doc['_source'] ).to include "data_stream"=>{"dataset"=>ds_dataset, "type"=>"logs", "namespace"=>"default"} 45 | end 46 | end 47 | 48 | context "with document_id" do 49 | 50 | let(:document_id) { '1234567890' } 51 | let(:options) { super().merge("document_id" => document_id) } 52 | 53 | it "creates a new document" do 54 | subject.register 55 | subject.multi_receive([LogStash::Event.new("message" => "foo")]) 56 | 57 | @es.indices.refresh 58 | 59 | Stud::try(3.times) do 60 | r = @es.search(index: ds_name, body: { query: { match: { _id: document_id } } }) 61 | expect( r['hits']['total']['value'] ).to eq 1 62 | doc = r['hits']['hits'].first 63 | expect( doc['_source'] ).to include "message"=>"foo" 64 | end 65 | end 66 | 67 | end 68 | end 69 | -------------------------------------------------------------------------------- /spec/integration/outputs/delete_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | require "logstash/outputs/elasticsearch" 3 | 4 | 5 | 
describe "Versioned delete", :integration => true do 6 | require "logstash/outputs/elasticsearch" 7 | 8 | let(:es) { get_client } 9 | 10 | before :each do 11 | # Delete all templates first. 12 | # Clean ES of data before we start. 13 | es.indices.delete_template(:name => "*") 14 | # This can fail if there are no indexes, ignore failure. 15 | es.indices.delete(:index => "*") rescue nil 16 | es.indices.refresh 17 | end 18 | 19 | context "when delete only" do 20 | subject { LogStash::Outputs::ElasticSearch.new(settings) } 21 | 22 | before do 23 | subject.register 24 | end 25 | 26 | let(:settings) do 27 | { 28 | "manage_template" => true, 29 | "index" => "logstash-delete", 30 | "template_overwrite" => true, 31 | "hosts" => get_host_port(), 32 | "document_id" => "%{my_id}", 33 | "version" => "%{my_version}", 34 | "version_type" => "external", 35 | "action" => "%{my_action}" 36 | } 37 | end 38 | 39 | it "should ignore non-monotonic external version updates" do 40 | id = "ev2" 41 | subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)]) 42 | r = es.get(:index => 'logstash-delete', :id => id, :refresh => true) 43 | expect(r['_version']).to eq(99) 44 | expect(r['_source']['message']).to eq('foo') 45 | 46 | subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 98)]) 47 | r2 = es.get(:index => 'logstash-delete', :id => id, :refresh => true) 48 | expect(r2['_version']).to eq(99) 49 | expect(r2['_source']['message']).to eq('foo') 50 | end 51 | 52 | it "should commit monotonic external version updates" do 53 | id = "ev3" 54 | subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)]) 55 | r = es.get(:index => 'logstash-delete', :id => id, :refresh => true) 56 | expect(r['_version']).to eq(99) 57 | expect(r['_source']['message']).to eq('foo') 58 | 59 | 
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 100)]) 60 | expect { es.get(:index => 'logstash-delete', :id => id, :refresh => true) }.to raise_error(get_expected_error_class) 61 | end 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /spec/integration/outputs/index_version_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | require "logstash/outputs/elasticsearch" 3 | 4 | describe "Versioned indexing", :integration => true do 5 | require "logstash/outputs/elasticsearch" 6 | 7 | let(:es) { get_client } 8 | 9 | before :each do 10 | # Delete all templates first. 11 | # Clean ES of data before we start. 12 | es.indices.delete_template(:name => "*") 13 | # This can fail if there are no indexes, ignore failure. 14 | es.indices.delete(:index => "*") rescue nil 15 | es.indices.refresh 16 | end 17 | 18 | context "when index only" do 19 | subject { LogStash::Outputs::ElasticSearch.new(settings) } 20 | 21 | before do 22 | subject.register 23 | end 24 | 25 | describe "unversioned output" do 26 | let(:settings) do 27 | { 28 | "manage_template" => true, 29 | "index" => "logstash-index", 30 | "template_overwrite" => true, 31 | "hosts" => get_host_port(), 32 | "action" => "index", 33 | "document_id" => "%{my_id}" 34 | } 35 | end 36 | 37 | it "should default to ES version" do 38 | subject.multi_receive([LogStash::Event.new("my_id" => "123", "message" => "foo")]) 39 | r = es.get(:index => 'logstash-index', :id => '123', :refresh => true) 40 | expect(r["_version"]).to eq(1) 41 | expect(r["_source"]["message"]).to eq('foo') 42 | subject.multi_receive([LogStash::Event.new("my_id" => "123", "message" => "foobar")]) 43 | r2 = es.get(:index => 'logstash-index', :id => '123', :refresh => true) 44 | expect(r2["_version"]).to eq(2) 45 | expect(r2["_source"]["message"]).to 
eq('foobar') 46 | end 47 | end 48 | 49 | describe "versioned output" do 50 | let(:settings) do 51 | { 52 | "manage_template" => true, 53 | "index" => "logstash-index", 54 | "template_overwrite" => true, 55 | "hosts" => get_host_port(), 56 | "action" => "index", 57 | "document_id" => "%{my_id}", 58 | "version" => "%{my_version}", 59 | "version_type" => "external", 60 | } 61 | end 62 | 63 | it "should respect the external version" do 64 | id = "ev1" 65 | subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")]) 66 | r = es.get(:index => 'logstash-index', :id => id, :refresh => true) 67 | expect(r["_version"]).to eq(99) 68 | expect(r["_source"]["message"]).to eq('foo') 69 | end 70 | 71 | it "should ignore non-monotonic external version updates" do 72 | id = "ev2" 73 | subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")]) 74 | r = es.get(:index => 'logstash-index', :id => id, :refresh => true) 75 | expect(r["_version"]).to eq(99) 76 | expect(r["_source"]["message"]).to eq('foo') 77 | 78 | subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "98", "message" => "foo")]) 79 | r2 = es.get(:index => 'logstash-index', :id => id, :refresh => true) 80 | expect(r2["_version"]).to eq(99) 81 | expect(r2["_source"]["message"]).to eq('foo') 82 | end 83 | 84 | it "should commit monotonic external version updates" do 85 | id = "ev3" 86 | subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")]) 87 | r = es.get(:index => 'logstash-index', :id => id, :refresh => true) 88 | expect(r["_version"]).to eq(99) 89 | expect(r["_source"]["message"]).to eq('foo') 90 | 91 | subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "100", "message" => "foo")]) 92 | r2 = es.get(:index => 'logstash-index', :id => id, :refresh => true) 93 | expect(r2["_version"]).to eq(100) 94 | expect(r2["_source"]["message"]).to eq('foo') 95 | 
end 96 | end 97 | end 98 | end 99 | -------------------------------------------------------------------------------- /spec/integration/outputs/ingest_pipeline_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | 3 | describe "Ingest pipeline execution behavior", :integration => true do 4 | subject! do 5 | require "logstash/outputs/elasticsearch" 6 | settings = { 7 | "hosts" => "#{get_host_port()}", 8 | "pipeline" => "apache-logs", 9 | "data_stream" => 'false', 10 | "ecs_compatibility" => "disabled", # specs are tightly tied to non-ECS defaults 11 | } 12 | next LogStash::Outputs::ElasticSearch.new(settings) 13 | end 14 | 15 | let(:http_client) { Manticore::Client.new } 16 | let(:ingest_url) { "http://#{get_host_port()}/_ingest/pipeline/apache-logs" } 17 | let(:apache_logs_pipeline) { ' 18 | { 19 | "description" : "Pipeline to parse Apache logs", 20 | "processors" : [ 21 | { 22 | "grok": { 23 | "field": "message", 24 | "patterns": ["%{COMBINEDAPACHELOG}"] 25 | } 26 | } 27 | ] 28 | }' 29 | } 30 | 31 | before :each do 32 | # Delete all templates first. 33 | require "elasticsearch" 34 | 35 | # Clean ES of data before we start. 36 | @es = get_client 37 | @es.indices.delete_template(:name => "*") 38 | 39 | # This can fail if there are no indexes, ignore failure. 
40 | @es.indices.delete(:index => "*") rescue nil 41 | 42 | # delete existing ingest pipeline 43 | http_client.delete(ingest_url).call 44 | 45 | # register pipeline 46 | http_client.put(ingest_url, :body => apache_logs_pipeline, :headers => {"Content-Type" => "application/json" }).call 47 | 48 | #TODO: Use esclient 49 | #@es.ingest.put_pipeline :id => 'apache_pipeline', :body => pipeline_defintion 50 | 51 | subject.register 52 | subject.multi_receive([LogStash::Event.new("message" => '183.60.215.50 - - [01/Jun/2015:18:00:00 +0000] "GET /scripts/netcat-webserver HTTP/1.1" 200 182 "-" "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)"')]) 53 | @es.indices.refresh 54 | 55 | #Wait or fail until everything's indexed. 56 | Stud::try(10.times) do 57 | r = @es.search(index: 'logstash-*') 58 | expect(r).to have_hits(1) 59 | sleep(0.1) 60 | end 61 | end 62 | 63 | it "indexes using the proper pipeline" do 64 | results = @es.search(:index => 'logstash-*', :q => "message:\"netcat\"") 65 | expect(results).to have_hits(1) 66 | expect(results["hits"]["hits"][0]["_source"]["response"]).to eq("200") 67 | expect(results["hits"]["hits"][0]["_source"]["bytes"]).to eq("182") 68 | expect(results["hits"]["hits"][0]["_source"]["verb"]).to eq("GET") 69 | expect(results["hits"]["hits"][0]["_source"]["request"]).to eq("/scripts/netcat-webserver") 70 | expect(results["hits"]["hits"][0]["_source"]["auth"]).to eq("-") 71 | expect(results["hits"]["hits"][0]["_source"]["ident"]).to eq("-") 72 | expect(results["hits"]["hits"][0]["_source"]["clientip"]).to eq("183.60.215.50") 73 | expect(results["hits"]["hits"][0]["_source"]["junkfieldaaaa"]).to eq(nil) 74 | end 75 | end 76 | -------------------------------------------------------------------------------- /spec/integration/outputs/metrics_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | 3 | describe "metrics", :integration 
=> true do 4 | subject! do 5 | require "logstash/outputs/elasticsearch" 6 | settings = { 7 | "manage_template" => false, 8 | "hosts" => "#{get_host_port()}" 9 | } 10 | plugin = LogStash::Outputs::ElasticSearch.new(settings) 11 | end 12 | 13 | let(:metric) { subject.metric } 14 | let(:bulk_request_metrics) { subject.instance_variable_get(:@bulk_request_metrics) } 15 | let(:document_level_metrics) { subject.instance_variable_get(:@document_level_metrics) } 16 | 17 | before :each do 18 | require "elasticsearch" 19 | 20 | # Clean ES of data before we start. 21 | @es = get_client 22 | clean(@es) 23 | subject.register 24 | end 25 | 26 | context "after a succesful bulk insert" do 27 | let(:bulk) { [ 28 | LogStash::Event.new("message" => "sample message here"), 29 | LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }), 30 | LogStash::Event.new("somevalue" => 100), 31 | LogStash::Event.new("somevalue" => 10), 32 | LogStash::Event.new("somevalue" => 1), 33 | LogStash::Event.new("country" => "us"), 34 | LogStash::Event.new("country" => "at"), 35 | LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] }) 36 | ]} 37 | 38 | it "increases successful bulk request metric" do 39 | expect(bulk_request_metrics).to receive(:increment).with(:successes).once 40 | subject.multi_receive(bulk) 41 | end 42 | 43 | it "increases number of successful inserted documents" do 44 | expect(document_level_metrics).to receive(:increment).with(:successes, bulk.size).once 45 | subject.multi_receive(bulk) 46 | end 47 | end 48 | 49 | context "after a bulk insert that generates errors" do 50 | let(:bulk) { [ 51 | LogStash::Event.new("message" => "sample message here"), 52 | LogStash::Event.new("message" => { "message" => "sample nested message here" }), 53 | ]} 54 | it "increases bulk request with error metric" do 55 | expect(bulk_request_metrics).to receive(:increment).with(:with_errors).once 56 | expect(bulk_request_metrics).to_not receive(:increment).with(:successes) 
57 | subject.multi_receive(bulk) 58 | end 59 | 60 | it "increases number of successful and non retryable documents" do 61 | expect(document_level_metrics).to receive(:increment).with(:dlq_routed).once 62 | expect(document_level_metrics).to receive(:increment).with(:successes).once 63 | subject.multi_receive(bulk) 64 | end 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /spec/integration/outputs/no_es_on_startup_spec.rb: -------------------------------------------------------------------------------- 1 | require "logstash/outputs/elasticsearch" 2 | require_relative "../../../spec/es_spec_helper" 3 | 4 | describe "elasticsearch is down on startup", :integration => true do 5 | let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) } 6 | let(:event2) { LogStash::Event.new("message" => "a") } 7 | 8 | subject { 9 | LogStash::Outputs::ElasticSearch.new({ 10 | "manage_template" => true, 11 | "index" => "logstash-2014.11.17", 12 | "template_overwrite" => true, 13 | "hosts" => get_host_port(), 14 | "retry_max_interval" => 64, 15 | "retry_initial_interval" => 2, 16 | 'ecs_compatibility' => 'disabled' 17 | }) 18 | } 19 | 20 | before :each do 21 | # Delete all templates first. 22 | allow(Stud).to receive(:stoppable_sleep) 23 | 24 | # Clean ES of data before we start. 
25 | @es = get_client 26 | @es.indices.delete_template(:name => "*") 27 | @es.indices.delete(:index => "*") 28 | @es.indices.refresh 29 | end 30 | 31 | after :each do 32 | subject.close 33 | end 34 | 35 | it 'should ingest events when Elasticsearch recovers before documents are sent' do 36 | allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_root_path).with(any_args).and_raise( 37 | ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError.new StandardError.new("TEST: before docs are sent"), 'http://test.es/' 38 | ) 39 | subject.register 40 | allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_root_path).with(any_args).and_call_original 41 | subject.multi_receive([event1, event2]) 42 | @es.indices.refresh 43 | r = @es.search(index: 'logstash-*') 44 | expect(r).to have_hits(2) 45 | end 46 | 47 | it 'should ingest events when Elasticsearch recovers after documents are sent' do 48 | allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_root_path).with(any_args).and_raise( 49 | ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError.new StandardError.new("TEST: after docs are sent"), 'http://test.es/' 50 | ) 51 | subject.register 52 | Thread.new do 53 | sleep 4 54 | allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_root_path).with(any_args).and_call_original 55 | end 56 | subject.multi_receive([event1, event2]) 57 | @es.indices.refresh 58 | r = @es.search(index: 'logstash-*') 59 | expect(r).to have_hits(2) 60 | end 61 | 62 | it 'should get cluster_uuid when Elasticsearch recovers from license check failure' do 63 | allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_license).and_raise( 64 | ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError.new StandardError.new("TEST: docs are sent"), 'http://test.es/_license' 65 | ) 66 | 
subject.register 67 | Thread.new do 68 | sleep 4 69 | allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_license).and_call_original 70 | end 71 | subject.multi_receive([event1, event2]) 72 | @es.indices.refresh 73 | r = @es.search(index: 'logstash-*') 74 | expect(r).to have_hits(2) 75 | expect(subject.plugin_metadata.get(:cluster_uuid)).not_to be_empty 76 | expect(subject.plugin_metadata.get(:cluster_uuid)).not_to eq("_na_") 77 | end 78 | end 79 | -------------------------------------------------------------------------------- /spec/integration/outputs/painless_update_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | 3 | describe "Update actions using painless scripts", :integration => true, :update_tests => 'painless' do 4 | require "logstash/outputs/elasticsearch" 5 | 6 | def get_es_output( options={} ) 7 | settings = { 8 | "manage_template" => true, 9 | "index" => "logstash-update", 10 | "template_overwrite" => true, 11 | "hosts" => get_host_port(), 12 | "action" => "update" 13 | } 14 | LogStash::Outputs::ElasticSearch.new(settings.merge!(options)) 15 | end 16 | 17 | before :each do 18 | @es = get_client 19 | # Delete all templates first. 20 | # Clean ES of data before we start. 21 | @es.indices.delete_template(:name => "*") 22 | # This can fail if there are no indexes, ignore failure. 
23 | @es.indices.delete(:index => "*") rescue nil 24 | @es.index( 25 | { 26 | :index => 'logstash-update', 27 | :id => '123', 28 | :body => { :message => 'Test', :counter => 1 }, 29 | :refresh => true 30 | }) 31 | @es.indices.refresh 32 | end 33 | 34 | context "scripted updates" do 35 | context 'with an indexed script' do 36 | it "should increment a counter with event/doc 'count' variable with indexed script" do 37 | @es.perform_request(:put, "_scripts/indexed_update", {}, {"script" => {"source" => "ctx._source.counter += params.event.count", "lang" => "painless"}}) 38 | 39 | plugin_parameters = { 40 | 'document_id' => "123", 41 | 'script' => 'indexed_update', 42 | 'script_type' => 'indexed' 43 | } 44 | 45 | plugin_parameters.merge!('script_lang' => '') 46 | 47 | subject = get_es_output(plugin_parameters) 48 | subject.register 49 | subject.multi_receive([LogStash::Event.new("count" => 4 )]) 50 | r = @es.get(:index => 'logstash-update', :id => "123", :refresh => true) 51 | expect(r["_source"]["counter"]).to eq(5) 52 | end 53 | end 54 | end 55 | 56 | context "when update with upsert" do 57 | it "should create new documents with provided upsert" do 58 | subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' }) 59 | subject.register 60 | subject.multi_receive([LogStash::Event.new("message" => "sample message here")]) 61 | r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true) 62 | expect(r["_source"]["message"]).to eq('upsert message') 63 | end 64 | 65 | it "should create new documents with event/doc as upsert" do 66 | subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true }) 67 | subject.register 68 | subject.multi_receive([LogStash::Event.new("message" => "sample message here")]) 69 | r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true) 70 | expect(r["_source"]["message"]).to eq('sample message here') 71 | end 72 | 73 | it "should fail on documents with event/doc as upsert 
at external version" do 74 | subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true, 'version' => 999, "version_type" => "external" }) 75 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 76 | end 77 | end 78 | 79 | context "updates with scripted upsert" do 80 | context 'with an inline script' do 81 | it "should create new documents with upsert content" do 82 | subject = get_es_output({ 'document_id' => "456", 'script' => 'ctx._source.counter = params.event.counter', 'upsert' => '{"message": "upsert message"}', 'script_type' => 'inline' }) 83 | subject.register 84 | 85 | subject.multi_receive([LogStash::Event.new("message" => "sample message here")]) 86 | r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true) 87 | expect(r["_source"]["message"]).to eq('upsert message') 88 | end 89 | 90 | it "should create new documents with event/doc as script params" do 91 | subject = get_es_output({ 'document_id' => "456", 'script' => 'ctx._source.counter = params.event.counter', 'scripted_upsert' => true, 'script_type' => 'inline' }) 92 | subject.register 93 | subject.multi_receive([LogStash::Event.new("counter" => 1)]) 94 | @es.indices.refresh 95 | r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true) 96 | expect(r["_source"]["counter"]).to eq(1) 97 | end 98 | end 99 | end 100 | end 101 | -------------------------------------------------------------------------------- /spec/integration/outputs/parent_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | require "logstash/outputs/elasticsearch" 3 | 4 | describe "join type field", :integration => true do 5 | 6 | shared_examples "a join field based parent indexer" do 7 | let(:index) { 10.times.collect { rand(10).to_s }.join("") } 8 | 9 | let(:type) { "_doc" } 10 | 11 | let(:event_count) { 10000 + rand(500) } 12 | let(:parent) { "not_implemented" } 13 | 
let(:config) { "not_implemented" } 14 | let(:parent_id) { "test" } 15 | let(:join_field) { "join_field" } 16 | let(:parent_relation) { "parent_type" } 17 | let(:child_relation) { "child_type" } 18 | let(:default_headers) { 19 | {"Content-Type" => "application/json"} 20 | } 21 | subject { LogStash::Outputs::ElasticSearch.new(config) } 22 | 23 | before do 24 | # Add mapping and a parent document 25 | index_url = "http://#{get_host_port()}/#{index}" 26 | 27 | properties = { 28 | "properties" => { 29 | join_field => { 30 | "type" => "join", 31 | "relations" => { parent_relation => child_relation } 32 | } 33 | } 34 | } 35 | 36 | mapping = { "mappings" => properties} 37 | 38 | Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call 39 | pdoc = { "message" => "ohayo", join_field => parent_relation } 40 | Manticore.put("#{index_url}/#{type}/#{parent_id}", {:body => pdoc.to_json, :headers => default_headers}).call 41 | 42 | subject.register 43 | subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => parent_id, "message" => "Hello World!", join_field => child_relation) }) 44 | end 45 | 46 | 47 | it "ships events" do 48 | index_url = "http://#{get_host_port()}/#{index}" 49 | 50 | Manticore.post("#{index_url}/_refresh").call 51 | 52 | # Wait until all events are available. 
53 | Stud::try(10.times) do 54 | query = { "query" => { "has_parent" => { "parent_type" => parent_relation, "query" => { "match_all" => { } } } } } 55 | response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers}) 56 | data = response.body 57 | result = LogStash::Json.load(data) 58 | cur_count = result["count"] 59 | expect(cur_count).to eq(event_count) 60 | end 61 | end 62 | end 63 | 64 | describe "(http protocol) index events with static parent" do 65 | it_behaves_like 'a join field based parent indexer' do 66 | let(:config) { 67 | { 68 | "hosts" => get_host_port, 69 | "index" => index, 70 | "parent" => parent_id, 71 | "document_type" => type, 72 | "join_field" => join_field, 73 | "manage_template" => false 74 | } 75 | } 76 | end 77 | end 78 | 79 | describe "(http_protocol) index events with fieldref in parent value" do 80 | it_behaves_like 'a join field based parent indexer' do 81 | let(:config) { 82 | { 83 | "hosts" => get_host_port, 84 | "index" => index, 85 | "parent" => "%{link_to}", 86 | "document_type" => type, 87 | "join_field" => join_field, 88 | "manage_template" => false 89 | } 90 | } 91 | end 92 | end 93 | end 94 | -------------------------------------------------------------------------------- /spec/integration/outputs/retry_spec.rb: -------------------------------------------------------------------------------- 1 | require "logstash/outputs/elasticsearch" 2 | require_relative "../../../spec/es_spec_helper" 3 | 4 | describe "failures in bulk class expected behavior", :integration => true do 5 | let(:template) { '{"template" : "not important, will be updated by :index"}' } 6 | let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) } 7 | let(:action1) do 8 | ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event1.to_hash]) 9 | end 10 | let(:event2) { 
LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0] }, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) } 11 | let(:action2) do 12 | ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event2.to_hash]) 13 | end 14 | let(:invalid_event) { LogStash::Event.new("geoip" => { "location" => "notlatlon" }, "@timestamp" => "2014-11-17T20:37:17.223Z") } 15 | 16 | def mock_actions_with_response(*resp) 17 | raise ArgumentError, "Cannot mock actions until subject is registered and has a client!" unless subject.client 18 | 19 | expanded_responses = resp.map do |resp| 20 | items = resp["statuses"] && resp["statuses"].map do |status| 21 | {"create" => {"status" => status, "error" => "Error for #{status}"}} 22 | end 23 | 24 | { 25 | "errors" => resp["errors"], 26 | "items" => items 27 | } 28 | end 29 | 30 | allow(subject.client).to receive(:bulk).and_return(*expanded_responses) 31 | end 32 | 33 | subject! do 34 | settings = { 35 | "manage_template" => true, 36 | "index" => "logstash-2014.11.17", 37 | "template_overwrite" => true, 38 | "hosts" => get_host_port(), 39 | "retry_max_interval" => 64, 40 | "retry_initial_interval" => 2, 41 | "ecs_compatibility" => "disabled", # specs are tightly tied to non-ECS defaults 42 | } 43 | next LogStash::Outputs::ElasticSearch.new(settings) 44 | end 45 | 46 | before :each do 47 | # Delete all templates first. 48 | require "elasticsearch" 49 | allow(Stud).to receive(:stoppable_sleep) 50 | 51 | # Clean ES of data before we start. 
52 | @es = get_client 53 | @es.indices.delete_template(:name => "*") 54 | @es.indices.delete(:index => "*") 55 | @es.indices.refresh 56 | end 57 | 58 | after :each do 59 | subject.close 60 | end 61 | 62 | it "should retry exactly once if all bulk actions are successful" do 63 | expect(subject).to receive(:submit).with([action1, action2]).once.and_call_original 64 | subject.register 65 | mock_actions_with_response({"errors" => false}) 66 | subject.multi_receive([event1, event2]) 67 | end 68 | 69 | it "retry exceptions within the submit body" do 70 | call_count = 0 71 | subject.register 72 | 73 | expect(subject.client).to receive(:bulk).with(anything).exactly(3).times do 74 | if (call_count += 1) <= 2 75 | raise "error first two times" 76 | else 77 | {"errors" => false} 78 | end 79 | end 80 | 81 | subject.multi_receive([event1]) 82 | end 83 | 84 | it "should retry actions with response status of 503" do expect(subject).to receive(:submit).with([action1, action1, action1, action2]).ordered.once.and_call_original 85 | expect(subject).to receive(:submit).with([action1, action2]).ordered.once.and_call_original 86 | expect(subject).to receive(:submit).with([action2]).ordered.once.and_call_original 87 | 88 | subject.register 89 | mock_actions_with_response({"errors" => true, "statuses" => [200, 200, 503, 503]}, 90 | {"errors" => true, "statuses" => [200, 503]}, 91 | {"errors" => false}) 92 | 93 | subject.multi_receive([event1, event1, event1, event2]) 94 | end 95 | 96 | retryable_codes = [429, 502, 503] 97 | 98 | retryable_codes.each do |code| 99 | it "should retry actions with response status of #{code}" do 100 | subject.register 101 | 102 | mock_actions_with_response({"errors" => true, "statuses" => [code]}, 103 | {"errors" => false}) 104 | expect(subject).to receive(:submit).with([action1]).twice.and_call_original 105 | 106 | subject.multi_receive([event1]) 107 | end 108 | end 109 | 110 | it "should retry an event infinitely until a non retryable status occurs" do 111 | 
expect(subject).to receive(:submit).with([action1]).exactly(6).times.and_call_original 112 | subject.register 113 | 114 | mock_actions_with_response({"errors" => true, "statuses" => [429]}, 115 | {"errors" => true, "statuses" => [429]}, 116 | {"errors" => true, "statuses" => [429]}, 117 | {"errors" => true, "statuses" => [429]}, 118 | {"errors" => true, "statuses" => [429]}, 119 | {"errors" => true, "statuses" => [400]}) 120 | 121 | subject.multi_receive([event1]) 122 | end 123 | 124 | it "should sleep for an exponentially increasing amount of time on each retry, capped by the max" do 125 | [2, 4, 8, 16, 32, 64, 64].each_with_index do |interval,i| 126 | expect(Stud).to receive(:stoppable_sleep).with(interval).ordered 127 | end 128 | 129 | subject.register 130 | 131 | mock_actions_with_response({"errors" => true, "statuses" => [429]}, 132 | {"errors" => true, "statuses" => [429]}, 133 | {"errors" => true, "statuses" => [429]}, 134 | {"errors" => true, "statuses" => [429]}, 135 | {"errors" => true, "statuses" => [429]}, 136 | {"errors" => true, "statuses" => [429]}, 137 | {"errors" => true, "statuses" => [429]}, 138 | {"errors" => true, "statuses" => [400]}) 139 | 140 | subject.multi_receive([event1]) 141 | end 142 | 143 | it "non-retryable errors like mapping errors (400) should be dropped and not be retried (unfortunately)" do 144 | subject.register 145 | expect(subject).to receive(:submit).once.and_call_original 146 | subject.multi_receive([invalid_event]) 147 | subject.close 148 | 149 | @es.indices.refresh 150 | r = @es.search(index: 'logstash-*') 151 | expect(r).to have_hits(0) 152 | end 153 | 154 | it "successful requests should not be appended to retry queue" do 155 | expect(subject).to receive(:submit).once.and_call_original 156 | 157 | subject.register 158 | subject.multi_receive([event1]) 159 | subject.close 160 | @es.indices.refresh 161 | r = @es.search(index: 'logstash-*') 162 | expect(r).to have_hits(1) 163 | end 164 | 165 | it "should only index proper 
events" do 166 | subject.register 167 | subject.multi_receive([invalid_event, event1]) 168 | subject.close 169 | 170 | @es.indices.refresh 171 | r = @es.search(index: 'logstash-*') 172 | expect(r).to have_hits(1) 173 | end 174 | end 175 | -------------------------------------------------------------------------------- /spec/integration/outputs/routing_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | 3 | shared_examples "a routing indexer" do 4 | let(:index) { 10.times.collect { rand(10).to_s }.join("") } 5 | let(:type) { 10.times.collect { rand(10).to_s }.join("") } 6 | let(:event_count) { 10000 + rand(500) } 7 | let(:routing) { "not_implemented" } 8 | let(:config) { "not_implemented" } 9 | subject { LogStash::Outputs::ElasticSearch.new(config) } 10 | 11 | before do 12 | subject.register 13 | event_count.times do 14 | subject.multi_receive([LogStash::Event.new("message" => "test", "type" => type)]) 15 | end 16 | end 17 | 18 | 19 | it "ships events" do 20 | index_url = "http://#{get_host_port()}/#{index}" 21 | 22 | client = Manticore::Client.new 23 | client.post("#{index_url}/_refresh").call 24 | 25 | # Wait until all events are available. 
26 | Stud::try(10.times) do 27 | data = "" 28 | 29 | response = client.get("#{index_url}/_count?q=*&routing=#{routing}").call 30 | result = LogStash::Json.load(response.body) 31 | cur_count = result["count"] 32 | expect(cur_count).to eq(event_count) 33 | end 34 | end 35 | end 36 | 37 | describe "(http protocol) index events with static routing", :integration => true do 38 | it_behaves_like 'a routing indexer' do 39 | let(:routing) { "test" } 40 | let(:config) { 41 | { 42 | "hosts" => get_host_port, 43 | "index" => index, 44 | "routing" => routing 45 | } 46 | } 47 | end 48 | end 49 | 50 | describe "(http_protocol) index events with fieldref in routing value", :integration => true do 51 | it_behaves_like 'a routing indexer' do 52 | let(:routing) { "test" } 53 | let(:config) { 54 | { 55 | "hosts" => get_host_port, 56 | "index" => index, 57 | "routing" => "%{message}" 58 | } 59 | } 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /spec/integration/outputs/sniffer_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | require "logstash/outputs/elasticsearch/http_client" 3 | require "json" 4 | require "socket" 5 | 6 | describe "pool sniffer", :integration => true do 7 | let(:logger) { Cabin::Channel.get } 8 | let(:adapter) { LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter.new(logger, {}) } 9 | let(:es_host) { get_host_port.split(":").first } 10 | let(:es_port) { get_host_port.split(":").last } 11 | let(:es_ip) { IPSocket.getaddress(es_host) } 12 | let(:initial_urls) { [::LogStash::Util::SafeURI.new("http://#{get_host_port}")] } 13 | let(:options) do 14 | { 15 | :resurrect_delay => 2, # Shorten the delay a bit to speed up tests 16 | :url_normalizer => proc {|u| u}, 17 | :metric => ::LogStash::Instrument::NullMetric.new(:dummy).namespace(:alsodummy) 18 | } 19 | end 20 | 21 | subject { 
LogStash::Outputs::ElasticSearch::HttpClient::Pool.new(logger, adapter, initial_urls, options) } 22 | 23 | describe("Simple sniff parsing") do 24 | before(:each) { subject.start } 25 | 26 | context "with single node" do 27 | it "should execute a sniff without error" do 28 | expect { subject.check_sniff }.not_to raise_error 29 | end 30 | 31 | it "should return single sniff URL" do 32 | uris = subject.check_sniff 33 | 34 | expect(uris.size).to eq(1) 35 | end 36 | end 37 | end 38 | 39 | describe("Complex sniff parsing") do 40 | before(:each) do 41 | response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/7x.json")) 42 | allow(subject).to receive(:perform_request).and_return([nil, { version: "7.0" }, response_double]) 43 | subject.start 44 | end 45 | 46 | context "with mixed master-only, data-only, and data + master nodes" do 47 | it "should execute a sniff without error" do 48 | expect { subject.check_sniff }.not_to raise_error 49 | end 50 | 51 | it "should return the correct sniff URLs" do 52 | # ie. with the master-only node, and with the node name correctly set. 53 | uris = subject.check_sniff 54 | 55 | expect(uris).to include(::LogStash::Util::SafeURI.new("//dev-masterdata:9201"), ::LogStash::Util::SafeURI.new("//dev-data:9202")) 56 | end 57 | end 58 | end 59 | end 60 | -------------------------------------------------------------------------------- /spec/integration/outputs/templates_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | 3 | describe "index template expected behavior", :integration => true do 4 | let(:ecs_compatibility) { fail('spec group does not define `ecs_compatibility`!') } 5 | 6 | subject! 
do 7 | require "logstash/outputs/elasticsearch" 8 | allow_any_instance_of(LogStash::Outputs::ElasticSearch).to receive(:ecs_compatibility).and_return(ecs_compatibility) 9 | 10 | settings = { 11 | "manage_template" => true, 12 | "template_overwrite" => true, 13 | "hosts" => "#{get_host_port()}" 14 | } 15 | next LogStash::Outputs::ElasticSearch.new(settings) 16 | end 17 | 18 | let(:elasticsearch_client) { get_client } 19 | 20 | before(:each) do 21 | # delete indices and templates 22 | require "elasticsearch" 23 | 24 | elasticsearch_client.indices.delete_template(:name => '*') 25 | # This can fail if there are no indexes, ignore failure. 26 | elasticsearch_client.indices.delete(:index => '*') rescue puts("DELETE INDICES ERROR: #{$!}") 27 | # Since we are pinned to ES client 7.x, we need to delete data streams the hard way... 28 | elasticsearch_client.perform_request("DELETE", "/_data_stream/*") rescue puts("DELETE DATA STREAMS ERROR: #{$!}") 29 | end 30 | 31 | context 'with ecs_compatibility => disabled' do 32 | let(:ecs_compatibility) { :disabled } 33 | before :each do 34 | @es = elasticsearch_client # cache as ivar for tests... 35 | 36 | subject.register 37 | 38 | subject.multi_receive([ 39 | LogStash::Event.new("message" => "sample message here"), 40 | LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }), 41 | LogStash::Event.new("somevalue" => 100), 42 | LogStash::Event.new("somevalue" => 10), 43 | LogStash::Event.new("somevalue" => 1), 44 | LogStash::Event.new("country" => "us"), 45 | LogStash::Event.new("country" => "at"), 46 | LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] }) 47 | ]) 48 | 49 | @es.indices.refresh 50 | 51 | # Wait or fail until everything's indexed. 
52 | Stud::try(20.times) do 53 | r = @es.search(index: 'logstash*') 54 | expect(r).to have_hits(8) 55 | end 56 | end 57 | 58 | it "permits phrase searching on string fields" do 59 | results = @es.search(index: 'logstash*', q: "message:\"sample message\"") 60 | expect(results).to have_hits(1) 61 | expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here") 62 | end 63 | 64 | it "numbers dynamically map to a numeric type and permit range queries" do 65 | results = @es.search(index: 'logstash*', q: "somevalue:[5 TO 105]") 66 | expect(results).to have_hits(2) 67 | 68 | values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] } 69 | expect(values).to include(10) 70 | expect(values).to include(100) 71 | expect(values).to_not include(1) 72 | end 73 | 74 | it "does not create .keyword field for top-level message field" do 75 | results = @es.search(index: 'logstash*', q: "message.keyword:\"sample message here\"") 76 | expect(results).to have_hits(0) 77 | end 78 | 79 | it "creates .keyword field for nested message fields" do 80 | results = @es.search(index: 'logstash*', q: "somemessage.message.keyword:\"sample nested message here\"") 81 | expect(results).to have_hits(1) 82 | end 83 | 84 | it "creates .keyword field from any string field which is not_analyzed" do 85 | results = @es.search(index: 'logstash*', q: "country.keyword:\"us\"") 86 | expect(results).to have_hits(1) 87 | expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us") 88 | 89 | # partial or terms should not work. 
90 | results = @es.search(index: 'logstash*', q: "country.keyword:\"u\"") 91 | expect(results).to have_hits(0) 92 | end 93 | 94 | it "make [geoip][location] a geo_point" do 95 | expect(field_properties_from_template("logstash", "geoip")["location"]["type"]).to eq("geo_point") 96 | end 97 | 98 | it "aggregate .keyword results correctly " do 99 | results = @es.search(index: 'logstash*', body: { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"] 100 | terms = results["buckets"].collect { |b| b["key"] } 101 | 102 | expect(terms).to include("us") 103 | 104 | # 'at' is a stopword, make sure stopwords are not ignored. 105 | expect(terms).to include("at") 106 | end 107 | end 108 | 109 | context 'with ECS enabled' do 110 | let(:ecs_compatibility) { :v1 } 111 | 112 | before(:each) do 113 | subject.register # should load template? 114 | subject.multi_receive([LogStash::Event.new("message" => "sample message here")]) 115 | end 116 | 117 | let(:elasticsearch_cluster_major_version) do 118 | elasticsearch_client.info&.dig("version", "number" )&.split('.')&.map(&:to_i)&.first 119 | end 120 | 121 | it 'loads the templates' do 122 | aggregate_failures do 123 | if elasticsearch_cluster_major_version >= 8 124 | # In ES 8+ we use the _index_template API 125 | expect(elasticsearch_client.indices.exists_index_template(name: 'ecs-logstash')).to be_truthy 126 | else 127 | # Otherwise, we used the legacy _template API 128 | expect(elasticsearch_client.indices.exists_template(name: 'ecs-logstash')).to be_truthy 129 | end 130 | end 131 | end 132 | end 133 | end 134 | -------------------------------------------------------------------------------- /spec/integration/outputs/unsupported_actions_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | 3 | describe "Unsupported actions testing...", :integration => true do 4 | require 
"logstash/outputs/elasticsearch" 5 | 6 | INDEX = "logstash-unsupported-actions-rejected" 7 | 8 | def get_es_output( options={} ) 9 | settings = { 10 | "manage_template" => true, 11 | "index" => INDEX, 12 | "template_overwrite" => true, 13 | "hosts" => get_host_port(), 14 | "action" => "%{action_field}", 15 | "document_id" => "%{doc_id}", 16 | "ecs_compatibility" => "disabled" 17 | } 18 | LogStash::Outputs::ElasticSearch.new(settings.merge!(options)) 19 | end 20 | 21 | before :each do 22 | @es = get_client 23 | # Delete all templates first. 24 | # Clean ES of data before we start. 25 | @es.indices.delete_template(:name => "*") 26 | # This can fail if there are no indexes, ignore failure. 27 | @es.indices.delete(:index => "*") rescue nil 28 | # index single doc for update purpose 29 | @es.index( 30 | { 31 | :index => INDEX, 32 | :id => '2', 33 | :body => { :message => 'Test to doc indexing', :counter => 1 }, 34 | :refresh => true 35 | } 36 | ) 37 | 38 | @es.index( 39 | { 40 | :index => INDEX, 41 | :id => '3', 42 | :body => { :message => 'Test to doc deletion', :counter => 2 }, 43 | :refresh => true 44 | } 45 | ) 46 | @es.indices.refresh 47 | end 48 | 49 | context "multiple actions include unsupported action" do 50 | let(:events) {[ 51 | LogStash::Event.new("action_field" => "index", "doc_id" => 1, "message"=> "hello"), 52 | LogStash::Event.new("action_field" => "update", "doc_id" => 2, "message"=> "hi"), 53 | LogStash::Event.new("action_field" => "delete", "doc_id" => 3), 54 | LogStash::Event.new("action_field" => "unsupported_action", "doc_id" => 4, "message"=> "world!") 55 | ]} 56 | 57 | it "should reject unsupported doc" do 58 | subject = get_es_output 59 | subject.register 60 | subject.multi_receive(events) 61 | 62 | index_or_update = proc do |event| 63 | action = event.get("action_field") 64 | action.eql?("index") || action.eql?("update") 65 | end 66 | 67 | indexed_events = events.select { |event| index_or_update.call(event) } 68 | rejected_events = 
events.select { |event| !index_or_update.call(event) } 69 | 70 | indexed_events.each do |event| 71 | response = @es.get(:index => INDEX, :id => event.get("doc_id"), :refresh => true) 72 | expect(response['_source']['message']).to eq(event.get("message")) 73 | end 74 | 75 | rejected_events.each do |event| 76 | expect {@es.get(:index => INDEX, :id => event.get("doc_id"), :refresh => true)}.to raise_error(get_expected_error_class) 77 | end 78 | end 79 | end 80 | end 81 | -------------------------------------------------------------------------------- /spec/integration/outputs/update_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative "../../../spec/es_spec_helper" 2 | 3 | describe "Update actions without scripts", :integration => true do 4 | require "logstash/outputs/elasticsearch" 5 | 6 | def get_es_output( options={} ) 7 | settings = { 8 | "manage_template" => true, 9 | "index" => "logstash-update", 10 | "template_overwrite" => true, 11 | "hosts" => get_host_port(), 12 | "action" => "update" 13 | } 14 | LogStash::Outputs::ElasticSearch.new(settings.merge!(options)) 15 | end 16 | 17 | before :each do 18 | @es = get_client 19 | # Delete all templates first. 20 | # Clean ES of data before we start. 21 | @es.indices.delete_template(:name => "*") 22 | # This can fail if there are no indexes, ignore failure. 
23 | @es.indices.delete(:index => "*") rescue nil 24 | @es.index( 25 | { 26 | :index => 'logstash-update', 27 | :id => '123', 28 | :body => { :message => 'Test', :counter => 1 }, 29 | :refresh => true 30 | } 31 | ) 32 | @es.indices.refresh 33 | end 34 | 35 | it "should fail without a document_id" do 36 | subject = get_es_output 37 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 38 | end 39 | 40 | context "when update only" do 41 | it "should not create new document" do 42 | subject = get_es_output({ 'document_id' => "456" } ) 43 | subject.register 44 | subject.multi_receive([LogStash::Event.new("message" => "sample message here")]) 45 | expect {@es.get(:index => 'logstash-update', :id => '456', :refresh => true)}.to raise_error(get_expected_error_class) 46 | end 47 | 48 | it "should update existing document" do 49 | subject = get_es_output({ 'document_id' => "123" }) 50 | subject.register 51 | subject.multi_receive([LogStash::Event.new("message" => "updated message here")]) 52 | r = @es.get(:index => 'logstash-update', :id => '123', :refresh => true) 53 | expect(r["_source"]["message"]).to eq('updated message here') 54 | end 55 | 56 | # The es ruby client treats the data field differently. 
Make sure this doesn't 57 | # raise an exception 58 | it "should update an existing document that has a 'data' field" do 59 | subject = get_es_output({ 'document_id' => "123" }) 60 | subject.register 61 | subject.multi_receive([LogStash::Event.new("data" => "updated message here", "message" => "foo")]) 62 | r = @es.get(:index => 'logstash-update', :id => '123', :refresh => true) 63 | expect(r["_source"]["data"]).to eq('updated message here') 64 | expect(r["_source"]["message"]).to eq('foo') 65 | end 66 | 67 | it "should allow default (internal) version" do 68 | subject = get_es_output({ 'document_id' => "123", "version" => "99" }) 69 | subject.register 70 | end 71 | 72 | it "should allow internal version" do 73 | subject = get_es_output({ 'document_id' => "123", "version" => "99", "version_type" => "internal" }) 74 | subject.register 75 | end 76 | 77 | it "should not allow external version" do 78 | subject = get_es_output({ 'document_id' => "123", "version" => "99", "version_type" => "external" }) 79 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 80 | end 81 | 82 | it "should not allow external_gt version" do 83 | subject = get_es_output({ 'document_id' => "123", "version" => "99", "version_type" => "external_gt" }) 84 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 85 | end 86 | 87 | it "should not allow external_gte version" do 88 | subject = get_es_output({ 'document_id' => "123", "version" => "99", "version_type" => "external_gte" }) 89 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 90 | end 91 | 92 | end 93 | 94 | context "when update with upsert" do 95 | it "should create new documents with provided upsert" do 96 | subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' }) 97 | subject.register 98 | subject.multi_receive([LogStash::Event.new("message" => "sample message here")]) 99 | r = @es.get(:index => 'logstash-update', :id => '456', 
:refresh => true) 100 | expect(r["_source"]["message"]).to eq('upsert message') 101 | end 102 | 103 | it "should create new documents with event/doc as upsert" do 104 | subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true }) 105 | subject.register 106 | subject.multi_receive([LogStash::Event.new("message" => "sample message here")]) 107 | r = @es.get(:index => 'logstash-update', :id => '456', :refresh => true) 108 | expect(r["_source"]["message"]).to eq('sample message here') 109 | end 110 | 111 | it "should fail on documents with event/doc as upsert at external version" do 112 | subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true, 'version' => 999, "version_type" => "external" }) 113 | expect { subject.register }.to raise_error(LogStash::ConfigurationError) 114 | end 115 | end 116 | end 117 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | require "logstash/devutils/rspec/spec_helper" 2 | 3 | require "logstash/outputs/elasticsearch" 4 | 5 | module LogStash::Outputs::ElasticSearch::SpecHelper 6 | end 7 | 8 | RSpec.configure do |config| 9 | config.include LogStash::Outputs::ElasticSearch::SpecHelper 10 | end 11 | 12 | # remove once plugin starts consuming elasticsearch-ruby v8 client 13 | def elastic_ruby_v8_client_available? 14 | Elasticsearch::Transport 15 | false 16 | rescue NameError # NameError: uninitialized constant Elasticsearch::Transport if Elastic Ruby client is not available 17 | true 18 | end -------------------------------------------------------------------------------- /spec/support/elasticsearch/api/actions/delete_ilm_policy.rb: -------------------------------------------------------------------------------- 1 | # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one 2 | # or more contributor license agreements. 
Licensed under the Elastic License; 3 | # you may not use this file except in compliance with the Elastic License. 4 | 5 | module Elasticsearch 6 | module API 7 | module Actions 8 | 9 | # Update the password of the specified user 10 | def delete_ilm_policy(arguments={}) 11 | method = HTTP_DELETE 12 | path = Utils.__pathify '_ilm/policy/', 13 | Utils.__escape(arguments[:name]) 14 | params = {} 15 | perform_request(method, path, params, nil).body 16 | end 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /spec/support/elasticsearch/api/actions/get_ilm_policy.rb: -------------------------------------------------------------------------------- 1 | # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one 2 | # or more contributor license agreements. Licensed under the Elastic License; 3 | # you may not use this file except in compliance with the Elastic License. 4 | 5 | module Elasticsearch 6 | module API 7 | module Actions 8 | 9 | # Retrieve the list of index lifecycle management policies 10 | def get_ilm_policy(arguments={}) 11 | method = HTTP_GET 12 | path = Utils.__pathify '_ilm/policy', Utils.__escape(arguments[:name]) 13 | params = {} 14 | perform_request(method, path, params, nil).body 15 | end 16 | end 17 | end 18 | end -------------------------------------------------------------------------------- /spec/support/elasticsearch/api/actions/put_ilm_policy.rb: -------------------------------------------------------------------------------- 1 | # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one 2 | # or more contributor license agreements. Licensed under the Elastic License; 3 | # you may not use this file except in compliance with the Elastic License. 
4 | 5 | module Elasticsearch 6 | module API 7 | module Actions 8 | 9 | # @option arguments [String] :name The name of the policy (*Required*) 10 | # @option arguments [Hash] :body The policy definition (*Required*) 11 | 12 | def put_ilm_policy(arguments={}) 13 | raise ArgumentError, "Required argument 'name' missing" unless arguments[:name] 14 | raise ArgumentError, "Required argument 'body' missing" unless arguments[:body] 15 | method = HTTP_PUT 16 | path = Utils.__pathify '_ilm/policy/', Utils.__escape(arguments[:name]) 17 | 18 | params = Utils.__validate_and_extract_params arguments 19 | 20 | body = arguments[:body] 21 | perform_request(method, path, params, body.to_json).body 22 | end 23 | end 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /spec/unit/http_client_builder_spec.rb: -------------------------------------------------------------------------------- 1 | require "logstash/devutils/rspec/spec_helper" 2 | require "logstash/outputs/elasticsearch" 3 | require "logstash/outputs/elasticsearch/http_client" 4 | require "logstash/outputs/elasticsearch/http_client_builder" 5 | 6 | describe LogStash::Outputs::ElasticSearch::HttpClientBuilder do 7 | describe "auth setup with url encodable passwords" do 8 | let(:klass) { LogStash::Outputs::ElasticSearch::HttpClientBuilder } 9 | let(:user) { "foo@bar"} 10 | let(:password) {"baz@blah" } 11 | let(:password_secured) do 12 | secured = double("password") 13 | allow(secured).to receive(:value).and_return(password) 14 | secured 15 | end 16 | let(:options) { {"user" => user, "password" => password} } 17 | let(:logger) { mock("logger") } 18 | let(:auth_setup) { klass.setup_basic_auth(double("logger"), {"user" => user, "password" => password_secured}) } 19 | 20 | it "should return the user escaped" do 21 | expect(auth_setup[:user]).to eql(CGI.escape(user)) 22 | end 23 | 24 | it "should return the password escaped" do 25 | expect(auth_setup[:password]).to 
eql(CGI.escape(password)) 26 | end 27 | end 28 | 29 | describe "customizing action paths" do 30 | let(:hosts) { [ ::LogStash::Util::SafeURI.new("http://localhost:9200") ] } 31 | let(:options) { {"hosts" => hosts } } 32 | let(:logger) { double("logger") } 33 | before :each do 34 | [:debug, :debug?, :info?, :info, :warn].each do |level| 35 | allow(logger).to receive(level) 36 | end 37 | end 38 | 39 | describe "bulk_path" do 40 | let (:filter_path) {"filter_path=errors,items.*.error,items.*.status"} 41 | 42 | shared_examples("filter_path added to bulk path appropriately") do 43 | it "sets the bulk_path option to the expected bulk path" do 44 | expect(described_class).to receive(:create_http_client) do |options| 45 | expect(options[:bulk_path]).to eq(expected_bulk_path) 46 | end 47 | described_class.build(logger, hosts, options) 48 | end 49 | end 50 | 51 | context "when setting bulk_path" do 52 | let(:bulk_path) { "/meh" } 53 | let(:options) { super().merge("bulk_path" => bulk_path) } 54 | 55 | context "when using path" do 56 | let(:options) { super().merge("path" => "/path") } 57 | let(:expected_bulk_path) { "#{bulk_path}?#{filter_path}" } 58 | 59 | it_behaves_like "filter_path added to bulk path appropriately" 60 | end 61 | 62 | context "when setting a filter path as first parameter" do 63 | let (:filter_path) {"filter_path=error"} 64 | let(:bulk_path) { "/meh?#{filter_path}&routing=true" } 65 | let(:expected_bulk_path) { bulk_path } 66 | 67 | it_behaves_like "filter_path added to bulk path appropriately" 68 | end 69 | 70 | context "when setting a filter path as second parameter" do 71 | let (:filter_path) {"filter_path=error"} 72 | let(:bulk_path) { "/meh?routing=true&#{filter_path}" } 73 | let(:expected_bulk_path) { bulk_path } 74 | 75 | it_behaves_like "filter_path added to bulk path appropriately" 76 | end 77 | 78 | context "when not using path" do 79 | let(:expected_bulk_path) { "#{bulk_path}?#{filter_path}"} 80 | 81 | it_behaves_like "filter_path added to bulk 
path appropriately" 82 | end 83 | end 84 | 85 | context "when not setting bulk_path" do 86 | 87 | context "when using path" do 88 | let(:path) { "/meh" } 89 | let(:expected_bulk_path) { "#{path}/_bulk?#{filter_path}"} 90 | let(:options) { super().merge("path" => path) } 91 | 92 | it_behaves_like "filter_path added to bulk path appropriately" 93 | end 94 | 95 | context "when not using path" do 96 | let(:expected_bulk_path) { "/_bulk?#{filter_path}"} 97 | 98 | it_behaves_like "filter_path added to bulk path appropriately" 99 | end 100 | end 101 | end 102 | 103 | describe "healthcheck_path" do 104 | context "when setting healthcheck_path" do 105 | let(:healthcheck_path) { "/meh" } 106 | let(:options) { super().merge("healthcheck_path" => healthcheck_path) } 107 | 108 | context "when using path" do 109 | let(:options) { super().merge("path" => "/path") } 110 | it "ignores the path setting" do 111 | expect(described_class).to receive(:create_http_client) do |options| 112 | expect(options[:healthcheck_path]).to eq(healthcheck_path) 113 | end 114 | described_class.build(logger, hosts, options) 115 | end 116 | end 117 | context "when not using path" do 118 | 119 | it "uses the healthcheck_path setting" do 120 | expect(described_class).to receive(:create_http_client) do |options| 121 | expect(options[:healthcheck_path]).to eq(healthcheck_path) 122 | end 123 | described_class.build(logger, hosts, options) 124 | end 125 | end 126 | end 127 | 128 | context "when not setting healthcheck_path" do 129 | 130 | context "when using path" do 131 | let(:path) { "/meh" } 132 | let(:options) { super().merge("path" => path) } 133 | it "sets healthcheck_path to path" do 134 | expect(described_class).to receive(:create_http_client) do |options| 135 | expect(options[:healthcheck_path]).to eq(path) 136 | end 137 | described_class.build(logger, hosts, options) 138 | end 139 | end 140 | 141 | context "when not using path" do 142 | it "sets the healthcheck_path to root" do 143 | 
expect(described_class).to receive(:create_http_client) do |options| 144 | expect(options[:healthcheck_path]).to eq("/") 145 | end 146 | described_class.build(logger, hosts, options) 147 | end 148 | end 149 | end 150 | end 151 | describe "sniffing_path" do 152 | context "when setting sniffing_path" do 153 | let(:sniffing_path) { "/meh" } 154 | let(:options) { super().merge("sniffing_path" => sniffing_path) } 155 | 156 | context "when using path" do 157 | let(:options) { super().merge("path" => "/path") } 158 | it "ignores the path setting" do 159 | expect(described_class).to receive(:create_http_client) do |options| 160 | expect(options[:sniffing_path]).to eq(sniffing_path) 161 | end 162 | described_class.build(logger, hosts, options) 163 | end 164 | end 165 | context "when not using path" do 166 | 167 | it "uses the sniffing_path setting" do 168 | expect(described_class).to receive(:create_http_client) do |options| 169 | expect(options[:sniffing_path]).to eq(sniffing_path) 170 | end 171 | described_class.build(logger, hosts, options) 172 | end 173 | end 174 | end 175 | 176 | context "when not setting sniffing_path" do 177 | 178 | context "when using path" do 179 | let(:path) { "/meh" } 180 | let(:options) { super().merge("path" => path) } 181 | it "sets sniffing_path to path+_nodes/http" do 182 | expect(described_class).to receive(:create_http_client) do |options| 183 | expect(options[:sniffing_path]).to eq("#{path}/_nodes/http") 184 | end 185 | described_class.build(logger, hosts, options) 186 | end 187 | end 188 | 189 | context "when not using path" do 190 | it "sets the sniffing_path to _nodes/http" do 191 | expect(described_class).to receive(:create_http_client) do |options| 192 | expect(options[:sniffing_path]).to eq("/_nodes/http") 193 | end 194 | described_class.build(logger, hosts, options) 195 | end 196 | end 197 | end 198 | end 199 | end 200 | end 201 | -------------------------------------------------------------------------------- 
# ------------------------------------------------------------------------------
# /spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb
# ------------------------------------------------------------------------------
require "logstash/devutils/rspec/spec_helper"
require "logstash/outputs/elasticsearch/http_client"
require 'cabin'

# Unit specs for the Manticore HTTP adapter: request auth handling, error
# translation for bad response codes, and URL formatting/escaping.
describe LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter do
  let(:logger) { Cabin::Channel.get }
  let(:options) { {} }

  subject { described_class.new(logger, options) }

  it "should raise an exception if requests are issued after close" do
    subject.close
    begin
      subject.perform_request(::LogStash::Util::SafeURI.new("http://localhost:9200"), :get, '/')
      fail 'expected to raise a HostUnreachableError'
    rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError => e
      expect( e.original_error ).to be_a ::Manticore::ClientStoppedException
    end
  end

  describe "auth" do
    let(:user) { "myuser" }
    let(:password) { "mypassword" }
    let(:noauth_uri) { clone = uri.clone; clone.user=nil; clone.password=nil; clone }
    let(:uri) { ::LogStash::Util::SafeURI.new("http://#{user}:#{password}@localhost:9200") }

    it "should convert the auth to params" do
      resp = double("response")
      allow(resp).to receive(:call)
      allow(resp).to receive(:code).and_return(200)

      expected_uri = noauth_uri.clone
      expected_uri.path = "/"

      expect(subject.manticore).to receive(:get).
        with(expected_uri.to_s, {
          :headers => LogStash::Outputs::ElasticSearch::HttpClient::DEFAULT_HEADERS,
          :auth => {
            :user => user,
            :password => password,
            :eager => true
          }
        }).and_return resp

      subject.perform_request(uri, :get, "/")
    end
  end

  describe "bad response codes" do
    let(:uri) { ::LogStash::Util::SafeURI.new("http://localhost:9200") }

    it "should raise a bad response code error" do
      resp = double("response")
      allow(resp).to receive(:call)
      allow(resp).to receive(:code).and_return(500)
      allow(resp).to receive(:body).and_return("a body")

      expect(subject.manticore).to receive(:get).
        with(uri.to_s + "/", anything).
        and_return(resp)

      uri_with_path = uri.clone
      uri_with_path.path = "/"

      expect(::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError).to receive(:new).
        with(resp.code, uri_with_path, nil, resp.body).and_call_original

      expect do
        subject.perform_request(uri, :get, "/")
      end.to raise_error(::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError)
    end
  end

  describe "format_url" do
    let(:url) { ::LogStash::Util::SafeURI.new("http://localhost:9200/path/") }
    let(:path) { "_bulk" }
    subject { described_class.new(double("logger"), {}) }

    it "should add the path argument to the uri's path" do
      expect(subject.format_url(url, path).path).to eq("/path/_bulk")
    end

    context "when uri contains query parameters" do
      let(:query_params) { "query=value&key=value2" }
      let(:url) { ::LogStash::Util::SafeURI.new("http://localhost:9200/path/?#{query_params}") }
      let(:formatted) { subject.format_url(url, path)}

      it "should retain query_params after format" do
        expect(formatted.query).to eq(query_params)
      end

      context "and the path contains query parameters" do
        let(:path) { "/special_path?specialParam=123" }

        it "should join the query correctly" do
          expect(formatted.query).to eq(query_params + "&specialParam=123")
        end
      end
    end

    context "when the path contains query parameters" do
      let(:path) { "/special_bulk?pathParam=1"}
      let(:formatted) { subject.format_url(url, path) }

      it "should add the path correctly" do
        expect(formatted.path).to eq("#{url.path}special_bulk")
        expect(subject.remove_double_escaping(formatted.path)).to eq("#{url.path}special_bulk")
      end

      it "should add the query parameters correctly" do
        expect(formatted.query).to eq("pathParam=1")
      end
    end

    context "when uri contains credentials" do
      let(:url) { ::LogStash::Util::SafeURI.new("http://myuser:mypass@localhost:9200") }
      let(:formatted) { subject.format_url(url, path) }

      it "should remove credentials after format" do
        expect(formatted.userinfo).to be_nil
      end
    end

    context 'when uri contains date math' do
      let(:url) { ::LogStash::Util::SafeURI.new("http://localhost:9200") }
      # FIX: the escaped literal was empty (`CGI.escape("")`), which can never
      # produce the expected "/%3Clogstash-%7Bnow%2Fd%7D-0001%3E" below — that
      # value URL-decodes to "/<logstash-{now/d}-0001>", so the path must be the
      # CGI-escaped date-math index name.
      let(:path) { CGI.escape("<logstash-{now/d}-0001>") }
      let(:formatted) { subject.format_url(url, path) }

      it 'should escape the uri correctly' do
        expect(subject.remove_double_escaping(formatted.path)).to eq("/%3Clogstash-%7Bnow%2Fd%7D-0001%3E")
      end
    end

    context 'when uri does not contain date math' do
      let(:url) { ::LogStash::Util::SafeURI.new("http://localhost:9200") }
      let(:path) { CGI.escape("logstash-0001") }
      let(:formatted) { subject.format_url(url, path) }

      it 'should escape the uri correctly' do
        expect(subject.remove_double_escaping(formatted.path)).to eq("/logstash-0001")
      end
    end
  end

  describe "integration specs", :integration => true do
    it "should perform correct tests without error" do
      resp = subject.perform_request(::LogStash::Util::SafeURI.new("http://localhost:9200"), :get, "/")
      expect(resp.code).to eql(200)
    end
  end
end

# ------------------------------------------------------------------------------
# /spec/unit/outputs/elasticsearch/template_manager_spec.rb
# ------------------------------------------------------------------------------
require_relative "../../../../spec/spec_helper"
require "logstash/outputs/elasticsearch/template_manager"

# Unit specs for template path resolution, ILM settings injection, and the
# choice between the legacy and composable index template endpoints.
describe LogStash::Outputs::ElasticSearch::TemplateManager do

  describe ".default_template_path" do
    context "elasticsearch 7.x" do
      it "chooses the 7x template" do
        expect(described_class.default_template_path(7)).to end_with("/templates/ecs-disabled/elasticsearch-7x.json")
      end
    end
    context "elasticsearch 8.x" do
      it "chooses the 8x template" do
        expect(described_class.default_template_path(8)).to end_with("/templates/ecs-disabled/elasticsearch-8x.json")
      end
    end
  end

  context 'when ECS v1 is requested' do
    it 'resolves' do
      expect(described_class.default_template_path(7, :v1)).to end_with("/templates/ecs-v1/elasticsearch-7x.json")
    end
  end

  context 'when ECS v8 is requested' do
    it 'resolves' do
      expect(described_class.default_template_path(7, :v8)).to end_with("/templates/ecs-v8/elasticsearch-7x.json")
    end
  end

  context "index template with ilm settings" do
    let(:plugin_settings) { {"manage_template" => true, "template_overwrite" => true} }
    let(:plugin) { LogStash::Outputs::ElasticSearch.new(plugin_settings) }

    describe "with custom template" do

      describe "in version 8+" do
        let(:file_path) { described_class.default_template_path(8) }
        let(:template) { described_class.read_template_file(file_path)}

        it "should update settings" do
          expect(plugin).to receive(:maximum_seen_major_version).at_least(:once).and_return(8)
          described_class.add_ilm_settings_to_template(plugin, template)
          expect(template['template']['settings']['index.lifecycle.name']).not_to eq(nil)
          expect(template['template']['settings']['index.lifecycle.rollover_alias']).not_to eq(nil)
          expect(template.include?('settings')).to be_falsey
        end
      end

      describe "in version 7" do
        let(:file_path) { described_class.default_template_path(7) }
        let(:template) { described_class.read_template_file(file_path)}

        it "should update settings" do
          expect(plugin).to receive(:maximum_seen_major_version).at_least(:once).and_return(7)
          described_class.add_ilm_settings_to_template(plugin, template)
          expect(template['settings']['index.lifecycle.name']).not_to eq(nil)
          expect(template['settings']['index.lifecycle.rollover_alias']).not_to eq(nil)
          expect(template.include?('template')).to be_falsey
        end
      end
    end

    context "resolve template setting" do
      let(:plugin_settings) { super().merge({"template_api" => template_api}) }

      describe "with composable template API" do
        let(:template_api) { "composable" }

        it 'resolves composable index template API compatible setting' do
          expect(plugin).to receive(:serverless?).and_return(false)
          expect(plugin).to receive(:maximum_seen_major_version).at_least(:once).and_return(8) # required to log
          template = {}
          described_class.resolve_template_settings(plugin, template)
          expect(template["template"]["settings"]).not_to eq(nil)
        end
      end

      describe "with legacy template API" do
        let(:template_api) { "legacy" }

        it 'resolves legacy index template API compatible setting' do
          expect(plugin).to receive(:serverless?).and_return(false)
          expect(plugin).to receive(:maximum_seen_major_version).at_least(:once).and_return(7) # required to log
          template = {}
          described_class.resolve_template_settings(plugin, template)
          expect(template["settings"]).not_to eq(nil)
        end
      end

      describe "with `template_api => 'auto'`" do
        let(:template_api) { "auto" }

        describe "with ES 7" do

          it 'resolves legacy index template API compatible setting' do
            expect(plugin).to receive(:serverless?).and_return(false)
            expect(plugin).to receive(:maximum_seen_major_version).at_least(:once).and_return(7)
            template = {}
            described_class.resolve_template_settings(plugin, template)
            expect(template["settings"]).not_to eq(nil)
          end
        end

        describe "with ES >= 8 versions" do
          it 'resolves composable index template API compatible setting' do
            expect(plugin).to receive(:serverless?).and_return(false)
            expect(plugin).to receive(:maximum_seen_major_version).at_least(:once).and_return(8)
            template = {}
            described_class.resolve_template_settings(plugin, template)
            expect(template["template"]["settings"]).not_to eq(nil)
          end
        end
      end
    end
  end

  describe "template endpoint" do
    describe "template_api => 'auto'" do
      let(:plugin_settings) { {"manage_template" => true, "template_api" => 'auto'} }
      let(:plugin) { LogStash::Outputs::ElasticSearch.new(plugin_settings) }

      describe "in version 8+" do
        it "should use index template API" do
          expect(plugin).to receive(:serverless?).and_return(false)
          expect(plugin).to receive(:maximum_seen_major_version).at_least(:once).and_return(8)
          endpoint = described_class.template_endpoint(plugin)
          expect(endpoint).to be_equal(LogStash::Outputs::ElasticSearch::TemplateManager::INDEX_TEMPLATE_ENDPOINT)
        end
      end

      describe "in version 7" do
        it "should use legacy template API" do
          expect(plugin).to receive(:serverless?).and_return(false)
          expect(plugin).to receive(:maximum_seen_major_version).at_least(:once).and_return(7)
          endpoint = described_class.template_endpoint(plugin)
          expect(endpoint).to be_equal(LogStash::Outputs::ElasticSearch::TemplateManager::LEGACY_TEMPLATE_ENDPOINT)
        end
      end
    end

    describe "template_api => 'legacy'" do
      let(:plugin_settings) { {"manage_template" => true, "template_api" => 'legacy'} }
      let(:plugin) { LogStash::Outputs::ElasticSearch.new(plugin_settings) }

      describe "in version 8+" do
        it "should use legacy template API" do
          expect(plugin).to receive(:serverless?).and_return(false)
          expect(plugin).to receive(:maximum_seen_major_version).never
          endpoint = described_class.template_endpoint(plugin)
          expect(endpoint).to be_equal(LogStash::Outputs::ElasticSearch::TemplateManager::LEGACY_TEMPLATE_ENDPOINT)
        end
      end
    end

    describe "template_api => 'composable'" do
      let(:plugin_settings) { {"manage_template" => true, "template_api" => 'composable'} }
      let(:plugin) { LogStash::Outputs::ElasticSearch.new(plugin_settings) }

      describe "in version 8+" do
        # FIX: the description said "legacy template API" but the assertion
        # checks INDEX_TEMPLATE_ENDPOINT (the composable index template API).
        it "should use index template API" do
          expect(plugin).to receive(:serverless?).and_return(false)
          expect(plugin).to receive(:maximum_seen_major_version).never
          endpoint = described_class.template_endpoint(plugin)
          expect(endpoint).to be_equal(LogStash::Outputs::ElasticSearch::TemplateManager::INDEX_TEMPLATE_ENDPOINT)
        end
      end
    end

    describe "in serverless" do
      [:auto, :composable, :legacy].each do |api|
        # FIX: the `let` declarations were previously defined directly inside
        # the loop on this same example group, so each iteration overwrote the
        # previous one and all three examples ran with "template_api" =>
        # "legacy". Wrapping each iteration in its own context scopes the
        # `let`s correctly.
        context "when template_api is set to #{api}" do
          let(:plugin_settings) { {"manage_template" => true, "template_api" => api.to_s} }
          let(:plugin) { LogStash::Outputs::ElasticSearch.new(plugin_settings) }

          it "uses the index template API" do
            expect(plugin).to receive(:serverless?).and_return(true)
            endpoint = described_class.template_endpoint(plugin)
            expect(endpoint).to be_equal(LogStash::Outputs::ElasticSearch::TemplateManager::INDEX_TEMPLATE_ENDPOINT)
          end
        end
      end

    end
  end
end

# ------------------------------------------------------------------------------
# /spec/unit/outputs/elasticsearch_proxy_spec.rb
# ------------------------------------------------------------------------------
require_relative "../../../spec/spec_helper"
require 'stud/temporary'
require 'manticore/client'

# Unit specs for the `proxy` option: URI-to-hash conversion passed to
# Manticore, the empty/env-var case, and invalid URI failure modes.
describe "Proxy option" do
  let(:settings) { { "hosts" => "node01" } }
  subject {
    LogStash::Outputs::ElasticSearch.new(settings)
  }

  before do
    allow(::Manticore::Client).to receive(:new).with(any_args).and_call_original
  end

  describe "valid configs" do
    before do
      subject.register
    end

    after do
      subject.close
    end

    context "when specified as a URI" do
      shared_examples("hash conversion") do |hash|
        let(:settings) { super().merge("proxy" => proxy)}

        it "should set the proxy to the correct hash value" do
          expect(::Manticore::Client).to have_received(:new) do |options|
            expect(options[:proxy]).to eq(hash)
          end
        end
      end

      describe "simple proxy" do
        let(:proxy) { LogStash::Util::SafeURI.new("http://127.0.0.1:1234") }

        include_examples("hash conversion",
          {
            :host => "127.0.0.1",
            :scheme => "http",
            :port => 1234
          }
        )
      end

      describe "a secure authed proxy" do
        let(:proxy) { LogStash::Util::SafeURI.new("https://myuser:mypass@127.0.0.1:1234") }

        include_examples("hash conversion",
          {
            :host => "127.0.0.1",
            :scheme => "https",
            :user => "myuser",
            :password => "mypass",
            :port => 1234
          }
        )
      end
    end

    context "when not specified" do
      it "should not send the proxy option to manticore" do
        expect(::Manticore::Client).to have_received(:new) do |options|
          expect(options).not_to include(:proxy)
        end
      end
    end
  end

  context "when specified as ''" do
    let(:settings) { super().merge("proxy" => "${A_MISSING_ENV_VARIABLE:}")}

    it "should not send the proxy option to manticore" do
      expect { subject.register }.not_to raise_error

      expect(::Manticore::Client).to have_received(:new) do |options|
        expect(options).not_to include(:proxy)
      end

      subject.close
    end
  end

  context "when specified as invalid uri" do
    let(:settings) { super().merge("proxy" => ":")}

    it "should fail" do
      # SafeURI isn't doing the proper exception wrapping for us, we can not simply :
      # expect { subject.register }.to raise_error(ArgumentError, /URI is not valid/i)
      begin
        subject.register
      rescue ArgumentError => e
        expect(e.message).to match /URI is not valid/i
      rescue java.net.URISyntaxException => e
        expect(e.message).to match /scheme name/i
      else
        fail 'exception not raised'
      end
    end
  end
end

# ------------------------------------------------------------------------------
# /spec/unit/outputs/elasticsearch_ssl_spec.rb
# ------------------------------------------------------------------------------
require_relative "../../../spec/spec_helper"
require 'stud/temporary'

# Unit specs for the SSL-related options: verification modes, conflicting
# settings, Java store files, PEM certificate files, and obsolete settings.
describe "SSL options" do
  let(:manticore_double) { double("manticoreSSL #{self.inspect}") }

  let(:settings) { { "ssl_enabled" => true, "hosts" => "localhost", "pool_max" => 1, "pool_max_per_route" => 1 } }

  subject do
    require "logstash/outputs/elasticsearch"
    LogStash::Outputs::ElasticSearch.new(settings)
  end

  before do
    allow(manticore_double).to receive(:close)

    response_double = double("manticore response").as_null_object
    # Allow healthchecks
    allow(manticore_double).to receive(:head).with(any_args).and_return(response_double)
    allow(manticore_double).to receive(:get).with(any_args).and_return(response_double)
    allow(::Manticore::Client).to receive(:new).and_return(manticore_double)
  end

  after do
    subject.close
  end

  context "when ssl_verification_mode" do
    context "is set to none" do
      let(:settings) { super().merge(
        "ssl_verification_mode" => 'none',
      ) }

      it "should print a warning" do
        expect(subject.logger).to receive(:warn).with(/You have enabled encryption but DISABLED certificate verification/).at_least(:once)
        allow(subject.logger).to receive(:warn).with(any_args)

        subject.register
        allow(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:start)
      end

      it "should pass the flag to the ES client" do
        expect(::Manticore::Client).to receive(:new) do |args|
          expect(args[:ssl]).to match hash_including(:enabled => true, :verify => :disable)
        end.and_return(manticore_double)

        subject.register
      end
    end

    context "is set to full" do
      let(:settings) { super().merge(
        "ssl_verification_mode" => 'full',
      ) }

      it "should pass the flag to the ES client" do
        expect(::Manticore::Client).to receive(:new) do |args|
          expect(args[:ssl]).to match hash_including(:enabled => true, :verify => :default)
        end.and_return(manticore_double)

        subject.register
      end
    end
  end

  context "with the conflicting configs" do
    context "ssl_certificate_authorities and ssl_truststore_path set" do
      let(:ssl_truststore_path) { Stud::Temporary.file.path }
      let(:ssl_certificate_authorities_path) { Stud::Temporary.file.path }
      let(:settings) { super().merge(
        "ssl_truststore_path" => ssl_truststore_path,
        "ssl_certificate_authorities" => ssl_certificate_authorities_path
      ) }

      after :each do
        File.delete(ssl_truststore_path)
        File.delete(ssl_certificate_authorities_path)
      end

      it "should raise a configuration error" do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError, /Use either "ssl_certificate_authorities\/cacert" or "ssl_truststore_path\/truststore"/)
      end
    end

    context "ssl_certificate and ssl_keystore_path set" do
      let(:ssl_keystore_path) { Stud::Temporary.file.path }
      let(:ssl_certificate_path) { Stud::Temporary.file.path }
      let(:settings) { super().merge(
        "ssl_certificate" => ssl_certificate_path,
        "ssl_keystore_path" => ssl_keystore_path
      ) }

      after :each do
        File.delete(ssl_keystore_path)
        File.delete(ssl_certificate_path)
      end

      it "should raise a configuration error" do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError, /Use either "ssl_certificate" or "ssl_keystore_path\/keystore"/)
      end
    end
  end

  context "when configured with Java store files" do
    let(:ssl_truststore_path) { Stud::Temporary.file.path }
    let(:ssl_keystore_path) { Stud::Temporary.file.path }

    after :each do
      File.delete(ssl_truststore_path)
      File.delete(ssl_keystore_path)
    end

    let(:settings) { super().merge(
      "ssl_truststore_path" => ssl_truststore_path,
      "ssl_truststore_type" => "jks",
      "ssl_truststore_password" => "foo",
      "ssl_keystore_path" => ssl_keystore_path,
      "ssl_keystore_type" => "jks",
      "ssl_keystore_password" => "bar",
      "ssl_verification_mode" => "full",
      "ssl_cipher_suites" => ["TLS_DHE_RSA_WITH_AES_256_CBC_SHA256"],
      "ssl_supported_protocols" => ["TLSv1.3"]
    ) }

    it "should pass the parameters to the ES client" do
      expect(::Manticore::Client).to receive(:new) do |args|
        expect(args[:ssl]).to match hash_including(
          :enabled => true,
          :keystore => ssl_keystore_path,
          :keystore_type => "jks",
          :keystore_password => "bar",
          :truststore => ssl_truststore_path,
          :truststore_type => "jks",
          :truststore_password => "foo",
          :verify => :default,
          :cipher_suites => ["TLS_DHE_RSA_WITH_AES_256_CBC_SHA256"],
          :protocols => ["TLSv1.3"],
        )
      end.and_return(manticore_double)

      subject.register
    end
  end

  context "when configured with certificate files" do
    let(:ssl_certificate_authorities_path) { Stud::Temporary.file.path }
    let(:ssl_certificate_path) { Stud::Temporary.file.path }
    let(:ssl_key_path) { Stud::Temporary.file.path }
    let(:settings) { super().merge(
      "ssl_certificate_authorities" => [ssl_certificate_authorities_path],
      "ssl_certificate" => ssl_certificate_path,
      "ssl_key" => ssl_key_path,
      "ssl_verification_mode" => "full",
      "ssl_cipher_suites" => ["TLS_DHE_RSA_WITH_AES_256_CBC_SHA256"],
      "ssl_supported_protocols" => ["TLSv1.3"]
    ) }

    after :each do
      File.delete(ssl_certificate_authorities_path)
      File.delete(ssl_certificate_path)
      File.delete(ssl_key_path)
    end

    it "should pass the parameters to the ES client" do
      expect(::Manticore::Client).to receive(:new) do |args|
        expect(args[:ssl]).to match hash_including(
          :enabled => true,
          :ca_file => ssl_certificate_authorities_path,
          :client_cert => ssl_certificate_path,
          :client_key => ssl_key_path,
          :verify => :default,
          :cipher_suites => ["TLS_DHE_RSA_WITH_AES_256_CBC_SHA256"],
          :protocols => ["TLSv1.3"],
        )
      end.and_return(manticore_double)

      subject.register
    end

    context "and only the ssl_certificate is set" do
      let(:settings) { super().reject { |k| "ssl_key".eql?(k) } }

      it "should raise a configuration error" do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError, /Using an "ssl_certificate" requires an "ssl_key"/)
      end
    end

    context "and only the ssl_key is set" do
      let(:settings) { super().reject { |k| "ssl_certificate".eql?(k) } }

      it "should raise a configuration error" do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError, /An "ssl_certificate" is required when using an "ssl_key"/)
      end
    end
  end
end

# Move outside the SSL options describe block that has the after hook
describe "SSL obsolete settings" do
  let(:base_settings) { { "hosts" => "localhost", "pool_max" => 1, "pool_max_per_route" => 1 } }
  [
    {name: 'ssl', replacement: 'ssl_enabled'},
    {name: 'ssl_certificate_verification', replacement: 'ssl_verification_mode'},
    {name: 'cacert', replacement: 'ssl_certificate_authorities'},
    {name: 'truststore', replacement: 'ssl_truststore_path'},
    {name: 'keystore', replacement: 'ssl_keystore_path'},
    {name: 'truststore_password', replacement: 'ssl_truststore_password'},
    {name: 'keystore_password', replacement: 'ssl_keystore_password'}
  ].each do |obsolete_setting|
    context "with option #{obsolete_setting[:name]}" do
      let(:settings) { base_settings.merge(obsolete_setting[:name] => "value") }

      it "emits an error about the setting being obsolete" do
        error_text = /The setting `#{obsolete_setting[:name]}` in plugin `elasticsearch` is obsolete and is no longer available. (Use|Set) '#{obsolete_setting[:replacement]}' instead/i
        expect { LogStash::Outputs::ElasticSearch.new(settings) }.to raise_error LogStash::ConfigurationError, error_text
      end
    end
  end
end

# ------------------------------------------------------------------------------
# /spec/unit/outputs/error_whitelist_spec.rb
# ------------------------------------------------------------------------------
require "logstash/outputs/elasticsearch"
require_relative "../../../spec/es_spec_helper"

# Unit specs for `silence_errors_in_log`: bulk-response errors are logged as
# "Failed action" warnings unless their type is whitelisted.
describe "whitelisting error types in expected behavior" do
  let(:template) { '{"template" : "not important, will be updated by :index"}' }
  let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z") }
  let(:settings) { {"manage_template" => true, "index" => "logstash-2014.11.17", "template_overwrite" => true, "hosts" => get_host_port() } }

  subject { LogStash::Outputs::ElasticSearch.new(settings) }

  before :each do
    allow(subject.logger).to receive(:warn)
    allow(subject).to receive(:maximum_seen_major_version).and_return(0)
    allow(subject).to receive(:alive_urls_count).and_return(1)
    allow(subject).to receive(:finish_register)

    subject.register

    allow(subject.client).to receive(:get_xpack_info)
    allow(subject.client).to receive(:bulk).and_return(
      {
        "errors" => true,
        "items" => [{
          "create" => {
            "status" => 409,
            "error" => {
              "type" => "version_conflict_engine_exception",
              "reason" => "[shard] document already exists"
            }
          }
        }]
      })

    subject.multi_receive([event1])
  end

  after :each do
    subject.close
  end

  describe "when failure logging is enabled for everything" do
    it "should log a failure on the action" do
      expect(subject.logger).to have_received(:warn).with("Failed action", anything)
    end
  end

  describe "when failure logging is disabled for document exists error" do
    let(:settings) { super().merge("silence_errors_in_log" => ["version_conflict_engine_exception"]) }

    # FIX: the description said "should log a failure" while the assertion is
    # `not_to have_received` — this context verifies the warning is silenced.
    it "should not log a failure on the action" do
      expect(subject.logger).not_to have_received(:warn).with("Failed action", anything)
    end
  end

end

# ------------------------------------------------------------------------------
# /spec/unit/outputs/license_check_spec.rb
# ------------------------------------------------------------------------------
require "logstash/devutils/rspec/spec_helper"
require "logstash/outputs/elasticsearch/http_client"
require "logstash/outputs/elasticsearch/license_checker"

# Unit specs for the license checker's contract with the connection Pool and
# its acceptance of serverless, active, and inactive license states.
describe LogStash::Outputs::ElasticSearch::LicenseChecker do

  # Note that the actual license checking logic is spec'ed in pool_spec.rb

  context "LicenseChecker API required by Pool class" do
    subject { described_class }

    it "defines the appropriate_license? methods" do
      expect(subject.instance_methods).to include(:appropriate_license?)
    end
  end

  context "Pool class API required by the LicenseChecker" do
    subject { LogStash::Outputs::ElasticSearch::HttpClient::Pool }

    it "contains the get_license method" do
      expect(LogStash::Outputs::ElasticSearch::HttpClient::Pool.instance_methods).to include(:get_license)
    end
  end

  context "appropriate license" do
    let(:logger) { double("logger") }
    let(:url) { LogStash::Util::SafeURI.new("https://cloud.elastic.co") }
    let(:pool) { double("pool") }
    subject { described_class.new(logger) }

    it "is true when connect to serverless" do
      allow(pool).to receive(:serverless?).and_return(true)
      expect(subject.appropriate_license?(pool, url)).to eq true
    end

    it "is true when license status is active" do
      allow(pool).to receive(:serverless?).and_return(false)
      allow(pool).to receive(:get_license).with(url).and_return(LogStash::Json.load File.read("spec/fixtures/license_check/active.json"))
      expect(subject.appropriate_license?(pool, url)).to eq true
    end

    it "is true when license status is inactive" do
      allow(logger).to receive(:warn).with(instance_of(String), anything)
      allow(pool).to receive(:serverless?).and_return(false)
      allow(pool).to receive(:get_license).with(url).and_return(LogStash::Json.load File.read("spec/fixtures/license_check/inactive.json"))
      expect(subject.appropriate_license?(pool, url)).to eq true
    end

    it "is false when no license return" do
      allow(logger).to receive(:error).with(instance_of(String), anything)
      allow(pool).to receive(:serverless?).and_return(false)
      allow(pool).to receive(:get_license).with(url).and_return(LogStash::Json.load('{}'))
      expect(subject.appropriate_license?(pool, url)).to eq false
    end
  end
end

# ------------------------------------------------------------------------------