├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── NOTICE.TXT
├── .github
├── PULL_REQUEST_TEMPLATE.md
├── ISSUE_TEMPLATE.md
└── CONTRIBUTING.md
├── .gitignore
├── .ci
└── run.sh
├── .travis.yml
├── maxmind-db-NOTICE.txt
├── lib
├── logstash-filter-geoip_jars.rb
└── logstash
│ └── filters
│ ├── geoip
│ └── patch.rb
│ └── geoip.rb
├── Gemfile
├── Rakefile
├── spec
└── filters
│ ├── test_helper.rb
│ ├── geoip_spec.rb
│ ├── geoip_online_spec.rb
│ ├── geoip_offline_spec.rb
│ └── geoip_ecs_spec.rb
├── CONTRIBUTORS
├── logstash-filter-geoip.gemspec
├── gradlew.bat
├── README.md
├── src
└── main
│ └── java
│ └── org
│ └── logstash
│ └── filters
│ └── geoip
│ ├── Fields.java
│ └── GeoIPFilter.java
├── gradlew
├── CHANGELOG.md
├── LICENSE
└── docs
└── index.asciidoc
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DSO-Lab/logstash-filter-geoip/main/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/NOTICE.TXT:
--------------------------------------------------------------------------------
1 | Elasticsearch
2 | Copyright 2012-2015 Elasticsearch
3 |
4 | This product includes software developed by The Apache Software
5 | Foundation (http://www.apache.org/).
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | Thanks for contributing to Logstash! If you haven't already signed our CLA, here's a handy link: https://www.elastic.co/contributor-agreement/
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.gem
2 | Gemfile.lock
3 | .bundle
4 | vendor
5 |
6 | # build dirs
7 | build
8 | .gradle
9 |
10 | # Intellij
11 | .idea
12 | *.iml
13 |
14 | gradle.properties
15 |
--------------------------------------------------------------------------------
/.ci/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | current_dir="$(dirname "$0")"
3 |
4 | bundle install
5 | bundle exec rake gradle.properties
6 | ./gradlew assemble
7 | bundle exec rake vendor
8 | ./gradlew test
9 | bundle exec rspec spec
10 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | import:
2 | - logstash-plugins/.ci:travis/travis.yml@1.x
3 |
4 | env:
5 | - DISTRIBUTION=default ELASTIC_STACK_VERSION=7.x
6 | - DISTRIBUTION=default ELASTIC_STACK_VERSION=7.x SNAPSHOT=true
7 | - DISTRIBUTION=default ELASTIC_STACK_VERSION=8.x SNAPSHOT=true
8 |
--------------------------------------------------------------------------------
/maxmind-db-NOTICE.txt:
--------------------------------------------------------------------------------
1 | This product includes GeoLite2 data created by MaxMind, available from
2 | http://www.maxmind.com.
3 |
4 | The GeoLite2 databases are distributed under the Creative Commons Attribution-ShareAlike 4.0 International License
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Tue Jan 12 14:21:00 CET 2021
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip
7 |
--------------------------------------------------------------------------------
/lib/logstash-filter-geoip_jars.rb:
--------------------------------------------------------------------------------
1 | # AUTOGENERATED BY THE GRADLE SCRIPT. DO NOT EDIT.
2 |
3 | require 'jar_dependencies'
4 | require_jar('com.maxmind.geoip2', 'geoip2', '2.9.0')
5 | require_jar('com.maxmind.db', 'maxmind-db', '1.2.2')
6 | require_jar('org.logstash.filters', 'logstash-filter-geoip', '6.0.0')
7 |
--------------------------------------------------------------------------------
/Gemfile:
--------------------------------------------------------------------------------
1 | source 'https://rubygems.org'
2 |
3 | gemspec
4 |
5 | logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
6 | use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
7 |
8 | if Dir.exist?(logstash_path) && use_logstash_source
9 | gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
10 | gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
11 | end
12 |
--------------------------------------------------------------------------------
/lib/logstash/filters/geoip/patch.rb:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | def suppress_all_warnings
3 | old_verbose = $VERBOSE
4 | begin
5 | $VERBOSE = nil
6 | yield if block_given?
7 | ensure
8 | # always re-set to old value, even if block raises an exception
9 | $VERBOSE = old_verbose
10 | end
11 | end
12 |
13 | # create a new instance of the Java class File without shadowing the Ruby version of the File class
14 | module JavaIO
15 | include_package "java.io"
16 | end
17 |
18 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | Please post all product and debugging questions on our [forum](https://discuss.elastic.co/c/logstash). Your questions will reach our wider community members there, and if we confirm that there is a bug, then we can open a new issue here.
2 |
3 | For all general issues, please provide the following details for fast resolution:
4 |
5 | - Version:
6 | - Operating System:
7 | - Config File (if you have sensitive info, please remove it):
8 | - Sample Data:
9 | - Steps to Reproduce:
10 |
--------------------------------------------------------------------------------
/Rakefile:
--------------------------------------------------------------------------------
1 | require 'json'
2 |
3 | BASE_PATH = File.expand_path(File.dirname(__FILE__))
4 |
5 | task :default do
6 | system("rake -T")
7 | end
8 |
9 | require "logstash/devutils/rake"
10 |
11 | task :vendor => :gradle
12 |
13 | task :gradle => "gradle.properties" do
14 | system("./gradlew vendor")
15 | end
16 |
17 | file "gradle.properties" do
18 | delete_create_gradle_properties
19 | end
20 |
21 | def delete_create_gradle_properties
22 | root_dir = File.dirname(__FILE__)
23 | gradle_properties_file = "#{root_dir}/gradle.properties"
24 | lsc_path = `bundle show logstash-core`
25 | lsce_path = `bundle show logstash-core-event`
26 | FileUtils.rm_f(gradle_properties_file)
27 | File.open(gradle_properties_file, "w") do |f|
28 | f.puts "logstashCoreGemPath=#{lsc_path}"
29 | end
30 | puts "-------------------> Wrote #{gradle_properties_file}"
31 | puts `cat #{gradle_properties_file}`
32 | end
33 |
--------------------------------------------------------------------------------
/spec/filters/test_helper.rb:
--------------------------------------------------------------------------------
1 | require "logstash-core/logstash-core"
2 | require "digest"
3 | require "csv"
4 |
5 | def get_vendor_path(filename)
6 | ::File.join(::File.expand_path("../../vendor/", ::File.dirname(__FILE__)), filename)
7 | end
8 |
9 | def get_data_dir
10 | ::File.join(LogStash::SETTINGS.get_value("path.data"), "plugins", "filters", "geoip")
11 | end
12 |
13 | def get_file_path(filename)
14 | ::File.join(get_data_dir, filename)
15 | end
16 |
17 | def get_metadata_city_database_name
18 | if ::File.exist?(METADATA_PATH)
19 | city = ::CSV.read(METADATA_PATH, headers: false).select { |row| row[0].eql?("City") }.last
20 | city[3]
21 | else
22 | nil
23 | end
24 | end
25 |
26 | METADATA_PATH = get_file_path("metadata.csv")
27 | DEFAULT_CITY_DB_PATH = get_vendor_path("GeoLite2-City.mmdb")
28 | DEFAULT_ASN_DB_PATH = get_vendor_path("GeoLite2-ASN.mmdb")
29 |
30 | major, minor = LOGSTASH_VERSION.split(".")
31 | MAJOR = major.to_i
32 | MINOR = minor.to_i
33 |
--------------------------------------------------------------------------------
/CONTRIBUTORS:
--------------------------------------------------------------------------------
1 | The following is a list of people who have contributed ideas, code, bug
2 | reports, or in general have helped logstash along its way.
3 |
4 | Contributors:
5 | * Aaron Mildenstein (untergeek)
6 | * Avishai Ish-Shalom (avishai-ish-shalom)
7 | * Brad Fritz (bfritz)
8 | * Colin Surprenant (colinsurprenant)
9 | * Jordan Sissel (jordansissel)
10 | * Kurt Hurtado (kurtado)
11 | * Leandro Moreira (leandromoreira)
12 | * Nick Ethier (nickethier)
13 | * Pier-Hugues Pellerin (ph)
14 | * Pieter Lexis (pieterlexis)
15 | * Richard Pijnenburg (electrical)
16 | * Suyog Rao (suyograo)
17 | * Vincent Batts (vbatts)
18 | * avleen
19 | * Guy Boertje (guyboertje)
20 | * Thomas Decaux (qwant)
21 | * Gary Gao (garyelephant)
22 |
23 | Note: If you've sent us patches, bug reports, or otherwise contributed to
24 | Logstash, and you aren't on the list above and want to be, please let us know
25 | and we'll make sure you're here. Contributions from folks like you are what make
26 | open source awesome.
27 |
--------------------------------------------------------------------------------
/spec/filters/geoip_spec.rb:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | require "logstash/devutils/rspec/spec_helper"
3 | require "logstash/filters/geoip"
4 | require_relative 'test_helper'
5 |
6 | describe LogStash::Filters::GeoIP do
7 |
8 | describe "database path", :aggregate_failures do
9 | let(:plugin) { LogStash::Filters::GeoIP.new("source" => "[target][ip]", "database" => DEFAULT_ASN_DB_PATH) }
10 |
11 | before :each do
12 | logstash_path = ENV['LOGSTASH_PATH'] || '/usr/share/logstash' # docker logstash home
13 | stub_const('LogStash::Environment::LOGSTASH_HOME', logstash_path)
14 | end
15 |
16 | context "select_database_path with static path" do
17 | it "should be the assigned path" do
18 | expect(plugin.select_database_path).to eql(DEFAULT_ASN_DB_PATH)
19 | end
20 | end
21 |
22 | describe ">= 7.14" do
23 | it "load_database_manager? should be true" do
24 | expect(plugin.load_database_manager?).to be_truthy
25 | end
26 | end if MAJOR >= 8 || (MAJOR == 7 && MINOR >= 14)
27 |
28 | describe "<= 7.13" do
29 | it "load_database_manager? should be false" do
30 | expect(plugin.load_database_manager?).to be_falsey
31 | end
32 |
33 | describe "select_database_path without path setting" do
34 | let(:plugin) { LogStash::Filters::GeoIP.new("source" => "[target][ip]") }
35 |
36 | it "should be default" do
37 | expect(plugin.select_database_path).to eql(DEFAULT_CITY_DB_PATH)
38 | end
39 | end
40 | end if MAJOR < 7 || (MAJOR == 7 && MINOR <= 13)
41 | end
42 | end
43 |
--------------------------------------------------------------------------------
/logstash-filter-geoip.gemspec:
--------------------------------------------------------------------------------
1 | Gem::Specification.new do |s|
2 |
3 | s.name = 'logstash-filter-geoip'
4 | s.version = '7.2.12'
5 | s.licenses = ['Apache License (2.0)']
6 | s.summary = "Adds geographical information about an IP address"
7 | s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
8 | s.authors = ["Elastic"]
9 | s.email = 'info@elastic.co'
10 | s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
11 | s.platform = "java"
12 | s.require_paths = ["lib", "vendor/jar-dependencies"]
13 |
14 | # Files
15 | s.files = Dir['lib/**/*','spec/**/*','vendor/**/*', 'vendor/jar-dependencies/**/*.jar', '*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT', 'maxmind-db-NOTICE.txt', 'docs/**/*']
16 |
17 | # Tests
18 | s.test_files = s.files.grep(%r{^(test|spec|features)/})
19 |
20 | # Special flag to let us know this is actually a logstash plugin
21 | s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }
22 |
23 | # Gem dependencies
24 | s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
25 | s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~>1.2'
26 | s.add_development_dependency 'logstash-devutils'
27 | s.add_development_dependency 'insist'
28 | s.add_development_dependency 'benchmark-ips'
29 | # only compatible with 7.14+ because of the dependency of DatabaseManager
30 | s.add_runtime_dependency "logstash-core", ">= 7.14.0"
31 | end
32 |
--------------------------------------------------------------------------------
/spec/filters/geoip_online_spec.rb:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | require "logstash/devutils/rspec/spec_helper"
3 | require "insist"
4 | require "logstash/filters/geoip"
5 | require_relative 'test_helper'
6 |
7 | describe LogStash::Filters::GeoIP do
8 |
9 | before(:each) do
10 | ::File.delete(METADATA_PATH) if ::File.exist?(METADATA_PATH)
11 | end
12 |
13 | describe "config without database path in LS >= 7.14", :aggregate_failures do
14 | before(:each) do
15 | dir_path = Stud::Temporary.directory
16 | File.open(dir_path + '/uuid', 'w') { |f| f.write(SecureRandom.uuid) }
17 | allow(LogStash::SETTINGS).to receive(:get).and_call_original
18 | allow(LogStash::SETTINGS).to receive(:get).with("path.data").and_return(dir_path)
19 | end
20 |
21 | let(:plugin) { LogStash::Filters::GeoIP.new("source" => "[target][ip]") }
22 |
23 | context "restart the plugin" do
24 | let(:event) { LogStash::Event.new("target" => { "ip" => "173.9.34.107" }) }
25 | let(:event2) { LogStash::Event.new("target" => { "ip" => "55.159.212.43" }) }
26 |
27 | it "should use the same database" do
28 | unless plugin.load_database_manager?
29 | logstash_path = ENV['LOGSTASH_PATH'] || '/usr/share/logstash' # docker logstash home
30 | stub_const('LogStash::Environment::LOGSTASH_HOME', logstash_path)
31 | end
32 |
33 | plugin.register
34 | plugin.filter(event)
35 | plugin.close
36 | first_dirname = get_metadata_city_database_name
37 | plugin.register
38 | plugin.filter(event2)
39 | plugin.close
40 | second_dirname = get_metadata_city_database_name
41 |
42 | expect(first_dirname).not_to be_nil
43 | expect(first_dirname).to eq(second_dirname)
44 | expect(File).to exist(get_file_path(first_dirname))
45 | end
46 | end
47 | end if MAJOR >= 8 || (MAJOR == 7 && MINOR >= 14)
48 |
49 | describe "config without database path in LS < 7.14" do
50 | context "should run in offline mode" do
51 | config <<-CONFIG
52 | filter {
53 | geoip {
54 | source => "ip"
55 | }
56 | }
57 | CONFIG
58 |
59 | sample("ip" => "173.9.34.107") do
60 | insist { subject.get("geoip") }.include?("ip")
61 | expect(::File.exist?(METADATA_PATH)).to be_falsey
62 | end
63 | end
64 | end if MAJOR < 7 || (MAJOR == 7 && MINOR < 14)
65 | end
66 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to Logstash
2 |
3 | All contributions are welcome: ideas, patches, documentation, bug reports,
4 | complaints, etc!
5 |
6 | Programming is not a required skill, and there are many ways to help out!
7 | It is more important to us that you are able to contribute.
8 |
9 | That said, some basic guidelines, which you are free to ignore :)
10 |
11 | ## Want to learn?
12 |
13 | Want to lurk about and see what others are doing with Logstash?
14 |
15 | * The irc channel (#logstash on irc.freenode.org) is a good place for this
16 | * The [forum](https://discuss.elastic.co/c/logstash) is also
17 | great for learning from others.
18 |
19 | ## Got Questions?
20 |
21 | Have a problem you want Logstash to solve for you?
22 |
23 | * You can ask a question in the [forum](https://discuss.elastic.co/c/logstash)
24 | * Alternately, you are welcome to join the IRC channel #logstash on
25 | irc.freenode.org and ask for help there!
26 |
27 | ## Have an Idea or Feature Request?
28 |
29 | * File a ticket on [GitHub](https://github.com/elastic/logstash/issues). Please remember that GitHub is used only for issues and feature requests. If you have a general question, the [forum](https://discuss.elastic.co/c/logstash) or IRC would be the best place to ask.
30 |
31 | ## Something Not Working? Found a Bug?
32 |
33 | If you think you found a bug, it probably is a bug.
34 |
35 | * If it is a general Logstash or a pipeline issue, file it in [Logstash GitHub](https://github.com/elasticsearch/logstash/issues)
36 | * If it is specific to a plugin, please file it in the respective repository under [logstash-plugins](https://github.com/logstash-plugins)
37 | * or ask the [forum](https://discuss.elastic.co/c/logstash).
38 |
39 | # Contributing Documentation and Code Changes
40 |
41 | If you have a bugfix or new feature that you would like to contribute to
42 | logstash, and you think it will take more than a few minutes to produce the fix
43 | (i.e., write code), it is worth discussing the change with the Logstash users and developers first! You can reach us via [GitHub](https://github.com/elastic/logstash/issues), the [forum](https://discuss.elastic.co/c/logstash), or via IRC (#logstash on freenode irc)
44 | Please note that Pull Requests without tests will not be merged. If you would like to contribute but do not have experience with writing tests, please ping us on IRC/forum or create a PR and ask our help.
45 |
46 | ## Contributing to plugins
47 |
48 | Check our [documentation](https://www.elastic.co/guide/en/logstash/current/contributing-to-logstash.html) on how to contribute to plugins or write your own! It is super easy!
49 |
50 | ## Contribution Steps
51 |
52 | 1. Test your changes! [Run](https://github.com/elastic/logstash#testing) the test suite
53 | 2. Please make sure you have signed our [Contributor License
54 | Agreement](https://www.elastic.co/contributor-agreement/). We are not
55 | asking you to assign copyright to us, but to give us the right to distribute
56 | your code without restriction. We ask this of all contributors in order to
57 | assure our users of the origin and continuing existence of the code. You
58 | only need to sign the CLA once.
59 | 3. Send a pull request! Push your changes to your fork of the repository and
60 | [submit a pull
61 | request](https://help.github.com/articles/using-pull-requests). In the pull
62 | request, describe what your changes do and mention any bugs/issues related
63 | to the pull request.
64 |
65 |
66 |
--------------------------------------------------------------------------------
/spec/filters/geoip_offline_spec.rb:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | require "logstash/devutils/rspec/spec_helper"
3 | require "insist"
4 | require "logstash/filters/geoip"
5 |
6 | CITYDB = ::Dir.glob(::File.expand_path(::File.join("..", "..", "..", "vendor", "GeoLite2-City.mmdb"), __FILE__)).first
7 | ASNDB = ::Dir.glob(::File.expand_path(::File.join("..", "..", "..", "vendor", "GeoLite2-ASN.mmdb"), __FILE__)).first
8 |
9 |
10 | describe LogStash::Filters::GeoIP do
11 | shared_examples "invalid empty IP" do
12 | it "should not give target field" do
13 | expect(event.get(target)).to be_nil
14 | expect(event.get("tags")).to include("_geoip_lookup_failure")
15 | end
16 | end
17 |
18 | shared_examples "invalid string IP" do
19 | it "should give empty hash in target field" do
20 | expect(event.get(target)).to eq({})
21 | expect(event.get("tags")).to include("_geoip_lookup_failure")
22 | end
23 | end
24 |
25 | let(:target) { "server" }
26 |
27 | describe "invalid IP" do
28 | let(:ip) { "173.9.34.107" }
29 | let(:event) { LogStash::Event.new("client" => { "ip" => ip } ) }
30 | let(:plugin) {
31 | LogStash::Filters::GeoIP.new(
32 | "source" => "[client][ip]",
33 | "target" => target,
34 | "fields" => %w[country_name continent_code],
35 | "database" => CITYDB
36 | )
37 | }
38 |
39 | before do
40 | plugin.register
41 | plugin.filter(event)
42 | end
43 |
44 | context "when ip is 127.0.0.1" do
45 | let(:ip) { "127.0.0.1" }
46 | it "should give empty hash" do
47 | expect(event.get(target)).to eq({})
48 | end
49 | end
50 |
51 | context "when ip is empty string" do
52 | let(:ip) { "" }
53 | it_behaves_like "invalid empty IP"
54 | end
55 |
56 | context "when ip is space" do
57 | let(:ip) { " " }
58 | it_behaves_like "invalid empty IP"
59 | end
60 |
61 | context "when ip is dash" do
62 | let(:ip) { "-" }
63 | it_behaves_like "invalid string IP"
64 | end
65 |
66 | context "when ip is N/A" do
67 | let(:ip) { "N/A" }
68 | it_behaves_like "invalid string IP"
69 | end
70 |
71 | context "when ip is two ip comma separated" do
72 | let(:ip) { "123.45.67.89,61.160.232.222" }
73 | it_behaves_like "invalid string IP"
74 | end
75 |
76 | context "when ip is not found in the DB" do
77 | let(:ip) { "0.0.0.0" }
78 | it_behaves_like "invalid string IP"
79 | end
80 |
81 | context "when ip is IPv6 format for localhost" do
82 | let(:ip) { "::1" }
83 | it_behaves_like "invalid string IP"
84 | end
85 | end
86 |
87 | describe "database path is empty" do
88 | let(:plugin) { LogStash::Filters::GeoIP.new("source" => "message", "target" => target) }
89 | let(:event) { LogStash::Event.new("message" => "8.8.8.8") }
90 |
91 | context "when database manager give nil database path" do
92 | it "should tag expired database" do
93 | expect(plugin).to receive(:select_database_path).and_return(nil)
94 |
95 | plugin.register
96 | plugin.filter(event)
97 |
98 | expect(event.get("tags")).to include("_geoip_expired_database")
99 | end
100 | end
101 | end
102 |
103 | describe "database path is an invalid database file" do
104 | config <<-CONFIG
105 | filter {
106 | geoip {
107 | source => "ip"
108 | target => "geo"
109 | database => "./Gemfile"
110 | }
111 | }
112 | CONFIG
113 |
114 | context "should return the correct sourcefield in the logging message" do
115 | sample("ip" => "8.8.8.8") do
116 | expect { subject }.to raise_error(java.lang.IllegalArgumentException, "The database provided is invalid or corrupted.")
117 | end
118 | end
119 | end
120 |
121 | end
122 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Logstash Plugin
2 |
3 | [](https://travis-ci.com/logstash-plugins/logstash-filter-geoip)
4 |
5 | This is a plugin for [Logstash](https://github.com/elastic/logstash).
6 |
7 | It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
8 |
9 | ## Documentation
10 |
11 | Logstash provides infrastructure to automatically generate documentation for this plugin. We use the asciidoc format to write documentation so any comments in the source code will be first converted into asciidoc and then into html. All plugin documentation is placed under one [central location](http://www.elastic.co/guide/en/logstash/current/).
12 |
13 | - For formatting code or config example, you can use the asciidoc `[source,ruby]` directive
14 | - For more asciidoc formatting tips, see the excellent reference here https://github.com/elastic/docs#asciidoc-guide
15 |
16 | ## Need Help?
17 |
18 | Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/logstash discussion forum.
19 |
20 | ## Developing
21 |
22 | ### 1. Plugin Development and Testing
23 |
24 | #### Code
25 | - To get started, you'll need JRuby with the Bundler gem installed.
26 |
27 | - Create a new plugin or clone an existing one from the GitHub [logstash-plugins](https://github.com/logstash-plugins) organization. We also provide [example plugins](https://github.com/logstash-plugins?query=example).
28 |
29 | - Install dependencies
30 | ```sh
31 | bundle install
32 | ```
33 |
34 | #### Test
35 |
36 | - Update your dependencies
37 |
38 | ```sh
39 | bundle install
40 | ```
41 |
42 | - Pull down GeoIP database files
43 |
44 | ```sh
45 | bundle exec rake vendor
46 | ```
47 |
48 | - Install jar dependencies
49 |
50 | ```
51 | bundle exec rake install_jars
52 | ```
53 |
54 | - Run tests
55 |
56 | ```sh
57 | bundle exec rspec
58 | ```
59 |
60 | ### 2. Running your unpublished Plugin in Logstash
61 |
62 | #### 2.1 Run in a local Logstash clone
63 |
64 | - Edit Logstash `Gemfile` and add the local plugin path, for example:
65 | ```ruby
66 | gem "logstash-filter-awesome", :path => "/your/local/logstash-filter-awesome"
67 | ```
68 | - Install plugin
69 | ```sh
70 | # Logstash 2.3 and higher
71 | bin/logstash-plugin install --no-verify
72 |
73 | # Prior to Logstash 2.3
74 | bin/plugin install --no-verify
75 |
76 | ```
77 | - Run Logstash with your plugin
78 | ```sh
79 | bin/logstash -e 'filter {awesome {}}'
80 | ```
81 | At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash.
82 |
83 | #### 2.2 Run in an installed Logstash
84 |
85 | You can use the same **2.1** method to run your plugin in an installed Logstash by editing its `Gemfile` and pointing the `:path` to your local plugin development directory or you can build the gem and install it using:
86 |
87 | - Build your plugin gem
88 | ```sh
89 | gem build logstash-filter-awesome.gemspec
90 | ```
91 | - Install the plugin from the Logstash home
92 | ```sh
93 | # Logstash 2.3 and higher
94 | bin/logstash-plugin install --no-verify
95 |
96 | # Prior to Logstash 2.3
97 | bin/plugin install --no-verify
98 |
99 | ```
100 | - Start Logstash and proceed to test the plugin
101 |
102 | ## Contributing
103 |
104 | All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin.
105 |
106 | Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here.
107 |
108 | It is more important to the community that you are able to contribute.
109 |
110 | For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file.
--------------------------------------------------------------------------------
/src/main/java/org/logstash/filters/geoip/Fields.java:
--------------------------------------------------------------------------------
1 | package org.logstash.filters.geoip;
2 |
3 | /*
4 | * Licensed to Elasticsearch under one or more contributor
5 | * license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright
7 | * ownership. Elasticsearch licenses this file to you under
8 | * the Apache License, Version 2.0 (the "License"); you may
9 | * not use this file except in compliance with the License.
10 | * You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing,
15 | * software distributed under the License is distributed on an
16 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
17 | * KIND, either express or implied. See the License for the
18 | * specific language governing permissions and limitations
19 | * under the License.
20 | */
21 |
22 | import java.util.*;
23 | import java.util.stream.Collectors;
24 | import java.util.stream.Stream;
25 |
/**
 * Enumerates the event fields the GeoIP filter can populate, mapping each field to
 * both its legacy (pre-ECS) name (e.g. {@code city_name}) and its Elastic Common
 * Schema dot-path (e.g. {@code geo.city_name}).
 */
enum Fields {
  AUTONOMOUS_SYSTEM_NUMBER("as.number", "asn"),
  AUTONOMOUS_SYSTEM_ORGANIZATION("as.organization.name", "as_org"),
  CITY_NAME("geo.city_name", "city_name"),
  COUNTRY_NAME("geo.country_name", "country_name"),
  CONTINENT_CODE("geo.continent_code", "continent_code"),
  CONTINENT_NAME("geo.continent_name", "continent_name"),
  COUNTRY_CODE2("geo.country_iso_code", "country_code2"),
  COUNTRY_CODE3("", "country_code3"), // `country_code3` is not ECS compatible and will be skipped in ECS mode
  DOMAIN("domain"),
  IP("ip"),
  ISP("mmdb.isp", "isp"),
  POSTAL_CODE("geo.postal_code", "postal_code"),
  DMA_CODE("mmdb.dma_code", "dma_code"),
  REGION_NAME("geo.region_name", "region_name"),
  REGION_CODE("geo.region_code", "region_code"),
  REGION_ISO_CODE("geo.region_iso_code", "region_iso_code"),
  TIMEZONE("geo.timezone", "timezone"),
  LOCATION("geo.location", "location"),
  LATITUDE("geo.location.lat", "latitude"),
  LONGITUDE("geo.location.lon", "longitude"),
  ORGANIZATION("mmdb.organization", "organization");

  // legacy (ECS-disabled) field name, e.g. `city_name`
  private final String fieldName;
  // ECS dot-path, e.g. `geo.city_name`; empty when the field has no ECS equivalent
  private final String ecsFieldName;

  // pre-computed bracket-notation Logstash field references for each mode
  private final String fieldReferenceLegacy;
  private final String fieldReferenceECSv1;

  /**
   * Single-name constructor: the same name is used in both legacy and ECS modes.
   * @deprecated prefer the two-argument constructor with an explicit ECS name
   */
  @Deprecated
  Fields(String fieldName) {
    this(fieldName, fieldName);
  }

  /**
   * @param ecsFieldName    dot-separated ECS field path (may be empty if not ECS compatible)
   * @param legacyFieldName field name used when ECS compatibility is disabled
   */
  Fields(final String ecsFieldName, final String legacyFieldName) {
    this.ecsFieldName = ecsFieldName;
    this.fieldName = legacyFieldName;

    this.fieldReferenceLegacy = normalizeFieldReferenceFragment(fieldName);
    this.fieldReferenceECSv1 = normalizeFieldReferenceFragment(ecsFieldName);
  }

  /** @return the legacy (ECS-disabled) field name */
  public String fieldName() {
    return fieldName;
  }

  /** @return the ECS dot-path for this field (empty when not ECS compatible) */
  public String getEcsFieldName() {
    return this.ecsFieldName;
  }

  /** @return bracket-notation field reference for legacy mode, e.g. {@code [city_name]} */
  public String getFieldReferenceLegacy() {
    return this.fieldReferenceLegacy;
  }

  /** @return bracket-notation field reference for ECS v1 mode, e.g. {@code [geo][city_name]} */
  public String getFieldReferenceECSv1() {
    return this.fieldReferenceECSv1;
  }

  // Lookup table from upper-cased enum constant name to its Fields value.
  private static final Map<String, Fields> MAPPING;
  static {
    final Map<String, Fields> mapping = new HashMap<>();
    for (Fields value : values()) {
      mapping.put(value.name().toUpperCase(Locale.ROOT), value);
    }
    MAPPING = Collections.unmodifiableMap(mapping);
  }

  static final EnumSet<Fields> ALL_FIELDS = EnumSet.allOf(Fields.class);

  static final EnumSet<Fields> DEFAULT_CITY_FIELDS = EnumSet.of(Fields.IP, Fields.CITY_NAME,
      Fields.CONTINENT_CODE, Fields.COUNTRY_NAME, Fields.COUNTRY_CODE2,
      Fields.COUNTRY_CODE3, Fields.POSTAL_CODE, Fields.DMA_CODE, Fields.REGION_NAME,
      Fields.REGION_CODE, Fields.TIMEZONE, Fields.LOCATION, Fields.LATITUDE, Fields.LONGITUDE);

  // When ECS is enabled, the composite REGION_ISO_CODE field is preferred to separate REGION_CODE
  static final EnumSet<Fields> DEFAULT_ECS_CITY_FIELDS;
  static {
    DEFAULT_ECS_CITY_FIELDS = EnumSet.copyOf(DEFAULT_CITY_FIELDS);
    DEFAULT_ECS_CITY_FIELDS.remove(REGION_CODE);
    DEFAULT_ECS_CITY_FIELDS.add(REGION_ISO_CODE);
  }

  static final EnumSet<Fields> DEFAULT_COUNTRY_FIELDS = EnumSet.of(Fields.IP, Fields.COUNTRY_CODE2,
      Fields.COUNTRY_NAME, Fields.CONTINENT_NAME);

  static final EnumSet<Fields> DEFAULT_ISP_FIELDS = EnumSet.of(Fields.IP, Fields.AUTONOMOUS_SYSTEM_NUMBER,
      Fields.AUTONOMOUS_SYSTEM_ORGANIZATION, Fields.ISP, Fields.ORGANIZATION);

  static final EnumSet<Fields> DEFAULT_ASN_LITE_FIELDS = EnumSet.of(Fields.IP, Fields.AUTONOMOUS_SYSTEM_NUMBER,
      Fields.AUTONOMOUS_SYSTEM_ORGANIZATION);

  static final EnumSet<Fields> DEFAULT_DOMAIN_FIELDS = EnumSet.of(Fields.DOMAIN);

  /**
   * Resolves a (case-insensitive) enum constant name into its {@link Fields} value.
   * @param value a constant name such as {@code city_name} or {@code CITY_NAME}
   * @return the matching {@link Fields} value
   * @throws IllegalArgumentException if the name does not match any field
   */
  public static Fields parseField(String value) {
    final Fields fields = MAPPING.get(value.toUpperCase(Locale.ROOT));
    if (fields == null) {
      throw new IllegalArgumentException("illegal field value " + value + ". valid values are " +
          Arrays.toString(ALL_FIELDS.toArray()));
    }
    return fields;
  }

  /**
   * Normalizes a dot-separated field path into a bracket-notation Logstash Field Reference
   * @param fieldName a dot-separated field path (e.g., {@code geo.location.lat})
   * @return a bracket-notation Field Reference (e.g., {@code [geo][location][lat]})
   */
  private static String normalizeFieldReferenceFragment(final String fieldName) {
    return Stream.of(fieldName.split("\\."))
        .map((f) -> "[" + f + "]")
        .collect(Collectors.joining());
  }
}
139 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

##############################################################################
##
##  Gradle start up script for UN*X
##
##  Generated Gradle wrapper: locates a JVM, builds the wrapper classpath,
##  applies per-OS fixups, and launches GradleWrapperMain.
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
# APP_HOME is the physical directory containing this script (symlinks resolved).
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a message to stdout without aborting.
warn ( ) {
    echo "$*"
}

# Print a message and abort with a non-zero exit status.
die ( ) {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

# The wrapper jar is the only classpath entry; it bootstraps the real Gradle distribution.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    # No JAVA_HOME: fall back to whatever `java` is on PATH.
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    # Re-set the positional parameters from the converted args (supports up to 9).
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Split up the JVM_OPTS and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
    JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]]; then
  cd "$(dirname "$0")"
fi

exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## 7.2.12
2 | - [DOC] Add `http_proxy` environment variable for GeoIP service endpoint. The feature is included in 8.1.0, and was back-ported to 7.17.2 [#207](https://github.com/logstash-plugins/logstash-filter-geoip/pull/207)
3 |
4 | ## 7.2.11
5 | - Improved compatibility with the Elastic Common Schema [#206](https://github.com/logstash-plugins/logstash-filter-geoip/pull/206)
6 | - Added support for ECS's composite `region_iso_code` (`US-WA`), which _replaces_ the non-ECS `region_code` (`WA`) as a default field with City databases. To get the stand-alone `region_code` in ECS mode, you must include it in the `fields` directive.
7 | - [DOC] Improve ECS-related documentation
8 |
9 | ## 7.2.10
10 | - [DOC] Air-gapped environment requires both ASN and City databases [#204](https://github.com/logstash-plugins/logstash-filter-geoip/pull/204)
11 |
12 | ## 7.2.9
13 | - Fix: red CI in Logstash 8.0 [#201](https://github.com/logstash-plugins/logstash-filter-geoip/pull/201)
14 | - Update Log4j dependency to 2.17.1
15 |
16 | ## 7.2.8
17 | - Update Log4j dependency to 2.17.0
18 |
19 | ## 7.2.7
20 | - Ensure java 8 compatibility [#197](https://github.com/logstash-plugins/logstash-filter-geoip/pull/197)
21 |
22 | ## 7.2.6
23 | - Update Log4J dependencies [#196](https://github.com/logstash-plugins/logstash-filter-geoip/pull/196)
24 |
25 | ## 7.2.5
26 | - Added preview of ECS-v8 support with existing ECS-v1 implementation [#193](https://github.com/logstash-plugins/logstash-filter-geoip/pull/193)
27 |
28 | ## 7.2.4
29 | - Fix: update to Gradle 7 [#191](https://github.com/logstash-plugins/logstash-filter-geoip/pull/191)
30 | - [DOC] Clarify CC licensed database indefinite use condition and air-gapped environment [#192](https://github.com/logstash-plugins/logstash-filter-geoip/pull/192)
31 |
32 | ## 7.2.3
33 | - [DOC] Add documentation for bootstrapping air-gapped environment for database auto-update [#189](https://github.com/logstash-plugins/logstash-filter-geoip/pull/189)
34 |
35 | ## 7.2.2
36 | - [DOC] Add documentation for database auto-update behavior and database metrics [#187](https://github.com/logstash-plugins/logstash-filter-geoip/pull/187)
37 |
38 | ## 7.2.1
39 | - Republish the gem due to missing jars in 7.2.0 [#186](https://github.com/logstash-plugins/logstash-filter-geoip/pull/186)
40 |
41 | ## 7.2.0
42 | - YANKED
43 | - Add EULA GeoIP2 Database with auto-update [#181](https://github.com/logstash-plugins/logstash-filter-geoip/pull/181)
44 | Available in Logstash 7.14+
45 | - Support multiple pipelines using the same database
46 | - Add EULA doc
47 |
48 | ## 7.1.3
49 | - Fixed resolving wrong `fields` name `AUTONOMOUS_SYSTEM_NUMBER` and `AUTONOMOUS_SYSTEM_ORGANIZATION` [#185](https://github.com/logstash-plugins/logstash-filter-geoip/pull/185)
50 |
51 | ## 7.1.2
52 | - Remove EULA doc as MaxMind auto-update has been retargeted to a later release [#183](https://github.com/logstash-plugins/logstash-filter-geoip/pull/183)
53 |
54 | ## 7.1.1
55 | - Changed the behaviour of database expiry. Instead of stopping the pipeline, it adds a tag `_geoip_expired_database` [#182](https://github.com/logstash-plugins/logstash-filter-geoip/pull/182)
56 |
57 | ## 7.1.0
58 | - Add ECS compatibility [#179](https://github.com/logstash-plugins/logstash-filter-geoip/pull/179)
59 |
60 | ## 7.0.1
61 | - [DOC] Add documentation for MaxMind database license change [#177](https://github.com/logstash-plugins/logstash-filter-geoip/pull/177)
62 |
63 | ## 7.0.0
64 | - Changed the plugin to use EULA GeoIP2 Database with auto-update [#176](https://github.com/logstash-plugins/logstash-filter-geoip/pull/176)
65 | Available in Logstash 7.13+ Elastic license
66 |
67 | ## 6.0.5
68 | - Fix database download task. Upgrade project to java 11 [#175](https://github.com/logstash-plugins/logstash-filter-geoip/pull/175)
69 |
70 | ## 6.0.4
71 | - Enable the use of MaxMind GeoIP2-Domain databases [#162](https://github.com/logstash-plugins/logstash-filter-geoip/pull/162)
72 |
73 | ## 6.0.3
74 | - Fixed docs for missing region_code [#158](https://github.com/logstash-plugins/logstash-filter-geoip/pull/158)
75 |
76 | ## 6.0.2
77 | - Update of GeoLite2 DB [#157](https://github.com/logstash-plugins/logstash-filter-geoip/pull/157)
78 |
79 | ## 6.0.1
80 | - Fixed deeplink to Elasticsearch Reference
81 | [#151](https://github.com/logstash-plugins/logstash-filter-geoip/pull/151)
82 |
83 | ## 6.0.0
84 | - Removed obsolete lru_cache_size field
85 |
86 | ## 5.0.3
87 | - Skip lookup operation if source field contains an empty string
88 | - Update of the GeoIP2 DB
89 |
90 | ## 5.0.2
91 | - Update gemspec summary
92 |
93 | ## 5.0.1
94 | - Fix some documentation issues
95 |
96 | ## 5.0.0
97 | - Make deprecated field lru_cache_size obsolete
98 |
99 | ## 4.3.0
100 | - Bundle the GeoLite2-ASN database by default
- Add default_database_type configuration option to allow selection between the GeoLite2-City and GeoLite2-ASN databases.
102 |
103 | ## 4.2.0
104 | - Add support for GeoLite2-ASN database from MaxMind for ASN data.
105 | - Update Java dependencies to 2.9.0 to support the new ASN database.
106 |
107 | ## 4.1.1
108 | - Add support for commercial databases from MaxMind.
109 | - Add ASN data support via GeoIP2-ISP database.
110 |
111 | ## 4.1.0
112 | - Removed from RubyGems.org since it was missing the default GeoIP2 database.
113 |
114 | ## 4.0.6
115 | - Docs: Remove patch classes from the main plugin file
116 | - Update of the GeoIP2 DB
117 |
118 | ## 4.0.5
119 | - Docs: Clarify GeoLite2 database support
120 |
121 | ## 4.0.4
122 | - Update of the GeoIP2 DB
123 | - Target should be merged and not completely overwritten (#98)
124 |
125 | ## 4.0.3
126 | - Update of the GeoIP2 DB
127 |
128 | ## 4.0.2
129 | - Recreate gem since 4.0.1 lacked jars
130 |
131 | ## 4.0.1
132 | - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99
133 |
134 | ## 4.0.0
135 | - Update the plugin to the version 2.0 of the plugin api, this change is required for Logstash 5.0 compatibility. See https://github.com/elastic/logstash/issues/5141
136 | - GA release for GeoIP2 database, compatible with LS 5.x
137 |
138 | # 3.0.0-beta3
139 | - Return empty result when IP lookup fails for location field (#70)
140 |
141 | # 3.0.0-beta2
142 | - Internal: Actually include the vendored jars
143 |
144 | # 3.0.0-beta1
145 | - Changed plugin to use GeoIP2 database. See http://dev.maxmind.com/geoip/geoip2/whats-new-in-geoip2/
146 |
147 | # 2.0.7
148 | - Depend on logstash-core-plugin-api instead of logstash-core, removing the need to mass update plugins on major releases of logstash
149 | # 2.0.6
150 | - New dependency requirements for logstash-core for the 5.0 release
151 | ## 2.0.5
152 | - Use proper field references
153 |
154 | ## 2.0.4
155 | - Refactor GeoIP Struct to hash conversion to minimise repeated manipulation
156 |
157 | ## 2.0.3
158 | - Fix Issue 50, incorrect data returned when geo lookup fails
159 |
160 | ## 2.0.2
161 | - Update core dependency in gemspec
162 |
163 | ## 2.0.1
164 | - Remove filter? call
165 |
166 | ## 2.0.0
167 | - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully,
168 | instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895
169 | - Dependency on logstash-core update to 2.0
170 |
171 | * 1.1.2
172 | - Be more defensive with threadsafety, mostly for specs
173 | * 1.1.1
174 | - Lazy-load LRU cache
175 | * 1.1.0
176 | - Add LRU cache
177 |
--------------------------------------------------------------------------------
/lib/logstash/filters/geoip.rb:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | require "logstash/filters/base"
3 | require "logstash/namespace"
4 | require "logstash-filter-geoip_jars"
5 | require "logstash/plugin_mixins/ecs_compatibility_support"
6 |
7 |
8 | # The GeoIP filter adds information about the geographical location of IP addresses,
9 | # based on data from the MaxMind GeoLite2 database.
10 | #
11 | # A `[geoip][location]` field is created if
12 | # the GeoIP lookup returns a latitude and longitude. The field is stored in
13 | # http://geojson.org/geojson-spec.html[GeoJSON] format. Additionally,
14 | # the default Elasticsearch template provided with the
15 | # <> maps
16 | # the `[geoip][location]` field to an http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/mapping-geo-point-type.html#_mapping_options[Elasticsearch geo_point].
17 | #
18 | # As this field is a `geo_point` _and_ it is still valid GeoJSON, you get
19 | # the awesomeness of Elasticsearch's geospatial query, facet and filter functions
20 | # and the flexibility of having GeoJSON for all other applications (like Kibana's
21 | # map visualization).
22 | #
23 | # [NOTE]
24 | # --
25 | # This product includes GeoLite2 data created by MaxMind, available from
26 | # http://www.maxmind.com. This database is licensed under
27 | # http://creativecommons.org/licenses/by-sa/4.0/[Creative Commons Attribution-ShareAlike 4.0 International License].
28 | #
29 | # Versions 4.0.0 and later of the GeoIP filter use the MaxMind GeoLite2 database
30 | # and support both IPv4 and IPv6 lookups. Versions prior to 4.0.0 use the legacy
31 | # MaxMind GeoLite database and support IPv4 lookups only.
32 | # --
33 |
class LogStash::Filters::GeoIP < LogStash::Filters::Base
  # `ecs_compatibility => :v8` is mapped onto the existing :v1 field layout.
  include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1, :v8 => :v1)

  config_name "geoip"

  # The path to the GeoLite2 database file which Logstash should use. City and ASN databases are supported.
  #
  # If not specified, this will default to the GeoLite2 City database that ships
  # with Logstash.
  config :database, :validate => :path

  # If using the default database, which type should Logstash use. Valid values are "City" and "ASN", and case matters.
  config :default_database_type, :validate => ["City","ASN"], :default => "City"

  # The field containing the IP address or hostname to map via geoip. If
  # this field is an array, only the first value will be used.
  config :source, :validate => :string, :required => true

  # An array of geoip fields to be included in the event.
  #
  # Possible fields depend on the database type. By default, all geoip fields
  # are included in the event.
  #
  # For the built-in GeoLite2 City database, the following are available:
  # `city_name`, `continent_code`, `country_code2`, `country_code3`, `country_name`,
  # `dma_code`, `ip`, `latitude`, `longitude`, `postal_code`, `region_name` and `timezone`.
  config :fields, :validate => :array

  # Specify the field into which Logstash should store the geoip data.
  # This can be useful, for example, if you have `src_ip` and `dst_ip` fields and
  # would like the GeoIP information of both IPs.
  #
  # ECS disabled/ Legacy default: `geoip`
  # ECS default: The `target` is auto-generated from `source` when the `source` specifies an `ip` sub-field
  # For example, source => [client][ip], `target` will be `client`
  # If `source` is not an `ip` sub-field, source => client_ip, `target` setting is mandatory
  #
  # Elasticsearch ECS mode expected `geo` fields to be nested at:
  # `client`, `destination`, `host`, `observer`, `server`, `source`
  #
  # `geo` fields are not expected to be used directly at the root of the events
  config :target, :validate => :string

  # GeoIP lookup is surprisingly expensive. This filter uses a cache to take advantage of the fact that
  # IPs agents are often found adjacent to one another in log files and rarely have a random distribution.
  # The higher you set this the more likely an item is to be in the cache and the faster this filter will run.
  # However, if you set this too high you can use more memory than desired.
  # Since the Geoip API upgraded to v2, there is not any eviction policy so far, if cache is full, no more record can be added.
  # Experiment with different values for this option to find the best performance for your dataset.
  #
  # This MUST be set to a value > 0. There is really no reason to not want this behavior, the overhead is minimal
  # and the speed gains are large.
  #
  # It is important to note that this config value is global to the geoip_type. That is to say all instances of the geoip filter
  # of the same geoip_type share the same cache. The last declared cache size will 'win'. The reason for this is that there would be no benefit
  # to having multiple caches for different instances at different points in the pipeline, that would just increase the
  # number of cache misses and waste memory.
  config :cache_size, :validate => :number, :default => 1000

  # Tags the event on failure to look up geo information. This can be used in later analysis.
  config :tag_on_failure, :validate => :array, :default => ["_geoip_lookup_failure"]

  public

  # Top-level field names under which Elasticsearch ECS mode expects nested `geo` data.
  ECS_TARGET_FIELD = %w{
    client
    destination
    host
    observer
    server
    source
  }.map(&:freeze).freeze

  # Resolves the target field and initializes the Java GeoIPFilter with the
  # selected database path.
  def register
    setup_target_field
    setup_filter(select_database_path)
  end

  public
  # Enriches a single event with geo data; tags it on lookup failure, or with
  # `_geoip_expired_database` when the database license/state is no longer valid.
  def filter(event)
    return unless filter?(event)
    return event.tag("_geoip_expired_database") unless @healthy_database

    if @geoipfilter.handleEvent(event)
      filter_matched(event)
    else
      tag_unsuccessful_lookup(event)
    end
  end

  # Applies each `tag_on_failure` tag when the source IP was not found in the database.
  def tag_unsuccessful_lookup(event)
    @logger.debug? && @logger.debug("IP #{event.get(@source)} was not found in the database", :event => event)
    @tag_on_failure.each{|tag| event.tag(tag)}
  end

  # Legacy mode defaults `target` to `geoip`; ECS mode derives it from `source`
  # and warns when it falls outside the ECS-expected top-level fields.
  def setup_target_field
    if ecs_compatibility == :disabled
      @target ||= 'geoip'
    else
      @target ||= auto_target_from_source!
      # normalize top-level fields to not be bracket-wrapped
      normalized_target = @target.gsub(/\A\[([^\[\]]+)\]\z/,'\1')
      logger.warn("ECS expect `target` value `#{normalized_target}` in #{ECS_TARGET_FIELD}") unless ECS_TARGET_FIELD.include?(normalized_target)
    end
  end

  # Derives `target` from a `source` of the form `[...][ip]` (e.g. `[client][ip]`
  # yields `[client]`); raises a ConfigurationError otherwise.
  def auto_target_from_source!
    return @source[0...-4] if @source.end_with?('[ip]') && @source.length > 4

    fail(LogStash::ConfigurationError, "GeoIP Filter in ECS-Compatibility mode "\
      "requires a `target` when `source` is not an `ip` sub-field, eg. [client][ip]")
  end

  # (Re-)creates the Java GeoIPFilter backend; a nil path marks the filter unhealthy.
  def setup_filter(database_path)
    @healthy_database = !database_path.nil?
    return if database_path.nil?

    @database = database_path
    @geoipfilter = org.logstash.filters.geoip.GeoIPFilter.new(@source, @target, @fields, @database, @cache_size, ecs_compatibility.to_s)
  end

  # called by DatabaseManager when the managed database is updated or expires
  def update_filter(action, *args)
    @logger.trace("update filter", :action => action, :args => args) if @logger.trace?

    case action
    when :update
      setup_filter(*args)
    when :expire
      fail_filter
    else
      @logger.warn("invalid action: #{action}")
    end
  end

  # Marks the database unhealthy so subsequent events are tagged instead of enriched.
  def fail_filter
    @healthy_database = false
  end

  def close
    @database_manager.unsubscribe_database_path(@default_database_type, self) if @database_manager
  end

  # Picks the database path: delegates to x-pack's DatabaseManager when available,
  # otherwise falls back to the bundled vendor database (or an explicit `database`).
  def select_database_path
    path =
      if load_database_manager?
        @database_manager = LogStash::Filters::Geoip::DatabaseManager.instance
        @database_manager.subscribe_database_path(@default_database_type, @database, self)
      else
        vendor_path = ::File.expand_path(::File.join("..", "..", "..", "..", "vendor"), __FILE__)
        @database.nil? ? ::File.join(vendor_path, "GeoLite2-#{@default_database_type}.mmdb") : @database
      end

    @logger.info("Using geoip database", :path => path)
    path
  end

  # True when the x-pack DatabaseManager can be loaded and the running Logstash
  # version supports it; false (with an info log) when it is not on the classpath.
  def load_database_manager?
    begin
      require_relative ::File.join(LogStash::Environment::LOGSTASH_HOME, "x-pack", "lib", "filters", "geoip", "database_manager")
      compatible_logstash_version?
    rescue LoadError => e
      @logger.info("DatabaseManager is not in classpath", :version => LOGSTASH_VERSION, :exception => e)
      false
    end
  end

  # Database auto-update via DatabaseManager requires Logstash 7.14.0 or newer.
  MINIMUM_LOGSTASH_VERSION=">= 7.14.0".freeze
  def compatible_logstash_version?
    Gem::Requirement.new(MINIMUM_LOGSTASH_VERSION).satisfied_by?(Gem::Version.new(LOGSTASH_VERSION))
  end

end # class LogStash::Filters::GeoIP
207 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright 2020 Elastic and contributors
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/spec/filters/geoip_ecs_spec.rb:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | require "logstash/devutils/rspec/spec_helper"
3 | require "logstash/filters/geoip"
4 | require_relative 'test_helper'
5 | require 'logstash/plugin_mixins/ecs_compatibility_support/spec_helper'
6 |
7 | CITYDB = ::Dir.glob(::File.expand_path(::File.join("..", "..", "..", "vendor", "GeoLite2-City.mmdb"), __FILE__)).first
8 | ASNDB = ::Dir.glob(::File.expand_path(::File.join("..", "..", "..", "vendor", "GeoLite2-ASN.mmdb"), __FILE__)).first
9 |
10 | describe LogStash::Filters::GeoIP do
11 | let(:options) { {} }
12 | let(:plugin) { LogStash::Filters::GeoIP.new(options) }
13 |
14 | describe "simple ip filter", :aggregate_failures do
15 |
16 | context "when specifying the target", :ecs_compatibility_support do
17 | ecs_compatibility_matrix(:disabled, :v1, :v8 => :v1) do |ecs_select|
18 |
        # Shared fixtures: a resolvable public IP, an event carrying it in
        # `message`, and options routing lookup results to the `server` target.
        let(:ip) { "8.8.8.8" }
        let(:event) { LogStash::Event.new("message" => ip) }
        let(:target) { "server" }
        let(:common_options) { {"source" => "message", "database" => CITYDB, "target" => target} }

        # Stub ecs_compatibility so the matrix-supplied mode is what the plugin
        # observes, then register with the merged options.
        before(:each) do
          allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
          plugin.register
        end
28 |
29 | context "with city database" do
30 | # example.com, has been static for 10+ years
31 | # and has city-level details
32 | let(:ip) { "93.184.216.34" }
33 |
34 | let(:options) { common_options }
35 |
36 | it "should return geo in target" do
37 | plugin.filter(event)
38 |
39 | expect( event.get ecs_select[disabled: "[#{target}][ip]", v1: "[#{target}][ip]"] ).to eq ip
40 | expect( event.get ecs_select[disabled: "[#{target}][country_code2]", v1: "[#{target}][geo][country_iso_code]"] ).to eq 'US'
41 | expect( event.get ecs_select[disabled: "[#{target}][country_name]", v1: "[#{target}][geo][country_name]"] ).to eq 'United States'
42 | expect( event.get ecs_select[disabled: "[#{target}][continent_code]", v1: "[#{target}][geo][continent_code]"] ).to eq 'NA'
43 | expect( event.get ecs_select[disabled: "[#{target}][location][lat]", v1: "[#{target}][geo][location][lat]"] ).to eq 42.1596
44 | expect( event.get ecs_select[disabled: "[#{target}][location][lon]", v1: "[#{target}][geo][location][lon]"] ).to eq -70.8217
45 | expect( event.get ecs_select[disabled: "[#{target}][city_name]", v1: "[#{target}][geo][city_name]"] ).to eq 'Norwell'
46 | expect( event.get ecs_select[disabled: "[#{target}][dma_code]", v1: "[#{target}][mmdb][dma_code]"] ).to eq 506
47 | expect( event.get ecs_select[disabled: "[#{target}][region_name]", v1: "[#{target}][geo][region_name]"] ).to eq 'Massachusetts'
48 |
49 | if ecs_select.active_mode == :disabled
50 | expect( event.get "[#{target}][country_code3]" ).to eq 'US'
51 | expect( event.get "[#{target}][region_code]" ).to eq 'MA'
52 | expect( event.get "[#{target}][region_iso_code]" ).to be_nil
53 | else
54 | expect( event.get "[#{target}][geo][country_code3]" ).to be_nil
55 | expect( event.get "[#{target}][country_code3]" ).to be_nil
56 | expect( event.get "[#{target}][geo][region_iso_code]" ).to eq 'US-MA'
57 | expect( event.get "[#{target}][region_code]" ).to be_nil
58 | end
59 | puts event.to_hash.inspect
60 | end
61 | end
62 |
        # ASN database lookups populate autonomous-system fields instead of
        # the city/geo data checked above.
        context "with ASN database" do
          let(:options) { common_options.merge({"database" => ASNDB}) }

          it "should return geo in target" do
            plugin.filter(event)

            expect( event.get ecs_select[disabled: "[#{target}][ip]", v1: "[#{target}][ip]"] ).to eq ip
            expect( event.get ecs_select[disabled: "[#{target}][asn]", v1: "[#{target}][as][number]"] ).to eq 15169
            expect( event.get ecs_select[disabled: "[#{target}][as_org]", v1: "[#{target}][as][organization][name]"] ).to eq "Google LLC"
          end

          # The `fields` option whitelists which lookup results reach the event.
          context "with customize fields" do
            let(:fields) { ["AUTONOMOUS_SYSTEM_NUMBER"] }
            let(:options) { common_options.merge({"database" => ASNDB, "fields" => fields}) }

            it "should give asn field" do
              plugin.filter(event)

              # Fields not listed in `fields` must be absent...
              expect( event.get ecs_select[disabled: "[#{target}][ip]", v1: "[#{target}][ip]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][as_org]", v1: "[#{target}][as][organization][name]"] ).to be_nil

              # ...while the requested ASN number is present.
              expect( event.get ecs_select[disabled: "[#{target}][asn]", v1: "[#{target}][as][number]"] ).to eq 15169
            end
          end
        end
88 |
        # City database with a restricted `fields` list: only the requested
        # fields (plus any composite fields they imply) may appear on the event.
        context "with customize fields" do
          context "continent_name and timezone" do
            let(:fields) { ["continent_name", "timezone"] }
            let(:options) { common_options.merge({"fields" => fields}) }

            it "should return fields in UTF8" do
              plugin.filter(event)

              # Everything outside the whitelist must be absent.
              expect( event.get ecs_select[disabled: "[#{target}][ip]", v1: "[#{target}][ip]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][country_code2]", v1: "[#{target}][geo][country_iso_code]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][country_name]", v1: "[#{target}][geo][country_name]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][continent_code]", v1: "[#{target}][geo][continent_code]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][location][lat]", v1: "[#{target}][geo][location][lat]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][location][lon]", v1: "[#{target}][geo][location][lon]"] ).to be_nil

              # Requested fields are present and UTF-8 encoded.
              continent_name = event.get ecs_select[disabled: "[#{target}][continent_name]", v1: "[#{target}][geo][continent_name]"]
              timezone = event.get ecs_select[disabled: "[#{target}][timezone]", v1: "[#{target}][geo][timezone]"]
              expect( continent_name ).to eq "North America"
              expect( timezone ).to eq "America/Chicago"
              expect( continent_name.encoding ).to eq Encoding::UTF_8
              expect( timezone.encoding ).to eq Encoding::UTF_8
            end
          end

          context "location" do
            # `location` is a composite field, so requesting it together with
            # either latitude or longitude yields both coordinates.
            shared_examples "provide location, lat and lon" do
              it "should return location, lat and lon" do
                plugin.filter(event)

                expect( event.get ecs_select[disabled: "[#{target}][ip]", v1: "[#{target}][ip]"] ).to be_nil
                expect( event.get ecs_select[disabled: "[#{target}][country_code2]", v1: "[#{target}][geo][country_iso_code]"] ).to be_nil
                expect( event.get ecs_select[disabled: "[#{target}][country_name]", v1: "[#{target}][geo][country_name]"] ).to be_nil
                expect( event.get ecs_select[disabled: "[#{target}][continent_code]", v1: "[#{target}][geo][continent_code]"] ).to be_nil
                expect( event.get ecs_select[disabled: "[#{target}][continent_name]", v1: "[#{target}][geo][continent_name]"] ).to be_nil
                expect( event.get ecs_select[disabled: "[#{target}][timezone]", v1: "[#{target}][geo][timezone]"] ).to be_nil

                expect( event.get ecs_select[disabled: "[#{target}][location][lat]", v1: "[#{target}][geo][location][lat]"] ).not_to be_nil
                expect( event.get ecs_select[disabled: "[#{target}][location][lon]", v1: "[#{target}][geo][location][lon]"] ).not_to be_nil
              end
            end

            context "location and longitude" do
              let(:fields) { ["location", "longitude"] }
              let(:options) { common_options.merge({"fields" => fields}) }
              it_behaves_like "provide location, lat and lon"
            end

            context "location and latitude" do
              let(:fields) { ["location", "latitude"] }
              let(:options) { common_options.merge({"fields" => fields}) }
              it_behaves_like "provide location, lat and lon"
            end
          end

          context "continent_code and IP is IPv6 format" do
            let(:ip) { "2607:f0d0:1002:51::4" }
            let(:fields) { ["continent_code", "ip"] }
            let(:options) { common_options.merge({"fields" => fields}) }

            it "should return fields" do
              plugin.filter(event)

              expect( event.get ecs_select[disabled: "[#{target}][country_code2]", v1: "[#{target}][geo][country_iso_code]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][country_name]", v1: "[#{target}][geo][country_name]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][continent_name]", v1: "[#{target}][geo][continent_name]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][location][lat]", v1: "[#{target}][geo][location][lat]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][location][lon]", v1: "[#{target}][geo][location][lon]"] ).to be_nil
              expect( event.get ecs_select[disabled: "[#{target}][timezone]", v1: "[#{target}][geo][timezone]"] ).to be_nil

              # NOTE(review): the IPv6 address appears to be normalized to its
              # expanded form by the lookup — confirm against GeoIPFilter.java.
              expect( event.get ecs_select[disabled: "[#{target}][ip]", v1: "[#{target}][ip]"] ).to eq("2607:f0d0:1002:51:0:0:0:4")
              expect( event.get ecs_select[disabled: "[#{target}][continent_code]", v1: "[#{target}][geo][continent_code]"] ).to eq("NA")
            end
          end
        end
163 | end
164 | end
165 |
    # Covers how the default `target` is derived when it is not configured
    # explicitly, in both legacy (ECS disabled) and ECS modes.
    context "setup target field" do
      let(:ip) { "8.8.8.8" }
      let(:event) { LogStash::Event.new("message" => ip) }
      let(:common_options) { {"source" => "message", "database" => CITYDB} }

      context "ECS disabled" do
        before do
          allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(:disabled)
          plugin.register
          plugin.filter(event)
        end

        # Legacy default target is the literal `geoip` field.
        context "`target` is unset" do
          let(:options) { common_options }
          it "should use 'geoip'" do
            expect( event.get "[geoip][ip]" ).to eq ip
          end
        end

        context "`target` is set" do
          let(:target) { 'host' }
          let(:options) { common_options.merge({"target" => target}) }
          it "should use `target`" do
            expect( event.get "[#{target}][ip]" ).to eq ip
          end
        end
      end

      context "ECS mode" do
        before do
          allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(:v1)
        end

        # In ECS mode there is no implicit `geoip` default: the target is
        # inferred from a `[parent][ip]`-shaped source, or registration fails.
        context "`target` is unset" do

          context "`source` end with [ip]" do
            let(:event) { LogStash::Event.new("host" => {"ip" => ip}) }
            let(:options) { common_options.merge({"source" => "[host][ip]"}) }

            it "should use source's parent as target" do
              plugin.register
              plugin.filter(event)
              expect( event.get "[host][geo][country_iso_code]" ).to eq 'US'
            end
          end

          context "`source` end with [ip] but `target` does not match ECS template" do
            let(:event) { LogStash::Event.new("hostname" => {"ip" => ip}) }
            let(:options) { common_options.merge({"source" => "[hostname][ip]"}) }

            it "should use source's parent as target with warning" do
              expect(plugin.logger).to receive(:warn).with(/ECS expect `target`/)
              plugin.register
              plugin.filter(event)
              expect( event.get "[hostname][geo][country_iso_code]" ).to eq 'US'
            end
          end

          context "`source` == [ip]" do
            let(:event) { LogStash::Event.new("ip" => ip) }
            let(:options) { common_options.merge({"source" => "[ip]"}) }

            it "should raise error to require `target`" do
              expect { plugin.register }.to raise_error LogStash::ConfigurationError, /requires a `target`/
            end
          end

          context "`source` not end with [ip]" do
            let(:event) { LogStash::Event.new("host_ip" => ip) }
            let(:options) { common_options.merge({"source" => "host_ip"}) }

            it "should raise error to require `target`" do
              expect { plugin.register }.to raise_error LogStash::ConfigurationError, /requires a `target`/
            end
          end
        end

        # An explicit `target` always wins; a warning is logged when it does
        # not look like an ECS-compatible field name.
        context "`target` is set" do
          let(:event) { LogStash::Event.new("client" => {"ip" => ip}) }
          let(:options) { common_options.merge({"source" => "[client][ip]", "target" => target}) }

          context "`target` matches ECS template" do
            let(:target) { 'host' }

            it "should use `target`" do
              plugin.register
              plugin.filter(event)
              expect( event.get "[#{target}][geo][country_iso_code]" ).to eq 'US'
            end
          end

          context "`target` in canonical field reference syntax matches ECS template" do
            let(:target) { '[host]' }

            it "should normalize and use `target`" do
              expect(plugin.logger).to receive(:warn).never
              plugin.register
              plugin.filter(event)
              expect( event.get "[host][geo][country_iso_code]" ).to eq 'US'
            end
          end

          context "`target` does not match ECS template" do
            let(:target) { 'host_ip' }

            it "should use `target` with warning" do
              expect(plugin.logger).to receive(:warn).with(/ECS expect `target`/)
              plugin.register
              plugin.filter(event)
              expect( event.get "[#{target}][geo][country_iso_code]" ).to eq 'US'
            end
          end
        end
      end
    end
281 |
282 | end
283 | end
284 |
--------------------------------------------------------------------------------
/docs/index.asciidoc:
--------------------------------------------------------------------------------
1 | :plugin: geoip
2 | :type: filter
3 |
4 | ///////////////////////////////////////////
5 | START - GENERATED VARIABLES, DO NOT EDIT!
6 | ///////////////////////////////////////////
7 | :version: %VERSION%
8 | :release_date: %RELEASE_DATE%
9 | :changelog_url: %CHANGELOG_URL%
10 | :include_path: ../../../../logstash/docs/include
11 | ///////////////////////////////////////////
12 | END - GENERATED VARIABLES, DO NOT EDIT!
13 | ///////////////////////////////////////////
14 |
15 | [id="plugins-{type}s-{plugin}"]
16 |
17 | === Geoip filter plugin
18 |
19 | include::{include_path}/plugin_header.asciidoc[]
20 |
21 | ==== Description
22 |
23 | The GeoIP filter adds information about the geographical location of IP addresses,
24 | based on data from the MaxMind GeoLite2 databases.
25 |
26 | ==== Supported Databases
27 |
28 | This plugin is bundled with https://dev.maxmind.com/geoip/geoip2/geolite2[GeoLite2] City database out of the box. From MaxMind's description --
29 | "GeoLite2 databases are free IP geolocation databases comparable to, but less accurate than, MaxMind’s
30 | GeoIP2 databases". Please see GeoIP Lite2 license for more details.
31 |
32 | https://www.maxmind.com/en/geoip2-databases[Commercial databases] from MaxMind are also supported in this plugin.
33 |
34 | If you need to use databases other than the bundled GeoLite2 City, you can download them directly
35 | from MaxMind's website and use the `database` option to specify their location. The GeoLite2 databases
36 | can be https://dev.maxmind.com/geoip/geoip2/geolite2[downloaded from here].
37 |
If you would like to get Autonomous System Number (ASN) information, you can use the GeoLite2-ASN database.
39 |
40 | [id="plugins-{type}s-{plugin}-database_license"]
41 | ==== Database License
42 |
43 | https://www.maxmind.com[MaxMind] changed from releasing the GeoIP database under
44 | a Creative Commons (CC) license to a proprietary end-user license agreement
45 | (EULA). The MaxMind EULA requires Logstash to update the MaxMind database
46 | within 30 days of a database update.
47 |
48 | The GeoIP filter plugin can manage the database for users running the Logstash default
49 | distribution, or you can manage
50 | database updates on your own. The behavior is controlled by the `database` setting.
51 | When you use the default `database` setting, the auto-update feature ensures that the plugin is
52 | using the latest version of the database.
53 | Otherwise, you are responsible for maintaining compliance.
54 |
55 | The Logstash open source distribution uses the MaxMind Creative Commons license
56 | database by default.
57 |
58 | [id="plugins-{type}s-{plugin}-database_auto"]
59 | ==== Database Auto-update
60 |
61 | This plugin bundles Creative Commons (CC) license databases.
62 | Logstash checks for database updates every day. It downloads the latest and can replace the old database
63 | while the plugin is running.
After Logstash downloads EULA license databases, it will not fall back to CC license databases.
65 |
66 | NOTE: If the database has never been updated successfully, as in air-gapped environments, Logstash can use CC license databases indefinitely.
67 |
68 | After Logstash has switched to a EULA licensed database, the geoip filter will
69 | stop enriching events in order to maintain compliance if Logstash fails to
70 | check for database updates for 30 days.
71 | Events will be tagged with `_geoip_expired_database` tag to facilitate the handling of this situation.
72 |
73 | TIP: When possible, allow Logstash to access the internet to download databases so that they are always up-to-date.
74 |
75 | [id="plugins-{type}s-{plugin}-manage_update"]
76 | ==== Manage your own database updates
77 |
78 | **Use an HTTP proxy**
79 |
80 | If you can't connect directly to the Elastic GeoIP endpoint, consider setting up
81 | an HTTP proxy server. You can then specify the proxy with `http_proxy` environment variable.
82 |
83 | [source,sh]
84 | ----
85 | export http_proxy="http://PROXY_IP:PROXY_PORT"
86 | ----
87 |
88 | **Use a custom endpoint (air-gapped environments)**
89 |
If you work in an air-gapped environment and can't update your databases from the Elastic endpoint,
you can download databases from MaxMind and bootstrap the service.
92 |
93 | . Download both `GeoLite2-ASN.mmdb` and `GeoLite2-City.mmdb` database files from the
94 | http://dev.maxmind.com/geoip/geoip2/geolite2[MaxMind site].
95 |
96 | . Copy both database files to a single directory.
97 |
98 | . https://www.elastic.co/downloads/elasticsearch[Download {es}].
99 |
100 | . From your {es} directory, run:
101 | +
102 | [source,sh]
103 | ----
104 | ./bin/elasticsearch-geoip -s my/database/dir
105 | ----
106 |
107 | . Serve the static database files from your directory. For example, you can use
108 | Docker to serve the files from nginx server:
109 | +
110 | [source,sh]
111 | ----
112 | docker run -p 8080:80 -v my/database/dir:/usr/share/nginx/html:ro nginx
113 | ----
114 |
115 | . Specify the service's endpoint URL using the
116 | `xpack.geoip.download.endpoint=http://localhost:8080/overview.json` setting in `logstash.yml`.
117 |
118 | Logstash gets automatic updates from this service.
119 |
120 | [id="plugins-{type}s-{plugin}-metrics"]
121 | ==== Database Metrics
122 |
123 | You can monitor database status through the {logstash-ref}/node-stats-api.html#node-stats-api[Node Stats API].
124 |
125 | The following request returns a JSON document containing database manager stats,
126 | including:
127 |
128 | * database status and freshness
129 | ** `geoip_download_manager.database.*.status`
130 | *** `init` : initial CC database status
131 | *** `up_to_date` : using up-to-date EULA database
132 | *** `to_be_expired` : 25 days without calling service
133 | *** `expired` : 30 days without calling service
134 | ** `fail_check_in_days` : number of days Logstash fails to call service since the last success
135 | * info about download successes and failures
136 | ** `geoip_download_manager.download_stats.successes` number of successful checks and downloads
137 | ** `geoip_download_manager.download_stats.failures` number of failed check or download
138 | ** `geoip_download_manager.download_stats.status`
139 | *** `updating` : check and download at the moment
*** `succeeded` : last download succeeded
141 | *** `failed` : last download failed
142 |
143 | [source,js]
144 | --------------------------------------------------
145 | curl -XGET 'localhost:9600/_node/stats/geoip_download_manager?pretty'
146 | --------------------------------------------------
147 |
148 | Example response:
149 |
150 | [source,js]
151 | --------------------------------------------------
152 | {
153 | "geoip_download_manager" : {
154 | "database" : {
155 | "ASN" : {
156 | "status" : "up_to_date",
157 | "fail_check_in_days" : 0,
158 | "last_updated_at": "2021-06-21T16:06:54+02:00"
159 | },
160 | "City" : {
161 | "status" : "up_to_date",
162 | "fail_check_in_days" : 0,
163 | "last_updated_at": "2021-06-21T16:06:54+02:00"
164 | }
165 | },
166 | "download_stats" : {
167 | "successes" : 15,
168 | "failures" : 1,
169 | "last_checked_at" : "2021-06-21T16:07:03+02:00",
170 | "status" : "succeeded"
171 | }
172 | }
173 | }
174 | --------------------------------------------------
175 |
176 | [id="plugins-{type}s-{plugin}-field-mapping"]
177 | ==== Field mapping
178 |
When this plugin is run with <<plugins-{type}s-{plugin}-ecs_compatibility,`ecs_compatibility`>> disabled, the MaxMind DB's fields are added directly to the <<plugins-{type}s-{plugin}-target,`target`>>.
When ECS compatibility is enabled, the fields are structured to fit into an ECS shape.
181 |
182 | [cols="3,5,3"]
183 | |===========================
184 | | Database Field Name | ECS Field | Example
185 |
186 | | `ip` | `[ip]` | `12.34.56.78`
187 |
188 | | `city_name` | `[geo][city_name]` | `Seattle`
189 | | `country_name` | `[geo][country_name]` | `United States`
190 | | `continent_code` | `[geo][continent_code]` | `NA`
191 | | `continent_name` | `[geo][continent_name]` | `North America`
192 | | `country_code2` | `[geo][country_iso_code]` | `US`
193 | | `country_code3` | _N/A_ | `US`
194 |
195 | _maintained for legacy
196 | support, but populated
197 | with 2-character country
198 | code_
199 |
200 | | `postal_code` | `[geo][postal_code]` | `98106`
201 | | `region_name` | `[geo][region_name]` | `Washington`
202 | | `region_code` | `[geo][region_code]` | `WA`
203 | | `region_iso_code`* | `[geo][region_iso_code]` | `US-WA`
204 | | `timezone` | `[geo][timezone]` | `America/Los_Angeles`
205 | | `location`* | `[geo][location]` | `{"lat": 47.6062, "lon": -122.3321}"`
206 | | `latitude` | `[geo][location][lat]` | `47.6062`
207 | | `longitude` | `[geo][location][lon]` | `-122.3321`
208 |
209 | | `domain` | `[domain]` | `example.com`
210 |
211 | | `asn` | `[as][number]` | `98765`
212 | | `as_org` | `[as][organization][name]` | `Elastic, NV`
213 |
214 | | `isp` | `[mmdb][isp]` | `InterLink Supra LLC`
215 | | `dma_code` | `[mmdb][dma_code]` | `819`
216 | | `organization` | `[mmdb][organization]` | `Elastic, NV`
217 | |===========================
218 |
219 | NOTE: `*` indicates a composite field, which is only populated if GeoIP lookup result contains all components.
220 |
221 | ==== Details
222 |
223 | When using a City database, the enrichment is aborted if no latitude/longitude pair is available.
224 |
225 | The `location` field combines the latitude and longitude into a structure called https://datatracker.ietf.org/doc/html/rfc7946[GeoJSON].
226 | When you are using a default <>, the templates provided by the {logstash-ref}/plugins-outputs-elasticsearch.html[elasticsearch output] map the field to an {ref}/geo-point.html[Elasticsearch Geo_point datatype].
227 |
228 | As this field is a `geo_point` _and_ it is still valid GeoJSON, you get
229 | the awesomeness of Elasticsearch's geospatial query, facet and filter functions
230 | and the flexibility of having GeoJSON for all other applications (like Kibana's
231 | map visualization).
232 |
233 | [NOTE]
234 | --
235 | This product includes GeoLite2 data created by MaxMind, available from
236 | http://www.maxmind.com. This database is licensed under
237 | http://creativecommons.org/licenses/by-sa/4.0/[Creative Commons Attribution-ShareAlike 4.0 International License].
238 |
239 | Versions 4.0.0 and later of the GeoIP filter use the MaxMind GeoLite2 database
240 | and support both IPv4 and IPv6 lookups. Versions prior to 4.0.0 use the legacy
241 | MaxMind GeoLite database and support IPv4 lookups only.
242 | --
243 |
244 | [id="plugins-{type}s-{plugin}-options"]
245 | ==== Geoip Filter Configuration Options
246 |
This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>> described later.
248 |
[cols="<,<,<",options="header",]
|=======================================================================
|Setting |Input type|Required
| <<plugins-{type}s-{plugin}-cache_size>> |<<number,number>>|No
| <<plugins-{type}s-{plugin}-database>> |a valid filesystem path|No
| <<plugins-{type}s-{plugin}-default_database_type>> |`City` or `ASN`|No
| <<plugins-{type}s-{plugin}-ecs_compatibility>> | <<string,string>>|No
| <<plugins-{type}s-{plugin}-fields>> |<<array,array>>|No
| <<plugins-{type}s-{plugin}-source>> |<<string,string>>|Yes
| <<plugins-{type}s-{plugin}-tag_on_failure>> |<<array,array>>|No
| <<plugins-{type}s-{plugin}-target>> |<<string,string>>|No
|=======================================================================
261 |
Also see <<plugins-{type}s-{plugin}-common-options>> for a list of options supported by all
filter plugins.
264 |
265 |
266 |
267 | [id="plugins-{type}s-{plugin}-cache_size"]
268 | ===== `cache_size`
269 |
* Value type is <<number,number>>
271 | * Default value is `1000`
272 |
GeoIP lookup is surprisingly expensive. This filter uses a cache to take advantage of the fact that
IP addresses are often found adjacent to one another in log files and rarely have a random distribution.
275 | The higher you set this the more likely an item is to be in the cache and the faster this filter will run.
276 | However, if you set this too high you can use more memory than desired.
Since the Geoip API upgraded to v2, there is no eviction policy so far; if the cache is full, no more records can be added.
278 | Experiment with different values for this option to find the best performance for your dataset.
279 |
280 | This MUST be set to a value > 0. There is really no reason to not want this behavior, the overhead is minimal
281 | and the speed gains are large.
282 |
283 | It is important to note that this config value is global to the geoip_type. That is to say all instances of the geoip filter
284 | of the same geoip_type share the same cache. The last declared cache size will 'win'. The reason for this is that there would be no benefit
285 | to having multiple caches for different instances at different points in the pipeline, that would just increase the
286 | number of cache misses and waste memory.
287 |
288 | [id="plugins-{type}s-{plugin}-database"]
289 | ===== `database`
290 |
* Value type is <<path,path>>
292 | * If not specified, the database defaults to the `GeoLite2 City` database that ships with Logstash.
293 |
294 | The path to MaxMind's database file that Logstash should use.
295 | The default database is `GeoLite2-City`.
296 | This plugin supports several free databases (`GeoLite2-City`, `GeoLite2-Country`, `GeoLite2-ASN`)
297 | and a selection of commercially-licensed databases (`GeoIP2-City`, `GeoIP2-ISP`, `GeoIP2-Country`).
298 |
299 | Database auto-update applies to the default distribution.
300 | When `database` points to user's database path, auto-update is disabled.
See <<plugins-{type}s-{plugin}-database_auto>> for more information.
302 |
303 | [id="plugins-{type}s-{plugin}-default_database_type"]
304 | ===== `default_database_type`
305 |
306 | This plugin now includes both the GeoLite2-City and GeoLite2-ASN databases. If `database` and `default_database_type` are unset, the GeoLite2-City database will be selected. To use the included GeoLite2-ASN database, set `default_database_type` to `ASN`.
307 |
308 | * Value type is <<string,string>>
309 | * The default value is `City`
310 | * The only acceptable values are `City` and `ASN`
311 |
312 | [id="plugins-{type}s-{plugin}-fields"]
313 | ===== `fields`
314 |
315 | * Value type is <<array,array>>
316 | * There is no default value for this setting.
317 |
318 | An array of geoip fields to be included in the event.
319 |
320 | Possible fields depend on the database type.
321 | By default, all geoip fields from the relevant database are included in the event.
322 |
323 | For a complete list of available fields and how they map to an event's structure, see <>.
324 |
325 | [id="plugins-{type}s-{plugin}-ecs_compatibility"]
326 | ===== `ecs_compatibility`
327 |
328 | * Value type is <<string,string>>
329 | * Supported values are:
330 | ** `disabled`: unstructured geo data added at root level
331 | ** `v1`, `v8`: use fields that are compatible with Elastic Common Schema. Example: `[client][geo][country_name]`. See <> for more info.
332 | * Default value depends on which version of Logstash is running:
333 | ** When Logstash provides a `pipeline.ecs_compatibility` setting, its value is used as the default
334 | ** Otherwise, the default value is `disabled`.
335 |
336 | Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)].
337 | The value of this setting affects the _default_ value of <<plugins-{type}s-{plugin}-target,`target`>>.
338 |
339 | [id="plugins-{type}s-{plugin}-source"]
340 | ===== `source`
341 |
342 | * This is a required setting.
343 | * Value type is <<string,string>>
344 | * There is no default value for this setting.
345 |
346 | The field containing the IP address or hostname to map via geoip. If
347 | this field is an array, only the first value will be used.
348 |
349 | [id="plugins-{type}s-{plugin}-tag_on_failure"]
350 | ===== `tag_on_failure`
351 |
352 | * Value type is <<array,array>>
353 | * Default value is `["_geoip_lookup_failure"]`
354 |
355 | Tags the event on failure to look up geo information. This can be used in later analysis.
356 |
357 | [id="plugins-{type}s-{plugin}-target"]
358 | ===== `target`
359 |
360 | * This is an optional setting with condition.
361 | * Value type is <<string,string>>
362 | * Default value depends on whether <<plugins-{type}s-{plugin}-ecs_compatibility,`ecs_compatibility`>> is enabled:
363 | ** ECS Compatibility disabled: `geoip`
364 | ** ECS Compatibility enabled: If `source` is an `ip` sub-field, eg. `[client][ip]`,
365 | `target` will automatically be set to the parent field, in this example `client`,
366 | otherwise, `target` is a required setting
367 | *** `geo` field is nested in `[client][geo]`
368 | *** ECS compatible values are `client`, `destination`, `host`, `observer`, `server`, `source`
369 |
370 | Specify the field into which Logstash should store the geoip data.
371 | This can be useful, for example, if you have `src_ip` and `dst_ip` fields and
372 | would like the GeoIP information of both IPs.
373 |
374 | If you save the data to a target field other than `geoip` and want to use the
375 | `geo_point` related functions in Elasticsearch, you need to alter the template
376 | provided with the Elasticsearch output and configure the output to use the
377 | new template.
378 |
379 | Even if you don't use the `geo_point` mapping, the `[target][location]` field
380 | is still valid GeoJSON.
381 |
382 |
383 |
384 | [id="plugins-{type}s-{plugin}-common-options"]
385 | include::{include_path}/{type}.asciidoc[]
386 |
--------------------------------------------------------------------------------
/src/main/java/org/logstash/filters/geoip/GeoIPFilter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to Elasticsearch under one or more contributor
3 | * license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright
5 | * ownership. Elasticsearch licenses this file to you under
6 | * the Apache License, Version 2.0 (the "License"); you may
7 | * not use this file except in compliance with the License.
8 | * You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing,
13 | * software distributed under the License is distributed on an
14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | * KIND, either express or implied. See the License for the
16 | * specific language governing permissions and limitations
17 | * under the License.
18 | */
19 | package org.logstash.filters.geoip;
20 |
21 | import com.maxmind.db.CHMCache;
22 | import com.maxmind.db.InvalidDatabaseException;
23 | import com.maxmind.geoip2.exception.AddressNotFoundException;
24 | import com.maxmind.geoip2.exception.GeoIp2Exception;
25 | import com.maxmind.geoip2.model.AsnResponse;
26 | import com.maxmind.geoip2.model.CityResponse;
27 | import com.maxmind.geoip2.model.CountryResponse;
28 | import com.maxmind.geoip2.model.DomainResponse;
29 | import com.maxmind.geoip2.model.IspResponse;
30 | import com.maxmind.geoip2.record.*;
31 | import org.apache.logging.log4j.LogManager;
32 | import org.apache.logging.log4j.Logger;
33 | import org.logstash.Event;
34 |
35 | import com.maxmind.geoip2.DatabaseReader;
36 | import org.logstash.ext.JrubyEventExtLibrary.RubyEvent;
37 |
38 | import java.io.File;
39 | import java.io.IOException;
40 | import java.net.InetAddress;
41 | import java.net.UnknownHostException;
42 | import java.util.*;
43 | import java.util.function.Function;
44 |
45 | public class GeoIPFilter {
46 | private static Logger logger = LogManager.getLogger();
47 | // The free GeoIP2 databases
48 | private static final String CITY_LITE_DB_TYPE = "GeoLite2-City";
49 | private static final String COUNTRY_LITE_DB_TYPE = "GeoLite2-Country";
50 | private static final String ASN_LITE_DB_TYPE = "GeoLite2-ASN";
51 |
52 | // The paid GeoIP2 databases
53 | private static final String CITY_DB_TYPE = "GeoIP2-City";
54 | private static final String CITY_AFRICA_DB_TYPE = "GeoIP2-City-Africa";
55 | private static final String CITY_ASIA_PACIFIC_DB_TYPE = "GeoIP2-City-Asia-Pacific";
56 | private static final String CITY_EUROPE_DB_TYPE = "GeoIP2-City-Europe";
57 | private static final String CITY_NORTH_AMERICA_DB_TYPE = "GeoIP2-City-North-America";
58 | private static final String CITY_SOUTH_AMERICA_DB_TYPE = "GeoIP2-City-South-America";
59 | private static final String COUNTRY_DB_TYPE = "GeoIP2-Country";
60 | private static final String ISP_DB_TYPE = "GeoIP2-ISP";
61 | private static final String DOMAIN_DB_TYPE = "GeoIP2-Domain";
62 |
63 | private final String sourceField;
64 | private final String targetField;
65 | private final Set desiredFields;
66 | private final DatabaseReader databaseReader;
67 |
68 | private final Function fieldReferenceExtractor;
69 |
70 | public GeoIPFilter(String sourceField, String targetField, List fields, String databasePath, int cacheSize,
71 | String ecsCompatibility) {
72 | this.sourceField = sourceField;
73 | this.targetField = targetField;
74 | switch (ecsCompatibility) {
75 | case "disabled":
76 | this.fieldReferenceExtractor = Fields::getFieldReferenceLegacy;
77 | break;
78 | case "v1":
79 | case "v8":
80 | this.fieldReferenceExtractor = Fields::getFieldReferenceECSv1;
81 | break;
82 | default:
83 | throw new UnsupportedOperationException("Unknown ECS version " + ecsCompatibility);
84 | }
85 |
86 | final File database = new File(databasePath);
87 | try {
88 | this.databaseReader = new DatabaseReader.Builder(database).withCache(new CHMCache(cacheSize)).build();
89 | } catch (InvalidDatabaseException e) {
90 | throw new IllegalArgumentException("The database provided is invalid or corrupted.", e);
91 | } catch (IOException e) {
92 | throw new IllegalArgumentException("The database provided was not found in the path", e);
93 | }
94 | this.desiredFields = createDesiredFields(fields, !ecsCompatibility.equals("disabled"));
95 | }
96 |
97 | public static boolean isDatabaseValid(String databasePath) {
98 | final File database = new File(databasePath);
99 | try {
100 | new DatabaseReader.Builder(database).build();
101 | return true;
102 | } catch (InvalidDatabaseException e) {
103 | logger.debug("The database provided is invalid or corrupted");
104 | } catch (IOException e) {
105 | logger.debug("The database provided was not found in the path");
106 | }
107 | return false;
108 | }
109 |
110 | private Set createDesiredFields(List fields, final boolean ecsCompatibilityEnabled) {
111 | Set desiredFields = EnumSet.noneOf(Fields.class);
112 | if (fields == null || fields.isEmpty()) {
113 | switch (databaseReader.getMetadata().getDatabaseType()) {
114 | case CITY_LITE_DB_TYPE:
115 | case CITY_DB_TYPE:
116 | case CITY_AFRICA_DB_TYPE:
117 | case CITY_ASIA_PACIFIC_DB_TYPE:
118 | case CITY_EUROPE_DB_TYPE:
119 | case CITY_NORTH_AMERICA_DB_TYPE:
120 | case CITY_SOUTH_AMERICA_DB_TYPE:
121 | desiredFields = ecsCompatibilityEnabled ? Fields.DEFAULT_ECS_CITY_FIELDS : Fields.DEFAULT_CITY_FIELDS;
122 | break;
123 | case COUNTRY_LITE_DB_TYPE:
124 | case COUNTRY_DB_TYPE:
125 | desiredFields = Fields.DEFAULT_COUNTRY_FIELDS;
126 | break;
127 | case ISP_DB_TYPE:
128 | desiredFields = Fields.DEFAULT_ISP_FIELDS;
129 | break;
130 | case ASN_LITE_DB_TYPE:
131 | desiredFields = Fields.DEFAULT_ASN_LITE_FIELDS;
132 | break;
133 | case DOMAIN_DB_TYPE:
134 | desiredFields = Fields.DEFAULT_DOMAIN_FIELDS;
135 | }
136 | } else {
137 | for (String fieldName : fields) {
138 | desiredFields.add(Fields.parseField(fieldName));
139 | }
140 | }
141 | return desiredFields;
142 | }
143 |
144 | public boolean handleEvent(RubyEvent rubyEvent) {
145 | final Event event = rubyEvent.getEvent();
146 | Object input = event.getField(sourceField);
147 | if (input == null) {
148 | return false;
149 | }
150 | String ip;
151 |
152 | if (input instanceof List) {
153 | ip = (String) ((List) input).get(0);
154 |
155 | } else if (input instanceof String) {
156 | ip = (String) input;
157 | } else {
158 | throw new IllegalArgumentException("Expected input field value to be String or List type");
159 | }
160 |
161 | if (ip.trim().isEmpty()){
162 | return false;
163 | }
164 |
165 | Map geoData = new HashMap<>();
166 |
167 | try {
168 | final InetAddress ipAddress = InetAddress.getByName(ip);
169 | switch (databaseReader.getMetadata().getDatabaseType()) {
170 | case CITY_LITE_DB_TYPE:
171 | case CITY_DB_TYPE:
172 | case CITY_AFRICA_DB_TYPE:
173 | case CITY_ASIA_PACIFIC_DB_TYPE:
174 | case CITY_EUROPE_DB_TYPE:
175 | case CITY_NORTH_AMERICA_DB_TYPE:
176 | case CITY_SOUTH_AMERICA_DB_TYPE:
177 | geoData = retrieveCityGeoData(ipAddress);
178 | break;
179 | case COUNTRY_LITE_DB_TYPE:
180 | case COUNTRY_DB_TYPE:
181 | geoData = retrieveCountryGeoData(ipAddress);
182 | break;
183 | case ASN_LITE_DB_TYPE:
184 | geoData = retrieveAsnGeoData(ipAddress);
185 | break;
186 | case ISP_DB_TYPE:
187 | geoData = retrieveIspGeoData(ipAddress);
188 | break;
189 | case DOMAIN_DB_TYPE:
190 | geoData = retrieveDomainGeoData(ipAddress);
191 | break;
192 | default:
193 | throw new IllegalStateException("Unsupported database type " + databaseReader.getMetadata().getDatabaseType() + "");
194 | }
195 | } catch (UnknownHostException e) {
196 | logger.debug("IP Field contained invalid IP address or hostname. exception={}, field={}, event={}", e, sourceField, event);
197 | } catch (AddressNotFoundException e) {
198 | logger.debug("IP not found! exception={}, field={}, event={}", e, sourceField, event);
199 | } catch (GeoIp2Exception | IOException e) {
200 | logger.debug("GeoIP2 Exception. exception={}, field={}, event={}", e, sourceField, event);
201 | }
202 |
203 | return applyGeoData(geoData, event);
204 | }
205 |
206 | private boolean applyGeoData(Map geoData, Event event) {
207 | if (geoData == null) {
208 | return false;
209 | }
210 | // only do event.set(@target) if the lookup result is not null
211 | if (event.getField(targetField) == null) {
212 | event.setField(targetField, Collections.emptyMap());
213 | }
214 | // don't do anything more if the lookup result is empty
215 | if (geoData.isEmpty()) {
216 | return false;
217 | }
218 |
219 | String targetFieldReference = "[" + this.targetField + "]";
220 | for (Map.Entry it: geoData.entrySet()) {
221 | final Fields field = it.getKey();
222 | final String subFieldReference = this.fieldReferenceExtractor.apply(field);
223 |
224 | if (subFieldReference.equals("[]")) {
225 | continue; // skip the incompatible ECS field
226 | }
227 |
228 | event.setField(targetFieldReference + subFieldReference, it.getValue());
229 | }
230 | return true;
231 | }
232 |
233 | private Map retrieveCityGeoData(InetAddress ipAddress) throws GeoIp2Exception, IOException {
234 | CityResponse response = databaseReader.city(ipAddress);
235 | Country country = response.getCountry();
236 | City city = response.getCity();
237 | Location location = response.getLocation();
238 | Continent continent = response.getContinent();
239 | Postal postal = response.getPostal();
240 | Subdivision subdivision = response.getMostSpecificSubdivision();
241 | Map geoData = new HashMap<>();
242 |
243 | // if location is empty, there is no point populating geo data
244 | // and most likely all other fields are empty as well
245 | if (location.getLatitude() == null && location.getLongitude() == null) {
246 | return geoData;
247 | }
248 |
249 | for (Fields desiredField : this.desiredFields) {
250 | switch (desiredField) {
251 | case CITY_NAME:
252 | String cityName = city.getName();
253 | if (cityName != null) {
254 | geoData.put(Fields.CITY_NAME, cityName);
255 | }
256 | break;
257 | case CONTINENT_CODE:
258 | String continentCode = continent.getCode();
259 | if (continentCode != null) {
260 | geoData.put(Fields.CONTINENT_CODE, continentCode);
261 | }
262 | break;
263 | case CONTINENT_NAME:
264 | String continentName = continent.getName();
265 | if (continentName != null) {
266 | geoData.put(Fields.CONTINENT_NAME, continentName);
267 | }
268 | break;
269 | case COUNTRY_NAME:
270 | String countryName = country.getName();
271 | if (countryName != null) {
272 | geoData.put(Fields.COUNTRY_NAME, countryName);
273 | }
274 | break;
275 | case COUNTRY_CODE2:
276 | String countryCode2 = country.getIsoCode();
277 | if (countryCode2 != null) {
278 | geoData.put(Fields.COUNTRY_CODE2, countryCode2);
279 | }
280 | break;
281 | case COUNTRY_CODE3:
282 | String countryCode3 = country.getIsoCode();
283 | if (countryCode3 != null) {
284 | geoData.put(Fields.COUNTRY_CODE3, countryCode3);
285 | }
286 | break;
287 | case IP:
288 | geoData.put(Fields.IP, ipAddress.getHostAddress());
289 | break;
290 | case POSTAL_CODE:
291 | String postalCode = postal.getCode();
292 | if (postalCode != null) {
293 | geoData.put(Fields.POSTAL_CODE, postalCode);
294 | }
295 | break;
296 | case DMA_CODE:
297 | Integer dmaCode = location.getMetroCode();
298 | if (dmaCode != null) {
299 | geoData.put(Fields.DMA_CODE, dmaCode);
300 | }
301 | break;
302 | case REGION_NAME:
303 | String subdivisionName = subdivision.getName();
304 | if (subdivisionName != null) {
305 | geoData.put(Fields.REGION_NAME, subdivisionName);
306 | }
307 | break;
308 | case REGION_CODE:
309 | String subdivisionCode = subdivision.getIsoCode();
310 | if (subdivisionCode != null) {
311 | geoData.put(Fields.REGION_CODE, subdivisionCode);
312 | }
313 | break;
314 | case REGION_ISO_CODE:
315 | String countryCodeForRegion = country.getIsoCode();
316 | String regionCode2 = subdivision.getIsoCode();
317 | if (countryCodeForRegion != null && regionCode2 != null) {
318 | geoData.put(Fields.REGION_ISO_CODE, String.format("%s-%s", countryCodeForRegion, regionCode2));
319 | }
320 | break;
321 | case TIMEZONE:
322 | String locationTimeZone = location.getTimeZone();
323 | if (locationTimeZone != null) {
324 | geoData.put(Fields.TIMEZONE, locationTimeZone);
325 | }
326 | break;
327 | case LOCATION:
328 | Double latitude = location.getLatitude();
329 | Double longitude = location.getLongitude();
330 | if (latitude != null && longitude != null) {
331 | Map locationObject = new HashMap<>();
332 | locationObject.put("lat", latitude);
333 | locationObject.put("lon", longitude);
334 | geoData.put(Fields.LOCATION, locationObject);
335 | }
336 | break;
337 | case LATITUDE:
338 | Double lat = location.getLatitude();
339 | if (lat != null) {
340 | geoData.put(Fields.LATITUDE, lat);
341 | }
342 | break;
343 | case LONGITUDE:
344 | Double lon = location.getLongitude();
345 | if (lon != null) {
346 | geoData.put(Fields.LONGITUDE, lon);
347 | }
348 | break;
349 | }
350 | }
351 |
352 | return geoData;
353 | }
354 |
355 | private Map retrieveCountryGeoData(InetAddress ipAddress) throws GeoIp2Exception, IOException {
356 | CountryResponse response = databaseReader.country(ipAddress);
357 | Country country = response.getCountry();
358 | Continent continent = response.getContinent();
359 | Map geoData = new HashMap<>();
360 |
361 | for (Fields desiredField : this.desiredFields) {
362 | switch (desiredField) {
363 | case IP:
364 | geoData.put(Fields.IP, ipAddress.getHostAddress());
365 | break;
366 | case COUNTRY_CODE2:
367 | String countryCode2 = country.getIsoCode();
368 | if (countryCode2 != null) {
369 | geoData.put(Fields.COUNTRY_CODE2, countryCode2);
370 | }
371 | break;
372 | case COUNTRY_NAME:
373 | String countryName = country.getName();
374 | if (countryName != null) {
375 | geoData.put(Fields.COUNTRY_NAME, countryName);
376 | }
377 | break;
378 | case CONTINENT_NAME:
379 | String continentName = continent.getName();
380 | if (continentName != null) {
381 | geoData.put(Fields.CONTINENT_NAME, continentName);
382 | }
383 | break;
384 | }
385 | }
386 |
387 | return geoData;
388 | }
389 |
390 | private Map retrieveIspGeoData(InetAddress ipAddress) throws GeoIp2Exception, IOException {
391 | IspResponse response = databaseReader.isp(ipAddress);
392 |
393 | Map geoData = new HashMap<>();
394 | for (Fields desiredField : this.desiredFields) {
395 | switch (desiredField) {
396 | case IP:
397 | geoData.put(Fields.IP, ipAddress.getHostAddress());
398 | break;
399 | case AUTONOMOUS_SYSTEM_NUMBER:
400 | Integer asn = response.getAutonomousSystemNumber();
401 | if (asn != null) {
402 | geoData.put(Fields.AUTONOMOUS_SYSTEM_NUMBER, asn);
403 | }
404 | break;
405 | case AUTONOMOUS_SYSTEM_ORGANIZATION:
406 | String aso = response.getAutonomousSystemOrganization();
407 | if (aso != null) {
408 | geoData.put(Fields.AUTONOMOUS_SYSTEM_ORGANIZATION, aso);
409 | }
410 | break;
411 | case ISP:
412 | String isp = response.getIsp();
413 | if (isp != null) {
414 | geoData.put(Fields.ISP, isp);
415 | }
416 | break;
417 | case ORGANIZATION:
418 | String org = response.getOrganization();
419 | if (org != null) {
420 | geoData.put(Fields.ORGANIZATION, org);
421 | }
422 | break;
423 | }
424 | }
425 |
426 | return geoData;
427 | }
428 |
429 | private Map retrieveAsnGeoData(InetAddress ipAddress) throws GeoIp2Exception, IOException {
430 | AsnResponse response = databaseReader.asn(ipAddress);
431 | Map geoData = new HashMap<>();
432 | for (Fields desiredField : this.desiredFields) {
433 | switch (desiredField) {
434 | case IP:
435 | geoData.put(Fields.IP, ipAddress.getHostAddress());
436 | break;
437 | case AUTONOMOUS_SYSTEM_NUMBER:
438 | Integer asn = response.getAutonomousSystemNumber();
439 | if (asn != null) {
440 | geoData.put(Fields.AUTONOMOUS_SYSTEM_NUMBER, asn);
441 | }
442 | break;
443 | case AUTONOMOUS_SYSTEM_ORGANIZATION:
444 | String aso = response.getAutonomousSystemOrganization();
445 | if (aso != null) {
446 | geoData.put(Fields.AUTONOMOUS_SYSTEM_ORGANIZATION, aso);
447 | }
448 | break;
449 | }
450 | }
451 |
452 | return geoData;
453 | }
454 |
455 | private Map retrieveDomainGeoData(InetAddress ipAddress) throws GeoIp2Exception, IOException {
456 | DomainResponse response = databaseReader.domain(ipAddress);
457 | Map geoData = new HashMap<>();
458 | for (Fields desiredField : this.desiredFields) {
459 | switch (desiredField) {
460 | case DOMAIN:
461 | String domain = response.getDomain();
462 | geoData.put(Fields.DOMAIN, domain);
463 | break;
464 | }
465 | }
466 |
467 | return geoData;
468 | }
469 | }
470 |
--------------------------------------------------------------------------------