├── .dockerignore ├── .env.examples ├── .github ├── FUNDING.yml └── workflows │ ├── build.yml │ └── tests.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── dev-requirements.txt ├── geoip2influx.py ├── geoip2influx ├── __init__.py ├── constants.py ├── influx.py ├── influx_base.py ├── influxv2.py ├── logger.py └── logparser.py ├── nginx_logs_geo_map.json ├── requirements.txt ├── root └── etc │ ├── crontabs │ └── root │ ├── logrotate.d │ └── geoip2influx │ └── s6-overlay │ └── s6-rc.d │ ├── init-adduser │ └── branding │ ├── init-geoip2influx-setup │ ├── dependencies.d │ │ └── init-maxmind-setup │ ├── run │ ├── type │ └── up │ ├── init-maxmind-setup │ ├── run │ ├── type │ └── up │ ├── svc-geoip2influx │ ├── dependencies.d │ │ ├── init-geoip2influx-setup │ │ └── init-services │ ├── run │ └── type │ └── user │ └── contents.d │ ├── init-geoip2influx-setup │ ├── init-maxmind-setup │ └── svc-geoip2influx ├── run.py └── tests ├── GeoLite2-City.mmdb ├── __init__.py ├── invalid_logs.txt ├── test_geoip2influx.py ├── valid_ipv4_log.txt └── valid_ipv6_log.txt /.dockerignore: -------------------------------------------------------------------------------- 1 | .gitignore 2 | LICENSE 3 | README.md 4 | 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | pip-wheel-metadata/ 28 | share/python-wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .nox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | *.py,cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | db.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | 78 | # PyBuilder 79 | target/ 80 | 81 | # Jupyter Notebook 82 | .ipynb_checkpoints 83 | 84 | # IPython 85 | profile_default/ 86 | ipython_config.py 87 | 88 | # pyenv 89 | .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 99 | __pypackages__/ 100 | 101 | # Celery stuff 102 | celerybeat-schedule 103 | celerybeat.pid 104 | 105 | # SageMath parsed files 106 | *.sage.py 107 | 108 | # Environments 109 | .env 110 | .venv 111 | env/ 112 | venv/ 113 | ENV/ 114 | env.bak/ 115 | venv.bak/ 116 | 117 | # Spyder project settings 118 | .spyderproject 119 | .spyproject 120 | 121 | # Rope project settings 122 | .ropeproject 123 | 124 | # mkdocs documentation 125 | /site 126 | 127 | # mypy 128 | .mypy_cache/ 129 | .dmypy.json 130 | dmypy.json 131 | 132 | # Pyre type checker 133 | .pyre/ 134 | -------------------------------------------------------------------------------- /.env.examples: -------------------------------------------------------------------------------- 1 | # INFLUXDB CONFIG 2 | INFLUX_HOST = "localhost" 3 | INFLUX_HOST_PORT = "8086" 4 | INFLUX_DATABASE = "geoip2influx" 5 | INFLUX_USER = "root" 6 | INFLUX_PASSWORD = "root" 7 | INFLUX_RETENTION = "7d" 8 | INFLUX_SHARD = "1d" 9 | 10 | # INFLUXDB2 CONFIG 11 | INFLUXDB_V2_TOKEN = "secret-token" 12 | INFLUXDB_V2_URL = "http://localhost:8086" 13 | INFLUXDB_V2_ORG = "geoip2influx" 14 | INFLUXDB_V2_BUCKET = "geoip2influx" 15 | INFLUXDB_V2_RETENTION = "604800" # seconds (7 days) 16 | INFLUXDB_V2_DEBUG = "false" 17 | INFLUXDB_V2_BATCHING = "true" 18 | INFLUXDB_V2_BATCH_SIZE = "50" 19 | INFLUXDB_V2_FLUSH_INTERVAL = "30_000" # milliseconds 20 | 21 | GEO_MEASUREMENT = "geoip2influx" 22 | LOG_MEASUREMENT = "nginx_access_logs" 23 | NGINX_LOG_PATH = "/var/log/nginx/access.log" 24 | SEND_NGINX_LOGS = "true" 25 | GEOIP2INFLUX_LOG_LEVEL = "info" 26 | GEOIP2INFLUX_LOG_PATH = "/var/log/geoip2influx.log" 27 | GEOIP_DB_PATH = "/usr/share/GeoIP/GeoLite2-City.mmdb" 28 | USE_INFLUXDB_V2 = "true" 29 | MAXMINDDB_USER_ID = "123456" 30 | MAXMINDDB_LICENSE_KEY = "license-key" -------------------------------------------------------------------------------- /.github/FUNDING.yml: 
-------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: GilbN 4 | 5 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | on: 3 | push: 4 | branches: 5 | - master 6 | paths: 7 | - 'geoip2influx/**' 8 | - 'tests/**' 9 | - 'run.py' 10 | - 'requirements.txt' 11 | - 'Dockerfile' 12 | - 'root/**' 13 | jobs: 14 | push_to_ghcr_io: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: checkout 18 | uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 0 21 | - name: Login to GitHub Container Registry 22 | uses: docker/login-action@v3 23 | with: 24 | registry: ghcr.io 25 | username: ${{ github.repository_owner }} 26 | password: ${{ secrets.GH_PAT }} 27 | - name: build&push 28 | run: | 29 | docker build . --tag ghcr.io/gilbn/geoip2influx 30 | docker push ghcr.io/gilbn/geoip2influx 31 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Test Package 5 | 6 | on: 7 | push: 8 | branches: [ "master" ] 9 | pull_request: 10 | branches: [ "master" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | test: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v4 22 | - name: Set up Python 3.10 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: "3.10" 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install flake8 pytest 30 | if [ -f requirements.txt ]; then pip 
install -r requirements.txt; fi 31 | - name: Lint with flake8 32 | run: | 33 | # stop the build if there are Python syntax errors or undefined names 34 | flake8 run.py geoip2influx/ --count --select=E9,F63,F7,F82 --show-source --statistics 35 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 36 | flake8 run.py geoip2influx/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 37 | - name: Test with pytest 38 | run: | 39 | pytest 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /*.mmdb 2 | venv*/ 3 | *.log* 4 | #################### 5 | 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | pip-wheel-metadata/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | local_settings.py 66 | db.sqlite3 67 | db.sqlite3-journal 68 | 69 | # Flask stuff: 70 | instance/ 71 | .webassets-cache 72 | 73 | # Scrapy stuff: 74 | .scrapy 75 | 76 | # Sphinx documentation 77 | docs/_build/ 78 | 79 | # PyBuilder 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | .python-version 91 | 92 | # pipenv 93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 96 | # install all needed dependencies. 97 | #Pipfile.lock 98 | 99 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 100 | __pypackages__/ 101 | 102 | # Celery stuff 103 | celerybeat-schedule 104 | celerybeat.pid 105 | 106 | # SageMath parsed files 107 | *.sage.py 108 | 109 | # Environments 110 | .env 111 | .venv 112 | env/ 113 | venv/ 114 | ENV/ 115 | env.bak/ 116 | venv.bak/ 117 | 118 | # Spyder project settings 119 | .spyderproject 120 | .spyproject 121 | 122 | # Rope project settings 123 | .ropeproject 124 | 125 | # mkdocs documentation 126 | /site 127 | 128 | # mypy 129 | .mypy_cache/ 130 | .dmypy.json 131 | dmypy.json 132 | 133 | # Pyre type checker 134 | .pyre/ 135 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM lsiobase/alpine:3.20 2 | LABEL maintainer="GilbN" 3 | 4 | WORKDIR /geoip2influx 5 | 6 | # Copy the requirements.txt and run.py files 7 | COPY requirements.txt run.py ./ 8 | 9 | # Copy the entire geoip2influx directory 10 | COPY /geoip2influx /geoip2influx/ 11 | 12 | RUN \ 13 | echo " ## Installing packages ## " && \ 14 | apk add --no-cache --virtual=build-dependencies \ 15 | python3-dev \ 16 | py3-pip \ 17 | logrotate \ 18 | libmaxminddb && \ 19 | echo "**** install packages ****" && \ 20 | apk add --no-cache \ 21 | python3 && \ 22 | echo " ## Installing python modules ## " && \ 23 | python3 -m venv /lsiopy && \ 24 | pip3 install --no-cache-dir -r requirements.txt 25 | COPY root/ / 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 GilbN 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, 
distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # [Geoip2Influx](https://github.com/gilbN/geoip2influx) 2 | 3 | [![Docker Cloud Build Status](https://img.shields.io/docker/cloud/build/gilbn/geoip2influx?style=for-the-badge)](https://hub.docker.com/r/gilbn/geoip2influx/builds) 4 | [![Docker Image Size (latest by date)](https://img.shields.io/docker/image-size/gilbn/geoip2influx?color=blue&style=for-the-badge)](https://hub.docker.com/r/gilbn/geoip2influx) 5 | [![Docker Pulls](https://img.shields.io/docker/pulls/gilbn/geoip2influx?color=blue&style=for-the-badge)](https://hub.docker.com/r/gilbn/geoip2influx) 6 | [![GitHub](https://img.shields.io/github/license/gilbn/geoip2influx?color=blue&style=for-the-badge)](https://github.com/gilbN/geoip2influx/blob/master/LICENSE) 7 | [![Discord](https://img.shields.io/discord/591352397830553601?color=blue&style=for-the-badge)](https://discord.gg/HSPa4cz) 8 | [![](https://img.shields.io/badge/Blog-technicalramblings.com-blue?style=for-the-badge)](https://technicalramblings.com/) 9 | *** 10 | 11 | A 
python script that will parse the nginx access.log and send geolocation metrics and log metrics to InfluxDB 12 | 13 | ![](https://i.imgur.com/mh0IhYA.jpg) 14 | 15 | ### For the linuxserver/letsencrypt docker mod, click here : https://github.com/linuxserver/docker-mods/tree/swag-geoip2influx 16 | 17 | *** 18 | 19 | ## Usage 20 | 21 | ### Enviroment variables: 22 | 23 | These are the **default** values for all envs. 24 | Add the ones that differ on your system. 25 | 26 | | Environment Variable | Example Value | Description | 27 | | -------------------- | ------------- | ----------- | 28 | | NGINX_LOG_PATH | /config/log/nginx/access.log | Container path for Nginx logfile , defaults to the example. | 29 | | GEO_MEASUREMENT | geoip2influx | InfluxDB measurement name for geohashes. Optional, defaults to the example. | 30 | | LOG_MEASUREMENT | nginx_access_logs | InfluxDB measurement name for nginx logs. Optional, defaults to the example. | 31 | | SEND_NGINX_LOGS | true | Set to `false` to disable nginx logs. Optional, defaults to `true`. | 32 | | GEOIP2INFLUX_LOG_LEVEL | info | Sets the log level in geoip2influx.log. Use `debug` for verbose logging Optional, defaults to info. | 33 | | GEOIP2INFLUX_LOG_PATH | /config/log/geoip2influx/geoip2influx.log | Optional. Defaults to example. | 34 | | GEOIP_DB_PATH | /config/geoip2db/GeoLite2-City.mmdb | Optional. Defaults to example. | 35 | | MAXMINDDB_LICENSE_KEY | xxxxxxx | Add your Maxmind licence key | 36 | | MAXMINDDB_USER_ID | xxxxxxx| Add your Maxmind account id | 37 | 38 | **InfluxDB v1.8.x values** 39 | 40 | | Environment Variable | Example Value | Description | 41 | | -------------------- | ------------- | ----------- | 42 | | INFLUX_HOST | localhost | Host running InfluxDB. | 43 | | INFLUX_HOST_PORT | 8086 | Optional, defaults to 8086. | 44 | | INFLUX_DATABASE | geoip2influx | Optional, defaults to geoip2influx. | 45 | | INFLUX_USER | root | Optional, defaults to root. 
| 46 | | INFLUX_PASS | root | Optional, defaults to root. | 47 | | INFLUX_RETENTION | 7d | Sets the retention for the database. Optional, defaults to example.| 48 | | INFLUX_SHARD | 1d | Set the shard for the database. Optional, defaults to example. | 49 | 50 | **InfluxDB v2.x values** 51 | 52 | | Environment Variable | Example Value | Description | 53 | | -------------------- | ------------- | ----------- | 54 | | USE_INFLUXDB_V2 | true | Required if using InfluxDB2. Defaults to false | 55 | | INFLUXDB_V2_TOKEN | secret-token | Required | 56 | | INFLUXDB_V2_URL | http://localhost:8086 | Optional, defaults to http://localhost:8086 | 57 | | INFLUXDB_V2_ORG | geoip2influx | Optional, defaults to geoip2influx. Will be created if not exists. | 58 | | INFLUXDB_V2_BUCKET | geoip2influx | Optional, defaults to geoip2influx. Will be created if not exists. | 59 | | INFLUXDB_V2_RETENTION | 604800 | Optional, defaults to 604800. 7 days in seconds | 60 | | INFLUXDB_V2_DEBUG | false | Optional, defaults to false. Enables the debug mode for the influxdb-client package. | 61 | | INFLUXDB_V2_BATCHING | true | Optional, defaults to false. Enables batch writing of data. | 62 | | INFLUXDB_V2_BATCH_SIZE | 100 | Optional, defaults to 10. | 63 | | INFLUXDB_V2_FLUSH_INTERVAL | 30000 | Optional, defaults to 15000. How often in milliseconds to write a batch | 64 | 65 | #### INFLUXDB_V2_TOKEN 66 | 67 | If the organization or bucket does not exist, it will try and create them with the token. 68 | 69 | > [!NOTE] 70 | > The minimim level of rights needed is write access to the bucket. 71 | 72 | ### MaxMind Geolite2 73 | 74 | Default download location is `/config/geoip2db/GeoLite2-City.mmdb` 75 | 76 | Get your licence key here: https://www.maxmind.com/en/geolite2/signup 77 | 78 | ### InfluxDB 79 | 80 | #### InfluxDB v2.x and v1.8x is supported. 
81 | 82 | #### Note: The Grafana dashboard currently only supports InfluxDB v1.8.x 83 | 84 | The InfluxDB database/bucket and retention rules will be created automatically with the name you choose. 85 | 86 | ``` 87 | -e INFLUX_DATABASE=geoip2influx or -e INFLUXDB_V2_BUCKET=geoip2influx 88 | ``` 89 | 90 | ### Docker 91 | 92 | ```bash 93 | docker create \ 94 | --name=geoip2influx \ 95 | -e PUID=1000 \ 96 | -e PGID=1000 \ 97 | -e TZ=Europe/Oslo \ 98 | -e INFLUX_HOST= \ 99 | -e INFLUX_HOST_PORT= \ 100 | -e MAXMINDDB_LICENSE_KEY=\ 101 | -e MAXMINDDB_USER_ID=\ 102 | -v /path/to/appdata/geoip2influx:/config \ 103 | -v /path/to/nginx/accesslog/:/config/log/nginx/ \ 104 | --restart unless-stopped \ 105 | ghcr.io/gilbn/geoip2influx 106 | ``` 107 | 108 | ### Docker compose 109 | 110 | ```yaml 111 | version: "2.1" 112 | services: 113 | geoip2influx: 114 | image: ghcr.io/gilbn/geoip2influx 115 | container_name: geoip2influx 116 | environment: 117 | - PUID=1000 118 | - PGID=1000 119 | - TZ=Europe/Oslo 120 | - INFLUX_HOST= 121 | - INFLUX_HOST_PORT= 122 | - MAXMINDDB_LICENSE_KEY= 123 | - MAXMINDDB_USER_ID= 124 | volumes: 125 | - /path/to/appdata/geoip2influx:/config 126 | - /path/to/nginx/accesslog/:/config/log/nginx/ 127 | restart: unless-stopped 128 | ``` 129 | 130 | **InfluxDB2 examples** 131 | 132 | ```bash 133 | docker create \ 134 | --name=geoip2influx \ 135 | -e PUID=1000 \ 136 | -e PGID=1000 \ 137 | -e TZ=Europe/Oslo \ 138 | -e INFLUXDB_V2_URL= \ 139 | -e INFLUXDB_V2_TOKEN= \ 140 | -e USE_INFLUXDB_V2=true \ 141 | -e MAXMINDDB_LICENSE_KEY=\ 142 | -e MAXMINDDB_USER_ID=\ 143 | -v /path/to/appdata/geoip2influx:/config \ 144 | -v /path/to/nginx/accesslog/:/config/log/nginx/ \ 145 | --restart unless-stopped \ 146 | ghcr.io/gilbn/geoip2influx 147 | ``` 148 | 149 | ```yaml 150 | version: "2.1" 151 | services: 152 | geoip2influx: 153 | image: ghcr.io/gilbn/geoip2influx 154 | container_name: geoip2influx 155 | environment: 156 | - PUID=1000 157 | - PGID=1000 158 | - TZ=Europe/Oslo 
159 | - INFLUXDB_V2_URL= 160 | - INFLUXDB_V2_TOKEN= 161 | - USE_INFLUXDB_V2=true 162 | - MAXMINDDB_LICENSE_KEY= 163 | - MAXMINDDB_USER_ID= 164 | volumes: 165 | - /path/to/appdata/geoip2influx:/config 166 | - /path/to/nginx/accesslog/:/config/log/nginx/ 167 | restart: unless-stopped 168 | ``` 169 | 170 | *** 171 | 172 | ## Grafana dashboard: 173 | 174 | Use [nginx_logs_geo_map.json](/nginx_logs_geo_map.json) 175 | 176 | ### Note 177 | 178 | Currently only supports InfluxDB 1.8.x. 179 | 180 | *** 181 | 182 | ## Sending Nginx log metrics 183 | 184 | Nginx needs to be compiled with the geoip2 module: https://github.com/leev/ngx_http_geoip2_module 185 | 186 | 1. Add the following to the http block in your `nginx.conf` file: 187 | 188 | ```nginx 189 | geoip2 /config/geoip2db/GeoLite2-City.mmdb { 190 | auto_reload 5m; 191 | $geoip2_data_country_iso_code country iso_code; 192 | $geoip2_data_city_name city names en; 193 | } 194 | 195 | log_format custom '$remote_addr - $remote_user [$time_local]' 196 | '"$request" $status $body_bytes_sent' 197 | '"$http_referer" $host "$http_user_agent"' 198 | '"$request_time" "$upstream_connect_time"' 199 | '"$geoip2_data_city_name" "$geoip2_data_country_iso_code"'; 200 | ``` 201 | 202 | 2. Set the access log use the `custom` log format. 203 | ```nginx 204 | access_log /config/log/nginx/access.log custom; 205 | ``` 206 | 207 | ### Multiple log files 208 | 209 | If you separate your nginx log files but want this script to parse all of them you can do the following: 210 | 211 | As nginx can have multiple `access log` directives in a block, just add another one in the server block. 212 | 213 | **Example** 214 | 215 | ```nginx 216 | access_log /config/log/nginx/technicalramblings/access.log custom; 217 | access_log /config/log/nginx/access.log custom; 218 | ``` 219 | This will log the same lines to both files. 220 | 221 | Then use the `/config/log/nginx/access.log` file in the `NGINX_LOG_PATH` variable. 
222 | 223 | *** 224 | 225 | ## Updates 226 | 227 | **18.08.24** - Rename env from USE_INFLUX_V2 to USE_INFLUXDB_V2. 228 | 229 | **10.08.24** - Add support for InfluxDB2. 230 | 231 | **06.08.24** - Complete refactor of the python code. Deprecate the old geoip2influx.py file. 232 | 233 | **28.07.24** - Refactor to alpine 3.20. New env required. MAXMINDDB_USER_ID. 234 | 235 | **21.06.20** - Added $host(domain) to the nginx log metrics. This will break your nginx logs parsing, as you need to update the custom log format. 236 | 237 | **06.06.20** - Added influx retention policy to try and mitigate max-values-per-tag limit exceeded errors. 238 | 239 | * `-e INFLUX_RETENTION` Default 30d 240 | * `-e INFLUX_SHARD` Default 2d 241 | * It will only add the retention policy if the database doesn't exist. 242 | 243 | **30.05.20** - Added logging. Use `-e GEOIP2INFLUX_LOG_LEVEL` to set the log level. 244 | 245 | **15.05.20** - Removed `GEOIP2_KEY` and `GEOIP_DB_PATH`variables. With commit https://github.com/linuxserver/docker-letsencrypt/commit/75b9685fdb3ec6edda590300f289b0e75dd9efd0 the letsencrypt container now natively supports downloading and updating(weekly) the GeoLite2-City database! 246 | 247 | *** 248 | 249 | Adapted source: https://github.com/ratibor78/geostat 250 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | pytest -------------------------------------------------------------------------------- /geoip2influx.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python3 2 | 3 | from os.path import exists, isfile 4 | from os import environ as env, stat 5 | from platform import uname 6 | from re import compile, match, search, IGNORECASE 7 | from sys import path, exit 8 | from time import sleep, time 9 | from datetime import datetime 10 | import logging 11 | 12 | from geoip2.database import Reader 13 | from geohash2 import encode 14 | from influxdb import InfluxDBClient 15 | from requests.exceptions import ConnectionError 16 | from influxdb.exceptions import InfluxDBServerError, InfluxDBClientError 17 | from IPy import IP as ipadd 18 | 19 | 20 | # Getting params from envs 21 | geoip_db_path = '/config/geoip2db/GeoLite2-City.mmdb' 22 | log_path = env.get('NGINX_LOG_PATH', '/config/log/nginx/access.log') 23 | influxdb_host = env.get('INFLUX_HOST', 'localhost') 24 | influxdb_port = env.get('INFLUX_HOST_PORT', '8086') 25 | influxdb_database = env.get('INFLUX_DATABASE', 'geoip2influx') 26 | influxdb_user = env.get('INFLUX_USER', 'root') 27 | influxdb_user_pass = env.get('INFLUX_PASS', 'root') 28 | influxdb_retention = env.get('INFLUX_RETENTION','7d') 29 | influxdb_shard = env.get('INFLUX_SHARD', '1d') 30 | geo_measurement = env.get('GEO_MEASUREMENT', 'geoip2influx') 31 | log_measurement = env.get('LOG_MEASUREMENT', 'nginx_access_logs') 32 | send_nginx_logs = env.get('SEND_NGINX_LOGS','true') 33 | log_level = env.get('GEOIP2INFLUX_LOG_LEVEL', 'info').upper() 34 | g2i_log_path = env.get('GEOIP2INFLUX_LOG_PATH','/config/log/geoip2influx/geoip2influx.log') 35 | 36 | # Logging 37 | logging.basicConfig(level=log_level,format='GEOIP2INFLUX %(asctime)s :: %(levelname)s :: %(message)s',datefmt='%d/%b/%Y %H:%M:%S',handlers=[logging.StreamHandler(),logging.FileHandler(g2i_log_path)]) 38 | 39 | # global variables 40 | monitored_ip_types = ['PUBLIC', 'ALLOCATED APNIC', 'ALLOCATED ARIN', 'ALLOCATED RIPE NCC', 'ALLOCATED LACNIC', 'ALLOCATED AFRINIC'] 41 | 42 | 43 | def regex_tester(log_path, N): 44 | time_out = time() + 60 45 | 
re_ipv4 = compile(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})') 46 | re_ipv6 = compile(r'(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))') # NOQA 47 | while True: 48 | assert N >= 0 49 | pos = N + 1 50 | lines = [] 51 | with open(log_path) as f: 52 | while len(lines) <= N: 53 | try: 54 | f.seek(-pos, 2) 55 | except IOError: 56 | f.seek(0) 57 | break 58 | finally: 59 | lines = list(f) 60 | pos *= 2 61 | log_lines = lines[-N:] 62 | for line in log_lines: 63 | if re_ipv4.match(line): 64 | regex = compile(r'(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) - (?P.+) \[(?P\d{2}\/[A-Z]{1}[a-z]{2}\/\d{4}:\d{2}:\d{2}:\d{2} ((\+|\-)\d{4}))\](["](?P.+)) (?P.+) ((?PHTTP\/[1-3]\.[0-9])["]) (?P\d{3}) (?P\d{1,99})(["](?P(\-)|(.+))["]) (?P.+) (["](?P.+)["])(["](?P.+)["]) (["](?P.+)["])(["](?P.+)["]) (["](?P.+)["])', IGNORECASE) # NOQA 65 | if regex.match(line): 66 | logging.debug(f'Regex is matching {log_path} continuing...') 67 | return True 68 | if re_ipv6.match(line): 69 | regex = 
compile(r'(?P(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))) - (?P.+) \[(?P\d{2}\/[A-Z]{1}[a-z]{2}\/\d{4}:\d{2}:\d{2}:\d{2} ((\+|\-)\d{4}))\](["](?P.+)) (?P.+) ((?PHTTP\/[1-3]\.[0-9])["]) (?P\d{3}) (?P\d{1,99})(["](?P(\-)|(.+))["]) (?P.+) (["](?P.+)["])(["](?P.+)["]) (["](?P.+)["])(["](?P.+)["]) (["](?P.+)["])', IGNORECASE) # NOQA 70 | if regex.match(line): 71 | logging.debug(f'Regex is matching {log_path} continuing...') 72 | return True 73 | else: 74 | logging.debug(f'Testing regex on: {log_path}') 75 | sleep(2) 76 | if time() > time_out: 77 | logging.warning(f'Failed to match regex on: {log_path}') 78 | break 79 | 80 | 81 | def file_exists(log_path,geoip_db_path): 82 | time_out = time() + 30 83 | while True: 84 | file_list = [log_path, geoip_db_path] 85 | if not exists(log_path): 86 | logging.warning((f'File: {log_path} not found...')) 87 | sleep(1) 88 | if not exists(geoip_db_path): 89 | logging.warning((f'File: {geoip_db_path} not found...')) 90 | sleep(1) 91 | if all([isfile(f) for f in file_list]): 92 | for f in file_list: 93 | logging.debug(f'Found: {f}') 94 | return True 95 | if time() > time_out: 96 | if not exists(geoip_db_path) and not exists(log_path): 97 | logging.critical(f"Can't find: {geoip_db_path} or {log_path} exiting!") 98 | break 99 | elif not exists(geoip_db_path): 100 | logging.critical(f"Can't find: {geoip_db_path}, exiting!") 101 | break 
102 | elif not exists(log_path): 103 | logging.critical(f"Can't find: {log_path}, exiting!") 104 | break 105 | 106 | 107 | def logparse( 108 | log_path, influxdb_host, influxdb_port, influxdb_database, influxdb_user, influxdb_user_pass, influxdb_retention, 109 | influxdb_shard, geo_measurement, log_measurement, send_nginx_logs, geoip_db_path, inode): 110 | # Preparing variables and params 111 | ips = {} 112 | geohash_fields = {} 113 | geohash_tags = {} 114 | log_data_fields = {} 115 | log_data_tags = {} 116 | nginx_log = {} 117 | hostname = uname()[1] 118 | client = InfluxDBClient( 119 | host=influxdb_host, port=influxdb_port, username=influxdb_user, password=influxdb_user_pass, database=influxdb_database) 120 | 121 | try: 122 | logging.debug('Testing InfluxDB connection') 123 | version = client.request('ping', expected_response_code=204).headers['X-Influxdb-Version'] 124 | logging.debug(f'Influxdb version: {version}') 125 | except ConnectionError as e: 126 | logging.critical('Error testing connection to InfluxDB. Please check your url/hostname.\n' 127 | f'Error: {e}' 128 | ) 129 | exit(1) 130 | 131 | try: 132 | databases = [db['name'] for db in client.get_list_database()] 133 | if influxdb_database in databases: 134 | logging.debug(f'Found database: {influxdb_database}') 135 | except InfluxDBClientError as e: 136 | logging.critical('Error getting database list! 
Please check your InfluxDB configuration.\n' 137 | f'Error: {e}' 138 | ) 139 | exit(1) 140 | 141 | if influxdb_database not in databases: 142 | logging.info(f'Creating database: {influxdb_database}') 143 | client.create_database(influxdb_database) 144 | 145 | retention_policies = [policy['name'] for policy in client.get_list_retention_policies(database=influxdb_database)] 146 | if f'{influxdb_database} {influxdb_retention}-{influxdb_shard}' not in retention_policies: 147 | logging.info(f'Creating {influxdb_database} retention policy ({influxdb_retention}-{influxdb_shard})') 148 | client.create_retention_policy(name=f'{influxdb_database} {influxdb_retention}-{influxdb_shard}', duration=influxdb_retention, replication='1', 149 | database=influxdb_database, default=True, shard_duration=influxdb_shard) 150 | 151 | re_ipv4 = compile(r'(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) - (?P.+) \[(?P\d{2}\/[A-Z]{1}[a-z]{2}\/\d{4}:\d{2}:\d{2}:\d{2} ((\+|\-)\d{4}))\](["](?P.+)) (?P.+) ((?PHTTP\/[1-3]\.[0-9])["]) (?P\d{3}) (?P\d{1,99})(["](?P(\-)|(.+))["]) (?P.+) (["](?P.+)["])(["](?P.+)["]) (["](?P.+)["])(["](?P.+)["]) (["](?P.+)["])', IGNORECASE) # NOQA 152 | re_ipv6 = compile(r'(?P(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))) - (?P.+) \[(?P\d{2}\/[A-Z]{1}[a-z]{2}\/\d{4}:\d{2}:\d{2}:\d{2} ((\+|\-)\d{4}))\](["](?P.+)) (?P.+) ((?PHTTP\/[1-3]\.[0-9])["]) (?P\d{3}) 
(?P\d{1,99})(["](?P(\-)|(.+))["]) (?P.+) (["](?P.+)["])(["](?P.+)["]) (["](?P.+)["])(["](?P.+)["]) (["](?P.+)["])', IGNORECASE) # NOQA 153 | 154 | gi = Reader(geoip_db_path) 155 | 156 | if send_nginx_logs in ('true', 'True'): 157 | send_logs = True 158 | else: 159 | send_logs = False 160 | re_ipv4 = compile(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})') 161 | re_ipv6 = compile(r'(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))') # NOQA 162 | logging.info('SEND_NGINX_LOGS set to false') 163 | pass 164 | if not regex_tester(log_path,3): 165 | if send_logs: 166 | re_ipv4 = compile(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})') 167 | re_ipv6 = compile(r'(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))') # NOQA 168 | send_logs = False 169 | logging.warning('NGINX log metrics 
disabled! Double check your NGINX custom log format..') 170 | 171 | # Main loop to parse access.log file in tailf style with sending metrics. 172 | with open(log_path, 'r') as log_file: 173 | logging.info('Starting log parsing') 174 | str_results = stat(log_path) 175 | st_size = str_results[6] 176 | log_file.seek(st_size) 177 | while True: 178 | geo_metrics = [] 179 | log_metrics = [] 180 | where = log_file.tell() 181 | line = log_file.readline() 182 | inodenew = stat(log_path).st_ino 183 | if inode != inodenew: 184 | break 185 | if not line: 186 | sleep(1) 187 | log_file.seek(where) 188 | else: 189 | if re_ipv4.match(line): 190 | m = re_ipv4.match(line) 191 | ip = m.group(1) 192 | log = re_ipv4 193 | elif re_ipv6.match(line): 194 | m = re_ipv6.match(line) 195 | ip = m.group(1) 196 | log = re_ipv6 197 | else: 198 | logging.warning('Failed to match regex that previously matched!? Skipping this line!\n' 199 | 'If you think the regex should have mathed the line, please share the log line below on https://discord.gg/HSPa4cz or Github: https://github.com/gilbN/geoip2influx\n' 200 | f'Line: {line}' 201 | ) 202 | continue 203 | ip_type = ipadd(ip).iptype() 204 | if ip_type in monitored_ip_types and ip: 205 | info = gi.city(ip) 206 | if info: 207 | geohash = encode(info.location.latitude, info.location.longitude) 208 | geohash_fields['count'] = 1 209 | geohash_tags['geohash'] = geohash 210 | geohash_tags['ip'] = ip 211 | geohash_tags['host'] = hostname 212 | geohash_tags['country_code'] = info.country.iso_code 213 | geohash_tags['country_name'] = info.country.name 214 | geohash_tags['state'] = info.subdivisions.most_specific.name if info.subdivisions.most_specific.name else "-" 215 | geohash_tags['state_code'] = info.subdivisions.most_specific.iso_code if info.subdivisions.most_specific.iso_code else "-" 216 | geohash_tags['city'] = info.city.name if info.city.name else "-" 217 | geohash_tags['postal_code'] = info.postal.code if info.postal.code else "-" 218 | 
geohash_tags['latitude'] = info.location.latitude if info.location.latitude else "-" 219 | geohash_tags['longitude'] = info.location.longitude if info.location.longitude else "-" 220 | ips['tags'] = geohash_tags 221 | ips['fields'] = geohash_fields 222 | ips['measurement'] = geo_measurement 223 | geo_metrics.append(ips) 224 | logging.debug(f'Geo metrics: {geo_metrics}') 225 | try: 226 | client.write_points(geo_metrics) 227 | except (InfluxDBServerError, ConnectionError) as e: 228 | logging.error('Error writing data to InfluxDB! Check your database!\n' 229 | f'Error: {e}' 230 | ) 231 | else: 232 | logging.debug(f"Incorrect IP type: {ip_type}") 233 | if send_logs: 234 | data = search(log, line) 235 | if ip_type in monitored_ip_types and ip: 236 | info = gi.city(ip) 237 | if info: 238 | datadict = data.groupdict() 239 | log_data_fields['count'] = 1 240 | log_data_fields['bytes_sent'] = int(datadict['bytes_sent']) 241 | log_data_fields['request_time'] = float(datadict['request_time']) 242 | try: 243 | log_data_fields['connect_time'] = float(datadict['connect_time']) if datadict['connect_time'] != '-' else 0.0 244 | except ValueError: 245 | log_data_fields['connect_time'] = str(datadict['connect_time']) 246 | log_data_tags['ip'] = datadict['ipaddress'] 247 | log_data_tags['datetime'] = datetime.strptime(datadict['dateandtime'], '%d/%b/%Y:%H:%M:%S %z') 248 | log_data_tags['remote_user'] = datadict['remote_user'] 249 | log_data_tags['method'] = datadict['method'] 250 | log_data_tags['referrer'] = datadict['referrer'] 251 | log_data_tags['host'] = datadict['host'] 252 | log_data_tags['http_version'] = datadict['http_version'] 253 | log_data_tags['status_code'] = datadict['status_code'] 254 | log_data_tags['bytes_sent'] = datadict['bytes_sent'] 255 | log_data_tags['url'] = datadict['url'] 256 | log_data_tags['user_agent'] = datadict['user_agent'] 257 | log_data_tags['request_time'] = datadict['request_time'] 258 | log_data_tags['connect_time'] = datadict['connect_time'] 259 
def main():
    """Entry point of the deprecated stand-alone parser.

    Logs the effective configuration, then tails the access log and ships
    metrics for as long as the log and GeoIP database files exist.
    """
    logging.info('Starting geoip2influx..')

    # Build the multi-line settings dump once; join() reproduces the exact
    # newline-prefixed layout of the original concatenated f-strings.
    settings_dump = (
        'Variables set:',
        f' geoip_db_path :: {geoip_db_path}',
        f' -e LOG_PATH :: {log_path}',
        f' -e INFLUX_HOST :: {influxdb_host}',
        f' -e INFLUX_HOST_PORT :: {influxdb_port}',
        f' -e INFLUX_DATABASE :: {influxdb_database}',
        f' -e INFLUX_RETENTION :: {influxdb_retention}',
        f' -e INFLUX_SHARD :: {influxdb_shard}',
        f' -e INFLUX_USER :: {influxdb_user}',
        f' -e INFLUX_PASS :: {influxdb_user_pass}',
        f' -e GEO_MEASUREMENT :: {geo_measurement}',
        f' -e LOG_MEASUREMENT :: {log_measurement}',
        f' -e SEND_NGINX_LOGS :: {send_nginx_logs}',
        f' -e GEOIP2INFLUX_LOG_LEVEL :: {log_level}',
    )
    logging.debug('\n'.join(settings_dump))

    # Parse the log file and send metrics to InfluxDB while both files exist.
    while file_exists(log_path, geoip_db_path):
        # Capture the inode so logparse() can detect logrotate replacing the file.
        inode = stat(log_path).st_ino
        logparse(
            log_path, influxdb_host, influxdb_port, influxdb_database, influxdb_user, influxdb_user_pass,
            influxdb_retention, influxdb_shard, geo_measurement, log_measurement, send_nginx_logs, geoip_db_path, inode)  # NOQA
#!/usr/bin/env python3
"""Regular-expression helpers used to parse NGINX access-log lines."""

import re
from functools import lru_cache

# IP categories (as reported by IPy's iptype()) that correspond to public,
# geolocatable addresses; all other types (private, loopback, ...) are skipped.
MONITORED_IP_TYPES = ['PUBLIC', 'ALLOCATED APNIC', 'ALLOCATED ARIN', 'ALLOCATED RIPE NCC', 'ALLOCATED LACNIC', 'ALLOCATED AFRINIC']

class Rgx:
    """Regular expression patterns for the log file."""
    RE_IPV4_PATTERN = r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
    RE_IPV6_PATTERN = r'(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'  # NOQA
    REMOTE_USER_PATTERN = r'(\S+)'
    DATE_AND_TIME_PATTERN = r'(\d{2}\/[A-Z]{1}[a-z]{2}\/\d{4}:\d{2}:\d{2}:\d{2}\s(?:\+|\-)\d{4})'
    REQUEST_PATTERN = r'(?:)'
    METHOD_PATTERN = r'([A-Z]+)'
    REFERRER_PATTERN = r'(.+?)'
    HTTP_VERSION_PATTERN = r'(HTTP\/[1-3]\.[0-9])'
    STATUS_CODE_PATTERN = r'(\d{3})'
    BYTES_SENT_PATTERN = r'(\d{1,99})'
    URL_PATTERN = r'(?:\-|.+)'
    HOST_PATTERN = r'(.+?)'
    USER_AGENT_PATTERN = r'(.+?)'
    REQUEST_TIME_PATTERN = r'(.+?)'
    CONNECT_TIME_PATTERN = r'(.+?)'
    CITY_PATTERN = r'(.+?)'
    COUNTRY_CODE_PATTERN = r'(.+?)'

@lru_cache(maxsize=None)
def create_log_pattern(ip_pattern: str) -> re.Pattern[str]:
    """Create a regular expression pattern for the log file.

    Cached with lru_cache so the expensive VERBOSE pattern is only compiled
    once per ip_pattern; repeat calls return the same compiled object.

    Args:
        ip_pattern (str): The regular expression pattern for the IP address.

    Returns:
        re.Pattern[str]: The regular expression pattern for the log file.
    """
    # Group names mirror the keys the parser reads from match.groupdict().
    return re.compile(rf'''
        (?P<ipaddress>{ip_pattern})
        \s-\s
        (?P<remote_user>{Rgx.REMOTE_USER_PATTERN})
        \s\[
        (?P<dateandtime>{Rgx.DATE_AND_TIME_PATTERN})\]
        \s?"
        (?P<request>
            ({Rgx.REQUEST_PATTERN}
            (?P<method>{Rgx.METHOD_PATTERN})\s
            (?P<referrer>{Rgx.REFERRER_PATTERN})\s
            (?P<http_version>{Rgx.HTTP_VERSION_PATTERN})
            |
            [^"]*
            )
        )"
        \s
        (?P<status_code>{Rgx.STATUS_CODE_PATTERN})
        \s
        (?P<bytes_sent>{Rgx.BYTES_SENT_PATTERN})
        \s?"
        (?P<url>{Rgx.URL_PATTERN})"
        (?P<host>{Rgx.HOST_PATTERN})"
        (?P<user_agent>{Rgx.USER_AGENT_PATTERN})
        "\s?"
        (?P<request_time>{Rgx.REQUEST_TIME_PATTERN})"
        \s"
        (?P<connect_time>{Rgx.CONNECT_TIME_PATTERN})"
        \s?"
        (?P<city>{Rgx.CITY_PATTERN})"
        \s"
        (?P<country_code>{Rgx.COUNTRY_CODE_PATTERN})"
        ''', re.VERBOSE | re.IGNORECASE)  # NOQA

def ipv4_pattern() -> re.Pattern[str]:
    """Return the full regular expression pattern for an IPv4 log line."""
    return create_log_pattern(Rgx.RE_IPV4_PATTERN)

def ipv6_pattern() -> re.Pattern[str]:
    """Return the full regular expression pattern for an IPv6 log line."""
    return create_log_pattern(Rgx.RE_IPV6_PATTERN)

def ipv4() -> re.Pattern[str]:
    """Return the regular expression pattern for an IPv4 address."""
    return re.compile(Rgx.RE_IPV4_PATTERN)

def ipv6() -> re.Pattern[str]:
    """Return the regular expression pattern for an IPv6 address."""
    return re.compile(Rgx.RE_IPV6_PATTERN)
32 | password (str, optional): The InfluxDB password. Defaults to None. 33 | database (str, optional): The InfluxDB database. Defaults to None. 34 | retention (str, optional): The InfluxDB retention policy. Defaults to None. 35 | shard (str, optional): The InfluxDB shard duration. Defaults to None. 36 | 37 | Raises: 38 | ValueError: If the InfluxDB client is not properly configured. 39 | """ 40 | 41 | self.host = kwargs.pop("host", None) or os.getenv("INFLUX_HOST", "localhost") 42 | self.port = kwargs.pop("port", None) or os.getenv("INFLUX_HOST_PORT", 8086) 43 | self.username = kwargs.pop("username", None) or os.getenv("INFLUX_USER", "root") 44 | self.password = kwargs.pop("password", None) or os.getenv("INFLUX_PASS", "root") 45 | self.database = kwargs.pop("database", None) or os.getenv("INFLUX_DATABASE", "geoip2influx") 46 | self.retention = kwargs.pop("retention", None) or os.getenv("INFLUX_RETENTION", "7d") 47 | self.shard = kwargs.pop("shard", None) or os.getenv("INFLUX_SHARD", "1d") 48 | self.version: str|None = None 49 | self.retention_policy = f"{self.database} {self.retention}-{self.shard}" 50 | self._setup_complete: bool = False 51 | 52 | self.logger = logging.getLogger("InfluxClient") 53 | self.logger.debug("InfluxDB host: %s", self.host) 54 | self.logger.debug("InfluxDB port: %s", self.port) 55 | self.logger.debug("InfluxDB username: %s", self.username) 56 | self.logger.debug("InfluxDB password: %s", self.password) 57 | self.logger.debug("InfluxDB database: %s", self.database) 58 | 59 | self.influx: InfluxDBClient | None = self.create_influx_client( 60 | host=self.host, 61 | port=self.port, 62 | username=self.username, 63 | password=self.password, 64 | database=self.database, 65 | **kwargs 66 | ) 67 | 68 | if auto_init: 69 | self.setup() 70 | 71 | @property 72 | def setup_complete(self) -> bool: 73 | return self._setup_complete 74 | 75 | @setup_complete.setter 76 | def setup_complete(self, value: bool) -> None: 77 | self._setup_complete = value 78 | 79 
| def setup(self): 80 | """Setup the database and retention policy, and validate the setup.""" 81 | self.test_connection() 82 | self.create_database() 83 | self.create_retention_policy() 84 | self.validate() 85 | self.logger.success("InfluxDB client setup complete.") 86 | self.setup_complete = True 87 | 88 | def create_influx_client(self, **kwargs) -> InfluxDBClient | None: 89 | try: 90 | return InfluxDBClient(**kwargs) 91 | except Exception: 92 | self.logger.exception("Error creating InfluxDB client.") 93 | raise 94 | 95 | def test_connection(self) -> None: 96 | try: 97 | self.version: str = self.influx.ping() 98 | self.logger.debug("InfluxDB version: %s", self.version) 99 | except Exception: 100 | self.logger.exception("Error testing connection to InfluxDB. Please check your url/hostname.") 101 | raise 102 | 103 | def write_to_influx(self, data: list[dict]) -> None: 104 | """Write the data to InfluxDB. 105 | 106 | Args: 107 | data (list[dict]): The data to write to InfluxDB. 108 | """ 109 | 110 | try: 111 | if not data: 112 | self.logger.debug("No data to write to InfluxDB.") 113 | return 114 | if self.influx.write_points(data): 115 | measurement = data[0]["measurement"] 116 | self.logger.debug("'%s' data written to InfluxDB.", measurement) 117 | return 118 | self.logger.error("Error writing data to InfluxDB!") 119 | except (InfluxDBServerError, InfluxDBClientError, ConnectionError): 120 | self.logger.exception("Error writing data to InfluxDB! 
Check your database!") 121 | 122 | def create_database(self) -> None: 123 | """Create the database if it does not exist.""" 124 | try: 125 | if self.check_database(): 126 | return 127 | self.logger.info("Creating database.") 128 | self.influx.create_database(self.database) 129 | if self.check_database(): 130 | self.logger.info("Database %s created.", self.database) 131 | except Exception: 132 | self.logger.exception("Error creating database %s.", self.database) 133 | 134 | def check_database(self) -> bool: 135 | """Check if the database exists.""" 136 | try: 137 | databases: list[dict] = self.influx.get_list_database() 138 | if self.database in [db["name"] for db in databases]: 139 | self.logger.debug("Database %s exists.", self.database) 140 | return True 141 | self.logger.debug("Database %s does not exist.", self.database) 142 | return False 143 | except Exception: 144 | self.logger.exception("Error checking database %s.", self.database) 145 | return False 146 | 147 | def create_retention_policy(self) -> None: 148 | """Create the retention policy if it does not exist.""" 149 | 150 | if self.check_retention_policy(): 151 | return 152 | self.logger.info("Creating retention policy %s.", self.retention_policy) 153 | self.influx.create_retention_policy( 154 | name=self.retention_policy, 155 | duration=self.retention, 156 | replication=1, 157 | database=self.database, 158 | default=True, 159 | shard_duration=self.shard 160 | ) 161 | 162 | def check_retention_policy(self) -> bool: 163 | policies: list[dict] = self.influx.get_list_retention_policies(self.database) 164 | retention_policies: list = [policy['name'] for policy in policies] 165 | if self.retention_policy in retention_policies: 166 | self.logger.debug(f"Retention policy {self.retention} exists.") 167 | return True 168 | self.logger.debug(f"Retention policy {self.retention} does not exist.") 169 | return False 170 | 171 | 172 | def validate(self) -> None: 173 | """Validate that everything is properly 
from abc import ABC, abstractmethod

class InfluxBase(ABC):
    """Abstract interface shared by the InfluxDB v1 and v2 clients."""

    @property
    @abstractmethod
    def setup_complete(self) -> bool:
        """Whether the client has finished its setup."""

    @setup_complete.setter
    @abstractmethod
    def setup_complete(self, value: bool) -> None:
        """Record whether the client has finished its setup."""

    @abstractmethod
    def write_to_influx(self, data: list[dict]) -> None:
        """Write measurement points to InfluxDB.

        Annotation fixed: both concrete clients accept a list of point dicts,
        not a string.
        """

    @abstractmethod
    def setup(self) -> None:
        """Perform connection setup and validation."""

    @abstractmethod
    def test_connection(self) -> None:
        """Verify that the InfluxDB server is reachable."""

    @abstractmethod
    def create_influx_client(self, **kwargs) -> None:
        """Instantiate the concrete underlying client object."""
logger: Logger = logging.getLogger(__name__)

class BatchingCallback:
    """Callbacks handed to the batching write API to report batch outcomes."""

    def success(self, conf: tuple[str, str, str], data: str) -> None:
        logger.debug("Written batch: %s, data: %s", conf, data)

    def error(self, conf: tuple[str, str, str], data: str, exception: InfluxDBError) -> None:
        logger.error("Cannot write batch: %s, data: %s due: %s", conf, data, exception)

    def retry(self, conf: tuple[str, str, str], data: str, exception: InfluxDBError) -> None:
        logger.warning("Retryable error occured for batch: %s, data: %s retry: %s", conf, data, exception)

class InfluxClient(InfluxBase):
    def __init__(self, auto_init: bool = True) -> None:
        """Initialize the InfluxDBClient.

        Supported InfluxDBClient environment properties:
        - INFLUXDB_V2_URL
        - INFLUXDB_V2_ORG
        - INFLUXDB_V2_TOKEN
        - INFLUXDB_V2_TIMEOUT
        - INFLUXDB_V2_VERIFY_SSL
        - INFLUXDB_V2_SSL_CA_CERT
        - INFLUXDB_V2_CERT_FILE
        - INFLUXDB_V2_CERT_KEY_FILE
        - INFLUXDB_V2_CERT_KEY_PASSWORD
        - INFLUXDB_V2_CONNECTION_POOL_MAXSIZE
        - INFLUXDB_V2_AUTH_BASIC
        - INFLUXDB_V2_PROFILERS

        Used by this class:
        - INFLUXDB_V2_BUCKET
        - INFLUX_V2_RETENTION
        - INFLUXDB_V2_DEBUG
        - INFLUXDB_V2_BATCHING
        - INFLUXDB_V2_BATCH_SIZE
        - INFLUXDB_V2_FLUSH_INTERVAL

        Args:
            auto_init (bool, optional): Whether to automatically setup the InfluxDB client. Defaults to True.

        Raises:
            ValueError: If the InfluxDB client is not properly configured.
        """

        self.bucket: str = os.getenv("INFLUXDB_V2_BUCKET", "geoip2influx")
        # Annotation fixed: the retention is an int number of seconds, not a str.
        self.retention: int = int(os.getenv("INFLUX_V2_RETENTION", "604800"))
        self.debug: bool = os.getenv("INFLUXDB_V2_DEBUG", "false").lower() == "true"
        self.org: str = os.getenv("INFLUXDB_V2_ORG", "geoip2influx")
        self.version: str|None = None
        self._setup_complete: bool = False
        batching: bool = os.getenv("INFLUXDB_V2_BATCHING", "false").lower() == "true"
        batch_size: int = int(os.getenv("INFLUXDB_V2_BATCH_SIZE", "10"))
        flush_interval: int = int(os.getenv("INFLUXDB_V2_FLUSH_INTERVAL", "15000"))

        self.influx: InfluxDBClient | None = self.create_influx_client(debug=self.debug)

        self.logger: Logger = logging.getLogger("InfluxClient")
        self.logger.debug("InfluxDB url: %s", self.influx.url)
        self.logger.debug("InfluxDB org: %s", self.org)
        self.logger.debug("InfluxDB token: %s", self.influx.token)
        self.logger.debug("InfluxDB bucket: %s", self.bucket)
        self.logger.debug("InfluxDB bucket retention seconds: %s", self.retention)
        self.logger.debug("InfluxDB batching enabled: %s", batching)

        if batching:
            self.logger.debug("InfluxDB batch size: %s", batch_size)
            self.logger.debug("InfluxDB flush interval: %s", flush_interval)
            callback = BatchingCallback()
            write_options: WriteOptions = WriteOptions(batch_size=batch_size, flush_interval=flush_interval)
            self.write_api: WriteApi = self.influx.write_api(
                write_options=write_options,
                success_callback=callback.success,
                error_callback=callback.error,
                retry_callback=callback.retry
            )
        else:
            write_options = SYNCHRONOUS
            self.write_api: WriteApi = self.influx.write_api(write_options=write_options)
        self.bucket_api: BucketsApi = self.influx.buckets_api()
        self.org_api: OrganizationsApi = self.influx.organizations_api()

        if auto_init:
            self.setup()

    @property
    def setup_complete(self) -> bool:
        return self._setup_complete

    @setup_complete.setter
    def setup_complete(self, value: bool) -> None:
        self._setup_complete = value

    def setup(self) -> None:
        """Setup the bucket and retention policy, and validate the setup."""
        self.test_connection()
        self.create_org()
        self.create_bucket()
        self.logger.success("InfluxDB client setup complete.")
        self.setup_complete = True

    def create_influx_client(self, debug = True, enable_gzip:bool = False, **kwargs) -> InfluxDBClient | None:
        """Build the client from INFLUXDB_V2_* environment properties."""
        try:
            return InfluxDBClient.from_env_properties(debug, enable_gzip, **kwargs)
        except Exception:
            self.logger.exception("Error creating InfluxDB client.")
            raise

    def test_connection(self) -> None:
        """Ping the server and record its version; re-raise on failure."""
        try:
            if not self.influx.ping():
                raise ConnectionError("InfluxDB ping failed")
            self.version: str = self.influx.version()
            self.logger.debug("InfluxDB version: %s", self.version)
        except Exception:
            self.logger.exception("Error testing connection to InfluxDB. Please check your url/hostname.")
            raise

    def write_to_influx(self, data: list[dict]) -> None:
        """Write the data to InfluxDB.

        Args:
            data (list[dict]): The data to write to InfluxDB. A no-op when empty.
        """
        try:
            if not data:
                self.logger.debug("No data to write to InfluxDB.")
                return
            records: list[Point] = [Point.from_dict(point) for point in data]
            self.write_api.write(self.bucket, self.org, records)
            measurement = data[0]["measurement"]
            self.logger.debug("'%s' data written to InfluxDB.", measurement)
        except (InfluxDBError, ConnectionError):
            self.logger.exception("Error writing data to InfluxDB! Check your database!")

    def create_bucket(self) -> None:
        """Create the bucket and retention policy if it does not exist."""
        try:
            if self.bucket_exists():
                return
            self.logger.debug("Trying to create bucket '%s'.", self.bucket)
            bucket_description: str = f"Bucket for storing GeoIP data for {self.bucket}"
            bucket_retention = BucketRetentionRules(type="expire", every_seconds=self.retention)
            self.bucket_api.create_bucket(bucket_name=self.bucket, org=self.org, description=bucket_description, retention_rules=bucket_retention)
            if self.bucket_exists():
                self.logger.info("Bucket '%s' created.", self.bucket)
        except InfluxDBError as exc:
            if "Are you using token with sufficient permission?" in exc.message:
                self.logger.debug("Not authorized to create buckets")
            else:
                # Previously any other InfluxDBError was silently swallowed.
                self.logger.exception("Error creating bucket %s.", self.bucket)
        except Exception:
            self.logger.exception("Error creating bucket %s.", self.bucket)

    def bucket_exists(self) -> bool:
        """Check if the bucket exists."""
        try:
            if self.bucket_api.find_bucket_by_name(self.bucket):
                self.logger.debug("Bucket '%s' exists.", self.bucket)
                return True
            self.logger.debug("Bucket '%s' does not exist or no read permissions", self.bucket)
            return False
        except Exception:
            self.logger.exception("Error checking bucket %s.", self.bucket)
            # Previously fell through returning None despite the bool annotation.
            return False

    def create_org(self) -> None:
        """Create the organization if it does not exist."""
        if self.org_exists():
            return
        try:
            self.logger.debug("Trying to create organization '%s'.", self.org)
            self.org_api.create_organization(name=self.org)
            if self.org_exists():
                self.logger.info("Organization '%s' created.", self.org)
        except ApiException as exc:
            if exc.reason == "Unauthorized":
                self.logger.debug("Not authorized to create organizations")
            else:
                # The original call was missing the self.org argument for '%s'.
                self.logger.exception("Error creating organization '%s'.", self.org)
        except Exception:
            self.logger.exception("Error creating organization '%s'.", self.org)

    def org_exists(self) -> bool:
        """Check if the organization exists."""
        try:
            orgs = self.org_api.find_organizations(org=self.org)
            if orgs and self.org in [org.name for org in orgs]:
                self.logger.debug("Organization '%s' exists.", self.org)
                return True
            self.logger.debug("Organization '%s' does not exist or no read permissions", self.org)
            return False
        except Exception:
            self.logger.exception("Error reading organization '%s'.", self.org)
            # Previously fell through returning None despite the bool annotation.
            return False
class ColorPercentStyle(logging.PercentStyle):
    """Custom log formatter that add color to specific log levels."""
    grey: str = "38"
    yellow: str = "33"
    red: str = "31"
    cyan: str = "36"
    green: str = "32"

    def _get_color_fmt(self, color_code, bold=False) -> str:
        """Wrap the format string in the ANSI escape for color_code."""
        # ";1m" selects bold, ";20m" the normal weight used everywhere else.
        if bold:
            return "\x1b[" + color_code + ";1m" + self._fmt + "\x1b[0m"
        return "\x1b[" + color_code + ";20m" + self._fmt + "\x1b[0m"

    def _get_fmt(self, levelno) -> str:
        """Pick the colorized format string for a level (grey fallback)."""
        colors: dict[int, str] = {
            logging.DEBUG: self._get_color_fmt(self.grey),
            logging.INFO: self._get_color_fmt(self.cyan),
            logging.WARNING: self._get_color_fmt(self.yellow),
            logging.ERROR: self._get_color_fmt(self.red),
            logging.CRITICAL: self._get_color_fmt(self.red),
            logging.SUCCESS: self._get_color_fmt(self.green)
        }
        return colors.get(levelno, self._get_color_fmt(self.grey))

    def _format(self, record: LogRecord) -> str:
        return self._get_fmt(record.levelno) % record.__dict__

class CustomLogFormatter(logging.Formatter):
    """Formatter that removes creds from logs."""
    ACCESS_KEY: str = os.environ.get("ACCESS_KEY", "super_secret_key") or "super_secret_key"  # If env is an empty string, use default value
    SECRET_KEY: str = os.environ.get("SECRET_KEY", "super_secret_key") or "super_secret_key"  # If env is an empty string, use default value

    def formatException(self, exc_info) -> str:
        """Format an exception so that it prints on a single line."""
        result: str = super().formatException(exc_info)
        return repr(result)  # or format into one line however you want to

    def format_credential_key(self, s) -> str:
        """Redact the access key. re.escape so a key containing regex
        metacharacters is matched literally instead of as a pattern."""
        return re.sub(re.escape(self.ACCESS_KEY), '(removed)', s)

    def format_secret_key(self, s) -> str:
        """Redact the secret key (escaped for the same reason as above)."""
        return re.sub(re.escape(self.SECRET_KEY), '(removed)', s)

    def format(self, record) -> str:
        s: str = super().format(record)
        if record.exc_text:
            s = s.replace('\n', '') + '|'
        s = self.format_credential_key(s)
        s = self.format_secret_key(s)
        return s

    def formatMessage(self, record) -> str:
        return ColorPercentStyle(self._fmt).format(record)

def configure_logging(log_level: str) -> None:
    """Setup console and file logging"""

    log_level = log_level.upper()
    logger.handlers = []
    logger.setLevel(log_level)

    # Console logging: colorized + credential-scrubbing formatter
    ch = logging.StreamHandler()
    cf = CustomLogFormatter('%(asctime)-15s | %(threadName)-17s | %(name)-12s | %(levelname)-8s | (%(module)s.%(funcName)s|line:%(lineno)d) | %(message)s |', '%d/%m/%Y %H:%M:%S')
    ch.setFormatter(cf)
    ch.setLevel(log_level)
    logger.addHandler(ch)

    # File logging: plain formatter (no ANSI colors), rotated nightly, 7 kept
    fh = TimedRotatingFileHandler(log_dir, when="midnight", interval=1, backupCount=7, delay=True, encoding='utf-8')
    f = logging.Formatter('%(asctime)-15s | %(threadName)-17s | %(name)-12s | %(levelname)-8s | (%(module)s.%(funcName)s|line:%(lineno)d) | %(message)s |', '%d/%m/%Y %H:%M:%S')
    fh.setFormatter(f)
    fh.setLevel(log_level)
    logger.addHandler(fh)

    logging.info('Operating system: %s', platform.platform())
    logging.info('Python version: %s', platform.python_version())
datetime 10 | 11 | from geoip2.database import Reader 12 | from geoip2.models import City 13 | from geohash2 import encode 14 | 15 | from IPy import IP 16 | 17 | from .constants import ipv4_pattern, ipv6_pattern, MONITORED_IP_TYPES, ipv4, ipv6 18 | from .influx import InfluxClient 19 | from .influxv2 import InfluxClient as InfluxClientV2 20 | 21 | logger = logging.getLogger(__name__) 22 | 23 | def wait(timeout_seconds=60): 24 | """Factory Decorator to wait for a function to return True for a given amount of time. 25 | 26 | Args: 27 | timeout_seconds (int, optional): Defaults to 60. 28 | """ 29 | def decorator(func): 30 | @wraps(func) 31 | def wrapper(*args, **kwargs) -> bool: 32 | timeout: float = time.time() + timeout_seconds 33 | while time.time() < timeout: 34 | if func(*args, **kwargs): 35 | return True 36 | time.sleep(1) 37 | logger.error(f"Timeout of {timeout_seconds} seconds reached on {func.__name__} function.") 38 | return False 39 | return wrapper 40 | return decorator 41 | 42 | class LogParser: 43 | def __init__(self, auto_init: bool = True) -> None: 44 | """Initialize the LogParser. 45 | 46 | Args: 47 | auto_init (bool, optional): Will run the setup method if True. Defaults to True. 
48 | """ 49 | self.log_path: str = os.getenv("NGINX_LOG_PATH", "/config/log/nginx/access.log") 50 | self.geoip_path: str = os.getenv("GEOIP_DB_PATH", "/config/geoip2db/GeoLite2-City.mmdb") 51 | self.geo_measurement = os.getenv("GEO_MEASUREMENT", "geoip2influx") 52 | self.log_measurement = os.getenv("LOG_MEASUREMENT", "nginx_access_logs") 53 | self.send_logs: bool = os.getenv("SEND_NGINX_LOGS", "true").lower() == "true" 54 | 55 | use_influxdb_v2: bool = os.getenv("USE_INFLUXDB_V2", "false").lower() == "true" 56 | 57 | self.hostname: str = socket.gethostname() 58 | self.client: InfluxClient | InfluxClientV2 = InfluxClientV2(auto_init) if use_influxdb_v2 else InfluxClient(auto_init) 59 | self.geoip_reader: None|Reader = None 60 | self.current_log_inode: int|None = None 61 | self.parsed_lines: int = 0 62 | 63 | self.logger = logging.getLogger("LogParser") 64 | self.logger.debug("Log file path: %s", self.log_path) 65 | self.logger.debug("GeoIP database path: %s", self.geoip_path) 66 | self.logger.debug("GeoIP measurement name: %s", self.geo_measurement) 67 | self.logger.debug("NGINX log measurement name: %s", self.log_measurement) 68 | self.logger.debug("Send NGINX logs: %s", self.send_logs) 69 | self.logger.debug("Hostname: %s", self.hostname) 70 | 71 | if auto_init: 72 | self.setup() 73 | 74 | def setup(self) -> None: 75 | """Setup the necessary components before running the log parser.""" 76 | self.geoip_reader = Reader(self.geoip_path) 77 | if not self.client.setup_complete: 78 | self.client.setup() 79 | 80 | def validate_log_line(self, log_line: str) -> re.Match[str] | None: 81 | """Validate the log line against the IPv4 and IPv6 patterns.""" 82 | if self.send_logs: 83 | return ipv4_pattern().match(log_line) or ipv6_pattern().match(log_line) 84 | # If we are not sending logs but only geo data to influx, only validate the IP address 85 | self.send_logs = False 86 | return ipv4().match(log_line) or ipv6().match(log_line) 87 | 88 | @wait(timeout_seconds=60) 89 | def 
validate_log_format(self) -> bool: # regex tester 90 | """Try for 60 seconds and validate that the log format is correct by checking the last 3 lines.""" 91 | LAST_LINE_COUNT = 3 92 | POSITION = LAST_LINE_COUNT + 1 93 | log_lines_capture = [] 94 | lines = [] 95 | with open(self.log_path, "r", encoding="utf-8") as f: 96 | while len(log_lines_capture) <= LAST_LINE_COUNT: 97 | try: 98 | f.seek(-POSITION, os.SEEK_END) # Move to the last line 99 | except (IOError, OSError): 100 | f.seek(os.SEEK_SET) # Start of file 101 | break 102 | finally: 103 | log_lines_capture = list(f) # Read all lines from the current position 104 | POSITION *= 2 # Double the position to read more lines 105 | lines: list = log_lines_capture[-LAST_LINE_COUNT:] # Get the last 3 lines 106 | for line in lines: 107 | if self.validate_log_line(line): 108 | self.logger.success("Log file format is valid!") 109 | return True 110 | self.logger.debug("Testing log format") 111 | return False 112 | 113 | @wait(timeout_seconds=60) 114 | def log_file_exists(self) -> bool: 115 | """Try for 60 seconds to check if the log file exists.""" 116 | self.logger.debug(f"Checking if log file {self.log_path} exists.") 117 | if not os.path.exists(self.log_path): 118 | self.logger.warning(f"Log file {self.log_path} does not exist.") 119 | return False 120 | self.logger.info(f"Log file {self.log_path} exists.") 121 | self.current_log_inode: int = os.stat(self.log_path).st_ino 122 | return True 123 | 124 | @wait(timeout_seconds=60) 125 | def geoip_file_exists(self) -> bool: 126 | """Try for 60 seconds to check if the GeoIP file exists.""" 127 | self.logger.debug(f"Checking if GeoIP file {self.geoip_path} exists.") 128 | if not os.path.exists(self.geoip_path): 129 | self.logger.warning(f"GeoIP file {self.geoip_path} does not exist.") 130 | return False 131 | self.logger.info(f"GeoIP file {self.geoip_path} exists.") 132 | return True 133 | 134 | def tail_logs(self, skip_validation: bool = False) -> None: 135 | """Continiously 
tail the log file and parse the logs. 136 | 137 | If the log file has been rotated, reopen the file and continue tailing. 138 | 139 | Writes the geo data to InfluxDB and optionally the log data. 140 | """ 141 | 142 | if not skip_validation: 143 | self.logger.debug("Trying to validate the log file format.") 144 | if not self.validate_log_format(): 145 | self.send_logs = False 146 | self.logger.warning("Log file format is invalid. Only sending geo data to Influx.") 147 | 148 | self.logger.debug("Opening log file.") 149 | with open(self.log_path, "r", encoding="utf-8") as file: 150 | stat_results: os.stat_result = os.stat(self.log_path) 151 | st_size: int = stat_results.st_size 152 | file.seek(st_size) # Move to the end of the file 153 | self.logger.info("Tailing log file.") 154 | while True: 155 | if self.is_rotated(stat_results): 156 | return self.tail_logs(skip_validation=True) # Reopen the file and continue tailing 157 | where = file.tell() # Get the current position in the file 158 | line = file.readline() # Read the next line 159 | if not line: # If the line is empty, wait for 1 second 160 | time.sleep(1) 161 | file.seek(where) # Move to the current position 162 | continue 163 | matched: re.Match[str] | None = self.validate_log_line(line) 164 | if not matched: 165 | self.logger.warning('Failed to match regex that previously matched!? 
Skipping this line!\n' 166 | 'If you think the regex should have mathed the line, please share the log line below on https://discord.gg/HSPa4cz or Github: https://github.com/gilbN/geoip2influx\n' 167 | f'Line: "{line}"') 168 | continue 169 | ip: str = matched.group(1) 170 | geo_metrics: list[dict] = self.create_geo_metrics(ip) 171 | self.client.write_to_influx(geo_metrics) 172 | 173 | if self.send_logs: 174 | log_metrics: list[dict] = self.create_log_metrics(matched, ip) 175 | self.client.write_to_influx(log_metrics) 176 | self.parsed_lines += 1 177 | 178 | def run(self) -> None: 179 | """Tail the log file and write the data to InfluxDB.""" 180 | while all([self.log_file_exists(), self.geoip_file_exists()]): 181 | self.tail_logs() 182 | 183 | def is_rotated(self, stat_result:os.stat_result) -> bool: 184 | """Check if log file has been rotated/truncated. 185 | 186 | Update the current inode if it has changed. 187 | """ 188 | new_stat_results: os.stat_result = os.stat(self.log_path) 189 | new_st_size: int = new_stat_results.st_size 190 | new_inode = new_stat_results.st_ino 191 | if stat_result.st_size > new_st_size: 192 | self.logger.info("Log file has been truncated/rotated.") 193 | return True 194 | if new_inode != self.current_log_inode: 195 | self.logger.info("Log file inode %s has changed. New inode is %s.", self.current_log_inode, new_inode) 196 | self.current_log_inode = new_inode 197 | return True 198 | return False 199 | 200 | def get_ip_type(self, ip:str) -> str: 201 | """Get the IP type of the given IP address. 202 | 203 | If the IP address is invalid, return an empty string. 
204 | """ 205 | if not isinstance(ip, str): 206 | self.logger.error("IP address must be a string.") 207 | return "" 208 | try: 209 | ip_type = IP(ip).iptype() 210 | return ip_type 211 | except ValueError: 212 | self.logger.error("Invalid IP address %s.", ip) 213 | return "" 214 | 215 | def check_ip_type(self, ip:str) -> bool: 216 | """Check that the ip type is one of the monitored IP types.""" 217 | ip_type: str = self.get_ip_type(ip) 218 | if ip_type not in MONITORED_IP_TYPES: 219 | self.logger.debug("IP type %s (%s) is not a monitored IP type.", ip_type, ip) 220 | return False 221 | return True 222 | 223 | def create_geo_metrics(self, ip:str) -> list[dict]: 224 | """Create the geo metrics for the given IP address. 225 | 226 | Args: 227 | ip (str): The IP address to create the metrics for. 228 | 229 | Returns: 230 | list[dict]: A list of geo metrics for the given IP address or an empty list if no data was found. 231 | """ 232 | if not isinstance(ip, str): 233 | self.logger.error("IP address must be a string.") 234 | return [] 235 | 236 | geo_metrics: list[dict] = [] 237 | geohash_fields: dict = {} 238 | geohash_tags: dict = {} 239 | 240 | if not self.check_ip_type(ip): 241 | return [] 242 | 243 | ip_data: City = self.geoip_reader.city(ip) 244 | if not ip_data: 245 | self.logger.debug("No data found for IP %s.", ip) 246 | return [] 247 | 248 | geohash = encode(ip_data.location.latitude, ip_data.location.longitude) 249 | geohash_fields["count"] = 1 250 | geohash_tags["geohash"] = geohash 251 | geohash_tags['ip'] = ip 252 | geohash_tags['host'] = self.hostname 253 | geohash_tags['country_code'] = ip_data.country.iso_code 254 | geohash_tags['country_name'] = ip_data.country.name 255 | geohash_tags['state'] = ip_data.subdivisions.most_specific.name or "-" 256 | geohash_tags['state_code'] = ip_data.subdivisions.most_specific.iso_code or "-" 257 | geohash_tags['city'] = ip_data.city.name or "-" 258 | geohash_tags['postal_code'] = ip_data.postal.code or "-" 259 | 
geohash_tags['latitude'] = ip_data.location.latitude or "-" 260 | geohash_tags['longitude'] = ip_data.location.longitude or "-" 261 | geo_metrics.append( 262 | { 263 | "tags": geohash_tags, 264 | "fields": geohash_fields, 265 | "measurement": self.geo_measurement 266 | }) 267 | self.logger.debug("GeoIP metrics: %s", geo_metrics) 268 | return geo_metrics 269 | 270 | def create_log_metrics(self, log_data:re.Match[str], ip:str) -> list[dict]: 271 | """Create the log metrics for the given log data. 272 | 273 | Args: 274 | log_data (re.Match[str]): The log data to create the metrics for. 275 | 276 | Returns: 277 | list[dict]: A list of log metrics for the given log data or an empty list if no data was found. 278 | """ 279 | log_metrics: list[dict] = [] 280 | log_data_tags: dict = {} 281 | log_data_fields: dict = {} 282 | 283 | if not log_data: 284 | self.logger.error("Log data must be a valid log data.") 285 | return [] 286 | 287 | if not self.check_ip_type(ip): 288 | return [] 289 | 290 | ip_data: City = self.geoip_reader.city(ip) 291 | if not ip_data: 292 | self.logger.debug("No data found for IP %s.", ip) 293 | return [] 294 | 295 | datadict: dict = log_data.groupdict() 296 | 297 | log_data_fields['count'] = 1 298 | log_data_fields['bytes_sent'] = int(datadict['bytes_sent']) 299 | log_data_fields['request_time'] = float(datadict['request_time']) 300 | 301 | try: 302 | log_data_fields['connect_time'] = float(datadict['connect_time']) if datadict['connect_time'] != '-' else 0.0 303 | except ValueError: 304 | log_data_fields['connect_time'] = 0.0 305 | log_data_tags['ip'] = datadict['ipaddress'] 306 | log_data_tags['datetime'] = datetime.strptime(datadict['dateandtime'], '%d/%b/%Y:%H:%M:%S %z') 307 | log_data_tags['remote_user'] = datadict['remote_user'] 308 | log_data_tags['method'] = datadict['method'] 309 | log_data_tags['referrer'] = datadict['referrer'] 310 | log_data_tags['host'] = datadict['host'] 311 | log_data_tags['http_version'] = datadict['http_version'] 312 
| log_data_tags['status_code'] = datadict['status_code'] 313 | log_data_tags['bytes_sent'] = datadict['bytes_sent'] 314 | log_data_tags['url'] = datadict['url'] 315 | log_data_tags['user_agent'] = datadict['user_agent'] 316 | log_data_tags['request_time'] = datadict['request_time'] 317 | log_data_tags['connect_time'] = datadict['connect_time'] 318 | log_data_tags['city'] = datadict['city'] 319 | log_data_tags['country_code'] = datadict['country_code'] 320 | log_data_tags['country_name'] = ip_data.country.name 321 | log_metrics.append( 322 | { 323 | "tags": log_data_tags, 324 | "fields": log_data_fields, 325 | "measurement": self.log_measurement 326 | }) 327 | self.logger.debug("NGINX log metrics: %s", log_metrics) 328 | return log_metrics 329 | -------------------------------------------------------------------------------- /nginx_logs_geo_map.json: -------------------------------------------------------------------------------- 1 | { 2 | "__inputs": [ 3 | { 4 | "name": "DS_INFLUXDB_(GEOIP2)", 5 | "label": "InfluxDB (geoip2)", 6 | "description": "", 7 | "type": "datasource", 8 | "pluginId": "influxdb", 9 | "pluginName": "InfluxDB" 10 | } 11 | ], 12 | "__elements": [], 13 | "__requires": [ 14 | { 15 | "type": "panel", 16 | "id": "geomap", 17 | "name": "Geomap", 18 | "version": "" 19 | }, 20 | { 21 | "type": "grafana", 22 | "id": "grafana", 23 | "name": "Grafana", 24 | "version": "8.4.11" 25 | }, 26 | { 27 | "type": "panel", 28 | "id": "graph", 29 | "name": "Graph (old)", 30 | "version": "" 31 | }, 32 | { 33 | "type": "datasource", 34 | "id": "influxdb", 35 | "name": "InfluxDB", 36 | "version": "1.0.0" 37 | }, 38 | { 39 | "type": "panel", 40 | "id": "stat", 41 | "name": "Stat", 42 | "version": "" 43 | }, 44 | { 45 | "type": "panel", 46 | "id": "table", 47 | "name": "Table", 48 | "version": "" 49 | }, 50 | { 51 | "type": "panel", 52 | "id": "table-old", 53 | "name": "Table (old)", 54 | "version": "" 55 | } 56 | ], 57 | "annotations": { 58 | "list": [ 59 | { 60 | 
"$$hashKey": "object:712", 61 | "builtIn": 1, 62 | "datasource": "-- Grafana --", 63 | "enable": true, 64 | "hide": true, 65 | "iconColor": "rgba(0, 211, 255, 1)", 66 | "name": "Annotations & Alerts", 67 | "target": { 68 | "limit": 100, 69 | "matchAny": false, 70 | "tags": [], 71 | "type": "dashboard" 72 | }, 73 | "type": "dashboard" 74 | } 75 | ] 76 | }, 77 | "description": "Nginx Logs and GEO map", 78 | "editable": true, 79 | "fiscalYearStartMonth": 0, 80 | "gnetId": 12268, 81 | "graphTooltip": 0, 82 | "id": null, 83 | "iteration": 1722161127784, 84 | "links": [ 85 | { 86 | "$$hashKey": "object:17709", 87 | "icon": "external link", 88 | "tags": [], 89 | "targetBlank": true, 90 | "title": "Geoip2Influx", 91 | "type": "link", 92 | "url": "https://github.com/gilbN/lsio-docker-mods/tree/master/letsencrypt/geoip2-nginx-stats" 93 | } 94 | ], 95 | "liveNow": false, 96 | "panels": [ 97 | { 98 | "collapsed": false, 99 | "datasource": { 100 | "type": "influxdb", 101 | "uid": "${DS_INFLUXDB_(GEOIP2)}" 102 | }, 103 | "gridPos": { 104 | "h": 1, 105 | "w": 24, 106 | "x": 0, 107 | "y": 0 108 | }, 109 | "id": 8, 110 | "panels": [], 111 | "title": "GEO MAP", 112 | "type": "row" 113 | }, 114 | { 115 | "fieldConfig": { 116 | "defaults": { 117 | "color": { 118 | "mode": "thresholds" 119 | }, 120 | "custom": { 121 | "hideFrom": { 122 | "legend": false, 123 | "tooltip": false, 124 | "viz": false 125 | } 126 | }, 127 | "mappings": [], 128 | "thresholds": { 129 | "mode": "absolute", 130 | "steps": [ 131 | { 132 | "color": "green", 133 | "value": null 134 | }, 135 | { 136 | "color": "orange", 137 | "value": 100 138 | }, 139 | { 140 | "color": "red", 141 | "value": 500 142 | } 143 | ] 144 | } 145 | }, 146 | "overrides": [ 147 | { 148 | "matcher": { 149 | "id": "byName", 150 | "options": "metric" 151 | }, 152 | "properties": [ 153 | { 154 | "id": "displayName", 155 | "value": "Count" 156 | } 157 | ] 158 | }, 159 | { 160 | "matcher": { 161 | "id": "byName", 162 | "options": "city" 163 | }, 
164 | "properties": [ 165 | { 166 | "id": "displayName", 167 | "value": "City" 168 | } 169 | ] 170 | }, 171 | { 172 | "matcher": { 173 | "id": "byName", 174 | "options": "geohash" 175 | }, 176 | "properties": [ 177 | { 178 | "id": "displayName", 179 | "value": "Geohash" 180 | } 181 | ] 182 | }, 183 | { 184 | "matcher": { 185 | "id": "byName", 186 | "options": "Time" 187 | }, 188 | "properties": [ 189 | { 190 | "id": "custom.hideFrom", 191 | "value": { 192 | "legend": true, 193 | "tooltip": true, 194 | "viz": true 195 | } 196 | } 197 | ] 198 | } 199 | ] 200 | }, 201 | "gridPos": { 202 | "h": 14, 203 | "w": 11, 204 | "x": 0, 205 | "y": 1 206 | }, 207 | "id": 28, 208 | "maxDataPoints": 1, 209 | "options": { 210 | "basemap": { 211 | "config": {}, 212 | "name": "Layer 0", 213 | "type": "default" 214 | }, 215 | "controls": { 216 | "mouseWheelZoom": true, 217 | "showAttribution": true, 218 | "showDebug": false, 219 | "showScale": false, 220 | "showZoom": true 221 | }, 222 | "layers": [ 223 | { 224 | "config": { 225 | "showLegend": true, 226 | "style": { 227 | "color": { 228 | "field": "metric", 229 | "fixed": "dark-green" 230 | }, 231 | "opacity": 0.4, 232 | "rotation": { 233 | "fixed": 0, 234 | "max": 360, 235 | "min": -360, 236 | "mode": "mod" 237 | }, 238 | "size": { 239 | "field": "metric", 240 | "fixed": 5, 241 | "max": 20, 242 | "min": 2 243 | }, 244 | "symbol": { 245 | "fixed": "img/icons/marker/circle.svg", 246 | "mode": "fixed" 247 | }, 248 | "text": { 249 | "fixed": "", 250 | "mode": "field" 251 | }, 252 | "textConfig": { 253 | "fontSize": 12, 254 | "offsetX": 0, 255 | "offsetY": 0, 256 | "textAlign": "center", 257 | "textBaseline": "middle" 258 | } 259 | } 260 | }, 261 | "location": { 262 | "": { 263 | "geohash": "geohash" 264 | }, 265 | "mode": "auto" 266 | }, 267 | "name": "Cities", 268 | "tooltip": true, 269 | "type": "markers" 270 | } 271 | ], 272 | "view": { 273 | "id": "coords", 274 | "lat": 46, 275 | "lon": 14, 276 | "zoom": 2 277 | } 278 | }, 279 | 
"pluginVersion": "8.4.11", 280 | "targets": [ 281 | { 282 | "datasource": { 283 | "type": "influxdb", 284 | "uid": "${DS_INFLUXDB_(GEOIP2)}" 285 | }, 286 | "groupBy": [ 287 | { 288 | "params": [ 289 | "geohash" 290 | ], 291 | "type": "tag" 292 | }, 293 | { 294 | "params": [ 295 | "city" 296 | ], 297 | "type": "tag" 298 | } 299 | ], 300 | "hide": false, 301 | "measurement": "geoip2influx", 302 | "orderByTime": "ASC", 303 | "policy": "default", 304 | "query": "SELECT sum(\"count\") AS \"metric\" FROM \"geoip2influx\" WHERE (\"ip\" !~ /^37\\.49\\.71\\.47$/ AND \"country_name\" =~ /^(Argentina|Australia|Belarus|Belgium|Brazil|Bulgaria|Canada|Chad|Chile|China|Cyprus|Czechia|Egypt|France|Germany|Greece|Guatemala|Hong Kong|Hungary|India|Indonesia|Iran|Ireland|Italy|Japan|Jordan|Kazakhstan|Lithuania|Malaysia|Mexico|Moldova|Monaco|Norway|Poland|Portugal|Puerto Rico|Romania|Russia|Saudi Arabia|Serbia|Seychelles|Singapore|Slovenia|South Africa|South Korea|Spain|Switzerland|Taiwan|Thailand|The Netherlands|Türkiye|Ukraine|United Arab Emirates|United Kingdom|United States|Vietnam)$/ AND \"country_code\" =~ /^(AE|AR|AU|BE|BG|BR|BY|CA|CH|CL|CN|CY|CZ|DE|EG|ES|FR|GB|GR|GT|HK|HU|ID|IE|IN|IR|IT|JO|JP|KR|KZ|LT|MC|MD|MX|MY|NL|NO|PL|PR|PT|RO|RS|RU|SA|SC|SG|SI|TD|TH|TR|TW|UA|US|VN|ZA)$/ AND \"city\" =~ /^(-|Abéché|Akasztó|Alexandria|Amritsar|Amstelveen|Amsterdam|Ankara|Anse aux Pins|Antwerp|Ashburn|Athens|Atlanta|Augsburg|Baden-Baden|Bangkok|Batman|Beijing|Belovo|Bengaluru|Bergamo|Berlin|Bhopal|Bhubaneswar|Boardman|Brasília|Bremen|Brescia|Brownsville|Brussels|Budapest|Buenos Aires|Buffalo|Burgas|Butler|Békéscsaba|Cairo|Chengdu|Chennai|Chillán|Chisinau|Chon Buri|Chongqing|Cincinnati|Ciudad Obregón|Clifton|Columbus|Coluna|Council Bluffs|Crateús|Dadeville|Depok|Des Moines|Dharamsala|Dimitrovgrad|Dubai|Dublin|Düsseldorf|Edison|Elk Grove Village|Eneas Marques|Eygelshoven|Faridabad|Firmat|Foshan|Frankfurt am Main|Fuzhou|Gmina Widawa|Goyang-si|Groningen|Guangzhou|Guaramiranga|Guatemala 
City|Haidian|Hamburg|Hangzhou|Hanover|Harrow|Hisar|Ho Chi Minh City|Hohenlockstedt|Hong Kong|Imola|Istanbul|Jaipur|Jakarta|Jhajjar|Johannesburg|Joinville|Kaliningrad|Kangra|Kaohsiung City|Karlsruhe|Kent|Kharkiv|Kiskunfélegyháza|Kuala Lumpur|Kurud|Larnaca|Lille|Ljubljana|London|Long Beach|Los Angeles|Lugano|Makhachkala|Mannheim|Marseille|Merkez|Milan|Minsk|Moca|Monaco|Mos|Moscow|Mosier|Mumbai|Munich|Naaldwijk|Nakhon Pathom|Nevel|New Taipei|New York|Nice|Nishinomiya|North Bergen|North Charleston|Novosibirsk|Nuremberg|Olinda|Olomouc|Origgio|Oslo|Paris|Perm|Perth|Petropavl|Pohuwato|Pune|Qom|Quanzhou|Rakovski|Ramenskoye|Reims|Riyadh|Rudri|Ruma|San Antonio|San Antonio Oeste|San Diego|San Jose|Santa Clara|Saratov|Sasang-gu|Savosa|Sayama|Scalenghe|Seoul|Shenzhen|Siena|Singapore|Siverek|Slough|Smolensk|Sofia|Springdale|Staten Island|Stuttgart|Surakarta|Sydney|Tainan City|Takamatsu|Taoyuan District|Tokyo|Toronto|Tyumen|Ubajara|Ufa|Upper Marlboro|Valencia|Venado Tuerto|Vevey|Viareggio|Vilnius|Warsaw|Washington|Whitehaven|Zhengzhou|Zurich|Ócsa)$/) AND $timeFilter GROUP BY \"geohash\", \"city\"", 305 | "rawQuery": false, 306 | "refId": "A", 307 | "resultFormat": "table", 308 | "select": [ 309 | [ 310 | { 311 | "params": [ 312 | "count" 313 | ], 314 | "type": "field" 315 | }, 316 | { 317 | "params": [], 318 | "type": "sum" 319 | }, 320 | { 321 | "params": [ 322 | "metric" 323 | ], 324 | "type": "alias" 325 | } 326 | ] 327 | ], 328 | "tags": [ 329 | { 330 | "key": "ip", 331 | "operator": "!~", 332 | "value": "/^$excluded_ip$/" 333 | }, 334 | { 335 | "condition": "AND", 336 | "key": "country_name", 337 | "operator": "=~", 338 | "value": "/^$country$/" 339 | }, 340 | { 341 | "condition": "AND", 342 | "key": "country_code", 343 | "operator": "=~", 344 | "value": "/^$country_code$/" 345 | }, 346 | { 347 | "condition": "AND", 348 | "key": "city", 349 | "operator": "=~", 350 | "value": "/^$city$/" 351 | } 352 | ] 353 | } 354 | ], 355 | "timeFrom": "$geo_interval", 356 | "title": 
"GEOMAP", 357 | "transformations": [], 358 | "type": "geomap" 359 | }, 360 | { 361 | "datasource": { 362 | "uid": "$datasource" 363 | }, 364 | "fieldConfig": { 365 | "defaults": { 366 | "color": { 367 | "mode": "continuous-GrYlRd" 368 | }, 369 | "custom": { 370 | "align": "left", 371 | "displayMode": "auto", 372 | "filterable": false 373 | }, 374 | "mappings": [], 375 | "thresholds": { 376 | "mode": "absolute", 377 | "steps": [ 378 | { 379 | "color": "green", 380 | "value": null 381 | }, 382 | { 383 | "color": "red", 384 | "value": 80 385 | } 386 | ] 387 | } 388 | }, 389 | "overrides": [ 390 | { 391 | "matcher": { 392 | "id": "byName", 393 | "options": "Count" 394 | }, 395 | "properties": [ 396 | { 397 | "id": "thresholds", 398 | "value": { 399 | "mode": "absolute", 400 | "steps": [ 401 | { 402 | "color": "rgba(50, 172, 45, 0.97)", 403 | "value": null 404 | }, 405 | { 406 | "color": "rgba(237, 129, 40, 0.89)", 407 | "value": 5000 408 | }, 409 | { 410 | "color": "rgba(245, 54, 54, 0.9)", 411 | "value": 10000 412 | } 413 | ] 414 | } 415 | }, 416 | { 417 | "id": "custom.displayMode", 418 | "value": "color-background" 419 | }, 420 | { 421 | "id": "color", 422 | "value": { 423 | "mode": "thresholds" 424 | } 425 | }, 426 | { 427 | "id": "custom.width", 428 | "value": 84 429 | } 430 | ] 431 | }, 432 | { 433 | "matcher": { 434 | "id": "byName", 435 | "options": "Domain" 436 | }, 437 | "properties": [ 438 | { 439 | "id": "custom.width", 440 | "value": 286 441 | } 442 | ] 443 | } 444 | ] 445 | }, 446 | "gridPos": { 447 | "h": 14, 448 | "w": 5, 449 | "x": 11, 450 | "y": 1 451 | }, 452 | "hideTimeOverride": true, 453 | "id": 14, 454 | "options": { 455 | "footer": { 456 | "fields": "", 457 | "reducer": [ 458 | "sum" 459 | ], 460 | "show": false 461 | }, 462 | "showHeader": true, 463 | "sortBy": [ 464 | { 465 | "desc": true, 466 | "displayName": "Count" 467 | } 468 | ] 469 | }, 470 | "pluginVersion": "8.4.11", 471 | "targets": [ 472 | { 473 | "groupBy": [ 474 | { 475 | "params": 
[ 476 | "host" 477 | ], 478 | "type": "tag" 479 | } 480 | ], 481 | "limit": "15", 482 | "measurement": "nginx_access_logs", 483 | "orderByTime": "ASC", 484 | "policy": "default", 485 | "refId": "A", 486 | "resultFormat": "table", 487 | "select": [ 488 | [ 489 | { 490 | "params": [ 491 | "count" 492 | ], 493 | "type": "field" 494 | }, 495 | { 496 | "params": [], 497 | "type": "sum" 498 | } 499 | ] 500 | ], 501 | "tags": [ 502 | { 503 | "key": "ip", 504 | "operator": "!~", 505 | "value": "/^$excluded_ip$/" 506 | } 507 | ] 508 | } 509 | ], 510 | "timeFrom": "$geo_interval", 511 | "title": "Top visited domains", 512 | "transformations": [ 513 | { 514 | "id": "organize", 515 | "options": { 516 | "excludeByName": { 517 | "Time": true 518 | }, 519 | "indexByName": {}, 520 | "renameByName": { 521 | "host": "Domain", 522 | "sum": "Count" 523 | } 524 | } 525 | } 526 | ], 527 | "type": "table" 528 | }, 529 | { 530 | "datasource": { 531 | "uid": "$datasource" 532 | }, 533 | "fieldConfig": { 534 | "defaults": { 535 | "displayName": "", 536 | "mappings": [], 537 | "thresholds": { 538 | "mode": "absolute", 539 | "steps": [ 540 | { 541 | "color": "rgba(50, 172, 45, 0.97)", 542 | "value": null 543 | }, 544 | { 545 | "color": "#EAB839", 546 | "value": 524300000 547 | }, 548 | { 549 | "color": "red", 550 | "value": 1049000000 551 | } 552 | ] 553 | }, 554 | "unit": "bytes" 555 | }, 556 | "overrides": [] 557 | }, 558 | "gridPos": { 559 | "h": 3, 560 | "w": 5, 561 | "x": 16, 562 | "y": 1 563 | }, 564 | "hideTimeOverride": true, 565 | "id": 19, 566 | "links": [], 567 | "options": { 568 | "colorMode": "background", 569 | "graphMode": "none", 570 | "justifyMode": "auto", 571 | "orientation": "horizontal", 572 | "reduceOptions": { 573 | "calcs": [ 574 | "sum" 575 | ], 576 | "fields": "", 577 | "values": false 578 | }, 579 | "textMode": "auto" 580 | }, 581 | "pluginVersion": "8.4.11", 582 | "targets": [ 583 | { 584 | "alias": "", 585 | "groupBy": [ 586 | { 587 | "params": [ 588 | 
"$__interval" 589 | ], 590 | "type": "time" 591 | }, 592 | { 593 | "params": [ 594 | "null" 595 | ], 596 | "type": "fill" 597 | } 598 | ], 599 | "measurement": "nginx_access_logs", 600 | "orderByTime": "ASC", 601 | "policy": "default", 602 | "refId": "A", 603 | "resultFormat": "time_series", 604 | "select": [ 605 | [ 606 | { 607 | "params": [ 608 | "bytes_sent" 609 | ], 610 | "type": "field" 611 | }, 612 | { 613 | "params": [], 614 | "type": "sum" 615 | } 616 | ] 617 | ], 618 | "tags": [ 619 | { 620 | "key": "ip", 621 | "operator": "!~", 622 | "value": "/^$excluded_ip$/" 623 | }, 624 | { 625 | "condition": "AND", 626 | "key": "city", 627 | "operator": "=~", 628 | "value": "/^$city$/" 629 | }, 630 | { 631 | "condition": "AND", 632 | "key": "country_code", 633 | "operator": "=~", 634 | "value": "/^$country_code$/" 635 | }, 636 | { 637 | "condition": "AND", 638 | "key": "country_name", 639 | "operator": "=~", 640 | "value": "/^$country$/" 641 | }, 642 | { 643 | "condition": "AND", 644 | "key": "host", 645 | "operator": "=~", 646 | "value": "/^$domain$/" 647 | } 648 | ] 649 | } 650 | ], 651 | "timeFrom": "$geo_interval", 652 | "title": "Data Transmitted", 653 | "type": "stat" 654 | }, 655 | { 656 | "datasource": { 657 | "uid": "$datasource" 658 | }, 659 | "fieldConfig": { 660 | "defaults": { 661 | "displayName": "", 662 | "mappings": [], 663 | "thresholds": { 664 | "mode": "absolute", 665 | "steps": [ 666 | { 667 | "color": "rgba(50, 172, 45, 0.97)", 668 | "value": null 669 | }, 670 | { 671 | "color": "#EAB839", 672 | "value": 5000 673 | }, 674 | { 675 | "color": "red", 676 | "value": 10000 677 | } 678 | ] 679 | }, 680 | "unit": "none" 681 | }, 682 | "overrides": [] 683 | }, 684 | "gridPos": { 685 | "h": 3, 686 | "w": 3, 687 | "x": 21, 688 | "y": 1 689 | }, 690 | "hideTimeOverride": true, 691 | "id": 25, 692 | "links": [], 693 | "options": { 694 | "colorMode": "background", 695 | "graphMode": "none", 696 | "justifyMode": "auto", 697 | "orientation": "horizontal", 698 | 
"reduceOptions": { 699 | "calcs": [ 700 | "sum" 701 | ], 702 | "fields": "", 703 | "values": false 704 | }, 705 | "textMode": "auto" 706 | }, 707 | "pluginVersion": "8.4.11", 708 | "targets": [ 709 | { 710 | "alias": "", 711 | "groupBy": [ 712 | { 713 | "params": [ 714 | "$__interval" 715 | ], 716 | "type": "time" 717 | }, 718 | { 719 | "params": [ 720 | "null" 721 | ], 722 | "type": "fill" 723 | } 724 | ], 725 | "measurement": "geoip2influx", 726 | "orderByTime": "ASC", 727 | "policy": "default", 728 | "refId": "A", 729 | "resultFormat": "time_series", 730 | "select": [ 731 | [ 732 | { 733 | "params": [ 734 | "count" 735 | ], 736 | "type": "field" 737 | }, 738 | { 739 | "params": [], 740 | "type": "count" 741 | } 742 | ] 743 | ], 744 | "tags": [ 745 | { 746 | "key": "ip", 747 | "operator": "!~", 748 | "value": "/^$excluded_ip$/" 749 | }, 750 | { 751 | "condition": "AND", 752 | "key": "country_code", 753 | "operator": "=~", 754 | "value": "/^$country_code$/" 755 | }, 756 | { 757 | "condition": "AND", 758 | "key": "country_name", 759 | "operator": "=~", 760 | "value": "/^$country$/" 761 | }, 762 | { 763 | "condition": "AND", 764 | "key": "city", 765 | "operator": "=~", 766 | "value": "/^$city$/" 767 | } 768 | ] 769 | } 770 | ], 771 | "timeFrom": "$geo_interval", 772 | "title": "Total Requests", 773 | "type": "stat" 774 | }, 775 | { 776 | "datasource": { 777 | "uid": "$datasource" 778 | }, 779 | "fieldConfig": { 780 | "defaults": { 781 | "custom": { 782 | "align": "auto", 783 | "displayMode": "auto", 784 | "filterable": false 785 | }, 786 | "mappings": [], 787 | "thresholds": { 788 | "mode": "absolute", 789 | "steps": [ 790 | { 791 | "color": "green", 792 | "value": null 793 | }, 794 | { 795 | "color": "red", 796 | "value": 80 797 | } 798 | ] 799 | } 800 | }, 801 | "overrides": [ 802 | { 803 | "matcher": { 804 | "id": "byName", 805 | "options": "Count" 806 | }, 807 | "properties": [ 808 | { 809 | "id": "custom.displayMode", 810 | "value": "color-background" 811 | }, 
812 | { 813 | "id": "thresholds", 814 | "value": { 815 | "mode": "absolute", 816 | "steps": [ 817 | { 818 | "color": "rgba(50, 172, 45, 0.97)", 819 | "value": null 820 | }, 821 | { 822 | "color": "rgba(237, 129, 40, 0.89)", 823 | "value": 500 824 | }, 825 | { 826 | "color": "rgba(245, 54, 54, 0.9)", 827 | "value": 1000 828 | } 829 | ] 830 | } 831 | }, 832 | { 833 | "id": "custom.width", 834 | "value": 74 835 | } 836 | ] 837 | }, 838 | { 839 | "matcher": { 840 | "id": "byName", 841 | "options": "IP" 842 | }, 843 | "properties": [ 844 | { 845 | "id": "custom.width", 846 | "value": 123 847 | } 848 | ] 849 | } 850 | ] 851 | }, 852 | "gridPos": { 853 | "h": 11, 854 | "w": 5, 855 | "x": 16, 856 | "y": 4 857 | }, 858 | "hideTimeOverride": true, 859 | "id": 21, 860 | "links": [], 861 | "options": { 862 | "footer": { 863 | "fields": "", 864 | "reducer": [ 865 | "sum" 866 | ], 867 | "show": false 868 | }, 869 | "showHeader": true, 870 | "sortBy": [ 871 | { 872 | "desc": true, 873 | "displayName": "Count" 874 | } 875 | ] 876 | }, 877 | "pluginVersion": "8.4.11", 878 | "targets": [ 879 | { 880 | "groupBy": [ 881 | { 882 | "params": [ 883 | "ip" 884 | ], 885 | "type": "tag" 886 | }, 887 | { 888 | "params": [ 889 | "country_name" 890 | ], 891 | "type": "tag" 892 | } 893 | ], 894 | "measurement": "geoip2influx", 895 | "orderByTime": "ASC", 896 | "policy": "default", 897 | "refId": "A", 898 | "resultFormat": "table", 899 | "select": [ 900 | [ 901 | { 902 | "params": [ 903 | "count" 904 | ], 905 | "type": "field" 906 | }, 907 | { 908 | "params": [], 909 | "type": "sum" 910 | } 911 | ] 912 | ], 913 | "tags": [ 914 | { 915 | "key": "ip", 916 | "operator": "!~", 917 | "value": "/^$excluded_ip$/" 918 | }, 919 | { 920 | "condition": "AND", 921 | "key": "country_name", 922 | "operator": "=~", 923 | "value": "/^$country$/" 924 | }, 925 | { 926 | "condition": "AND", 927 | "key": "country_code", 928 | "operator": "=~", 929 | "value": "/^$country_code$/" 930 | }, 931 | { 932 | "condition": 
"AND", 933 | "key": "city", 934 | "operator": "=~", 935 | "value": "/^$city$/" 936 | } 937 | ] 938 | } 939 | ], 940 | "timeFrom": "$geo_interval", 941 | "title": "Top IP/Country", 942 | "transformations": [ 943 | { 944 | "id": "organize", 945 | "options": { 946 | "excludeByName": { 947 | "Time": true 948 | }, 949 | "indexByName": {}, 950 | "renameByName": { 951 | "country_name": "Country", 952 | "ip": "IP", 953 | "sum": "Count" 954 | } 955 | } 956 | } 957 | ], 958 | "type": "table" 959 | }, 960 | { 961 | "datasource": { 962 | "uid": "$datasource" 963 | }, 964 | "fieldConfig": { 965 | "defaults": { 966 | "custom": { 967 | "align": "left", 968 | "displayMode": "auto", 969 | "filterable": false 970 | }, 971 | "mappings": [], 972 | "thresholds": { 973 | "mode": "absolute", 974 | "steps": [ 975 | { 976 | "color": "green", 977 | "value": null 978 | }, 979 | { 980 | "color": "red", 981 | "value": 80 982 | } 983 | ] 984 | } 985 | }, 986 | "overrides": [ 987 | { 988 | "matcher": { 989 | "id": "byName", 990 | "options": "Count" 991 | }, 992 | "properties": [ 993 | { 994 | "id": "custom.displayMode", 995 | "value": "color-background" 996 | }, 997 | { 998 | "id": "thresholds", 999 | "value": { 1000 | "mode": "absolute", 1001 | "steps": [ 1002 | { 1003 | "color": "rgba(50, 172, 45, 0.97)", 1004 | "value": null 1005 | }, 1006 | { 1007 | "color": "rgba(237, 129, 40, 0.89)", 1008 | "value": 5000 1009 | }, 1010 | { 1011 | "color": "rgba(245, 54, 54, 0.9)", 1012 | "value": 10000 1013 | } 1014 | ] 1015 | } 1016 | }, 1017 | { 1018 | "id": "custom.width", 1019 | "value": 92 1020 | } 1021 | ] 1022 | }, 1023 | { 1024 | "matcher": { 1025 | "id": "byName", 1026 | "options": "Country Code" 1027 | }, 1028 | "properties": [ 1029 | { 1030 | "id": "custom.width", 1031 | "value": 127 1032 | } 1033 | ] 1034 | } 1035 | ] 1036 | }, 1037 | "gridPos": { 1038 | "h": 11, 1039 | "w": 3, 1040 | "x": 21, 1041 | "y": 4 1042 | }, 1043 | "hideTimeOverride": true, 1044 | "id": 26, 1045 | "options": { 1046 | 
"footer": { 1047 | "fields": "", 1048 | "reducer": [ 1049 | "sum" 1050 | ], 1051 | "show": false 1052 | }, 1053 | "showHeader": true, 1054 | "sortBy": [ 1055 | { 1056 | "desc": true, 1057 | "displayName": "Count" 1058 | } 1059 | ] 1060 | }, 1061 | "pluginVersion": "8.4.11", 1062 | "targets": [ 1063 | { 1064 | "groupBy": [ 1065 | { 1066 | "params": [ 1067 | "country_code" 1068 | ], 1069 | "type": "tag" 1070 | } 1071 | ], 1072 | "limit": "15", 1073 | "measurement": "geoip2influx", 1074 | "orderByTime": "ASC", 1075 | "policy": "default", 1076 | "refId": "A", 1077 | "resultFormat": "table", 1078 | "select": [ 1079 | [ 1080 | { 1081 | "params": [ 1082 | "count" 1083 | ], 1084 | "type": "field" 1085 | }, 1086 | { 1087 | "params": [], 1088 | "type": "sum" 1089 | } 1090 | ] 1091 | ], 1092 | "tags": [ 1093 | { 1094 | "key": "ip", 1095 | "operator": "!~", 1096 | "value": "/^$excluded_ip$/" 1097 | }, 1098 | { 1099 | "condition": "AND", 1100 | "key": "country_code", 1101 | "operator": "=~", 1102 | "value": "/^$country_code$/" 1103 | } 1104 | ] 1105 | } 1106 | ], 1107 | "timeFrom": "$geo_interval", 1108 | "title": "Top 15 countries", 1109 | "transformations": [ 1110 | { 1111 | "id": "organize", 1112 | "options": { 1113 | "excludeByName": { 1114 | "Time": true 1115 | }, 1116 | "indexByName": {}, 1117 | "renameByName": { 1118 | "country_code": "Country Code", 1119 | "sum": "Count" 1120 | } 1121 | } 1122 | } 1123 | ], 1124 | "type": "table" 1125 | }, 1126 | { 1127 | "datasource": { 1128 | "type": "influxdb", 1129 | "uid": "$datasource" 1130 | }, 1131 | "fieldConfig": { 1132 | "defaults": { 1133 | "custom": { 1134 | "align": "auto", 1135 | "displayMode": "auto", 1136 | "filterable": false 1137 | }, 1138 | "mappings": [], 1139 | "thresholds": { 1140 | "mode": "absolute", 1141 | "steps": [ 1142 | { 1143 | "color": "green", 1144 | "value": null 1145 | }, 1146 | { 1147 | "color": "red", 1148 | "value": 80 1149 | } 1150 | ] 1151 | } 1152 | }, 1153 | "overrides": [ 1154 | { 1155 | 
"matcher": { 1156 | "id": "byName", 1157 | "options": "datetime" 1158 | }, 1159 | "properties": [ 1160 | { 1161 | "id": "custom.width", 1162 | "value": 211 1163 | } 1164 | ] 1165 | }, 1166 | { 1167 | "matcher": { 1168 | "id": "byName", 1169 | "options": "Connect Time" 1170 | }, 1171 | "properties": [ 1172 | { 1173 | "id": "custom.width", 1174 | "value": 110 1175 | } 1176 | ] 1177 | }, 1178 | { 1179 | "matcher": { 1180 | "id": "byName", 1181 | "options": "Country Code" 1182 | }, 1183 | "properties": [ 1184 | { 1185 | "id": "custom.width", 1186 | "value": 102 1187 | } 1188 | ] 1189 | }, 1190 | { 1191 | "matcher": { 1192 | "id": "byName", 1193 | "options": "City" 1194 | }, 1195 | "properties": [ 1196 | { 1197 | "id": "custom.width", 1198 | "value": 125 1199 | } 1200 | ] 1201 | }, 1202 | { 1203 | "matcher": { 1204 | "id": "byName", 1205 | "options": "Bytes Sent" 1206 | }, 1207 | "properties": [ 1208 | { 1209 | "id": "custom.width", 1210 | "value": 84 1211 | } 1212 | ] 1213 | }, 1214 | { 1215 | "matcher": { 1216 | "id": "byName", 1217 | "options": "Count" 1218 | }, 1219 | "properties": [ 1220 | { 1221 | "id": "custom.width", 1222 | "value": 53 1223 | } 1224 | ] 1225 | }, 1226 | { 1227 | "matcher": { 1228 | "id": "byName", 1229 | "options": "Method" 1230 | }, 1231 | "properties": [ 1232 | { 1233 | "id": "custom.width", 1234 | "value": 82 1235 | } 1236 | ] 1237 | }, 1238 | { 1239 | "matcher": { 1240 | "id": "byName", 1241 | "options": "IP" 1242 | }, 1243 | "properties": [ 1244 | { 1245 | "id": "custom.width", 1246 | "value": 121 1247 | } 1248 | ] 1249 | }, 1250 | { 1251 | "matcher": { 1252 | "id": "byName", 1253 | "options": "HTTP Version" 1254 | }, 1255 | "properties": [ 1256 | { 1257 | "id": "custom.width", 1258 | "value": 108 1259 | } 1260 | ] 1261 | }, 1262 | { 1263 | "matcher": { 1264 | "id": "byName", 1265 | "options": "Country" 1266 | }, 1267 | "properties": [ 1268 | { 1269 | "id": "custom.width", 1270 | "value": 86 1271 | } 1272 | ] 1273 | }, 1274 | { 1275 | 
"matcher": { 1276 | "id": "byName", 1277 | "options": "URL" 1278 | }, 1279 | "properties": [ 1280 | { 1281 | "id": "custom.width", 1282 | "value": 188 1283 | } 1284 | ] 1285 | }, 1286 | { 1287 | "matcher": { 1288 | "id": "byName", 1289 | "options": "Status Code" 1290 | }, 1291 | "properties": [ 1292 | { 1293 | "id": "custom.width", 1294 | "value": 94 1295 | } 1296 | ] 1297 | }, 1298 | { 1299 | "matcher": { 1300 | "id": "byName", 1301 | "options": "Date" 1302 | }, 1303 | "properties": [ 1304 | { 1305 | "id": "custom.width", 1306 | "value": 193 1307 | } 1308 | ] 1309 | }, 1310 | { 1311 | "matcher": { 1312 | "id": "byName", 1313 | "options": "Domain" 1314 | }, 1315 | "properties": [ 1316 | { 1317 | "id": "custom.width", 1318 | "value": 171 1319 | } 1320 | ] 1321 | }, 1322 | { 1323 | "matcher": { 1324 | "id": "byName", 1325 | "options": "Remote User" 1326 | }, 1327 | "properties": [ 1328 | { 1329 | "id": "custom.width", 1330 | "value": 118 1331 | } 1332 | ] 1333 | }, 1334 | { 1335 | "matcher": { 1336 | "id": "byName", 1337 | "options": "Full Request" 1338 | }, 1339 | "properties": [ 1340 | { 1341 | "id": "custom.width", 1342 | "value": 94 1343 | } 1344 | ] 1345 | } 1346 | ] 1347 | }, 1348 | "gridPos": { 1349 | "h": 10, 1350 | "w": 24, 1351 | "x": 0, 1352 | "y": 15 1353 | }, 1354 | "hideTimeOverride": true, 1355 | "id": 17, 1356 | "options": { 1357 | "footer": { 1358 | "fields": "", 1359 | "reducer": [ 1360 | "sum" 1361 | ], 1362 | "show": false 1363 | }, 1364 | "showHeader": true, 1365 | "sortBy": [ 1366 | { 1367 | "desc": true, 1368 | "displayName": "Date" 1369 | } 1370 | ] 1371 | }, 1372 | "pluginVersion": "8.4.11", 1373 | "targets": [ 1374 | { 1375 | "datasource": { 1376 | "type": "influxdb", 1377 | "uid": "${DS_INFLUXDB_(GEOIP2)}" 1378 | }, 1379 | "groupBy": [ 1380 | { 1381 | "params": [ 1382 | "url" 1383 | ], 1384 | "type": "tag" 1385 | }, 1386 | { 1387 | "params": [ 1388 | "ip" 1389 | ], 1390 | "type": "tag" 1391 | }, 1392 | { 1393 | "params": [ 1394 | "city" 
1395 | ], 1396 | "type": "tag" 1397 | }, 1398 | { 1399 | "params": [ 1400 | "country_code" 1401 | ], 1402 | "type": "tag" 1403 | }, 1404 | { 1405 | "params": [ 1406 | "datetime" 1407 | ], 1408 | "type": "tag" 1409 | }, 1410 | { 1411 | "params": [ 1412 | "http_version" 1413 | ], 1414 | "type": "tag" 1415 | }, 1416 | { 1417 | "params": [ 1418 | "method" 1419 | ], 1420 | "type": "tag" 1421 | }, 1422 | { 1423 | "params": [ 1424 | "referrer" 1425 | ], 1426 | "type": "tag" 1427 | }, 1428 | { 1429 | "params": [ 1430 | "remote_user" 1431 | ], 1432 | "type": "tag" 1433 | }, 1434 | { 1435 | "params": [ 1436 | "status_code" 1437 | ], 1438 | "type": "tag" 1439 | }, 1440 | { 1441 | "params": [ 1442 | "connect_time" 1443 | ], 1444 | "type": "tag" 1445 | }, 1446 | { 1447 | "params": [ 1448 | "bytes_sent" 1449 | ], 1450 | "type": "tag" 1451 | }, 1452 | { 1453 | "params": [ 1454 | "country_name" 1455 | ], 1456 | "type": "tag" 1457 | }, 1458 | { 1459 | "params": [ 1460 | "host" 1461 | ], 1462 | "type": "tag" 1463 | }, 1464 | { 1465 | "params": [ 1466 | "request" 1467 | ], 1468 | "type": "tag" 1469 | } 1470 | ], 1471 | "hide": false, 1472 | "limit": "", 1473 | "measurement": "nginx_access_logs", 1474 | "orderByTime": "ASC", 1475 | "policy": "default", 1476 | "query": "SELECT count(\"count\") FROM \"nginx_access_logs\" WHERE $timeFilter GROUP BY \"url\", \"ip\", \"city\", \"country_code\", \"datetime\", \"http_version\", \"method\", \"referrer\", \"remote_user\", \"status_code\", \"connect_time\", \"bytes_sent\" LIMIT 50", 1477 | "rawQuery": false, 1478 | "refId": "A", 1479 | "resultFormat": "table", 1480 | "select": [ 1481 | [ 1482 | { 1483 | "params": [ 1484 | "count" 1485 | ], 1486 | "type": "field" 1487 | }, 1488 | { 1489 | "params": [], 1490 | "type": "distinct" 1491 | } 1492 | ] 1493 | ], 1494 | "slimit": "", 1495 | "tags": [ 1496 | { 1497 | "key": "ip", 1498 | "operator": "!~", 1499 | "value": "/^$excluded_ip$/" 1500 | }, 1501 | { 1502 | "condition": "AND", 1503 | "key": 
"country_code", 1504 | "operator": "=~", 1505 | "value": "/^$country_code$/" 1506 | }, 1507 | { 1508 | "condition": "AND", 1509 | "key": "city", 1510 | "operator": "=~", 1511 | "value": "/^$city$/" 1512 | }, 1513 | { 1514 | "condition": "AND", 1515 | "key": "country_name", 1516 | "operator": "=~", 1517 | "value": "/^$country$/" 1518 | }, 1519 | { 1520 | "condition": "AND", 1521 | "key": "host", 1522 | "operator": "=~", 1523 | "value": "/^$domain$/" 1524 | } 1525 | ] 1526 | } 1527 | ], 1528 | "timeFrom": "$log_interval", 1529 | "title": "Nginx Logs", 1530 | "transformations": [ 1531 | { 1532 | "id": "organize", 1533 | "options": { 1534 | "excludeByName": { 1535 | "Time": true 1536 | }, 1537 | "indexByName": { 1538 | "Time": 0, 1539 | "bytes_sent": 13, 1540 | "city": 2, 1541 | "connect_time": 14, 1542 | "country_code": 4, 1543 | "country_name": 5, 1544 | "datetime": 1, 1545 | "distinct": 16, 1546 | "host": 6, 1547 | "http_version": 10, 1548 | "ip": 3, 1549 | "method": 8, 1550 | "referrer": 12, 1551 | "remote_user": 15, 1552 | "request": 7, 1553 | "status_code": 9, 1554 | "url": 11 1555 | }, 1556 | "renameByName": { 1557 | "bytes_sent": "Bytes Sent", 1558 | "city": "City", 1559 | "connect_time": "Connect Time", 1560 | "country_code": "Country Code", 1561 | "country_name": "Country", 1562 | "datetime": "Date", 1563 | "distinct": "Count", 1564 | "host": "Domain", 1565 | "http_version": "HTTP Version", 1566 | "ip": "IP", 1567 | "method": "Method", 1568 | "referrer": "Referrer", 1569 | "remote_user": "Remote User", 1570 | "request": "Full Request", 1571 | "status_code": "Status Code", 1572 | "url": "URL" 1573 | } 1574 | } 1575 | } 1576 | ], 1577 | "type": "table" 1578 | }, 1579 | { 1580 | "datasource": { 1581 | "uid": "$datasource" 1582 | }, 1583 | "fieldConfig": { 1584 | "defaults": { 1585 | "custom": { 1586 | "align": "auto", 1587 | "displayMode": "auto", 1588 | "filterable": false 1589 | }, 1590 | "mappings": [], 1591 | "thresholds": { 1592 | "mode": "absolute", 1593 | 
"steps": [ 1594 | { 1595 | "color": "green", 1596 | "value": null 1597 | }, 1598 | { 1599 | "color": "red", 1600 | "value": 80 1601 | } 1602 | ] 1603 | } 1604 | }, 1605 | "overrides": [ 1606 | { 1607 | "matcher": { 1608 | "id": "byName", 1609 | "options": "Latitude" 1610 | }, 1611 | "properties": [ 1612 | { 1613 | "id": "custom.width", 1614 | "value": 70 1615 | } 1616 | ] 1617 | }, 1618 | { 1619 | "matcher": { 1620 | "id": "byName", 1621 | "options": "Longitude" 1622 | }, 1623 | "properties": [ 1624 | { 1625 | "id": "custom.width", 1626 | "value": 94 1627 | } 1628 | ] 1629 | }, 1630 | { 1631 | "matcher": { 1632 | "id": "byName", 1633 | "options": "Postal Code" 1634 | }, 1635 | "properties": [ 1636 | { 1637 | "id": "custom.width", 1638 | "value": 97 1639 | } 1640 | ] 1641 | } 1642 | ] 1643 | }, 1644 | "gridPos": { 1645 | "h": 8, 1646 | "w": 24, 1647 | "x": 0, 1648 | "y": 25 1649 | }, 1650 | "hideTimeOverride": true, 1651 | "id": 4, 1652 | "links": [], 1653 | "options": { 1654 | "footer": { 1655 | "fields": "", 1656 | "reducer": [ 1657 | "sum" 1658 | ], 1659 | "show": false 1660 | }, 1661 | "showHeader": true, 1662 | "sortBy": [ 1663 | { 1664 | "desc": true, 1665 | "displayName": "Count" 1666 | } 1667 | ] 1668 | }, 1669 | "pluginVersion": "8.4.11", 1670 | "targets": [ 1671 | { 1672 | "groupBy": [ 1673 | { 1674 | "params": [ 1675 | "city" 1676 | ], 1677 | "type": "tag" 1678 | }, 1679 | { 1680 | "params": [ 1681 | "country_name" 1682 | ], 1683 | "type": "tag" 1684 | }, 1685 | { 1686 | "params": [ 1687 | "country_code" 1688 | ], 1689 | "type": "tag" 1690 | }, 1691 | { 1692 | "params": [ 1693 | "state" 1694 | ], 1695 | "type": "tag" 1696 | }, 1697 | { 1698 | "params": [ 1699 | "postal_code" 1700 | ], 1701 | "type": "tag" 1702 | }, 1703 | { 1704 | "params": [ 1705 | "state_code" 1706 | ], 1707 | "type": "tag" 1708 | }, 1709 | { 1710 | "params": [ 1711 | "latitude" 1712 | ], 1713 | "type": "tag" 1714 | }, 1715 | { 1716 | "params": [ 1717 | "longitude" 1718 | ], 1719 | 
"type": "tag" 1720 | }, 1721 | { 1722 | "params": [ 1723 | "ip" 1724 | ], 1725 | "type": "tag" 1726 | } 1727 | ], 1728 | "measurement": "geoip2influx", 1729 | "orderByTime": "ASC", 1730 | "policy": "default", 1731 | "refId": "A", 1732 | "resultFormat": "table", 1733 | "select": [ 1734 | [ 1735 | { 1736 | "params": [ 1737 | "count" 1738 | ], 1739 | "type": "field" 1740 | }, 1741 | { 1742 | "params": [], 1743 | "type": "sum" 1744 | } 1745 | ] 1746 | ], 1747 | "tags": [ 1748 | { 1749 | "key": "ip", 1750 | "operator": "!~", 1751 | "value": "/^$excluded_ip$/" 1752 | }, 1753 | { 1754 | "condition": "AND", 1755 | "key": "country_name", 1756 | "operator": "=~", 1757 | "value": "/^$country$/" 1758 | }, 1759 | { 1760 | "condition": "AND", 1761 | "key": "country_code", 1762 | "operator": "=~", 1763 | "value": "/^$country_code$/" 1764 | }, 1765 | { 1766 | "condition": "AND", 1767 | "key": "city", 1768 | "operator": "=~", 1769 | "value": "/^$city$/" 1770 | } 1771 | ] 1772 | } 1773 | ], 1774 | "timeFrom": "$geo_interval", 1775 | "title": "Detailed location data", 1776 | "transformations": [ 1777 | { 1778 | "id": "organize", 1779 | "options": { 1780 | "excludeByName": { 1781 | "Time": true 1782 | }, 1783 | "indexByName": {}, 1784 | "renameByName": { 1785 | "Time": "", 1786 | "city": "City", 1787 | "country_code": "Country Code", 1788 | "country_name": "Country", 1789 | "ip": "IP", 1790 | "latitude": "Latitude", 1791 | "longitude": "Longitude", 1792 | "postal_code": "Postal Code", 1793 | "state": "State", 1794 | "state_code": "State (ANSI)", 1795 | "sum": "Count" 1796 | } 1797 | } 1798 | } 1799 | ], 1800 | "type": "table" 1801 | }, 1802 | { 1803 | "aliasColors": { 1804 | "Requests": "red", 1805 | "geoip2influx.sum": "red" 1806 | }, 1807 | "bars": false, 1808 | "dashLength": 10, 1809 | "dashes": false, 1810 | "datasource": { 1811 | "uid": "$datasource" 1812 | }, 1813 | "fieldConfig": { 1814 | "defaults": { 1815 | "links": [] 1816 | }, 1817 | "overrides": [] 1818 | }, 1819 | "fill": 
7, 1820 | "fillGradient": 0, 1821 | "gridPos": { 1822 | "h": 8, 1823 | "w": 13, 1824 | "x": 0, 1825 | "y": 33 1826 | }, 1827 | "hiddenSeries": false, 1828 | "hideTimeOverride": true, 1829 | "id": 23, 1830 | "interval": "", 1831 | "legend": { 1832 | "alignAsTable": false, 1833 | "avg": false, 1834 | "current": false, 1835 | "hideEmpty": false, 1836 | "max": false, 1837 | "min": false, 1838 | "rightSide": false, 1839 | "show": false, 1840 | "sort": "current", 1841 | "sortDesc": true, 1842 | "total": false, 1843 | "values": false 1844 | }, 1845 | "lines": true, 1846 | "linewidth": 2, 1847 | "nullPointMode": "null", 1848 | "options": { 1849 | "alertThreshold": true 1850 | }, 1851 | "percentage": false, 1852 | "pluginVersion": "8.4.11", 1853 | "pointradius": 2, 1854 | "points": false, 1855 | "renderer": "flot", 1856 | "seriesOverrides": [], 1857 | "spaceLength": 10, 1858 | "stack": true, 1859 | "steppedLine": false, 1860 | "targets": [ 1861 | { 1862 | "alias": "$tag_country_name", 1863 | "groupBy": [ 1864 | { 1865 | "params": [ 1866 | "$interval" 1867 | ], 1868 | "type": "time" 1869 | }, 1870 | { 1871 | "params": [ 1872 | "country_name" 1873 | ], 1874 | "type": "tag" 1875 | }, 1876 | { 1877 | "params": [ 1878 | "none" 1879 | ], 1880 | "type": "fill" 1881 | } 1882 | ], 1883 | "measurement": "geoip2influx", 1884 | "orderByTime": "ASC", 1885 | "policy": "default", 1886 | "refId": "A", 1887 | "resultFormat": "time_series", 1888 | "select": [ 1889 | [ 1890 | { 1891 | "params": [ 1892 | "count" 1893 | ], 1894 | "type": "field" 1895 | }, 1896 | { 1897 | "params": [], 1898 | "type": "sum" 1899 | }, 1900 | { 1901 | "params": [], 1902 | "type": "cumulative_sum" 1903 | } 1904 | ] 1905 | ], 1906 | "tags": [ 1907 | { 1908 | "key": "ip", 1909 | "operator": "!~", 1910 | "value": "/^$excluded_ip$/" 1911 | }, 1912 | { 1913 | "condition": "AND", 1914 | "key": "country_name", 1915 | "operator": "=~", 1916 | "value": "/^$country$/" 1917 | }, 1918 | { 1919 | "condition": "AND", 1920 | 
"key": "country_code", 1921 | "operator": "=~", 1922 | "value": "/^$country_code$/" 1923 | }, 1924 | { 1925 | "condition": "AND", 1926 | "key": "city", 1927 | "operator": "=~", 1928 | "value": "/^$city$/" 1929 | } 1930 | ] 1931 | } 1932 | ], 1933 | "thresholds": [], 1934 | "timeFrom": "$geo_interval", 1935 | "timeRegions": [], 1936 | "title": "Request count cumulative ", 1937 | "tooltip": { 1938 | "shared": true, 1939 | "sort": 2, 1940 | "value_type": "individual" 1941 | }, 1942 | "transformations": [], 1943 | "type": "graph", 1944 | "xaxis": { 1945 | "mode": "time", 1946 | "show": true, 1947 | "values": [] 1948 | }, 1949 | "yaxes": [ 1950 | { 1951 | "$$hashKey": "object:874", 1952 | "format": "none", 1953 | "logBase": 1, 1954 | "show": true 1955 | }, 1956 | { 1957 | "$$hashKey": "object:875", 1958 | "format": "short", 1959 | "logBase": 1, 1960 | "show": false 1961 | } 1962 | ], 1963 | "yaxis": { 1964 | "align": false 1965 | } 1966 | }, 1967 | { 1968 | "aliasColors": { 1969 | "Requests": "red", 1970 | "geoip2influx.sum": "red" 1971 | }, 1972 | "bars": false, 1973 | "dashLength": 10, 1974 | "dashes": false, 1975 | "datasource": { 1976 | "uid": "$datasource" 1977 | }, 1978 | "description": "", 1979 | "fieldConfig": { 1980 | "defaults": { 1981 | "links": [] 1982 | }, 1983 | "overrides": [] 1984 | }, 1985 | "fill": 7, 1986 | "fillGradient": 0, 1987 | "gridPos": { 1988 | "h": 8, 1989 | "w": 11, 1990 | "x": 13, 1991 | "y": 33 1992 | }, 1993 | "hiddenSeries": false, 1994 | "hideTimeOverride": true, 1995 | "id": 24, 1996 | "interval": "", 1997 | "legend": { 1998 | "alignAsTable": true, 1999 | "avg": false, 2000 | "current": true, 2001 | "hideEmpty": false, 2002 | "max": false, 2003 | "min": false, 2004 | "rightSide": true, 2005 | "show": false, 2006 | "sort": "current", 2007 | "sortDesc": true, 2008 | "total": false, 2009 | "values": true 2010 | }, 2011 | "lines": true, 2012 | "linewidth": 1, 2013 | "nullPointMode": "null", 2014 | "options": { 2015 | "alertThreshold": true 
2016 | }, 2017 | "percentage": false, 2018 | "pluginVersion": "8.4.11", 2019 | "pointradius": 2, 2020 | "points": false, 2021 | "renderer": "flot", 2022 | "seriesOverrides": [], 2023 | "spaceLength": 10, 2024 | "stack": true, 2025 | "steppedLine": false, 2026 | "targets": [ 2027 | { 2028 | "alias": "$tag_country_code", 2029 | "groupBy": [ 2030 | { 2031 | "params": [ 2032 | "$__interval" 2033 | ], 2034 | "type": "time" 2035 | }, 2036 | { 2037 | "params": [ 2038 | "country_code" 2039 | ], 2040 | "type": "tag" 2041 | }, 2042 | { 2043 | "params": [ 2044 | "none" 2045 | ], 2046 | "type": "fill" 2047 | } 2048 | ], 2049 | "hide": false, 2050 | "measurement": "nginx_access_logs", 2051 | "orderByTime": "ASC", 2052 | "policy": "default", 2053 | "refId": "A", 2054 | "resultFormat": "time_series", 2055 | "select": [ 2056 | [ 2057 | { 2058 | "params": [ 2059 | "bytes_sent" 2060 | ], 2061 | "type": "field" 2062 | }, 2063 | { 2064 | "params": [], 2065 | "type": "sum" 2066 | }, 2067 | { 2068 | "params": [], 2069 | "type": "cumulative_sum" 2070 | } 2071 | ] 2072 | ], 2073 | "tags": [ 2074 | { 2075 | "key": "ip", 2076 | "operator": "!~", 2077 | "value": "/^$excluded_ip$/" 2078 | }, 2079 | { 2080 | "condition": "AND", 2081 | "key": "country_code", 2082 | "operator": "=~", 2083 | "value": "/^$country_code$/" 2084 | }, 2085 | { 2086 | "condition": "AND", 2087 | "key": "city", 2088 | "operator": "=~", 2089 | "value": "/^$city$/" 2090 | }, 2091 | { 2092 | "condition": "AND", 2093 | "key": "country_name", 2094 | "operator": "=~", 2095 | "value": "/^$country$/" 2096 | }, 2097 | { 2098 | "condition": "AND", 2099 | "key": "host", 2100 | "operator": "=~", 2101 | "value": "/^$domain$/" 2102 | } 2103 | ] 2104 | } 2105 | ], 2106 | "thresholds": [], 2107 | "timeFrom": "$log_interval", 2108 | "timeRegions": [], 2109 | "title": "Bytes sum cumulative ", 2110 | "tooltip": { 2111 | "shared": true, 2112 | "sort": 2, 2113 | "value_type": "individual" 2114 | }, 2115 | "transformations": [], 2116 | 
"type": "graph", 2117 | "xaxis": { 2118 | "mode": "time", 2119 | "show": true, 2120 | "values": [] 2121 | }, 2122 | "yaxes": [ 2123 | { 2124 | "$$hashKey": "object:874", 2125 | "format": "decbytes", 2126 | "logBase": 1, 2127 | "show": true 2128 | }, 2129 | { 2130 | "$$hashKey": "object:875", 2131 | "format": "short", 2132 | "logBase": 1, 2133 | "show": false 2134 | } 2135 | ], 2136 | "yaxis": { 2137 | "align": false 2138 | } 2139 | }, 2140 | { 2141 | "collapsed": false, 2142 | "datasource": { 2143 | "type": "influxdb", 2144 | "uid": "${DS_INFLUXDB_(GEOIP2)}" 2145 | }, 2146 | "gridPos": { 2147 | "h": 1, 2148 | "w": 24, 2149 | "x": 0, 2150 | "y": 41 2151 | }, 2152 | "id": 10, 2153 | "panels": [], 2154 | "title": "NGINX LOGs", 2155 | "type": "row" 2156 | }, 2157 | { 2158 | "columns": [], 2159 | "datasource": { 2160 | "uid": "$datasource" 2161 | }, 2162 | "fontSize": "100%", 2163 | "gridPos": { 2164 | "h": 13, 2165 | "w": 24, 2166 | "x": 0, 2167 | "y": 42 2168 | }, 2169 | "hideTimeOverride": true, 2170 | "id": 16, 2171 | "showHeader": true, 2172 | "sort": { 2173 | "col": 3, 2174 | "desc": true 2175 | }, 2176 | "styles": [ 2177 | { 2178 | "$$hashKey": "object:4905", 2179 | "alias": "Time", 2180 | "align": "auto", 2181 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 2182 | "pattern": "Time", 2183 | "type": "hidden" 2184 | }, 2185 | { 2186 | "$$hashKey": "object:4906", 2187 | "alias": "URL", 2188 | "align": "", 2189 | "colors": [ 2190 | "rgba(245, 54, 54, 0.9)", 2191 | "rgba(237, 129, 40, 0.89)", 2192 | "rgba(50, 172, 45, 0.97)" 2193 | ], 2194 | "decimals": 2, 2195 | "link": true, 2196 | "pattern": "url", 2197 | "preserveFormat": false, 2198 | "thresholds": [], 2199 | "type": "string", 2200 | "unit": "short" 2201 | }, 2202 | { 2203 | "$$hashKey": "object:5035", 2204 | "alias": "Count", 2205 | "align": "auto", 2206 | "colorMode": "cell", 2207 | "colors": [ 2208 | "rgba(50, 172, 45, 0.97)", 2209 | "rgba(237, 129, 40, 0.89)", 2210 | "rgba(245, 54, 54, 0.9)" 2211 | ], 2212 | 
"dateFormat": "YYYY-MM-DD HH:mm:ss", 2213 | "decimals": 0, 2214 | "mappingType": 1, 2215 | "pattern": "sum", 2216 | "thresholds": [ 2217 | "500", 2218 | "1000" 2219 | ], 2220 | "type": "number", 2221 | "unit": "none" 2222 | }, 2223 | { 2224 | "$$hashKey": "object:1519", 2225 | "alias": "Referrer", 2226 | "align": "auto", 2227 | "colors": [ 2228 | "rgba(245, 54, 54, 0.9)", 2229 | "rgba(237, 129, 40, 0.89)", 2230 | "rgba(50, 172, 45, 0.97)" 2231 | ], 2232 | "dateFormat": "YYYY-MM-DD HH:mm:ss", 2233 | "decimals": 2, 2234 | "mappingType": 1, 2235 | "pattern": "referrer", 2236 | "thresholds": [], 2237 | "type": "number", 2238 | "unit": "short" 2239 | } 2240 | ], 2241 | "targets": [ 2242 | { 2243 | "groupBy": [ 2244 | { 2245 | "params": [ 2246 | "url" 2247 | ], 2248 | "type": "tag" 2249 | }, 2250 | { 2251 | "params": [ 2252 | "referrer" 2253 | ], 2254 | "type": "tag" 2255 | } 2256 | ], 2257 | "hide": false, 2258 | "limit": "", 2259 | "measurement": "nginx_access_logs", 2260 | "orderByTime": "ASC", 2261 | "policy": "default", 2262 | "refId": "A", 2263 | "resultFormat": "table", 2264 | "select": [ 2265 | [ 2266 | { 2267 | "params": [ 2268 | "count" 2269 | ], 2270 | "type": "field" 2271 | }, 2272 | { 2273 | "params": [], 2274 | "type": "sum" 2275 | } 2276 | ] 2277 | ], 2278 | "slimit": "", 2279 | "tags": [ 2280 | { 2281 | "key": "ip", 2282 | "operator": "!~", 2283 | "value": "/^$excluded_ip$/" 2284 | }, 2285 | { 2286 | "condition": "AND", 2287 | "key": "country_code", 2288 | "operator": "=~", 2289 | "value": "/^$country_code$/" 2290 | }, 2291 | { 2292 | "condition": "AND", 2293 | "key": "city", 2294 | "operator": "=~", 2295 | "value": "/^$city$/" 2296 | }, 2297 | { 2298 | "condition": "AND", 2299 | "key": "country_name", 2300 | "operator": "=~", 2301 | "value": "/^$country$/" 2302 | }, 2303 | { 2304 | "condition": "AND", 2305 | "key": "host", 2306 | "operator": "=~", 2307 | "value": "/^$domain$/" 2308 | } 2309 | ] 2310 | } 2311 | ], 2312 | "timeFrom": "$log_interval", 2313 
| "title": "Top Visited URLs", 2314 | "transform": "table", 2315 | "type": "table-old" 2316 | } 2317 | ], 2318 | "refresh": "1m", 2319 | "schemaVersion": 35, 2320 | "style": "dark", 2321 | "tags": [ 2322 | "nginx", 2323 | "geoip2", 2324 | "python", 2325 | "influxdb" 2326 | ], 2327 | "templating": { 2328 | "list": [ 2329 | { 2330 | "current": { 2331 | "selected": false, 2332 | "text": "InfluxDB (geoip2)", 2333 | "value": "InfluxDB (geoip2)" 2334 | }, 2335 | "hide": 0, 2336 | "includeAll": false, 2337 | "multi": false, 2338 | "name": "datasource", 2339 | "options": [], 2340 | "query": "influxdb", 2341 | "refresh": 1, 2342 | "regex": "", 2343 | "skipUrlSync": false, 2344 | "type": "datasource" 2345 | }, 2346 | { 2347 | "current": { 2348 | "$$hashKey": "object:12341", 2349 | "selected": true, 2350 | "text": "7d", 2351 | "value": "7d" 2352 | }, 2353 | "hide": 0, 2354 | "includeAll": false, 2355 | "label": "Nginx Log Interval", 2356 | "multi": false, 2357 | "name": "log_interval", 2358 | "options": [ 2359 | { 2360 | "$$hashKey": "object:12334", 2361 | "selected": false, 2362 | "text": "1m", 2363 | "value": "1m" 2364 | }, 2365 | { 2366 | "$$hashKey": "object:12335", 2367 | "selected": false, 2368 | "text": "10m", 2369 | "value": "10m" 2370 | }, 2371 | { 2372 | "$$hashKey": "object:12336", 2373 | "selected": false, 2374 | "text": "30m", 2375 | "value": "30m" 2376 | }, 2377 | { 2378 | "$$hashKey": "object:12337", 2379 | "selected": false, 2380 | "text": "1h", 2381 | "value": "1h" 2382 | }, 2383 | { 2384 | "$$hashKey": "object:12338", 2385 | "selected": false, 2386 | "text": "6h", 2387 | "value": "6h" 2388 | }, 2389 | { 2390 | "$$hashKey": "object:12339", 2391 | "selected": false, 2392 | "text": "12h", 2393 | "value": "12h" 2394 | }, 2395 | { 2396 | "$$hashKey": "object:12340", 2397 | "selected": false, 2398 | "text": "1d", 2399 | "value": "1d" 2400 | }, 2401 | { 2402 | "$$hashKey": "object:12341", 2403 | "selected": true, 2404 | "text": "7d", 2405 | "value": "7d" 2406 | },
2407 | { 2408 | "$$hashKey": "object:12342", 2409 | "selected": false, 2410 | "text": "14d", 2411 | "value": "14d" 2412 | }, 2413 | { 2414 | "$$hashKey": "object:12343", 2415 | "selected": false, 2416 | "text": "30d", 2417 | "value": "30d" 2418 | }, 2419 | { 2420 | "$$hashKey": "object:12344", 2421 | "selected": false, 2422 | "text": "now/d", 2423 | "value": "now/d" 2424 | }, 2425 | { 2426 | "$$hashKey": "object:12345", 2427 | "selected": false, 2428 | "text": "now-1d/d", 2429 | "value": "now-1d/d" 2430 | } 2431 | ], 2432 | "query": "1m,10m,30m,1h,6h,12h,1d,7d,14d,30d,now/d,now-1d/d", 2433 | "queryValue": "", 2434 | "skipUrlSync": false, 2435 | "type": "custom" 2436 | }, 2437 | { 2438 | "current": { 2439 | "$$hashKey": "object:11668", 2440 | "selected": true, 2441 | "text": "7d", 2442 | "value": "7d" 2443 | }, 2444 | "hide": 0, 2445 | "includeAll": false, 2446 | "label": "Geo Data Interval", 2447 | "multi": false, 2448 | "name": "geo_interval", 2449 | "options": [ 2450 | { 2451 | "$$hashKey": "object:11661", 2452 | "selected": false, 2453 | "text": "1m", 2454 | "value": "1m" 2455 | }, 2456 | { 2457 | "$$hashKey": "object:11662", 2458 | "selected": false, 2459 | "text": "10m", 2460 | "value": "10m" 2461 | }, 2462 | { 2463 | "$$hashKey": "object:11663", 2464 | "selected": false, 2465 | "text": "30m", 2466 | "value": "30m" 2467 | }, 2468 | { 2469 | "$$hashKey": "object:11664", 2470 | "selected": false, 2471 | "text": "1h", 2472 | "value": "1h" 2473 | }, 2474 | { 2475 | "$$hashKey": "object:11665", 2476 | "selected": false, 2477 | "text": "6h", 2478 | "value": "6h" 2479 | }, 2480 | { 2481 | "$$hashKey": "object:11666", 2482 | "selected": false, 2483 | "text": "12h", 2484 | "value": "12h" 2485 | }, 2486 | { 2487 | "$$hashKey": "object:11667", 2488 | "selected": false, 2489 | "text": "1d", 2490 | "value": "1d" 2491 | }, 2492 | { 2493 | "$$hashKey": "object:11668", 2494 | "selected": true, 2495 | "text": "7d", 2496 | "value": "7d" 2497 | }, 2498 | { 2499 | "$$hashKey": 
"object:11669", 2500 | "selected": false, 2501 | "text": "14d", 2502 | "value": "14d" 2503 | }, 2504 | { 2505 | "$$hashKey": "object:11670", 2506 | "selected": false, 2507 | "text": "30d", 2508 | "value": "30d" 2509 | }, 2510 | { 2511 | "$$hashKey": "object:11671", 2512 | "selected": false, 2513 | "text": "now/d", 2514 | "value": "now/d" 2515 | }, 2516 | { 2517 | "$$hashKey": "object:11672", 2518 | "selected": false, 2519 | "text": "now-1d/d", 2520 | "value": "now-1d/d" 2521 | } 2522 | ], 2523 | "query": "1m,10m,30m,1h,6h,12h,1d,7d,14d,30d,now/d,now-1d/d", 2524 | "queryValue": "", 2525 | "skipUrlSync": false, 2526 | "type": "custom" 2527 | }, 2528 | { 2529 | "current": {}, 2530 | "datasource": { 2531 | "uid": "$datasource" 2532 | }, 2533 | "definition": "SHOW TAG VALUES FROM \"geoip2influx\" WITH KEY = \"country_name\"", 2534 | "hide": 0, 2535 | "includeAll": true, 2536 | "label": "Country", 2537 | "multi": true, 2538 | "name": "country", 2539 | "options": [], 2540 | "query": "SHOW TAG VALUES FROM \"geoip2influx\" WITH KEY = \"country_name\"", 2541 | "refresh": 1, 2542 | "regex": "", 2543 | "skipUrlSync": false, 2544 | "sort": 1, 2545 | "tagValuesQuery": "", 2546 | "tagsQuery": "", 2547 | "type": "query" 2548 | }, 2549 | { 2550 | "current": {}, 2551 | "datasource": { 2552 | "uid": "$datasource" 2553 | }, 2554 | "definition": "SHOW TAG VALUES FROM \"geoip2influx\" WITH KEY = \"country_code\"", 2555 | "hide": 0, 2556 | "includeAll": true, 2557 | "label": "Country Code", 2558 | "multi": true, 2559 | "name": "country_code", 2560 | "options": [], 2561 | "query": "SHOW TAG VALUES FROM \"geoip2influx\" WITH KEY = \"country_code\"", 2562 | "refresh": 1, 2563 | "regex": "", 2564 | "skipUrlSync": false, 2565 | "sort": 1, 2566 | "tagValuesQuery": "", 2567 | "tagsQuery": "", 2568 | "type": "query", 2569 | "useTags": false 2570 | }, 2571 | { 2572 | "allValue": "", 2573 | "current": {}, 2574 | "datasource": { 2575 | "uid": "$datasource" 2576 | }, 2577 | "definition": "SHOW TAG 
VALUES FROM \"geoip2influx\" WITH KEY = \"city\"", 2578 | "hide": 0, 2579 | "includeAll": true, 2580 | "label": "City", 2581 | "multi": true, 2582 | "name": "city", 2583 | "options": [], 2584 | "query": "SHOW TAG VALUES FROM \"geoip2influx\" WITH KEY = \"city\"", 2585 | "refresh": 1, 2586 | "regex": "", 2587 | "skipUrlSync": false, 2588 | "sort": 0, 2589 | "tagValuesQuery": "", 2590 | "tagsQuery": "", 2591 | "type": "query", 2592 | "useTags": false 2593 | }, 2594 | { 2595 | "current": {}, 2596 | "datasource": { 2597 | "uid": "$datasource" 2598 | }, 2599 | "definition": "SHOW TAG VALUES FROM \"nginx_access_logs\" WITH KEY = \"host\"", 2600 | "hide": 0, 2601 | "includeAll": true, 2602 | "label": "Domains", 2603 | "multi": false, 2604 | "name": "domain", 2605 | "options": [], 2606 | "query": "SHOW TAG VALUES FROM \"nginx_access_logs\" WITH KEY = \"host\"", 2607 | "refresh": 1, 2608 | "regex": "", 2609 | "skipUrlSync": false, 2610 | "sort": 0, 2611 | "tagValuesQuery": "", 2612 | "tagsQuery": "", 2613 | "type": "query", 2614 | "useTags": false 2615 | }, 2616 | { 2617 | "current": { 2618 | "selected": true, 2619 | "text": "", 2620 | "value": "" 2621 | }, 2622 | "hide": 0, 2623 | "label": "Exclude IP", 2624 | "name": "excluded_ip", 2625 | "options": [ 2626 | { 2627 | "selected": true, 2628 | "text": "", 2629 | "value": "" 2630 | } 2631 | ], 2632 | "query": "", 2633 | "skipUrlSync": false, 2634 | "type": "textbox" 2635 | } 2636 | ] 2637 | }, 2638 | "time": { 2639 | "from": "now/d", 2640 | "to": "now" 2641 | }, 2642 | "timepicker": { 2643 | "refresh_intervals": [ 2644 | "10s", 2645 | "30s", 2646 | "1m", 2647 | "5m", 2648 | "15m", 2649 | "30m", 2650 | "1h", 2651 | "2h", 2652 | "1d" 2653 | ], 2654 | "time_options": [ 2655 | "5m", 2656 | "15m", 2657 | "1h", 2658 | "6h", 2659 | "12h", 2660 | "24h", 2661 | "2d", 2662 | "7d", 2663 | "30d" 2664 | ] 2665 | }, 2666 | "timezone": "", 2667 | "title": "NGINX LOGS & GEO MAP", 2668 | "uid": "1lcjN0bik3", 2669 | "version": 12, 2670 | 
"weekStart": "" 2671 | } -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | geoip2==4.8.0 2 | geohash2==1.1 3 | influxdb==5.3.2 4 | IPy==1.01 5 | python-dotenv 6 | influxdb-client==1.44.0 7 | -------------------------------------------------------------------------------- /root/etc/crontabs/root: -------------------------------------------------------------------------------- 1 | # do daily/weekly/monthly maintenance 2 | # min hour day month weekday command 3 | */15 * * * * run-parts /etc/periodic/15min 4 | 0 * * * * run-parts /etc/periodic/hourly 5 | 0 2 * * * run-parts /etc/periodic/daily 6 | 0 3 * * 6 run-parts /etc/periodic/weekly 7 | 0 5 1 * * run-parts /etc/periodic/monthly -------------------------------------------------------------------------------- /root/etc/logrotate.d/geoip2influx: -------------------------------------------------------------------------------- 1 | /config/log/geoip2influx/geoip2influx.log { 2 | daily 3 | rotate 7 4 | size 25M 5 | compress 6 | delaycompress 7 | nodateext 8 | missingok 9 | notifempty 10 | postrotate 11 | s6-svc -1 /run/service/svc-geoip2influx 12 | endscript 13 | su abc abc 14 | } -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/init-adduser/branding: -------------------------------------------------------------------------------- 1 | 2 | ██████╗ ███████╗ ██████╗ ██╗██████╗ ██████╗ ██╗███╗ ██╗███████╗██╗ ██╗ ██╗██╗ ██╗ 3 | ██╔════╝ ██╔════╝██╔═══██╗██║██╔══██╗╚════██╗██║████╗ ██║██╔════╝██║ ██║ ██║╚██╗██╔╝ 4 | ██║ ███╗█████╗ ██║ ██║██║██████╔╝ █████╔╝██║██╔██╗ ██║█████╗ ██║ ██║ ██║ ╚███╔╝ 5 | ██║ ██║██╔══╝ ██║ ██║██║██╔═══╝ ██╔═══╝ ██║██║╚██╗██║██╔══╝ ██║ ██║ ██║ ██╔██╗ 6 | ╚██████╔╝███████╗╚██████╔╝██║██║ ███████╗██║██║ ╚████║██║ ███████╗╚██████╔╝██╔╝ ██╗ 7 | ╚═════╝ ╚══════╝ ╚═════╝ ╚═╝╚═╝ ╚══════╝╚═╝╚═╝ ╚═══╝╚═╝ ╚══════╝ 
╚═════╝ ╚═╝ ╚═╝ 8 | 9 | 10 | Made by @gilbN 11 | https://github.com/GilbN/geoip2influx 12 | -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/init-geoip2influx-setup/dependencies.d/init-maxmind-setup: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GilbN/geoip2influx/530b4cbe439a376594f868d157e010265dd744c6/root/etc/s6-overlay/s6-rc.d/init-geoip2influx-setup/dependencies.d/init-maxmind-setup -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/init-geoip2influx-setup/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/with-contenv bash 2 | # shellcheck shell=bash 3 | 4 | echo -e "[init-geoip2influx-setup] Starting" 5 | 6 | echo -e "[init-geoip2influx-setup] Variables set:\\n\\n 7 | INFLUX_HOST=${INFLUX_HOST}\\n\ 8 | INFLUX_HOST_PORT=${INFLUX_HOST_PORT}\\n\ 9 | INFLUX_DATABASE=${INFLUX_DATABASE}\\n\ 10 | INFLUX_USER=${INFLUX_USER}\\n\ 11 | INFLUX_PASS=${INFLUX_PASS}\\n\ 12 | INFLUX_RETENTION=${INFLUX_RETENTION}\\n\ 13 | INFLUX_SHARD=${INFLUX_SHARD}\\n\\n 14 | INFLUXDB_V2_TOKEN=${INFLUXDB_V2_TOKEN}\\n\ 15 | INFLUXDB_V2_URL=${INFLUXDB_V2_URL}\\n\ 16 | INFLUXDB_V2_ORG=${INFLUXDB_V2_ORG}\\n\ 17 | INFLUXDB_V2_BUCKET=${INFLUXDB_V2_BUCKET}\\n\ 18 | INFLUXDB_V2_RETENTION=${INFLUXDB_V2_RETENTION}\\n\ 19 | INFLUXDB_V2_DEBUG=${INFLUXDB_V2_DEBUG}\\n\ 20 | INFLUXDB_V2_BATCHING=${INFLUXDB_V2_BATCHING}\\n\ 21 | INFLUXDB_V2_BATCH_SIZE=${INFLUXDB_V2_BATCH_SIZE}\\n\ 22 | INFLUXDB_V2_FLUSH_INTERVAL=${INFLUXDB_V2_FLUSH_INTERVAL}\\n\\n 23 | GEO_MEASUREMENT=${GEO_MEASUREMENT}\\n\ 24 | LOG_MEASUREMENT=${LOG_MEASUREMENT}\\n\ 25 | NGINX_LOG_PATH=${NGINX_LOG_PATH}\\n\ 26 | SEND_NGINX_LOGS=${SEND_NGINX_LOGS}\\n\ 27 | GEOIP2INFLUX_LOG_LEVEL=${GEOIP2INFLUX_LOG_LEVEL}\\n\ 28 | GEOIP2INFLUX_LOG_PATH=${GEOIP2INFLUX_LOG_PATH}\\n\ 29 | GEOIP_DB_PATH=${GEOIP_DB_PATH}\\n\ 
30 | USE_INFLUXDB_V2=${USE_INFLUXDB_V2}\\n\ 31 | MAXMINDDB_USER_ID=${MAXMINDDB_USER_ID}\\n\ 32 | MAXMINDDB_LICENSE_KEY=${MAXMINDDB_LICENSE_KEY}\\n" 33 | 34 | mkdir -p \ 35 | /config/log/geoip2influx 36 | cp -rf /geoip2influx /config/geoip2db 37 | cp -f /geoip2influx/run.py /config/geoip2db 38 | # move old log if needed 39 | if [ -f /config/geoip2db/geoip2influx.log ]; then 40 | echo -e "[init-geoip2influx-setup] Moving old log" 41 | mv /config/geoip2db/geoip2influx.log /config/log/geoip2influx 42 | fi 43 | 44 | lsiown -R abc:abc /config/geoip2db 45 | chmod +x /config/geoip2db/run.py 46 | chmod -R 0644 /etc/logrotate.d 47 | 48 | echo -e "[init-geoip2influx-setup] Finished" -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/init-geoip2influx-setup/type: -------------------------------------------------------------------------------- 1 | oneshot -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/init-geoip2influx-setup/up: -------------------------------------------------------------------------------- 1 | /etc/s6-overlay/s6-rc.d/init-geoip2influx-setup/run -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/init-maxmind-setup/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/with-contenv bash 2 | 3 | echo -e "[init-maxmind-setup] init started" 4 | 5 | # create GeoIP2 folder symlink 6 | mkdir -p \ 7 | /config/geoip2db 8 | if [[ -d /var/lib/libmaxminddb ]] && [[ ! -L /var/lib/libmaxminddb ]]; then 9 | rm -rf /var/lib/libmaxminddb 10 | fi 11 | if [[ ! 
-d /var/lib/libmaxminddb ]]; then 12 | ln -s /config/geoip2db /var/lib/libmaxminddb 13 | fi 14 | # check GeoIP2 database 15 | if [[ -n "${MAXMINDDB_USER_ID}" ]]; then 16 | sed -i "s|.*MAXMINDDB_USER_ID.*|MAXMINDDB_USER_ID=\"${MAXMINDDB_USER_ID}\"|g" /etc/libmaxminddb.cron.conf 17 | else 18 | echo -e "[init-maxmind-setup] Starting with Alpine 3.20 rebase, MaxMindDB now requires setting the env var MAXMINDDB_USER_ID with your account's user id." 19 | fi 20 | if [[ -n "${MAXMINDDB_LICENSE_KEY}" ]]; then 21 | sed -i "s|.*MAXMINDDB_LICENSE_KEY.*|MAXMINDDB_LICENSE_KEY=\"${MAXMINDDB_LICENSE_KEY}\"|g" /etc/libmaxminddb.cron.conf 22 | if [[ ! -f /var/lib/libmaxminddb/GeoLite2-City.mmdb ]]; then 23 | echo -e "[init-maxmind-setup] Downloading GeoIP2 City database." 24 | /etc/periodic/weekly/libmaxminddb 25 | fi 26 | elif [[ -f /var/lib/libmaxminddb/GeoLite2-City.mmdb ]]; then 27 | echo -e "[init-maxmind-setup] Currently using the user provided GeoLite2-City.mmdb.\nIf you want to enable weekly auto-updates of the database, retrieve a free license key from MaxMind,\nand add a new env variable \"MAXMINDDB_LICENSE_KEY\", set to your license key." 28 | else 29 | echo -e "[init-maxmind-setup] Starting 2019/12/30, GeoIP2 databases require personal license key to download. Please retrieve a free license key from MaxMind,\nand add a new env variable \"MAXMINDDB_LICENSE_KEY\", set to your license key." 
30 | fi 31 | 32 | # permissions 33 | lsiown -R abc:abc /config/geoip2db 34 | echo "[init-maxmind-setup] init finished" -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/init-maxmind-setup/type: -------------------------------------------------------------------------------- 1 | oneshot -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/init-maxmind-setup/up: -------------------------------------------------------------------------------- 1 | /etc/s6-overlay/s6-rc.d/init-maxmind-setup/run -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/svc-geoip2influx/dependencies.d/init-geoip2influx-setup: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GilbN/geoip2influx/530b4cbe439a376594f868d157e010265dd744c6/root/etc/s6-overlay/s6-rc.d/svc-geoip2influx/dependencies.d/init-geoip2influx-setup -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/svc-geoip2influx/dependencies.d/init-services: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GilbN/geoip2influx/530b4cbe439a376594f868d157e010265dd744c6/root/etc/s6-overlay/s6-rc.d/svc-geoip2influx/dependencies.d/init-services -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/svc-geoip2influx/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/with-contenv bash 2 | # shellcheck shell=bash 3 | 4 | exec \ 5 | python3 /config/geoip2db/run.py -------------------------------------------------------------------------------- /root/etc/s6-overlay/s6-rc.d/svc-geoip2influx/type: -------------------------------------------------------------------------------- 1 | longrun 
#!/usr/bin/env python3
"""Entry point for GeoIP2Influx.

Configures logging, installs a SIGTERM handler, then starts the
:class:`LogParser` loop that tails the nginx access log and ships
geo/log metrics to InfluxDB.
"""

import logging
import os
import signal
import sys

from dotenv import load_dotenv

from geoip2influx import LogParser, configure_logging

# Load .env before any os.getenv() call so env-based config is visible.
load_dotenv()

logger = logging.getLogger("g2i")

# Set in __main__ once the parser is constructed. Kept at module level so
# handle_sigterm() never hits a NameError when SIGTERM arrives before (or
# during) LogParser() construction.
parser: LogParser | None = None


def handle_sigterm(signum, frame) -> None:
    """Handle SIGTERM: close the InfluxDB client and exit with status 0.

    :param signum: The received signal number (unused, required by signal API).
    :param frame: The current stack frame (unused, required by signal API).
    """
    logger.info("Received SIGTERM. Exiting GeoIP2Influx.")
    if parser is not None:
        try:
            parser.client.influx.close()
        except Exception:
            # Best-effort close; log but still exit cleanly.
            logger.exception("Error closing InfluxDB client.")
        logger.info("Parsed %d log line(s).", parser.parsed_lines)
    sys.exit(0)


if __name__ == "__main__":
    try:
        configure_logging(os.getenv("GEOIP2INFLUX_LOG_LEVEL", "debug"))
        signal.signal(signal.SIGTERM, handle_sigterm)
        logger.info("Starting GeoIP2Influx.")
        parser = LogParser()
        parser.run()
    except KeyboardInterrupt:
        logger.info("Exiting GeoIP2Influx.")
        # parser may still be None if interrupted before construction.
        if parser is not None:
            logger.info("Parsed %d log line(s).", parser.parsed_lines)
        sys.exit(0)
    except Exception:
        logger.exception("Error running parser.")
        sys.exit(1)
"""Tests for the geoip2influx log-parsing pipeline.

Exercises the regex patterns from ``geoip2influx.constants`` against the
fixture log files in ``tests/``, and the ``LogParser`` metric builders
against the bundled ``tests/GeoLite2-City.mmdb`` database.
"""
import re
import pytest
import datetime

from geoip2.database import Reader
from geoip2influx.constants import ipv4_pattern, ipv6_pattern, Rgx
from geoip2influx import LogParser

# Fixture log files used by the tests below.
VALID_LOG_PATH = "tests/valid_ipv4_log.txt"
INVALID_LOG_PATH = "tests/invalid_logs.txt"
# Public IPv6 address that resolves to the US in the bundled GeoLite2 DB.
TEST_IPV6 = "2607:f0d0:1002:51::4"

# Expected output of LogParser.create_geo_metrics(TEST_IPV6) when looked up
# against tests/GeoLite2-City.mmdb with hostname "localhost".
test_geo_metrics: list[dict] = [{'tags': {'geohash': '9ydqy025w0qn', 'ip': '2607:f0d0:1002:51::4', 'host': 'localhost', 'country_code': 'US', 'country_name': 'United States', 'state': '-', 'state_code': '-', 'city': '-', 'postal_code': '-', 'latitude': 37.751, 'longitude': -97.822}, 'fields': {'count': 1}, 'measurement': 'geoip2influx'}]
# NOTE(review): not referenced by any assertion below; also mixes
# country_code 'HK' with country_name 'United States', and 'host' carries
# surrounding spaces — confirm intent before using this fixture in a test.
test_log_metrics: list[dict] = [{'tags': {'ip': '2607:f0d0:1002:51::4', 'datetime': datetime.datetime(2024, 8, 3, 13, 14, 23, tzinfo=datetime.timezone(datetime.timedelta(seconds=7200))), 'remote_user': '-', 'method': 'GET', 'referrer': '/wp-includes/Text/about.php', 'host': ' yourdomain.com ', 'http_version': 'HTTP/2.0', 'status_code': '404', 'bytes_sent': '36', 'url': '-', 'user_agent': '-', 'request_time': '0.002', 'connect_time': '0.000', 'city': 'Hong Kong', 'country_code': 'HK', 'country_name': 'United States'}, 'fields': {'count': 1, 'bytes_sent': 36, 'request_time': 0.002, 'connect_time': 0.0}, 'measurement': 'nginx_access_logs'}]

@pytest.fixture
def load_valid_ipv4_log() -> list[str]:
    """Load the contents of the valid IPv4 log file."""
    with open('tests/valid_ipv4_log.txt', "r", encoding="utf-8") as f:
        return f.readlines()

@pytest.fixture
def load_valid_ipv6_log() -> list[str]:
    """Load the contents of the valid IPv6 log file."""
    with open('tests/valid_ipv6_log.txt', "r", encoding="utf-8") as f:
        return f.readlines()

@pytest.fixture
def load_invalid_logs() -> list[str]:
    """Load the contents of the invalid log file."""
    with open('tests/invalid_logs.txt', "r", encoding="utf-8") as f:
        return f.readlines()

@pytest.fixture
def ipv4_log_pattern() -> re.Pattern[str]:
    """Return the regular expression pattern for an IPv4 log line."""
    return ipv4_pattern()

@pytest.fixture
def ipv6_log_pattern() -> re.Pattern[str]:
    """Return the regular expression pattern for an IPv6 log line."""
    return ipv6_pattern()

@pytest.fixture
def log_parser() -> LogParser:
    """Return an instance of the LogParser class.

    Built with auto_init=False so no Influx/env setup runs; the hostname and
    GeoIP reader are injected manually to point at the test database.
    """
    parser = LogParser(auto_init=False)
    parser.hostname = "localhost"
    parser.geoip_reader = Reader("tests/GeoLite2-City.mmdb")
    return parser

def test_regex_tester_ipv4(load_valid_ipv4_log: list[str], ipv4_log_pattern: re.Pattern[str]) -> None:
    """Test the regex tester for IPv4 log lines."""
    # Every line in the valid-IPv4 fixture must match the IPv4 pattern.
    for line in load_valid_ipv4_log:
        assert bool(ipv4_log_pattern.match(line)) is True

def test_regex_tester_ipv6(load_valid_ipv6_log: list[str], ipv6_log_pattern: re.Pattern[str]) -> None:
    """Test the regex tester for IPv6 log lines."""
    # Every line in the valid-IPv6 fixture must match the IPv6 pattern.
    for line in load_valid_ipv6_log:
        assert bool(ipv6_log_pattern.match(line)) is True

def test_regex_tester_invalid(load_invalid_logs: list[str], ipv4_log_pattern: re.Pattern[str], ipv6_log_pattern: re.Pattern[str]) -> None:
    """Test the regex tester for invalid log lines."""
    # Invalid lines (plain combined-format logs) must match neither pattern.
    for line in load_invalid_logs:
        assert bool(ipv4_log_pattern.match(line)) is False
        assert bool(ipv6_log_pattern.match(line)) is False

def test_get_ip_type(log_parser: LogParser) -> None:
    """Test the get_ip_type function."""
    private_ip = "10.10.10.1"
    public_ip = "52.53.54.55"
    assert log_parser.get_ip_type(private_ip) == "PRIVATE"
    assert log_parser.get_ip_type(public_ip) == "PUBLIC"

def test_get_ip_type_invalid(log_parser: LogParser) -> None:
    """Test the get_ip_type function with an invalid IP address."""
    # 256 is out of octet range, so classification returns an empty string.
    invalid_ip = "10.10.10.256"
    assert log_parser.get_ip_type(invalid_ip) == ""

def test_create_geo_metrics(log_parser: LogParser) -> None:
    """Test the create_geo_metrics function."""
    # Full structural equality against the expected metrics fixture above.
    assert log_parser.create_geo_metrics(TEST_IPV6) == test_geo_metrics

def test_create_log_metrics(log_parser: LogParser, load_valid_ipv6_log: list[str]) -> None:
    """Test the create_log_metrics function."""
    test_line: str = load_valid_ipv6_log[0]
    matched: re.Match[str] | None = log_parser.validate_log_line(test_line)
    log_metrics: list[dict] = log_parser.create_log_metrics(matched, TEST_IPV6)
    assert log_metrics[0]["tags"]["ip"] == TEST_IPV6
    assert log_metrics[0]["tags"]["city"] == "Hong Kong"
    assert log_metrics[0]["tags"]["country_code"] == "HK"
    assert log_metrics[0]["tags"]["status_code"] == "404"
    assert log_metrics[0]["measurement"] == "nginx_access_logs"
162.158.114.218 - - [03/Aug/2024:13:14:25 +0200]"GET /wp-includes/ID3/about.php HTTP/2.0" 404 36"-" yourdomain.com "-""0.002" "0.001""Hong Kong" "HK" 12 | 65.49.1.65 - - [03/Aug/2024:12:23:48 +0200]"\x16\x03\x01\x00{\x01\x00\x00w\x03\x03T\x8AW;\xE5\x91\xED\xDD\x0B\xA3V\x98\x9F/\x05,kQ\x13\xB6\xF9{\x95.\x13\x1A4s0\xEC,\xA5\x00\x00\x1A\xC0/\xC0+\xC0\x11\xC0\x07\xC0\x13\xC0\x09\xC0\x14\xC0" 400 150"-" _ "-""0.152" "-""-" "US" 13 | -------------------------------------------------------------------------------- /tests/valid_ipv6_log.txt: -------------------------------------------------------------------------------- 1 | 2607:f0d0:1002:51::4 - - [03/Aug/2024:13:14:23 +0200]"GET /wp-includes/Text/about.php HTTP/2.0" 404 36"-" yourdomain.com "-""0.002" "0.000""Hong Kong" "HK" 2 | 2607:f0d0:1002:51::4 - - [03/Aug/2024:13:14:23 +0200]"GET /wp-includes/ID3/about.php HTTP/1.1" 301 162"-" yourdomain.com "-""0.000" "-""Hong Kong" "HK" 3 | 2607:f0d0:1002:51::4 - - [03/Aug/2024:13:14:25 +0200]"GET /wp-includes/ID3/about.php HTTP/2.0" 404 36"-" yourdomain.com "-""0.002" "0.001""Hong Kong" "HK" 4 | 2607:f0d0:1002:51::4 - - [03/Aug/2024:12:23:48 +0200]"\x16\x03\x01\x00{\x01\x00\x00w\x03\x03T\x8AW;\xE5\x91\xED\xDD\x0B\xA3V\x98\x9F/\x05,kQ\x13\xB6\xF9{\x95.\x13\x1A4s0\xEC,\xA5\x00\x00\x1A\xC0/\xC0+\xC0\x11\xC0\x07\xC0\x13\xC0\x09\xC0\x14\xC0" 400 150"-" _ "-""0.152" "-""-" "US" 5 | --------------------------------------------------------------------------------