├── .formatter.exs ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE.md ├── README.md ├── _config.yml ├── bench └── logger_json_overhead.exs ├── config ├── config.exs └── test.exs ├── coveralls.json ├── lib ├── logger_json.ex └── logger_json │ ├── ecto.ex │ ├── formatter.ex │ ├── formatter │ ├── code.ex │ ├── datetime.ex │ ├── map_builder.ex │ ├── message.ex │ ├── metadata.ex │ ├── plug.ex │ └── redactor_encoder.ex │ ├── formatters │ ├── basic.ex │ ├── datadog.ex │ ├── elastic.ex │ └── google_cloud.ex │ ├── plug.ex │ ├── redactor.ex │ └── redactors │ └── redact_keys.ex ├── mix.exs ├── mix.lock └── test ├── logger_json ├── ecto_test.exs ├── formatter │ ├── code_test.exs │ ├── message_test.exs │ ├── metadata_test.exs │ └── redactor_encoder_test.exs ├── formatter_test.exs ├── formatters │ ├── basic_test.exs │ ├── datadog_test.exs │ ├── elastic_test.exs │ └── google_cloud_test.exs └── plug_test.exs ├── logger_json_test.exs ├── support ├── crashing_gen_server.ex ├── logger_case.ex └── name_struct.ex └── test_helper.exs /.formatter.exs: -------------------------------------------------------------------------------- 1 | [ 2 | import_deps: [:stream_data], 3 | inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"], 4 | line_length: 120 5 | ] 6 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Elixir 2 | 3 | on: 4 | push: 5 | branches: [ "master" ] 6 | pull_request: 7 | branches: [ "master" ] 8 | 9 | permissions: 10 | contents: read 11 | 12 | jobs: 13 | build: 14 | name: Build and Test 15 | runs-on: ubuntu-latest 16 | permissions: 17 | # required by test reporter 18 | pull-requests: write 19 | checks: write 20 | issues: write 21 | statuses: write 22 | strategy: 23 | matrix: 24 | include: 25 | - otp-version: 24.3 26 | elixir-version: 1.16 27 | - otp-version: 25 28 | elixir-version: 1.15 29 | - otp-version: 26 
30 | elixir-version: 1.16 31 | - otp-version: 27 32 | elixir-version: 1.17 33 | - otp-version: 27 34 | elixir-version: 1.18 35 | check-formatted: true 36 | report-coverage: true 37 | steps: 38 | - uses: actions/checkout@v3 39 | - name: Set up Elixir 40 | uses: erlef/setup-beam@v1 41 | with: 42 | elixir-version: ${{ matrix.elixir-version }} 43 | otp-version: ${{ matrix.otp-version }} 44 | - name: Restore dependencies cache 45 | uses: actions/cache@v3 46 | with: 47 | path: | 48 | deps 49 | _build 50 | key: deps-${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}-elixir-${{ matrix.elixir-version }}-otp-${{ matrix.otp-version }} 51 | - name: Install and compile dependencies 52 | env: 53 | MIX_ENV: test 54 | run: mix do deps.get, deps.compile 55 | - name: Make sure code is formatted 56 | env: 57 | MIX_ENV: test 58 | if: ${{ matrix.check-formatted == true }} 59 | run: mix format --check-formatted 60 | - name: Run tests 61 | env: 62 | MIX_ENV: test 63 | run: mix test --exclude pending 64 | - name: Test Report 65 | uses: dorny/test-reporter@v1 66 | if: (success() || failure()) && github.event_name == 'push' 67 | with: 68 | name: Mix Tests on Elixir ${{ matrix.elixir-version }} / OTP ${{ matrix.otp-version }} 69 | path: _build/test/lib/logger_json/test-junit-report.xml 70 | reporter: java-junit 71 | - name: Report code coverage 72 | env: 73 | MIX_ENV: test 74 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 75 | if: ${{ matrix.report-coverage == true }} 76 | run: mix coveralls.github 77 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps 9 | 10 | # Where 3rd-party dependencies like ExDoc output generated docs. 
11 | /doc 12 | 13 | # If the VM crashes, it generates a dump, let's ignore it too. 14 | erl_crash.dump 15 | 16 | # Also ignore archive artifacts (built via "mix archive.build"). 17 | *.ez 18 | 19 | # Ignore package tarball (built via "mix hex.build"). 20 | logger_json-*.tar 21 | 22 | # Temporary files for e.g. tests. 23 | /tmp/ 24 | 25 | # Don't commit benchmark snapshots 26 | bench/snapshots 27 | 28 | # Don't commit editor configs 29 | .idea 30 | *.iws 31 | /out/ 32 | atlassian-ide-plugin.xml 33 | *.tmlanguage.cache 34 | *.tmPreferences.cache 35 | *.stTheme.cache 36 | *.sublime-workspace 37 | sftp-config.json 38 | GitHub.sublime-settings 39 | .tags 40 | .tags_sorted_by_file 41 | .vagrant 42 | .DS_Store 43 | 44 | # Ignore released binaries 45 | rel/*/ 46 | !rel/config.exs 47 | .deliver 48 | 49 | # Don't commit file uploads 50 | uploads/ 51 | !uploads/.gitkeep 52 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Andrii Dryga 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LoggerJSON 2 | 3 | [![Build Status](https://github.com/Nebo15/logger_json/actions/workflows/ci.yml/badge.svg)](https://github.com/Nebo15/logger_json/actions/workflows/ci.yml) 4 | [![Coverage Status](https://coveralls.io/repos/github/Nebo15/logger_json/badge.svg?branch=master)](https://coveralls.io/github/Nebo15/logger_json?branch=master) 5 | [![Module Version](https://img.shields.io/hexpm/v/logger_json.svg)](https://hex.pm/packages/logger_json) 6 | [![Hex Docs](https://img.shields.io/badge/hex-docs-lightgreen.svg)](https://hexdocs.pm/logger_json/) 7 | [![Hex Download Total](https://img.shields.io/hexpm/dt/logger_json.svg)](https://hex.pm/packages/logger_json) 8 | [![License](https://img.shields.io/hexpm/l/logger_json.svg)](https://github.com/Nebo15/logger_json/blob/master/LICENSE.md) 9 | 10 | A collection of formatters and utilities for JSON-based logging for various cloud tools and platforms. 11 | 12 | ## Supported formatters 13 | 14 | - [`LoggerJSON.Formatters.Basic`](https://hexdocs.pm/logger_json/LoggerJSON.Formatters.Basic.html) - a basic JSON formatter that logs messages in a structured, but generic format, can be used with any JSON-based logging system. 15 | 16 | - [`LoggerJSON.Formatters.GoogleCloud`](https://hexdocs.pm/logger_json/LoggerJSON.Formatters.GoogleCloud.html) - a formatter that logs messages in a structured format that can be consumed by Google Cloud Logger and Google Cloud Error Reporter. 
17 | 18 | - [`LoggerJSON.Formatters.Datadog`](https://hexdocs.pm/logger_json/LoggerJSON.Formatters.Datadog.html) - a formatter that logs messages in a structured format that can be consumed by Datadog. 19 | 20 | - [`LoggerJSON.Formatters.Elastic`](https://hexdocs.pm/logger_json/LoggerJSON.Formatters.Elastic.html) - a formatter that logs messages in a structured format that conforms to the [Elastic Common Schema (ECS)](https://www.elastic.co/guide/en/ecs/8.11/ecs-reference.html), so it can be consumed by ElasticSearch, LogStash, FileBeat and Kibana. 21 | 22 | ## Installation 23 | 24 | Add `logger_json` to your list of dependencies in `mix.exs`: 25 | 26 | ```elixir 27 | def deps do 28 | [ 29 | # ... 30 | {:logger_json, "~> 7.0"} 31 | # ... 32 | ] 33 | end 34 | ``` 35 | 36 | and install it running `mix deps.get`. 37 | 38 | Then, enable the formatter in your `runtime.exs`: 39 | 40 | ```elixir 41 | config :logger, :default_handler, 42 | formatter: LoggerJSON.Formatters.Basic.new(metadata: [:request_id]) 43 | ``` 44 | 45 | or inside your application code (eg. in your `application.ex`): 46 | 47 | ```elixir 48 | formatter = LoggerJSON.Formatters.Basic.new(metadata: :all) 49 | :logger.update_handler_config(:default, :formatter, formatter) 50 | ``` 51 | 52 | or inside your `config.exs` (notice that `new/1` is not available here 53 | and tuple format must be used): 54 | 55 | ```elixir 56 | config :logger, :default_handler, 57 | formatter: {LoggerJSON.Formatters.Basic, metadata: [:request_id]} 58 | ``` 59 | 60 | You might also want to format the log messages when migrations are running: 61 | 62 | ```elixir 63 | config :domain, MyApp.Repo, 64 | # ... 
65 | start_apps_before_migration: [:logger_json] 66 | ``` 67 | 68 | And you might want to make logging level configurable using a `LOG_LEVEL` environment variable (in `application.ex`): 69 | 70 | ```elixir 71 | LoggerJSON.configure_log_level_from_env!() 72 | ``` 73 | 74 | Additionally, you may also try [redirecting otp reports to Logger](https://hexdocs.pm/logger/Logger.html#module-configuration) (see "Configuration" section). 75 | 76 | ## Configuration 77 | 78 | Configuration can be set using the 2nd element of the tuple of the `:formatter` option in `Logger` configuration. 79 | For example in `config.exs`: 80 | 81 | ```elixir 82 | config :logger, :default_handler, 83 | formatter: LoggerJSON.Formatters.GoogleCloud.new(metadata: :all, project_id: "logger-101") 84 | ``` 85 | 86 | or during runtime: 87 | 88 | ```elixir 89 | formatter = LoggerJSON.Formatters.Basic.new(%{metadata: {:all_except, [:conn]}}) 90 | :logger.update_handler_config(:default, :formatter, formatter) 91 | ``` 92 | 93 | By default, `LoggerJSON` is using `Jason` as the JSON encoder. If you use Elixir 1.18 or later, you can 94 | use the built-in `JSON` module as the encoder. To do this, you need to set the `:encoder` option in your 95 | `config.exs` file. This setting is only available at compile-time: 96 | 97 | config :logger_json, encoder: JSON 98 | 99 | ## Docs 100 | 101 | The docs can be found at [https://hexdocs.pm/logger_json](https://hexdocs.pm/logger_json). 
102 | 103 | ## Examples 104 | 105 | ### Basic 106 | 107 | ```json 108 | { 109 | "message": "Hello", 110 | "metadata": { 111 | "domain": ["elixir"] 112 | }, 113 | "severity": "notice", 114 | "time": "2024-04-11T21:31:01.403Z" 115 | } 116 | ``` 117 | 118 | ### Google Cloud Logger 119 | 120 | Follows the [Google Cloud Logger LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) format, 121 | for more details see [special fields in structured payloads](https://cloud.google.com/logging/docs/agent/configuration#special_fields_in_structured_payloads). 122 | 123 | ```json 124 | { 125 | "logging.googleapis.com/trace": "projects/my-projectid/traces/0679686673a", 126 | "logging.googleapis.com/spanId": "000000000000004a", 127 | "logging.googleapis.com/operation": { 128 | "pid": "#PID<0.29081.0>" 129 | }, 130 | "logging.googleapis.com/sourceLocation": { 131 | "file": "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", 132 | "function": "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs an LogEntry of a given level/1", 133 | "line": 44 134 | }, 135 | "message": { 136 | "domain": ["elixir"], 137 | "message": "Hello" 138 | }, 139 | "severity": "NOTICE", 140 | "time": "2024-04-12T15:07:55.020Z" 141 | } 142 | ``` 143 | 144 | and this is how it looks in Google Cloud Logger: 145 | 146 | ```json 147 | { 148 | "insertId": "1d4hmnafsj7vy1", 149 | "jsonPayload": { 150 | "message": "Hello", 151 | "logging.googleapis.com/spanId": "000000000000004a", 152 | "domain": ["elixir"], 153 | "time": "2024-04-12T15:07:55.020Z" 154 | }, 155 | "resource": { 156 | "type": "gce_instance", 157 | "labels": { 158 | "zone": "us-east1-d", 159 | "project_id": "firezone-staging", 160 | "instance_id": "3168853301020468373" 161 | } 162 | }, 163 | "timestamp": "2024-04-12T15:07:55.023307594Z", 164 | "severity": "NOTICE", 165 | "logName": "projects/firezone-staging/logs/cos_containers", 166 | "operation": { 167 | "id": "F8WQ1FsdFAm5ZY0AC1PB", 168 | 
"producer": "#PID<0.29081.0>" 169 | }, 170 | "trace": "projects/firezone-staging/traces/bc007e40a2e9edffa23785d8badc43b8", 171 | "sourceLocation": { 172 | "file": "lib/phoenix/logger.ex", 173 | "line": "231", 174 | "function": "Elixir.Phoenix.Logger.phoenix_endpoint_stop/4" 175 | }, 176 | "receiveTimestamp": "2024-04-12T15:07:55.678986520Z" 177 | } 178 | ``` 179 | 180 | Exception that can be sent to Google Cloud Error Reporter: 181 | 182 | ```json 183 | { 184 | "httpRequest": { 185 | "protocol": "HTTP/1.1", 186 | "referer": "http://www.example.com/", 187 | "remoteIp": "", 188 | "requestMethod": "PATCH", 189 | "requestUrl": "http://www.example.com/", 190 | "status": 503, 191 | "userAgent": "Mozilla/5.0" 192 | }, 193 | "logging.googleapis.com/operation": { 194 | "pid": "#PID<0.250.0>" 195 | }, 196 | "logging.googleapis.com/sourceLocation": { 197 | "file": "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", 198 | "function": "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs exception http context/1", 199 | "line": 301 200 | }, 201 | "@type": "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 202 | "context": { 203 | "httpRequest": { 204 | "protocol": "HTTP/1.1", 205 | "referer": "http://www.example.com/", 206 | "remoteIp": "", 207 | "requestMethod": "PATCH", 208 | "requestUrl": "http://www.example.com/", 209 | "status": 503, 210 | "userAgent": "Mozilla/5.0" 211 | }, 212 | "reportLocation": { 213 | "filePath": "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", 214 | "functionName": "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs exception http context/1", 215 | "lineNumber": 301 216 | } 217 | }, 218 | "domain": ["elixir"], 219 | "message": "Hello", 220 | "serviceContext": { 221 | "service": "nonode@nohost" 222 | }, 223 | "stack_trace": "** (EXIT from #PID<0.250.0>) :foo", 224 | "severity": "DEBUG", 225 | "time": "2024-04-11T21:34:53.503Z" 226 | } 227 | ``` 228 | 229 
| ## Datadog 230 | 231 | Adheres to the [default standard attribute list](https://docs.datadoghq.com/logs/processing/attributes_naming_convention/#default-standard-attribute-list) 232 | as much as possible. 233 | 234 | ```json 235 | { 236 | "domain": ["elixir"], 237 | "http": { 238 | "method": "GET", 239 | "referer": "http://www.example2.com/", 240 | "request_id": null, 241 | "status_code": 200, 242 | "url": "http://www.example.com/", 243 | "url_details": { 244 | "host": "www.example.com", 245 | "path": "/", 246 | "port": 80, 247 | "queryString": "", 248 | "scheme": "http" 249 | }, 250 | "useragent": "Mozilla/5.0" 251 | }, 252 | "logger": { 253 | "file_name": "/Users/andrew/Projects/os/logger_json/test/formatters/datadog_test.exs", 254 | "line": 239, 255 | "method_name": "Elixir.LoggerJSON.Formatters.DatadogTest.test logs http context/1", 256 | "thread_name": "#PID<0.225.0>" 257 | }, 258 | "message": "Hello", 259 | "network": { 260 | "client": { 261 | "ip": "127.0.0.1" 262 | } 263 | }, 264 | "syslog": { 265 | "hostname": "MacBook-Pro", 266 | "severity": "debug", 267 | "timestamp": "2024-04-11T23:10:47.967Z" 268 | } 269 | } 270 | ``` 271 | 272 | ## Elastic 273 | 274 | Follows the [Elastic Common Schema (ECS)](https://www.elastic.co/guide/en/ecs/8.11/ecs-reference.html) format. 
275 | 276 | ```json 277 | { 278 | "@timestamp": "2024-05-21T15:17:35.374Z", 279 | "ecs.version": "8.11.0", 280 | "log.level": "info", 281 | "log.logger": "Elixir.LoggerJSON.Formatters.ElasticTest", 282 | "log.origin": { 283 | "file.line": 18, 284 | "file.name": "/app/logger_json/test/logger_json/formatters/elastic_test.exs", 285 | "function": "test logs message of every level/1" 286 | }, 287 | "message": "Hello" 288 | } 289 | ``` 290 | 291 | When an error is thrown, the message field is populated with the error message and the `error.` fields will be set: 292 | 293 | > Note: when throwing a custom exception type that defines the fields `id` and/or `code`, then the `error.id` and/or `error.code` fields will be set respectively. 294 | 295 | ```json 296 | { 297 | "@timestamp": "2024-05-21T15:20:11.623Z", 298 | "ecs.version": "8.11.0", 299 | "error.message": "runtime error", 300 | "error.stack_trace": "** (RuntimeError) runtime error\n test/logger_json/formatters/elastic_test.exs:191: anonymous fn/0 in LoggerJSON.Formatters.ElasticTest.\"test logs exceptions\"/1\n", 301 | "error.type": "Elixir.RuntimeError", 302 | "log.level": "error", 303 | "message": "runtime error" 304 | } 305 | ``` 306 | 307 | Any custom metadata fields will be added to the root of the message, so that your application can fill any other ECS fields that you require: 308 | 309 | > Note that this also allows you to produce messages that do not strictly adhere to the ECS specification. 
310 | 311 | ```json 312 | // Logger.info("Hello") with Logger.metadata(:"device.model.name": "My Awesome Device") 313 | // or Logger.info("Hello", "device.model.name": "My Awesome Device") 314 | { 315 | "@timestamp": "2024-05-21T15:17:35.374Z", 316 | "ecs.version": "8.11.0", 317 | "log.level": "info", 318 | "log.logger": "Elixir.LoggerJSON.Formatters.ElasticTest", 319 | "log.origin": { 320 | "file.line": 18, 321 | "file.name": "/app/logger_json/test/logger_json/formatters/elastic_test.exs", 322 | "function": "test logs message of every level/1" 323 | }, 324 | "message": "Hello", 325 | "device.model.name": "My Awesome Device" 326 | } 327 | ``` 328 | 329 | ## Copyright and License 330 | 331 | Copyright (c) 2016 Andrew Dryga 332 | 333 | Released under the MIT License, which can be found in [LICENSE.md](./LICENSE.md). 334 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman -------------------------------------------------------------------------------- /bench/logger_json_overhead.exs: -------------------------------------------------------------------------------- 1 | # now = DateTime.utc_now() 2 | 3 | inputs = [ 4 | {"just a msg", %{message: "This is just some elaborate message"}}, 5 | {"some map", 6 | %{ 7 | message: "some other weirdo message", 8 | time: DateTime.utc_now(), 9 | http_meta: %{ 10 | status: 500, 11 | method: "GET", 12 | headers: [["what", "eva"], ["some-more", "stuff"]] 13 | } 14 | }}, 15 | {"bigger_map", 16 | %{ 17 | "users" => %{ 18 | "user_1" => %{ 19 | "name" => "Alice", 20 | "age" => 30, 21 | "preferences" => %{ 22 | "theme" => "dark", 23 | "language" => "English", 24 | "notifications" => %{ 25 | "email" => true, 26 | "sms" => false, 27 | "push" => true 28 | } 29 | }, 30 | "tags" => ["developer", "team_lead"] 31 | }, 32 | "user_2" => %{ 33 | "name" => "Bob", 34 | "age" => 25, 35 | 
"preferences" => %{ 36 | "theme" => "light", 37 | "language" => "French", 38 | "notifications" => %{ 39 | "email" => true, 40 | "sms" => true, 41 | "push" => false 42 | } 43 | }, 44 | "tags" => ["designer", "remote"] 45 | } 46 | }, 47 | "settings" => %{ 48 | "global" => %{ 49 | "timezone" => "UTC", 50 | "currency" => :usd, 51 | "support_contact" => "support@example.com" 52 | }, 53 | "regional" => %{ 54 | "US" => %{ 55 | "timezone" => "America/New_York", 56 | "currency" => :usd 57 | }, 58 | "EU" => %{ 59 | "timezone" => "Europe/Berlin", 60 | "currency" => "EUR" 61 | } 62 | } 63 | }, 64 | "analytics" => %{ 65 | "page_views" => %{ 66 | "home" => 1200, 67 | "about" => 450, 68 | "contact" => 300 69 | }, 70 | "user_sessions" => %{ 71 | "total" => 2000, 72 | "active" => 150 73 | } 74 | } 75 | }} 76 | ] 77 | 78 | redactors = [] 79 | {_, default_formatter_config} = Logger.Formatter.new(colors: [enabled?: false]) 80 | {_, default_json_formatter_config} = LoggerJSON.Formatters.Basic.new(metadata: :all) 81 | 82 | Benchee.run( 83 | %{ 84 | "just JSON" => fn input -> JSON.encode_to_iodata!(input) end, 85 | "just Jason" => fn input -> Jason.encode_to_iodata!(input) end, 86 | "logger_json encode" => fn input -> 87 | %{message: LoggerJSON.Formatter.RedactorEncoder.encode(input, redactors)} 88 | end, 89 | "whole logger format" => fn input -> 90 | LoggerJSON.Formatters.Basic.format(%{level: :info, meta: %{}, msg: {:report, input}}, default_json_formatter_config) 91 | end, 92 | # odd that those 2 end up being the slowest - what additional work are they doing? 
93 | "default formatter with report data (sanity check)" => fn input -> 94 | Logger.Formatter.format( 95 | %{level: :info, meta: %{}, msg: {:report, input}}, 96 | default_formatter_config 97 | ) 98 | end, 99 | "default formatter with pre-formatted report data as string (sanity check 2)" => 100 | {fn input -> 101 | Logger.Formatter.format( 102 | %{level: :info, meta: %{}, msg: {:string, input}}, 103 | default_formatter_config 104 | ) 105 | end, before_scenario: &inspect/1} 106 | }, 107 | warmup: 0.1, 108 | time: 1, 109 | inputs: inputs 110 | ) 111 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | if Mix.env() == :test do 4 | import_config "#{Mix.env()}.exs" 5 | end 6 | -------------------------------------------------------------------------------- /config/test.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | encoder = 4 | if Version.compare(System.version(), "1.18.0") == :lt do 5 | Jason 6 | else 7 | JSON 8 | end 9 | 10 | config :logger_json, encoder: encoder 11 | 12 | config :logger, 13 | handle_otp_reports: true, 14 | handle_sasl_reports: false 15 | -------------------------------------------------------------------------------- /coveralls.json: -------------------------------------------------------------------------------- 1 | { 2 | "skip_files": [ 3 | "test/*" 4 | ], 5 | "custom_stop_words": [ 6 | "field", 7 | "has_one", 8 | "has_many", 9 | "embeds_one", 10 | "embeds_many", 11 | "schema", 12 | "send" 13 | ], 14 | "treat_no_relevant_lines_as_covered": true 15 | } 16 | -------------------------------------------------------------------------------- /lib/logger_json.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON do 2 | @moduledoc """ 3 | A collection of formatters and utilities for 
JSON-based logging for various cloud tools and platforms. 4 | 5 | ## Supported formatters 6 | 7 | * `LoggerJSON.Formatters.Basic` - a basic JSON formatter that logs messages in a structured format, 8 | can be used with any JSON-based logging system, like ElasticSearch, Logstash, etc. 9 | 10 | * `LoggerJSON.Formatters.GoogleCloud` - a formatter that logs messages in a structured format that can be 11 | consumed by Google Cloud Logger and Google Cloud Error Reporter. 12 | 13 | * `LoggerJSON.Formatters.Datadog` - a formatter that logs messages in a structured format that can be consumed 14 | by Datadog. 15 | 16 | * `LoggerJSON.Formatters.Elastic` - a formatter that logs messages in a structured format that conforms to the 17 | [Elastic Common Schema (ECS)](https://www.elastic.co/guide/en/ecs/8.11/ecs-reference.html), 18 | so it can be consumed by ElasticSearch, LogStash, FileBeat and Kibana. 19 | 20 | ## Installation 21 | 22 | Add `logger_json` to your list of dependencies in `mix.exs`: 23 | 24 | def deps do 25 | [ 26 | # ... 27 | {:logger_json, "~> 7.0"} 28 | # ... 29 | ] 30 | end 31 | 32 | and install it running `mix deps.get`. 33 | 34 | Then, enable the formatter in your `runtime.exs`: 35 | 36 | config :logger, :default_handler, 37 | formatter: LoggerJSON.Formatters.Basic.new(metadata: [:request_id]) 38 | 39 | or inside your application code (eg. in your `application.ex`): 40 | 41 | formatter = LoggerJSON.Formatters.Basic.new(metadata: :all) 42 | :logger.update_handler_config(:default, :formatter, formatter) 43 | 44 | or inside your `config.exs` (notice that `new/1` is not available here 45 | and tuple format must be used): 46 | 47 | config :logger, :default_handler, 48 | formatter: {LoggerJSON.Formatters.Basic, metadata: [:request_id]} 49 | 50 | ## Configuration 51 | 52 | Configuration can be set using `new/1` helper of the formatter module, 53 | or by setting the 2nd element of the `:formatter` option tuple in `Logger` configuration. 
54 | 55 | For example in `config.exs`: 56 | 57 | config :logger, :default_handler, 58 | formatter: LoggerJSON.Formatters.GoogleCloud.new(metadata: :all, project_id: "logger-101") 59 | 60 | or during runtime: 61 | 62 | formatter = LoggerJSON.Formatters.Basic.new(metadata: {:all_except, [:conn]}) 63 | :logger.update_handler_config(:default, :formatter, formatter) 64 | 65 | By default, `LoggerJSON` is using `Jason` as the JSON encoder. If you use Elixir 1.18 or later, you can 66 | use the built-in `JSON` module as the encoder. To do this, you need to set the `:encoder` option in your 67 | `config.exs` file. This setting is only available at compile-time: 68 | 69 | config :logger_json, encoder: JSON 70 | 71 | ### Shared Options 72 | 73 | Some formatters require additional configuration options. Here are the options that are common for each formatter: 74 | 75 | * `:encoder_opts` - options to be passed directly to the JSON encoder. This allows you to customize the behavior 76 | of the JSON encoder. If the encoder is `JSON`, it defaults to `JSON.protocol_encode/2`. Otherwise, defaults to 77 | empty keywords. See the [documentation for Jason](https://hexdocs.pm/jason/Jason.html#encode/2-options) for 78 | available options for `Jason` encoder. 79 | 80 | * `:metadata` - a list of metadata keys to include in the log entry. By default, no metadata is included. 81 | If `:all`is given, all metadata is included. If `{:all_except, keys}` is given, all metadata except 82 | the specified keys is included. If `{:from_application_env, {app, module}, path}` is given, the metadata is fetched from 83 | the application environment (eg. `{:from_application_env, {:logger, :default_formatter}, [:metadata]}`) during the 84 | configuration initialization. 85 | 86 | * `:redactors` - a list of tuples, where first element is the module that implements the `LoggerJSON.Redactor` behaviour, 87 | and the second element is the options to pass to the redactor module. By default, no redactors are used. 
88 | 89 | ## Metadata 90 | 91 | You can set some well-known metadata keys to be included in the log entry. The following keys are supported 92 | for all formatters: 93 | 94 | * `:conn` - the `Plug.Conn` struct, setting it will include the request and response details in the log entry; 95 | * `:crash_reason` - a tuple where the first element is the exception struct and the second is the stacktrace. 96 | For example: `Logger.error("Exception!", crash_reason: {e, __STACKTRACE__})`. Setting it will include the exception 97 | details in the log entry. 98 | 99 | Formatters may encode the well-known metadata differently and support additional metadata keys, see the documentation 100 | of the formatter for more details. 101 | """ 102 | 103 | # TODO: replace with `Logger.levels()` once LoggerJSON starts depending on Elixir 1.16+ 104 | @log_levels [:error, :info, :debug, :emergency, :alert, :critical, :warning, :notice] 105 | @log_level_strings Enum.map(@log_levels, &to_string/1) 106 | 107 | @doc """ 108 | Configures Logger log level at runtime by using a value from an environment variable. 109 | 110 | By default, the 'LOG_LEVEL' environment variable is used. 111 | """ 112 | def configure_log_level_from_env!(env_name \\ "LOG_LEVEL") do 113 | env_name 114 | |> System.get_env() 115 | |> configure_log_level!() 116 | end 117 | 118 | @doc """ 119 | Changes Logger log level at runtime. 120 | 121 | Notice that setting this value below `compile_time_purge_level` would not work, 122 | because Logger calls would already be stripped at compile-time. 
123 | """ 124 | def configure_log_level!(nil), 125 | do: :ok 126 | 127 | def configure_log_level!(level) when level in @log_level_strings, 128 | do: Logger.configure(level: String.to_atom(level)) 129 | 130 | def configure_log_level!(level) when level in @log_levels, 131 | do: Logger.configure(level: level) 132 | 133 | def configure_log_level!(level) do 134 | raise ArgumentError, "Log level should be one of #{inspect(@log_levels)} values, got: #{inspect(level)}" 135 | end 136 | end 137 | -------------------------------------------------------------------------------- /lib/logger_json/ecto.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Ecto) and Code.ensure_loaded?(:telemetry) do 2 | defmodule LoggerJSON.Ecto do 3 | @moduledoc """ 4 | A telemetry handler that logs Ecto query metrics in JSON format. 5 | 6 | Please keep in mind that logging all database operations will have a performance impact 7 | on your application, it's not recommended to use this module in high-throughput production 8 | environments. 9 | """ 10 | require Logger 11 | 12 | @doc """ 13 | Attaches the telemetry handler to the given event. 14 | 15 | ### Available options 16 | 17 | * `:level` - log level which is used to log requests. Defaults to `:info`. 18 | 19 | ### Dynamic log level 20 | 21 | In some cases you may wish to set the log level dynamically 22 | on a per-query basis. To do so, set the `:level` option to 23 | a tuple, `{Mod, Fun, Args}`. The query and map of time measures 24 | will be prepended to the provided list of arguments. 25 | 26 | When invoked, your function must return a 27 | [`Logger.level()`](`t:Logger.level()/0`) or `false` to 28 | disable logging for the request. 
29 | 30 | ### Examples 31 | 32 | Attaching the telemetry handler to the `MyApp.Repo` events with the `:info` log level: 33 | 34 | LoggerJSON.Ecto.attach("logger-json-queries", [:my_app, :repo, :query], :info) 35 | 36 | For more details on event and handler naming see 37 | (`Ecto.Repo` documentation)[https://hexdocs.pm/ecto/Ecto.Repo.html#module-telemetry-events]. 38 | """ 39 | @spec attach( 40 | name :: String.t(), 41 | event :: [atom()], 42 | level :: 43 | Logger.level() 44 | | {module :: module(), function :: atom(), arguments :: [term()]} 45 | | false 46 | ) :: :ok | {:error, :already_exists} 47 | def attach(name, event, level) do 48 | :telemetry.attach(name, event, &__MODULE__.telemetry_logging_handler/4, level) 49 | end 50 | 51 | @doc """ 52 | A telemetry handler that logs Ecto query along with it's metrics in a structured format. 53 | """ 54 | @spec telemetry_logging_handler( 55 | event_name :: [atom()], 56 | measurements :: %{ 57 | query_time: non_neg_integer(), 58 | queue_time: non_neg_integer(), 59 | decode_time: non_neg_integer(), 60 | total_time: non_neg_integer() 61 | }, 62 | metadata :: %{required(:query) => String.t(), required(:repo) => module()}, 63 | level :: 64 | Logger.level() 65 | | {module :: module(), function :: atom(), arguments :: [term()]} 66 | | false 67 | ) :: :ok 68 | def telemetry_logging_handler(_event_name, measurements, %{query: query, repo: repo}, level) do 69 | query_time = Map.get(measurements, :query_time) |> format_time(:nanosecond) 70 | queue_time = Map.get(measurements, :queue_time) |> format_time(:nanosecond) 71 | decode_time = Map.get(measurements, :decode_time) |> format_time(:nanosecond) 72 | latency = Map.get(measurements, :total_time) |> format_time(:nanosecond) 73 | 74 | metadata = [ 75 | query: %{ 76 | repo: inspect(repo), 77 | execution_time_us: query_time, 78 | decode_time_us: decode_time, 79 | queue_time_us: queue_time, 80 | latency_us: latency 81 | } 82 | ] 83 | 84 | if level = level(level, query, measurements) do 
85 | Logger.log(level, query, metadata) 86 | else 87 | :ok 88 | end 89 | end 90 | 91 | defp level({m, f, a}, query, measurements), do: apply(m, f, [query, measurements | a]) 92 | defp level(level, _query, _measurements) when is_atom(level), do: level 93 | 94 | defp format_time(nil, _unit), do: 0 95 | defp format_time(time, unit), do: System.convert_time_unit(time, unit, :microsecond) 96 | end 97 | end 98 | -------------------------------------------------------------------------------- /lib/logger_json/formatter.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter do 2 | @type opts :: [ 3 | {:encoder_opts, encoder_opts()} 4 | | {:metadata, :all | {:all_except, [atom()]} | [atom()]} 5 | | {:redactors, [{module(), term()}]} 6 | | {atom(), term()} 7 | ] 8 | 9 | @type config :: term() 10 | 11 | @type encoder_opts :: JSON.encoder() | [Jason.encode_opt()] | term() 12 | 13 | @doc """ 14 | Initializes a new formatter configuration. 15 | 16 | ## Compile‑time vs. Runtime Configuration 17 | 18 | This function can’t be used in `config.exs` because that file is evaluated 19 | before your application modules are compiled and loaded, so `new/1` isn’t defined yet. 20 | You can only call it in `config/runtime.exs` or from your application code. 21 | 22 | If you must set up the formatter in `config.exs`, use the tuple format: 23 | the first element is the module implementing `LoggerJSON.Formatter`, 24 | and the second is the options passed to `new/1`. For example: 25 | 26 | config :logger, :default_handler, 27 | formatter: {LoggerJSON.Formatters.Basic, metadata: [:request_id]} 28 | 29 | Note that tuple‑based configs are resolved for each log entry, 30 | which can increase logging overhead. 31 | """ 32 | @callback new(opts :: opts()) :: {module(), config()} 33 | 34 | @doc """ 35 | Formats a log event. 
36 | """ 37 | @callback format(event :: :logger.log_event(), config_or_opts :: opts() | config()) :: iodata() 38 | 39 | @encoder Application.compile_env(:logger_json, :encoder, Jason) 40 | @encoder_protocol Application.compile_env(:logger_json, :encoder_protocol) || Module.concat(@encoder, "Encoder") 41 | @default_encoder_opts if(@encoder == JSON, do: &JSON.protocol_encode/2, else: []) 42 | 43 | @doc false 44 | @spec default_encoder_opts :: encoder_opts() 45 | def default_encoder_opts, do: @default_encoder_opts 46 | 47 | @doc false 48 | @spec encoder :: module() 49 | def encoder, do: @encoder 50 | 51 | @doc false 52 | @spec encoder_protocol :: module() 53 | def encoder_protocol, do: @encoder_protocol 54 | end 55 | -------------------------------------------------------------------------------- /lib/logger_json/formatter/code.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.Code do 2 | @moduledoc false 3 | 4 | @doc """ 5 | Provide a string output of the MFA log entry. 6 | """ 7 | def format_function(nil, function), do: function 8 | def format_function(module, function), do: "#{module}.#{function}" 9 | def format_function(module, function, arity), do: "#{format_function(module, function)}/#{arity}" 10 | end 11 | -------------------------------------------------------------------------------- /lib/logger_json/formatter/datetime.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.DateTime do 2 | @moduledoc false 3 | 4 | @doc """ 5 | Returns either a `time` taken from metadata or current time in RFC3339 UTC "Zulu" format. 
6 | """ 7 | def utc_time(%{time: time}) when is_integer(time) and time >= 0 do 8 | system_time_to_rfc3339(time) 9 | end 10 | 11 | def utc_time(_meta) do 12 | :os.system_time(:microsecond) 13 | |> system_time_to_rfc3339() 14 | end 15 | 16 | defp system_time_to_rfc3339(system_time) do 17 | micro = rem(system_time, 1_000_000) 18 | 19 | {date, {hours, minutes, seconds}} = :calendar.system_time_to_universal_time(system_time, :microsecond) 20 | 21 | [format_date(date), ?T, format_time({hours, minutes, seconds, div(micro, 1000)}), ?Z] 22 | |> IO.iodata_to_binary() 23 | end 24 | 25 | defp format_time({hh, mi, ss, ms}) do 26 | [pad2(hh), ?:, pad2(mi), ?:, pad2(ss), ?., pad3(ms)] 27 | end 28 | 29 | defp format_date({yy, mm, dd}) do 30 | [Integer.to_string(yy), ?-, pad2(mm), ?-, pad2(dd)] 31 | end 32 | 33 | defp pad3(int) when int < 10, do: [?0, ?0, Integer.to_string(int)] 34 | defp pad3(int) when int < 100, do: [?0, Integer.to_string(int)] 35 | defp pad3(int), do: Integer.to_string(int) 36 | 37 | defp pad2(int) when int < 10, do: [?0, Integer.to_string(int)] 38 | defp pad2(int), do: Integer.to_string(int) 39 | end 40 | -------------------------------------------------------------------------------- /lib/logger_json/formatter/map_builder.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.MapBuilder do 2 | @moduledoc false 3 | 4 | @doc """ 5 | Optionally put a value to a map. 6 | """ 7 | def maybe_put(map, _key, nil), do: map 8 | def maybe_put(map, key, value), do: Map.put(map, key, value) 9 | 10 | @doc """ 11 | Optionally merge two maps. 12 | """ 13 | def maybe_merge(map, nil), do: map 14 | def maybe_merge(left_map, right_map), do: Map.merge(right_map, left_map) 15 | 16 | @doc """ 17 | Update a map key using a callback function. Noop if the key does not exist. 
18 | """ 19 | def maybe_update(map, key, cb) do 20 | if Map.has_key?(map, key) do 21 | Map.update!(map, key, cb) 22 | else 23 | map 24 | end 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /lib/logger_json/formatter/message.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.Message do 2 | @moduledoc false 3 | 4 | # crash 5 | def format_message({:string, message}, %{crash_reason: crash_reason}, %{crash: crash_fmt}) do 6 | crash_fmt.(message, crash_reason) 7 | end 8 | 9 | # binary 10 | def format_message({:string, message}, _meta, %{binary: binary_fmt}) do 11 | binary_fmt.(message) 12 | end 13 | 14 | # OTP report or structured logging data 15 | def format_message( 16 | {:report, data}, 17 | %{report_cb: callback} = meta, 18 | %{binary: binary_fmt, structured: structured_fmt} = formatters 19 | ) do 20 | cond do 21 | is_function(callback, 1) and callback != (&:logger.format_otp_report/1) -> 22 | format_message(callback.(data), meta, formatters) 23 | 24 | is_function(callback, 2) -> 25 | callback.(data, %{depth: :unlimited, chars_limit: :unlimited, single_line: false}) 26 | |> binary_fmt.() 27 | 28 | true -> 29 | structured_fmt.(data) 30 | end 31 | end 32 | 33 | def format_message({:report, data}, _meta, %{structured: structured_fmt}) do 34 | structured_fmt.(data) 35 | end 36 | 37 | def format_message({format, args}, _meta, %{binary: binary_fmt}) do 38 | format 39 | |> Logger.Utils.scan_inspect(args, :infinity) 40 | |> :io_lib.build_text() 41 | |> binary_fmt.() 42 | end 43 | end 44 | -------------------------------------------------------------------------------- /lib/logger_json/formatter/metadata.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.Metadata do 2 | @moduledoc false 3 | 4 | @ignored_metadata_keys ~w[ansi_color initial_call crash_reason pid gl report_cb time]a 5 | 6 
| @doc """ 7 | Takes current metadata option value and updates it to exclude the given keys. 8 | """ 9 | def update_metadata_selector({:from_application_env, {app, module}, path}, processed_keys) do 10 | Application.fetch_env!(app, module) 11 | |> get_in(path) 12 | |> update_metadata_selector(processed_keys) 13 | end 14 | 15 | def update_metadata_selector({:from_application_env, {app, module}}, processed_keys) do 16 | Application.fetch_env!(app, module) 17 | |> update_metadata_selector(processed_keys) 18 | end 19 | 20 | def update_metadata_selector({:from_application_env, other}, _processed_keys) do 21 | raise ArgumentError, """ 22 | Invalid value for `:metadata` option: `{:from_application_env, #{inspect(other)}}`. 23 | 24 | The value must be a tuple with the application and module name, 25 | and an optional path to the metadata option. 26 | 27 | Eg.: `{:from_application_env, {:logger, :default_formatter}, [:metadata]}` 28 | """ 29 | end 30 | 31 | def update_metadata_selector(:all, processed_keys), 32 | do: {:all_except, processed_keys} 33 | 34 | def update_metadata_selector({:all_except, except_keys}, processed_keys), 35 | do: {:all_except, except_keys ++ processed_keys} 36 | 37 | def update_metadata_selector(nil, processed_keys), 38 | do: {:all_except, processed_keys} 39 | 40 | def update_metadata_selector(keys, processed_keys), 41 | do: keys -- processed_keys 42 | 43 | @doc """ 44 | Takes metadata and returns a map with the given keys. 45 | 46 | The `keys` can be either a list of keys or one of the following terms: 47 | 48 | * `:all` - all metadata keys except the ones already processed by the formatter; 49 | * `{:all_except, keys}` - all metadata keys except the ones given in the list and 50 | the ones already processed by the formatter. 
51 | """ 52 | def take_metadata(meta, {:all_except, keys}) do 53 | meta 54 | |> Map.drop(keys ++ @ignored_metadata_keys) 55 | |> Enum.into(%{}) 56 | end 57 | 58 | def take_metadata(meta, :all) do 59 | meta 60 | |> Map.drop(@ignored_metadata_keys) 61 | |> Enum.into(%{}) 62 | end 63 | 64 | def take_metadata(_meta, []) do 65 | %{} 66 | end 67 | 68 | def take_metadata(meta, keys) when is_list(keys) do 69 | Map.take(meta, keys) 70 | end 71 | end 72 | -------------------------------------------------------------------------------- /lib/logger_json/formatter/plug.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Plug) do 2 | defmodule LoggerJSON.Formatter.Plug do 3 | @moduledoc false 4 | 5 | alias Plug.Conn 6 | 7 | @doc """ 8 | Returns the first IP address from the `x-forwarded-for` header 9 | if it exists, otherwise returns the remote IP address. 10 | 11 | Please keep in mind that returning first IP address from the 12 | `x-forwarded-for` header is not very reliable, as it can be 13 | easily spoofed. Additionally, we do not exclude the IP addresses 14 | from list of well-known proxies, so it's possible that the 15 | returned IP address is not the actual client IP address. 16 | """ 17 | def remote_ip(conn) do 18 | if header_value = get_header(conn, "x-forwarded-for") do 19 | header_value 20 | |> String.split(",") 21 | |> hd() 22 | |> String.trim() 23 | else 24 | to_string(:inet_parse.ntoa(conn.remote_ip)) 25 | end 26 | end 27 | 28 | @doc """ 29 | Returns the first value of the given header from the request. 
30 | """ 31 | def get_header(conn, header) do 32 | case Conn.get_req_header(conn, header) do 33 | [] -> nil 34 | [val | _] -> val 35 | end 36 | end 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /lib/logger_json/formatter/redactor_encoder.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.RedactorEncoder do 2 | @doc """ 3 | Takes a term and makes sure that it can be encoded by the encoder without errors 4 | and without leaking sensitive information. 5 | 6 | ## Encoding rules 7 | 8 | Type | Encoding | Redaction 9 | ------------------- | --------------------------------------------------- | -------------- 10 | `boolean()` | unchanged | unchanged 11 | `map()` | unchanged | values are redacted 12 | `list()` | unchanged | unchanged 13 | `tuple()` | converted to list | unchanged 14 | `binary()` | unchanged if printable, otherwise using `inspect/2` | unchanged 15 | `number()` | unchanged | unchanged 16 | `atom()` | unchanged | unchanged 17 | `struct()` | converted to map | values are redacted 18 | `keyword()` | converted to map | values are redacted 19 | `%Jason.Fragment{}` | unchanged | unchanged if encoder is `Jason` 20 | everything else | using `inspect/2` | unchanged 21 | """ 22 | 23 | @type redactor :: {redactor :: module(), redactor_opts :: term()} 24 | 25 | @encoder_protocol LoggerJSON.Formatter.encoder_protocol() 26 | 27 | @spec encode(term(), redactors :: [redactor()]) :: term() 28 | def encode(nil, _redactors), do: nil 29 | def encode(true, _redactors), do: true 30 | def encode(false, _redactors), do: false 31 | def encode(atom, _redactors) when is_atom(atom), do: atom 32 | def encode(tuple, redactors) when is_tuple(tuple), do: tuple |> Tuple.to_list() |> encode(redactors) 33 | def encode(number, _redactors) when is_number(number), do: number 34 | def encode("[REDACTED]", _redactors), do: "[REDACTED]" 35 | def encode(binary, _redactors) 
when is_binary(binary), do: encode_binary(binary) 36 | 37 | if @encoder_protocol == Jason.Encoder do 38 | def encode(fragment, _redactors) when is_struct(fragment, Jason.Fragment), do: fragment 39 | end 40 | 41 | def encode(%NaiveDateTime{} = naive_datetime, _redactors), do: naive_datetime 42 | def encode(%DateTime{} = datetime, _redactors), do: datetime 43 | def encode(%Date{} = date, _redactors), do: date 44 | def encode(%Time{} = time, _redactors), do: time 45 | 46 | if Code.ensure_loaded?(Decimal) do 47 | def encode(%Decimal{} = decimal, _redactors), do: decimal 48 | end 49 | 50 | def encode(%_struct{} = struct, redactors) do 51 | struct 52 | |> Map.from_struct() 53 | |> encode(redactors) 54 | end 55 | 56 | def encode(%{} = map, redactors) do 57 | for {key, value} <- map, into: %{} do 58 | encode_key_value({key, value}, redactors) 59 | end 60 | end 61 | 62 | def encode([{key, _} | _] = keyword, redactors) when is_atom(key) do 63 | Enum.into(keyword, %{}, fn {key, value} -> 64 | encode_key_value({key, value}, redactors) 65 | end) 66 | rescue 67 | _ -> encode_list(keyword, redactors, []) 68 | end 69 | 70 | def encode(list, redactors) when is_list(list), do: encode_list(list, redactors, []) 71 | def encode(data, _redactors), do: inspect(data, pretty: true, width: 80) 72 | 73 | defp encode_key_value({:mfa, {_module, _function, _arity} = mfa}, redactors) do 74 | value = format_mfa(mfa) 75 | encode_key_value({:mfa, value}, redactors) 76 | end 77 | 78 | defp encode_key_value({key, value}, redactors) do 79 | key = encode_key(key) 80 | {key, encode(redact(key, value, redactors), redactors)} 81 | end 82 | 83 | defp encode_key(key) when is_binary(key), do: encode_binary(key) 84 | defp encode_key(key) when is_atom(key) or is_number(key), do: key 85 | defp encode_key(key), do: inspect(key) 86 | 87 | defp format_mfa({module, function, arity}), do: "#{module}.#{function}/#{arity}" 88 | 89 | defp encode_binary(data) when is_binary(data) do 90 | if String.printable?(data) do 91 
| data 92 | else 93 | inspect(data) 94 | end 95 | end 96 | 97 | defp redact(_key, value, []) do 98 | value 99 | end 100 | 101 | defp redact(key, value, redactors) do 102 | Enum.reduce(redactors, value, fn {redactor, opts}, acc -> 103 | redactor.redact(to_string(key), acc, opts) 104 | end) 105 | end 106 | 107 | defp encode_list([], _redactors, acc), do: Enum.reverse(acc) 108 | 109 | defp encode_list([head | tail], redactors, acc) do 110 | encoded = encode(head, redactors) 111 | encode_list(tail, redactors, [encoded | acc]) 112 | end 113 | 114 | defp encode_list(improper_tail, redactors, [head | tail]) do 115 | # Tuple will be converted to list, which will make the list proper and 116 | # defeat the purpose 117 | redacted = 118 | case improper_tail do 119 | {key, value} -> {key, encode(redact(key, value, redactors), redactors)} 120 | _other -> encode(improper_tail, redactors) 121 | end 122 | 123 | tail 124 | |> Enum.reduce([head | redacted], fn el, acc -> [el | acc] end) 125 | |> inspect(limit: :infinity, pretty: true) 126 | end 127 | end 128 | -------------------------------------------------------------------------------- /lib/logger_json/formatters/basic.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatters.Basic do 2 | @moduledoc """ 3 | Custom Erlang's [`:logger` formatter](https://www.erlang.org/doc/apps/kernel/logger_chapter.html#formatters) which 4 | writes logs in a JSON format. 5 | 6 | For list of options see "Shared options" in `LoggerJSON`. 
7 | 8 | ## Examples 9 | 10 | %{ 11 | "message" => "Hello", 12 | "metadata" => %{"domain" => ["elixir"]}, 13 | "severity" => "notice", 14 | "time" => "2024-04-11T21:31:01.403Z" 15 | } 16 | """ 17 | import LoggerJSON.Formatter.{MapBuilder, DateTime, Message, Metadata, RedactorEncoder} 18 | require LoggerJSON.Formatter, as: Formatter 19 | 20 | @behaviour Formatter 21 | 22 | @encoder Formatter.encoder() 23 | 24 | @processed_metadata_keys ~w[otel_span_id span_id 25 | otel_trace_id trace_id 26 | conn]a 27 | 28 | @impl Formatter 29 | def new(opts \\ []) do 30 | {__MODULE__, config(opts)} 31 | end 32 | 33 | defp config(%{} = map), do: map 34 | 35 | defp config(opts) do 36 | opts = Keyword.new(opts) 37 | encoder_opts = Keyword.get_lazy(opts, :encoder_opts, &Formatter.default_encoder_opts/0) 38 | metadata_keys_or_selector = Keyword.get(opts, :metadata, []) 39 | metadata_selector = update_metadata_selector(metadata_keys_or_selector, @processed_metadata_keys) 40 | redactors = Keyword.get(opts, :redactors, []) 41 | %{encoder_opts: encoder_opts, metadata: metadata_selector, redactors: redactors} 42 | end 43 | 44 | @impl Formatter 45 | def format(%{level: level, meta: meta, msg: msg}, config_or_opts) do 46 | %{ 47 | encoder_opts: encoder_opts, 48 | metadata: metadata_selector, 49 | redactors: redactors 50 | } = config(config_or_opts) 51 | 52 | message = 53 | format_message(msg, meta, %{ 54 | binary: &format_binary_message/1, 55 | structured: &format_structured_message/1, 56 | crash: &format_crash_reason(&1, &2, meta) 57 | }) 58 | 59 | metadata = 60 | meta 61 | |> take_metadata(metadata_selector) 62 | |> maybe_update(:file, &IO.chardata_to_string/1) 63 | 64 | line = 65 | %{ 66 | time: utc_time(meta), 67 | severity: Atom.to_string(level), 68 | message: encode(message, redactors), 69 | metadata: encode(metadata, redactors) 70 | } 71 | |> maybe_put(:request, format_http_request(meta)) 72 | |> maybe_put(:span, format_span(meta)) 73 | |> maybe_put(:trace, format_trace(meta)) 74 | |> 
@encoder.encode_to_iodata!(encoder_opts) 75 | 76 | [line, "\n"] 77 | end 78 | 79 | @doc false 80 | def format_binary_message(binary) do 81 | IO.chardata_to_string(binary) 82 | end 83 | 84 | @doc false 85 | def format_structured_message(map) when is_map(map) do 86 | map 87 | end 88 | 89 | def format_structured_message(keyword) do 90 | Enum.into(keyword, %{}) 91 | end 92 | 93 | @doc false 94 | def format_crash_reason(binary, _reason, _meta) do 95 | IO.chardata_to_string(binary) 96 | end 97 | 98 | if Code.ensure_loaded?(Plug.Conn) do 99 | defp format_http_request(%{conn: %Plug.Conn{} = conn}) do 100 | %{ 101 | connection: %{ 102 | protocol: Plug.Conn.get_http_protocol(conn), 103 | method: conn.method, 104 | path: conn.request_path, 105 | status: conn.status 106 | }, 107 | client: %{ 108 | user_agent: Formatter.Plug.get_header(conn, "user-agent"), 109 | ip: Formatter.Plug.remote_ip(conn) 110 | } 111 | } 112 | end 113 | end 114 | 115 | defp format_http_request(_meta), do: nil 116 | 117 | defp format_span(%{otel_span_id: otel_span_id}), do: IO.chardata_to_string(otel_span_id) 118 | defp format_span(%{span_id: span_id}), do: span_id 119 | defp format_span(_meta), do: nil 120 | 121 | defp format_trace(%{otel_trace_id: otel_trace_id}), do: IO.chardata_to_string(otel_trace_id) 122 | defp format_trace(%{trace_id: trace_id}), do: trace_id 123 | defp format_trace(_meta), do: nil 124 | end 125 | -------------------------------------------------------------------------------- /lib/logger_json/formatters/datadog.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatters.Datadog do 2 | @moduledoc """ 3 | Custom Erlang's [`:logger` formatter](https://www.erlang.org/doc/apps/kernel/logger_chapter.html#formatters) which 4 | writes logs in a structured format that can be consumed by Datadog. 
5 | 6 | This formatter adheres to the 7 | [default standard attribute list](https://docs.datadoghq.com/logs/processing/attributes_naming_convention/#default-standard-attribute-list) 8 | as much as possible. 9 | 10 | ## Formatter Configuration 11 | 12 | The formatter can be configured with the following options: 13 | 14 | * `:hostname` (optional) - changes how the `syslog.hostname` is set in logs. By default, it uses `:system` which uses 15 | `:inet.gethostname/0` to resolve the value. If you are running in an environment where the hostname is not correct, 16 | you can hard code it by setting `hostname` to a string. In places where the hostname is inaccurate but also dynamic 17 | (like Kubernetes), you can set `hostname` to `:unset` to exclude it entirely. You'll then be relying on 18 | [`dd-agent`](https://docs.datadoghq.com/agent/) to determine the hostname. 19 | 20 | * `:reported_levels` (optional) - a list of log levels that should be reported as errors to Datadog. 21 | Default: `[:emergency, :alert, :critical, :error]`. 22 | 23 | For list of shared options see "Shared options" in `LoggerJSON`. 24 | 25 | ## Metadata 26 | 27 | For list of other well-known metadata keys see "Metadata" in `LoggerJSON`. 
28 | 29 | ## Examples 30 | 31 | %{ 32 | "domain" => ["elixir"], 33 | "logger" => %{ 34 | "file_name" => "/Users/andrew/Projects/os/logger_json/test/formatters/datadog_test.exs", 35 | "line" => 44, 36 | "method_name" => "Elixir.LoggerJSON.Formatters.DatadogTest.test logs an LogEntry of a given level/1", 37 | "thread_name" => "#PID<0.234.0>" 38 | }, 39 | "message" => "Hello", 40 | "syslog" => %{ 41 | "hostname" => "MacBook-Pro", 42 | "severity" => "notice", 43 | "timestamp" => "2024-04-11T23:03:39.726Z" 44 | } 45 | } 46 | """ 47 | import LoggerJSON.Formatter.{MapBuilder, DateTime, Message, Metadata, Code, RedactorEncoder} 48 | require LoggerJSON.Formatter, as: Formatter 49 | 50 | @behaviour Formatter 51 | 52 | @encoder Formatter.encoder() 53 | 54 | @processed_metadata_keys ~w[pid file line mfa conn]a 55 | 56 | @default_levels_reported_as_errors ~w[emergency alert critical error]a 57 | 58 | @impl Formatter 59 | def new(opts \\ []) do 60 | {__MODULE__, config(opts)} 61 | end 62 | 63 | defp config(%{} = map), do: map 64 | 65 | defp config(opts) do 66 | opts = Keyword.new(opts) 67 | encoder_opts = Keyword.get_lazy(opts, :encoder_opts, &Formatter.default_encoder_opts/0) 68 | redactors = Keyword.get(opts, :redactors, []) 69 | hostname = Keyword.get(opts, :hostname, :system) 70 | metadata_keys_or_selector = Keyword.get(opts, :metadata, []) 71 | metadata_selector = update_metadata_selector(metadata_keys_or_selector, @processed_metadata_keys) 72 | reported_levels = Keyword.get(opts, :reported_levels, @default_levels_reported_as_errors) 73 | 74 | %{ 75 | encoder_opts: encoder_opts, 76 | metadata: metadata_selector, 77 | redactors: redactors, 78 | hostname: hostname, 79 | reported_levels: reported_levels 80 | } 81 | end 82 | 83 | @impl Formatter 84 | def format(%{level: level, meta: meta, msg: msg}, config_or_opts) do 85 | %{ 86 | encoder_opts: encoder_opts, 87 | metadata: metadata_selector, 88 | redactors: redactors, 89 | hostname: hostname, 90 | reported_levels: 
reported_levels 91 | } = config(config_or_opts) 92 | 93 | message = 94 | format_message(msg, meta, %{ 95 | binary: &format_binary_message/1, 96 | structured: &format_structured_message/1, 97 | crash: &format_crash_reason(&1, &2, meta) 98 | }) 99 | 100 | metadata = 101 | take_metadata(meta, metadata_selector) 102 | |> maybe_put(:"dd.span_id", format_span(meta)) 103 | |> maybe_put(:"dd.trace_id", format_trace(meta)) 104 | |> maybe_update(:otel_span_id, &safe_chardata_to_string/1) 105 | |> maybe_update(:otel_trace_id, &safe_chardata_to_string/1) 106 | 107 | line = 108 | %{syslog: syslog(level, meta, hostname)} 109 | |> maybe_put(:logger, format_logger(meta)) 110 | |> maybe_merge(format_http_request(meta)) 111 | |> maybe_merge(format_error(message, metadata, level, reported_levels)) 112 | |> maybe_merge(encode(metadata, redactors)) 113 | |> maybe_merge(encode(message, redactors)) 114 | |> @encoder.encode_to_iodata!(encoder_opts) 115 | 116 | [line, "\n"] 117 | end 118 | 119 | @doc false 120 | def format_binary_message(binary) do 121 | %{message: IO.chardata_to_string(binary)} 122 | end 123 | 124 | @doc false 125 | def format_structured_message(map) when is_map(map) do 126 | %{message: map} 127 | end 128 | 129 | def format_structured_message(keyword) do 130 | %{message: Enum.into(keyword, %{})} 131 | end 132 | 133 | @doc false 134 | def format_crash_reason(binary, {%struct{} = _exception, stacktrace}, _meta) do 135 | kind = 136 | struct 137 | |> Module.split() 138 | |> List.last() 139 | 140 | message = IO.chardata_to_string(binary) 141 | 142 | %{ 143 | message: message, 144 | error: %{ 145 | kind: kind, 146 | message: message, 147 | stack: Exception.format_stacktrace(stacktrace) 148 | } 149 | } 150 | end 151 | 152 | # https://docs.datadoghq.com/standard-attributes/?search=logger+error&product=log+management 153 | def format_crash_reason(binary, other, _meta) do 154 | message = IO.chardata_to_string(binary) 155 | 156 | %{ 157 | message: message, 158 | error: %{ 159 | 
kind: format_crash_reason_kind(other), 160 | message: message 161 | } 162 | } 163 | end 164 | 165 | defp format_crash_reason_kind({{:EXIT, _pid}, _reason}), do: "exit" 166 | defp format_crash_reason_kind({:exit, _reason}), do: "exit" 167 | defp format_crash_reason_kind({:throw, _reason}), do: "throw" 168 | defp format_crash_reason_kind(_), do: "other" 169 | 170 | defp syslog(level, meta, :system) do 171 | {:ok, hostname} = :inet.gethostname() 172 | 173 | %{ 174 | hostname: to_string(hostname), 175 | severity: Atom.to_string(level), 176 | timestamp: utc_time(meta) 177 | } 178 | end 179 | 180 | defp syslog(level, meta, :unset) do 181 | %{ 182 | severity: Atom.to_string(level), 183 | timestamp: utc_time(meta) 184 | } 185 | end 186 | 187 | defp syslog(level, meta, hostname) do 188 | %{ 189 | hostname: hostname, 190 | severity: Atom.to_string(level), 191 | timestamp: utc_time(meta) 192 | } 193 | end 194 | 195 | defp format_logger(%{file: file, line: line, mfa: {m, f, a}} = meta) do 196 | %{ 197 | thread_name: inspect(meta[:pid]), 198 | method_name: format_function(m, f, a), 199 | file_name: IO.chardata_to_string(file), 200 | line: line 201 | } 202 | end 203 | 204 | defp format_logger(_meta), 205 | do: nil 206 | 207 | # To connect logs and traces, span_id and trace_id keys are respectively dd.span_id and dd.trace_id 208 | # https://docs.datadoghq.com/tracing/faq/why-cant-i-see-my-correlated-logs-in-the-trace-id-panel/?tab=jsonlogs 209 | defp format_span(%{otel_span_id: otel_span_id}), do: convert_otel_field(otel_span_id) 210 | defp format_span(%{span_id: span_id}), do: span_id 211 | defp format_span(_meta), do: nil 212 | 213 | defp format_trace(%{otel_trace_id: otel_trace_id}), do: convert_otel_field(otel_trace_id) 214 | defp format_trace(%{trace_id: trace_id}), do: trace_id 215 | defp format_trace(_meta), do: nil 216 | 217 | # This converts native OpenTelemetry fields to the native Datadog format. 218 | # This function is taken from the Datadog examples for converting. 
Mostly the Golang version 219 | # https://docs.datadoghq.com/tracing/other_telemetry/connect_logs_and_traces/opentelemetry/?tab=go 220 | # Tests were stolen from https://github.com/open-telemetry/opentelemetry-specification/issues/525 221 | # and https://go.dev/play/p/pUBHcLdXJNy 222 | defp convert_otel_field(<>) do 223 | {value, _} = Integer.parse(value, 16) 224 | Integer.to_string(value, 10) 225 | rescue 226 | _ -> "" 227 | end 228 | 229 | defp convert_otel_field(value) when byte_size(value) < 16, do: "" 230 | 231 | defp convert_otel_field(value) when is_binary(value) or is_list(value) do 232 | value = to_string(value) 233 | len = byte_size(value) - 16 234 | <<_front::binary-size(len), value::binary>> = value 235 | convert_otel_field(value) 236 | rescue 237 | _ -> "" 238 | end 239 | 240 | defp convert_otel_field(_other) do 241 | "" 242 | end 243 | 244 | defp safe_chardata_to_string(chardata) when is_list(chardata) or is_binary(chardata) do 245 | IO.chardata_to_string(chardata) 246 | end 247 | 248 | defp safe_chardata_to_string(other), do: other 249 | 250 | if Code.ensure_loaded?(Plug.Conn) do 251 | defp format_http_request(%{conn: %Plug.Conn{} = conn, duration_us: duration_us} = meta) do 252 | conn 253 | |> build_http_request_data(meta[:request_id]) 254 | |> maybe_put(:duration, to_nanosecs(duration_us)) 255 | end 256 | 257 | defp format_http_request(%{conn: %Plug.Conn{} = conn}), do: format_http_request(%{conn: conn, duration_us: nil}) 258 | end 259 | 260 | defp format_http_request(_meta), do: nil 261 | 262 | defp format_error(%{error: _error}, _metadata, _level, _reported_levels), do: nil 263 | 264 | defp format_error(%{message: message}, metadata, level, reported_levels) when is_binary(message) do 265 | if level in reported_levels do 266 | error = 267 | metadata[:error] 268 | |> Kernel.||(%{}) 269 | |> Map.put(:kind, get_error_kind(metadata)) 270 | |> Map.put(:message, message) 271 | |> maybe_put(:stack, get_error_stack(metadata)) 272 | 273 | %{error: error} 
274 | end 275 | end 276 | 277 | defp format_error(_msg, _metadata, _level, _reported_levels), do: nil 278 | 279 | defp get_error_kind(%{error: %{kind: kind}}) when is_binary(kind), do: kind 280 | defp get_error_kind(_metadata), do: "error" 281 | 282 | defp get_error_stack(%{error: %{stack: stack}}) when is_binary(stack), do: stack 283 | defp get_error_stack(_metadata), do: nil 284 | 285 | if Code.ensure_loaded?(Plug.Conn) do 286 | defp build_http_request_data(%Plug.Conn{} = conn, request_id) do 287 | request_url = Plug.Conn.request_url(conn) 288 | user_agent = Formatter.Plug.get_header(conn, "user-agent") 289 | remote_ip = Formatter.Plug.remote_ip(conn) 290 | referer = Formatter.Plug.get_header(conn, "referer") 291 | 292 | %{ 293 | http: %{ 294 | url: request_url, 295 | status_code: conn.status, 296 | method: conn.method, 297 | referer: referer, 298 | request_id: request_id, 299 | useragent: user_agent, 300 | url_details: %{ 301 | host: conn.host, 302 | port: conn.port, 303 | path: conn.request_path, 304 | queryString: conn.query_string, 305 | scheme: conn.scheme 306 | } 307 | }, 308 | network: %{client: %{ip: remote_ip}} 309 | } 310 | end 311 | end 312 | 313 | if Code.ensure_loaded?(Plug.Conn) do 314 | defp to_nanosecs(duration_us) when is_number(duration_us), do: duration_us * 1000 315 | defp to_nanosecs(_), do: nil 316 | end 317 | end 318 | -------------------------------------------------------------------------------- /lib/logger_json/formatters/elastic.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatters.Elastic do 2 | @moduledoc """ 3 | Custom Erlang's [`:logger` formatter](https://www.erlang.org/doc/apps/kernel/logger_chapter.html#formatters) which 4 | writes logs in a JSON-structured format that conforms to the Elastic Common Schema (ECS), so it can be consumed by 5 | ElasticSearch, LogStash, FileBeat and Kibana. 
6 | 7 | ## Formatter Configuration 8 | 9 | For list of options see "Shared options" in `LoggerJSON`. 10 | 11 | ## Metadata 12 | 13 | For list of other well-known metadata keys see "Metadata" in `LoggerJSON`. 14 | 15 | Any custom metadata that you set with `Logger.metadata/1` will be included top-level in the log entry. 16 | 17 | ## Examples 18 | 19 | Example of an info log (`Logger.info("Hello")` without any metadata): 20 | 21 | ```elixir 22 | %{ 23 | "@timestamp" => "2024-05-17T16:20:00.000Z", 24 | "ecs.version" => "8.11.0", 25 | "log.level" => "info", 26 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 27 | "log.origin" => %{ 28 | "file.name" => "/app/logger_json/test/formatters/elastic_test.exs", 29 | "file.line" => 18, 30 | "function" => "test logs an LogEntry of every level/1" 31 | }, 32 | "message" => "Hello" 33 | } 34 | ``` 35 | 36 | Example of logging by keywords or by map (Logger.info(%{message: "Hello", foo: :bar, fiz: %{buz: "buz"}})). 37 | The keywords or map items are added to the top-level of the log entry: 38 | 39 | ```elixir 40 | %{ 41 | "@timestamp" => "2024-05-17T16:20:00.000Z", 42 | "ecs.version" => "8.11.0", 43 | "fiz" => %{"buz" => "buz"}, 44 | "foo" => "bar", 45 | "log.level" => "debug", 46 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 47 | "log.origin" => %{ 48 | "file.line" => 68, 49 | "file.name" => "/app/logger_json/test/formatters/elastic_test.exs", 50 | "function" => "test logs an LogEntry with a map payload containing message/1" 51 | }, 52 | "message" => "Hello" 53 | } 54 | ``` 55 | 56 | Example of logging due to raising an exception (`raise RuntimeError`): 57 | 58 | ```elixir 59 | %{ 60 | "@timestamp" => "2024-05-17T16:20:00.000Z", 61 | "ecs.version" => "8.11.0", 62 | "error.message" => "runtime error", 63 | "error.stack_trace" => "** (RuntimeError) runtime error\n test/logger_json/formatters/elastic_test.exs:191: anonymous fn/0 in LoggerJSON.Formatters.ElasticTest.\"test logs exceptions\"/1\n", 64 | 
"error.type" => "Elixir.RuntimeError", 65 | "log.level" => "error", 66 | "message" => "Process #PID<0.322.0> raised an exception\n** (RuntimeError) runtime error\n test/logger_json/formatters/elastic_test.exs:191: anonymous fn/0 in LoggerJSON.Formatters.ElasticTest.\"test logs exceptions\"/1" 67 | } 68 | ``` 69 | 70 | Note that if you raise an exception that contains an `id` or a `code` property, they will be included in the log entry as `error.id` and `error.code` respectively. 71 | 72 | Example: 73 | 74 | ```elixir 75 | defmodule TestException do 76 | defexception [:message, :id, :code] 77 | end 78 | 79 | ... 80 | 81 | raise TestException, id: :oops_id, code: 42, message: "oops!" 82 | ``` 83 | 84 | results in: 85 | 86 | ```elixir 87 | %{ 88 | "@timestamp" => "2024-05-17T16:20:00.000Z", 89 | "ecs.version" => "8.11.0", 90 | "error.code" => 42, 91 | "error.id" => "oops_id", 92 | "error.message" => "oops!", 93 | "error.stack_trace" => "** (LoggerJSON.Formatters.ElasticTest.TestException) oops!\n test/logger_json/formatters/elastic_test.exs:223: anonymous fn/0 in LoggerJSON.Formatters.ElasticTest.\"test logs exceptions with id and code\"/1\n", 94 | "error.type" => "Elixir.LoggerJSON.Formatters.ElasticTest.TestException", 95 | "log.level" => "error", 96 | "message" => "Process #PID<0.325.0> raised an exception\n** (LoggerJSON.Formatters.ElasticTest.TestException) oops!\n test/logger_json/formatters/elastic_test.exs:223: anonymous fn/0 in LoggerJSON.Formatters.ElasticTest.\"test logs exceptions with id and code\"/1" 97 | } 98 | ``` 99 | 100 | You can also choose to log caught exceptions with a custom message. 
101 | For example, after catching an exception with `try`/`rescue`: 102 | 103 | ```elixir 104 | try do 105 | raise "oops" 106 | rescue 107 | e in RuntimeError -> Logger.error("Something went wrong", crash_reason: {e, __STACKTRACE__}) 108 | end 109 | ``` 110 | 111 | then you'll get a message like: 112 | 113 | ```elixir 114 | %{ 115 | "@timestamp" => "2024-05-17T16:20:00.000Z", 116 | "ecs.version" => "8.11.0", 117 | "error.message" => "oops", 118 | "error.stack_trace" => "** (RuntimeError) oops\n test/logger_json/formatters/elastic_test.exs:421: anonymous fn/0 in LoggerJSON.Formatters.ElasticTest.\"test logs caught errors\"/1\n (logger_json 6.0.2) test/support/logger_case.ex:16: anonymous fn/1 in LoggerJSON.Case.capture_log/2\n (ex_unit 1.16.3) lib/ex_unit/capture_io.ex:258: ExUnit.CaptureIO.do_with_io/3\n (ex_unit 1.16.3) lib/ex_unit/capture_io.ex:134: ExUnit.CaptureIO.capture_io/2\n (logger_json 6.0.2) test/support/logger_case.ex:15: LoggerJSON.Case.capture_log/2\n test/logger_json/formatters/elastic_test.exs:419: LoggerJSON.Formatters.ElasticTest.\"test logs caught errors\"/1\n (ex_unit 1.16.3) lib/ex_unit/runner.ex:472: ExUnit.Runner.exec_test/2\n (stdlib 5.2.3) timer.erl:270: :timer.tc/2\n (ex_unit 1.16.3) lib/ex_unit/runner.ex:394: anonymous fn/6 in ExUnit.Runner.spawn_test_monitor/4\n", 119 | "error.type" => "Elixir.RuntimeError", 120 | "log.level" => "error", 121 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 122 | "log.origin" => %{ 123 | "file.line" => 423, 124 | "file.name" => "/app/logger_json/test/logger_json/formatters/elastic_test.exs", 125 | "function" => "test logs caught errors/1" 126 | }, 127 | "message" => "Something went wrong" 128 | } 129 | ``` 130 | 131 | """ 132 | import LoggerJSON.Formatter.{MapBuilder, DateTime, Message, Metadata, RedactorEncoder} 133 | alias LoggerJSON.Formatter 134 | 135 | @behaviour Formatter 136 | 137 | @ecs_version "8.11.0" 138 | 139 | @encoder Formatter.encoder() 140 | 141 | @processed_metadata_keys 
~w[file line mfa domain error_logger 142 | otel_span_id span_id 143 | otel_trace_id trace_id 144 | conn]a 145 | 146 | @impl Formatter 147 | def new(opts \\ []) do 148 | {__MODULE__, config(opts)} 149 | end 150 | 151 | defp config(%{} = map), do: map 152 | 153 | defp config(opts) do 154 | opts = Keyword.new(opts) 155 | encoder_opts = Keyword.get_lazy(opts, :encoder_opts, &Formatter.default_encoder_opts/0) 156 | metadata_keys_or_selector = Keyword.get(opts, :metadata, []) 157 | metadata_selector = update_metadata_selector(metadata_keys_or_selector, @processed_metadata_keys) 158 | redactors = Keyword.get(opts, :redactors, []) 159 | %{encoder_opts: encoder_opts, metadata: metadata_selector, redactors: redactors} 160 | end 161 | 162 | @impl Formatter 163 | def format(%{level: level, meta: meta, msg: msg}, config_or_opts) do 164 | %{ 165 | encoder_opts: encoder_opts, 166 | metadata: metadata_selector, 167 | redactors: redactors 168 | } = config(config_or_opts) 169 | 170 | message = 171 | format_message(msg, meta, %{ 172 | binary: &format_binary_message/1, 173 | structured: &format_structured_message/1, 174 | crash: &format_crash_reason(&1, &2, meta) 175 | }) 176 | 177 | line = 178 | %{ 179 | "@timestamp": utc_time(meta), 180 | "log.level": Atom.to_string(level), 181 | "ecs.version": @ecs_version 182 | } 183 | |> maybe_merge(encode(message, redactors)) 184 | |> maybe_merge(encode(take_metadata(meta, metadata_selector), redactors)) 185 | |> maybe_merge(format_logger_fields(meta)) 186 | |> maybe_merge(format_http_request(meta)) 187 | |> maybe_put(:"span.id", format_span_id(meta)) 188 | |> maybe_put(:"trace.id", format_trace_id(meta)) 189 | |> @encoder.encode_to_iodata!(encoder_opts) 190 | 191 | [line, "\n"] 192 | end 193 | 194 | @doc false 195 | def format_binary_message(binary) do 196 | %{message: IO.chardata_to_string(binary)} 197 | end 198 | 199 | @doc false 200 | def format_structured_message(map) when is_map(map) do 201 | map 202 | end 203 | 204 | def 
format_structured_message(keyword) do 205 | Enum.into(keyword, %{}) 206 | end 207 | 208 | @doc false 209 | def format_crash_reason(message, {{:EXIT, pid}, reason}, _meta) do 210 | stacktrace = Exception.format_banner({:EXIT, pid}, reason, []) 211 | error_message = "process #{inspect(pid)} exit: #{inspect(reason)}" 212 | format_error_fields(message, error_message, stacktrace, "exit") 213 | end 214 | 215 | def format_crash_reason(message, {:exit, reason}, _meta) do 216 | stacktrace = Exception.format_banner(:exit, reason, []) 217 | error_message = "exit: #{inspect(reason)}" 218 | format_error_fields(message, error_message, stacktrace, "exit") 219 | end 220 | 221 | def format_crash_reason(message, {:throw, reason}, _meta) do 222 | stacktrace = Exception.format_banner(:throw, reason, []) 223 | error_message = "throw: #{inspect(reason)}" 224 | format_error_fields(message, error_message, stacktrace, "throw") 225 | end 226 | 227 | def format_crash_reason(message, {%type{} = exception, stacktrace}, _meta) do 228 | formatted_stacktrace = 229 | [ 230 | Exception.format_banner(:error, exception, stacktrace), 231 | Exception.format_stacktrace(stacktrace) 232 | ] 233 | |> Enum.join("\n") 234 | 235 | format_error_fields(message, Exception.message(exception), formatted_stacktrace, type) 236 | |> maybe_put(:"error.id", get_exception_id(exception)) 237 | |> maybe_put(:"error.code", get_exception_code(exception)) 238 | end 239 | 240 | def format_crash_reason(message, {{{%type{} = exception, _}, _}, stacktrace}, _meta) do 241 | formatted_stacktrace = 242 | [ 243 | Exception.format_banner(:error, exception, stacktrace), 244 | Exception.format_stacktrace(stacktrace) 245 | ] 246 | |> Enum.join("\n") 247 | 248 | format_error_fields(message, Exception.message(exception), formatted_stacktrace, type) 249 | |> maybe_put(:"error.id", get_exception_id(exception)) 250 | |> maybe_put(:"error.code", get_exception_code(exception)) 251 | end 252 | 253 | def format_crash_reason(message, {error, 
reason}, _meta) when is_atom(error) or is_binary(error) do 254 | stacktrace = "** (#{error}) #{inspect(reason)}" 255 | error_message = "#{error}: #{inspect(reason)}" 256 | format_error_fields(message, error_message, stacktrace, error) 257 | end 258 | 259 | def format_crash_reason(message, other, _meta) do 260 | format_error_fields(message, inspect(other), nil, nil) 261 | end 262 | 263 | defp get_exception_id(%{id: id}), do: id 264 | defp get_exception_id(_), do: nil 265 | 266 | defp get_exception_code(%{code: code}), do: code 267 | defp get_exception_code(_), do: nil 268 | 269 | # Formats the error fields as specified in https://www.elastic.co/guide/en/ecs/8.11/ecs-error.html 270 | defp format_error_fields(message, error_message, stacktrace, type) do 271 | %{ 272 | message: safe_chardata_to_string(message), 273 | "error.message": error_message 274 | } 275 | |> maybe_put(:"error.stack_trace", stacktrace) 276 | |> maybe_put(:"error.type", type) 277 | end 278 | 279 | # Formats the log.logger and log.origin fields as specified in https://www.elastic.co/guide/en/ecs/8.11/ecs-log.html 280 | defp format_logger_fields(%{file: file, line: line, mfa: {module, function, arity}}) do 281 | %{ 282 | "log.logger": module, 283 | "log.origin": %{ 284 | "file.name": to_string(file), 285 | "file.line": line, 286 | function: "#{function}/#{arity}" 287 | } 288 | } 289 | end 290 | 291 | defp format_logger_fields(_meta), do: nil 292 | 293 | if Code.ensure_loaded?(Plug.Conn) do 294 | # See the formats for the following fields in ECS: 295 | # - client.ip: https://www.elastic.co/guide/en/ecs/8.11/ecs-client.html 296 | # - http.*: https://www.elastic.co/guide/en/ecs/8.11/ecs-http.html 297 | # - url.path: https://www.elastic.co/guide/en/ecs/8.11/ecs-url.html 298 | # - user_agent.original: https://www.elastic.co/guide/en/ecs/8.11/ecs-user_agent.html 299 | # - event.duration (note: ns, not μs): https://www.elastic.co/guide/en/ecs/current/ecs-event.html#field-event-duration 300 | defp 
format_http_request(%{conn: %Plug.Conn{} = conn, duration_us: duration_us}) do 301 | %{ 302 | "client.ip": Formatter.Plug.remote_ip(conn), 303 | "http.version": Plug.Conn.get_http_protocol(conn), 304 | "http.request.method": conn.method, 305 | "http.request.referrer": Formatter.Plug.get_header(conn, "referer"), 306 | "http.response.status_code": conn.status, 307 | "url.path": conn.request_path, 308 | "user_agent.original": Formatter.Plug.get_header(conn, "user-agent") 309 | } 310 | |> maybe_put(:"event.duration", to_nanosecs(duration_us)) 311 | end 312 | 313 | defp format_http_request(%{conn: %Plug.Conn{} = conn}), do: format_http_request(%{conn: conn, duration_us: nil}) 314 | end 315 | 316 | defp format_http_request(_meta), do: nil 317 | 318 | defp format_span_id(%{otel_span_id: otel_span_id}), do: safe_chardata_to_string(otel_span_id) 319 | defp format_span_id(%{span_id: span_id}), do: span_id 320 | defp format_span_id(_meta), do: nil 321 | 322 | defp format_trace_id(%{otel_trace_id: otel_trace_id}), do: safe_chardata_to_string(otel_trace_id) 323 | defp format_trace_id(%{trace_id: trace_id}), do: trace_id 324 | defp format_trace_id(_meta), do: nil 325 | 326 | defp safe_chardata_to_string(chardata) when is_list(chardata) or is_binary(chardata) do 327 | IO.chardata_to_string(chardata) 328 | end 329 | 330 | defp safe_chardata_to_string(other), do: other 331 | 332 | if Code.ensure_loaded?(Plug.Conn) do 333 | defp to_nanosecs(duration_us) when is_number(duration_us), do: duration_us * 1000 334 | defp to_nanosecs(_), do: nil 335 | end 336 | end 337 | -------------------------------------------------------------------------------- /lib/logger_json/formatters/google_cloud.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatters.GoogleCloud do 2 | @moduledoc """ 3 | Custom Erlang's [`:logger` formatter](https://www.erlang.org/doc/apps/kernel/logger_chapter.html#formatters) which 4 | writes logs in a 
structured format that can be consumed by Google Cloud Logger.
 5 | 
 6 | Even though the log messages on Google Cloud use [LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry)
 7 | format, not all the fields are available in the structured payload. The fields that are available can be found in the
 8 | [special fields in structured payloads](https://cloud.google.com/logging/docs/agent/configuration#special_fields_in_structured_payloads).
 9 | 
10 | ## Formatter Configuration
11 | 
12 | The formatter can be configured with the following options:
13 | 
14 | * `:project_id` (optional) - the Google Cloud project ID. This is required for correctly logging OpenTelemetry trace and
15 | span IDs so that they can be linked to the correct trace in Google Cloud Trace. If it's not provided, the formatter will
16 | try to read it from the environment variables `GOOGLE_CLOUD_PROJECT`, `GOOGLE_PROJECT_ID` or `GCLOUD_PROJECT`. Please keep
17 | in mind that the environment will be read whenever you call the `new/1` function, so you should configure the formatter at runtime
18 | (in the `config/runtime.exs` or in your `application.ex`) if you rely on this feature.
19 | 
20 | * `:service_context` (optional) - a map with the following keys:
21 | * `:service` - the name of the service that is logging the message. Default: `node()`.
22 | * `:version` - the version of the service that is logging the message.
23 | 
24 | * `:reported_levels` (optional) - a list of log levels that should be reported as errors to Google Cloud Error Reporting.
25 | Default: `[:emergency, :alert, :critical, :error]`.
26 | 
27 | For list of shared options see "Shared options" in `LoggerJSON`.
28 | 
29 | ## Metadata
30 | 
31 | You can extend the log entry with some additional metadata:
32 | 
33 | * `user_id`, `identity_id`, `actor_id`, `account_id` (ordered by precedence) - the ID of the user that is performing the action.
34 | It will be included along with the error report for Google Cloud Error Reporting; 35 | 36 | For list of other well-known metadata keys see "Metadata" in `LoggerJSON`. 37 | 38 | ## Examples 39 | 40 | Regular message: 41 | 42 | %{ 43 | "logging.googleapis.com/operation" => %{"pid" => "#PID<0.228.0>"}, 44 | "logging.googleapis.com/sourceLocation" => %{ 45 | "file" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", 46 | "function" => "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs an LogEntry of a given level/1", 47 | "line" => 44 48 | }, 49 | "message" => %{"domain" => ["elixir"], "message" => "Hello"}, 50 | "severity" => "NOTICE", 51 | "time" => "2024-04-11T21:32:46.957Z" 52 | } 53 | 54 | Exception message that will be recognized by Google Cloud Error Reporting: 55 | 56 | %{ 57 | "httpRequest" => %{ 58 | "protocol" => "HTTP/1.1", 59 | "referer" => "http://www.example.com/", 60 | "remoteIp" => "", 61 | "requestMethod" => "PATCH", 62 | "requestUrl" => "http://www.example.com/", 63 | "status" => 503, 64 | "userAgent" => "Mozilla/5.0" 65 | }, 66 | "logging.googleapis.com/operation" => %{"pid" => "#PID<0.250.0>"}, 67 | "logging.googleapis.com/sourceLocation" => %{ 68 | "file" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", 69 | "function" => "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs exception http context/1", 70 | "line" => 301 71 | }, 72 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 73 | "context" => %{ 74 | "httpRequest" => %{ 75 | "protocol" => "HTTP/1.1", 76 | "referer" => "http://www.example.com/", 77 | "remoteIp" => "", 78 | "requestMethod" => "PATCH", 79 | "requestUrl" => "http://www.example.com/", 80 | "status" => 503, 81 | "userAgent" => "Mozilla/5.0" 82 | }, 83 | "reportLocation" => %{ 84 | "filePath" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", 85 | "functionName" => 
"Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs exception http context/1", 86 | "lineNumber" => 301 87 | } 88 | }, 89 | "domain" => ["elixir"], 90 | "message" => "Hello", 91 | "serviceContext" => %{"service" => "nonode@nohost"}, 92 | "stack_trace" => "** (EXIT from #PID<0.250.0>) :foo", 93 | "severity" => "DEBUG", 94 | "time" => "2024-04-11T21:34:53.503Z" 95 | } 96 | """ 97 | import LoggerJSON.Formatter.{MapBuilder, DateTime, Message, Metadata, Code, RedactorEncoder} 98 | require LoggerJSON.Formatter, as: Formatter 99 | 100 | @behaviour Formatter 101 | 102 | @encoder Formatter.encoder() 103 | 104 | @processed_metadata_keys ~w[pid file line mfa 105 | otel_span_id span_id 106 | otel_trace_id trace_id 107 | conn]a 108 | 109 | @default_levels_reported_as_errors ~w[emergency alert critical error]a 110 | 111 | @impl Formatter 112 | def new(opts \\ []) do 113 | {__MODULE__, config(opts)} 114 | end 115 | 116 | defp config(%{} = map), do: map 117 | 118 | defp config(opts) do 119 | opts = Keyword.new(opts) 120 | encoder_opts = Keyword.get_lazy(opts, :encoder_opts, &Formatter.default_encoder_opts/0) 121 | redactors = Keyword.get(opts, :redactors, []) 122 | service_context = Keyword.get_lazy(opts, :service_context, fn -> %{service: to_string(node())} end) 123 | project_id = Keyword.get_lazy(opts, :project_id, &get_default_project_id/0) 124 | metadata_keys_or_selector = Keyword.get(opts, :metadata, []) 125 | metadata_selector = update_metadata_selector(metadata_keys_or_selector, @processed_metadata_keys) 126 | reported_levels = Keyword.get(opts, :reported_levels, @default_levels_reported_as_errors) 127 | 128 | %{ 129 | encoder_opts: encoder_opts, 130 | redactors: redactors, 131 | service_context: service_context, 132 | project_id: project_id, 133 | metadata: metadata_selector, 134 | reported_levels: reported_levels 135 | } 136 | end 137 | 138 | defp get_default_project_id do 139 | System.get_env("GOOGLE_CLOUD_PROJECT") || 140 | System.get_env("GOOGLE_PROJECT_ID") || 141 
| System.get_env("GCLOUD_PROJECT") 142 | end 143 | 144 | @impl Formatter 145 | def format(%{level: level, meta: meta, msg: msg}, config_or_opts) do 146 | %{ 147 | encoder_opts: encoder_opts, 148 | redactors: redactors, 149 | service_context: service_context, 150 | project_id: project_id, 151 | metadata: metadata_selector, 152 | reported_levels: reported_levels 153 | } = config(config_or_opts) 154 | 155 | message = 156 | format_message(msg, meta, %{ 157 | binary: &format_binary_message/1, 158 | structured: &format_structured_message/1, 159 | crash: &format_crash_reason(&1, &2, service_context, meta) 160 | }) 161 | 162 | metadata = 163 | take_metadata(meta, metadata_selector) 164 | 165 | line = 166 | %{ 167 | time: utc_time(meta), 168 | severity: log_level(level) 169 | } 170 | |> maybe_put(:"logging.googleapis.com/sourceLocation", format_source_location(meta)) 171 | |> maybe_put(:"logging.googleapis.com/operation", format_operation(meta)) 172 | |> maybe_put(:"logging.googleapis.com/spanId", format_span(meta, project_id)) 173 | |> maybe_put(:"logging.googleapis.com/trace", format_trace(meta, project_id)) 174 | |> maybe_put(:httpRequest, format_http_request(meta)) 175 | |> maybe_report_to_google_cloud_error_reporter(level, reported_levels) 176 | |> maybe_merge(encode(message, redactors)) 177 | |> maybe_merge(encode(metadata, redactors)) 178 | |> @encoder.encode_to_iodata!(encoder_opts) 179 | 180 | [line, "\n"] 181 | end 182 | 183 | defp maybe_report_to_google_cloud_error_reporter(map, level, reported_levels) do 184 | if level in reported_levels do 185 | Map.put(map, :"@type", "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent") 186 | else 187 | map 188 | end 189 | end 190 | 191 | defp log_level(:emergency), do: "EMERGENCY" 192 | defp log_level(:alert), do: "ALERT" 193 | defp log_level(:critical), do: "CRITICAL" 194 | defp log_level(:error), do: "ERROR" 195 | defp log_level(:warning), do: "WARNING" 196 | defp log_level(:notice), do: 
"NOTICE" 197 | defp log_level(:info), do: "INFO" 198 | defp log_level(:debug), do: "DEBUG" 199 | 200 | @doc false 201 | def format_binary_message(binary) do 202 | %{message: IO.chardata_to_string(binary)} 203 | end 204 | 205 | @doc false 206 | def format_structured_message(map) when is_map(map) do 207 | map 208 | end 209 | 210 | def format_structured_message(keyword) do 211 | Enum.into(keyword, %{}) 212 | end 213 | 214 | @doc false 215 | # https://cloud.google.com/error-reporting/docs/formatting-error-messages 216 | def format_crash_reason(binary, {{:EXIT, pid}, reason}, service_context, meta) do 217 | stacktrace = Exception.format_banner({:EXIT, pid}, reason, []) 218 | format_reported_error_event(binary, stacktrace, service_context, meta) 219 | end 220 | 221 | def format_crash_reason(binary, {:exit, reason}, service_context, meta) do 222 | stacktrace = Exception.format_banner(:exit, reason, []) 223 | format_reported_error_event(binary, stacktrace, service_context, meta) 224 | end 225 | 226 | def format_crash_reason(binary, {:throw, reason}, service_context, meta) do 227 | stacktrace = Exception.format_banner(:throw, reason, []) 228 | format_reported_error_event(binary, stacktrace, service_context, meta) 229 | end 230 | 231 | def format_crash_reason(_binary, {%{} = exception, stacktrace}, service_context, meta) do 232 | message = Exception.message(exception) 233 | 234 | ruby_stacktrace = 235 | [ 236 | Exception.format_banner(:error, exception, stacktrace), 237 | format_stacktrace(stacktrace) 238 | ] 239 | |> Enum.join("\n") 240 | 241 | format_reported_error_event(message, ruby_stacktrace, service_context, meta) 242 | end 243 | 244 | def format_crash_reason(message, {{{%{} = exception, _}, _}, stacktrace}, service_context, meta) do 245 | ruby_stacktrace = 246 | [ 247 | Exception.format_banner(:error, exception, stacktrace), 248 | format_stacktrace(stacktrace) 249 | ] 250 | |> Enum.join("\n") 251 | 252 | format_reported_error_event(message, ruby_stacktrace, 
service_context, meta) 253 | end 254 | 255 | def format_crash_reason(binary, {error, reason}, service_context, meta) when is_atom(error) or is_binary(error) do 256 | stacktrace = "** (#{error}) #{inspect(reason)}" 257 | format_reported_error_event(binary, stacktrace, service_context, meta) 258 | end 259 | 260 | def format_crash_reason(binary, {error, reason}, service_context, meta) do 261 | format_crash_reason(binary, {inspect(error), reason}, service_context, meta) 262 | end 263 | 264 | def format_crash_reason(binary, _other, service_context, meta) do 265 | format_reported_error_event(binary, nil, service_context, meta) 266 | end 267 | 268 | defp format_reported_error_event(message, stacktrace, service_context, meta) do 269 | %{ 270 | "@type": "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 271 | stack_trace: stacktrace, 272 | message: IO.chardata_to_string(message), 273 | context: format_reported_error_event_context(meta), 274 | serviceContext: service_context 275 | } 276 | end 277 | 278 | # https://cloud.google.com/error-reporting/docs/formatting-error-messages#reported-error-example 279 | defp format_reported_error_event_context(meta) do 280 | %{} 281 | |> maybe_put(:reportLocation, format_crash_report_location(meta)) 282 | |> maybe_put(:httpRequest, format_http_request(meta)) 283 | |> maybe_put(:user, format_affected_user(meta)) 284 | end 285 | 286 | defp format_crash_report_location(%{file: file, line: line, mfa: {m, f, a}}) do 287 | %{ 288 | filePath: IO.chardata_to_string(file), 289 | lineNumber: line, 290 | functionName: format_function(m, f, a) 291 | } 292 | end 293 | 294 | defp format_crash_report_location(_meta), do: nil 295 | 296 | if Code.ensure_loaded?(Plug.Conn) do 297 | defp format_http_request(%{conn: %Plug.Conn{} = conn} = assigns) do 298 | request_method = conn.method |> to_string() |> String.upcase() 299 | request_url = Plug.Conn.request_url(conn) 300 | status = conn.status 301 | user_agent = 
Formatter.Plug.get_header(conn, "user-agent") 302 | remote_ip = Formatter.Plug.remote_ip(conn) 303 | referer = Formatter.Plug.get_header(conn, "referer") 304 | latency = http_request_latency(assigns) 305 | 306 | %{ 307 | protocol: Plug.Conn.get_http_protocol(conn), 308 | requestMethod: request_method, 309 | requestUrl: request_url, 310 | status: status, 311 | userAgent: user_agent, 312 | remoteIp: remote_ip, 313 | referer: referer, 314 | latency: latency 315 | } 316 | end 317 | end 318 | 319 | defp format_http_request(_meta), do: nil 320 | 321 | if Code.ensure_loaded?(Plug.Conn) do 322 | defp http_request_latency(%{duration_us: duration_us}) do 323 | duration_s = Float.round(duration_us / 1_000_000, 9) 324 | "#{duration_s}s" 325 | end 326 | 327 | defp http_request_latency(_assigns) do 328 | nil 329 | end 330 | end 331 | 332 | defp format_affected_user(%{user_id: user_id}), do: "user:" <> user_id 333 | defp format_affected_user(%{identity_id: identity_id}), do: "identity:" <> identity_id 334 | defp format_affected_user(%{actor_id: actor_id}), do: "actor:" <> actor_id 335 | defp format_affected_user(%{account_id: account_id}), do: "account:" <> account_id 336 | defp format_affected_user(_meta), do: nil 337 | 338 | defp format_stacktrace(stacktrace) do 339 | lines = 340 | Exception.format_stacktrace(stacktrace) 341 | |> String.trim_trailing() 342 | |> String.split("\n") 343 | |> Enum.map(&format_line/1) 344 | |> Enum.group_by(fn {kind, _line} -> kind end) 345 | 346 | lines = format_lines(:trace, lines[:trace]) ++ format_lines(:context, lines[:context]) ++ [""] 347 | 348 | Enum.join(lines, "\n") 349 | end 350 | 351 | defp format_line(line) do 352 | case Regex.run(~r/(.+)\:(\d+)\: (.*)/, line) do 353 | [_, file, line, function] -> 354 | {:trace, "#{file}:#{line}:in `#{function}'"} 355 | 356 | # There is no way how Exception.format_stacktrace/1 can return something 357 | # that does not match the clause above, but we keep this clause "just in case" 358 | # 
coveralls-ignore-next-line 359 | _ -> 360 | {:context, line} 361 | end 362 | end 363 | 364 | defp format_lines(_kind, nil) do 365 | [] 366 | end 367 | 368 | defp format_lines(:trace, lines) do 369 | Enum.map(lines, fn {:trace, line} -> line end) 370 | end 371 | 372 | # There is no way how Exception.format_stacktrace/1 can return context at the moment 373 | # coveralls-ignore-start 374 | defp format_lines(:context, lines) do 375 | ["Context:" | Enum.map(lines, fn {:context, line} -> line end)] 376 | end 377 | 378 | # coveralls-ignore-stop 379 | 380 | # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogEntryOperation 381 | defp format_operation(%{request_id: request_id, pid: pid}), do: %{id: request_id, producer: inspect(pid)} 382 | defp format_operation(%{pid: pid}), do: %{producer: inspect(pid)} 383 | 384 | # Erlang logger always has `pid` in the metadata but we keep this clause "just in case" 385 | # coveralls-ignore-next-line 386 | defp format_operation(_meta), do: nil 387 | 388 | # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogEntrySourceLocation 389 | defp format_source_location(%{file: file, line: line, mfa: {m, f, a}}) do 390 | %{ 391 | file: IO.chardata_to_string(file), 392 | line: line, 393 | function: format_function(m, f, a) 394 | } 395 | end 396 | 397 | defp format_source_location(_meta), 398 | do: nil 399 | 400 | defp format_span(%{otel_span_id: otel_span_id}, _project_id_or_nil), 401 | do: safe_chardata_to_string(otel_span_id) 402 | 403 | defp format_span(%{span_id: span_id}, _project_id_or_nil), 404 | do: span_id 405 | 406 | defp format_span(_meta, _project_id_or_nil), 407 | do: nil 408 | 409 | defp format_trace(%{otel_trace_id: otel_trace_id}, nil), 410 | do: safe_chardata_to_string(otel_trace_id) 411 | 412 | defp format_trace(%{otel_trace_id: otel_trace_id}, project_id), 413 | do: "projects/#{project_id}/traces/#{safe_chardata_to_string(otel_trace_id)}" 414 | 415 | defp format_trace(%{trace_id: 
trace_id}, _project_id_or_nil), 416 | do: trace_id 417 | 418 | defp format_trace(_meta, _project_id_or_nil), 419 | do: nil 420 | 421 | defp safe_chardata_to_string(chardata) when is_list(chardata) or is_binary(chardata) do 422 | IO.chardata_to_string(chardata) 423 | end 424 | 425 | defp safe_chardata_to_string(other), do: other 426 | end 427 | -------------------------------------------------------------------------------- /lib/logger_json/plug.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Plug) and Code.ensure_loaded?(:telemetry) do 2 | defmodule LoggerJSON.Plug do 3 | @moduledoc """ 4 | A telemetry handler that logs request information in JSON format. 5 | 6 | Please keep in mind that logging all requests may have a performance impact on your application, 7 | it's not recommended to use this module in high-throughput production environments. 8 | """ 9 | require Logger 10 | 11 | @doc """ 12 | Attaches the telemetry handler to the given event. 13 | 14 | ### Available options 15 | 16 | * `:level` - log level which is used to log requests. Defaults to `:info`. 17 | 18 | ### Dynamic log level 19 | 20 | In some cases you may wish to set the log level dynamically 21 | on a per-query basis. To do so, set the `:level` option to 22 | a tuple, `{Mod, Fun, Args}`. The query and map of time measures 23 | will be prepended to the provided list of arguments. 24 | 25 | When invoked, your function must return a 26 | [`Logger.level()`](`t:Logger.level()/0`) or `false` to 27 | disable logging for the request. 
28 | 29 | ### Examples 30 | 31 | Attaching the telemetry handler to the `MyApp.Repo` events with the `:info` log level: 32 | 33 | # in the endpoint 34 | plug Plug.Telemetry, event_prefix: [:myapp, :plug] 35 | 36 | # in your application.ex 37 | LoggerJSON.Plug.attach("logger-json-requests", [:myapp, :plug, :stop], :info) 38 | 39 | To make plug broadcast those events see [`Plug.Telemetry`](https://hexdocs.pm/plug/Plug.Telemetry.html) documentation. 40 | 41 | You can also attach to the `[:phoenix, :endpoint, :stop]` event to log request latency from Phoenix endpoints: 42 | 43 | LoggerJSON.Plug.attach("logger-json-phoenix-requests", [:phoenix, :endpoint, :stop], :info) 44 | """ 45 | def attach(name, event, level) do 46 | :telemetry.attach(name, event, &__MODULE__.telemetry_logging_handler/4, level) 47 | end 48 | 49 | @doc """ 50 | A telemetry handler that logs requests in a structured format. 51 | """ 52 | @spec telemetry_logging_handler( 53 | event_name :: [atom()], 54 | query_time :: %{duration: non_neg_integer()}, 55 | metadata :: %{conn: Plug.Conn.t()}, 56 | level :: Logger.level() | {module :: module(), function :: atom(), arguments :: [term()]} | false 57 | ) :: :ok 58 | def telemetry_logging_handler(_event_name, %{duration: duration}, %{conn: conn}, level) do 59 | duration = System.convert_time_unit(duration, :native, :microsecond) 60 | 61 | if level = level(level, conn) do 62 | Logger.log( 63 | level, 64 | fn -> 65 | %{ 66 | method: method, 67 | request_path: request_path, 68 | state: state, 69 | status: status 70 | } = conn 71 | 72 | [ 73 | method, 74 | ?\s, 75 | request_path, 76 | ?\s, 77 | "[", 78 | connection_type(state), 79 | ?\s, 80 | status(status), 81 | "in ", 82 | duration(duration), 83 | "]" 84 | ] 85 | end, 86 | conn: conn, 87 | duration_us: duration 88 | ) 89 | else 90 | :ok 91 | end 92 | end 93 | 94 | defp connection_type(:set_chunked), do: "Chunked" 95 | defp connection_type(_), do: "Sent" 96 | 97 | defp status(nil), do: "" 98 | defp 
status(status), do: [status |> Plug.Conn.Status.code() |> Integer.to_string(), ?\s] 99 | 100 | defp duration(duration) do 101 | if duration > 1000 do 102 | [duration |> div(1000) |> Integer.to_string(), "ms"] 103 | else 104 | [Integer.to_string(duration), "us"] 105 | end 106 | end 107 | 108 | defp level({m, f, a}, conn), do: apply(m, f, [conn | a]) 109 | defp level(level, _conn) when is_atom(level), do: level 110 | end 111 | end 112 | -------------------------------------------------------------------------------- /lib/logger_json/redactor.ex: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Redactor do 2 | @moduledoc """ 3 | This module provides a behaviour which allows to redact sensitive information from logs. 4 | 5 | *Note*: redactor will not be applied on `Jason.Fragment` structs if the encoder is `Jason`. 6 | For more information about encoding and redacting see `LoggerJSON.Formatter.RedactorEncoder.encode/2`. 7 | """ 8 | 9 | @doc """ 10 | Initializes a new redactor configuration. 11 | 12 | ## Compile‑time vs. Runtime Configuration 13 | 14 | This function can’t be used in `config.exs` because that file is evaluated 15 | before your application modules are compiled and loaded, so `new/1` isn’t defined yet. 16 | You can only call it in `config/runtime.exs` or from your application code. 17 | 18 | If you must set up the redactor in `config.exs`, use the tuple format: 19 | the first element is the module implementing `LoggerJSON.Redactor`, 20 | and the second is the options passed to `new/1`. For example: 21 | 22 | config :logger, :default_handler, 23 | formatter: {LoggerJSON.Formatters.Basic, redactors: [ 24 | {MyRedactor, [option1: :value1]} 25 | ]} 26 | 27 | Note that tuple‑based configs are resolved for each log entry, 28 | which can increase logging overhead. 29 | """ 30 | @callback new(opts :: term()) :: {module(), term()} 31 | 32 | @doc """ 33 | Takes a key and a value and returns a redacted value. 

  This callback will be applied on key-value pairs, like elements of structs, maps or keyword lists.
  """
  @callback redact(key :: String.t(), value :: term(), opts :: term()) :: term()

  # TODO: Make it required in a future version
  @optional_callbacks new: 1
end
-------------------------------------------------------------------------------- /lib/logger_json/redactors/redact_keys.ex: --------------------------------------------------------------------------------
defmodule LoggerJSON.Redactors.RedactKeys do
  @moduledoc """
  A simple redactor which replaces the values of the given keys with `"[REDACTED]"`.

  It takes a list of keys to redact as an argument, eg.:
  ```elixir
  config :logger, :default_handler,
    formatter:
      LoggerJSON.Formatters.Basic.new(
        redactors: [
          LoggerJSON.Redactors.RedactKeys.new(["password"])
        ]
      )
  ```

  Keep in mind that the key will be converted to binary before sending it to the redactor.
  """

  @behaviour LoggerJSON.Redactor

  @impl LoggerJSON.Redactor
  # Returns the `{module, opts}` tuple consumed by the formatter configuration,
  # where `opts` is the list of keys to redact.
  def new(keys) do
    {__MODULE__, keys}
  end

  @impl LoggerJSON.Redactor
  # Replaces the value with "[REDACTED]" when the key is in the configured list;
  # otherwise the value is returned unchanged.
  def redact(key, value, keys) do
    if key in keys do
      "[REDACTED]"
    else
      value
    end
  end
end
-------------------------------------------------------------------------------- /mix.exs: --------------------------------------------------------------------------------
defmodule LoggerJSON.Mixfile do
  use Mix.Project

  @source_url "https://github.com/Nebo15/logger_json"
  @version "7.0.3"

  def project do
    [
      app: :logger_json,
      version: @version,
      elixir: "~> 1.16 or ~> 1.15.1",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [] ++ Mix.compilers(),
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      package: package(),
      deps: deps(),
      docs: docs(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [coveralls: :test, "coveralls.travis": :test, "coveralls.html": :test],
      dialyzer: [
        plt_add_apps: [:plug]
      ]
    ]
  end

  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Test builds also compile the support modules used by the test suite.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp deps do
    [
      # Runtime integrations are optional so consumers only pull what they use.
      {:jason, "~> 1.4", optional: true},
      {:plug, "~> 1.15", optional: true},
      {:decimal, ">= 0.0.0", optional: true},
      {:ecto, "~> 3.11", optional: true},
      {:telemetry, "~> 1.0", optional: true},
      {:benchee, "~> 1.3", only: [:dev, :test]},
      {:stream_data, "~> 1.0", only: [:dev, :test]},
      {:castore, "~> 1.0", only: [:dev, :test]},
      {:excoveralls, ">= 0.15.0", only: [:dev, :test]},
      {:junit_formatter, "~> 3.3", only: [:test]},
      {:ex_doc, ">= 0.15.0", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.4.0", only: [:dev], runtime: false}
    ]
  end

53 | defp package do 54 | [ 55 | description: """ 56 | This package includes a set of :logger formatters designed to output logs in JSON format. 57 | It is compatible with a variety of log management systems that support JSON, 58 | including Google Cloud Logging and Error Reporting, Datadog, ElasticSearch, LogStash, FileBeat, and Kibana. 59 | """, 60 | contributors: ["Andrew Dryga"], 61 | maintainers: ["Andrew Dryga"], 62 | licenses: ["MIT"], 63 | files: ~w(lib LICENSE.md mix.exs README.md), 64 | links: %{ 65 | Changelog: "https://github.com/Nebo15/logger_json/releases", 66 | GitHub: @source_url 67 | } 68 | ] 69 | end 70 | 71 | defp docs do 72 | [ 73 | extras: [ 74 | "LICENSE.md": [title: "License"], 75 | "README.md": [title: "Overview"] 76 | ], 77 | main: "readme", 78 | source_url: @source_url, 79 | source_ref: @version, 80 | formatters: ["html"] 81 | ] 82 | end 83 | end 84 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "benchee": {:hex, :benchee, "1.3.1", "c786e6a76321121a44229dde3988fc772bca73ea75170a73fd5f4ddf1af95ccf", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "76224c58ea1d0391c8309a8ecbfe27d71062878f59bd41a390266bf4ac1cc56d"}, 3 | "castore": {:hex, :castore, "1.0.12", "053f0e32700cbec356280c0e835df425a3be4bc1e0627b714330ad9d0f05497f", [:mix], [], "hexpm", "3dca286b2186055ba0c9449b4e95b97bf1b57b47c1f2644555879e659960c224"}, 4 | "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"}, 5 | "deep_merge": {:hex, :deep_merge, "1.0.0", 
"b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, 6 | "dialyxir": {:hex, :dialyxir, "1.4.5", "ca1571ac18e0f88d4ab245f0b60fa31ff1b12cbae2b11bd25d207f865e8ae78a", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b0fb08bb8107c750db5c0b324fa2df5ceaa0f9307690ee3c1f6ba5b9eb5d35c3"}, 7 | "earmark_parser": {:hex, :earmark_parser, "1.4.44", "f20830dd6b5c77afe2b063777ddbbff09f9759396500cdbe7523efd58d7a339c", [:mix], [], "hexpm", "4778ac752b4701a5599215f7030989c989ffdc4f6df457c5f36938cc2d2a2750"}, 8 | "ecto": {:hex, :ecto, "3.12.5", "4a312960ce612e17337e7cefcf9be45b95a3be6b36b6f94dfb3d8c361d631866", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6eb18e80bef8bb57e17f5a7f068a1719fbda384d40fc37acb8eb8aeca493b6ea"}, 9 | "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, 10 | "ex_doc": {:hex, :ex_doc, "0.37.3", "f7816881a443cd77872b7d6118e8a55f547f49903aef8747dbcb345a75b462f9", [:mix], [{:earmark_parser, "~> 1.4.42", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "e6aebca7156e7c29b5da4daa17f6361205b2ae5f26e5c7d8ca0d3f7e18972233"}, 11 | "excoveralls": {:hex, :excoveralls, "0.18.5", "e229d0a65982613332ec30f07940038fe451a2e5b29bce2a5022165f0c9b157e", 
[:mix], [{:castore, "~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "523fe8a15603f86d64852aab2abe8ddbd78e68579c8525ae765facc5eae01562"}, 12 | "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, 13 | "junit_formatter": {:hex, :junit_formatter, "3.4.0", "d0e8db6c34dab6d3c4154c3b46b21540db1109ae709d6cf99ba7e7a2ce4b1ac2", [:mix], [], "hexpm", "bb36e2ae83f1ced6ab931c4ce51dd3dbef1ef61bb4932412e173b0cfa259dacd"}, 14 | "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, 15 | "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"}, 16 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"}, 17 | "mime": {:hex, :mime, "2.0.6", "8f18486773d9b15f95f4f4f1e39b710045fa1de891fada4516559967276e4dc2", [:mix], [], "hexpm", "c9945363a6b26d747389aac3643f8e0e09d30499a138ad64fe8fd1d13d9b153e"}, 18 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", 
"4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, 19 | "plug": {:hex, :plug, "1.17.0", "a0832e7af4ae0f4819e0c08dd2e7482364937aea6a8a997a679f2cbb7e026b2e", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f6692046652a69a00a5a21d0b7e11fcf401064839d59d6b8787f23af55b1e6bc"}, 20 | "plug_crypto": {:hex, :plug_crypto, "2.1.1", "19bda8184399cb24afa10be734f84a16ea0a2bc65054e23a62bb10f06bc89491", [:mix], [], "hexpm", "6470bce6ffe41c8bd497612ffde1a7e4af67f36a15eea5f921af71cf3e11247c"}, 21 | "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"}, 22 | "stream_data": {:hex, :stream_data, "1.2.0", "58dd3f9e88afe27dc38bef26fce0c84a9e7a96772b2925c7b32cd2435697a52b", [:mix], [], "hexpm", "eb5c546ee3466920314643edf68943a5b14b32d1da9fe01698dc92b73f89a9ed"}, 23 | "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, 24 | } 25 | -------------------------------------------------------------------------------- /test/logger_json/ecto_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.EctoTest do 2 | use LoggerJSON.Case, async: false 3 | import LoggerJSON.Ecto 4 | require Logger 5 | 6 | setup do 7 | formatter = LoggerJSON.Formatters.Basic.new(metadata: :all) 8 | :logger.update_handler_config(:default, :formatter, formatter) 9 | end 10 | 11 | describe "attach/3" do 12 | test "attaches a telemetry handler" do 13 | assert attach( 14 | "logger-json-queries", 15 | [:my_app, :repo, :query], 16 | 
:info 17 | ) == :ok 18 | 19 | assert [ 20 | %{ 21 | function: _function, 22 | id: "logger-json-queries", 23 | config: :info, 24 | event_name: [:my_app, :repo, :query] 25 | } 26 | ] = :telemetry.list_handlers([:my_app, :repo, :query]) 27 | end 28 | end 29 | 30 | describe "telemetry_logging_handler/4" do 31 | test "logs ecto queries received via telemetry event" do 32 | log = 33 | capture_log(fn -> 34 | telemetry_logging_handler( 35 | [:repo, :query], 36 | %{query_time: 2_930_000, queue_time: 106_000, total_time: 3_036_000}, 37 | %{ 38 | params: [], 39 | query: "begin", 40 | repo: Repo, 41 | result: 42 | {:ok, 43 | %{ 44 | columns: nil, 45 | command: :savepoint, 46 | connection_id: 26925, 47 | messages: [], 48 | num_rows: nil, 49 | rows: nil 50 | }}, 51 | source: nil, 52 | type: :ecto_sql_query 53 | }, 54 | :info 55 | ) 56 | 57 | Logger.flush() 58 | end) 59 | 60 | assert %{ 61 | "message" => "begin", 62 | "metadata" => %{ 63 | "query" => %{ 64 | "decode_time_us" => 0, 65 | "execution_time_us" => 2930, 66 | "latency_us" => 3036, 67 | "queue_time_us" => 106, 68 | "repo" => "Repo" 69 | } 70 | } 71 | } = decode_or_print_error(log) 72 | end 73 | 74 | test "allows disabling logging at runtime" do 75 | log = 76 | capture_log(fn -> 77 | telemetry_logging_handler( 78 | [:repo, :query], 79 | %{query_time: 2_930_000, queue_time: 106_000, total_time: 3_036_000}, 80 | %{ 81 | params: [], 82 | query: "begin", 83 | repo: Repo, 84 | result: 85 | {:ok, 86 | %{ 87 | columns: nil, 88 | command: :savepoint, 89 | connection_id: 26925, 90 | messages: [], 91 | num_rows: nil, 92 | rows: nil 93 | }}, 94 | source: nil, 95 | type: :ecto_sql_query 96 | }, 97 | {__MODULE__, :ignore_log, [:arg]} 98 | ) 99 | 100 | Logger.flush() 101 | end) 102 | 103 | assert log == "" 104 | end 105 | end 106 | 107 | def ignore_log(_query, _time, :arg), do: false 108 | end 109 | -------------------------------------------------------------------------------- /test/logger_json/formatter/code_test.exs: 
-------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.CodeTest do 2 | use ExUnit.Case, async: true 3 | import LoggerJSON.Formatter.Code 4 | 5 | describe "format_function/2" do 6 | test "returns the function name" do 7 | assert format_function(nil, "function") == "function" 8 | end 9 | 10 | test "returns the module and function name" do 11 | assert format_function("module", "function") == "module.function" 12 | end 13 | end 14 | 15 | describe "format_function/3" do 16 | test "returns the module, function name, and arity" do 17 | assert format_function("module", "function", 1) == "module.function/1" 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /test/logger_json/formatter/message_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.MessageTest do 2 | use ExUnit.Case, async: true 3 | import LoggerJSON.Formatter.Message 4 | 5 | describe "format_message/3" do 6 | setup do 7 | # Define mock formatters 8 | binary_fmt = fn data -> "Binary: #{data}" end 9 | structured_fmt = fn data -> "Structured: #{data |> Enum.sort() |> inspect()}" end 10 | crash_fmt = fn message, reason -> "Crash: #{message} - #{reason}" end 11 | 12 | {:ok, formatters: %{binary: binary_fmt, structured: structured_fmt, crash: crash_fmt}} 13 | end 14 | 15 | test "formats crash messages correctly", %{formatters: formatters} do 16 | message = {:string, "Error occurred"} 17 | meta = %{crash_reason: "something went wrong"} 18 | assert format_message(message, meta, %{crash: formatters.crash}) == "Crash: Error occurred - something went wrong" 19 | end 20 | 21 | test "formats binary messages correctly", %{formatters: formatters} do 22 | message = {:string, "Hello, world!"} 23 | meta = %{} 24 | assert format_message(message, meta, %{binary: formatters.binary}) == "Binary: Hello, world!" 
25 | end 26 | 27 | test "formats structured messages without callback correctly", %{formatters: formatters} do 28 | message = {:report, %{id: 1, content: "Report data"}} 29 | meta = %{} 30 | 31 | assert format_message(message, meta, %{structured: formatters.structured}) == 32 | ~s|Structured: [content: "Report data", id: 1]| 33 | end 34 | 35 | test "formats reports with custom callbacks altering the data", %{formatters: formatters} do 36 | callback = fn data -> {:string, "Altered: #{data.content}"} end 37 | message = {:report, %{content: "Original"}} 38 | meta = %{report_cb: callback} 39 | assert format_message(message, meta, formatters) == "Binary: Altered: Original" 40 | end 41 | 42 | test "formats reports with callbacks for binary formatting", %{formatters: formatters} do 43 | callback = fn data, _opts -> "Processed: #{data.content}" end 44 | message = {:report, %{content: "Needs processing"}} 45 | meta = %{report_cb: callback} 46 | 47 | assert format_message(message, meta, %{binary: formatters.binary, structured: formatters.structured}) == 48 | "Binary: Processed: Needs processing" 49 | end 50 | 51 | test "formats report with default behavior", %{formatters: formatters} do 52 | message = {:report, %{id: 2, content: "Another report"}} 53 | meta = %{report_cb: &:logger.format_otp_report/1} 54 | assert format_message(message, meta, formatters) == ~s|Structured: [content: "Another report", id: 2]| 55 | end 56 | 57 | test "formats general message using Logger.Utils.scan_inspect", %{formatters: formatters} do 58 | message = {:string, "Message"} 59 | meta = %{} 60 | assert format_message(message, meta, %{binary: formatters.binary}) == "Binary: Message" 61 | end 62 | 63 | test "formats reports with complex callback and binary formatting", %{formatters: formatters} do 64 | message = {~c"~p", [1]} 65 | meta = %{report_cb: nil} 66 | 67 | assert format_message(message, meta, %{binary: formatters.binary, structured: formatters.structured}) == 68 | "Binary: 1" 69 | end 70 | 
end 71 | end 72 | -------------------------------------------------------------------------------- /test/logger_json/formatter/metadata_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.MetadataTest do 2 | use ExUnit.Case, async: true 3 | import LoggerJSON.Formatter.Metadata 4 | 5 | describe "update_metadata_selector/2" do 6 | # test this 7 | # def update_metadata_selector({:from_application_env, {app, module}, path}, processed_keys) do 8 | # Application.fetch_env!(app, module) 9 | # |> get_in(path) 10 | # |> update_metadata_selector(processed_keys) 11 | # end 12 | 13 | # def update_metadata_selector({:from_application_env, {app, module}}, processed_keys) do 14 | # Application.fetch_env!(app, module) 15 | # |> update_metadata_selector(processed_keys) 16 | # end 17 | 18 | # def update_metadata_selector({:from_application_env, other}, _processed_keys) do 19 | # raise """ 20 | # Invalid value for `:metadata` option: `{:from_application_env, #{inspect(other)}}`. 21 | 22 | # The value must be a tuple with the application and module name, 23 | # and an optional path to the metadata option. 
24 | 25 | # Eg.: `{:from_application_env, {:logger, :default_formatter}, [:metadata]}` 26 | # """ 27 | # end 28 | 29 | test "takes metadata from application env" do 30 | Application.put_env(:logger_json, :test_metadata_key, [:foo]) 31 | 32 | assert update_metadata_selector({:from_application_env, {:logger_json, :test_metadata_key}}, []) == 33 | [:foo] 34 | 35 | Application.put_env(:logger_json, :test_metadata_key, %{metadata: [:foo]}) 36 | 37 | assert update_metadata_selector({:from_application_env, {:logger_json, :test_metadata_key}, [:metadata]}, []) == 38 | [:foo] 39 | end 40 | 41 | test "raises if metadata is not a tuple with the application and module name" do 42 | message = ~r/Invalid value for `:metadata` option: `{:from_application_env, :foo}`./ 43 | 44 | assert_raise ArgumentError, message, fn -> 45 | update_metadata_selector({:from_application_env, :foo}, []) 46 | end 47 | end 48 | 49 | test "takes metadata :all rule and updates it to exclude the given keys" do 50 | assert update_metadata_selector(:all, [:ansi_color]) == {:all_except, [:ansi_color]} 51 | end 52 | 53 | test "takes metadata :all_except rule and returns a map with the given keys" do 54 | assert update_metadata_selector({:all_except, [:foo, :bar]}, [:bar, :buz]) == 55 | {:all_except, [:foo, :bar, :bar, :buz]} 56 | end 57 | 58 | test "takes metadata keys and returns a map with the given keys" do 59 | assert update_metadata_selector([:foo, :bar], [:bar, :fiz]) == [:foo] 60 | end 61 | end 62 | 63 | describe "take_metadata/2" do 64 | test "takes metadata keys list and returns a map with the given keys" do 65 | meta = %{ 66 | foo: "foo", 67 | bar: "bar", 68 | fiz: "fiz" 69 | } 70 | 71 | assert take_metadata(meta, [:foo, :bar]) == %{foo: "foo", bar: "bar"} 72 | assert take_metadata(meta, []) == %{} 73 | end 74 | 75 | test "takes metadata :all_except rule and returns a map with all keys except listed ones" do 76 | meta = %{ 77 | foo: "foo", 78 | bar: "bar", 79 | fiz: "fiz" 80 | } 81 | 82 | assert 
take_metadata(meta, {:all_except, [:foo, :bar]}) == %{fiz: "fiz"} 83 | end 84 | 85 | test "takes metadata :all rule and returns a map with all keys" do 86 | meta = %{ 87 | foo: "foo", 88 | bar: "bar", 89 | fiz: "fiz" 90 | } 91 | 92 | assert take_metadata(meta, :all) == meta 93 | end 94 | 95 | test "does not return reserved keys" do 96 | meta = %{ 97 | ansi_color: "ansi_color", 98 | initial_call: "initial_call", 99 | crash_reason: "crash_reason", 100 | pid: "pid", 101 | gl: "gl", 102 | report_cb: "report_cb", 103 | time: "time" 104 | } 105 | 106 | assert take_metadata(meta, :all) == %{} 107 | assert take_metadata(meta, {:all_except, [:foo]}) == %{} 108 | end 109 | 110 | test "returns reserved keys if they are listed explicitly" do 111 | meta = %{ 112 | mfa: "mfa" 113 | } 114 | 115 | assert take_metadata(meta, [:mfa]) == %{mfa: "mfa"} 116 | end 117 | end 118 | end 119 | -------------------------------------------------------------------------------- /test/logger_json/formatter/redactor_encoder_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatter.RedactorEncoderTest do 2 | use LoggerJSON.Case, async: true 3 | use ExUnitProperties 4 | import LoggerJSON.Formatter.RedactorEncoder 5 | 6 | defmodule IDStruct, do: defstruct(id: nil) 7 | 8 | defmodule PasswordStruct, do: defstruct(password: "foo") 9 | 10 | @encoder LoggerJSON.Formatter.encoder() 11 | @redactors [LoggerJSON.Redactors.RedactKeys.new(["password"])] 12 | 13 | describe "encode/2" do 14 | test "allows nils" do 15 | assert encode(nil, @redactors) == nil 16 | end 17 | 18 | test "allows booleans" do 19 | assert encode(true, @redactors) == true 20 | assert encode(false, @redactors) == false 21 | end 22 | 23 | test "allows printable strings" do 24 | assert encode("hello", @redactors) == "hello" 25 | end 26 | 27 | test "inspects non-printable binaries" do 28 | assert encode("hello" <> <<0>>, @redactors) == "<<104, 101, 108, 108, 111, 0>>" 29 | end 
30 | 31 | test "allows atoms" do 32 | assert encode(:hello, @redactors) == :hello 33 | end 34 | 35 | test "allows numbers" do 36 | assert encode(123, @redactors) == 123 37 | end 38 | 39 | test "allows dates and times" do 40 | assert encode(~U[2024-01-01 00:00:00Z], @redactors) == ~U[2024-01-01 00:00:00Z] 41 | assert encode(~N[2024-01-01 00:00:00], @redactors) == ~N[2024-01-01 00:00:00] 42 | assert encode(~D[2024-01-01], @redactors) == ~D[2024-01-01] 43 | assert encode(~T[00:00:00], @redactors) == ~T[00:00:00] 44 | end 45 | 46 | test "allows decimals" do 47 | assert encode(Decimal.new("1.2"), @redactors) == Decimal.new("1.2") 48 | end 49 | 50 | test "strips Structs" do 51 | assert encode(%IDStruct{id: "hello"}, @redactors) == %{id: "hello"} 52 | end 53 | 54 | test "redacts values in structs" do 55 | assert encode(%PasswordStruct{password: "hello"}, @redactors) == %{password: "[REDACTED]"} 56 | end 57 | 58 | # Jason.Encoder or JSON.Encoder protocols can be used in many other scenarios, 59 | # like DB/API response serliazation, so it's better not to 60 | # assume that it's what the users expects to see in logs. 
61 | test "strips structs when encoder is derived for them" do 62 | assert encode(%NameStruct{name: "B"}, @redactors) == %{name: "B"} 63 | end 64 | 65 | test "converts tuples to lists" do 66 | assert encode({1, 2, 3}, @redactors) == [1, 2, 3] 67 | end 68 | 69 | test "converts nested tuples to nested lists" do 70 | assert encode({{2000, 1, 1}, {13, 30, 15}}, @redactors) == [[2000, 1, 1], [13, 30, 15]] 71 | end 72 | 73 | test "converts keyword lists to maps" do 74 | assert encode([a: 1, b: 2], @redactors) == %{a: 1, b: 2} 75 | end 76 | 77 | test "redacts values in keyword lists" do 78 | assert encode([password: "foo"], @redactors) == %{password: "[REDACTED]"} 79 | end 80 | 81 | test "redacts values in improper list tail" do 82 | assert encode([nil | %{password: "foo"}], @redactors) == "[nil | %{password: \"[REDACTED]\"}]" 83 | end 84 | 85 | test "converts non-string map keys" do 86 | assert encode(%{1 => 2}, []) == %{1 => 2} 87 | assert encode(%{:a => 1}, []) == %{:a => 1} 88 | assert encode(%{{"a", "b"} => 1}, []) == %{"{\"a\", \"b\"}" => 1} 89 | assert encode(%{%{a: 1, b: 2} => 3}, []) in [%{"%{a: 1, b: 2}" => 3}, %{"%{b: 2, a: 1}" => 3}] 90 | assert encode(%{[{:a, :b}] => 3}, []) == %{"[a: :b]" => 3} 91 | end 92 | 93 | test "redacts values in maps" do 94 | assert encode(%{password: "foo"}, @redactors) == %{password: "[REDACTED]"} 95 | end 96 | 97 | test "inspects functions" do 98 | assert encode(&encode/2, []) == "&LoggerJSON.Formatter.RedactorEncoder.encode/2" 99 | end 100 | 101 | test "inspects pids" do 102 | assert encode(self(), []) == inspect(self()) 103 | end 104 | 105 | test "inspects improper lists" do 106 | assert encode([1, [2, 3], 4 | 5], @redactors) == "[1, [2, 3], 4 | 5]" 107 | end 108 | 109 | test "inspects improper lists that start as keyword lists" do 110 | assert encode([{:foo, 1}, 2 | 3], @redactors) == "[[:foo, 1], 2 | 3]" 111 | end 112 | 113 | test "inspects improper keyword lists" do 114 | assert encode([{:foo, 1} | {:bar, 2}], @redactors) == 
"[[:foo, 1] | {:bar, 2}]" 115 | end 116 | 117 | test "doesn't choke on things that look like keyword lists but aren't" do 118 | assert encode([{:a, 1}, {:b, 2, :c}], []) == [[:a, 1], [:b, 2, :c]] 119 | end 120 | 121 | test "formats nested structures" do 122 | input = %{ 123 | foo: [ 124 | foo_a: %{"x" => 1, "y" => %IDStruct{id: 1}}, 125 | foo_b: [foo_b_1: 1, foo_b_2: {"2a", "2b"}] 126 | ], 127 | self: self() 128 | } 129 | 130 | assert encode(input, []) == %{ 131 | foo: %{ 132 | foo_a: %{"x" => 1, "y" => %{id: 1}}, 133 | foo_b: %{foo_b_1: 1, foo_b_2: ["2a", "2b"]} 134 | }, 135 | self: inspect(self()) 136 | } 137 | end 138 | 139 | test "redacts nested structures" do 140 | assert encode(%{password: "foo", other_key: %{password: ["foo"]}}, @redactors) == %{ 141 | password: "[REDACTED]", 142 | other_key: %{password: "[REDACTED]"} 143 | } 144 | 145 | assert encode([password: "foo", other_key: [password: "bar"]], @redactors) == %{ 146 | password: "[REDACTED]", 147 | other_key: %{password: "[REDACTED]"} 148 | } 149 | 150 | assert encode([password: "foo", other_key: %{password: "bar"}], @redactors) == %{ 151 | password: "[REDACTED]", 152 | other_key: %{password: "[REDACTED]"} 153 | } 154 | 155 | assert encode([foo: ["foo", %{password: "bar"}]], @redactors) == %{foo: ["foo", %{password: "[REDACTED]"}]} 156 | end 157 | 158 | property "converts any term so that it can be encoded" do 159 | check all value <- term() do 160 | value 161 | |> encode([]) 162 | |> @encoder.encode!() 163 | end 164 | end 165 | end 166 | end 167 | -------------------------------------------------------------------------------- /test/logger_json/formatter_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.FormatterTest do 2 | use LoggerJSON.Case, async: true 3 | 4 | require LoggerJSON.Formatter 5 | 6 | @encoder Application.compile_env!(:logger_json, :encoder) 7 | @encoder_protocol Module.concat(@encoder, "Encoder") 8 | @default_encoder_opts 
if(@encoder == JSON, do: &JSON.protocol_encode/2, else: []) 9 | 10 | describe "default_encoder_opts/0" do 11 | test "returns value based on :encoder env" do 12 | assert LoggerJSON.Formatter.default_encoder_opts() == @default_encoder_opts 13 | end 14 | end 15 | 16 | describe "encoder/0" do 17 | test "returns value based on :encoder env" do 18 | assert LoggerJSON.Formatter.encoder() == @encoder 19 | end 20 | end 21 | 22 | describe "encoder_protocol/0" do 23 | test "returns value based on :encoder env" do 24 | assert LoggerJSON.Formatter.encoder_protocol() == @encoder_protocol 25 | end 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /test/logger_json/formatters/basic_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatters.BasicTest do 2 | use LoggerJSON.Case 3 | use ExUnitProperties 4 | alias LoggerJSON.Formatters.Basic 5 | require Logger 6 | 7 | @encoder LoggerJSON.Formatter.encoder() 8 | 9 | setup do 10 | formatter = Basic.new(metadata: :all) 11 | :logger.update_handler_config(:default, :formatter, formatter) 12 | end 13 | 14 | property "allows to log any binary messages" do 15 | check all message <- StreamData.binary() do 16 | assert capture_log(fn -> 17 | Logger.debug(message) 18 | end) 19 | |> decode_or_print_error() 20 | |> Map.has_key?("message") 21 | end 22 | end 23 | 24 | property "allows to log any structured messages" do 25 | check all message <- StreamData.map_of(StreamData.atom(:alphanumeric), StreamData.term()) do 26 | assert capture_log(fn -> 27 | Logger.debug(message) 28 | end) 29 | |> decode_or_print_error() 30 | |> Map.has_key?("message") 31 | end 32 | 33 | check all message <- StreamData.keyword_of(StreamData.term()) do 34 | assert capture_log(fn -> 35 | Logger.debug(message) 36 | end) 37 | |> decode_or_print_error() 38 | |> Map.has_key?("message") 39 | end 40 | end 41 | 42 | test "logs message of a given level" do 43 | for 
level <- [:error, :info, :debug, :emergency, :alert, :critical, :warning, :notice] do 44 | log = 45 | capture_log(level, fn -> 46 | Logger.log(level, "Hello") 47 | end) 48 | |> decode_or_print_error() 49 | 50 | level_string = to_string(level) 51 | 52 | assert %{ 53 | "message" => "Hello", 54 | "metadata" => %{"domain" => ["elixir"]}, 55 | "severity" => ^level_string, 56 | "time" => _ 57 | } = log 58 | end 59 | end 60 | 61 | test "logs message with a map payload" do 62 | log = 63 | capture_log(fn -> 64 | Logger.debug(%{foo: :bar, fiz: [1, 2, 3, "buz"]}) 65 | end) 66 | |> decode_or_print_error() 67 | 68 | assert log["message"] == %{ 69 | "fiz" => [1, 2, 3, "buz"], 70 | "foo" => "bar" 71 | } 72 | end 73 | 74 | test "logs message with a keyword payload" do 75 | log = 76 | capture_log(fn -> 77 | Logger.debug(a: {0, false}) 78 | end) 79 | |> decode_or_print_error() 80 | 81 | assert log["message"] == %{ 82 | "a" => [0, false] 83 | } 84 | end 85 | 86 | test "logs OpenTelemetry span and trace ids" do 87 | Logger.metadata( 88 | otel_span_id: ~c"bff20904aa5883a6", 89 | otel_trace_flags: ~c"01", 90 | otel_trace_id: ~c"294740ce41cc9f202dedb563db123532" 91 | ) 92 | 93 | log = 94 | capture_log(fn -> 95 | Logger.debug("Hello") 96 | end) 97 | |> decode_or_print_error() 98 | 99 | assert log["span"] == "bff20904aa5883a6" 100 | assert log["trace"] == "294740ce41cc9f202dedb563db123532" 101 | end 102 | 103 | test "logs span and trace ids" do 104 | Logger.metadata( 105 | span_id: "bff20904aa5883a6", 106 | trace_id: "294740ce41cc9f202dedb563db123532" 107 | ) 108 | 109 | log = 110 | capture_log(fn -> 111 | Logger.debug("Hello") 112 | end) 113 | |> decode_or_print_error() 114 | 115 | assert log["span"] == "bff20904aa5883a6" 116 | assert log["trace"] == "294740ce41cc9f202dedb563db123532" 117 | end 118 | 119 | test "logs file, line and mfa as metadata" do 120 | metadata = 121 | capture_log(fn -> 122 | Logger.debug("Hello") 123 | end) 124 | |> decode_or_print_error() 125 | |> 
Map.get("metadata") 126 | 127 | assert metadata |> Map.get("file") =~ "logger_json/formatters/basic_test.exs" 128 | assert metadata |> Map.get("line") |> is_integer() 129 | 130 | assert metadata["mfa"] === "Elixir.LoggerJSON.Formatters.BasicTest.test logs file, line and mfa as metadata/1" 131 | end 132 | 133 | test "logs metadata" do 134 | Logger.metadata( 135 | date: Date.utc_today(), 136 | time: Time.new(10, 10, 11), 137 | pid: self(), 138 | ref: make_ref(), 139 | atom: :atom, 140 | list: [1, 2, 3], 141 | map: %{foo: :bar}, 142 | struct: URI.parse("https://example.com"), 143 | binary: "binary", 144 | node: node() 145 | ) 146 | 147 | log = 148 | capture_log(fn -> 149 | Logger.debug("Hello", float: 3.14) 150 | end) 151 | |> decode_or_print_error() 152 | 153 | assert %{ 154 | "metadata" => %{ 155 | "atom" => "atom", 156 | "binary" => "binary", 157 | "date" => _, 158 | "domain" => ["elixir"], 159 | "list" => [1, 2, 3], 160 | "map" => %{"foo" => "bar"}, 161 | "node" => "nonode@nohost", 162 | "ref" => _ref, 163 | "float" => 3.14, 164 | "struct" => %{ 165 | "authority" => "example.com", 166 | "fragment" => nil, 167 | "host" => "example.com", 168 | "path" => nil, 169 | "port" => 443, 170 | "query" => nil, 171 | "scheme" => "https", 172 | "userinfo" => nil 173 | } 174 | } 175 | } = log 176 | 177 | formatter = Basic.new(metadata: {:all_except, [:struct]}) 178 | :logger.update_handler_config(:default, :formatter, formatter) 179 | 180 | log = 181 | capture_log(fn -> 182 | Logger.debug("Hello", float: 3.14) 183 | end) 184 | |> decode_or_print_error() 185 | 186 | assert %{ 187 | "metadata" => %{ 188 | "atom" => "atom", 189 | "binary" => "binary", 190 | "date" => _, 191 | "domain" => ["elixir"], 192 | "list" => [1, 2, 3], 193 | "map" => %{"foo" => "bar"}, 194 | "node" => "nonode@nohost", 195 | "ref" => _ref, 196 | "float" => 3.14 197 | } 198 | } = log 199 | 200 | formatter = Basic.new(metadata: [:node]) 201 | :logger.update_handler_config(:default, :formatter, formatter) 202 | 
203 | log = 204 | capture_log(fn -> 205 | Logger.debug("Hello", float: 3.14) 206 | end) 207 | |> decode_or_print_error() 208 | 209 | assert log["metadata"] == %{"node" => "nonode@nohost"} 210 | end 211 | 212 | test "logs exceptions" do 213 | log = 214 | capture_log(fn -> 215 | pid = 216 | spawn(fn -> 217 | raise RuntimeError 218 | end) 219 | 220 | ref = Process.monitor(pid) 221 | assert_receive {:DOWN, ^ref, _, _, _} 222 | Process.sleep(100) 223 | end) 224 | |> decode_or_print_error() 225 | 226 | assert log["message"] =~ "Process #PID<" 227 | assert log["message"] =~ "> raised an exception" 228 | assert log["message"] =~ "RuntimeError" 229 | end 230 | 231 | test "logs http context" do 232 | conn = 233 | Plug.Test.conn("GET", "/", "") 234 | |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") 235 | |> Plug.Conn.put_req_header("referer", "http://www.example2.com/") 236 | |> Plug.Conn.put_req_header("x-forwarded-for", "127.0.0.1,200.111.222.111") 237 | |> Plug.Conn.send_resp(200, "Hi!") 238 | 239 | Logger.metadata(conn: conn) 240 | 241 | log = 242 | capture_log(fn -> 243 | Logger.debug("Hello") 244 | end) 245 | |> decode_or_print_error() 246 | 247 | assert log["request"] == %{ 248 | "client" => %{ 249 | "ip" => "127.0.0.1", 250 | "user_agent" => "Mozilla/5.0" 251 | }, 252 | "connection" => %{ 253 | "method" => "GET", 254 | "path" => "/", 255 | "protocol" => "HTTP/1.1", 256 | "status" => 200 257 | } 258 | } 259 | end 260 | 261 | if @encoder == Jason do 262 | test "passing options to encoder" do 263 | formatter = Basic.new(encoder_opts: [pretty: true]) 264 | :logger.update_handler_config(:default, :formatter, formatter) 265 | 266 | assert capture_log(fn -> 267 | Logger.debug("Hello") 268 | end) =~ 269 | ~r/\n\s{2}"message": "Hello"/ 270 | end 271 | end 272 | 273 | test "accepts opts as a tuple" do 274 | :logger.update_handler_config(:default, :formatter, {Basic, metadata: :all}) 275 | 276 | assert capture_log(fn -> 277 | Logger.debug("hello") 278 | end) 279 | |> 
decode_or_print_error() 280 | |> Map.has_key?("message") 281 | end 282 | 283 | test "reads metadata from the given application env" do 284 | Application.put_env(:logger_json, :test_basic_metadata_key, [:foo]) 285 | formatter = Basic.new(metadata: {:from_application_env, {:logger_json, :test_basic_metadata_key}}) 286 | :logger.update_handler_config(:default, :formatter, formatter) 287 | 288 | Logger.metadata(foo: "foo") 289 | 290 | log = 291 | capture_log(fn -> 292 | Logger.debug("Hello") 293 | end) 294 | |> decode_or_print_error() 295 | 296 | assert %{ 297 | "metadata" => %{ 298 | "foo" => "foo" 299 | } 300 | } = log 301 | end 302 | 303 | test "reads metadata from the given application env at given path" do 304 | Application.put_env(:logger_json, :test_basic_metadata_key, metadata: [:foo]) 305 | formatter = Basic.new(metadata: {:from_application_env, {:logger_json, :test_basic_metadata_key}, [:metadata]}) 306 | :logger.update_handler_config(:default, :formatter, formatter) 307 | 308 | Logger.metadata(foo: "foo") 309 | 310 | log = 311 | capture_log(fn -> 312 | Logger.debug("Hello") 313 | end) 314 | |> decode_or_print_error() 315 | 316 | assert %{ 317 | "metadata" => %{ 318 | "foo" => "foo" 319 | } 320 | } = log 321 | end 322 | end 323 | -------------------------------------------------------------------------------- /test/logger_json/formatters/datadog_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatters.DatadogTest do 2 | use LoggerJSON.Case 3 | use ExUnitProperties 4 | alias LoggerJSON.Formatters.Datadog 5 | require Logger 6 | 7 | @encoder LoggerJSON.Formatter.encoder() 8 | 9 | setup do 10 | formatter = Datadog.new(metadata: :all) 11 | :logger.update_handler_config(:default, :formatter, formatter) 12 | end 13 | 14 | property "allows to log any binary messages" do 15 | check all message <- StreamData.binary() do 16 | assert capture_log(fn -> 17 | Logger.debug(message) 18 | end) 19 | |> 
decode_or_print_error() 20 | |> Map.has_key?("message") 21 | end 22 | end 23 | 24 | property "allows to log any structured messages" do 25 | check all message <- StreamData.map_of(StreamData.atom(:alphanumeric), StreamData.term()) do 26 | assert capture_log(fn -> 27 | Logger.debug(message) 28 | end) 29 | |> decode_or_print_error() 30 | |> Map.has_key?("message") 31 | end 32 | 33 | check all message <- StreamData.keyword_of(StreamData.term()) do 34 | assert capture_log(fn -> 35 | Logger.debug(message) 36 | end) 37 | |> decode_or_print_error() 38 | |> Map.has_key?("message") 39 | end 40 | end 41 | 42 | test "logs an LogEntry of a given level" do 43 | for level <- [:error, :info, :debug, :emergency, :alert, :critical, :warning, :notice] do 44 | log = 45 | capture_log(level, fn -> 46 | Logger.log(level, "Hello") 47 | end) 48 | |> decode_or_print_error() 49 | 50 | level_string = to_string(level) 51 | 52 | assert %{ 53 | "message" => "Hello", 54 | "domain" => ["elixir"], 55 | "syslog" => %{"hostname" => _hostname, "severity" => ^level_string, "timestamp" => _time} 56 | } = log 57 | end 58 | end 59 | 60 | test "logs an LogEntry with a map payload" do 61 | log = 62 | capture_log(fn -> 63 | Logger.debug(%{foo: :bar, fiz: [1, 2, 3, "buz"]}) 64 | end) 65 | |> decode_or_print_error() 66 | 67 | assert log["message"] == %{ 68 | "fiz" => [1, 2, 3, "buz"], 69 | "foo" => "bar" 70 | } 71 | end 72 | 73 | test "logs an LogEntry with a keyword payload" do 74 | log = 75 | capture_log(fn -> 76 | Logger.debug(a: {0, false}) 77 | end) 78 | |> decode_or_print_error() 79 | 80 | assert log["message"] == %{ 81 | "a" => [0, false] 82 | } 83 | end 84 | 85 | test "logs hostname" do 86 | # uses the hostname of the machine by default 87 | log = 88 | capture_log(fn -> 89 | Logger.debug("Hello") 90 | end) 91 | |> decode_or_print_error() 92 | 93 | assert log["syslog"]["hostname"] == :inet.gethostname() |> elem(1) |> IO.chardata_to_string() 94 | 95 | # static value 96 | formatter = 
Datadog.new(hostname: "foo.bar1") 97 | :logger.update_handler_config(:default, :formatter, formatter) 98 | 99 | log = 100 | capture_log(fn -> 101 | Logger.debug("Hello") 102 | end) 103 | |> decode_or_print_error() 104 | 105 | assert log["syslog"]["hostname"] == "foo.bar1" 106 | 107 | # unset value 108 | formatter = Datadog.new(hostname: :unset) 109 | :logger.update_handler_config(:default, :formatter, formatter) 110 | 111 | log = 112 | capture_log(fn -> 113 | Logger.debug("Hello") 114 | end) 115 | |> decode_or_print_error() 116 | 117 | refute Map.has_key?(log["syslog"], "hostname") 118 | end 119 | 120 | test "logs OpenTelemetry span and trace ids" do 121 | Logger.metadata( 122 | otel_span_id: ~c"bff20904aa5883a6", 123 | otel_trace_flags: ~c"01", 124 | otel_trace_id: ~c"294740ce41cc9f202dedb563db123532" 125 | ) 126 | 127 | log = 128 | capture_log(fn -> 129 | Logger.debug("Hello") 130 | end) 131 | |> decode_or_print_error() 132 | 133 | assert log["dd.span_id"] == "13831127321250661286" 134 | assert log["dd.trace_id"] == "3309500741668975922" 135 | end 136 | 137 | test "does not crash when OpenTelemetry span or trace ids are invalid" do 138 | Logger.metadata( 139 | otel_span_id: :foo, 140 | otel_trace_id: "123" 141 | ) 142 | 143 | log = 144 | capture_log(fn -> 145 | Logger.debug("Hello") 146 | end) 147 | |> decode_or_print_error() 148 | 149 | assert log["dd.span_id"] == "" 150 | assert log["dd.trace_id"] == "" 151 | 152 | Logger.metadata( 153 | otel_span_id: "ghijklmnopqrstuv", 154 | otel_trace_id: "ghijklmnopqrstuv" 155 | ) 156 | 157 | log = 158 | capture_log(fn -> 159 | Logger.debug("Hello") 160 | end) 161 | |> decode_or_print_error() 162 | 163 | assert log["dd.span_id"] == "" 164 | assert log["dd.trace_id"] == "" 165 | 166 | Logger.metadata( 167 | otel_span_id: "🚀🚀🚀🚀🚀🚀🚀🚀", 168 | otel_trace_id: "🚀🚀🚀🚀🚀🚀🚀🚀" 169 | ) 170 | 171 | log = 172 | capture_log(fn -> 173 | Logger.debug("Hello") 174 | end) 175 | |> decode_or_print_error() 176 | 177 | assert log["dd.span_id"] == 
"" 178 | assert log["dd.trace_id"] == "" 179 | 180 | Logger.metadata( 181 | otel_span_id: ~c"🚀🚀🚀🚀🚀🚀🚀🚀", 182 | otel_trace_id: ~c"🚀🚀🚀🚀🚀🚀🚀🚀" 183 | ) 184 | 185 | log = 186 | capture_log(fn -> 187 | Logger.debug("Hello") 188 | end) 189 | |> decode_or_print_error() 190 | 191 | assert log["dd.span_id"] == "" 192 | assert log["dd.trace_id"] == "" 193 | 194 | Logger.metadata( 195 | otel_span_id: ~c"🚀", 196 | otel_trace_id: ~c"🚀" 197 | ) 198 | 199 | log = 200 | capture_log(fn -> 201 | Logger.debug("Hello") 202 | end) 203 | |> decode_or_print_error() 204 | 205 | assert log["dd.span_id"] == "" 206 | assert log["dd.trace_id"] == "" 207 | end 208 | 209 | test "logs span and trace ids" do 210 | Logger.metadata( 211 | span_id: "bff20904aa5883a6", 212 | trace_id: "294740ce41cc9f202dedb563db123532" 213 | ) 214 | 215 | log = 216 | capture_log(fn -> 217 | Logger.debug("Hello") 218 | end) 219 | |> decode_or_print_error() 220 | 221 | assert log["dd.span_id"] == "bff20904aa5883a6" 222 | assert log["dd.trace_id"] == "294740ce41cc9f202dedb563db123532" 223 | 224 | assert log["span_id"] == "bff20904aa5883a6" 225 | assert log["trace_id"] == "294740ce41cc9f202dedb563db123532" 226 | end 227 | 228 | test "logs metadata" do 229 | Logger.metadata( 230 | date: Date.utc_today(), 231 | time: Time.new(10, 10, 11), 232 | pid: self(), 233 | ref: make_ref(), 234 | atom: :atom, 235 | list: [1, 2, 3], 236 | map: %{foo: :bar}, 237 | struct: URI.parse("https://example.com"), 238 | binary: "binary", 239 | node: node() 240 | ) 241 | 242 | log = 243 | capture_log(fn -> 244 | Logger.debug("Hello", float: 3.14) 245 | end) 246 | |> decode_or_print_error() 247 | 248 | assert %{ 249 | "atom" => "atom", 250 | "binary" => "binary", 251 | "date" => _, 252 | "domain" => ["elixir"], 253 | "list" => [1, 2, 3], 254 | "map" => %{"foo" => "bar"}, 255 | "message" => "Hello", 256 | "node" => "nonode@nohost", 257 | "ref" => _ref, 258 | "float" => 3.14, 259 | "struct" => %{ 260 | "authority" => "example.com", 261 | "fragment" => 
nil, 262 | "host" => "example.com", 263 | "path" => nil, 264 | "port" => 443, 265 | "query" => nil, 266 | "scheme" => "https", 267 | "userinfo" => nil 268 | } 269 | } = log 270 | end 271 | 272 | test "logs exceptions" do 273 | log = 274 | capture_log(fn -> 275 | pid = 276 | spawn(fn -> 277 | raise RuntimeError 278 | end) 279 | 280 | ref = Process.monitor(pid) 281 | assert_receive {:DOWN, ^ref, _, _, _} 282 | Process.sleep(100) 283 | end) 284 | |> decode_or_print_error() 285 | 286 | assert %{ 287 | "message" => message, 288 | "error" => %{ 289 | "kind" => "RuntimeError", 290 | "message" => message, 291 | "stack" => stacktrace 292 | }, 293 | "syslog" => %{ 294 | "hostname" => _, 295 | "severity" => "error", 296 | "timestamp" => _ 297 | } 298 | } = log 299 | 300 | assert message =~ "Process #PID<" 301 | assert message =~ "> raised an exception" 302 | assert message =~ "** (RuntimeError) runtime error" 303 | assert stacktrace =~ "test/" 304 | end 305 | 306 | test "logs http context" do 307 | conn = 308 | Plug.Test.conn("GET", "/", "") 309 | |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") 310 | |> Plug.Conn.put_req_header("referer", "http://www.example2.com/") 311 | |> Plug.Conn.put_req_header("x-forwarded-for", "127.0.0.1") 312 | |> Plug.Conn.send_resp(200, "Hi!") 313 | 314 | Logger.metadata(conn: conn, duration_us: 1337) 315 | 316 | log = 317 | capture_log(fn -> 318 | Logger.debug("Hello") 319 | end) 320 | |> decode_or_print_error() 321 | 322 | assert log["network"] == %{"client" => %{"ip" => "127.0.0.1"}} 323 | 324 | assert log["http"] == %{ 325 | "referer" => "http://www.example2.com/", 326 | "method" => "GET", 327 | "request_id" => nil, 328 | "status_code" => 200, 329 | "url" => "http://www.example.com/", 330 | "url_details" => %{ 331 | "host" => "www.example.com", 332 | "path" => "/", 333 | "port" => 80, 334 | "queryString" => "", 335 | "scheme" => "http" 336 | }, 337 | "useragent" => "Mozilla/5.0" 338 | } 339 | 340 | assert log["duration"] == 1_337_000 
341 | end 342 | 343 | test "logs exception http context" do 344 | conn = 345 | Plug.Test.conn("patch", "/", "") 346 | |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") 347 | |> Plug.Conn.put_req_header("referer", "http://www.example.com/") 348 | |> Plug.Conn.put_req_header("x-forwarded-for", "127.0.0.1") 349 | |> Plug.Conn.send_resp(503, "oops") 350 | 351 | Logger.metadata(crash_reason: {{:EXIT, self()}, :foo}, conn: conn) 352 | 353 | log = 354 | capture_log(fn -> 355 | Logger.debug("Hello") 356 | end) 357 | |> decode_or_print_error() 358 | 359 | assert log["error"] == %{"message" => "Hello", "kind" => "exit"} 360 | 361 | assert log["network"] == %{"client" => %{"ip" => "127.0.0.1"}} 362 | 363 | assert log["http"] == %{ 364 | "referer" => "http://www.example.com/", 365 | "method" => "PATCH", 366 | "request_id" => nil, 367 | "status_code" => 503, 368 | "url" => "http://www.example.com/", 369 | "url_details" => %{ 370 | "host" => "www.example.com", 371 | "path" => "/", 372 | "port" => 80, 373 | "queryString" => "", 374 | "scheme" => "http" 375 | }, 376 | "useragent" => "Mozilla/5.0" 377 | } 378 | 379 | assert log["duration"] == nil 380 | end 381 | 382 | test "logs throws" do 383 | Logger.metadata(crash_reason: {:throw, {:error, :whatever}}) 384 | 385 | log_entry = 386 | capture_log(fn -> 387 | Logger.debug("error here") 388 | end) 389 | |> decode_or_print_error() 390 | 391 | assert %{ 392 | "error" => %{"message" => "error here"}, 393 | "logger" => %{ 394 | "file_name" => _file, 395 | "line" => _line, 396 | "method_name" => "Elixir.LoggerJSON.Formatters.DatadogTest." 
<> _, 397 | "thread_name" => _pid 398 | } 399 | } = log_entry 400 | end 401 | 402 | test "logs exits" do 403 | Logger.metadata(crash_reason: {:exit, :sad_failure}) 404 | 405 | log_entry = 406 | capture_log(fn -> 407 | Logger.debug("error here") 408 | end) 409 | |> decode_or_print_error() 410 | 411 | assert %{ 412 | "error" => %{"message" => "error here"}, 413 | "logger" => %{ 414 | "file_name" => _file, 415 | "line" => _line, 416 | "method_name" => "Elixir.LoggerJSON.Formatters.DatadogTest." <> _, 417 | "thread_name" => _pid 418 | } 419 | } = log_entry 420 | end 421 | 422 | test "logs process exits" do 423 | Logger.metadata(crash_reason: {{:EXIT, self()}, :sad_failure}) 424 | 425 | log_entry = 426 | capture_log(fn -> 427 | Logger.debug("error here") 428 | end) 429 | |> decode_or_print_error() 430 | 431 | assert %{ 432 | "error" => %{"message" => "error here"}, 433 | "logger" => %{ 434 | "file_name" => _file, 435 | "line" => _line, 436 | "method_name" => "Elixir.LoggerJSON.Formatters.DatadogTest." <> _, 437 | "thread_name" => _pid 438 | } 439 | } = log_entry 440 | end 441 | 442 | test "logs reasons in tuple" do 443 | Logger.metadata(crash_reason: {:socket_closed_unexpectedly, []}) 444 | 445 | log_entry = 446 | capture_log(fn -> 447 | Logger.debug("error here") 448 | end) 449 | |> decode_or_print_error() 450 | 451 | assert %{ 452 | "error" => %{"message" => "error here"}, 453 | "logger" => %{ 454 | "file_name" => _file, 455 | "line" => _line, 456 | "method_name" => "Elixir.LoggerJSON.Formatters.DatadogTest." 
<> _, 457 | "thread_name" => _pid 458 | } 459 | } = log_entry 460 | end 461 | 462 | test "logs error.{kind, message, stack} for error+ logs" do 463 | for level <- [:error, :critical, :alert, :emergency] do 464 | log = 465 | capture_log(fn -> 466 | Logger.log(level, "Something went wrong") 467 | end) 468 | |> decode_or_print_error() 469 | 470 | assert log["error"]["kind"] == "error" 471 | assert log["error"]["message"] == "Something went wrong" 472 | end 473 | end 474 | 475 | test "logs error.* fields from logger metadata" do 476 | for level <- [:error, :critical, :alert, :emergency] do 477 | log = 478 | capture_log(level, fn -> 479 | Logger.log(level, "Something went wrong", 480 | error: %{kind: "CustomError", module: "PaymentGateway", stack: "stacktrace"} 481 | ) 482 | end) 483 | |> decode_or_print_error() 484 | 485 | assert log["error"]["kind"] == "CustomError" 486 | assert log["error"]["message"] == "Something went wrong" 487 | assert log["error"]["module"] == "PaymentGateway" 488 | assert log["error"]["stack"] == "stacktrace" 489 | end 490 | end 491 | 492 | if @encoder == Jason do 493 | test "passing options to encoder" do 494 | formatter = Datadog.new(encoder_opts: [pretty: true]) 495 | :logger.update_handler_config(:default, :formatter, formatter) 496 | 497 | assert capture_log(fn -> 498 | Logger.debug("Hello") 499 | end) =~ 500 | ~r/\n\s{2}"message": "Hello"/ 501 | end 502 | end 503 | 504 | test "accepts opts as a tuple" do 505 | :logger.update_handler_config(:default, :formatter, {Datadog, metadata: :all}) 506 | 507 | assert capture_log(fn -> 508 | Logger.debug("hello") 509 | end) 510 | |> decode_or_print_error() 511 | |> Map.has_key?("message") 512 | end 513 | 514 | test "reads metadata from the given application env" do 515 | Application.put_env(:logger_json, :test_datadog_metadata_key, [:foo]) 516 | formatter = Datadog.new(metadata: {:from_application_env, {:logger_json, :test_datadog_metadata_key}}) 517 | :logger.update_handler_config(:default, 
:formatter, formatter) 518 | 519 | Logger.metadata(foo: "foo") 520 | 521 | log = 522 | capture_log(fn -> 523 | Logger.debug("Hello") 524 | end) 525 | |> decode_or_print_error() 526 | 527 | assert %{ 528 | "foo" => "foo" 529 | } = log 530 | end 531 | 532 | test "reads metadata from the given application env at given path" do 533 | Application.put_env(:logger_json, :test_datadog_metadata_key, metadata: [:foo]) 534 | formatter = Datadog.new(metadata: {:from_application_env, {:logger_json, :test_datadog_metadata_key}, [:metadata]}) 535 | :logger.update_handler_config(:default, :formatter, formatter) 536 | 537 | Logger.metadata(foo: "foo") 538 | 539 | log = 540 | capture_log(fn -> 541 | Logger.debug("Hello") 542 | end) 543 | |> decode_or_print_error() 544 | 545 | assert %{ 546 | "foo" => "foo" 547 | } = log 548 | end 549 | 550 | test "logs Task/GenServer termination" do 551 | test_pid = self() 552 | 553 | logs = 554 | capture_log(fn -> 555 | {:ok, _} = Supervisor.start_link([{CrashingGenServer, :ok}], strategy: :one_for_one) 556 | 557 | {:ok, _} = 558 | Task.start(fn -> 559 | try do 560 | GenServer.call(CrashingGenServer, :boom) 561 | catch 562 | _ -> nil 563 | after 564 | send(test_pid, :done) 565 | end 566 | end) 567 | 568 | # Wait for task to finish 569 | receive do 570 | :done -> nil 571 | end 572 | 573 | # Let logs flush 574 | Process.sleep(100) 575 | end) 576 | 577 | [_, log_entry] = 578 | logs 579 | |> String.trim() 580 | |> String.split("\n") 581 | |> Enum.map(&decode_or_print_error/1) 582 | 583 | assert %{ 584 | "domain" => ["otp", "elixir"], 585 | "error" => %{"message" => message}, 586 | "error_logger" => %{"tag" => "error_msg"}, 587 | "syslog" => %{"severity" => "error"} 588 | } = log_entry 589 | 590 | assert message =~ ~r/Task #PID<\d+.\d+.\d+> started from #{inspect(test_pid)} terminating/ 591 | end 592 | end 593 | -------------------------------------------------------------------------------- /test/logger_json/formatters/elastic_test.exs: 
-------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatters.ElasticTest do 2 | use LoggerJSON.Case 3 | use ExUnitProperties 4 | alias LoggerJSON.Formatters.Elastic 5 | require Logger 6 | 7 | @encoder LoggerJSON.Formatter.encoder() 8 | 9 | setup do 10 | formatter = Elastic.new(metadata: :all) 11 | :logger.update_handler_config(:default, :formatter, formatter) 12 | end 13 | 14 | test "logs message of every level" do 15 | for level <- [:error, :info, :debug, :emergency, :alert, :critical, :warning, :notice] do 16 | message = "Hello" 17 | 18 | log_entry = 19 | capture_log(level, fn -> 20 | Logger.log(level, message) 21 | end) 22 | |> decode_or_print_error() 23 | 24 | level_string = Atom.to_string(level) 25 | 26 | assert %{ 27 | "@timestamp" => timestamp, 28 | "ecs.version" => "8.11.0", 29 | "log.level" => ^level_string, 30 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 31 | "log.origin" => %{ 32 | "file.name" => origin_file, 33 | "file.line" => origin_line, 34 | "function" => origin_function 35 | }, 36 | "message" => ^message 37 | } = log_entry 38 | 39 | assert {:ok, _, _} = DateTime.from_iso8601(timestamp) 40 | assert origin_line > 0 41 | assert String.ends_with?(origin_file, "test/logger_json/formatters/elastic_test.exs") 42 | assert String.starts_with?(origin_function, "test logs message of every level/1") 43 | assert log_entry["domain"] == nil 44 | end 45 | end 46 | 47 | test "logs message with a map payload" do 48 | log = 49 | capture_log(fn -> 50 | Logger.debug(%{foo: :bar, fiz: [1, 2, 3, "buz"]}) 51 | end) 52 | |> decode_or_print_error() 53 | 54 | assert log["fiz"] == [1, 2, 3, "buz"] 55 | assert log["foo"] == "bar" 56 | end 57 | 58 | test "logs message with a keyword payload" do 59 | log = 60 | capture_log(fn -> 61 | Logger.debug(a: {0, false}) 62 | end) 63 | |> decode_or_print_error() 64 | 65 | assert log["a"] == [0, false] 66 | end 67 | 68 | test "logs an LogEntry with a map payload 
containing message" do
    log =
      capture_log(fn ->
        Logger.debug(%{message: "Hello", foo: :bar, fiz: %{buz: "buz"}})
      end)
      |> decode_or_print_error()

    assert log["message"] == "Hello"
    assert log["foo"] == "bar"
    assert log["fiz"]["buz"] == "buz"
  end

  # OTel charlist ids are decoded to binaries under the ECS span/trace keys.
  test "logs OpenTelemetry span and trace ids" do
    Logger.metadata(
      otel_span_id: ~c"bff20904aa5883a6",
      otel_trace_flags: ~c"01",
      otel_trace_id: ~c"294740ce41cc9f202dedb563db123532"
    )

    log =
      capture_log(fn ->
        Logger.debug("Hello")
      end)
      |> decode_or_print_error()

    assert log["span.id"] == "bff20904aa5883a6"
    assert log["trace.id"] == "294740ce41cc9f202dedb563db123532"
  end

  # Plain span/trace ids are forwarded verbatim under the ECS keys.
  test "logs span and trace ids" do
    Logger.metadata(
      span_id: "bff20904aa5883a6",
      trace_id: "294740ce41cc9f202dedb563db123532"
    )

    log =
      capture_log(fn ->
        Logger.debug("Hello")
      end)
      |> decode_or_print_error()

    assert log["span.id"] == "bff20904aa5883a6"
    assert log["trace.id"] == "294740ce41cc9f202dedb563db123532"
  end

  # Non-binary ids must be passed through (encoded) rather than crash.
  test "does not crash on invalid span and trace ids" do
    Logger.metadata(
      span_id: :foo,
      trace_id: 123
    )

    log =
      capture_log(fn ->
        Logger.debug("Hello")
      end)
      |> decode_or_print_error()

    assert log["span.id"] == "foo"
    assert log["trace.id"] == 123
  end

  test "does not crash on invalid OTEL span and trace ids" do
    Logger.metadata(
      otel_span_id: :foo,
      otel_trace_id: 123
    )

    log =
      capture_log(fn ->
        Logger.debug("Hello")
      end)
      |> decode_or_print_error()

    assert log["span.id"] == "foo"
    assert log["trace.id"] == 123
  end

  # Representative metadata types are flattened into the top level of the
  # ECS log entry.
  test "logs metadata" do
    Logger.metadata(
      date: Date.utc_today(),
      # FIX: Time.new/3 returns {:ok, %Time{}}, not a %Time{}; use Time.new!/3
      # so the logged value is the Time struct this test intends to exercise.
      time: Time.new!(10, 10, 11),
      pid: self(),
ref: make_ref(), 151 | atom: :atom, 152 | list: [1, 2, 3], 153 | map: %{foo: :bar}, 154 | struct: URI.parse("https://example.com"), 155 | binary: "binary", 156 | node: node() 157 | ) 158 | 159 | log_entry = 160 | capture_log(fn -> 161 | Logger.debug("Hello", float: 3.14) 162 | end) 163 | |> decode_or_print_error() 164 | 165 | assert %{ 166 | "message" => "Hello", 167 | "atom" => "atom", 168 | "binary" => "binary", 169 | "date" => _, 170 | "list" => [1, 2, 3], 171 | "map" => %{"foo" => "bar"}, 172 | "node" => "nonode@nohost", 173 | "ref" => _ref, 174 | "float" => 3.14, 175 | "struct" => %{ 176 | "authority" => "example.com", 177 | "fragment" => nil, 178 | "host" => "example.com", 179 | "path" => nil, 180 | "port" => 443, 181 | "query" => nil, 182 | "scheme" => "https", 183 | "userinfo" => nil 184 | } 185 | } = log_entry 186 | end 187 | 188 | test "logs exceptions" do 189 | log_entry = 190 | capture_log(fn -> 191 | pid = 192 | spawn(fn -> 193 | raise RuntimeError 194 | end) 195 | 196 | ref = Process.monitor(pid) 197 | assert_receive {:DOWN, ^ref, _, _, _} 198 | Process.sleep(100) 199 | end) 200 | |> decode_or_print_error() 201 | 202 | assert %{ 203 | "message" => message, 204 | "error.message" => "runtime error", 205 | "error.stack_trace" => stacktrace, 206 | "error.type" => "Elixir.RuntimeError" 207 | } = log_entry 208 | 209 | assert message =~ ~r/Process #PID<\d.\d+.\d> raised an exception/ 210 | assert stacktrace =~ "** (RuntimeError) runtime error" 211 | assert stacktrace =~ ~r/test\/logger_json\/formatters\/elastic_test.exs:\d+: anonymous fn\/0/ 212 | assert stacktrace =~ "in LoggerJSON.Formatters.ElasticTest.\"test logs exceptions\"/1" 213 | assert log_entry["error_logger"] == nil 214 | end 215 | 216 | test "logs exceptions with id and code" do 217 | defmodule TestException do 218 | defexception [:message, :id, :code] 219 | end 220 | 221 | log_entry = 222 | capture_log(fn -> 223 | pid = 224 | spawn(fn -> 225 | raise TestException, id: :oops_id, code: 42, 
message: "oops!" 226 | end) 227 | 228 | ref = Process.monitor(pid) 229 | assert_receive {:DOWN, ^ref, _, _, _} 230 | Process.sleep(100) 231 | end) 232 | |> decode_or_print_error() 233 | 234 | assert %{ 235 | "message" => message, 236 | "error.message" => "oops!", 237 | "error.stack_trace" => _, 238 | "error.type" => "Elixir.LoggerJSON.Formatters.ElasticTest.TestException", 239 | "error.id" => "oops_id", 240 | "error.code" => 42 241 | } = log_entry 242 | 243 | assert message =~ ~r/Process #PID<\d.\d+.\d> raised an exception/ 244 | end 245 | 246 | test "logged exception stacktrace is in default Elixir format" do 247 | error = %RuntimeError{message: "oops"} 248 | 249 | stacktrace = [ 250 | {Foo, :bar, 0, [file: ~c"foo/bar.ex", line: 123]}, 251 | {Foo.Bar, :baz, 1, [file: ~c"foo/bar/baz.ex", line: 456]} 252 | ] 253 | 254 | Logger.metadata(crash_reason: {error, stacktrace}) 255 | 256 | log_entry = 257 | capture_log(fn -> 258 | Logger.debug("foo") 259 | end) 260 | |> decode_or_print_error() 261 | 262 | assert log_entry["error.stack_trace"] == 263 | """ 264 | ** (RuntimeError) oops 265 | foo/bar.ex:123: Foo.bar/0 266 | foo/bar/baz.ex:456: Foo.Bar.baz/1 267 | """ 268 | end 269 | 270 | test "logs throws" do 271 | Logger.metadata(crash_reason: {:throw, {:error, :whatever}}) 272 | 273 | log_entry = 274 | capture_log(fn -> 275 | Logger.debug("oops!") 276 | end) 277 | |> decode_or_print_error() 278 | 279 | assert %{ 280 | "message" => "oops!", 281 | "error.message" => "throw: {:error, :whatever}", 282 | "error.stack_trace" => "** (throw) {:error, :whatever}", 283 | "error.type" => "throw", 284 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 285 | "log.origin" => %{ 286 | "file.line" => _, 287 | "file.name" => _, 288 | "function" => _ 289 | } 290 | } = log_entry 291 | end 292 | 293 | test "logs exits" do 294 | Logger.metadata(crash_reason: {:exit, :sad_failure}) 295 | 296 | log_entry = 297 | capture_log(fn -> 298 | Logger.debug("oops!") 299 | end) 300 | |> 
decode_or_print_error() 301 | 302 | assert %{ 303 | "message" => "oops!", 304 | "error.message" => "exit: :sad_failure", 305 | "error.stack_trace" => "** (exit) :sad_failure", 306 | "error.type" => "exit", 307 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 308 | "log.origin" => %{ 309 | "file.line" => _, 310 | "file.name" => _, 311 | "function" => _ 312 | } 313 | } = log_entry 314 | end 315 | 316 | test "logs process exits" do 317 | Logger.metadata(crash_reason: {{:EXIT, self()}, :sad_failure}) 318 | 319 | log_entry = 320 | capture_log(fn -> 321 | Logger.debug("oops!") 322 | end) 323 | |> decode_or_print_error() 324 | 325 | assert %{ 326 | "message" => "oops!", 327 | "error.message" => error_message, 328 | "error.stack_trace" => stacktrace, 329 | "error.type" => "exit", 330 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 331 | "log.origin" => %{ 332 | "file.line" => _, 333 | "file.name" => _, 334 | "function" => _ 335 | } 336 | } = log_entry 337 | 338 | assert stacktrace =~ ~r/\*\* \(EXIT from #PID<\d+\.\d+\.\d+>\) :sad_failure/ 339 | assert error_message =~ ~r/process #PID<\d+\.\d+\.\d+> exit: :sad_failure/ 340 | end 341 | 342 | test "logs reasons in tuple" do 343 | Logger.metadata(crash_reason: {:socket_closed_unexpectedly, []}) 344 | 345 | log_entry = 346 | capture_log(fn -> 347 | Logger.debug("oops!") 348 | end) 349 | |> decode_or_print_error() 350 | 351 | assert %{ 352 | "message" => "oops!", 353 | "error.message" => "socket_closed_unexpectedly: []", 354 | "error.stack_trace" => "** (socket_closed_unexpectedly) []", 355 | "error.type" => "socket_closed_unexpectedly", 356 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 357 | "log.origin" => %{ 358 | "file.line" => _, 359 | "file.name" => _, 360 | "function" => _ 361 | } 362 | } = log_entry 363 | end 364 | 365 | test "logs http context" do 366 | conn = 367 | Plug.Test.conn("GET", "/", "") 368 | |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") 369 | |> 
Plug.Conn.put_req_header("referer", "http://www.example2.com/") 370 | |> Plug.Conn.put_req_header("x-forwarded-for", "") 371 | |> Plug.Conn.send_resp(200, "Hi!") 372 | 373 | Logger.metadata(conn: conn, duration_us: 1337) 374 | 375 | log_entry = 376 | capture_log(fn -> 377 | Logger.debug("Hello") 378 | end) 379 | |> decode_or_print_error() 380 | 381 | assert %{ 382 | "client.ip" => "", 383 | "event.duration" => 1_337_000, 384 | "http.version" => "HTTP/1.1", 385 | "http.request.method" => "GET", 386 | "http.request.referrer" => "http://www.example2.com/", 387 | "http.response.status_code" => 200, 388 | "url.path" => "/", 389 | "user_agent.original" => "Mozilla/5.0" 390 | } = log_entry 391 | end 392 | 393 | test "logs exception http context" do 394 | conn = 395 | Plug.Test.conn("patch", "/", "") 396 | |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") 397 | |> Plug.Conn.put_req_header("referer", "http://www.example.com/") 398 | |> Plug.Conn.put_req_header("x-forwarded-for", "") 399 | |> Plug.Conn.send_resp(503, "oops") 400 | 401 | Logger.metadata(crash_reason: {{:EXIT, self()}, :foo}, conn: conn) 402 | 403 | log_entry = 404 | capture_log(fn -> 405 | Logger.debug("Hello") 406 | end) 407 | |> decode_or_print_error() 408 | 409 | assert %{ 410 | "client.ip" => "", 411 | "http.version" => "HTTP/1.1", 412 | "http.request.method" => "PATCH", 413 | "http.request.referrer" => "http://www.example.com/", 414 | "http.response.status_code" => 503, 415 | "url.path" => "/", 416 | "user_agent.original" => "Mozilla/5.0" 417 | } = log_entry 418 | end 419 | 420 | test "logs caught errors" do 421 | log_entry = 422 | capture_log(fn -> 423 | try do 424 | raise "oops" 425 | rescue 426 | e in RuntimeError -> Logger.error("Something went wrong", crash_reason: {e, __STACKTRACE__}) 427 | end 428 | end) 429 | |> decode_or_print_error() 430 | 431 | assert %{ 432 | "message" => "Something went wrong", 433 | "error.message" => "oops", 434 | "error.type" => "Elixir.RuntimeError", 435 | 
"error.stack_trace" => stacktrace, 436 | "log.level" => "error", 437 | "log.logger" => "Elixir.LoggerJSON.Formatters.ElasticTest", 438 | "log.origin" => %{ 439 | "file.name" => origin_file, 440 | "file.line" => origin_line, 441 | "function" => origin_function 442 | } 443 | } = log_entry 444 | 445 | assert origin_line > 0 446 | assert String.ends_with?(origin_file, "test/logger_json/formatters/elastic_test.exs") 447 | assert String.starts_with?(origin_function, "test logs caught errors/1") 448 | assert String.starts_with?(stacktrace, "** (RuntimeError) oops") 449 | end 450 | 451 | test "logs Task/GenServer termination" do 452 | test_pid = self() 453 | 454 | logs = 455 | capture_log(fn -> 456 | {:ok, _} = Supervisor.start_link([{CrashingGenServer, :ok}], strategy: :one_for_one) 457 | 458 | {:ok, _} = 459 | Task.start(fn -> 460 | try do 461 | GenServer.call(CrashingGenServer, :boom) 462 | catch 463 | _ -> nil 464 | after 465 | send(test_pid, :done) 466 | end 467 | end) 468 | 469 | # Wait for task to finish 470 | receive do 471 | :done -> nil 472 | end 473 | 474 | # Let logs flush 475 | Process.sleep(100) 476 | end) 477 | 478 | [_, log_entry] = 479 | logs 480 | |> String.trim() 481 | |> String.split("\n") 482 | |> Enum.map(&decode_or_print_error/1) 483 | 484 | assert %{ 485 | "error.message" => "boom", 486 | "error.type" => "Elixir.RuntimeError", 487 | "error.stack_trace" => "** (RuntimeError) boom" <> _, 488 | "message" => message 489 | } = log_entry 490 | 491 | assert message =~ ~r/Task #PID<\d+.\d+.\d+> started from #{inspect(test_pid)} terminating/ 492 | end 493 | 494 | if @encoder == Jason do 495 | test "passing options to encoder" do 496 | formatter = Elastic.new(encoder_opts: [pretty: true]) 497 | :logger.update_handler_config(:default, :formatter, formatter) 498 | 499 | assert capture_log(fn -> 500 | Logger.debug("Hello") 501 | end) =~ 502 | ~r/\n\s{2}"message": "Hello"/ 503 | end 504 | end 505 | 506 | test "accepts opts as a tuple" do 507 | 
:logger.update_handler_config(:default, :formatter, {Elastic, metadata: :all}) 508 | 509 | assert capture_log(fn -> 510 | Logger.debug("hello") 511 | end) 512 | |> decode_or_print_error() 513 | |> Map.has_key?("message") 514 | end 515 | 516 | test "reads metadata from the given application env" do 517 | Application.put_env(:logger_json, :test_elastic_metadata_key, [:foo]) 518 | formatter = Elastic.new(metadata: {:from_application_env, {:logger_json, :test_elastic_metadata_key}}) 519 | :logger.update_handler_config(:default, :formatter, formatter) 520 | 521 | Logger.metadata(foo: "foo") 522 | 523 | log = 524 | capture_log(fn -> 525 | Logger.debug("Hello") 526 | end) 527 | |> decode_or_print_error() 528 | 529 | assert %{ 530 | "foo" => "foo" 531 | } = log 532 | end 533 | 534 | test "reads metadata from the given application env at given path" do 535 | Application.put_env(:logger_json, :test_elastic_metadata_key, metadata: [:foo]) 536 | formatter = Elastic.new(metadata: {:from_application_env, {:logger_json, :test_elastic_metadata_key}, [:metadata]}) 537 | :logger.update_handler_config(:default, :formatter, formatter) 538 | 539 | Logger.metadata(foo: "foo") 540 | 541 | log = 542 | capture_log(fn -> 543 | Logger.debug("Hello") 544 | end) 545 | |> decode_or_print_error() 546 | 547 | assert %{ 548 | "foo" => "foo" 549 | } = log 550 | end 551 | end 552 | -------------------------------------------------------------------------------- /test/logger_json/formatters/google_cloud_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.Formatters.GoogleCloudTest do 2 | use LoggerJSON.Case 3 | use ExUnitProperties 4 | alias LoggerJSON.Formatters.GoogleCloud 5 | require Logger 6 | 7 | @encoder LoggerJSON.Formatter.encoder() 8 | 9 | setup do 10 | formatter = GoogleCloud.new(metadata: :all, project_id: "myproj-101") 11 | :logger.update_handler_config(:default, :formatter, formatter) 12 | end 13 | 14 | property "allows to 
log any binary messages" do 15 | check all message <- StreamData.binary() do 16 | assert capture_log(fn -> 17 | Logger.debug(message) 18 | end) 19 | |> @encoder.decode!() 20 | end 21 | end 22 | 23 | property "allows to log any structured messages" do 24 | check all message <- StreamData.map_of(StreamData.atom(:alphanumeric), StreamData.term()) do 25 | assert capture_log(fn -> 26 | Logger.debug(message) 27 | end) 28 | |> @encoder.decode!() 29 | end 30 | 31 | check all message <- StreamData.keyword_of(StreamData.term()) do 32 | assert capture_log(fn -> 33 | Logger.debug(message) 34 | end) 35 | |> @encoder.decode!() 36 | end 37 | end 38 | 39 | test "logs an LogEntry of a given level" do 40 | for level <- [:info, :debug, :warning, :notice] do 41 | log_entry = 42 | capture_log(level, fn -> 43 | Logger.log(level, "Hello") 44 | end) 45 | |> decode_or_print_error() 46 | 47 | pid = inspect(self()) 48 | level_string = String.upcase(to_string(level)) 49 | 50 | assert %{ 51 | "logging.googleapis.com/operation" => %{"producer" => ^pid}, 52 | "logging.googleapis.com/sourceLocation" => %{ 53 | "file" => _, 54 | "function" => _, 55 | "line" => _ 56 | }, 57 | "domain" => ["elixir"], 58 | "message" => "Hello", 59 | "severity" => ^level_string, 60 | "time" => _ 61 | } = log_entry 62 | 63 | refute Map.has_key?(log_entry, "@type") 64 | end 65 | end 66 | 67 | test "reports errors to Google Cloud Error Reporter" do 68 | for level <- [:error, :emergency, :alert, :critical] do 69 | log_entry = 70 | capture_log(level, fn -> 71 | Logger.log(level, "Hello") 72 | end) 73 | |> decode_or_print_error() 74 | 75 | pid = inspect(self()) 76 | level_string = String.upcase(to_string(level)) 77 | 78 | assert %{ 79 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 80 | "logging.googleapis.com/operation" => %{"producer" => ^pid}, 81 | "logging.googleapis.com/sourceLocation" => %{ 82 | "file" => _, 83 | "function" => _, 84 | "line" => _ 85 | }, 86 | "domain" 
=> ["elixir"], 87 | "message" => "Hello", 88 | "severity" => ^level_string, 89 | "time" => _ 90 | } = log_entry 91 | end 92 | end 93 | 94 | test "logs an LogEntry when an operation" do 95 | log_entry = 96 | capture_log(:info, fn -> 97 | Logger.log(:info, "Hello", request_id: "1234567890") 98 | end) 99 | |> decode_or_print_error() 100 | 101 | pid = inspect(self()) 102 | 103 | assert %{ 104 | "logging.googleapis.com/operation" => %{"producer" => ^pid, "id" => "1234567890"} 105 | } = log_entry 106 | end 107 | 108 | test "logs an LogEntry with a map payload" do 109 | log_entry = 110 | capture_log(fn -> 111 | Logger.debug(%{foo: :bar, fiz: [1, 2, 3, "buz"]}) 112 | end) 113 | |> decode_or_print_error() 114 | 115 | assert %{ 116 | "fiz" => [1, 2, 3, "buz"], 117 | "foo" => "bar" 118 | } = log_entry 119 | end 120 | 121 | test "logs an LogEntry with a keyword payload" do 122 | log_entry = 123 | capture_log(fn -> 124 | Logger.debug(a: {0, false}) 125 | end) 126 | |> decode_or_print_error() 127 | 128 | assert %{ 129 | "a" => [0, false] 130 | } = log_entry 131 | end 132 | 133 | test "logs OpenTelemetry span and trace ids" do 134 | Logger.metadata( 135 | otel_span_id: ~c"bff20904aa5883a6", 136 | otel_trace_flags: ~c"01", 137 | otel_trace_id: ~c"294740ce41cc9f202dedb563db123532" 138 | ) 139 | 140 | log_entry = 141 | capture_log(fn -> 142 | Logger.debug("Hello") 143 | end) 144 | |> decode_or_print_error() 145 | 146 | assert log_entry["logging.googleapis.com/spanId"] == "bff20904aa5883a6" 147 | assert log_entry["logging.googleapis.com/trace"] == "projects/myproj-101/traces/294740ce41cc9f202dedb563db123532" 148 | end 149 | 150 | test "logs span and trace ids without project_id" do 151 | System.delete_env("GCLOUD_PROJECT") 152 | System.delete_env("GOOGLE_PROJECT_ID") 153 | System.delete_env("GOOGLE_CLOUD_PROJECT") 154 | 155 | formatter = GoogleCloud.new(metadata: :all) 156 | :logger.update_handler_config(:default, :formatter, formatter) 157 | 158 | Logger.metadata( 159 | 
otel_span_id: ~c"bff20904aa5883a6", 160 | otel_trace_flags: ~c"01", 161 | otel_trace_id: ~c"294740ce41cc9f202dedb563db123532" 162 | ) 163 | 164 | log_entry = 165 | capture_log(fn -> 166 | Logger.debug("Hello") 167 | end) 168 | |> decode_or_print_error() 169 | 170 | assert log_entry["logging.googleapis.com/spanId"] == "bff20904aa5883a6" 171 | assert log_entry["logging.googleapis.com/trace"] == "294740ce41cc9f202dedb563db123532" 172 | end 173 | 174 | test "logs span and trace ids" do 175 | Logger.metadata( 176 | span_id: "bff20904aa5883a6", 177 | trace_id: "294740ce41cc9f202dedb563db123532" 178 | ) 179 | 180 | log_entry = 181 | capture_log(fn -> 182 | Logger.debug("Hello") 183 | end) 184 | |> decode_or_print_error() 185 | 186 | assert log_entry["logging.googleapis.com/spanId"] == "bff20904aa5883a6" 187 | assert log_entry["logging.googleapis.com/trace"] == "294740ce41cc9f202dedb563db123532" 188 | end 189 | 190 | test "does not crash on invalid span and trace ids" do 191 | System.delete_env("GCLOUD_PROJECT") 192 | System.delete_env("GOOGLE_PROJECT_ID") 193 | System.delete_env("GOOGLE_CLOUD_PROJECT") 194 | 195 | Logger.metadata( 196 | span_id: :foo, 197 | trace_id: 123 198 | ) 199 | 200 | log_entry = 201 | capture_log(fn -> 202 | Logger.debug("Hello") 203 | end) 204 | |> decode_or_print_error() 205 | 206 | assert log_entry["logging.googleapis.com/spanId"] == "foo" 207 | assert log_entry["logging.googleapis.com/trace"] == 123 208 | end 209 | 210 | test "does not crash on invalid OTEL span and trace ids" do 211 | System.delete_env("GCLOUD_PROJECT") 212 | System.delete_env("GOOGLE_PROJECT_ID") 213 | System.delete_env("GOOGLE_CLOUD_PROJECT") 214 | 215 | formatter = GoogleCloud.new(metadata: :all) 216 | :logger.update_handler_config(:default, :formatter, formatter) 217 | 218 | Logger.metadata( 219 | otel_span_id: :foo, 220 | otel_trace_id: 123 221 | ) 222 | 223 | log_entry = 224 | capture_log(fn -> 225 | Logger.debug("Hello") 226 | end) 227 | |> decode_or_print_error() 228 | 
229 | assert log_entry["logging.googleapis.com/spanId"] == "foo" 230 | assert log_entry["logging.googleapis.com/trace"] == 123 231 | end 232 | 233 | test "logs request id" do 234 | Logger.metadata(request_id: "1234567890") 235 | 236 | log_entry = 237 | capture_log(fn -> 238 | Logger.debug("Hello") 239 | end) 240 | |> decode_or_print_error() 241 | 242 | assert log_entry["logging.googleapis.com/operation"]["id"] == "1234567890" 243 | 244 | assert log_entry["request_id"] == "1234567890" 245 | end 246 | 247 | test "logs metadata" do 248 | Logger.metadata( 249 | date: Date.utc_today(), 250 | time: Time.new(10, 10, 11), 251 | pid: self(), 252 | ref: make_ref(), 253 | atom: :atom, 254 | list: [1, 2, 3], 255 | map: %{foo: :bar}, 256 | struct: URI.parse("https://example.com"), 257 | binary: "binary", 258 | node: node() 259 | ) 260 | 261 | log_entry = 262 | capture_log(fn -> 263 | Logger.debug("Hello", float: 3.14) 264 | end) 265 | |> decode_or_print_error() 266 | 267 | assert %{ 268 | "atom" => "atom", 269 | "binary" => "binary", 270 | "date" => _, 271 | "domain" => ["elixir"], 272 | "list" => [1, 2, 3], 273 | "map" => %{"foo" => "bar"}, 274 | "message" => "Hello", 275 | "node" => "nonode@nohost", 276 | "ref" => _ref, 277 | "float" => 3.14, 278 | "struct" => %{ 279 | "authority" => "example.com", 280 | "fragment" => nil, 281 | "host" => "example.com", 282 | "path" => nil, 283 | "port" => 443, 284 | "query" => nil, 285 | "scheme" => "https", 286 | "userinfo" => nil 287 | } 288 | } = log_entry 289 | end 290 | 291 | test "logs exceptions" do 292 | log_entry = 293 | capture_log(fn -> 294 | pid = 295 | spawn(fn -> 296 | raise RuntimeError 297 | end) 298 | 299 | ref = Process.monitor(pid) 300 | assert_receive {:DOWN, ^ref, _, _, _} 301 | Process.sleep(100) 302 | end) 303 | |> decode_or_print_error() 304 | 305 | assert %{ 306 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 307 | "message" => "runtime error", 308 | "stack_trace" => 
stacktrace, 309 | "serviceContext" => %{"service" => "nonode@nohost"} 310 | } = log_entry 311 | 312 | assert stacktrace =~ "** (RuntimeError) runtime error" 313 | assert stacktrace =~ "test/" 314 | assert stacktrace =~ ":in `" 315 | end 316 | 317 | test "logged exception stacktrace is in Ruby format for Elixir errors" do 318 | error = %RuntimeError{message: "oops"} 319 | 320 | stacktrace = [ 321 | {Foo, :bar, 0, [file: ~c"foo/bar.ex", line: 123]}, 322 | {Foo.Bar, :baz, 1, [file: ~c"foo/bar/baz.ex", line: 456]} 323 | ] 324 | 325 | Logger.metadata(crash_reason: {error, stacktrace}) 326 | 327 | log_entry = 328 | capture_log(fn -> 329 | Logger.debug("foo") 330 | end) 331 | |> decode_or_print_error() 332 | 333 | assert log_entry["stack_trace"] == 334 | """ 335 | ** (RuntimeError) oops 336 | foo/bar.ex:123:in `Foo.bar/0' 337 | foo/bar/baz.ex:456:in `Foo.Bar.baz/1' 338 | """ 339 | end 340 | 341 | test "logs exception user context" do 342 | Logger.metadata(crash_reason: {{:EXIT, self()}, :foo}) 343 | 344 | # The keys are applied in the order of their precedence 345 | [:user_id, :identity_id, :actor_id, :account_id] 346 | |> Enum.reverse() 347 | |> Enum.reduce([], fn key, metadata -> 348 | metadata = Keyword.put(metadata, key, "foo_#{key}") 349 | Logger.metadata(metadata) 350 | 351 | log_entry = 352 | capture_log(fn -> 353 | Logger.debug("Hello") 354 | end) 355 | |> decode_or_print_error() 356 | 357 | [entity, _id] = key |> Atom.to_string() |> String.split("_") 358 | 359 | assert log_entry["context"]["user"] == "#{entity}:foo_#{key}" 360 | 361 | metadata 362 | end) 363 | end 364 | 365 | test "logs http context" do 366 | conn = 367 | Plug.Test.conn("GET", "/", "") 368 | |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") 369 | |> Plug.Conn.put_req_header("referer", "http://www.example2.com/") 370 | |> Plug.Conn.put_req_header("x-forwarded-for", "") 371 | |> Plug.Conn.send_resp(200, "Hi!") 372 | 373 | Logger.metadata(conn: conn) 374 | 375 | log_entry = 376 | 
capture_log(fn -> 377 | Logger.debug("Hello", duration_us: 123_456) 378 | end) 379 | |> decode_or_print_error() 380 | 381 | assert log_entry["httpRequest"] == %{ 382 | "protocol" => "HTTP/1.1", 383 | "referer" => "http://www.example2.com/", 384 | "remoteIp" => "", 385 | "requestMethod" => "GET", 386 | "requestUrl" => "http://www.example.com/", 387 | "status" => 200, 388 | "userAgent" => "Mozilla/5.0", 389 | "latency" => "0.123456s" 390 | } 391 | end 392 | 393 | test "logs exception http context" do 394 | conn = 395 | Plug.Test.conn("patch", "/", "") 396 | |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") 397 | |> Plug.Conn.put_req_header("referer", "http://www.example.com/") 398 | |> Plug.Conn.put_req_header("x-forwarded-for", "") 399 | |> Plug.Conn.send_resp(503, "oops") 400 | 401 | Logger.metadata(crash_reason: {{:EXIT, self()}, :foo}, conn: conn) 402 | 403 | log_entry = 404 | capture_log(fn -> 405 | Logger.debug("Hello") 406 | end) 407 | |> decode_or_print_error() 408 | 409 | assert log_entry["context"]["httpRequest"] == %{ 410 | "protocol" => "HTTP/1.1", 411 | "referer" => "http://www.example.com/", 412 | "remoteIp" => "", 413 | "requestMethod" => "PATCH", 414 | "requestUrl" => "http://www.example.com/", 415 | "status" => 503, 416 | "userAgent" => "Mozilla/5.0", 417 | "latency" => nil 418 | } 419 | end 420 | 421 | test "logs throws" do 422 | Logger.metadata(crash_reason: {:throw, {:error, :whatever}}) 423 | 424 | log_entry = 425 | capture_log(fn -> 426 | Logger.debug("oops!") 427 | end) 428 | |> decode_or_print_error() 429 | 430 | assert %{ 431 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 432 | "message" => "oops!", 433 | "stack_trace" => "** (throw) {:error, :whatever}", 434 | "serviceContext" => %{"service" => "nonode@nohost"}, 435 | "context" => %{ 436 | "reportLocation" => %{ 437 | "filePath" => _, 438 | "functionName" => _, 439 | "lineNumber" => _ 440 | } 441 | } 442 | } = log_entry 443 | end 444 
| 445 | test "logs exits" do 446 | Logger.metadata(crash_reason: {:exit, :sad_failure}) 447 | 448 | log_entry = 449 | capture_log(fn -> 450 | Logger.debug("oops!") 451 | end) 452 | |> decode_or_print_error() 453 | 454 | assert %{ 455 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 456 | "message" => "oops!", 457 | "stack_trace" => "** (exit) :sad_failure", 458 | "serviceContext" => %{"service" => "nonode@nohost"} 459 | } = log_entry 460 | end 461 | 462 | test "logs Task/GenServer termination" do 463 | test_pid = self() 464 | 465 | logs = 466 | capture_log(fn -> 467 | {:ok, _} = Supervisor.start_link([{CrashingGenServer, :ok}], strategy: :one_for_one) 468 | 469 | {:ok, _} = 470 | Task.start(fn -> 471 | try do 472 | GenServer.call(CrashingGenServer, :boom) 473 | catch 474 | _ -> nil 475 | after 476 | send(test_pid, :done) 477 | end 478 | end) 479 | 480 | # Wait for task to finish 481 | receive do 482 | :done -> nil 483 | end 484 | 485 | # Let logs flush 486 | Process.sleep(100) 487 | end) 488 | 489 | [_, log_entry] = 490 | logs 491 | |> String.trim() 492 | |> String.split("\n") 493 | |> Enum.map(&decode_or_print_error/1) 494 | 495 | assert %{ 496 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 497 | "message" => message, 498 | "stack_trace" => "** (RuntimeError) boom" <> _, 499 | "serviceContext" => %{"service" => "nonode@nohost"} 500 | } = log_entry 501 | 502 | assert message =~ ~r/Task #PID<\d+.\d+.\d+> started from #{inspect(test_pid)} terminating/ 503 | end 504 | 505 | test "does not crash on unknown error tuples" do 506 | Logger.metadata(crash_reason: {{:something, :else}, [:unknown]}) 507 | 508 | log_entry = 509 | capture_log(fn -> 510 | Logger.debug("oops!") 511 | end) 512 | |> decode_or_print_error() 513 | 514 | assert %{ 515 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 516 | "message" => "oops!", 517 | 
"stack_trace" => "** ({:something, :else}) [:unknown]", 518 | "serviceContext" => %{"service" => "nonode@nohost"} 519 | } = log_entry 520 | end 521 | 522 | test "does not crash on unknown errors" do 523 | Logger.metadata(crash_reason: :what_is_this?) 524 | 525 | log_entry = 526 | capture_log(fn -> 527 | Logger.debug("oops!") 528 | end) 529 | |> decode_or_print_error() 530 | 531 | assert %{ 532 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 533 | "message" => "oops!", 534 | "stack_trace" => nil, 535 | "serviceContext" => %{"service" => "nonode@nohost"} 536 | } = log_entry 537 | end 538 | 539 | test "logs process exits" do 540 | Logger.metadata(crash_reason: {{:EXIT, self()}, :sad_failure}) 541 | 542 | log_entry = 543 | capture_log(fn -> 544 | Logger.debug("oops!") 545 | end) 546 | |> decode_or_print_error() 547 | 548 | assert %{ 549 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 550 | "message" => "oops!", 551 | "stack_trace" => stacktrace, 552 | "serviceContext" => %{"service" => "nonode@nohost"} 553 | } = log_entry 554 | 555 | assert stacktrace =~ "** (EXIT from #PID<" 556 | assert stacktrace =~ ">) :sad_failure" 557 | end 558 | 559 | test "logs reasons in tuple" do 560 | Logger.metadata(crash_reason: {:socket_closed_unexpectedly, []}) 561 | 562 | log_entry = 563 | capture_log(fn -> 564 | Logger.debug("oops!") 565 | end) 566 | |> decode_or_print_error() 567 | 568 | assert %{ 569 | "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", 570 | "message" => "oops!", 571 | "stack_trace" => "** (socket_closed_unexpectedly) []", 572 | "serviceContext" => %{"service" => "nonode@nohost"} 573 | } = log_entry 574 | end 575 | 576 | if @encoder == Jason do 577 | test "passing options to encoder" do 578 | formatter = GoogleCloud.new(encoder_opts: [pretty: true]) 579 | :logger.update_handler_config(:default, :formatter, formatter) 580 
| 581 | assert capture_log(fn -> 582 | Logger.debug("Hello") 583 | end) =~ 584 | ~r/\n\s{2}"message": "Hello"/ 585 | end 586 | end 587 | 588 | test "accepts opts as a tuple" do 589 | :logger.update_handler_config(:default, :formatter, {GoogleCloud, metadata: :all, project_id: "myproj-101"}) 590 | 591 | assert capture_log(fn -> 592 | Logger.debug("hello") 593 | end) 594 | |> decode_or_print_error() 595 | |> Map.has_key?("message") 596 | end 597 | 598 | test "reads metadata from the given application env" do 599 | Application.put_env(:logger_json, :test_google_cloud_metadata_key, [:foo]) 600 | formatter = GoogleCloud.new(metadata: {:from_application_env, {:logger_json, :test_google_cloud_metadata_key}}) 601 | :logger.update_handler_config(:default, :formatter, formatter) 602 | 603 | Logger.metadata(foo: "foo") 604 | 605 | log = 606 | capture_log(fn -> 607 | Logger.debug("Hello") 608 | end) 609 | |> decode_or_print_error() 610 | 611 | assert %{ 612 | "foo" => "foo" 613 | } = log 614 | end 615 | 616 | test "reads metadata from the given application env at given path" do 617 | Application.put_env(:logger_json, :test_google_cloud_metadata_key, metadata: [:foo]) 618 | 619 | formatter = 620 | GoogleCloud.new(metadata: {:from_application_env, {:logger_json, :test_google_cloud_metadata_key}, [:metadata]}) 621 | 622 | :logger.update_handler_config(:default, :formatter, formatter) 623 | 624 | Logger.metadata(foo: "foo") 625 | 626 | log = 627 | capture_log(fn -> 628 | Logger.debug("Hello") 629 | end) 630 | |> decode_or_print_error() 631 | 632 | assert %{ 633 | "foo" => "foo" 634 | } = log 635 | end 636 | 637 | test "reads project_id from environment_variables" do 638 | for {env_var, project_id} <- [ 639 | {"GCLOUD_PROJECT", "myproj-101"}, 640 | {"GOOGLE_PROJECT_ID", "myproj-102"}, 641 | {"GOOGLE_CLOUD_PROJECT", "myproj-103"} 642 | ] do 643 | System.put_env(env_var, project_id) 644 | 645 | formatter = GoogleCloud.new(metadata: :all) 646 | :logger.update_handler_config(:default, 
:formatter, formatter) 647 | 648 | Logger.metadata( 649 | otel_span_id: ~c"bff20904aa5883a6", 650 | otel_trace_flags: ~c"01", 651 | otel_trace_id: ~c"294740ce41cc9f202dedb563db123532" 652 | ) 653 | 654 | log_entry = 655 | capture_log(fn -> 656 | Logger.debug("Hello") 657 | end) 658 | |> decode_or_print_error() 659 | 660 | assert log_entry["logging.googleapis.com/trace"] == 661 | "projects/#{project_id}/traces/294740ce41cc9f202dedb563db123532" 662 | end 663 | end 664 | end 665 | -------------------------------------------------------------------------------- /test/logger_json/plug_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LoggerJSON.PlugTest do 2 | use LoggerJSON.Case, async: false 3 | import LoggerJSON.Plug 4 | require Logger 5 | 6 | describe "attach/3" do 7 | test "attaches a telemetry handler" do 8 | assert attach( 9 | "logger-json-phoenix-requests", 10 | [:phoenix, :endpoint, :stop], 11 | :info 12 | ) == :ok 13 | 14 | assert [ 15 | %{ 16 | function: _function, 17 | id: "logger-json-phoenix-requests", 18 | config: :info, 19 | event_name: [:phoenix, :endpoint, :stop] 20 | } 21 | ] = :telemetry.list_handlers([:phoenix, :endpoint, :stop]) 22 | end 23 | end 24 | 25 | describe "telemetry_logging_handler/4 for Basic formatter" do 26 | setup do 27 | formatter = LoggerJSON.Formatters.Basic.new(metadata: :all) 28 | :logger.update_handler_config(:default, :formatter, formatter) 29 | end 30 | 31 | test "logs request latency and metadata" do 32 | conn = Plug.Test.conn(:get, "/") |> Plug.Conn.put_status(200) 33 | 34 | log = 35 | capture_log(fn -> 36 | telemetry_logging_handler( 37 | [:phoenix, :endpoint, :stop], 38 | %{duration: 500_000}, 39 | %{conn: conn}, 40 | :info 41 | ) 42 | 43 | Logger.flush() 44 | end) 45 | 46 | assert %{ 47 | "message" => "GET / [Sent 200 in 500us]", 48 | "metadata" => %{"duration_us" => 500}, 49 | "request" => %{ 50 | "client" => %{"ip" => "127.0.0.1", "user_agent" => nil}, 51 | 
"connection" => %{"method" => "GET", "path" => "/", "protocol" => "HTTP/1.1", "status" => 200} 52 | } 53 | } = decode_or_print_error(log) 54 | end 55 | 56 | test "logs unsent connections" do 57 | conn = Plug.Test.conn(:get, "/") 58 | 59 | log = 60 | capture_log(fn -> 61 | telemetry_logging_handler( 62 | [:phoenix, :endpoint, :stop], 63 | %{duration: 500_000}, 64 | %{conn: conn}, 65 | :info 66 | ) 67 | 68 | Logger.flush() 69 | end) 70 | 71 | assert %{ 72 | "message" => "GET / [Sent in 500us]", 73 | "metadata" => %{"duration_us" => 500}, 74 | "request" => %{ 75 | "client" => %{"ip" => "127.0.0.1", "user_agent" => nil}, 76 | "connection" => %{"method" => "GET", "path" => "/", "protocol" => "HTTP/1.1", "status" => nil} 77 | } 78 | } = decode_or_print_error(log) 79 | end 80 | 81 | test "logs chunked responses" do 82 | conn = %{Plug.Test.conn(:get, "/") | state: :set_chunked} 83 | 84 | log = 85 | capture_log(fn -> 86 | telemetry_logging_handler( 87 | [:phoenix, :endpoint, :stop], 88 | %{duration: 500_000}, 89 | %{conn: conn}, 90 | :info 91 | ) 92 | 93 | Logger.flush() 94 | end) 95 | 96 | assert %{ 97 | "message" => "GET / [Chunked in 500us]", 98 | "metadata" => %{"duration_us" => 500}, 99 | "request" => %{ 100 | "client" => %{"ip" => "127.0.0.1", "user_agent" => nil}, 101 | "connection" => %{"method" => "GET", "path" => "/", "protocol" => "HTTP/1.1", "status" => nil} 102 | } 103 | } = decode_or_print_error(log) 104 | end 105 | 106 | test "logs long-running responses" do 107 | conn = %{Plug.Test.conn(:get, "/") | state: :set_chunked} 108 | 109 | log = 110 | capture_log(fn -> 111 | telemetry_logging_handler( 112 | [:phoenix, :endpoint, :stop], 113 | %{duration: 500_000_000}, 114 | %{conn: conn}, 115 | :info 116 | ) 117 | 118 | Logger.flush() 119 | end) 120 | 121 | assert %{"message" => "GET / [Chunked in 500ms]"} = decode_or_print_error(log) 122 | end 123 | 124 | test "allows disabling logging at runtime" do 125 | conn = Plug.Test.conn(:get, "/") 126 | 127 | log = 128 | 
capture_log(fn -> 129 | telemetry_logging_handler( 130 | [:phoenix, :endpoint, :stop], 131 | %{duration: 5000}, 132 | %{conn: conn}, 133 | {__MODULE__, :ignore_log, [:arg]} 134 | ) 135 | 136 | Logger.flush() 137 | end) 138 | 139 | assert log == "" 140 | end 141 | end 142 | 143 | describe "telemetry_logging_handler/4 for DataDog formatter" do 144 | setup do 145 | formatter = LoggerJSON.Formatters.Datadog.new(metadata: [:network, :phoenix, :duration, :http, :"usr.id"]) 146 | :logger.update_handler_config(:default, :formatter, formatter) 147 | end 148 | 149 | test "logs request latency and metadata" do 150 | conn = Plug.Test.conn(:get, "/") |> Plug.Conn.put_status(200) 151 | 152 | log = 153 | capture_log(fn -> 154 | telemetry_logging_handler( 155 | [:phoenix, :endpoint, :stop], 156 | %{duration: 500_000}, 157 | %{conn: conn}, 158 | :info 159 | ) 160 | 161 | Logger.flush() 162 | end) 163 | 164 | assert %{ 165 | "message" => "GET / [Sent 200 in 500us]", 166 | "http" => %{ 167 | "method" => "GET", 168 | "referer" => nil, 169 | "request_id" => nil, 170 | "status_code" => 200, 171 | "url" => "http://www.example.com/", 172 | "url_details" => %{ 173 | "host" => "www.example.com", 174 | "path" => "/", 175 | "port" => 80, 176 | "queryString" => "", 177 | "scheme" => "http" 178 | }, 179 | "useragent" => nil 180 | }, 181 | "logger" => %{}, 182 | "network" => %{"client" => %{"ip" => "127.0.0.1"}} 183 | } = decode_or_print_error(log) 184 | end 185 | 186 | test "logs requests" do 187 | conn = 188 | Plug.Test.conn(:get, "/foo/bar?baz=qux#frag") 189 | |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)") 190 | |> Plug.Conn.put_req_header("referer", "http://www.example.com/") 191 | |> Plug.Conn.put_status(200) 192 | 193 | log = 194 | capture_log(fn -> 195 | Logger.metadata(request_id: "123") 196 | 197 | telemetry_logging_handler( 198 | [:phoenix, :endpoint, :stop], 199 | %{duration: 500_000}, 200 | %{conn: conn}, 201 | :info 202 | 
) 203 | 204 | Logger.flush() 205 | end) 206 | 207 | assert %{ 208 | "http" => %{ 209 | "method" => "GET", 210 | "referer" => "http://www.example.com/", 211 | "request_id" => "123", 212 | "status_code" => 200, 213 | "url" => "http://www.example.com/foo/bar?baz=qux", 214 | "url_details" => %{ 215 | "host" => "www.example.com", 216 | "path" => "/foo/bar", 217 | "port" => 80, 218 | "queryString" => "baz=qux", 219 | "scheme" => "http" 220 | }, 221 | "useragent" => "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)" 222 | }, 223 | "message" => "GET /foo/bar [Sent 200 in 500us]", 224 | "network" => %{"client" => %{"ip" => "127.0.0.1"}} 225 | } = decode_or_print_error(log) 226 | end 227 | 228 | test "logs unsent connections" do 229 | conn = Plug.Test.conn(:get, "/") 230 | 231 | log = 232 | capture_log(fn -> 233 | telemetry_logging_handler( 234 | [:phoenix, :endpoint, :stop], 235 | %{duration: 500_000}, 236 | %{conn: conn}, 237 | :info 238 | ) 239 | 240 | Logger.flush() 241 | end) 242 | 243 | assert %{ 244 | "message" => "GET / [Sent in 500us]", 245 | "http" => %{ 246 | "method" => "GET", 247 | "referer" => nil, 248 | "request_id" => nil, 249 | "status_code" => nil, 250 | "url" => "http://www.example.com/", 251 | "url_details" => %{ 252 | "host" => "www.example.com", 253 | "path" => "/", 254 | "port" => 80, 255 | "queryString" => "", 256 | "scheme" => "http" 257 | }, 258 | "useragent" => nil 259 | }, 260 | "logger" => %{}, 261 | "network" => %{"client" => %{"ip" => "127.0.0.1"}} 262 | } = decode_or_print_error(log) 263 | end 264 | 265 | test "logs chunked responses" do 266 | conn = %{Plug.Test.conn(:get, "/") | state: :set_chunked} 267 | 268 | log = 269 | capture_log(fn -> 270 | telemetry_logging_handler( 271 | [:phoenix, :endpoint, :stop], 272 | %{duration: 500_000}, 273 | %{conn: conn}, 274 | :info 275 | ) 276 | 277 | Logger.flush() 278 | end) 279 | 280 | assert %{ 281 | "message" => "GET / [Chunked in 500us]", 282 | "http" => %{ 283 | "method" => "GET", 284 
| "referer" => nil, 285 | "request_id" => nil, 286 | "status_code" => nil, 287 | "url" => "http://www.example.com/", 288 | "url_details" => %{ 289 | "host" => "www.example.com", 290 | "path" => "/", 291 | "port" => 80, 292 | "queryString" => "", 293 | "scheme" => "http" 294 | }, 295 | "useragent" => nil 296 | }, 297 | "logger" => %{}, 298 | "network" => %{"client" => %{"ip" => "127.0.0.1"}} 299 | } = decode_or_print_error(log) 300 | end 301 | 302 | test "logs long-running responses" do 303 | conn = %{Plug.Test.conn(:get, "/") | state: :set_chunked} 304 | 305 | log = 306 | capture_log(fn -> 307 | telemetry_logging_handler( 308 | [:phoenix, :endpoint, :stop], 309 | %{duration: 500_000_000}, 310 | %{conn: conn}, 311 | :info 312 | ) 313 | 314 | Logger.flush() 315 | end) 316 | 317 | assert %{"message" => "GET / [Chunked in 500ms]"} = decode_or_print_error(log) 318 | end 319 | 320 | test "allows disabling logging at runtime" do 321 | conn = Plug.Test.conn(:get, "/") 322 | 323 | log = 324 | capture_log(fn -> 325 | telemetry_logging_handler( 326 | [:phoenix, :endpoint, :stop], 327 | %{duration: 5000}, 328 | %{conn: conn}, 329 | {__MODULE__, :ignore_log, [:arg]} 330 | ) 331 | 332 | Logger.flush() 333 | end) 334 | 335 | assert log == "" 336 | end 337 | end 338 | 339 | describe "telemetry_logging_handler/4 for GoogleCloud formatter" do 340 | setup do 341 | formatter = LoggerJSON.Formatters.GoogleCloud.new(metadata: {:all_except, [:conn]}) 342 | :logger.update_handler_config(:default, :formatter, formatter) 343 | end 344 | 345 | test "logs request latency and metadata" do 346 | conn = Plug.Test.conn(:get, "/") |> Plug.Conn.put_status(200) 347 | 348 | log = 349 | capture_log(fn -> 350 | telemetry_logging_handler( 351 | [:phoenix, :endpoint, :stop], 352 | %{duration: 500_000}, 353 | %{conn: conn}, 354 | :info 355 | ) 356 | 357 | Logger.flush() 358 | end) 359 | 360 | assert %{ 361 | "message" => "GET / [Sent 200 in 500us]", 362 | "duration_us" => 500, 363 | "httpRequest" => %{ 
                 # Tail of the preceding assertion (the test begins before this
                 # chunk): the GoogleCloud formatter nests request details under
                 # "httpRequest" using Google Cloud LogEntry field names.
                 "protocol" => "HTTP/1.1",
                 "referer" => nil,
                 "remoteIp" => "127.0.0.1",
                 "requestMethod" => "GET",
                 "requestUrl" => "http://www.example.com/",
                 "status" => 200,
                 "userAgent" => nil
               },
               "severity" => "INFO"
             } = decode_or_print_error(log)
    end

    # A conn that never reached send_resp has no status: the formatter still
    # reports "[Sent in ...]" but with a nil "status" field.
    test "logs unsent connections" do
      conn = Plug.Test.conn(:get, "/")

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 500_000},
            %{conn: conn},
            :info
          )

          Logger.flush()
        end)

      assert %{
               "message" => "GET / [Sent in 500us]",
               "duration_us" => 500,
               "httpRequest" => %{
                 "protocol" => "HTTP/1.1",
                 "referer" => nil,
                 "remoteIp" => "127.0.0.1",
                 "requestMethod" => "GET",
                 "requestUrl" => "http://www.example.com/",
                 "status" => nil,
                 "userAgent" => nil
               },
               "severity" => "INFO"
             } = decode_or_print_error(log)
    end

    # Chunked responses (state: :set_chunked) are labelled "[Chunked in ...]"
    # and carry no status code.
    test "logs chunked responses" do
      conn = %{Plug.Test.conn(:get, "/") | state: :set_chunked}

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 500_000},
            %{conn: conn},
            :info
          )

          Logger.flush()
        end)

      assert %{
               "message" => "GET / [Chunked in 500us]",
               "duration_us" => 500,
               "httpRequest" => %{
                 "protocol" => "HTTP/1.1",
                 "referer" => nil,
                 "remoteIp" => "127.0.0.1",
                 "requestMethod" => "GET",
                 "requestUrl" => "http://www.example.com/",
                 "status" => nil,
                 "userAgent" => nil
               },
               "severity" => "INFO"
             } = decode_or_print_error(log)
    end

    # With a duration of 500_000_000 native units the message switches to a
    # millisecond label ("500ms") instead of microseconds.
    test "logs long-running responses" do
      conn = %{Plug.Test.conn(:get, "/") | state: :set_chunked}

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 500_000_000},
            %{conn: conn},
            :info
          )

          Logger.flush()
        end)

      assert %{"message" => "GET / [Chunked in 500ms]"} = decode_or_print_error(log)
    end

    # Passing an {module, function, args} tuple instead of a level lets the
    # caller veto logging per request; ignore_log/2 below always returns
    # false, so nothing at all is written.
    test "allows disabling logging at runtime" do
      conn = Plug.Test.conn(:get, "/")

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 5000},
            %{conn: conn},
            {__MODULE__, :ignore_log, [:arg]}
          )

          Logger.flush()
        end)

      assert log == ""
    end
  end

  describe "telemetry_logging_handler/4 for Elastic formatter" do
    # Swap the default :logger handler's formatter to Elastic so the same
    # telemetry events are rendered with ECS-style dotted field names.
    setup do
      formatter = LoggerJSON.Formatters.Elastic.new(metadata: nil)
      :logger.update_handler_config(:default, :formatter, formatter)
    end

    test "logs request latency and metadata" do
      conn = Plug.Test.conn(:get, "/") |> Plug.Conn.put_status(200)

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 500_000},
            %{conn: conn},
            :info
          )

          Logger.flush()
        end)

      assert %{
               "client.ip" => "127.0.0.1",
               "http.request.method" => "GET",
               "http.request.referrer" => nil,
               "http.response.status_code" => 200,
               "http.version" => "HTTP/1.1",
               "url.path" => "/",
               "user_agent.original" => nil
             } = decode_or_print_error(log)
    end

    # Same as the GoogleCloud case: an unsent conn produces a nil status code.
    test "logs unsent connections" do
      conn = Plug.Test.conn(:get, "/")

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 500_000},
            %{conn: conn},
            :info
          )

          Logger.flush()
        end)

      assert %{
               "client.ip" => "127.0.0.1",
               "http.request.method" => "GET",
               "http.request.referrer" => nil,
               "http.response.status_code" => nil,
               "http.version" => "HTTP/1.1",
               "url.path" => "/",
               "user_agent.original" => nil
             } = decode_or_print_error(log)
    end

    test "logs chunked responses" do
      conn = %{Plug.Test.conn(:get, "/") | state: :set_chunked}

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 500_000},
            %{conn: conn},
            :info
          )

          Logger.flush()
        end)

      assert %{
               "client.ip" => "127.0.0.1",
               "http.request.method" => "GET",
               "http.request.referrer" => nil,
               "http.response.status_code" => nil,
               "http.version" => "HTTP/1.1",
               "url.path" => "/",
               "user_agent.original" => nil
             } = decode_or_print_error(log)
    end

    test "logs long-running responses" do
      conn = %{Plug.Test.conn(:get, "/") | state: :set_chunked}

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 500_000_000},
            %{conn: conn},
            :info
          )

          Logger.flush()
        end)

      assert %{"message" => "GET / [Chunked in 500ms]"} = decode_or_print_error(log)
    end

    test "allows disabling logging at runtime" do
      conn = Plug.Test.conn(:get, "/")

      log =
        capture_log(fn ->
          telemetry_logging_handler(
            [:phoenix, :endpoint, :stop],
            %{duration: 5000},
            %{conn: conn},
            {__MODULE__, :ignore_log, [:arg]}
          )

          Logger.flush()
        end)

      assert log == ""
    end
  end

  # Even when logging is vetoed (level argument is false) the telemetry
  # handler must return :ok so telemetry does not detach it.
  test "telemetry_logging_handler/4 returns :ok even when Logger is not called" do
    log =
      capture_log(fn ->
        assert :ok == telemetry_logging_handler([], %{duration: 0}, %{conn: %Plug.Conn{}}, false)

        Logger.flush()
      end)

    assert log == ""
  end

  # Veto callback used by the "allows disabling logging at runtime" tests.
  def ignore_log(%Plug.Conn{}, :arg), do: false
end
--------------------------------------------------------------------------------
/test/logger_json_test.exs:
--------------------------------------------------------------------------------
defmodule LoggerJSONTest do
  use ExUnit.Case

  describe "configure_log_level_from_env/1" do
    test "configures log level from environment variable" do
      System.put_env("LOGGER_JSON_TEST_LOG_LEVEL", "warning")

      # This test mutates process-global state (an OS env var and the global
      # Logger level). Clean both up so they do not leak into other test
      # modules; :debug is the suite baseline (see LoggerJSON.Case.capture_log).
      on_exit(fn ->
        System.delete_env("LOGGER_JSON_TEST_LOG_LEVEL")
        Logger.configure(level: :debug)
      end)

      assert LoggerJSON.configure_log_level_from_env!("LOGGER_JSON_TEST_LOG_LEVEL") == :ok
      assert Logger.level() == :warning
    end
  end

  describe "configure_log_level/1" do
    test "configures log level" do
      # Restore the suite's baseline level after exercising the setters.
      on_exit(fn -> Logger.configure(level: :debug) end)

      assert LoggerJSON.configure_log_level!("debug") == :ok
      assert Logger.level() == :debug

      assert LoggerJSON.configure_log_level!(:info) == :ok
      assert Logger.level() == :info

      # nil is a no-op: the previously configured level is kept.
      assert LoggerJSON.configure_log_level!(nil) == :ok
      assert Logger.level() == :info
    end

    test "raises on invalid log level" do
      message =
        "Log level should be one of [:error, :info, :debug, :emergency, :alert, :critical, :warning, :notice] values, got: :invalid"

      assert_raise ArgumentError, message, fn ->
        LoggerJSON.configure_log_level!(:invalid)
      end
    end
  end
end
--------------------------------------------------------------------------------
/test/support/crashing_gen_server.ex:
--------------------------------------------------------------------------------
defmodule CrashingGenServer do
  @moduledoc false
  # Minimal GenServer that raises on a :boom call — used by tests that need a
  # real process crash to exercise crash-report formatting.
  use GenServer

  def start_link(_) do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  @impl true
  def init(state) do
    {:ok, state}
  end

  # Deliberately crashes the server so callers can capture the resulting log.
  @impl true
  def handle_call(:boom, _, _) do
    raise "boom"
  end
end
--------------------------------------------------------------------------------
/test/support/logger_case.ex:
--------------------------------------------------------------------------------
defmodule LoggerJSON.Case do
  @moduledoc false
  use ExUnit.CaseTemplate
  import ExUnit.CaptureIO

  # Resolved once at compile time; whatever module
  # LoggerJSON.Formatter.encoder/0 returns — presumably a Jason-compatible
  # JSON encoder exposing decode!/1 (TODO confirm against formatter.ex).
  @encoder LoggerJSON.Formatter.encoder()

  using _ do
    quote do
      import LoggerJSON.Case
    end
  end

  # Runs `fun` at the given Logger level, captures everything written to the
  # :user IO device (where the default handler's output goes in tests), and
  # always restores the :debug baseline level afterwards.
  def capture_log(level \\ :debug, fun) do
    Logger.configure(level: level)

    capture_io(:user, fn ->
      fun.()
      Logger.flush()
    end)
  after
    Logger.configure(level: :debug)
  end

  # Decodes a captured JSON log line. On decode failure the raw payload is
  # printed (so it shows up in the test output for debugging) and the test
  # is failed via flunk/1.
  def decode_or_print_error(data) do
    try do
      @encoder.decode!(data)
    rescue
      _reason ->
        IO.puts(data)
        flunk("Failed to decode JSON")
    end
  end
end
--------------------------------------------------------------------------------
/test/support/name_struct.ex:
--------------------------------------------------------------------------------
defmodule NameStruct do
  @moduledoc """
  This struct is required for tests on structs that implement the Jason.Encoder protocol.

  Defining this struct in the test module wouldn't work, since the .exs files
  are not compiled with the application so no protocol consolidation would happen.
  """

  @derive LoggerJSON.Formatter.encoder_protocol()

  defstruct [:name]
end
--------------------------------------------------------------------------------
/test/test_helper.exs:
--------------------------------------------------------------------------------
# Emit both JUnit XML (consumed by the CI test reporter) and normal CLI output.
ExUnit.configure(formatters: [JUnitFormatter, ExUnit.CLIFormatter])
ExUnit.start()
--------------------------------------------------------------------------------