├── .tool-versions ├── .github ├── CODEOWNERS ├── dependabot.yml ├── workflows │ ├── publish.yml │ └── ci.yml └── ISSUE_TEMPLATE │ └── bug_report.md ├── .formatter.exs ├── test ├── test_helper.exs └── uinta │ ├── formatter │ └── datadog_test.exs │ ├── formatter_test.exs │ └── plug_test.exs ├── guides └── config.md ├── CREDITS.md ├── lib ├── uinta │ ├── types.ex │ ├── formatter │ │ ├── standard.ex │ │ ├── datadog.ex │ │ └── util.ex │ ├── formatter.ex │ └── plug.ex └── uinta.ex ├── RELEASE.md ├── .gitignore ├── .dialyzer.ignore-warnings ├── LICENSE.md ├── mix.exs ├── CODE_OF_CONDUCT.md ├── CHANGELOG.md ├── mix.lock ├── README.md ├── CONTRIBUTING.md └── .credo.exs /.tool-versions: -------------------------------------------------------------------------------- 1 | elixir 1.17.3 2 | erlang 26.2.5 3 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # add github group owner for uinta only 2 | * @podium/oss-engineers 3 | -------------------------------------------------------------------------------- /.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] 4 | ] 5 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | Logger.configure_backend(:console, colors: [enabled: false], metadata: [:request_id]) 3 | -------------------------------------------------------------------------------- /guides/config.md: -------------------------------------------------------------------------------- 1 | ## Local Development configuration 2 | 3 | This section describes the available configuration when working with the Uinta 4 | 5 | 
-------------------------------------------------------------------------------- /CREDITS.md: -------------------------------------------------------------------------------- 1 | Credits in no special order: 2 | 3 | - [Dennis Beatty](https://github.com/dnsbty) 4 | - [Emmanuel Pinault](https://github.com/epinault) 5 | -------------------------------------------------------------------------------- /lib/uinta/types.ex: -------------------------------------------------------------------------------- 1 | defmodule Uinta.Types do 2 | @moduledoc """ 3 | Defines types 4 | """ 5 | @type level :: :debug | :info | :warn | :error 6 | @type time :: {{1970..10_000, 1..12, 1..31}, {0..23, 0..59, 0..59, 0..999}} 7 | end 8 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "weekly" 8 | 9 | - package-ecosystem: "mix" 10 | directory: "/" 11 | schedule: 12 | interval: "weekly" -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # Release Instructions 2 | 3 | 1. Check related deps for required version bumps and compatibility 4 | 2. Bump version in related files below 5 | 3. Bump external dependency version in related external files below 6 | 4. Run tests: 7 | - `mix test` in the root folder 8 | - `mix credo` in the root folder 9 | 5. Commit, push code 10 | 6. 
Publish `uinta` packages and docs 11 | 12 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | 3 | on: 4 | push: 5 | tags: 6 | - "*" 7 | 8 | jobs: 9 | publish: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Check out 13 | uses: actions/checkout@v6 14 | 15 | - name: Publish package to hex.pm 16 | uses: hipcall/github_action_publish_hex@v1 17 | env: 18 | HEX_API_KEY: ${{ secrets.HEX_API_KEY }} 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: epinault 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | ** Provide the following details 14 | 15 | - Elixir version (elixir -v): 16 | - Erlang version (erl -v): 17 | - Operating system: 18 | 19 | **Expected behavior** 20 | A clear and concise description of what you expected to happen. 21 | 22 | **Actual behavior** 23 | A clear and concise description of what actually happens. 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover/ 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps/ 9 | 10 | # Where third-party dependencies like ExDoc output generated docs. 11 | /doc/ 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 
14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore package tarball (built via "mix hex.build"). 23 | uinta-*.tar 24 | 25 | -------------------------------------------------------------------------------- /test/uinta/formatter/datadog_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Uinta.Formatter.DatadogTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias Uinta.Formatter.Datadog 5 | 6 | describe "format/4" do 7 | test "it adds dd.trace_id and dd.span_id when properties are hex" do 8 | metadata = [trace_id: "e4705f4a1b95d6ae5ec373e00f013b91", span_id: "8e00cc3f6e137140"] 9 | result = Datadog.format(:info, "Hello World", {{1980, 1, 1}, {0, 0, 0, 0}}, metadata) 10 | 11 | %{"dd.trace_id" => trace_id, "dd.span_id" => span_id} = Jason.decode!(result) 12 | assert trace_id == "6828428866185411473" 13 | assert span_id == "10232402926187540800" 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /lib/uinta/formatter/standard.ex: -------------------------------------------------------------------------------- 1 | defmodule Uinta.Formatter.Standard do 2 | @moduledoc """ 3 | No special handling, outputs everything as JSON. See Uinta.Formatter for more information. 
4 | """ 5 | 6 | alias Uinta.Formatter.Util 7 | alias Uinta.Types 8 | 9 | @type level :: :debug | :info | :warn | :error 10 | @type time :: {{1970..10_000, 1..12, 1..31}, {0..23, 0..59, 0..59, 0..999}} 11 | 12 | @doc """ 13 | See Uinta.formatter.format/4 14 | """ 15 | @spec format(Types.level(), iodata(), Types.time(), Keyword.t()) :: iodata() 16 | def format(level, message, timestamp, metadata) do 17 | level |> Util.format(message, timestamp, metadata) |> Util.encode() 18 | rescue 19 | e -> 20 | "Could not format: #{inspect({level, message, metadata})}, Due to error: #{inspect(e)}" 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /.dialyzer.ignore-warnings: -------------------------------------------------------------------------------- 1 | lib/uinta/formatter/util.ex:62: Unknown type 'Elixir.Logger.Formatter':time/0 2 | lib/uinta/plug.ex:222: The test binary() == 'nil' can never evaluate to 'true' 3 | lib/uinta/plug.ex:224: The pattern 'false' can never match the type 'true' 4 | lib/uinta/plug.ex:257: The pattern <#{'query':=_query@1}, _> can never match the type <#{'operation':=binary(), 'type':=<<_:40,_:_*24>>, 'variables':=binary()},#{'filter_variables':=[binary()], 'format':='json' | 'map' | 'string', 'ignored_paths':=[binary()], 'include_datadog_fields':=boolean(), 'include_unnamed_queries':='true', 'include_variables':=boolean(), 'level':='alert' | 'critical' | 'debug' | 'emergency' | 'error' | 'info' | 'notice' | 'warn' | 'warning'}> 5 | lib/uinta/plug.ex:262: The variable _ can never match since previous clauses completely covered the type #{'operation':=binary(), 'type':=<<_:40,_:_*24>>, 'variables':=binary()} 6 | -------------------------------------------------------------------------------- /lib/uinta.ex: -------------------------------------------------------------------------------- 1 | defmodule Uinta do 2 | @moduledoc """ 3 | Uinta is a plugin for the default Elixir logger that lowers log 
volume while 4 | maximizing log usefulness. It is not a logger backend, but rather includes 5 | `Uinta.Formatter` which will format logs on top of the default Elixir logger 6 | backend. 7 | 8 | In addition to the formatter, Uinta also includes `Uinta.Plug`. The plug is a 9 | drop-in replacement for `Plug.Logger` that will log out the request and 10 | response on a single line. It can also put the request info into the 11 | top-level JSON for easier parsing by your log aggregator. 12 | 13 | ## Installation 14 | 15 | The formatter and plug will be installed separately depending on the 16 | functionality that you want. 17 | 18 | To install the formatter, see the instructions in `Uinta.Formatter` 19 | 20 | To install the plug, see the instructions in `Uinta.Plug` 21 | """ 22 | end 23 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Dennis Beatty 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Uinta.MixProject do 2 | use Mix.Project 3 | 4 | @project_url "https://github.com/podium/uinta" 5 | @version "0.16.0" 6 | 7 | def project do 8 | [ 9 | app: :uinta, 10 | name: "Uinta", 11 | description: "Simpler structured logs and lower log volume for Elixir apps", 12 | version: @version, 13 | elixir: "~> 1.14", 14 | source_url: @project_url, 15 | homepage_url: @project_url, 16 | start_permanent: Mix.env() == :prod, 17 | deps: deps(), 18 | dialyzer: [ 19 | ignore_warnings: ".dialyzer.ignore-warnings", 20 | list_unused_filters: true, 21 | plt_add_apps: [:mix] 22 | ], 23 | docs: docs(), 24 | package: package(), 25 | test_coverage: [summary: [threshold: 80]] 26 | ] 27 | end 28 | 29 | def application do 30 | [ 31 | extra_applications: [:logger] 32 | ] 33 | end 34 | 35 | defp deps do 36 | [ 37 | {:credo, "~> 1.7", only: [:dev, :test]}, 38 | {:dialyxir, "~> 1.4", only: :dev, runtime: false}, 39 | {:ex_doc, "~> 0.30", only: :dev, runtime: false}, 40 | {:jason, "~> 1.4"}, 41 | {:plug, "~> 1.10", optional: true} 42 | ] 43 | end 44 | 45 | defp docs do 46 | [ 47 | main: "Uinta", 48 | extras: [ 49 | {:"README.md", title: "Readme"}, 50 | "CHANGELOG.md" 51 | ], 52 | source_url: @project_url, 53 | source_ref: "v#{@version}", 54 | homepage_url: @project_url 55 | ] 56 | end 57 | 58 | defp package do 59 | [ 60 | maintainers: ["Podium"], 61 | licenses: ["MIT"], 62 | links: %{ 63 | "GitHub" => @project_url, 64 | "Changelog" => "#{@project_url}/blob/master/CHANGELOG.md" 65 | } 66 | ] 67 | end 68 | end 69 | 
-------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. 4 | 5 | We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality. 6 | 7 | Examples of unacceptable behavior by participants include: 8 | 9 | * The use of sexualized language or imagery 10 | * Personal attacks 11 | * Trolling or insulting/derogatory comments 12 | * Public or private harassment 13 | * Publishing other's private information, such as physical or electronic addresses, without explicit permission 14 | * Other unethical or unprofessional conduct. 15 | 16 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team. 17 | 18 | This code of conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. 
19 | 20 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers. 21 | 22 | This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org), version 1.2.0, available at [https://www.contributor-covenant.org/version/1/2/0/code-of-conduct/](https://www.contributor-covenant.org/version/1/2/0/code-of-conduct/) 23 | -------------------------------------------------------------------------------- /lib/uinta/formatter/datadog.ex: -------------------------------------------------------------------------------- 1 | defmodule Uinta.Formatter.Datadog do 2 | @moduledoc """ 3 | 4 | ## Installation 5 | 6 | ``` 7 | config :logger, :console, format: {Uinta.Formatter.Datadog, :format} 8 | ``` 9 | 10 | ## Features 11 | 12 | ### Trace and Span id correlation 13 | 14 | Takes trace ids and span ids in the OpenTelemetry format (as hex) and convert them into Datadog 15 | format in the correct key. Logger.metadata must include the keys `:trace_id` and `:span_id`. 16 | Be sure to add those two keys onto the allowlist in the `:logger` config. See 17 | [Datadog's documentation](https://docs.datadoghq.com/tracing/connect_logs_and_traces/opentelemetry/) 18 | for more details. 
19 | 20 | #### Example 21 | 22 | ```elixir 23 | ctx = OpenTelemetry.Tracer.current_span_ctx() 24 | Logger.metadata([ 25 | trace_id: OpenTelemetry.Span.hex_trace_id(ctx), 26 | span_id: OpenTelemetry.Span.hex_span_id(ctx) 27 | ]) 28 | ``` 29 | """ 30 | 31 | alias Uinta.Formatter.Util 32 | alias Uinta.Types 33 | 34 | @doc """ 35 | See Uinta.formatter.format/4 36 | """ 37 | @spec format(Types.level(), iodata(), Types.time(), Keyword.t()) :: iodata() 38 | def format(level, message, timestamp, metadata) do 39 | level 40 | |> Util.format(message, timestamp, metadata) 41 | |> add_datadog_trace(metadata) 42 | |> Util.encode() 43 | rescue 44 | e -> 45 | "Could not format: #{inspect({level, message, metadata})}, Due to error: #{inspect(e)}" 46 | end 47 | 48 | @spec add_datadog_trace(map(), Keyword.t()) :: map() 49 | defp add_datadog_trace(log, metadata) do 50 | log 51 | |> Map.put("dd.trace_id", to_datadog_id(Keyword.get(metadata, :trace_id))) 52 | |> Map.put("dd.span_id", to_datadog_id(Keyword.get(metadata, :span_id))) 53 | end 54 | 55 | defp to_datadog_id(id) when is_nil(id), do: nil 56 | 57 | defp to_datadog_id(<<_high::bytes-size(16)>> <> <>) do 58 | # OpenTelemetry uses 128 bits for the trace id and 64 bits for the span id. 59 | # DataDog uses the lower 64 bits of each, as an unsigned integer, for its trace/span ids. 60 | to_datadog_id(low) 61 | end 62 | 63 | defp to_datadog_id(id) do 64 | case Integer.parse(id, 16) do 65 | {integer, _remainder_of_binary} -> 66 | Integer.to_string(integer, 10) 67 | 68 | _error -> 69 | nil 70 | end 71 | end 72 | end 73 | -------------------------------------------------------------------------------- /lib/uinta/formatter.ex: -------------------------------------------------------------------------------- 1 | defmodule Uinta.Formatter do 2 | @moduledoc """ 3 | The Uinta Formatter will wrap normal log statements in a JSON object. The log 4 | level, timestamp, and metadata will all be attached to it as parts of the 5 | object. 
6 | 7 | The formatter can also format stuctured log messages. When a JSON string is 8 | received for formatting, it will be decoded and merged with the map to be 9 | output. In this way any keys that are passed to it will be on the high level 10 | object, so that they won't need to be extracted from a secondary object later 11 | on. 12 | 13 | JSON tends to be a great solution for making logs easily machine parseable, 14 | while still being mostly human readable. However, it is recommended that if 15 | you have separate configuration for development and production environments 16 | that you only enable this in the production environment as it can still 17 | decrease developer productivity to have to mentally parse JSON during 18 | development. 19 | 20 | ## Installation 21 | 22 | To use the formatter, you'll need to add it to your logger configuration. In 23 | your (production) config file, see if you have a line that looks something 24 | like this: 25 | 26 | ``` 27 | config :logger, :console, format: "[$level] $message\\n" 28 | ``` 29 | 30 | If you have it, you'll want to replace it with this: 31 | 32 | ``` 33 | config :logger, :console, format: {Uinta.Formatter, :format} 34 | ``` 35 | 36 | If you don't have it, you'll want to just add that line. 37 | 38 | ## Available Formatters 39 | 40 | ### Standard 41 | 42 | No special handling, outputs everything as JSON. 43 | 44 | ### Datadog 45 | 46 | Support adding Datadog specific metadata to logs. 47 | """ 48 | alias Uinta.Types 49 | 50 | @doc """ 51 | This function takes in four arguments, as defined by 52 | [Logger](https://hexdocs.pm/logger/Logger.html#module-custom-formatting): 53 | 54 | - `level` is the log level, one of `:debug`, `:info`, `:warn`, and `:error` 55 | - `message` is the message to be formatted. 
This should be iodata 56 | (typically String or iolist) 57 | - `timestamp` is a timestamp formatted according to 58 | `t:Logger.Formatter.time/0` 59 | - `metadata` is a keyword list containing metadata that will be included 60 | with the log line 61 | 62 | However, this line should not be called manually. Instead it should be called 63 | by configuring the Elixir logger in your project to use it as a custom log 64 | formatter. See [the installation instructions](#module-installation) for more 65 | information. 66 | 67 | Delegates to Uinta.Formatter.Standard for backward compatibility 68 | """ 69 | @spec format(Types.level(), iodata(), Types.time(), Keyword.t()) :: iodata() 70 | defdelegate format(level, message, timestamp, metadata), to: Uinta.Formatter.Standard 71 | end 72 | -------------------------------------------------------------------------------- /lib/uinta/formatter/util.ex: -------------------------------------------------------------------------------- 1 | defmodule Uinta.Formatter.Util do 2 | @moduledoc """ 3 | Utilities for building a formatter 4 | """ 5 | 6 | alias Uinta.Types 7 | 8 | @doc """ 9 | Format as a map with metadata as a map, timestamp, level, and message. 
10 | """ 11 | def format(level, message, timestamp, metadata) do 12 | message 13 | |> to_map() 14 | |> add_timestamp_and_level(level, timestamp) 15 | |> add_metadata(metadata) 16 | end 17 | 18 | @doc """ 19 | Stringify as JSON 20 | """ 21 | @spec encode(map()) :: String.t() 22 | def encode(formatted_logs) do 23 | formatted_logs 24 | |> Jason.encode!() 25 | |> Kernel.<>("\n") 26 | end 27 | 28 | @spec to_map(iodata()) :: map() 29 | defp to_map(message) when is_binary(message) do 30 | case Jason.decode(message) do 31 | {:ok, decoded} -> decoded 32 | _ -> %{"message" => message} 33 | end 34 | end 35 | 36 | defp to_map(message) when is_list(message) do 37 | %{"message" => to_string(message)} 38 | rescue 39 | _e in ArgumentError -> to_map(inspect(message)) 40 | end 41 | 42 | defp to_map(message), do: %{"message" => "#{inspect(message)}"} 43 | 44 | @spec add_timestamp_and_level(map(), atom(), Types.time()) :: map() 45 | defp add_timestamp_and_level(log, level, timestamp) do 46 | formatted_timestamp = format_timestamp(timestamp) 47 | 48 | log 49 | |> Map.put("log_level", level) 50 | |> Map.put("timestamp", formatted_timestamp) 51 | end 52 | 53 | @spec add_metadata(map(), Keyword.t()) :: map() 54 | defp add_metadata(log, metadata) do 55 | metadata = for {k, v} <- metadata, s = serialize(v), into: %{}, do: {k, s} 56 | Map.put(log, "metadata", metadata) 57 | end 58 | 59 | @doc """ 60 | RFC3339 UTC "Zulu" format. 
61 | """ 62 | @spec format_timestamp(Types.time()) :: String.t() 63 | def format_timestamp({date, time}) do 64 | IO.iodata_to_binary([format_date(date), ?T, format_time(time), ?Z]) 65 | end 66 | 67 | defp format_date({yy, mm, dd}) do 68 | [Integer.to_string(yy), ?-, pad2(mm), ?-, pad2(dd)] 69 | end 70 | 71 | defp format_time({hh, mi, ss, ms}) do 72 | [pad2(hh), ?:, pad2(mi), ?:, pad2(ss), ?., pad3(ms)] 73 | end 74 | 75 | defp pad3(int) when int < 10, do: [?0, ?0, Integer.to_string(int)] 76 | defp pad3(int) when int < 100, do: [?0, Integer.to_string(int)] 77 | defp pad3(int), do: Integer.to_string(int) 78 | 79 | defp pad2(int) when int < 10, do: [?0, Integer.to_string(int)] 80 | defp pad2(int), do: Integer.to_string(int) 81 | 82 | @spec serialize(term()) :: String.t() | nil 83 | defp serialize(value) do 84 | cond do 85 | String.Chars.impl_for(value) -> 86 | serialize_to_string(value) 87 | 88 | Inspect.impl_for(value) -> 89 | inspect(value) 90 | 91 | true -> 92 | nil 93 | end 94 | end 95 | 96 | defp serialize_to_string(value) do 97 | to_string(value) 98 | rescue 99 | _ -> inspect(value) 100 | end 101 | end 102 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | ## v0.16.0 (2025-09-12) 4 | 5 | ### Changed 6 | 7 | * Allows for custom sampled status codes to be specified. These are still honored 8 | by the `success_log_sampling_ratio` option, even if they may not be traditional 9 | 'successful' status codes. 10 | * Adds a `Uinta.Plug.default_sampled_status_codes/0` function to return the default sampled status codes. 11 | These are now explicitly listed, instead of just being all 1xx and 2xx integers. 12 | 13 | ## v0.15.1 (2024-12-04) 14 | 15 | ### Changed 16 | 17 | * Fix dialyzer errors 18 | ## v0.15.0 (2024-10-01) 19 | 20 | ### Changed 21 | 22 | * Remove support for Elixir 1.13. 
Minimum is Elixir 1.14 23 | 24 | ## v0.14.0 (2024-04-16) 25 | * Support dynamic log level in `Uinta.Plug`. 26 | * Option `:log` now accepts `{module, function, args}` tuple called with prepended `conn` to determine log level. 27 | 28 | ## v0.13.0 (2024-01-09) 29 | ### Changed 30 | * Support not double encoding the payload. In order to do that, a new plugs option `format` was added. We are deprecating the `json` option instead though it is backward compatible for a little while 31 | 32 | - `:format` - Output format, either :json, :string, or :map. Default is `:string` 33 | 34 | To migrate easily, just find where you initialize your plug, and change the :json to :format 35 | 36 | ## v0.12.1 (2023-12-01) 37 | ### Changed 38 | * handle white space in front of a mutation/query 39 | 40 | ## v0.12.0 (2023-11-07) 41 | ### Changed 42 | * Upgrade dependencies 43 | * fix bad regex to capture better operation name 44 | 45 | ## v0.11.0 (2022-11-28) 46 | ### Changed 47 | * Upgrade dependencies 48 | * Clean up some docs 49 | * some internal code cleaning 50 | * ** Breaking ** Support Elixir 1.11 and above 51 | 52 | ## v0.10.4 (2022-07-25) 53 | 54 | ### Changed 55 | * Add support for formatting metadata list values 56 | 57 | ## v0.10.3 (2022-07-19) 58 | 59 | ### Changed 60 | * Log out error on rescue when we are unable to format 61 | 62 | ## v0.10.2 (2022-05-31) 63 | 64 | ### Added 65 | * Removed remapping for status as done in Datadog directly 66 | 67 | ## v0.10.1 (2022-05-31) 68 | 69 | ### Added 70 | * Adds support for mapped fields in DataDog. 
To enable this, use `include_datadog_fields: true` in your plug initialization 71 | 72 | ## v0.10.0 (2022-05-09) 73 | 74 | ### Added 75 | * `Uinta.Formatter.Datadog` - Drop in replacement for `Uinta.Formatter` that adds Datadog specific metadata to help correlate traces and logs 76 | 77 | ## v0.9.2 (2022-03-08) 78 | 79 | ### Changed 80 | * Doesn't crash when "query" is not a string 81 | 82 | ## v0.9.1 (2022-01-21) 83 | 84 | ### Changed 85 | * Client ip is now properly serialize as a string 86 | 87 | ## v0.9 (2022-01-05) 88 | 89 | ### Added 90 | * adds more fields to the log: 91 | * referer 92 | * user_agent 93 | * x_forwarded_for 94 | * x_forwarded_proto 95 | * x_forwarded_port 96 | * via 97 | 98 | 99 | ## v0.8 (2021-11-22) 100 | 101 | ### Added 102 | `operationName` for GQL requests 103 | 104 | ### Changed 105 | `path` now always path and not sometimes `operationName` 106 | 107 | 108 | ## v0.7 (2021-08-16) 109 | 110 | ### Added 111 | 112 | * configurable sampling of successful requests. Use the :success_log_sampling_ratio configuration in init and specify a ratio of the sample to log. 
Ratio can support a precision up to 4 digits 113 | * update dependencies 114 | ## v0.6 (2021-04-08) 115 | 116 | ### Added 117 | 118 | * ignore request path options to `Uinta.Plug`, to filter out request unwanted unless HTTP status returned 119 | is not a 200-level status 120 | * Adds a CHANGELOG 121 | * Adds a duration_ms as a number 122 | 123 | ### Changed 124 | 125 | * Upgraded the package dependencies 126 | 127 | 128 | ## v0.5 (2020-11-04) 129 | 130 | ### Added 131 | * ignore request path options to `Uinta.Plug`, to filter out request unwanted unless HTTP status returned 132 | is not a 200-level status 133 | 134 | 135 | ## v0.4.2 (2020-03-31) 136 | 137 | ### Changed 138 | 139 | * properly handle GraphQL queries with no commas 140 | 141 | 142 | ## v0.4.1 (2020-03-31) 143 | 144 | ### Changed 145 | 146 | * handle array arguments properly in the regex 147 | 148 | 149 | ## v0.4.0 (2020-03-30) 150 | 151 | ### Changed 152 | 153 | * better handle queries with no operationName 154 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - master 7 | push: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | setup: 13 | runs-on: ${{ matrix.os }} 14 | env: 15 | MIX_ENV: test 16 | 17 | strategy: 18 | fail-fast: false 19 | matrix: 20 | os: [ubuntu-22.04] 21 | elixir_version: [1.15, 1.16, 1.17] 22 | otp_version: [ 25, 26, 27] 23 | exclude: 24 | - otp_version: 27 25 | elixir_version: 1.15 26 | - otp_version: 27 27 | elixir_version: 1.16 28 | 29 | steps: 30 | - uses: actions/checkout@v6 31 | 32 | - name: Set up Elixir 33 | id: beam 34 | uses: erlef/setup-beam@v1 35 | with: 36 | otp-version: ${{matrix.otp_version}} 37 | elixir-version: ${{matrix.elixir_version}} 38 | 39 | - uses: actions/cache@v5 40 | with: 41 | path: | 42 | deps 43 | _build 44 | key: deps-${{ runner.os }}-${{ 
matrix.otp_version }}-${{ matrix.elixir_version }}-${{ hashFiles('**/mix.lock') }} 45 | restore-keys: | 46 | deps-${{ runner.os }}-${{ matrix.otp_version }}-${{ matrix.elixir_version }} 47 | 48 | - run: mix deps.get 49 | 50 | - run: mix deps.unlock --check-unused 51 | 52 | - run: mix deps.compile 53 | 54 | - run: mix compile --warnings-as-errors 55 | 56 | - run: mix credo --strict --format=oneline 57 | 58 | - run: mix test --warnings-as-errors --cover 59 | 60 | dialyzer: 61 | runs-on: ubuntu-22.04 62 | env: 63 | MIX_ENV: dev 64 | 65 | steps: 66 | - uses: actions/checkout@v6 67 | 68 | - name: Set up Elixir 69 | id: beam 70 | uses: erlef/setup-beam@v1 71 | with: 72 | elixir-version: 1.16 73 | otp-version: 26 74 | 75 | - uses: actions/cache@v5 76 | with: 77 | path: | 78 | deps 79 | _build 80 | key: deps-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} 81 | restore-keys: | 82 | deps-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }} 83 | 84 | - run: mix deps.get 85 | 86 | - name: Restore PLT cache 87 | id: plt_cache_restore 88 | uses: actions/cache/restore@v5 89 | with: 90 | key: | 91 | plts-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} 92 | restore-keys: | 93 | plts-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}- 94 | path: | 95 | priv/plts 96 | 97 | - name: Create PLTs 98 | if: steps.plt_cache_restore.outputs.cache-hit != 'true' 99 | run: mix dialyzer --plt 100 | 101 | - name: Save PLT cache 102 | id: plt_cache_save 103 | if: steps.plt_cache_restore.outputs.cache-hit != 'true' 104 | uses: actions/cache/save@v5 105 | with: 106 | key: | 107 | plts-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} 108 | path: | 109 | priv/plts 110 | 111 | - 
name: Run dialyzer 112 | run: mix dialyzer --format github --format dialyxir 113 | 114 | check_format: 115 | runs-on: ubuntu-22.04 116 | env: 117 | MIX_ENV: dev 118 | 119 | steps: 120 | - uses: actions/checkout@v6 121 | 122 | - name: Set up Elixir 123 | id: beam 124 | uses: erlef/setup-beam@v1 125 | with: 126 | elixir-version: 1.16 127 | otp-version: 26 128 | 129 | - uses: actions/cache@v5 130 | with: 131 | path: | 132 | deps 133 | _build 134 | key: deps-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} 135 | restore-keys: | 136 | deps-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }} 137 | 138 | - run: mix deps.get 139 | 140 | - run: mix format --check-formatted 141 | -------------------------------------------------------------------------------- /test/uinta/formatter_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Uinta.FormatterTest do 2 | use ExUnit.Case 3 | alias Uinta.Formatter 4 | 5 | test "formats message properly" do 6 | message = "this is a log message" 7 | date = {2020, 3, 16} 8 | time = {10, 16, 32} 9 | timestamp = {date, Tuple.append(time, 548)} 10 | metadata = [request_id: "req_1234", user_uid: "26dbba1d-5b72-4e5c-b1a7-701589343291"] 11 | 12 | formatted = Formatter.format(:info, message, timestamp, metadata) 13 | 14 | assert formatted == 15 | "{\"log_level\":\"info\",\"message\":\"this is a log message\",\"metadata\":{\"request_id\":\"req_1234\",\"user_uid\":\"26dbba1d-5b72-4e5c-b1a7-701589343291\"},\"timestamp\":\"2020-03-16T10:16:32.548Z\"}\n" 16 | end 17 | 18 | test "merges with log message json when applicable" do 19 | message = "{\"method\":\"GET\",\"path\":\"/\",\"status\":\"200\",\"timing\":\"69µs\"}" 20 | date = {2020, 3, 16} 21 | timestamp = {date, {10, 16, 32, 548}} 22 | metadata = [request_id: "req_1234", user_uid: "26dbba1d-5b72-4e5c-b1a7-701589343291"] 23 | 24 
| formatted = Formatter.format(:info, message, timestamp, metadata) 25 | 26 | assert formatted == 27 | "{\"log_level\":\"info\",\"metadata\":{\"request_id\":\"req_1234\",\"user_uid\":\"26dbba1d-5b72-4e5c-b1a7-701589343291\"},\"method\":\"GET\",\"path\":\"/\",\"status\":\"200\",\"timestamp\":\"2020-03-16T10:16:32.548Z\",\"timing\":\"69µs\"}\n" 28 | end 29 | 30 | test "formats metadata values that are lists of atoms" do 31 | metadata = [prop: [:elixir]] 32 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, metadata) 33 | 34 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 35 | assert metadata_value == "[:elixir]" 36 | end 37 | 38 | test "formats metadata datetime values" do 39 | metadata = [prop: ~U[2022-07-26 22:07:46.217735Z]] 40 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, metadata) 41 | 42 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 43 | assert metadata_value == "2022-07-26 22:07:46.217735Z" 44 | end 45 | 46 | test "formats metadata string values" do 47 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, prop: "test") 48 | 49 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 50 | assert metadata_value == "test" 51 | end 52 | 53 | test "formats metadata integer values" do 54 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, prop: 1) 55 | 56 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 57 | assert metadata_value == "1" 58 | end 59 | 60 | test "formats metadata float values" do 61 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, prop: 0.1) 62 | 63 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 64 | assert metadata_value == "0.1" 65 | end 66 | 67 | test "formats metadata boolean values" do 68 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, prop: true) 69 | 70 | %{"metadata" => %{"prop" => 
metadata_value}} = Jason.decode!(result) 71 | assert metadata_value == "true" 72 | end 73 | 74 | test "formats metadata atom values" do 75 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, prop: :test) 76 | 77 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 78 | assert metadata_value == "test" 79 | end 80 | 81 | test "formats metadata tuple values" do 82 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, prop: {:test}) 83 | 84 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 85 | assert metadata_value == "{:test}" 86 | end 87 | 88 | test "formats metadata map values" do 89 | result = 90 | Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, prop: %{test: "test"}) 91 | 92 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 93 | assert metadata_value == "%{test: \"test\"}" 94 | end 95 | 96 | test "formats metadata charlist" do 97 | result = Formatter.format(:info, "Testing", {{1980, 1, 1}, {0, 0, 0, 0}}, prop: ~c"abc") 98 | 99 | %{"metadata" => %{"prop" => metadata_value}} = Jason.decode!(result) 100 | assert metadata_value == "abc" 101 | end 102 | end 103 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, 3 | "credo": {:hex, :credo, "1.7.14", "c7e75216cea8d978ba8c60ed9dede4cc79a1c99a266c34b3600dd2c33b96bc92", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "12a97d6bb98c277e4fb1dff45aaf5c137287416009d214fb46e68147bd9e0203"}, 
4 | "dialyxir": {:hex, :dialyxir, "1.4.7", "dda948fcee52962e4b6c5b4b16b2d8fa7d50d8645bbae8b8685c3f9ecb7f5f4d", [:mix], [{:erlex, ">= 0.2.8", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b34527202e6eb8cee198efec110996c25c5898f43a4094df157f8d28f27d9efe"}, 5 | "earmark_parser": {:hex, :earmark_parser, "1.4.44", "f20830dd6b5c77afe2b063777ddbbff09f9759396500cdbe7523efd58d7a339c", [:mix], [], "hexpm", "4778ac752b4701a5599215f7030989c989ffdc4f6df457c5f36938cc2d2a2750"}, 6 | "erlex": {:hex, :erlex, "0.2.8", "cd8116f20f3c0afe376d1e8d1f0ae2452337729f68be016ea544a72f767d9c12", [:mix], [], "hexpm", "9d66ff9fedf69e49dc3fd12831e12a8a37b76f8651dd21cd45fcf5561a8a7590"}, 7 | "ex_doc": {:hex, :ex_doc, "0.39.3", "519c6bc7e84a2918b737aec7ef48b96aa4698342927d080437f61395d361dcee", [:mix], [{:earmark_parser, "~> 1.4.44", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "0590955cf7ad3b625780ee1c1ea627c28a78948c6c0a9b0322bd976a079996e1"}, 8 | "file_system": {:hex, :file_system, "1.1.1", "31864f4685b0148f25bd3fbef2b1228457c0c89024ad67f7a81a3ffbc0bbad3a", [:mix], [], "hexpm", "7a15ff97dfe526aeefb090a7a9d3d03aa907e100e262a0f8f7746b78f8f87a5d"}, 9 | "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, 10 | "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", 
"d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, 11 | "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"}, 12 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"}, 13 | "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"}, 14 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, 15 | "plug": {:hex, :plug, "1.19.1", "09bac17ae7a001a68ae393658aa23c7e38782be5c5c00c80be82901262c394c0", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "560a0017a8f6d5d30146916862aaf9300b7280063651dd7e532b8be168511e62"}, 16 | "plug_crypto": {:hex, :plug_crypto, "2.1.1", "19bda8184399cb24afa10be734f84a16ea0a2bc65054e23a62bb10f06bc89491", [:mix], [], "hexpm", "6470bce6ffe41c8bd497612ffde1a7e4af67f36a15eea5f921af71cf3e11247c"}, 17 | "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, 
18 | } 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Uinta 2 | 3 | [![Build Status](https://github.com/podium/uinta/actions/workflows/ci.yml/badge.svg)](https://github.com/podium/uinta/actions/workflows/ci.yml) [![Hex.pm](https://img.shields.io/hexpm/v/uinta.svg)](https://hex.pm/packages/uinta) [![Documentation](https://img.shields.io/badge/documentation-gray)](https://hexdocs.pm/uinta) 4 | [![Total Download](https://img.shields.io/hexpm/dt/uinta.svg)](https://hex.pm/packages/uinta) 5 | [![License](https://img.shields.io/hexpm/l/uinta.svg)](https://github.com/podium/uinta/blob/master/LICENSE.md) 6 | 7 | Uinta is a plugin for the default Elixir logger that lowers log volume while 8 | maximizing log usefulness. It is not a logger backend, but rather includes 9 | `Uinta.Formatter` which will format logs on top of the default Elixir logger 10 | backend. 11 | 12 | In addition to the formatter, Uinta also includes `Uinta.Plug`. The plug is a 13 | drop-in replacement for `Plug.Logger` that will log out the request and response 14 | on a single line. It can also put the request info into the top-level JSON for 15 | easier parsing by your log aggregator. 16 | 17 | ## Why Uinta? 18 | 19 | At Podium we log millions of lines per minute and store around a terabyte of log 20 | data per day. A large percentage of those lines are the typical `GET /` and 21 | `Sent 200 in 2ms` that `Phoenix.Logger` sends by default. By combining those 22 | into a single line, we're able to cut out that percentage of lines so that the 23 | indexes in our Elasticsearch cluster will be smaller and searches will be 24 | faster. 25 | 26 | In addition, about 2/3 of those requests are GraphQL requests. Their first log 27 | line simply says `POST /graphql` every time, which gives us no insight into what 28 | the request is actually doing. 
`Uinta.Plug` will extract GraphQL query names 29 | when they exist to make these log lines more useful without having to enable 30 | debug logs: `QUERY messagesForLocation (/graphql)` or `MUTATION createMessage (/graphql)`. 31 | 32 | For smaller organizations, the ability to filter out lines pertaining to certain 33 | requests paths can also be useful to cut down on log noise. Kubernetes health 34 | checks and other requests don't usually need to show up in the logs, so 35 | `Uinta.Plug` allows you to ignore certain paths as long as they return a 36 | 200-level status code. 37 | 38 | When set up to do so, Uinta will additionally wrap the log line in a JSON object 39 | so that it can more easily be parsed by Fluentbit and other log parsers. This 40 | increases log line size, but improves searchability and makes logs more useful. 41 | 42 | ## Installation 43 | 44 | The package can be installed by adding `uinta` to your list of dependencies in 45 | `mix.exs`: 46 | 47 | ```elixir 48 | def deps do 49 | [ 50 | {:uinta, "~> 0.6"} 51 | ] 52 | end 53 | ``` 54 | 55 | ### Available Formatters 56 | 57 | #### Standard 58 | 59 | No special formatting or fields, good for general use. 60 | 61 | Module: `Uinta.Formatter` (or `Uinta.Formatter.Standard`) 62 | 63 | #### Datadog 64 | 65 | Adds Datadog specific metadata to the log output. See the module for more setup information and details. 66 | 67 | Module: `Uinta.Formatter.Datadog` 68 | 69 | To enable the full log format to use DataDog log service, just use `include_datadog_fields: true` in your plug initialization 70 | 71 | ### Formatter Installation 72 | 73 | To use the formatter, you'll need to add it to your logger configuration. 
In 74 | your (production) config file, look for a line that looks something like 75 | this: 76 | 77 | ```elixir 78 | config :logger, :console, format: "[$level] $message\n" 79 | ``` 80 | 81 | You'll want to replace it with this: 82 | 83 | ```elixir 84 | config :logger, :console, format: {Uinta.Formatter, :format} 85 | ``` 86 | 87 | ### Plug Installation 88 | 89 | Installation of the plug will depend on how your app currently logs requests. 90 | Open `YourApp.Endpoint` and look for the following line: 91 | 92 | ```elixir 93 | plug Plug.Logger 94 | ``` 95 | 96 | If it exists in your endpoint, replace it with this (using the options you 97 | want): 98 | 99 | ```elixir 100 | plug Uinta.Plug, format: :string, log: :info 101 | ``` 102 | 103 | You can also perform log sampling by setting the `success_log_sampling_ratio`. The following is a 20% log sampling: 104 | 105 | ```elixir 106 | plug Uinta.Plug, success_log_sampling_ratio: 0.2 107 | ``` 108 | 109 | You can also specify custom sampled status codes to be logged. This is useful if you want to sample out 110 | more than just the default 1xx and 2xx status codes.
The following is a 20% log sampling with custom sampled status codes: 111 | 112 | ```elixir 113 | plug Uinta.Plug, 114 | success_log_sampling_ratio: 0.2, 115 | sampled_status_codes: Uinta.Plug.default_sampled_status_codes() ++ [401] 116 | ``` 117 | 118 | If your endpoint didn't call `Plug.Logger`, add the above line above the line 119 | that looks like this: 120 | 121 | ```elixir 122 | plug Plug.RequestId 123 | ``` 124 | 125 | Now you will also want to add the following anywhere in your main config file to 126 | make sure that you aren't logging each request twice: 127 | 128 | ```elixir 129 | config :phoenix, logger: false 130 | ``` 131 | 132 | ## Attribution 133 | 134 | Much of this work, especially `Uinta.Plug`, is based on Elixir's 135 | [`Plug.Logger`](https://github.com/elixir-plug/plug/blob/v1.9.0/lib/plug/logger.ex) 136 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Uinta 2 | 3 | Please take a moment to review this document in order to make the contribution 4 | process easy and effective for everyone involved! 5 | Also make sure you read our [Code of Conduct](CODE_OF_CONDUCT.md) that outlines our commitment towards an open and welcoming environment. 6 | 7 | ## Using the issue tracker 8 | 9 | Use the issues tracker for: 10 | 11 | * [Bug reports](#bug-reports) 12 | * [Submitting pull requests](#pull-requests) 13 | 14 | We do our best to keep the issue tracker tidy and organized, making it useful 15 | for everyone. For example, we classify open issues per perceived difficulty, 16 | making it easier for developers to [contribute to Uinta](#pull-requests). 17 | 18 | ## Bug reports 19 | 20 | A bug is either a _demonstrable problem_ that is caused by the code in the repository, 21 | or indicate missing, unclear, or misleading documentation. Good bug reports are extremely 22 | helpful - thank you! 
23 | 24 | Guidelines for bug reports: 25 | 26 | 1. **Use the GitHub issue search** — check if the issue has already been 27 | reported. 28 | 29 | 2. **Check if the issue has been fixed** — try to reproduce it using the 30 | `master` branch in the repository. 31 | 32 | 3. **Isolate and report the problem** — ideally create a reduced test 33 | case. 34 | 35 | Please try to be as detailed as possible in your report. Include information about 36 | your Operating System, as well as your Erlang, Elixir and Uinta versions. Please provide steps to 37 | reproduce the issue as well as the outcome you were expecting! All these details 38 | will help developers to fix any potential bugs. 39 | 40 | Example: 41 | 42 | > Short and descriptive example bug report title 43 | > 44 | > A summary of the issue and the environment in which it occurs. If suitable, 45 | > include the steps required to reproduce the bug. 46 | > 47 | > 1. This is the first step 48 | > 2. This is the second step 49 | > 3. Further steps, etc. 50 | > 51 | > `<url>` - a link to the reduced test case (e.g. a GitHub Gist) 52 | > 53 | > Any other information you want to share that is relevant to the issue being 54 | > reported. This might include the lines of code that you have identified as 55 | > causing the bug, and potential solutions (and your opinions on their 56 | > merits). 57 | 58 | ## Contributing Documentation 59 | 60 | Code documentation (`@doc`, `@moduledoc`, `@typedoc`) has a special convention: 61 | the first paragraph is considered to be a short summary. 62 | 63 | For functions, macros and callbacks say what it will do. For example write 64 | something like: 65 | 66 | ```elixir 67 | @doc """ 68 | Marks the given value as HTML safe. 69 | """ 70 | def safe({:safe, value}), do: {:safe, value} 71 | ``` 72 | 73 | For modules, protocols and types say what it is.
For example write 74 | something like: 75 | 76 | ```elixir 77 | defmodule MyModule do 78 | @moduledoc """ 79 | Conveniences for working HTML strings and templates. 80 | ... 81 | """ 82 | ``` 83 | 84 | Keep in mind that the first paragraph might show up in a summary somewhere, long 85 | texts in the first paragraph create very ugly summaries. As a rule of thumb 86 | anything longer than 80 characters is too long. 87 | 88 | Try to keep unnecessary details out of the first paragraph, it's only there to 89 | give a user a quick idea of what the documented "thing" does/is. The rest of the 90 | documentation string can contain the details, for example when a value and when 91 | `nil` is returned. 92 | 93 | If possible include examples, preferably in a form that works with doctests. 94 | This makes it easy to test the examples so that they don't go stale and examples 95 | are often a great help in explaining what a function does. 96 | 97 | ## Pull requests 98 | 99 | Good pull requests - patches, improvements, new features - are a fantastic 100 | help. They should remain focused in scope and avoid containing unrelated 101 | commits. 102 | 103 | **IMPORTANT**: By submitting a patch, you agree that your work will be 104 | licensed under the license used by the project. 105 | 106 | If you have any large pull request in mind (e.g. implementing features, 107 | refactoring code, etc), **please ask first** otherwise you risk spending 108 | a lot of time working on something that the project's developers might 109 | not want to merge into the project. 110 | 111 | Please adhere to the coding conventions in the project (indentation, 112 | accurate comments, etc.) and don't forget to add your own tests and 113 | documentation. When working with git, we recommend the following process 114 | in order to craft an excellent pull request: 115 | 116 | 1. 
[Fork](https://help.github.com/articles/fork-a-repo/) the project, clone your fork, 117 | and configure the remotes: 118 | 119 | ```bash 120 | # Clone your fork of the repo into the current directory 121 | git clone https://github.com/<your-username>/uinta 122 | 123 | # Navigate to the newly cloned directory 124 | cd uinta 125 | 126 | # Assign the original repo to a remote called "upstream" 127 | git remote add upstream https://github.com/podium/uinta 128 | ``` 129 | 130 | 2. If you cloned a while ago, get the latest changes from upstream, and update your fork: 131 | 132 | ```bash 133 | git checkout master 134 | git pull upstream master 135 | git push 136 | ``` 137 | 138 | 3. Create a new topic branch (off of `master`) to contain your feature, change, 139 | or fix. 140 | 141 | **IMPORTANT**: Making changes in `master` is discouraged. You should always 142 | keep your local `master` in sync with upstream `master` and make your 143 | changes in topic branches. 144 | 145 | ```bash 146 | git checkout -b <topic-branch-name> 147 | ``` 148 | 149 | 4. Commit your changes in logical chunks. Keep your commit messages organized, 150 | with a short description in the first line and more detailed information on 151 | the following lines. Feel free to use Git's 152 | [interactive rebase](https://help.github.com/articles/about-git-rebase/) 153 | feature to tidy up your commits before making them public. 154 | 155 | 5. Make sure all the tests are still passing. 156 | 157 | ```bash 158 | mix test 159 | ``` 160 | 161 | 6. Push your topic branch up to your fork: 162 | 163 | ```bash 164 | git push origin <topic-branch-name> 165 | ``` 166 | 167 | 7. [Open a Pull Request](https://help.github.com/articles/about-pull-requests/) 168 | with a clear title and description. 169 | 170 | 8. If you haven't updated your pull request for a while, you should consider 171 | rebasing on master and resolving any conflicts. 172 | 173 | **IMPORTANT**: _Never ever_ merge upstream `master` into your branches.
You 174 | should always `git rebase` on `master` to bring your changes up to date when 175 | necessary. 176 | 177 | ```bash 178 | git checkout master 179 | git pull upstream master 180 | git checkout <topic-branch-name> 181 | git rebase master 182 | ``` 183 | 184 | Thank you for your contributions! 185 | 186 | ## Guides 187 | 188 | These Guides aim to be inclusive. We use "we" and "our" instead of "you" and 189 | "your" to foster this sense of inclusion. 190 | 191 | Ideally there is something for everybody in each guide, from beginner to expert. 192 | This is hard, maybe impossible. When we need to compromise, we do so on behalf 193 | of beginning users because expert users have more tools at their disposal to 194 | help themselves. 195 | 196 | The general pattern we use for presenting information is to first introduce a 197 | small, discrete topic, then write a small amount of code to demonstrate the 198 | concept, then verify that the code worked. 199 | 200 | In this way, we build from small, easily digestible concepts into more complex 201 | ones. The shorter this cycle is, as long as the information is still clear and 202 | complete, the better. 203 | 204 | For formatting the guides: 205 | 206 | - We use the `elixir` code fence for all module code. 207 | - We use the `iex` for IEx sessions. 208 | - We use the `console` code fence for shell commands. 209 | - We use the `html` code fence for html templates, even if there is elixir code 210 | in the template. 211 | - We use backticks for filenames and directory paths. 212 | - We use backticks for module names, function names, and variable names. 213 | - Documentation line length should be hard wrapped at around 100 characters if possible. 214 | -------------------------------------------------------------------------------- /.credo.exs: -------------------------------------------------------------------------------- 1 | # Last updated for credo 1.6.1 2 | %{ 3 | # 4 | # You can have as many configs as you like in the `configs:` field.
5 | configs: [ 6 | %{ 7 | # 8 | # Run any config using `mix credo -C `. If no config name is given 9 | # "default" is used. 10 | # 11 | name: "default", 12 | # 13 | # These are the files included in the analysis: 14 | files: %{ 15 | # 16 | # You can give explicit globs or simply directories. 17 | # In the latter case `**/*.{ex,exs}` will be used. 18 | # 19 | included: [ 20 | "lib/", 21 | "src/", 22 | "test/", 23 | "web/", 24 | "apps/*/lib/", 25 | "apps/*/src/", 26 | "apps/*/test/", 27 | "apps/*/web/" 28 | ], 29 | excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"] 30 | }, 31 | # 32 | # Load and configure plugins here: 33 | # 34 | plugins: [], 35 | # 36 | # If you create your own checks, you must specify the source files for 37 | # them here, so they can be loaded by Credo before running the analysis. 38 | # 39 | requires: [], 40 | # 41 | # If you want to enforce a style guide and need a more traditional linting 42 | # experience, you can change `strict` to `true` below: 43 | # 44 | strict: true, 45 | # 46 | # To modify the timeout for parsing files, change this value: 47 | # 48 | parse_timeout: 5000, 49 | # 50 | # If you want to use uncolored output by default, you can change `color` 51 | # to `false` below: 52 | # 53 | color: true, 54 | # 55 | # You can customize the parameters of any check by adding a second element 56 | # to the tuple. 
57 | # 58 | # To disable a check put `false` as second element: 59 | # 60 | # {Credo.Check.Design.DuplicatedCode, false} 61 | # 62 | checks: %{ 63 | enabled: [ 64 | # 65 | ## Consistency Checks 66 | # 67 | {Credo.Check.Consistency.ExceptionNames, []}, 68 | {Credo.Check.Consistency.LineEndings, []}, 69 | {Credo.Check.Consistency.ParameterPatternMatching, []}, 70 | {Credo.Check.Consistency.SpaceAroundOperators, []}, 71 | {Credo.Check.Consistency.SpaceInParentheses, []}, 72 | {Credo.Check.Consistency.TabsOrSpaces, []}, 73 | 74 | # 75 | ## Design Checks 76 | # 77 | # You can customize the priority of any check 78 | # Priority values are: `low, normal, high, higher` 79 | # 80 | {Credo.Check.Design.AliasUsage, 81 | priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0}, 82 | {Credo.Check.Design.SkipTestWithoutComment, []}, 83 | # You can also customize the exit_status of each check. 84 | # If you don't want TODO comments to cause `mix credo` to fail, just 85 | # set this value to 0 (zero). 
86 | # 87 | {Credo.Check.Design.TagTODO, [exit_status: 2]}, 88 | {Credo.Check.Design.TagFIXME, []}, 89 | 90 | # 91 | ## Readability Checks 92 | # 93 | {Credo.Check.Readability.AliasAs, 94 | files: %{excluded: ["lib/*_web.ex", "test/support/conn_case.ex"]}}, 95 | {Credo.Check.Readability.AliasOrder, []}, 96 | {Credo.Check.Readability.BlockPipe, []}, 97 | {Credo.Check.Readability.FunctionNames, []}, 98 | {Credo.Check.Readability.ImplTrue, []}, 99 | {Credo.Check.Readability.LargeNumbers, []}, 100 | {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 120}, 101 | {Credo.Check.Readability.ModuleAttributeNames, []}, 102 | {Credo.Check.Readability.ModuleDoc, []}, 103 | {Credo.Check.Readability.ModuleNames, []}, 104 | {Credo.Check.Readability.MultiAlias, []}, 105 | {Credo.Check.Readability.ParenthesesInCondition, []}, 106 | {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []}, 107 | {Credo.Check.Readability.PipeIntoAnonymousFunctions, []}, 108 | {Credo.Check.Readability.PredicateFunctionNames, []}, 109 | {Credo.Check.Readability.PreferImplicitTry, []}, 110 | {Credo.Check.Readability.RedundantBlankLines, []}, 111 | {Credo.Check.Readability.Semicolons, []}, 112 | {Credo.Check.Readability.SinglePipe, []}, 113 | {Credo.Check.Readability.SpaceAfterCommas, []}, 114 | {Credo.Check.Readability.StrictModuleLayout, []}, 115 | {Credo.Check.Readability.StringSigils, []}, 116 | {Credo.Check.Readability.TrailingBlankLine, []}, 117 | {Credo.Check.Readability.TrailingWhiteSpace, []}, 118 | {Credo.Check.Readability.UnnecessaryAliasExpansion, []}, 119 | {Credo.Check.Readability.VariableNames, []}, 120 | {Credo.Check.Readability.WithCustomTaggedTuple, []}, 121 | {Credo.Check.Readability.WithSingleClause, []}, 122 | 123 | # 124 | ## Refactoring Opportunities 125 | # 126 | {Credo.Check.Refactor.Apply, []}, 127 | {Credo.Check.Refactor.CondStatements, []}, 128 | {Credo.Check.Refactor.CyclomaticComplexity, []}, 129 | {Credo.Check.Refactor.FilterFilter, []}, 130 | 
{Credo.Check.Refactor.FilterReject, []}, 131 | {Credo.Check.Refactor.FunctionArity, []}, 132 | {Credo.Check.Refactor.IoPuts, []}, 133 | {Credo.Check.Refactor.LongQuoteBlocks, []}, 134 | {Credo.Check.Refactor.MapJoin, []}, 135 | {Credo.Check.Refactor.MapMap, []}, 136 | {Credo.Check.Refactor.MatchInCondition, []}, 137 | {Credo.Check.Refactor.NegatedConditionsInUnless, []}, 138 | {Credo.Check.Refactor.NegatedConditionsWithElse, []}, 139 | {Credo.Check.Refactor.Nesting, []}, 140 | {Credo.Check.Refactor.PipeChainStart, []}, 141 | {Credo.Check.Refactor.RedundantWithClauseResult, []}, 142 | {Credo.Check.Refactor.RejectFilter, []}, 143 | {Credo.Check.Refactor.RejectReject, []}, 144 | {Credo.Check.Refactor.UnlessWithElse, []}, 145 | {Credo.Check.Refactor.WithClauses, []}, 146 | 147 | # 148 | ## Warnings 149 | # 150 | {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []}, 151 | {Credo.Check.Warning.BoolOperationOnSameValues, []}, 152 | {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, 153 | {Credo.Check.Warning.IExPry, []}, 154 | {Credo.Check.Warning.IoInspect, []}, 155 | {Credo.Check.Warning.MapGetUnsafePass, []}, 156 | {Credo.Check.Warning.MixEnv, []}, 157 | {Credo.Check.Warning.OperationOnSameValues, []}, 158 | {Credo.Check.Warning.OperationWithConstantResult, []}, 159 | {Credo.Check.Warning.RaiseInsideRescue, []}, 160 | {Credo.Check.Warning.SpecWithStruct, []}, 161 | {Credo.Check.Warning.UnsafeExec, []}, 162 | {Credo.Check.Warning.UnsafeToAtom, []}, 163 | {Credo.Check.Warning.UnusedEnumOperation, []}, 164 | {Credo.Check.Warning.UnusedFileOperation, []}, 165 | {Credo.Check.Warning.UnusedKeywordOperation, []}, 166 | {Credo.Check.Warning.UnusedListOperation, []}, 167 | {Credo.Check.Warning.UnusedPathOperation, []}, 168 | {Credo.Check.Warning.UnusedRegexOperation, []}, 169 | {Credo.Check.Warning.UnusedStringOperation, []}, 170 | {Credo.Check.Warning.UnusedTupleOperation, []}, 171 | {Credo.Check.Warning.WrongTestFileExtension, []} 172 | ], 173 | disabled: [ 174 | # 
175 | # Controversial and experimental checks (opt-in, just move the check to `:enabled` 176 | # and be sure to use `mix credo --strict` to see low priority checks if you set 177 | # `strict: false` above) 178 | # 179 | {Credo.Check.Consistency.MultiAliasImportRequireUse, []}, 180 | {Credo.Check.Consistency.UnusedVariableNames, []}, 181 | {Credo.Check.Design.DuplicatedCode, []}, 182 | {Credo.Check.Readability.SeparateAliasRequire, []}, 183 | {Credo.Check.Readability.SingleFunctionToBlockPipe, []}, 184 | {Credo.Check.Refactor.ABCSize, []}, 185 | {Credo.Check.Refactor.AppendSingleItem, []}, 186 | {Credo.Check.Refactor.DoubleBooleanNegation, []}, 187 | {Credo.Check.Refactor.ModuleDependencies, []}, 188 | {Credo.Check.Refactor.NegatedIsNil, []}, 189 | {Credo.Check.Refactor.VariableRebinding, []}, 190 | {Credo.Check.Warning.LeakyEnvironment, []}, 191 | {Credo.Check.Readability.Specs, 192 | files: %{ 193 | excluded: [ 194 | "lib/*_web.ex", 195 | "lib/*_web/controllers/*_controller.ex", 196 | "lib/*_web/graphql/*/resolvers.ex" 197 | ] 198 | }} 199 | 200 | # 201 | # Custom checks can be created using `mix credo.gen.check`. 202 | # 203 | ] 204 | } 205 | } 206 | ] 207 | } 208 | -------------------------------------------------------------------------------- /lib/uinta/plug.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Plug) do 2 | defmodule Uinta.Plug do 3 | @moduledoc """ 4 | This plug combines the request and response logs into a single line. This 5 | brings many benefits including: 6 | 7 | - Removing the need to visually match up the request and response makes it 8 | easier to read your logs and get a full picture of what has happened. 
9 | 10 | - Having a single line for both request and response halves the number of 11 | request logs that your log aggregator will need to process and index, which 12 | leads to saved costs 13 | 14 | In addition to combining the log lines, it also gives you the ability to 15 | output request logs in JSON format so that you can easily have your log 16 | aggregator parse the fields. To do this, pass `json: true` in the options 17 | when calling the plug. 18 | 19 | You will also gain the ability to not log certain paths that are requested, 20 | as long as those paths return a 200-level status code. This can be 21 | particularly useful for things like not showing health checks in your logs 22 | to cut down on noise. To do this, just pass `ignored_paths: 23 | ["/path_to_ignore"]` in the options. 24 | 25 | Finally, GraphQL requests will replace `POST /graphql` with the GraphQL 26 | operation type and name like `QUERY getUser` or `MUTATION createUser` if an 27 | operation name is provided. This will give you more visibility into your 28 | GraphQL requests without having to log out the entire request body or go 29 | into debug mode. If desired, the GraphQL variables can be included in the 30 | log line as well. The query can also be included if unnamed. 31 | 32 | ## Installation 33 | 34 | Installation of the plug will depend on how your app currently logs requests. 
35 | Open `YourApp.Endpoint` and look for the following line: 36 | 37 | ``` 38 | plug Plug.Logger 39 | ``` 40 | 41 | If it exists in your endpoint, replace it with this (using the options you 42 | want): 43 | 44 | ``` 45 | plug Uinta.Plug, 46 | log: :info, 47 | format: :string, 48 | include_variables: false, 49 | ignored_paths: [], 50 | filter_variables: [], 51 | success_log_sampling_ratio: 1.0, 52 | include_datadog_fields: false 53 | ``` 54 | 55 | If your endpoint didn't call `Plug.Logger`, add the above line above the line 56 | that looks like this: 57 | 58 | ``` 59 | plug Plug.RequestId 60 | ``` 61 | 62 | Now you will also want to add the following anywhere in your main config file to 63 | make sure that you aren't logging each request twice: 64 | 65 | ``` 66 | config :phoenix, logger: false 67 | ``` 68 | 69 | ## Options 70 | 71 | - `:log` - The log level at which this plug should log its request info. 72 | Default is `:info` 73 | - Can be a `{module, function_name, args}` tuple where function is applied with `conn` prepended to args to determine log level. 74 | - `:format` - Output format, either :json, :string, or :map. Default is `:string` 75 | - `:json` - Whether or not plug should log in JSON format. Default is `false` (obsolete) 76 | - `:ignored_paths` - A list of paths that should not log requests. Default 77 | is `[]`. 78 | - `:include_variables` - Whether or not to include any GraphQL variables in 79 | the log line when applicable. Default is `false`. 80 | - `:filter_variables` - A list of variable names that should be filtered 81 | out from the logs. By default `password`, `passwordConfirmation`, 82 | `idToken`, and `refreshToken` will be filtered. 83 | - `:include_unnamed_queries` - Whether or not to include the full query 84 | body for queries with no name supplied 85 | - `:success_log_sampling_ratio` - What percentage of successful requests 86 | should be logged. 
Defaults to 1.0 87 | - `:include_datadog_fields` - Whether or not to add the Datadog-specific logging fields to each log entry. Default is 88 | `false`. See https://docs.datadoghq.com/logs/log_configuration/attributes_naming_convention/#http-requests for details 89 | """ 90 | @behaviour Plug 91 | 92 | alias Plug.Conn 93 | require Logger 94 | 95 | @default_filter ~w(password passwordConfirmation idToken refreshToken) 96 | @default_sampling_ratio 1.0 97 | @default_sampled_status_codes [ 98 | 100, 99 | 101, 100 | 102, 101 | 103, 102 | 200, 103 | 201, 104 | 202, 105 | 203, 106 | 204, 107 | 205, 108 | 206, 109 | 207, 110 | 208, 111 | 226 112 | ] 113 | 114 | @query_name_regex ~r/^\s*(?:query|mutation)\s+(\w+)|{\W+(\w+)\W+?{/m 115 | 116 | @type format :: :json | :map | :string 117 | @type graphql_info :: %{ 118 | type: String.t(), 119 | operation: String.t(), 120 | variables: String.t(), 121 | query: String.t() | nil 122 | } 123 | @type opts :: %{ 124 | level: Logger.level() | {module(), atom(), list()}, 125 | format: format(), 126 | include_unnamed_queries: boolean(), 127 | include_variables: boolean(), 128 | include_datadog_fields: boolean(), 129 | ignored_paths: list(String.t()), 130 | filter_variables: list(String.t()), 131 | sampled_status_codes: list(non_neg_integer()) 132 | } 133 | 134 | @impl Plug 135 | def init(opts) do 136 | format = 137 | case Keyword.fetch(opts, :format) do 138 | {:ok, value} when value in [:json, :map, :string] -> 139 | value 140 | 141 | :error -> 142 | if Keyword.get(opts, :json, false), do: :json, else: :string 143 | end 144 | 145 | %{ 146 | level: Keyword.get(opts, :log, :info), 147 | format: format, 148 | ignored_paths: Keyword.get(opts, :ignored_paths, []), 149 | include_unnamed_queries: Keyword.get(opts, :include_unnamed_queries, false), 150 | include_variables: Keyword.get(opts, :include_variables, false), 151 | filter_variables: Keyword.get(opts, :filter_variables, @default_filter), 152 | include_datadog_fields: Keyword.get(opts, 
:include_datadog_fields, false), 153 | sampled_status_codes: 154 | Keyword.get(opts, :sampled_status_codes, @default_sampled_status_codes), 155 | success_log_sampling_ratio: 156 | Keyword.get( 157 | opts, 158 | :success_log_sampling_ratio, 159 | @default_sampling_ratio 160 | ) 161 | } 162 | end 163 | 164 | @doc """ 165 | Returns the default sampled status codes. These are all 1xx and 2xx status codes 166 | according to the original HTTP RFC, and inclusions from the Wikipedia article on 167 | HTTP status codes. 168 | 169 | It's recommended to extend this list with your own status codes if you want to sample more than the default. 170 | 171 | - [RFC](https://datatracker.ietf.org/doc/html/rfc2616#section-6.1.1) 172 | - [Wikipedia](https://en.wikipedia.org/wiki/List_of_HTTP_status_codes) 173 | """ 174 | @spec default_sampled_status_codes() :: list(non_neg_integer()) 175 | def default_sampled_status_codes, do: @default_sampled_status_codes 176 | 177 | @impl Plug 178 | def call(conn, opts) do 179 | start = System.monotonic_time() 180 | 181 | Conn.register_before_send(conn, fn conn -> 182 | log_request(conn, start, opts) 183 | conn 184 | end) 185 | end 186 | 187 | defp log_request(conn, start, opts) do 188 | if should_log_request?(conn, opts) do 189 | level = log_level(conn, opts) 190 | 191 | Logger.log(level, fn -> 192 | stop = System.monotonic_time() 193 | diff = System.convert_time_unit(stop - start, :native, :microsecond) 194 | 195 | graphql_info = graphql_info(conn, opts) 196 | info = info(conn, graphql_info, diff, opts) 197 | 198 | format_line(info, opts.format) 199 | end) 200 | end 201 | end 202 | 203 | @spec log_level(Plug.Conn.t(), opts()) :: Logger.level() 204 | defp log_level(conn, opts) 205 | 206 | defp log_level(_conn, %{level: level}) when is_atom(level) do 207 | level 208 | end 209 | 210 | defp log_level(conn, %{level: {module, function, args}}) 211 | when is_atom(module) and is_atom(function) and is_list(args) do 212 | apply(module, function, [conn | 
args]) 213 | end 214 | 215 | @spec info(Plug.Conn.t(), graphql_info(), integer(), opts()) :: map() 216 | defp info(conn, graphql_info, diff, opts) do 217 | info = %{ 218 | connection_type: connection_type(conn), 219 | method: method(conn, graphql_info), 220 | path: conn.request_path, 221 | operation_name: graphql_info[:operation], 222 | query: query(graphql_info, opts), 223 | status: Integer.to_string(conn.status), 224 | timing: formatted_diff(diff), 225 | duration_ms: diff / 1000, 226 | client_ip: client_ip(conn), 227 | user_agent: get_first_value_for_header(conn, "user-agent"), 228 | referer: get_first_value_for_header(conn, "referer"), 229 | x_forwarded_for: get_first_value_for_header(conn, "x-forwarded-for"), 230 | x_forwarded_proto: get_first_value_for_header(conn, "x-forwarded-proto"), 231 | x_forwarded_port: get_first_value_for_header(conn, "x-forwarded-port"), 232 | via: get_first_value_for_header(conn, "via"), 233 | variables: variables(graphql_info) 234 | } 235 | 236 | case opts[:include_datadog_fields] do 237 | true -> 238 | dd_fields = %{ 239 | "http.url" => info[:path], 240 | "http.status_code" => conn.status, 241 | "http.method" => info[:method], 242 | "http.referer" => info[:referer], 243 | "http.request_id" => Logger.metadata()[:request_id], 244 | "http.useragent" => info[:user_agent], 245 | "http.version" => Plug.Conn.get_http_protocol(conn), 246 | "duration" => info[:duration_ms] * 1_000_000, 247 | "network.client.ip" => info[:client_ip] 248 | } 249 | 250 | Map.merge(info, dd_fields) 251 | 252 | _ -> 253 | info 254 | end 255 | end 256 | 257 | @spec format_line(map(), format()) :: iodata() | map() | String.t() 258 | defp format_line(info, :map) do 259 | format_info(info) 260 | end 261 | 262 | defp format_line(info, :json) do 263 | info = format_info(info) 264 | 265 | case Jason.encode(info) do 266 | {:ok, encoded} -> encoded 267 | _ -> inspect(info) 268 | end 269 | end 270 | 271 | defp format_line(info, :string) do 272 | log = [info.method, ?\s, 
info.operation_name || info.path] 273 | log = if is_nil(info.operation_name), do: log, else: [log, " (", info.path, ")"] 274 | log = if is_nil(info.variables), do: log, else: [log, " with ", info.variables] 275 | log = [log, " - ", info.connection_type, ?\s, info.status, " in ", info.timing] 276 | if is_nil(info.query), do: log, else: [log, "\nQuery: ", info.query] 277 | end 278 | 279 | # Format structured data for output 280 | @spec format_info(map()) :: map() 281 | defp format_info(info) do 282 | info 283 | |> Map.delete(:connection_type) 284 | |> Map.reject(fn {_, value} -> is_nil(value) end) 285 | end 286 | 287 | defp get_first_value_for_header(conn, name) do 288 | conn 289 | |> Plug.Conn.get_req_header(name) 290 | |> List.first() 291 | end 292 | 293 | def client_ip(conn) do 294 | case :inet.ntoa(conn.remote_ip) do 295 | {:error, _} -> 296 | "" 297 | 298 | ip -> 299 | List.to_string(ip) 300 | end 301 | end 302 | 303 | @spec method(Plug.Conn.t(), graphql_info()) :: String.t() 304 | defp method(_, %{type: type}), do: type 305 | defp method(conn, _), do: conn.method 306 | 307 | @spec query(graphql_info(), opts()) :: String.t() | nil 308 | defp query(_, %{include_unnamed_queries: false}), do: nil 309 | defp query(%{query: query}, _) when not is_nil(query), do: query 310 | defp query(_, _), do: nil 311 | 312 | @spec variables(graphql_info() | nil) :: String.t() | nil 313 | defp variables(%{variables: variables}) when not is_nil(variables), do: variables 314 | defp variables(_), do: nil 315 | 316 | @spec graphql_info(Plug.Conn.t(), opts()) :: graphql_info() | nil 317 | defp graphql_info(%{method: "POST", params: params = %{"query" => query}}, opts) 318 | when is_binary(query) do 319 | type = 320 | query 321 | |> String.trim() 322 | |> query_type() 323 | 324 | if is_nil(type) do 325 | nil 326 | else 327 | %{type: type} 328 | |> put_operation_name(params) 329 | |> put_query(params["query"], opts) 330 | |> put_variables(params["variables"], opts) 331 | end 332 | end 333 
| 334 | defp graphql_info(_, _), do: nil 335 | 336 | @spec put_operation_name(map(), map()) :: map() 337 | defp put_operation_name(info, params) do 338 | operation = operation_name(params) 339 | Map.put(info, :operation, operation) 340 | end 341 | 342 | @spec put_query(map(), String.t(), opts()) :: map() 343 | defp put_query(%{operation: "unnamed"} = info, query, %{include_unnamed_queries: true}), 344 | do: Map.put(info, :query, query) 345 | 346 | defp put_query(info, _query, _opts), do: info 347 | 348 | @spec put_variables(map(), any(), opts()) :: map() 349 | defp put_variables(info, _variables, %{include_variables: false}), do: info 350 | defp put_variables(info, variables, _) when not is_map(variables), do: info 351 | 352 | defp put_variables(info, variables, opts) do 353 | filtered = filter_variables(variables, opts.filter_variables) 354 | 355 | case Jason.encode(filtered) do 356 | {:ok, encoded} -> Map.put(info, :variables, encoded) 357 | _ -> info 358 | end 359 | end 360 | 361 | @spec filter_variables(map(), list(String.t())) :: map() 362 | defp filter_variables(variables, to_filter) do 363 | variables 364 | |> Enum.map(&filter(&1, to_filter)) 365 | |> Enum.into(%{}) 366 | end 367 | 368 | @spec filter({String.t(), term()}, list(String.t())) :: {String.t(), term()} 369 | defp filter({key, value}, to_filter) do 370 | if key in to_filter do 371 | {key, "[FILTERED]"} 372 | else 373 | {key, value} 374 | end 375 | end 376 | 377 | @spec formatted_diff(integer()) :: String.t() 378 | defp formatted_diff(diff) when diff > 1000 do 379 | "#{diff |> div(1000) |> Integer.to_string()}ms" 380 | end 381 | 382 | defp formatted_diff(diff), do: "#{Integer.to_string(diff)}µs" 383 | 384 | @spec connection_type(Plug.Conn.t()) :: String.t() 385 | defp connection_type(%{state: :set_chunked}), do: "Chunked" 386 | defp connection_type(_), do: "Sent" 387 | 388 | @spec operation_name(map()) :: String.t() | nil 389 | defp operation_name(%{"operationName" => name}), do: name 390 | 391 | 
defp operation_name(%{"query" => query}) do 392 | case Regex.run(@query_name_regex, query, capture: :all_but_first) do 393 | [query_name] -> query_name 394 | _ -> "unnamed" 395 | end 396 | end 397 | 398 | defp operation_name(_), do: "unnamed" 399 | 400 | @spec query_type(term()) :: String.t() | nil 401 | defp query_type("query" <> _), do: "QUERY" 402 | defp query_type("mutation" <> _), do: "MUTATION" 403 | defp query_type("{" <> _), do: "QUERY" 404 | defp query_type(_), do: nil 405 | 406 | defp should_log_request?(conn, opts) do 407 | cond do 408 | is_integer(conn.status) and conn.status not in opts.sampled_status_codes -> 409 | # log all HTTP status >= 300 (usually errors) 410 | true 411 | 412 | conn.request_path in opts.ignored_paths -> 413 | false 414 | 415 | true -> 416 | should_include_in_sample?(opts[:success_log_sampling_ratio]) 417 | end 418 | end 419 | 420 | defp should_include_in_sample?(ratio) when is_float(ratio) and ratio >= 1.0, do: true 421 | 422 | defp should_include_in_sample?(ratio) do 423 | random_float() <= ratio 424 | end 425 | 426 | # Returns a float (4 digit precision) between 0.0 and 1.0 427 | # 428 | # Alternative: 429 | # :crypto.rand_uniform(1, 10_000) / 10_000 430 | # 431 | defp random_float do 432 | :rand.uniform(10_000) / 10_000 433 | end 434 | end 435 | end 436 | -------------------------------------------------------------------------------- /test/uinta/plug_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Uinta.PlugTest do 2 | use ExUnit.Case 3 | import Plug.Test 4 | import Plug.Conn 5 | import ExUnit.CaptureLog 6 | 7 | require Logger 8 | 9 | defmodule MyPlug do 10 | use Plug.Builder 11 | 12 | plug(Uinta.Plug) 13 | plug(:passthrough) 14 | 15 | defp passthrough(conn, _) do 16 | Plug.Conn.send_resp(conn, 200, "Passthrough") 17 | end 18 | end 19 | 20 | defmodule MyPlugWithCustomSampledStatusCodes do 21 | use Plug.Builder 22 | 23 | plug(Uinta.Plug, 24 | success_log_sampling_ratio: 
0, 25 | sampled_status_codes: Uinta.Plug.default_sampled_status_codes() ++ [401] 26 | ) 27 | 28 | plug(:passthrough) 29 | 30 | defp passthrough(conn, _) do 31 | Plug.Conn.send_resp(conn, 401, "Passthrough") 32 | end 33 | end 34 | 35 | defmodule JsonPlug do 36 | use Plug.Builder 37 | 38 | plug(Uinta.Plug, json: true) 39 | plug(:passthrough) 40 | 41 | defp passthrough(conn, _) do 42 | Plug.Conn.send_resp(conn, 200, "Passthrough") 43 | end 44 | end 45 | 46 | defmodule MapPlug do 47 | use Plug.Builder 48 | 49 | plug(Uinta.Plug, format: :map) 50 | plug(:passthrough) 51 | 52 | defp passthrough(conn, _) do 53 | Plug.Conn.send_resp(conn, 200, "Passthrough") 54 | end 55 | end 56 | 57 | defmodule JsonPlugWithDataDogFields do 58 | use Plug.Builder 59 | 60 | plug(Uinta.Plug, json: true, include_datadog_fields: true) 61 | plug(:passthrough) 62 | 63 | defp passthrough(conn, _) do 64 | Plug.Conn.send_resp(conn, 200, "Passthrough") 65 | end 66 | end 67 | 68 | defmodule IgnoredPathsPlug do 69 | use Plug.Builder 70 | 71 | plug(Uinta.Plug, ignored_paths: ["/ignore"]) 72 | plug(:passthrough) 73 | 74 | defp passthrough(conn, _) do 75 | Plug.Conn.send_resp(conn, 200, "Passthrough") 76 | end 77 | end 78 | 79 | defmodule IgnoredPathsErrorPlug do 80 | use Plug.Builder 81 | 82 | plug(Uinta.Plug, ignored_paths: ["/ignore"]) 83 | plug(:passthrough) 84 | 85 | defp passthrough(conn, _) do 86 | Plug.Conn.send_resp(conn, 500, "Passthrough") 87 | end 88 | end 89 | 90 | defmodule IncludeVariablesPlug do 91 | use Plug.Builder 92 | 93 | plug(Uinta.Plug, include_variables: true, filter_variables: ~w(password)) 94 | plug(:passthrough) 95 | 96 | defp passthrough(conn, _) do 97 | Plug.Conn.send_resp(conn, 200, "Passthrough") 98 | end 99 | end 100 | 101 | defmodule IncludeUnnamedQueriesPlug do 102 | use Plug.Builder 103 | 104 | plug(Uinta.Plug, include_unnamed_queries: true) 105 | plug(:passthrough) 106 | 107 | defp passthrough(conn, _) do 108 | Plug.Conn.send_resp(conn, 200, "Passthrough") 109 | end 110 
| end 111 | 112 | defmodule MyChunkedPlug do 113 | use Plug.Builder 114 | 115 | plug(Uinta.Plug) 116 | plug(:passthrough) 117 | 118 | defp passthrough(conn, _) do 119 | Plug.Conn.send_chunked(conn, 200) 120 | end 121 | end 122 | 123 | defmodule MyHaltingPlug do 124 | use Plug.Builder, log_on_halt: :debug 125 | 126 | plug(:halter) 127 | defp halter(conn, _), do: halt(conn) 128 | end 129 | 130 | defmodule MyDebugLevelPlug do 131 | use Plug.Builder 132 | 133 | plug(Uinta.Plug, log: :debug) 134 | plug(:passthrough) 135 | 136 | defp passthrough(conn, _) do 137 | Plug.Conn.send_resp(conn, 200, "Passthrough") 138 | end 139 | end 140 | 141 | defmodule MyDynamicLevelPlug do 142 | use Plug.Builder 143 | 144 | plug(Uinta.Plug, log: {__MODULE__, :level, [:some, :opts]}) 145 | plug(:passthrough) 146 | 147 | def level(conn, :some, :opts) do 148 | case conn.status do 149 | info when info in 100..199 -> 150 | :debug 151 | 152 | success when success in 200..299 -> 153 | :info 154 | 155 | redirect when redirect in 300..399 -> 156 | :notice 157 | 158 | client_error when client_error in 400..499 -> 159 | :warning 160 | 161 | server_error when server_error in 500..599 -> 162 | :error 163 | end 164 | end 165 | 166 | defp passthrough(conn, _opts) do 167 | status = Map.fetch!(conn.private, :dynamic_status) 168 | Plug.Conn.send_resp(conn, status, "Body shouldn't matter") 169 | end 170 | end 171 | 172 | defmodule SampleSuccessPlug do 173 | use Plug.Builder 174 | 175 | plug(Uinta.Plug, success_log_sampling_ratio: 0) 176 | plug(:passthrough) 177 | 178 | defp passthrough(conn, _) do 179 | Plug.Conn.send_resp(conn, 200, "Passthrough") 180 | end 181 | end 182 | 183 | test "logs proper message to console" do 184 | message = 185 | capture_log(fn -> 186 | MyPlug.call(conn(:get, "/"), []) 187 | end) 188 | 189 | assert message =~ ~r"\[info\]\s+GET / - Sent 200 in [0-9]+[µm]s"u 190 | 191 | message = 192 | capture_log(fn -> 193 | MyPlug.call(conn(:get, "/hello/world"), []) 194 | end) 195 | 196 | assert 
message =~ ~r"\[info\]\s+GET /hello/world - Sent 200 in [0-9]+[µm]s"u 197 | end 198 | 199 | test "logs proper graphql message to console" do 200 | variables = %{"user_uid" => "b1641ddf-b7b0-445e-bcbb-96ef359eae81"} 201 | params = %{"operationName" => "getUser", "query" => "query getUser", "variables" => variables} 202 | 203 | message = 204 | capture_log(fn -> 205 | MyPlug.call(conn(:post, "/graphql", params), []) 206 | end) 207 | 208 | assert message =~ ~r"\[info\]\s+QUERY getUser \(/graphql\) - Sent 200 in [0-9]+[µm]s"u 209 | end 210 | 211 | test "does not try to parse query details from non-string 'query' params" do 212 | params = %{"query" => ["query FakeQuery {}"]} 213 | 214 | message = 215 | capture_log(fn -> 216 | MyPlug.call(conn(:post, "/hello/world", params), []) 217 | end) 218 | 219 | assert message =~ ~r"\[info\]\s+POST /hello/world - Sent 200 in [0-9]+[µm]s"u 220 | end 221 | 222 | test "logs map to console" do 223 | message = 224 | capture_log(fn -> 225 | MapPlug.call(conn(:get, "/"), []) 226 | end) 227 | 228 | assert message =~ ~r/client_ip: \"127.0.0.1\"/u 229 | assert message =~ ~r/duration_ms: [0-9]+\.?[0-9]+/u 230 | assert message =~ ~r/method: \"GET\"/u 231 | assert message =~ ~r"path: \"/\""u 232 | assert message =~ ~r/status: \"200\"/u 233 | assert message =~ ~r/timing: \"[0-9]+[µm]s\"/u 234 | end 235 | 236 | test "logs proper json to console" do 237 | message = 238 | capture_log(fn -> 239 | JsonPlug.call(conn(:get, "/"), []) 240 | end) 241 | 242 | assert message =~ ~r/client_ip\":\"127.0.0.1\"/u 243 | assert message =~ ~r/duration_ms\":[0-9]+\.?[0-9]+/u 244 | assert message =~ ~r/method\":\"GET\"/u 245 | assert message =~ ~r"path\":\"/\""u 246 | assert message =~ ~r/status\":\"200\"/u 247 | assert message =~ ~r/timing\":\"[0-9]+[µm]s\"/u 248 | end 249 | 250 | test "logs proper json with Datadog fields to console" do 251 | message = 252 | capture_log(fn -> 253 | JsonPlugWithDataDogFields.call(conn(:get, "/"), []) 254 | end) 255 | 256 | assert 
message =~ ~r/client_ip\":\"127.0.0.1\"/u 257 | assert message =~ ~r/duration_ms\":[0-9]+\.?[0-9]+/u 258 | assert message =~ ~r/method\":\"GET\"/u 259 | assert message =~ ~r"path\":\"/\""u 260 | assert message =~ ~r/status\":\"200\"/u 261 | assert message =~ ~r/timing\":\"[0-9]+[µm]s\"/u 262 | 263 | assert message =~ ~r/network.client.ip\":\"127.0.0.1\"/u 264 | assert message =~ ~r/duration\":[0-9]+\.?[0-9]+/u 265 | assert message =~ ~r/http.method\":\"GET\"/u 266 | assert message =~ ~r"http.url\":\"/\""u 267 | assert message =~ ~r/http.status_code\":200/u 268 | end 269 | 270 | test "logs graphql json to console, use operationName" do 271 | variables = %{"user_uid" => "b1641ddf-b7b0-445e-bcbb-96ef359eae81"} 272 | 273 | params = %{ 274 | "operationName" => "getUser", 275 | "query" => "query totoQuery", 276 | "variables" => variables 277 | } 278 | 279 | message = 280 | capture_log(fn -> 281 | JsonPlug.call(conn(:post, "/graphql", params), []) 282 | end) 283 | 284 | assert message =~ ~r/client_ip\":\"127.0.0.1\"/u 285 | assert message =~ ~r/duration_ms\":[0-9]+\.?[0-9]+/u 286 | assert message =~ ~r"path\":\"/graphql\""u 287 | assert message =~ ~r/status\":\"200\"/u 288 | assert message =~ ~r/timing\":\"[0-9]+[µm]s\"/u 289 | assert message =~ ~r/method\":\"QUERY\"/u 290 | assert message =~ ~r/operation_name\":\"getUser\"/u 291 | end 292 | 293 | test "logs graphql json to console use Query for operationName" do 294 | variables = %{"user_uid" => "b1641ddf-b7b0-445e-bcbb-96ef359eae81"} 295 | params = %{"query" => "query getUser", "variables" => variables} 296 | 297 | message = 298 | capture_log(fn -> 299 | JsonPlug.call(conn(:post, "/graphql", params), []) 300 | end) 301 | 302 | assert message =~ ~r/client_ip\":\"127.0.0.1\"/u 303 | assert message =~ ~r/duration_ms\":[0-9]+\.?[0-9]+/u 304 | assert message =~ ~r"path\":\"/graphql\""u 305 | assert message =~ ~r/status\":\"200\"/u 306 | assert message =~ ~r/timing\":\"[0-9]+[µm]s\"/u 307 | assert message =~ 
~r/method\":\"QUERY\"/u 308 | assert message =~ ~r/operation_name\":\"getUser\"/u 309 | end 310 | 311 | test "logs graphql json to console use mutation for operationName" do 312 | variables = %{"user_uid" => "b1641ddf-b7b0-445e-bcbb-96ef359eae81"} 313 | 314 | query = """ 315 | mutation track($userId: String!, $event: String!, $properties: [String]) { 316 | track(userId: $userId, event: $event, properties: $properties) { 317 | status 318 | } 319 | } 320 | """ 321 | 322 | params = %{"query" => query, "variables" => variables} 323 | 324 | message = 325 | capture_log(fn -> 326 | JsonPlug.call(conn(:post, "/graphql", params), []) 327 | end) 328 | 329 | assert message =~ ~r/client_ip\":\"127.0.0.1\"/u 330 | assert message =~ ~r/duration_ms\":[0-9]+\.?[0-9]+/u 331 | assert message =~ ~r"path\":\"/graphql\""u 332 | assert message =~ ~r/status\":\"200\"/u 333 | assert message =~ ~r/timing\":\"[0-9]+[µm]s\"/u 334 | assert message =~ ~r/method\":\"MUTATION\"/u 335 | assert message =~ ~r/operation_name\":\"track\"/u 336 | end 337 | 338 | test "logs graphql json to console with extra headers" do 339 | variables = %{"user_uid" => "b1641ddf-b7b0-445e-bcbb-96ef359eae81"} 340 | params = %{"operationName" => "getUser", "query" => "query getUser", "variables" => variables} 341 | 342 | conn = 343 | :post 344 | |> conn("/graphql", params) 345 | |> put_req_header("x-forwarded-for", "someip") 346 | |> put_req_header("referer", "http://I.am.referer") 347 | |> put_req_header("user-agent", "Mozilla") 348 | |> put_req_header("x-forwarded-proto", "http") 349 | |> put_req_header("x-forwarded-port", "4000") 350 | 351 | message = 352 | capture_log(fn -> 353 | JsonPlug.call(conn, []) 354 | end) 355 | 356 | assert message =~ ~r/client_ip\":\"127.0.0.1\"/u 357 | assert message =~ ~r/x_forwarded_for\":\"someip\"/u 358 | assert message =~ ~r"referer\":\"http://I.am.referer\""u 359 | assert message =~ ~r/user_agent\":\"Mozilla\"/u 360 | assert message =~ ~r/x_forwarded_proto\":\"http\"/u 361 | assert 
message =~ ~r/x_forwarded_port\":\"4000\"/u 362 | end 363 | 364 | test "logs paths with double slashes and trailing slash" do 365 | message = 366 | capture_log(fn -> 367 | MyPlug.call(conn(:get, "/hello//world/"), []) 368 | end) 369 | 370 | assert message =~ ~r"/hello//world/"u 371 | end 372 | 373 | test "logs chunked if chunked reply" do 374 | message = 375 | capture_log(fn -> 376 | MyChunkedPlug.call(conn(:get, "/hello/world"), []) 377 | end) 378 | 379 | assert message =~ ~r"Chunked 200 in [0-9]+[µm]s"u 380 | end 381 | 382 | test "logs proper log level to console" do 383 | message = 384 | capture_log(fn -> 385 | MyDebugLevelPlug.call(conn(:get, "/"), []) 386 | end) 387 | 388 | assert message =~ ~r"\[debug\] GET / - Sent 200 in [0-9]+[µm]s"u 389 | end 390 | 391 | test "logs dynamic log low level" do 392 | conn = 393 | :get 394 | |> conn("/") 395 | |> put_private(:dynamic_status, 100) 396 | 397 | message = 398 | capture_log(fn -> 399 | MyDynamicLevelPlug.call(conn, []) 400 | end) 401 | 402 | assert message =~ ~r"\[debug\] GET / - Sent 100 in [0-9]+[µm]s"u 403 | end 404 | 405 | test "logs dynamic log mid level" do 406 | conn = 407 | :get 408 | |> conn("/") 409 | |> put_private(:dynamic_status, 307) 410 | 411 | message = 412 | capture_log(fn -> 413 | MyDynamicLevelPlug.call(conn, []) 414 | end) 415 | 416 | assert message =~ ~r"\[notice\] GET / - Sent 307 in [0-9]+[µm]s"u 417 | end 418 | 419 | test "logs dynamic log high level" do 420 | conn = 421 | :get 422 | |> conn("/") 423 | |> put_private(:dynamic_status, 502) 424 | 425 | message = 426 | capture_log(fn -> 427 | MyDynamicLevelPlug.call(conn, []) 428 | end) 429 | 430 | assert message =~ ~r"\[error\] GET / - Sent 502 in [0-9]+[µm]s"u 431 | end 432 | 433 | test "ignores ignored_paths when a 200-level status is returned" do 434 | message = 435 | capture_log(fn -> 436 | IgnoredPathsPlug.call(conn(:post, "/ignore", []), []) 437 | end) 438 | 439 | refute message =~ "Sent 200" 440 | end 441 | 442 | test "logs 
ignored_paths when an error status is returned" do 443 | message = 444 | capture_log(fn -> 445 | IgnoredPathsErrorPlug.call(conn(:post, "/ignore", []), []) 446 | end) 447 | 448 | assert message =~ ~r"\[info\]\s+POST /ignore - Sent 500 in [0-9]+[µm]s"u 449 | end 450 | 451 | test "includes variables when applicable" do 452 | variables = %{"user_uid" => "b1641ddf-b7b0-445e-bcbb-96ef359eae81"} 453 | params = %{"operationName" => "getUser", "query" => "query getUser", "variables" => variables} 454 | 455 | message = 456 | capture_log(fn -> 457 | IncludeVariablesPlug.call(conn(:post, "/graphql", params), []) 458 | end) 459 | 460 | assert message =~ "with {\"user_uid\":\"b1641ddf-b7b0-445e-bcbb-96ef359eae81\"}" 461 | end 462 | 463 | test "doesn't try to include variables on non-graphql requests" do 464 | message = capture_log(fn -> IncludeVariablesPlug.call(conn(:post, "/", %{}), []) end) 465 | refute message =~ "with" 466 | end 467 | 468 | test "doesn't try to include variables when none were given" do 469 | params = %{"operationName" => "getUser", "query" => "query getUser"} 470 | 471 | message = 472 | capture_log(fn -> IncludeVariablesPlug.call(conn(:post, "/graphql", params), []) end) 473 | 474 | refute message =~ "with" 475 | end 476 | 477 | test "filters variables when applicable" do 478 | variables = %{ 479 | "user_uid" => "b1641ddf-b7b0-445e-bcbb-96ef359eae81", 480 | "password" => "password123" 481 | } 482 | 483 | params = %{"operationName" => "getUser", "query" => "query getUser", "variables" => variables} 484 | 485 | message = 486 | capture_log(fn -> 487 | IncludeVariablesPlug.call(conn(:post, "/graphql", params), []) 488 | end) 489 | 490 | assert message =~ 491 | "with {\"password\":\"[FILTERED]\",\"user_uid\":\"b1641ddf-b7b0-445e-bcbb-96ef359eae81\"}" 492 | end 493 | 494 | test "gets the GraphQL operation name from the query when it isn't in a separate param" do 495 | query = """ 496 | mutation CreateReviewForEpisode($ep: Episode!, $review: ReviewInput!) 
{ 497 | createReview(episode: $ep, review: $review) { 498 | stars 499 | commentary 500 | } 501 | } 502 | """ 503 | 504 | variables = %{ 505 | "ep" => "JEDI", 506 | "review" => %{"stars" => 5, "commentary" => "This is a great movie!"} 507 | } 508 | 509 | params = %{"query" => query, "variables" => variables} 510 | 511 | message = capture_log(fn -> MyPlug.call(conn(:post, "/graphql", params), []) end) 512 | assert message =~ "MUTATION CreateReviewForEpisode" 513 | end 514 | 515 | test "gets the GraphQL operation name from the query when there is an array parameter" do 516 | query = """ 517 | mutation track($userId: String!, $event: String!, $properties: [String]) { 518 | track(userId: $userId, event: $event, properties: $properties) { 519 | status 520 | } 521 | } 522 | """ 523 | 524 | variables = %{ 525 | "userId" => "55203f63-0b79-426c-840e-ea68bdac765c", 526 | "event" => "WEBSITE_WIDGET_PROMPT_SHOW", 527 | "properties" => ["green", "firefox"] 528 | } 529 | 530 | params = %{"query" => query, "variables" => variables} 531 | 532 | message = capture_log(fn -> MyPlug.call(conn(:post, "/graphql", params), []) end) 533 | assert message =~ "MUTATION track" 534 | end 535 | 536 | test "gets the GraphQL operation name from the query when it uses no commas and has whitespace in the parameters" do 537 | query = """ 538 | mutation CreateReviewForEpisode( $ep: Episode! $review: ReviewInput! 
) { 539 | createReview(episode: $ep, review: $review) { 540 | stars 541 | commentary 542 | } 543 | } 544 | """ 545 | 546 | variables = %{ 547 | "ep" => "JEDI", 548 | "review" => %{"stars" => 5, "commentary" => "This is a great movie!"} 549 | } 550 | 551 | params = %{"query" => query, "variables" => variables} 552 | 553 | message = capture_log(fn -> MyPlug.call(conn(:post, "/graphql", params), []) end) 554 | assert message =~ "MUTATION CreateReviewForEpisode" 555 | end 556 | 557 | test "does not log custom sampled status codes when they are in the configured sampled status codes" do 558 | message = capture_log(fn -> MyPlugWithCustomSampledStatusCodes.call(conn(:get, "/"), []) end) 559 | refute message =~ "GET / - Sent 401 in [0-9]+[µm]s" 560 | end 561 | 562 | test "includes the query when it isn't named" do 563 | query = """ 564 | { 565 | hero { 566 | name 567 | } 568 | } 569 | """ 570 | 571 | params = %{"query" => query} 572 | 573 | message = 574 | capture_log(fn -> IncludeUnnamedQueriesPlug.call(conn(:post, "/graphql", params), []) end) 575 | 576 | assert message =~ "QUERY unnamed" 577 | assert message =~ "hero {" 578 | end 579 | 580 | test "does not log when sample percent is set" do 581 | message = 582 | capture_log(fn -> 583 | SampleSuccessPlug.call(conn(:get, "/"), []) 584 | end) 585 | 586 | refute message =~ ~r"\[debug\] GET / - Sent 200 in [0-9]+[µm]s"u 587 | end 588 | end 589 | --------------------------------------------------------------------------------