├── .formatter.exs ├── .github ├── dependabot.yml └── elixir_ci.yml ├── .gitignore ├── LICENSE ├── README.md ├── config └── config.exs ├── lib ├── cli.ex ├── logflare_logger.ex ├── logflare_logger │ ├── application.ex │ ├── backend_config.ex │ ├── batch_cache.ex │ ├── formatter.ex │ ├── http_backend.ex │ ├── log_params.ex │ └── stacktrace.ex ├── mix │ └── tasks │ │ └── verify_config.ex ├── pending_log_event.ex ├── repo.ex └── utils.ex ├── mix.exs ├── mix.lock └── test ├── cache_test.exs ├── exception_logging_test.exs ├── http_backend_gen_event_test.exs ├── integration_test.exs ├── log_params_test.exs ├── logflare_logger_test.exs ├── payload_cases_test.exs ├── pending_log_event_test.exs ├── support └── utils.ex └── test_helper.exs /.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"], 4 | import_deps: [:typed_struct, :tesla] 5 | ] 6 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: mix 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | open-pull-requests-limit: 10 8 | ignore: 9 | - dependency-name: ex_doc 10 | versions: 11 | - 0.24.0 12 | - 0.24.1 13 | - dependency-name: etso 14 | versions: 15 | - 0.1.3 16 | - 0.1.4 17 | -------------------------------------------------------------------------------- /.github/elixir_ci.yml: -------------------------------------------------------------------------------- 1 | name: Elixir CI 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | branches: [master] 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | build: 13 | name: Build and test 14 | runs-on: ubuntu-latest 15 | env: 16 | MIX_ENV: test 17 | SHELL: /bin/bash 18 | steps: 19 | - uses: actions/checkout@v3 20 | - name: 
Set up Elixir 21 | uses: erlef/setup-beam@v1 22 | with: 23 | elixir-version: "1.14.4" # Define the elixir version [required] 24 | otp-version: "25" # Define the OTP version [required] 25 | - name: Restore dependencies cache 26 | uses: actions/cache@v3 27 | with: 28 | path: | 29 | deps 30 | key: ${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }} 31 | restore-keys: ${{ runner.os }}-mix- 32 | - name: Install dependencies 33 | run: mix deps.get 34 | - name: Run compilation warnings check 35 | run: mix test.compile 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover/ 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps/ 9 | 10 | # Where third-party dependencies like ExDoc output generated docs. 11 | /doc/ 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore package tarball (built via "mix hex.build"). 23 | logflare_logger_backend-*.tar 24 | 25 | # Dev tooling 26 | 27 | /.idea/ 28 | /.elixir_ls/ 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Supabase, Inc. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LogflareLogger 2 | 3 | An Elixir Logger backend for [Logflare](https://github.com/Logflare/logflare). Streams logs to the [Logflare.app](https://logflare.app) API. 4 | 5 | # Configuration 6 | 7 | Get your `api_key` and create a `source` at [logflare.app](https://logflare.app/dashboard) 8 | 9 | You will need a Logflare source **source_id** which you can copy from your dashboard after you create a one. 
10 | 11 | ```elixir 12 | config :logger, 13 | level: :info, # or other Logger level 14 | backends: [LogflareLogger.HttpBackend] 15 | 16 | config :logflare_logger_backend, 17 | url: "https://api.logflare.app", # https://api.logflare.app is configured by default and you can set your own url 18 | level: :info, # Default LogflareLogger level is :info. Note that log messages are filtered by the :logger application first 19 | api_key: "...", # your Logflare API key, found on your dashboard 20 | source_id: "...", # the Logflare source UUID, found on your Logflare dashboard 21 | flush_interval: 1_000, # minimum time in ms before a log batch is sent 22 | max_batch_size: 50, # maximum number of events before a log batch is sent 23 | metadata: :all # optionally you can drop keys if they exist with `metadata: [drop: [:list, :keys, :to, :drop]]` 24 | ``` 25 | 26 | Alternatively, you can configure these options in your system environment. Prefix the above option names with `LOGFLARE_`. 27 | 28 | ```bash 29 | export LOGFLARE_URL="https://api.logflare.app" 30 | export LOGFLARE_API_KEY="..." 31 | export LOGFLARE_SOURCE_ID="..." 32 | ``` 33 | 34 | ## Usage 35 | 36 | After configuring LogflareLogger in `config.exs`, use `Logger.info, Logger.error, ...` functions to send log events to Logflare app. 37 | 38 | ## Usage with context 39 | 40 | `LogflareLogger.context` function signatures follows the one of `Logger.metadata` with slight modifications to parameters and return values. 41 | 42 | ```elixir 43 | # Merges map or keyword with existing context, will overwrite values. 
44 | LogflareLogger.context(%{user: %{id: 3735928559}}) 45 | LogflareLogger.context(user: %{id: 3735928559}) 46 | 47 | # Get all context entries or a value for a specific key 48 | LogflareLogger.context(:user) 49 | LogflareLogger.context() 50 | 51 | # Deletes all context entries or specific context key/value 52 | LogflareLogger.context(user: nil) 53 | LogflareLogger.reset_context() 54 | ``` 55 | 56 | ## Current limitations 57 | 58 | Logflare log event BigQuery table schema is auto-generated per source. If you send a log with `Logger.info("first", user: %{id: 1})`, Logflare will generate a metadata field of type integer. If in the future, you'll send a log event to the same source using `Logger.info("first", user: %{id: "d9c2feff-d38a-4671-8de4-a1e7f7dd7e3c"1})`, the log with a binary id will be rejected. 59 | 60 | LogflareLogger log payloads sent to Logflare API are encoded using [BERT](http://bert-rpc.org). 61 | 62 | At this moment LogflareLogger doesn't support full one-to-one logging of Elixir types and applies the following conversions: 63 | 64 | - atoms converted to strings 65 | - charlists are converted to strings 66 | - tuples converted to arrays 67 | - keyword lists converted to maps 68 | - structs converted to maps 69 | - NaiveDateTime and DateTime are converted using the `String.Chars` protocol 70 | - pids are converted to strings 71 | 72 | LogflareLogger doesn't support: 73 | 74 | - non-binary messages, e.g. `Logger.info(%{user_count: 1337})` 75 | 76 | ## Exceptions 77 | 78 | LogflareLogger automatically logs all exceptions and formats stacktraces. 79 | 80 | ## Troubleshooting 81 | 82 | Run `mix logflare_logger.verify_config` to test your config. 83 | 84 | Email for help! 
85 | 86 | ## Installation 87 | 88 | If [available in Hex](https://hex.pm/docs/publish), the package can be installed 89 | by adding `logflare_logger_backend` to your list of dependencies in `mix.exs`: 90 | 91 | ```elixir 92 | def deps do 93 | [ 94 | {:logflare_logger_backend, "~> 0.11.4"} 95 | ] 96 | end 97 | ``` 98 | 99 | Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) 100 | and published on [HexDocs](https://hexdocs.pm). Once published, the docs can 101 | be found at [https://hexdocs.pm/logflare_logger_backend](https://hexdocs.pm/logflare_logger_backend). 102 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | # This file is responsible for configuring your application 2 | # and its dependencies with the aid of the Mix.Config module. 3 | import Config 4 | 5 | # This configuration is loaded before any dependency and is restricted 6 | # to this project. If another project depends on this project, this 7 | # file won't be loaded nor affect the parent project. For this reason, 8 | # if you want to provide default values for your application for 9 | # third-party users, it should be done in your "mix.exs" file. 10 | 11 | # You can configure your application as: 12 | # 13 | # config :logflare_logger, key: :value 14 | # 15 | # and access this configuration in your application as: 16 | # 17 | # Application.get_env(:logflare_logger, :key) 18 | # 19 | # You can also configure a third-party app: 20 | # 21 | # config :logger, level: :info 22 | # 23 | 24 | # It is also possible to import configuration files, relative to this 25 | # directory. For example, you can emulate configuration per environment 26 | # by uncommenting the line below and defining dev.exs, test.exs and such. 
27 | # Configuration from the imported file will override the ones defined 28 | # here (which is why it is important to import them last). 29 | # 30 | # import_config "#{Mix.env()}.exs" 31 | 32 | config :elixir, :time_zone_database, Tzdata.TimeZoneDatabase 33 | 34 | if Mix.env() == :test do 35 | config :logger, :console, level: :warning 36 | end 37 | -------------------------------------------------------------------------------- /lib/cli.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger.CLI do 2 | def throw_on_missing_url!(url) do 3 | unless url do 4 | throw("Logflare API url #{not_configured()}") 5 | end 6 | end 7 | 8 | def throw_on_missing_source!(source_id) do 9 | unless source_id do 10 | throw("Logflare source_id #{not_configured()}") 11 | end 12 | end 13 | 14 | def throw_on_missing_api_key!(api_key) do 15 | unless api_key do 16 | throw("Logflare API key #{not_configured()}") 17 | end 18 | end 19 | 20 | def not_configured() do 21 | "for LogflareLogger backend is NOT configured" 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /lib/logflare_logger.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger do 2 | @moduledoc """ 3 | """ 4 | alias LogflareLogger.{BatchCache, Formatter} 5 | 6 | def debug(message, metadata \\ []) do 7 | log(:debug, message, metadata) 8 | end 9 | 10 | def info(message, metadata \\ []) do 11 | log(:info, message, metadata) 12 | end 13 | 14 | def warn(message, metadata \\ []) do 15 | log(:warning, message, metadata) 16 | end 17 | 18 | def error(message, metadata \\ []) do 19 | log(:error, message, metadata) 20 | end 21 | 22 | def log(level, message, metadata) do 23 | dt = NaiveDateTime.utc_now() 24 | {date, {hour, minute, second}} = NaiveDateTime.to_erl(dt) 25 | datetime = {date, {hour, minute, second, dt.microsecond}} 26 | config = 
:ets.lookup(:logflare_logger_table, :config) |> Keyword.get(:config) 27 | 28 | metadata = 29 | metadata 30 | |> Map.new() 31 | |> Map.merge(Map.new(Logger.metadata())) 32 | |> Map.merge(%{pid: self()}) 33 | |> Enum.to_list() 34 | 35 | log_event = Formatter.format_event(level, message, datetime, metadata, config) 36 | BatchCache.put(log_event, config) 37 | end 38 | 39 | @doc """ 40 | If no argument is provided, returns the LogflareLogger context stored in the process dictionary. 41 | """ 42 | @spec context() :: map() 43 | def context() do 44 | Logger.metadata() 45 | |> Map.new() 46 | end 47 | 48 | @doc """ 49 | If the argument is an atom, returns LogflareLogger context for the given key. 50 | If the argument is a map or keyword list, their keys/values are merged with the existing LogflareLogger context in the process dictionary. Setting the key to nil will remove that key from the context. 51 | """ 52 | @spec context(map() | keyword()) :: map() 53 | def context(map) when is_map(map) do 54 | map 55 | |> Keyword.new() 56 | |> context() 57 | end 58 | 59 | def context(keyword) when is_list(keyword) do 60 | Logger.metadata(keyword) 61 | context() 62 | end 63 | 64 | @spec context(atom) :: map() 65 | def context(key) when is_atom(key) do 66 | context() 67 | |> Map.get(key) 68 | end 69 | 70 | @doc """ 71 | If no argument is passed, resets the whole context from the process dictionary. 72 | If argument is an atom or a list of atoms, resets the context keeping only the given keys. 
73 | """ 74 | @spec reset_context() :: %{} 75 | def reset_context() do 76 | Logger.reset_metadata() 77 | context() 78 | end 79 | 80 | @spec reset_context(keyword) :: map 81 | def reset_context(keys) when is_list(keys) do 82 | Logger.reset_metadata(keys) 83 | context() 84 | end 85 | end 86 | -------------------------------------------------------------------------------- /lib/logflare_logger/application.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger.Application do 2 | @moduledoc false 3 | 4 | use Application 5 | 6 | def start(_type, _args) do 7 | children = [ 8 | LogflareLogger.Repo 9 | ] 10 | 11 | opts = [strategy: :one_for_one, name: LogflareLogger.Supervisor] 12 | Supervisor.start_link(children, opts) 13 | end 14 | end 15 | -------------------------------------------------------------------------------- /lib/logflare_logger/backend_config.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger.BackendConfig do 2 | @moduledoc false 3 | @default_batch_size 100 4 | @default_flush_interval 5000 5 | 6 | alias LogflareLogger.{Formatter} 7 | 8 | use TypedStruct 9 | 10 | # TypeSpecs 11 | 12 | typedstruct do 13 | field :api_client, Tesla.Client.t() 14 | field :format, {atom, atom}, default: {Formatter, :format} 15 | field :level, atom, default: :info 16 | field :source_id, String.t() 17 | field :metadata, list(atom) | atom, default: :all 18 | field :batch_max_size, non_neg_integer, default: @default_batch_size 19 | field :batch_size, non_neg_integer, default: 0 20 | field :flush_interval, non_neg_integer, default: @default_flush_interval 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/logflare_logger/batch_cache.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger.BatchCache do 2 | @moduledoc """ 3 | Caches the batch, dispatches 
API post request if the batch is larger than configured max batch size or flush is called. 4 | 5 | Doesn't error or drop the message if the API is unresponsive, holds them 6 | """ 7 | 8 | alias LogflareLogger.Repo 9 | alias LogflareLogger.PendingLoggerEvent 10 | import Ecto.Query 11 | 12 | # batch limit prevents runaway memory usage if API is unresponsive 13 | @batch_limit 10_000 14 | 15 | def put(event, config) do 16 | if GenServer.whereis(Repo) do 17 | %PendingLoggerEvent{} 18 | |> PendingLoggerEvent.changeset(%{body: event}) 19 | |> Repo.insert!() 20 | 21 | pending_events = pending_events_not_in_flight() 22 | pending_events_count = Enum.count(pending_events) 23 | 24 | if pending_events_count > @batch_limit do 25 | pending_events 26 | |> Enum.take(pending_events_count - @batch_limit) 27 | |> Enum.each(&Repo.delete/1) 28 | end 29 | 30 | if pending_events_count >= config.batch_max_size, 31 | do: flush(config) 32 | 33 | {:ok, :insert_successful} 34 | else 35 | {:error, :repo_not_found} 36 | end 37 | end 38 | 39 | def flush(config) do 40 | api_request_started_at = System.monotonic_time() 41 | 42 | pending_events = pending_events_not_in_flight() 43 | 44 | if !Enum.empty?(pending_events) && events_in_flight() == [] do 45 | ples = 46 | pending_events 47 | |> Enum.map(fn ple -> 48 | {:ok, ple} = 49 | ple 50 | |> PendingLoggerEvent.changeset(%{api_request_started_at: api_request_started_at}) 51 | |> Repo.update() 52 | 53 | ple 54 | end) 55 | 56 | Task.start(fn -> 57 | ples 58 | |> post_logs(config) 59 | |> case do 60 | {:ok, %Tesla.Env{status: status, body: body}} -> 61 | unless status in 200..299 do 62 | IO.warn( 63 | "Logflare API warning: HTTP response status is #{status}. 
Response body is: #{inspect(body)}" 64 | ) 65 | end 66 | 67 | for ple <- ples do 68 | Repo.delete(ple) 69 | end 70 | 71 | {:error, reason} -> 72 | IO.warn("Logflare API error: #{inspect(reason)}") 73 | 74 | reset_events_in_flight(ples) 75 | 76 | :noop 77 | end 78 | end) 79 | else 80 | :noop 81 | end 82 | end 83 | 84 | def clear do 85 | Repo.all(PendingLoggerEvent) |> Enum.map(&Repo.delete(&1)) 86 | end 87 | 88 | def post_logs(events, %{api_client: api_client, source_id: source_id}) do 89 | events = Enum.map(events, & &1.body) 90 | LogflareApiClient.post_logs(api_client, events, source_id) 91 | end 92 | 93 | def sort_by_created_asc(pending_events) do 94 | # etso id is System.monotonic_time 95 | Enum.sort_by(pending_events, & &1.id, &<=/2) 96 | end 97 | 98 | def events_in_flight() do 99 | from(PendingLoggerEvent) 100 | |> where([le], le.api_request_started_at != 0) 101 | |> Repo.all() 102 | |> sort_by_created_asc() 103 | end 104 | 105 | def pending_events_not_in_flight() do 106 | from(PendingLoggerEvent) 107 | |> where([le], le.api_request_started_at == 0) 108 | |> Repo.all() 109 | |> sort_by_created_asc() 110 | end 111 | 112 | def reset_events_in_flight(events) do 113 | for e <- events do 114 | e 115 | |> PendingLoggerEvent.changeset(%{api_request_started_at: 0}) 116 | |> Repo.update() 117 | end 118 | end 119 | end 120 | -------------------------------------------------------------------------------- /lib/logflare_logger/formatter.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger.Formatter do 2 | @moduledoc false 3 | 4 | require Logger 5 | 6 | alias LogflareLogger.LogParams 7 | alias LogflareLogger.BackendConfig, as: Config 8 | 9 | def format(level, message, ts, metadata) do 10 | try do 11 | LogParams.encode(ts, level, message, metadata) 12 | rescue 13 | e -> 14 | %{ 15 | "timestamp" => NaiveDateTime.to_iso8601(NaiveDateTime.utc_now(), :extended) <> "Z", 16 | "message" => "LogflareLogger formatter 
error: #{inspect(e, safe: true)}", 17 | "metadata" => %{ 18 | "formatter_error_params" => %{ 19 | "metadata" => 20 | inspect(metadata, safe: true, limit: :infinity, printable_limit: :infinity), 21 | "timestamp" => inspect(ts), 22 | "message" => inspect(message), 23 | "level" => inspect(level) 24 | }, 25 | "level" => "error" 26 | } 27 | } 28 | end 29 | end 30 | 31 | def format_event(level, msg, ts, meta, %Config{metadata: :all}) do 32 | format(level, msg, ts, Map.new(meta)) 33 | end 34 | 35 | def format_event(level, msg, ts, meta, %Config{metadata: [drop: dropkeys]}) 36 | when is_list(dropkeys) do 37 | meta = 38 | meta 39 | |> Enum.into(%{}) 40 | |> Map.drop(dropkeys) 41 | 42 | format(level, msg, ts, meta) 43 | end 44 | 45 | def format_event(level, msg, ts, meta, %Config{metadata: metakeys}) when is_list(metakeys) do 46 | IO.warn( 47 | "Your logflare_logger_backend configuration key `metadata` is deprecated. Looks like you're using a list of keywords. Please use `metadata: :all` or `metadata: [drop: [:keys, :to, :drop]]`" 48 | ) 49 | 50 | format(level, msg, ts, Map.new(meta)) 51 | end 52 | 53 | def format_event(_, _, _, _, nil) do 54 | raise("LogflareLogger is not configured!") 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /lib/logflare_logger/http_backend.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger.HttpBackend do 2 | @moduledoc """ 3 | Implements :gen_event behaviour, handles incoming Logger messages 4 | """ 5 | 6 | @default_api_url "https://api.logflare.app" 7 | @app :logflare_logger_backend 8 | @behaviour :gen_event 9 | 10 | require Logger 11 | alias LogflareLogger.{Formatter, BatchCache, CLI, Utils} 12 | alias LogflareLogger.BackendConfig, as: Config 13 | 14 | @type level :: Logger.level() 15 | @type message :: Logger.message() 16 | @type metadata :: Logger.metadata() 17 | @type log_msg :: {level, pid, {Logger, message, term, metadata}} 
| :flush 18 | 19 | @spec init(__MODULE__, keyword) :: {:ok, Config.t()} 20 | def init(__MODULE__, options \\ []) when is_list(options) do 21 | schedule_in_flight_check() 22 | msg = "#{__MODULE__} v#{Application.spec(@app, :vsn)} started." 23 | log_after(:info, msg) 24 | 25 | options 26 | |> configure_merge(%Config{}) 27 | |> schedule_flush() 28 | end 29 | 30 | @spec handle_event(log_msg, Config.t()) :: {:ok, Config.t()} 31 | def handle_event(:flush, config), do: flush!(config) 32 | 33 | def handle_event({_, gl, _}, config) when node(gl) != node() do 34 | {:ok, config} 35 | end 36 | 37 | def handle_event({level, _gl, {Logger, msg, datetime, metadata}}, %Config{} = config) do 38 | if log_level_matches?(level, config.level) do 39 | level 40 | |> Formatter.format_event(msg, datetime, metadata, config) 41 | |> BatchCache.put(config) 42 | end 43 | 44 | {:ok, config} 45 | end 46 | 47 | def handle_info({:log_after, level, message}, config) do 48 | Logger.log(level, message) 49 | 50 | {:ok, config} 51 | end 52 | 53 | def handle_info(:in_flight_check, config) do 54 | # If we somehow have events in flight stuck in our Repo, they get reset here to get flushed to Logflare. 55 | if GenServer.whereis(LogflareLogger.Repo) do 56 | count = BatchCache.events_in_flight() |> BatchCache.reset_events_in_flight() |> Enum.count() 57 | 58 | if count > 0 do 59 | msg = 60 | "#{__MODULE__} v#{Application.spec(@app, :vsn)} resetting #{count} log events in flight. If this continues please submit an issue." 
61 | 62 | log_after(:warning, msg) 63 | end 64 | end 65 | 66 | {:ok, config} 67 | end 68 | 69 | def handle_info(:flush, config), do: flush!(config) 70 | 71 | def handle_info(_term, config), do: {:ok, config} 72 | 73 | @spec handle_call({:configure, keyword()}, Config.t()) :: {:ok, :ok, Config.t()} 74 | def handle_call({:configure, options}, %Config{} = config) do 75 | config = configure_merge(options, config) 76 | # Makes sure that next flush is done 77 | # after the configuration update 78 | # if the flush interval is lower than default or previous config 79 | schedule_flush(config) 80 | {:ok, :ok, config} 81 | end 82 | 83 | def code_change(_old_vsn, config, _extra), do: {:ok, config} 84 | 85 | def terminate(_reason, _state), do: :ok 86 | 87 | @spec configure_merge(keyword, Config.t()) :: Config.t() 88 | def configure_merge(options, %Config{} = config) when is_list(options) do 89 | # Configuration values are populated according to the following priorities: 90 | # 1. Dynamically confgiured options with Logger.configure(...) 91 | # 2. Application environment 92 | # 3. System environment 93 | # 4. 
Current config 94 | 95 | sys_options = Utils.find_logflare_sys_envs() 96 | app_options = Application.get_all_env(@app) 97 | 98 | options = 99 | app_options 100 | |> Keyword.merge(sys_options) 101 | |> Keyword.merge(options) 102 | 103 | url = Keyword.get(options, :url) || @default_api_url 104 | api_key = Keyword.get(options, :api_key) 105 | source_id = Keyword.get(options, :source_id) 106 | level = Keyword.get(options, :level, config.level) 107 | format = Keyword.get(options, :format, config.format) 108 | metadata = Keyword.get(options, :metadata, config.metadata) 109 | batch_max_size = Keyword.get(options, :batch_max_size, config.batch_max_size) 110 | flush_interval = Keyword.get(options, :flush_interval, config.flush_interval) 111 | 112 | CLI.throw_on_missing_url!(url) 113 | CLI.throw_on_missing_source!(source_id) 114 | CLI.throw_on_missing_api_key!(api_key) 115 | 116 | api_client = LogflareApiClient.new(%{url: url, api_key: api_key}) 117 | 118 | config = 119 | struct!( 120 | Config, 121 | %{ 122 | api_client: api_client, 123 | source_id: source_id, 124 | level: level, 125 | format: format, 126 | metadata: metadata, 127 | batch_size: config.batch_size, 128 | batch_max_size: batch_max_size, 129 | flush_interval: flush_interval 130 | } 131 | ) 132 | 133 | if :ets.info(:logflare_logger_table) === :undefined do 134 | :ets.new(:logflare_logger_table, [:named_table, :set, :public]) 135 | end 136 | 137 | :ets.insert(:logflare_logger_table, {:config, config}) 138 | 139 | config 140 | end 141 | 142 | # Batching and flushing 143 | 144 | @spec flush!(Config.t()) :: {:ok, Config.t()} 145 | defp flush!(%Config{} = config) do 146 | if GenServer.whereis(LogflareLogger.Repo) do 147 | BatchCache.flush(config) 148 | end 149 | 150 | schedule_flush(config) 151 | end 152 | 153 | @spec schedule_flush(Config.t()) :: {:ok, Config.t()} 154 | defp schedule_flush(%Config{} = config) do 155 | Process.send_after(self(), :flush, config.flush_interval) 156 | {:ok, config} 157 | end 158 | 159 | 
defp schedule_in_flight_check() do 160 | Process.send_after(self(), :in_flight_check, 0) 161 | end 162 | 163 | defp log_after(level, message, delay \\ 5_000) do 164 | # We'd like to see these in Logflare so we delay the log message to make sure Logger and the Logflare backend has been started 165 | Process.send_after(self(), {:log_after, level, message}, delay) 166 | end 167 | 168 | # Events 169 | 170 | @spec log_level_matches?(level, level | nil) :: boolean 171 | defp log_level_matches?(_lvl, nil), do: true 172 | defp log_level_matches?(lvl, min), do: Logger.compare_levels(lvl, min) != :lt 173 | end 174 | -------------------------------------------------------------------------------- /lib/logflare_logger/log_params.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger.LogParams do 2 | @moduledoc """ 3 | Parses and encodes incoming Logger messages for further serialization. 4 | """ 5 | alias LogflareLogger.{Stacktrace, Utils} 6 | @default_metadata_keys Utils.default_metadata_keys() 7 | 8 | @doc """ 9 | Creates a LogParams struct when all fields have serializable values 10 | """ 11 | def encode(timestamp, level, message, metadata) do 12 | new(timestamp, level, message, metadata) 13 | end 14 | 15 | def new(timestamp, level, message, metadata) do 16 | message = encode_message(message) 17 | timestamp = encode_timestamp(timestamp) 18 | metadata = encode_metadata(metadata) 19 | 20 | {stacktrace, metadata} = Map.pop(metadata, "stacktrace") 21 | 22 | {system_context, user_context} = Map.split(metadata, @default_metadata_keys) 23 | 24 | system_context = 25 | system_context 26 | |> enrich(:vm) 27 | 28 | log_params = %{ 29 | "timestamp" => timestamp, 30 | "message" => message, 31 | "metadata" => 32 | user_context 33 | |> Map.put("level", Atom.to_string(level)) 34 | |> Map.put("context", system_context) 35 | } 36 | 37 | if stacktrace do 38 | put_in(log_params, ~w[metadata stacktrace], stacktrace) 39 | else 40 | 
log_params 41 | end 42 | end 43 | 44 | def enrich(context, :vm) do 45 | Map.merge(context, %{"vm" => %{"node" => "#{Node.self()}"}}) 46 | end 47 | 48 | @doc """ 49 | Encodes message, if is iodata converts to binary. 50 | """ 51 | def encode_message(message) do 52 | to_string(message) 53 | end 54 | 55 | @doc """ 56 | Converts erlang datetime tuple into ISO:Extended binary. 57 | """ 58 | 59 | def encode_timestamp({date, {hour, minute, second}}) do 60 | encode_timestamp({date, {hour, minute, second, 0}}) 61 | end 62 | 63 | def encode_timestamp({date, {hour, minute, second, {_micro, 6} = fractions_with_precision}}) do 64 | {date, {hour, minute, second}} 65 | |> NaiveDateTime.from_erl!(fractions_with_precision) 66 | |> NaiveDateTime.to_iso8601(:extended) 67 | |> Kernel.<>("Z") 68 | end 69 | 70 | def encode_timestamp({date, {hour, minute, second, milli}}) when is_integer(milli) do 71 | erldt = 72 | {date, {hour, minute, second}} 73 | |> :calendar.local_time_to_universal_time_dst() 74 | |> case do 75 | [] -> {date, {hour, minute, second}} 76 | [dt_utc] -> dt_utc 77 | [_, dt_utc] -> dt_utc 78 | end 79 | 80 | erldt 81 | |> NaiveDateTime.from_erl!({milli * 1000, 6}) 82 | |> NaiveDateTime.to_iso8601(:extended) 83 | |> Kernel.<>("Z") 84 | end 85 | 86 | def encode_metadata(meta) when is_list(meta) when is_map(meta) do 87 | meta 88 | |> encode_crash_reason() 89 | |> convert_mfa() 90 | |> convert_initial_call() 91 | |> traverse_convert() 92 | |> Map.drop(["report_cb", "erl_level"]) 93 | end 94 | 95 | @doc """ 96 | Adds formatted stacktrace to the metadata 97 | """ 98 | def encode_crash_reason(%{crash_reason: cr} = meta) when not is_nil(cr) do 99 | {_err, stacktrace} = cr 100 | 101 | meta 102 | |> Map.drop([:crash_reason]) 103 | |> Map.merge(%{stacktrace: Stacktrace.format(stacktrace)}) 104 | end 105 | 106 | def encode_crash_reason(meta), do: meta 107 | 108 | def convert_initial_call(%{initial_call: {m, f, a}} = meta) when is_integer(a) do 109 | %{meta | initial_call: {m, f, 
"#{a}"}} 110 | end 111 | 112 | def convert_initial_call(meta), do: meta 113 | 114 | def convert_mfa(%{mfa: {m, f, a}} = meta) when is_integer(a) do 115 | %{meta | mfa: {m, f, "#{a}"}} 116 | end 117 | 118 | def convert_mfa(meta), do: meta 119 | 120 | def traverse_convert(%NaiveDateTime{} = v), do: to_string(v) 121 | def traverse_convert(%DateTime{} = v), do: to_string(v) 122 | 123 | def traverse_convert(%{__struct__: _} = v) do 124 | v |> Map.from_struct() |> traverse_convert() 125 | end 126 | 127 | def traverse_convert(data) when is_map(data) do 128 | for {k, v} <- data, into: Map.new() do 129 | {traverse_convert(k), traverse_convert(v)} 130 | end 131 | end 132 | 133 | def traverse_convert(xs) when is_list(xs) do 134 | cond do 135 | Keyword.keyword?(xs) -> 136 | xs 137 | |> Enum.into(Map.new()) 138 | |> traverse_convert() 139 | 140 | length(xs) > 0 and List.ascii_printable?(xs) -> 141 | to_string(xs) 142 | 143 | true -> 144 | for x <- xs, do: traverse_convert(x) 145 | end 146 | end 147 | 148 | def traverse_convert(x) when is_tuple(x) do 149 | x |> Tuple.to_list() |> traverse_convert() 150 | end 151 | 152 | @doc """ 153 | All atoms are converted to strings for Logflare server to be able 154 | to safely convert binary to terms using :erlang.binary_to_term(binary, [:safe]) 155 | """ 156 | def traverse_convert(x) when is_boolean(x), do: x 157 | 158 | def traverse_convert(nil), do: nil 159 | 160 | def traverse_convert(x) when is_atom(x), do: Atom.to_string(x) 161 | 162 | def traverse_convert(x) when is_function(x), do: inspect(x) 163 | 164 | def traverse_convert(x) when is_pid(x) do 165 | x 166 | |> :erlang.pid_to_list() 167 | |> to_string() 168 | end 169 | 170 | def traverse_convert(x), do: x 171 | end 172 | -------------------------------------------------------------------------------- /lib/logflare_logger/stacktrace.ex: -------------------------------------------------------------------------------- 1 | defmodule LogflareLogger.Stacktrace do 2 | @moduledoc """ 3 | 
Handles stacktrace formatting for logged exceptions
  """

  @doc """
  Formats an Erlang stacktrace into a list of maps with module, file, line,
  function, arity and args fields (missing values become nil).
  """
  def format(stacktrace) when is_list(stacktrace) do
    for i <- stacktrace, do: i |> format_entry()
  end

  # Converts a single stacktrace entry tuple into a flat map.
  defp format_entry({mod, fun, arity_or_args, location}) do
    %{
      module: format_field(:module, mod),
      file: format_field(:file, location),
      line: format_field(:line, location),
      function: format_field(:function, fun, arity_or_args),
      arity: format_field(:arity, arity_or_args),
      args: format_field(:args, arity_or_args)
    }
  end

  # Empty or missing values normalize to nil for every field.
  defp format_field(_, ""), do: nil
  defp format_field(_, nil), do: nil

  # Atoms (module names) are stringified, then re-dispatched to the clause below.
  defp format_field(field, term) when is_atom(term) do
    format_field(field, to_string(term))
  end

  # Drops the "Elixir." prefix so module names read as written in source.
  defp format_field(:module, mod) when is_binary(mod) do
    String.replace_prefix(mod, "Elixir.", "")
  end

  defp format_field(field, []) when field in [:file, :line] do
    nil
  end

  defp format_field(:file, location) do
    case Keyword.get(location, :file) do
      nil -> nil
      x -> to_string(x)
    end
  end

  defp format_field(:line, location) do
    case Keyword.get(location, :line) do
      nil -> nil
      int when is_integer(int) -> int
      _ -> nil
    end
  end

  # The third MFA element is either an arity integer or the literal args list.
  defp format_field(:arity, arity) when is_integer(arity), do: arity
  defp format_field(:arity, _), do: nil

  defp format_field(:args, args) when is_list(args), do: inspect(args)
  defp format_field(:args, _), do: nil

  # Renders "fun/arity" when the arity is known, otherwise just "fun".
  defp format_field(:function, fun, args) do
    arity = format_field(:arity, args)

    if arity do
      "#{fun}/#{arity}"
    else
      "#{fun}"
    end
  end
end
-------------------------------------------------------------------------------- /lib/mix/tasks/verify_config.ex: --------------------------------------------------------------------------------
defmodule Mix.Tasks.LogflareLogger.VerifyConfig do
  alias LogflareLogger.CLI
  use Mix.Task

  @app :logflare_logger_backend
  @default_api_url "https://api.logflare.app"

  @impl Mix.Task
  def run(_args \\ []) do
    IO.puts("You are verifying config for the #{Mix.env()} environment")
    {:ok, _} = Application.ensure_all_started(:logflare_logger_backend)

    # App config takes precedence over the LOGFLARE_* environment variables.
    api_key = get_env(:api_key) || System.get_env("LOGFLARE_API_KEY")
    source_id = get_env(:source_id) || System.get_env("LOGFLARE_SOURCE_ID")
    url = get_env(:url) || System.get_env("LOGFLARE_URL") || @default_api_url

    CLI.throw_on_missing_api_key!(api_key)
    CLI.throw_on_missing_source!(source_id)
    CLI.throw_on_missing_url!(url)

    client = LogflareApiClient.new(%{api_key: api_key, url: url})

    timestamp = NaiveDateTime.utc_now() |> NaiveDateTime.to_iso8601() |> Kernel.<>("Z")

    result =
      LogflareApiClient.post_logs(
        client,
        [
          %{
            "message" => "LogflareLogger has been properly setup",
            "metadata" => %{},
            "level" => "info",
            "timestamp" => timestamp
          }
        ],
        source_id
      )

    case result do
      {:ok, %{status: status}} when status in 200..299 ->
        IO.puts("Logflare API endpoint responded ok, check your dashboard!")

      {:ok, %{status: status, body: body}} ->
        IO.puts("HTTP request to Logflare API endpoint returned an HTTP status code #{status}.")
        # inspect/1 is required: the decoded body may be a map, and interpolating
        # a map raises Protocol.UndefinedError (no String.Chars implementation).
        IO.puts("Response body is: #{inspect(body)}")

      {:error, tesla_env} ->
        IO.puts("Unknown Error")
        IO.inspect(tesla_env)
    end
  end

  # Reads a key from this library's application environment.
  def get_env(key) do
    Application.get_env(@app, key)
  end
end
-------------------------------------------------------------------------------- /lib/pending_log_event.ex: --------------------------------------------------------------------------------
defmodule LogflareLogger.PendingLoggerEvent do
  use Ecto.Schema
  import Ecto.Changeset

  schema "logger_events" do
    field :body, :map
    field :api_request_started_at,
:integer, default: 0
  end

  @doc """
  Builds a changeset for a pending logger event, normalizing deeply nested
  values inside `:body` so they can be stored and later serialized.
  """
  def changeset(struct, params) do
    struct
    |> cast(params, [:body, :api_request_started_at])
    |> update_change(:body, &fix_body/1)
  end

  # Normalizes every top-level value of the body map.
  defp fix_body(change) do
    Map.new(change, fn {k, v} -> {k, check_deep_struct(v)} end)
  end

  defp check_deep_struct(value) when is_map(value) do
    Map.new(value, fn {k, v} -> {k, check_deep_struct(v)} end)
  end

  # Lists containing mixed types are JSON-encoded element-wise (binaries kept
  # as-is) so the stored list is homogeneous.
  defp check_deep_struct(value) when is_list(value) do
    single_type? =
      value
      |> Enum.map(&type/1)
      |> Enum.uniq()
      |> then(&(length(&1) == 1))

    case single_type? do
      true ->
        value

      false ->
        Enum.map(value, fn
          v when is_binary(v) -> v
          v -> Jason.encode!(v)
        end)
    end
  end

  defp check_deep_struct(value), do: value

  # Note: an is_number/1 clause would be unreachable here — the integer and
  # float clauses already cover every number, so it has been removed.
  defp type(v) when is_map(v), do: :map
  defp type(v) when is_list(v), do: :list
  defp type(v) when is_integer(v), do: :integer
  defp type(v) when is_float(v), do: :float
  defp type(v) when is_binary(v), do: :binary
  defp type(v) when is_boolean(v), do: :boolean
  defp type(_), do: :other
end
-------------------------------------------------------------------------------- /lib/repo.ex: --------------------------------------------------------------------------------
defmodule LogflareLogger.Repo do
  use Ecto.Repo, otp_app: :logflare_logger_backend, adapter: Etso.Adapter
end
-------------------------------------------------------------------------------- /lib/utils.ex: --------------------------------------------------------------------------------
defmodule LogflareLogger.Utils do
  @moduledoc false

  # Metadata keys that the backend forwards by default.
  def default_metadata_keys do
    ~w[
      application
      module
      function
      file
      line
      pid
      crash_reason
      initial_call
      registered_name
      domain
      gl
      time
      mfa
    ]
  end

  def find_logflare_sys_envs() do
    envs = System.get_env()

    for {"LOGFLARE_" <> k, v} <- envs do
      # NOTE(review): String.to_atom/1 on env-derived input creates atoms at
      # runtime — acceptable here only because LOGFLARE_* variables are
      # operator-controlled; confirm no untrusted input can reach this.
      k = String.downcase(k) |> String.to_atom()
      v = if k == :level, do: String.to_atom(v), else: v

      {k, v}
    end
  end
end
-------------------------------------------------------------------------------- /mix.exs: --------------------------------------------------------------------------------
defmodule LogflareLogger.MixProject do
  use Mix.Project

  def project do
    [
      app: :logflare_logger_backend,
      version: "0.11.5-rc.0",
      elixir: "~> 1.8",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: description(),
      package: package(),
      name: "Logflare Logger Backend",
      source_url: "https://github.com/Logflare/logflare_logger_backend",
      homepage_url: "https://logflare.app",
      docs: [
        main: "readme",
        # logo: "path/to/logo.png",
        extras: ["README.md"]
      ]
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger],
      mod: {LogflareLogger.Application, []}
    ]
  end

  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:typed_struct, "~> 0.3.0"},
      {:bertex, "~> 1.3"},
      {:logflare_etso, "~> 1.1.2"},
      {:logflare_api_client, "~> 0.3.5"},

      # Test and Dev
      {:placebo, "~> 2.0", only: :test},
      {:ex_doc, "~> 0.29.4", only: :dev, runtime: false},
      {:mix_test_watch, "~> 1.0", only: :dev, runtime: false},
      {:bypass, "~> 2.0", only: :test}
    ]
  end

  defp description() do
    "Easily ship structured logs and log based metrics to Logflare with the Logflare Logger backend."
54 | end 55 | 56 | defp package() do 57 | [ 58 | links: %{"GitHub" => "https://github.com/Logflare/logflare_logger_backend"}, 59 | licenses: ["MIT"] 60 | ] 61 | end 62 | end 63 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "bertex": {:hex, :bertex, "1.3.0", "0ad0df9159b5110d9d2b6654f72fbf42a54884ef43b6b651e6224c0af30ba3cb", [:mix], [], "hexpm", "0a5d5e478bb5764b7b7bae37cae1ca491200e58b089df121a2fe1c223d8ee57a"}, 3 | "bypass": {:hex, :bypass, "2.1.0", "909782781bf8e20ee86a9cabde36b259d44af8b9f38756173e8f5e2e1fabb9b1", [:mix], [{:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:ranch, "~> 1.3", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "d9b5df8fa5b7a6efa08384e9bbecfe4ce61c77d28a4282f79e02f1ef78d96b80"}, 4 | "castore": {:hex, :castore, "1.0.2", "0c6292ecf3e3f20b7c88408f00096337c4bfd99bd46cc2fe63413ddbe45b3573", [:mix], [], "hexpm", "40b2dd2836199203df8500e4a270f10fc006cc95adc8a319e148dc3077391d96"}, 5 | "certifi": {:hex, :certifi, "2.5.3", "70bdd7e7188c804f3a30ee0e7c99655bc35d8ac41c23e12325f36ab449b70651", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "ed516acb3929b101208a9d700062d520f3953da3b6b918d866106ffa980e1c10"}, 6 | "cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"}, 7 | "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, 
{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"}, 8 | "cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"}, 9 | "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, 10 | "earmark": {:hex, :earmark, "1.4.3", "364ca2e9710f6bff494117dbbd53880d84bebb692dafc3a78eb50aa3183f2bfd", [:mix], [], "hexpm", "8cf8a291ebf1c7b9539e3cddb19e9cef066c2441b1640f13c34c1d3cfc825fec"}, 11 | "earmark_parser": {:hex, :earmark_parser, "1.4.32", "fa739a0ecfa34493de19426681b23f6814573faee95dfd4b4aafe15a7b5b32c6", [:mix], [], "hexpm", "b8b0dd77d60373e77a3d7e8afa598f325e49e8663a51bcc2b88ef41838cca755"}, 12 | "ecto": {:hex, :ecto, "3.10.3", "eb2ae2eecd210b4eb8bece1217b297ad4ff824b4384c0e3fdd28aaf96edd6135", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "44bec74e2364d491d70f7e42cd0d690922659d329f6465e89feb8a34e8cd3433"}, 13 | "etso": {:git, "https://github.com/Logflare/etso.git", "35500c76e3aecf9738a1ccc4c06b74035e425122", [tag: "v1.1.1"]}, 14 | "ex_doc": {:hex, :ex_doc, "0.29.4", "6257ecbb20c7396b1fe5accd55b7b0d23f44b6aa18017b415cb4c2b91d997729", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "2c6699a737ae46cb61e4ed012af931b57b699643b24dabe2400a8168414bc4f5"}, 15 | "file_system": 
{:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, 16 | "finch": {:hex, :finch, "0.16.0", "40733f02c89f94a112518071c0a91fe86069560f5dbdb39f9150042f44dcfb1a", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f660174c4d519e5fec629016054d60edd822cdfe2b7270836739ac2f97735ec5"}, 17 | "hackney": {:hex, :hackney, "1.17.0", "717ea195fd2f898d9fe9f1ce0afcc2621a41ecfe137fae57e7fe6e9484b9aa99", [:rebar3], [{:certifi, "~>2.5", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "64c22225f1ea8855f584720c0e5b3cd14095703af1c9fbc845ba042811dc671c"}, 18 | "hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"}, 19 | "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: 
"hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"}, 20 | "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, 21 | "logflare_api_client": {:hex, :logflare_api_client, "0.3.5", "c427ebf65a8402d68b056d4a5ef3e1eb3b90c0ad1d0de97d1fe23807e0c1b113", [:mix], [{:bertex, "~> 1.3", [hex: :bertex, repo: "hexpm", optional: false]}, {:finch, "~> 0.10", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:tesla, "~> 1.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm", "16d29abcb80c4f72745cdf943379da02a201504813c3aa12b4d4acb0302b7723"}, 22 | "logflare_etso": {:hex, :logflare_etso, "1.1.2", "040bd3e482aaf0ed20080743b7562242ec5079fd88a6f9c8ce5d8298818292e9", [:mix], [{:ecto, "~> 3.8", [hex: :ecto, repo: "hexpm", optional: false]}], "hexpm", "ab96be42900730a49b132891f43a9be1d52e4ad3ee9ed9cb92565c5f87345117"}, 23 | "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"}, 24 | "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, 25 | "makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", 
optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"}, 26 | "meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"}, 27 | "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, 28 | "mime": {:hex, :mime, "2.0.3", "3676436d3d1f7b81b5a2d2bd8405f412c677558c81b1c92be58c00562bb59095", [:mix], [], "hexpm", "27a30bf0db44d25eecba73755acf4068cbfe26a4372f9eb3e4ea3a45956bff6b"}, 29 | "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, 30 | "mint": {:hex, :mint, "1.5.1", "8db5239e56738552d85af398798c80648db0e90f343c8469f6c6d8898944fb6f", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "4a63e1e76a7c3956abd2c72f370a0d0aecddc3976dea5c27eccbecfa5e7d5b1e"}, 31 | "mix_test_watch": {:hex, :mix_test_watch, "1.1.0", "330bb91c8ed271fe408c42d07e0773340a7938d8a0d281d57a14243eae9dc8c3", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "52b6b1c476cbb70fd899ca5394506482f12e5f6b0d6acff9df95c7f1e0812ec3"}, 32 | "nimble_options": {:hex, :nimble_options, "1.0.2", "92098a74df0072ff37d0c12ace58574d26880e522c22801437151a159392270e", [:mix], [], "hexpm", "fd12a8db2021036ce12a309f26f564ec367373265b53e25403f0ee697380f1b8"}, 33 | "nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"}, 34 | "nimble_pool": {:hex, 
:nimble_pool, "1.0.0", "5eb82705d138f4dd4423f69ceb19ac667b3b492ae570c9f5c900bb3d2f50a847", [:mix], [], "hexpm", "80be3b882d2d351882256087078e1b1952a28bf98d0a287be87e4a24a710b67a"}, 35 | "parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"}, 36 | "placebo": {:hex, :placebo, "2.0.0", "c0e773dec77e941bcbcc14d10b759f2d66775aff9b75051f3e41939b64300e81", [:mix], [{:meck, "~> 0.9", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "e0872cec8848d7e59ba96396f45ee1ad34662c689c86ba6190694d38b4289844"}, 37 | "plug": {:hex, :plug, "1.14.2", "cff7d4ec45b4ae176a227acd94a7ab536d9b37b942c8e8fa6dfc0fff98ff4d80", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "842fc50187e13cf4ac3b253d47d9474ed6c296a8732752835ce4a86acdf68d13"}, 38 | "plug_cowboy": {:hex, :plug_cowboy, "2.6.1", "9a3bbfceeb65eff5f39dab529e5cd79137ac36e913c02067dba3963a26efe9b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "de36e1a21f451a18b790f37765db198075c25875c64834bcc82d90b309eb6613"}, 39 | "plug_crypto": {:hex, :plug_crypto, "1.2.5", "918772575e48e81e455818229bf719d4ab4181fcbf7f85b68a35620f78d89ced", [:mix], [], "hexpm", "26549a1d6345e2172eb1c233866756ae44a9609bd33ee6f99147ab3fd87fd842"}, 40 | "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, 41 | "ssl_verify_fun": {:hex, :ssl_verify_fun, 
"1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"}, 42 | "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, 43 | "tesla": {:hex, :tesla, "1.7.0", "a62dda2f80d4f8a925eb7b8c5b78c461e0eb996672719fe1a63b26321a5f8b4e", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "2e64f01ebfdb026209b47bc651a0e65203fcff4ae79c11efb73c4852b00dc313"}, 44 | "typed_struct": {:hex, :typed_struct, "0.3.0", "939789e3c1dca39d7170c87f729127469d1315dcf99fee8e152bb774b17e7ff7", [:mix], [], "hexpm", "c50bd5c3a61fe4e198a8504f939be3d3c85903b382bde4865579bc23111d1b6d"}, 45 | "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"}, 46 | } 47 | 
-------------------------------------------------------------------------------- /test/cache_test.exs: --------------------------------------------------------------------------------
defmodule LogflareLogger.BatchCacheTest do
  @moduledoc false
  use ExUnit.Case
  alias LogflareLogger.BatchCache

  # Minimal backend configuration the cache needs to accept events.
  @backend_config %{
    api_client: LogflareApiClient.new(%{url: "http://localhost:4000", api_key: ""}),
    source_id: "source-id",
    batch_max_size: 10
  }

  test "cache puts events, gets events and resets batch" do
    [ev1, ev2, ev3] = for n <- 1..3, do: %{metadata: %{}, message: "log#{n}"}

    # Two inserts succeed before the reset...
    assert BatchCache.put(ev1, @backend_config) === {:ok, :insert_successful}
    assert BatchCache.put(ev2, @backend_config) === {:ok, :insert_successful}

    BatchCache.clear()

    # ...and the cache keeps accepting events after being cleared.
    assert BatchCache.put(ev3, @backend_config) === {:ok, :insert_successful}
  end
end
-------------------------------------------------------------------------------- /test/exception_logging_test.exs: --------------------------------------------------------------------------------
defmodule LogflareLogger.ExceptionLoggingTest do
  @moduledoc false
  use ExUnit.Case
  alias LogflareLogger.HttpBackend
  require Logger
  use Placebo

  @logger_backend HttpBackend
  @api_key "l3kh47jsakf2370dasg"
  @source "source2354551"

  setup do
    # Configure the backend under test, then attach it to Logger.
    backend_env = [
      url: "http://127.0.0.1:4000",
      api_key: @api_key,
      source_id: @source,
      level: :info,
      flush_interval: 500,
      max_batch_size: 5
    ]

    for {key, value} <- backend_env do
      Application.put_env(:logflare_logger_backend, key, value)
    end

    Logger.add_backend(@logger_backend)

    :ok
  end

  test "logger backends sends a formatted log event after an exception" do
    allow(LogflareApiClient.post_logs(any(), any(), any()),
      return: {:ok, %Tesla.Env{status: 200}}
    )

    # Crash a few processes so crash_reason/stacktrace metadata is produced.
    for _ <- 1..3, do: spawn(fn -> 3.14 / 0 end)
    spawn(fn -> Enum.find(nil, & &1) end)

    Process.sleep(500)

    assert_called(
      LogflareApiClient.post_logs(
        any(),
        is(fn xs ->
          [
            %{
              "message" => _,
              "metadata" => %{
                "level" => "error",
                "context" => %{"pid" => _},
                "stacktrace" => [
                  %{
                    "arity" => _,
                    "args" => _,
                    "file" => _,
                    "line" => _,
                    "function" => _,
                    "module" => _
                  }
                  | _
                ]
              },
              "timestamp" => _
            }
            | _
          ] = xs

          true
        end),
        any()
      )
    )
  end
end
-------------------------------------------------------------------------------- /test/http_backend_gen_event_test.exs: --------------------------------------------------------------------------------
defmodule LogflareLogger.HttpBackendTest do
  use ExUnit.Case
  alias LogflareLogger.{HttpBackend, Formatter, BatchCache, Repo, PendingLoggerEvent}
  use Placebo

  @default_config [
    format: {Formatter, :format},
    min_level: :info,
    flush_interval: 300,
    url: "http://localhost:4000/logs/elixir/logger",
    source_id: "source",
    api_key: "api_key",
    batch_max_size: 10,
    metadata: []
  ]

  setup do
    on_exit(fn ->
      BatchCache.clear()
      Logger.flush()
    end)

    :ok
  end

  describe "HttpBackend.init/2" do
    test "succeeds with correct config" do
      {:ok, state} = init_with_default()
      assert state.level == :info
      assert_receive :flush, @default_config[:flush_interval] + 10
    end
  end

  describe "HttpBackend.handle_event/2" do
    test "flushes after :flush msg" do
      {:ok, state} = init_with_default()

      {:ok, _state} =
HttpBackend.handle_event(:flush, state)

      assert_receive :flush, @default_config[:flush_interval] + 10
    end

    test "new log message gets flushed within the interval" do
      {:ok, state} = init_with_default()
      msg = {:info, nil, {Logger, "log message", ts(0), []}}
      {:ok, _state} = HttpBackend.handle_event(msg, state)
      assert_receive :flush, @default_config[:flush_interval] + 10
    end

    test "flushes after batch reaches max_batch_size" do
      allow(LogflareApiClient.post_logs(any(), any(), any()), return: {:ok, %Tesla.Env{}})

      # Huge flush interval: only the batch-size trigger can cause the flush.
      {:ok, state} = init_with_default(flush_interval: 60_000)

      generate_logs(state, @default_config[:batch_max_size])

      Process.sleep(200)

      assert_called(
        LogflareApiClient.post_logs(
          any(),
          is(fn batch ->
            assert length(batch) == @default_config[:batch_max_size]
          end),
          any()
        ),
        once()
      )
    end

    test "flush not called if log events are in flight" do
      allow(LogflareApiClient.post_logs(any(), any(), any()), return: {:ok, %Tesla.Env{}})

      {:ok, state} = init_with_default(flush_interval: 60_000)

      generate_logs(state, @default_config[:batch_max_size] - 1)

      # Mark every pending event as in-flight so the flush must be skipped.
      for pending <- BatchCache.pending_events_not_in_flight() do
        pending
        |> PendingLoggerEvent.changeset(%{api_request_started_at: System.monotonic_time()})
        |> Repo.update()
      end

      generate_logs(state, @default_config[:batch_max_size])

      refute_called(LogflareApiClient.post_logs(any(), any(), any()))
    end
  end

  describe "HttpBackend.handle_info/2" do
    test "flushes after :flush msg" do
      {:ok, state} = init_with_default()
      {:ok, _state} = HttpBackend.handle_info(:flush, state)
      assert_receive :flush, @default_config[:flush_interval] + 10
    end
  end

  # Feeds `count` synthetic :info events into the backend under test.
  defp generate_logs(state, count) do
    for second <- 1..count do
      event = {:info, nil, {Logger, "log message", ts(second), []}}
      {:ok, _state} = HttpBackend.handle_event(event, state)
    end
  end

  defp init_with_default() do
    HttpBackend.init(HttpBackend, @default_config)
  end

  defp init_with_default(kw) do
    config = Keyword.merge(@default_config, kw)
    HttpBackend.init(HttpBackend, config)
  end

  # Builds an Erlang-style timestamp tuple with the given seconds value.
  defp ts(sec) do
    {{2019, 1, 1}, {0, 0, sec, 0}}
  end
end
-------------------------------------------------------------------------------- /test/integration_test.exs: --------------------------------------------------------------------------------
defmodule LogflareLogger.IntegrationTest do
  @moduledoc false
  use ExUnit.Case, async: false
  import ExUnit.CaptureLog

  alias LogflareLogger.{HttpBackend, TestUtils}
  require Logger

  @path LogflareApiClient.api_path()

  @logger_backend HttpBackend
  @api_key "l3kh47jsakf2370dasg"
  @source "source2354551"

  setup do
    bypass = Bypass.open()

    # Remember the previous app env so it can be restored on exit.
    url = Application.get_env(:logflare_logger_backend, :url)
    api_key = Application.get_env(:logflare_logger_backend, :api_key)
    source_id = Application.get_env(:logflare_logger_backend, :source_id)
    level = Application.get_env(:logflare_logger_backend, :level)
    flush_interval = Application.get_env(:logflare_logger_backend, :flush_interval)
    max_batch_size = Application.get_env(:logflare_logger_backend, :max_batch_size)

    Application.put_env(:logflare_logger_backend, :url, "http://127.0.0.1:#{bypass.port}")
    Application.put_env(:logflare_logger_backend, :api_key, @api_key)
    Application.put_env(:logflare_logger_backend, :source_id, @source)
    Application.put_env(:logflare_logger_backend, :level, :info)
    Application.put_env(:logflare_logger_backend, :flush_interval, 900)
    Application.put_env(:logflare_logger_backend, :max_batch_size, 100)

    Logger.add_backend(@logger_backend)

    on_exit(fn ->
LogflareLogger.context(test_context: nil)
      Logger.remove_backend(@logger_backend, flush: true)
      Application.put_env(:logflare_logger_backend, :url, url)
      Application.put_env(:logflare_logger_backend, :api_key, api_key)
      Application.put_env(:logflare_logger_backend, :source_id, source_id)
      Application.put_env(:logflare_logger_backend, :level, level)
      Application.put_env(:logflare_logger_backend, :flush_interval, flush_interval)
      Application.put_env(:logflare_logger_backend, :max_batch_size, max_batch_size)
    end)

    {:ok, bypass: bypass}
  end

  test "logger backend sends a POST request", %{bypass: bypass} do
    :ok = Logger.configure_backend(@logger_backend, metadata: [])
    log_msg = "Incoming log from test"
    LogflareLogger.context(test_context: %{some_metric: 1337})

    Bypass.expect(bypass, "POST", @path, fn conn ->
      {:ok, body, conn} = Plug.Conn.read_body(conn)

      assert {"x-api-key", @api_key} in conn.req_headers

      body = TestUtils.decode_logger_body(body)

      assert %{
               "batch" => [
                 %{
                   "message" => "Incoming log from test " <> _,
                   "metadata" => %{
                     "level" => level,
                     "context" => %{"pid" => _},
                     "test_context" => %{"some_metric" => 1337}
                   },
                   "timestamp" => _
                 }
                 | _
               ],
               "source" => @source
             } = body

      assert length(body["batch"]) == 10
      assert level in ["info", "error"]

      Plug.Conn.resp(conn, 200, "")
    end)

    capture_log(fn ->
      for n <- 1..10, do: Logger.info(log_msg <> " ##{n}")

      Process.sleep(1_000)

      for n <- 1..10, do: Logger.error(log_msg <> " ##{20 + n}")

      Process.sleep(1_000)

      # Debug is below the configured :info level, so these must not be posted.
      for n <- 1..10, do: Logger.debug(log_msg <> " ##{30 + n}")

      Process.sleep(1_000)
    end)
  end

  test "doesn't POST log events with a lower level", %{bypass: _bypass} do
    log_msg = "Incoming log from test"

    :ok = Logger.debug(log_msg)
  end

  @msg "Incoming log from test with all metadata"
  test "correctly handles metadata keys", %{bypass: bypass} do
    Bypass.expect_once(bypass, "POST", @path, fn conn ->
      {:ok, body, conn} = Plug.Conn.read_body(conn)

      body = TestUtils.decode_logger_body(body)

      assert %{
               "batch" => [
                 %{
                   "message" => @msg,
                   "metadata" => %{
                     "level" => "info",
                     "context" => %{
                       "pid" => _pidbinary,
                       "module" => _,
                       "file" => _,
                       "line" => _,
                       "function" => _
                     },
                     "test_context" => _
                   },
                   "timestamp" => _
                 }
                 | _
               ],
               "source" => @source
             } = body

      assert length(body["batch"]) == 45

      Plug.Conn.resp(conn, 200, "")
    end)

    :ok = Logger.configure_backend(@logger_backend, metadata: :all)
    LogflareLogger.context(test_context: %{some_metric: 7331})
    for _n <- 1..45, do: Logger.info(@msg)

    Process.sleep(1_000)
  end
end
-------------------------------------------------------------------------------- /test/log_params_test.exs: --------------------------------------------------------------------------------
defmodule LogflareLogger.LogParamsTest do
  @moduledoc false
  use ExUnit.Case
  alias LogflareLogger.{LogParams}
  require Logger
  use Placebo

  describe "LogParams conversion" do
    test "tuples to lists" do
      x = %{tuples: {1, "tuple1", {2, "tuple2", {3, "tuple3"}}}}
      user_context = build_user_context(x)

      assert user_context === %{"tuples" => [1, "tuple1", [2, "tuple2", [3, "tuple3"]]]}
    end

    test "structs to maps" do
      x = %Time{hour: 0, minute: 0, second: 0}
      user_context = build_user_context(%{struct: x})

      assert user_context === %{
               "struct" => %{
                 "calendar" => "Elixir.Calendar.ISO",
                 "hour" => 0,
                 "microsecond" => [0, 0],
                 "minute" => 0,
"second" => 0 27 | } 28 | } 29 | end 30 | 31 | test "charlists to strings" do 32 | x = ~c"just a simple charlist" 33 | user_context = build_user_context(%{charlist: %{x => [x, %{x => {x, x}}]}}) 34 | 35 | x = to_string(x) 36 | assert user_context === %{"charlist" => %{x => [x, %{x => [x, x]}]}} 37 | end 38 | 39 | test "keywords to maps" do 40 | x = [a: 2, b: [a: 6]] 41 | user_context = build_user_context(%{keyword: %{1 => [x, %{two: {x, x}}]}}) 42 | 43 | x = %{"a" => 2, "b" => %{"a" => 6}} 44 | assert user_context === %{"keyword" => %{1 => [x, %{"two" => [x, x]}]}} 45 | end 46 | 47 | test "pid to string" do 48 | user_context = build_user_context(user_pid: self()) 49 | 50 | %{"user_pid" => pid} = user_context 51 | assert is_binary(pid) 52 | end 53 | 54 | test "function to string" do 55 | user_context = 56 | build_user_context(user_field: %{error_response: [:invalid, &String.to_atom/1]}) 57 | 58 | %{"user_field" => %{"error_response" => [invalid, fun]}} = user_context 59 | assert fun == "&String.to_atom/1" 60 | assert invalid == "invalid" 61 | end 62 | 63 | test "NaiveDateTime and DateTime to String.Chars protocol" do 64 | {:ok, ndt} = NaiveDateTime.new(1337, 4, 19, 0, 0, 0) 65 | 66 | user_context = 67 | build_user_context( 68 | datetimes: %{ 69 | ndt: ndt, 70 | dt: DateTime.from_naive!(ndt, "Etc/UTC") 71 | } 72 | ) 73 | 74 | assert user_context == %{ 75 | "datetimes" => %{"dt" => "1337-04-19 00:00:00Z", "ndt" => "1337-04-19 00:00:00"} 76 | } 77 | end 78 | end 79 | 80 | describe "LogParams doesn't convert" do 81 | test "booleans" do 82 | x = %{true: {true, [true]}} 83 | user_context = build_user_context(x) 84 | 85 | assert user_context === %{true: [true, [true]]} 86 | end 87 | end 88 | 89 | describe "LogParams" do 90 | test "correctly encodes timestamp datetimes without millis" do 91 | {date, time} = :calendar.local_time() 92 | {hour, minute, second} = time 93 | 94 | utc = %{NaiveDateTime.utc_now() | microsecond: {0, 6}} 95 | 96 | utcstring = 
NaiveDateTime.to_iso8601(utc, :extended) <> "Z" 97 | 98 | assert utcstring == LogParams.encode_timestamp({date, time}) 99 | end 100 | 101 | test "correctly encdoes timestamp datetimes with millis" do 102 | {date, time} = :calendar.local_time() 103 | {hour, minute, second} = time 104 | 105 | utc = %{NaiveDateTime.utc_now() | microsecond: {314_000, 6}} 106 | utcstring = NaiveDateTime.to_iso8601(utc, :extended) <> "Z" 107 | {millis, _} = utc.microsecond 108 | 109 | assert utcstring == 110 | LogParams.encode_timestamp({date, {hour, minute, second, round(millis / 1000)}}) 111 | end 112 | 113 | test "handles report_cb" do 114 | metadata = [report: %{}, report_cb: fn x -> x end, level: :info] 115 | timestamp = :calendar.universal_time() 116 | lp = LogParams.encode(timestamp, :info, "test message", metadata) 117 | 118 | assert %{ 119 | "message" => "test message", 120 | "metadata" => %{ 121 | "context" => %{"vm" => %{"node" => "nonode@nohost"}}, 122 | "level" => "info", 123 | "report" => %{} 124 | }, 125 | "timestamp" => _ 126 | } = lp 127 | end 128 | 129 | test "puts level field in metadata" do 130 | timestamp = :calendar.universal_time() 131 | lp = LogParams.encode(timestamp, :info, "test message", level: "nope") 132 | 133 | assert lp["metadata"]["level"] == "info" 134 | end 135 | 136 | test "vm and node data is present in system context" do 137 | timestamp = :calendar.universal_time() 138 | lp = LogParams.encode(timestamp, :info, "test message", level: "nope") 139 | 140 | assert lp["metadata"]["context"]["vm"]["node"] == "#{Node.self()}" 141 | end 142 | end 143 | 144 | defp build_user_context(metadata) do 145 | timestamp = :calendar.universal_time() 146 | 147 | LogParams.encode(timestamp, :info, "test message", metadata) 148 | |> Map.get("metadata") 149 | |> Map.drop(["context", "level"]) 150 | end 151 | end 152 | -------------------------------------------------------------------------------- /test/logflare_logger_test.exs: 
defmodule LogflareLoggerTest do
  @moduledoc false
  alias LogflareLogger.HttpBackend
  use ExUnit.Case
  import LogflareLogger
  doctest LogflareLogger
  use Placebo
  require Logger

  @logger_backend HttpBackend
  @api_key "l3kh47jsakf2370dasg"
  @source "source2354551"

  setup_all do
    Application.put_env(:logflare_logger_backend, :url, "http://127.0.0.1:4000")
    Application.put_env(:logflare_logger_backend, :api_key, @api_key)
    Application.put_env(:logflare_logger_backend, :source_id, @source)
    Application.put_env(:logflare_logger_backend, :level, :info)
    Application.put_env(:logflare_logger_backend, :flush_interval, 100)
    Application.put_env(:logflare_logger_backend, :max_batch_size, 2)

    case Logger.add_backend(@logger_backend) do
      {:ok, _pid} -> :noop
      {:error, :already_present} -> :noop
      {:error, err} -> throw(err)
    end

    on_exit(&LogflareLogger.reset_context/0)
    :ok
  end

  describe "debug, info, warn, error functions" do
    test "uses same configuration as Logger functions" do
      allow(LogflareApiClient.new(any()), return: %Tesla.Client{})

      allow(LogflareApiClient.post_logs(any(), any(), any()),
        return: {:ok, %Tesla.Env{status: 200}}
      )

      LogflareLogger.context(%{context_key: [:context_value, 1, "string"]})
      Logger.bare_log(:info, "msg", data: %{a: 1})
      LogflareLogger.info("msg", data: %{a: 1})

      Process.sleep(200)

      # Both events must land in the same batch and be identical apart from
      # timestamps and Logger-injected context keys (domain/gl/time).
      assert_called(
        LogflareApiClient.post_logs(
          any(),
          is(fn [logger, logflare_logger] ->
            assert Map.drop(logger["metadata"]["context"], ~w[domain gl time]) ==
                     Map.drop(logflare_logger["metadata"]["context"], ~w[domain gl time])

            assert Map.drop(logger["metadata"], ~w[context]) ==
                     Map.drop(logflare_logger["metadata"], ~w[context])

            assert Map.drop(logger, ~w[metadata timestamp]) ==
                     Map.drop(logflare_logger, ~w[metadata timestamp])
          end),
          any()
        )
      )
    end
  end

  describe "Context" do
    test "gets, sets and unsets one context key" do
      assert context() == %{}

      assert context(advanced_logging: true) == %{advanced_logging: true}
      assert context(advanced_logging: false) == %{advanced_logging: false}

      assert context(simple_logging: true) == %{
               simple_logging: true,
               advanced_logging: false
             }

      assert context() == %{simple_logging: true, advanced_logging: false}

      # Setting a key to nil removes it from the context.
      context(simple_logging: nil)
      context(advanced_logging: nil)
      assert context() == %{}
    end

    test "gets, sets and unsets multiple context keys" do
      assert context() == %{}

      assert context(key1: 1, key2: 2) == %{key1: 1, key2: 2}
      assert context(key2: 3, key4: 4) == %{key1: 1, key2: 3, key4: 4}
      assert context() == %{key1: 1, key2: 3, key4: 4}

      reset_context()
      assert context() == %{}
    end

    test "set context raises for invalid values" do
      assert_raise FunctionClauseError, fn ->
        context(11.11)
      end

      assert_raise FunctionClauseError, fn ->
        context("false")
      end

      assert_raise FunctionClauseError, fn ->
        context(1_000)
      end
    end
  end
end

# ── test/payload_cases_test.exs ───────────────────────────────────────────

defmodule LogflareLogger.PayloadCasesTest do
  @moduledoc false
  use ExUnit.Case, async: false
  alias LogflareLogger.HttpBackend
  require Logger
  use Placebo

  @logger_backend HttpBackend
  @api_key "test_api_key"
  @source "dad2a85c-683e-4150-abf1-f3001cf39e57"

  setup do
    # Snapshot the current config so on_exit can restore it.
    url = Application.get_env(:logflare_logger_backend, :url)
    api_key = Application.get_env(:logflare_logger_backend, :api_key)
    source_id = Application.get_env(:logflare_logger_backend, :source_id)
    level = Application.get_env(:logflare_logger_backend, :level)
    flush_interval = Application.get_env(:logflare_logger_backend, :flush_interval)
    max_batch_size = Application.get_env(:logflare_logger_backend, :max_batch_size)

    Application.put_env(:logflare_logger_backend, :url, "http://127.0.0.1:4000")
    Application.put_env(:logflare_logger_backend, :api_key, @api_key)
    Application.put_env(:logflare_logger_backend, :source_id, @source)
    Application.put_env(:logflare_logger_backend, :level, :info)
    Application.put_env(:logflare_logger_backend, :flush_interval, 900)
    Application.put_env(:logflare_logger_backend, :max_batch_size, 100)

    # Surface backend-add failures instead of silently ignoring them,
    # mirroring the handling in LogflareLoggerTest.setup_all/0.
    case Logger.add_backend(@logger_backend) do
      {:ok, _pid} -> :noop
      {:error, :already_present} -> :noop
      {:error, err} -> throw(err)
    end

    on_exit(fn ->
      LogflareLogger.context(test_context: nil)
      Logger.remove_backend(@logger_backend, flush: true)
      Application.put_env(:logflare_logger_backend, :url, url)
      Application.put_env(:logflare_logger_backend, :api_key, api_key)
      Application.put_env(:logflare_logger_backend, :source_id, source_id)
      Application.put_env(:logflare_logger_backend, :level, level)
      Application.put_env(:logflare_logger_backend, :flush_interval, flush_interval)
      Application.put_env(:logflare_logger_backend, :max_batch_size, max_batch_size)
    end)

    :ok
  end

  describe "payload edge cases" do
    test "simple tuple" do
      allow(LogflareApiClient.new(any()), return: %Tesla.Client{})

      allow(LogflareApiClient.post_logs(any(), any(), any()),
        return: {:ok, %Tesla.Env{status: 200}}
      )

      members = ["chase", "bob", "drew"]

      # Tuples are not JSON-encodable; the backend must convert them.
      Logger.info("Test list!",
        test_list: List.to_tuple(members)
      )

      Process.sleep(500)
      assert_called(LogflareApiClient.post_logs(any(), any(), any()))
    end
  end
end
# ── test/pending_log_event_test.exs ───────────────────────────────────────

defmodule LogflareLogger.PendingLoggerEventTest do
  use ExUnit.Case
  alias LogflareLogger.PendingLoggerEvent

  describe "changeset/2" do
    test "list with different types handled in metadata" do
      # A mixed-type list nested in metadata: the changeset is expected to
      # normalize every element to a binary (JSON-encoding non-strings).
      mixed_body = %{
        "metadata" => %{
          "conn" => %{
            "adapter" => [
              "Elixir.Plug.Cowboy.Conn",
              %{"peer" => [[127, 0, 0, 1], 60164]}
            ]
          }
        }
      }

      changeset = PendingLoggerEvent.changeset(%PendingLoggerEvent{}, %{body: mixed_body})

      adapter = get_in(changeset.changes.body, ["metadata", "conn", "adapter"])
      assert Enum.all?(adapter, &is_binary/1)

      assert changeset.changes.body == %{
               "metadata" => %{
                 "conn" => %{
                   "adapter" => ["Elixir.Plug.Cowboy.Conn", "{\"peer\":[[127,0,0,1],60164]}"]
                 }
               }
             }

      # A homogeneous list passes through untouched.
      uniform_body =
        %{"metadata" => %{"conn" => %{"adapter" => ["Elixir.Plug.Cowboy.Conn", "normal"]}}}

      changeset = PendingLoggerEvent.changeset(%PendingLoggerEvent{}, %{body: uniform_body})
      assert changeset.changes.body == uniform_body

      # A plain map (no list) also passes through untouched.
      map_body = %{"metadata" => %{"conn" => %{"adapter" => %{"a" => "b", "c" => "d"}}}}
      changeset = PendingLoggerEvent.changeset(%PendingLoggerEvent{}, %{body: map_body})
      assert changeset.changes.body == map_body
    end
  end
end

# ── test/support/utils.ex ─────────────────────────────────────────────────

defmodule LogflareLogger.TestUtils do
  # Decodes a request body produced by the HTTP backend:
  # gzip-compressed, BERT-encoded (Bertex) payload.
  def decode_logger_body(body) do
    body
    |> :zlib.gunzip()
    |> Bertex.safe_decode()
  end
end

# ── test/test_helper.exs ──────────────────────────────────────────────────
# Fixed seed keeps the (order-sensitive) suite deterministic; integration
# tests are excluded by default and must be opted into explicitly.
# Consolidated the two ExUnit.configure/1 calls (one was made after
# ExUnit.start/0) into a single call.
ExUnit.configure(seed: 1337, exclude: [integration: true])
ExUnit.start()

Application.ensure_all_started(:bypass)