├── test
    ├── test_helper.exs
    └── exometer
    │   ├── newrelic_reporter_test.exs
    │   ├── newrelic_reporter_collector_test.exs
    │   └── newrelic_reporter_transformer_test.exs
├── .travis.yml
├── lib
    ├── exometer
    │   ├── supervisor.ex
    │   ├── newrelic_reporter
    │   │   ├── reporter_sup.ex
    │   │   ├── reporter.ex
    │   │   ├── collector.ex
    │   │   ├── transformer.ex
    │   │   └── request.ex
    │   └── newrelic_reporter.ex
    └── map_utils.ex
├── mix.exs
├── .gitignore
├── config
    └── config.exs
├── mix.lock
└── README.md
/test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: elixir 2 | 3 | elixir: 4 | - 1.4.2 5 | 6 | otp_release: 7 | - 19.2 8 | 9 | sudo: required 10 | 11 | services: 12 | - docker 13 | 14 | script: 15 | - mix test --trace 16 | -------------------------------------------------------------------------------- /lib/exometer/supervisor.ex: -------------------------------------------------------------------------------- 1 | defmodule Exometer.NewrelicReporter.Supervisor do 2 | use Supervisor 3 | 4 | def start_link do 5 | :supervisor.start_link(__MODULE__, []) 6 | end 7 | 8 | def init([]) do 9 | children = [ 10 | # Define workers and child supervisors to be supervised 11 | # worker(Spacesuit.Worker, []) 12 | ] 13 | 14 | # See http://elixir-lang.org/docs/stable/Supervisor.Behaviour.html 15 | # for other strategies and supported options 16 | supervise(children, strategy: :one_for_one) 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /lib/map_utils.ex: -------------------------------------------------------------------------------- 1 | # Deep Merge maps 2 | # http://stackoverflow.com/questions/38864001/elixir-how-to-deep-merge-maps 3 | defmodule MapUtils do 4 | def deep_merge(left, right) do 5 | Map.merge(left, right, &deep_resolve/3) 6 | end 7 | 8 | # Key exists in both maps, and both values are maps as well. 9 | # These can be merged recursively. 10 | defp deep_resolve(_key, left = %{}, right = %{}) do 11 | deep_merge(left, right) 12 | end 13 | 14 | # Key exists in both maps, but at least one of the values is 15 | # NOT a map. We fall back to standard merge behavior, preferring 16 | # the value on the right. 
17 | defp deep_resolve(_key, _left, right) do 18 | right 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /test/exometer/newrelic_reporter_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExometerNewrelicReporterTest do 2 | use ExUnit.Case 3 | doctest Exometer.NewrelicReporter 4 | 5 | alias Exometer.NewrelicReporter 6 | 7 | test "it starts the supervisor if a New Relic license key is present" do 8 | NewrelicReporter.exometer_init( 9 | [license_key: "empty", application_name: "beowulf"] 10 | ) 11 | 12 | pid = Process.whereis(NewrelicReporter.ReporterSupervisor) 13 | assert pid != nil 14 | end 15 | 16 | test "it does not start the supervisor if not configured" do 17 | NewrelicReporter.exometer_init([license_key: "empty"]) 18 | 19 | assert Process.whereis(NewrelicReporter.ReporterSupervisor) == nil 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Exometer.NewrelicReporter.Mixfile do 2 | use Mix.Project 3 | 4 | def project do 5 | [app: :exometer_newrelic_reporter, 6 | version: "0.1.0", 7 | elixir: "~> 1.4", 8 | build_embedded: Mix.env == :prod, 9 | start_permanent: Mix.env == :prod, 10 | test_coverage: [tool: ExCoveralls], 11 | deps: deps()] 12 | end 13 | 14 | # Configuration for the OTP application 15 | # 16 | # Type "mix help compile.app" for more information 17 | def application do 18 | [applications: [:httpoison, :logger], 19 | mod: {Exometer.NewrelicReporter, []}] 20 | end 21 | 22 | defp deps do 23 | [ 24 | {:httpoison, "~> 0.9.0"}, 25 | {:poison, "~> 2.0"}, 26 | {:excoveralls, "~> 0.6", only: :test} 27 | ] 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.gitignore.io/api/vim,osx,elixir 2 | 3 | ### Vim ### 4 | # swap 5 | *.s[a-w][a-z] 6 | [._]s[a-w][a-z] 7 | # session 8 | Session.vim 9 | # temporary 10 | .netrwhist 11 | *~ 12 | # auto-generated tag files 13 | tags 14 | 15 | 16 | ### OSX ### 17 | *.DS_Store 18 | .AppleDouble 19 | .LSOverride 20 | 21 | # Icon must end with two \r 22 | Icon 23 | 24 | 25 | # Thumbnails 26 | ._* 27 | 28 | # Files that might appear in the root of a volume 29 | .DocumentRevisions-V100 30 | .fseventsd 31 | .Spotlight-V100 32 | .TemporaryItems 33 | .Trashes 34 | .VolumeIcon.icns 35 | .com.apple.timemachine.donotpresent 36 | 37 | # Directories potentially created on remote AFP share 38 | .AppleDB 39 | .AppleDesktop 40 | Network Trash Folder 41 | Temporary Items 42 | .apdisk 43 | 44 | 45 | ### Elixir ### 46 | /_build 47 | /cover 48 | /deps 49 | erl_crash.dump 50 | *.ez 51 | -------------------------------------------------------------------------------- /lib/exometer/newrelic_reporter/reporter_sup.ex: -------------------------------------------------------------------------------- 1 | defmodule Exometer.NewrelicReporter.ReporterSupervisor do 2 | require Logger 3 | use Supervisor 4 | 5 | require IEx 6 | 7 | alias Exometer.NewrelicReporter.{Collector, Reporter} 8 | 9 | def start_link(opts) do 10 | Supervisor.start_link(__MODULE__, opts, name: __MODULE__) 11 | end 12 | 13 | def init(opts) do 14 | case Keyword.fetch(opts, :license_key) do 15 | {:ok, _} -> supervise_children(opts) 16 | _ -> {:stop, :error, opts} 17 | end 18 | end 
19 | 20 | defp supervise_children(opts) do 21 | worker_opts = [ restart: :permanent ] 22 | 23 | children = [ 24 | worker(Collector, [opts], worker_opts), 25 | worker(Reporter, [opts], worker_opts) 26 | ] 27 | 28 | sup_opts = [ 29 | strategy: :one_for_one, 30 | max_restarts: 5, 31 | max_seconds: 5, 32 | name: Collector.Supervisor 33 | ] 34 | 35 | supervise(children, sup_opts) 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | # This file is responsible for configuring your application 2 | # and its dependencies with the aid of the Mix.Config module. 3 | use Mix.Config 4 | 5 | # This configuration is loaded before any dependency and is restricted 6 | # to this project. If another project depends on this project, this 7 | # file won't be loaded nor affect the parent project. For this reason, 8 | # if you want to provide default values for your application for 9 | # 3rd-party users, it should be done in your "mix.exs" file. 10 | 11 | # You can configure for your application as: 12 | # 13 | # config :exometer_newrelic_reporter, key: :value 14 | # 15 | # And access this configuration in your application as: 16 | # 17 | # Application.get_env(:exometer_newrelic_reporter, :key) 18 | # 19 | # Or configure a 3rd-party app: 20 | # 21 | # config :logger, level: :info 22 | # 23 | 24 | # It is also possible to import configuration files, relative to this 25 | # directory. For example, you can emulate configuration per environment 26 | # by uncommenting the line below and defining dev.exs, test.exs and such. 27 | # Configuration from the imported file will override the ones defined 28 | # here (which is why it is important to import them last). 29 | # 30 | # import_config "#{Mix.env}.exs" 31 | -------------------------------------------------------------------------------- /lib/exometer/newrelic_reporter.ex: -------------------------------------------------------------------------------- 1 | defmodule Exometer.NewrelicReporter do 2 | require Logger 3 | use Application 4 | 5 | alias Exometer.NewrelicReporter.{Collector, ReporterSupervisor, Supervisor} 6 | 7 | def start(_type, _opts) do 8 | Supervisor.start_link() 9 | end 10 | 11 | @doc """ 12 | Entrypoint to our reporter, invoked by Exometer with configuration options. 13 | """ 14 | def exometer_init(opts) do 15 | Logger.info "New Relic plugin starting with opts: #{inspect(opts)}" 16 | # This is the first place we have access to the configuration 17 | # so we start the supervisor here 18 | if Keyword.get(opts, :license_key) && Keyword.get(opts, :application_name) do 19 | ReporterSupervisor.start_link(opts) 20 | else 21 | Logger.warn "Missing New Relic license key or application name, skipping startup!" 22 | end 23 | {:ok, opts} 24 | end 25 | 26 | @doc """ 27 | Invoked by Exometer when there is new data to report. 
28 | """ 29 | def exometer_report(metric, data_point, _extra, values, settings) do 30 | Collector.collect(metric, data_point, values, settings) 31 | {:ok, settings} 32 | end 33 | 34 | def exometer_call(_, _, opts), do: {:ok, opts} 35 | def exometer_cast(_, opts), do: {:ok, opts} 36 | def exometer_info(_, opts), do: {:ok, opts} 37 | def exometer_newentry(_, opts), do: {:ok, opts} 38 | def exometer_setopts(_, _, _, opts), do: {:ok, opts} 39 | def exometer_subscribe(_, _, _, _, opts), do: {:ok, opts} 40 | def exometer_terminate(_, _), do: nil 41 | def exometer_unsubscribe(_, _, _, opts), do: {:ok, opts} 42 | end 43 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{"certifi": {:hex, :certifi, "0.4.0", "a7966efb868b179023618d29a407548f70c52466bf1849b9e8ebd0e34b7ea11f", [:rebar3], []}, 2 | "excoveralls": {:hex, :excoveralls, "0.6.2", "0e993d096f1fbb6e70a3daced5c89aac066bda6bce57829622aa2d1e2b338cfb", [:mix], [{:exjsx, "~> 3.0", [hex: :exjsx, optional: false]}, {:hackney, ">= 0.12.0", [hex: :hackney, optional: false]}]}, 3 | "exjsx": {:hex, :exjsx, "3.2.1", "1bc5bf1e4fd249104178f0885030bcd75a4526f4d2a1e976f4b428d347614f0f", [:mix], [{:jsx, "~> 2.8.0", [hex: :jsx, optional: false]}]}, 4 | "hackney": {:hex, :hackney, "1.6.1", "ddd22d42db2b50e6a155439c8811b8f6df61a4395de10509714ad2751c6da817", [:rebar3], [{:certifi, "0.4.0", [hex: :certifi, optional: false]}, {:idna, "1.2.0", [hex: :idna, optional: false]}, {:metrics, "1.0.1", [hex: :metrics, optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, optional: false]}, {:ssl_verify_fun, "1.1.0", [hex: :ssl_verify_fun, optional: false]}]}, 5 | "httpoison": {:hex, :httpoison, "0.9.0", "68187a2daddfabbe7ca8f7d75ef227f89f0e1507f7eecb67e4536b3c516faddb", [:mix], [{:hackney, "~> 1.6.0", [hex: :hackney, optional: false]}]}, 6 | "idna": {:hex, :idna, "1.2.0", "ac62ee99da068f43c50dc69acf700e03a62a348360126260e87f2b54eced86b2", [:rebar3], []}, 7 | "jsx": {:hex, :jsx, "2.8.2", "7acc7d785b5abe8a6e9adbde926a24e481f29956dd8b4df49e3e4e7bcc92a018", [:mix, :rebar3], []}, 8 | "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], []}, 9 | "mimerl": {:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], []}, 10 | "poison": {:hex, :poison, "2.2.0", "4763b69a8a77bd77d26f477d196428b741261a761257ff1cf92753a0d4d24a63", [:mix], []}, 11 | "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.0", "edee20847c42e379bf91261db474ffbe373f8acb56e9079acb6038d4e0bf414f", [:make, :rebar], []}} 12 | -------------------------------------------------------------------------------- /test/exometer/newrelic_reporter_collector_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExometerNewrelicReporterCollectorTest do 2 | use ExUnit.Case 3 | doctest Exometer.NewrelicReporter.Collector 4 | 5 | alias Exometer.NewrelicReporter.Collector 6 | 7 | setup_all do 8 | Collector.start_link() 9 | :ok 10 | end 11 | 12 | test "Updating storage works" do 13 | expected = %{50 => [{1489428062, 63742}], 14 | 75 => [{1489428062, 91211}], 90 => [{1489428062, 180538}], 15 | 95 => [{1489428062, 185563}], 99 => [{1489428062, 196066}], 16 | 999 => [{1489428062, 196066}], :max => [{1489428062, 196066}], 17 | :mean => [{1489428062, 84964}], 18 | :median => [{1489428062, 63742}], 19 | :min => [{1489428062, 48818}], :n => [{1489428062, 56}]} 
20 | 21 | for {key, values} <- [ {50, 63742}, {75, 91211}, {90, 180538}, {95, 185563}, 22 | {99, 196066}, {999, 196066}, {:max, 196066}, {:mean, 84964}, {:median, 63742}, 23 | {:min, 48818}, {:n, 56} ] do 24 | Collector.collect([:elixometer, :timers, :timed, "proxyHandler-handle"], key, values, %{}) 25 | end 26 | 27 | assert %{"timed": %{"proxyHandler-handle" => timings}} = Collector.peek() 28 | assert Map.keys(timings) == Map.keys(expected) 29 | Enum.each(timings, fn {key, values} -> 30 | assert [{time, value}] = values 31 | assert is_integer(time) 32 | assert is_integer(value) 33 | [{_, expected_value}] = expected[key] 34 | assert expected_value == value 35 | end) 36 | 37 | end 38 | 39 | test "Can support multiple metrics in the same storage" do 40 | for {key, values} <- [ {50, 63742}, {75, 91211}, {90, 180538} ] do 41 | Collector.collect([:elixometer, :timers, :timed, "proxyHandler-handle"], key, values, %{}) 42 | Collector.collect([:elixometer, :timers, :timed, "anotherMetric"], key, values, %{}) 43 | end 44 | 45 | assert %{"timed": %{"proxyHandler-handle" => _}} = Collector.peek() 46 | assert %{"timed": %{"anotherMetric" => _}} = Collector.peek() 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /test/exometer/newrelic_reporter_transformer_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExometerNewrelicReporterTransformerTest do 2 | use ExUnit.Case 3 | doctest Exometer.NewrelicReporter.Transformer 4 | 5 | alias Exometer.NewrelicReporter.Transformer 6 | 7 | setup_all do 8 | %{data: %{timed: %{"proxyHandler-handle" => %{50 => [{1487685123, 63742}], 9 | 75 => [{1487685123, 91211}], 90 => [{1487685123, 180538}], 10 | 95 => [{1487685123, 185563}], 99 => [{1487685123, 196066}], 11 | 999 => [{1487685123, 196066}], :max => [{1487685123, 196066}], 12 | :mean => [{1487685123, 84964}], :median => [{1487685123, 63742}], 13 | :min => [{1487685123, 48818}], :n => [{1487685123, 56}]}}}, 14 | min: 48818, 15 | max: 196066, 16 | mean: 84964, 17 | count: 56 18 | } 19 | end 20 | 21 | test "Transformation works properly", context do 22 | expected = [[%{name: "proxyHandler/handle/50", scope: ""}, [63742, 0, 0, 0, 0, 0]], 23 | [%{name: "proxyHandler/handle/75", scope: ""}, [91211, 0, 0, 0, 0, 0]], 24 | [%{name: "proxyHandler/handle/90", scope: ""}, [180538, 0, 0, 0, 0, 0]], 25 | [%{name: "proxyHandler/handle/95", scope: ""}, [185563, 0, 0, 0, 0, 0]], 26 | [%{name: "proxyHandler/handle/99", scope: ""}, [196066, 0, 0, 0, 0, 0]], 27 | [%{name: "proxyHandler/handle/999", scope: ""}, [196066, 0, 0, 0, 0, 0]], 28 | [%{name: "proxyHandler/handle/max", scope: ""}, [196066, 0, 0, 0, 0, 0]], 29 | [%{name: "proxyHandler/handle/mean", scope: ""}, [84964, 0, 0, 0, 0, 0]], 30 | [%{name: "proxyHandler/handle/median", scope: ""}, [63742, 0, 0, 0, 0, 0]], 31 | [%{name: "proxyHandler/handle/min", scope: ""}, [48818, 0, 0, 0, 0, 0]], 32 | [%{name: "proxyHandler/handle/n", scope: ""}, [56, 0, 0, 0, 0, 0]]] 33 | 34 | assert expected == Transformer.transform(context[:data]) 35 | end 36 | 37 | test "Synthesizing New Relic metrics", context do 38 | %{min: min, max: max, mean: mean, count: count} = context 39 | 40 | expected = [ 41 | [%{name: "HttpDispatcher", scope: ""}, 42 | [count, count * mean / 1000, count * mean / 1000, min / 1000, max / 1000, 0]] 43 | ] 44 | 45 | synthesize_metrics = %{ 46 | "proxyHandler-handle" => "HttpDispatcher" 47 | } 48 | 49 | assert expected == Transformer.synthesize(context[:data], 
synthesize_metrics) 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /lib/exometer/newrelic_reporter/reporter.ex: -------------------------------------------------------------------------------- 1 | defmodule Exometer.NewrelicReporter.Reporter do 2 | @moduledoc """ 3 | Retrieves stored metrics and sends them to NewRelic every N milliseconds 4 | """ 5 | 6 | use GenServer 7 | require Logger 8 | 9 | alias __MODULE__, as: Reporter 10 | alias Exometer.NewrelicReporter.Transformer 11 | alias Exometer.NewrelicReporter.Collector 12 | alias Exometer.NewrelicReporter.Request 13 | 14 | @default_interval 60000 15 | 16 | def start_link(opts \\ []) do 17 | GenServer.start_link(Reporter, opts, name: Reporter) 18 | end 19 | 20 | @doc """ 21 | Start our reporter. The main work is triggered once we 22 | get our configuration passed to set_configuration/1. 23 | """ 24 | def init(opts) do 25 | Logger.info "New Relic Reporter configured with: #{inspect(opts)}" 26 | 27 | opts_with_interval = case Keyword.fetch(opts, :interval) do 28 | {:ok, _} -> opts 29 | :error -> Keyword.put_new(opts, :interval, @default_interval) 30 | end 31 | 32 | new_opts = opts_with_interval |> Keyword.merge(opts) 33 | report_now(new_opts) 34 | {:ok, new_opts} 35 | end 36 | 37 | @doc """ 38 | Report into New Relic "now" (after waiting about 1000ms). Used 39 | when we need to send data more or less right away, without 40 | waiting on the timer loop. 41 | """ 42 | def report_now(opts) do 43 | Process.send_after(Reporter, :report, 1000) 44 | opts 45 | end 46 | 47 | # Takes exactly what's in the metrics store and posts the contents 48 | # to New Relic using the call_count field to contain the metric value 49 | defp prepare_raw_metrics do 50 | Collector.peek |> Transformer.transform 51 | end 52 | 53 | # Take the data from the metrics store and synthesize normal New Relic 54 | # metrics 55 | defp synthesize_metrics(opts) do 56 | case Keyword.fetch(opts, :synthesize_metrics) do 57 | {:ok, metrics} -> 58 | Collector.peek |> Transformer.synthesize(metrics) 59 | :error -> 60 | [] 61 | end 62 | end 63 | 64 | # %{timed: %{"proxyHandler-handle" => %{50 => [{1487680368, 1234}]}}} 65 | 66 | @doc """ 67 | Collect, aggregate, format, and report our metrics to NewRelic 68 | """ 69 | def handle_info(:report, opts) do 70 | Logger.info "Reporting to New Relic" 71 | 72 | synthesize_metrics(opts) ++ prepare_raw_metrics() |> Request.request(opts) 73 | Collector.empty 74 | wait_then_report(opts) 75 | {:noreply, opts} 76 | end 77 | 78 | def handle_cast(msg, opts) do 79 | Logger.debug "Got unexpected message: #{inspect(msg)}" 80 | {:noreply, opts} 81 | end 82 | 83 | defp wait_then_report(opts) do 84 | Process.send_after(Reporter, :report, opts[:interval]) 85 | opts 86 | end 87 | end 88 | -------------------------------------------------------------------------------- /lib/exometer/newrelic_reporter/collector.ex: -------------------------------------------------------------------------------- 1 | defmodule Exometer.NewrelicReporter.Collector do 2 | @moduledoc """ 3 | A collector for our NewRelic metrics. 
Allows for storage, aggregation, and retrieval 4 | of our metric data 5 | """ 6 | 7 | use GenServer 8 | 9 | require Logger 10 | 11 | alias __MODULE__, as: Collector 12 | 13 | def start_link(opts \\ %{}) do 14 | GenServer.start_link(Collector, opts, name: Collector) 15 | end 16 | 17 | @doc """ 18 | Initialize our Collector with empty storage 19 | """ 20 | def init(opts) do 21 | Logger.info("Starting NewRelic Collector") 22 | {:ok, settings: opts, storage: %{}} 23 | end 24 | 25 | @doc """ 26 | Record the metric data at the given key on the GenServer 27 | """ 28 | def collect(metric, data_point, values, settings) do 29 | GenServer.cast(Collector, {metric, data_point, values, settings}) 30 | end 31 | 32 | @doc """ 33 | Asynchronsously store our metric data by the type and name derived from the stat key 34 | """ 35 | def handle_cast({metric, data_point, values, settings}, opts) do 36 | storage = 37 | storage_key(metric, data_point) 38 | |> store(values, opts) 39 | 40 | opts = [storage: storage, settings: settings] 41 | 42 | {:noreply, opts} 43 | end 44 | 45 | @doc """ 46 | Empty all of our stored metrics 47 | """ 48 | def empty, do: GenServer.call(Collector, :empty) 49 | 50 | @doc """ 51 | Peek at the stored metrics without flushing them. Used when synthesizing 52 | metrics into New Relic combined metrics. 53 | """ 54 | def peek, do: GenServer.call(Collector, :peek) 55 | 56 | @doc """ 57 | Retrieve the current stored values and reset storage 58 | """ 59 | def handle_call(:empty, _from, opts) do 60 | {:reply, :ok, Keyword.put(opts, :storage, %{})} 61 | end 62 | 63 | @doc """ 64 | Retrieve the current stored values without resetting 65 | """ 66 | def handle_call(:peek, _from, opts) do 67 | values = Keyword.fetch!(opts, :storage) 68 | 69 | {:reply, values, opts} 70 | end 71 | 72 | defp store({type, name, data_point}, values, opts) do 73 | now = :os.system_time(:seconds) 74 | storage = Keyword.fetch!(opts, :storage) 75 | 76 | entry = storage 77 | |> Map.get(type, %{}) 78 | |> Map.get(name, %{}) 79 | |> Map.put(data_point, [{now, values}]) 80 | 81 | updated = %{ 82 | type => %{ 83 | name => entry 84 | } 85 | } 86 | 87 | MapUtils.deep_merge(storage, updated) 88 | end 89 | 90 | defp storage_key(metric, data_point) do 91 | [_app, _env, type] = Enum.slice(metric, 0..2) 92 | name = 93 | metric 94 | |> Enum.slice(3..-1) 95 | |> Enum.join("/") 96 | 97 | {type, name, data_point} 98 | end 99 | end 100 | -------------------------------------------------------------------------------- /lib/exometer/newrelic_reporter/transformer.ex: -------------------------------------------------------------------------------- 1 | defmodule Exometer.NewrelicReporter.Transformer do 2 | @moduledoc """ 3 | Transform data into a format that can be sent to New Relic 4 | """ 5 | 6 | require Logger 7 | 8 | def transform(data) when is_map(data) do 9 | Logger.debug "Preparing to send to New Relic: #{inspect(data)}" 10 | data 11 | |> Enum.flat_map(&transform/1) 12 | end 13 | 14 | def transform({:timed, metrics}) do 15 | metrics |> Enum.flat_map(&transform_metric/1) 16 | end 17 | 18 | def transform({"timed", metrics}) do 19 | transform({:timed, metrics}) 20 | end 21 | 22 | # %{timed: %{"proxyHandler-handle" => %{50 => [{1487680368, 1234}]}}} 23 | 24 | defp transform_metric({name, values}) do 25 | values |> Enum.map(fn {data_point, val} -> transform_one(name, data_point, val) end) 26 | end 27 | 28 | defp transform_one(name, data_point, val) when length(val) == 0 do 29 | [ %{name: newrelic_name(name, data_point), scope: ""}, [ 0,0,0,0,0,0 
] ] 30 | end 31 | 32 | defp transform_one(name, data_point, val) do 33 | # New Relic metrics are: 34 | # [{ name: name, scope: "" }, [ count, total, exclusive_time, min, max, sum_of_squares ]] 35 | transformed_val = 36 | val 37 | |> Enum.map(fn {_t, v} -> v end) 38 | 39 | [ %{name: newrelic_name(name, data_point), scope: ""}, transformed_val ++ [ 0,0,0,0,0 ] ] 40 | end 41 | 42 | @doc """ 43 | Take a map of metrics to synthesize metrics and process them. Only supports 44 | timers currently. 45 | """ 46 | def synthesize(data, synth_list) when is_map(data) do 47 | synth_list 48 | |> Enum.flat_map(fn {metric_name, output_name} -> 49 | data 50 | |> Enum.map(fn {type, value} -> synthesize_metric({type, value}, metric_name, output_name) end) 51 | end) 52 | end 53 | 54 | @doc """ 55 | Take a timer histogram and synthesize the fields we would put in a normal 56 | New Relic metric. Uses the mean*count to fudge total_time and exclusive_time. 57 | """ 58 | def synthesize_metric({:timed, metrics}, metric_name, output_name) do 59 | Logger.debug "Preparing to send synthesized to New Relic: #{inspect(metrics)} as #{output_name}" 60 | synthesize_one(output_name, Map.get(metrics, metric_name)) 61 | end 62 | 63 | def synthesize_metric({"timed", metrics}, metric_name, output_name) do 64 | synthesize_metric({:timed, metrics}, metric_name, output_name) 65 | end 66 | 67 | def synthesize_one(output_name, values) when is_nil(values) do 68 | [ %{name: output_name, scope: ""}, [ 0,0,0,0,0,0 ] ] 69 | end 70 | 71 | def synthesize_one(output_name, values) when length(values) == 0 do 72 | [ %{name: output_name, scope: ""}, [ 0,0,0,0,0,0 ] ] 73 | end 74 | 75 | def synthesize_one(output_name, values) do 76 | 77 | case values do 78 | %{min: [{_, min}], max: [{_, max}], mean: [{_, mean}], n: [{_, count}]} -> 79 | [ %{name: output_name, scope: ""}, [ count, count * mean / 1000, count * mean / 1000, min / 1000, max / 1000, 0 ] ] 80 | 81 | _ -> 82 | Logger.error "Got unexpected values: #{inspect(values)}. Perhaps missing keys?" 83 | [ %{name: output_name, scope: ""}, [ 0,0,0,0,0,0 ] ] 84 | end 85 | end 86 | 87 | # Transform dashes into slashes for New Relic namespacing 88 | defp newrelic_name(name, data_point) do 89 | String.split(name, "-") ++ [data_point] 90 | |> Enum.join("/") 91 | end 92 | end 93 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Exometer NewRelic Reporter 2 | 3 | ![travis status](https://api.travis-ci.org/Nitro/exometer_newrelic_reporter.svg) 4 | 5 | This uses [exometer](https://github.com/Feuerlabs/exometer) for metrics 6 | gathering and is installed with Pinterest's 7 | [elixometer](https://github.com/pinterest/elixometer) wrapper which makes 8 | tracking timings and generating Exometer stats really simple and easy. 9 | 10 | Metrics are, of course, reported to [New Relic](https://newrelic.com/). The 11 | code pretends to be a New Relic python agent and so you may notice that it 12 | shows up as Python in New Relic because New Relic does not yet support Erlang 13 | or Elixir. 14 | 15 | ## Installation 16 | 17 | 1. Add `exometer_newrelic_reporter` to your list of dependencies in `mix.exs`: 18 | 19 | ```elixir 20 | def deps do 21 | [ {:exometer_newrelic_reporter, github: "nitro/exometer_newrelic_reporter"} ] 22 | end 23 | ``` 24 | 25 | 2. 
Ensure `exometer_newrelic_reporter` is started **before your application** 26 | and **before elixometer**: 27 | 28 | ```elixir 29 | def application do 30 | [applications: [:exometer_newrelic_reporter, :elixometer]] 31 | end 32 | ``` 33 | 34 | ## Configuration 35 | 36 | The following assumes you're using Elixometer, though configuration should be similar for Exometer: 37 | 38 | ```elixir 39 | # If we have a NEW_RELIC_LICENSE_KEY, we'll use a New Relic reporter 40 | if System.get_env("NEW_RELIC_LICENSE_KEY") != "" do 41 | config :exometer_core, report: [ 42 | reporters: ["Elixir.Exometer.NewrelicReporter": 43 | [ 44 | application_name: "Spacesuit #{Mix.env}", 45 | license_key: System.get_env("NEW_RELIC_LICENSE_KEY"), 46 | synthesize_metrics: %{ 47 | "proxyHandler-handle" => "HttpDispatcher" 48 | } 49 | ] 50 | ] 51 | ] 52 | 53 | config :elixometer, reporter: :"Elixir.Exometer.NewrelicReporter", 54 | update_frequency: 60_000 55 | end 56 | ``` 57 | 58 | Note the `"Elixir."` prefix when setting our module; this is required by 59 | exometer and Erlang in order to look up the module. 60 | 61 | **Note** you **must** set the `update_frequency` to 60,000, which is the 62 | expected reporting interval (60 seconds) for a New Relic agent. Anything else will lead 63 | to unhappiness. 64 | 65 | ### Synthesized Metrics vs Raw Metrics 66 | 67 | By default, anything captured by an Elixometer `@timed` annotation will be 68 | sent as histogram metrics suitable for display on a custom dashboard at 69 | New Relic. Fields will all be sent as "Call count" values. These are what 70 | we call Raw Metrics. 71 | 72 | But New Relic metrics actually contain a few fields that allow them to be 73 | used in the normal ways you expect. They contain: 74 | ```elixir 75 | [call_count, total, exclusive, min, max, sum_of_squares] 76 | ``` 77 | 78 | This reporter supports generating metrics that look like this from the histograms 79 | used by Exometer's timed traces. You'll probably want to simulate an 80 | `HttpDispatcher` metric, for example, to capture the normal response time and 81 | throughput for your application. In the example configuration above, you see a 82 | section labeled `synthesize_metrics`. This takes a metric we've called 83 | `proxyHandler-handle` internally and turns it into an `HttpDispatcher` metric 84 | so that we can see the two main charts on the New Relic application page. 85 | 86 | The annotation we used in our application to grab that `proxyHandler-handle` 87 | metric looks like this: 88 | 89 | ```elixir 90 | @timed(key: "timed.proxyHandler-handle", units: :millisecond) 91 | def handle(...) do 92 | # The thing you want to time 93 | end 94 | ``` 95 | 96 | **Note:** The `units` entry is important here. Elixometer will by default 97 | capture in microseconds, which is not what New Relic is expecting. If you don't 98 | pass this value, you'll see weird numbers in the New Relic console. 
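For reference, here is roughly what the synthesized `HttpDispatcher` entry described above looks like once it has been built from a histogram's `min`, `max`, `mean`, and `n` fields. The numbers below come from this repo's test suite and are illustrative only:

```elixir
# [%{name, scope}, [call_count, total, exclusive, min, max, sum_of_squares]]
# The count passes through unchanged; count * mean, min, and max are divided
# by 1000 (see Transformer.synthesize_one/2).
[%{name: "HttpDispatcher", scope: ""},
 [56, 56 * 84_964 / 1000, 56 * 84_964 / 1000, 48_818 / 1000, 196_066 / 1000, 0]]
```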
99 | -------------------------------------------------------------------------------- /lib/exometer/newrelic_reporter/request.ex: -------------------------------------------------------------------------------- 1 | defmodule Exometer.NewrelicReporter.Request do 2 | require Logger 3 | 4 | alias HTTPoison.{Response, Error} 5 | 6 | @agent_version "2.78.0.57" 7 | @base_url "https://~s/agent_listener/invoke_raw_method" 8 | @collector "collector-pool.newrelic.com" 9 | @language "python" 10 | @protocol_v 14 11 | @max_retries 3 12 | @retry_delay [ 1, 5, 7 ] # Expects at least @max_retries items 13 | 14 | def request(data, opts) do 15 | license_key = Keyword.fetch!(opts, :license_key) 16 | 17 | license_key 18 | |> redirect_host 19 | |> connect(opts) 20 | |> push_metrics(data) 21 | |> log_response 22 | end 23 | 24 | @doc """ 25 | Record metrics on New Relic 26 | """ 27 | def push_metrics({redirect_host, license_key, agent_run_id}, data) do 28 | now = :os.system_time(:seconds) 29 | body = 30 | [agent_run_id, now - 60, now, data] 31 | |> Poison.encode! 32 | 33 | newrelic_request( 34 | redirect_host, license_key, body, 35 | %{method: :metric_data, run_id: agent_run_id} 36 | ) 37 | end 38 | 39 | @doc """ 40 | Record an error on New Relic 41 | """ 42 | def push_errors({redirect_host, license_key, run_id}, errors) do 43 | body = [run_id, errors] 44 | newrelic_request(redirect_host, license_key, body, %{method: :error_data, run_id: run_id}) 45 | end 46 | 47 | defp base_params(license_key) do 48 | %{ 49 | license_key: license_key, 50 | marshal_format: :json, 51 | protocol_version: @protocol_v 52 | } 53 | end 54 | 55 | defp connect({redirect_host, license_key}, opts) do 56 | body = opts 57 | |> connect_payload 58 | |> Poison.encode! 59 | 60 | items_we_want = 61 | redirect_host 62 | |> newrelic_request(license_key, body, %{method: :connect}) 63 | |> extract_return_value 64 | |> Map.take(["agent_run_id", "messages"]) 65 | 66 | Logger.debug "Got API message: #{inspect(items_we_want["messages"])}" 67 | 68 | run_id = items_we_want["agent_run_id"] 69 | {redirect_host, license_key, run_id} 70 | end 71 | 72 | defp connect_payload(opts) do 73 | app_name = Keyword.fetch!(opts, :application_name) 74 | high_security = Keyword.get(opts, :high_security, false) 75 | 76 | [%{ 77 | agent_version: @agent_version, 78 | app_name: [app_name], 79 | environment: %{}, 80 | high_security: high_security, 81 | host: hostname(), 82 | identifier: app_name, 83 | language: @language, 84 | pid: pid(), 85 | settings: %{} 86 | }] 87 | end 88 | 89 | defp extract_return_value(%Error{id: _, reason: reason}) do 90 | Logger.error "Error from New Relic connect: #{reason}" 91 | throw(:newrelic_error) 92 | end 93 | 94 | defp extract_return_value(%Response{status_code: 200, body: body}) do 95 | body 96 | |> Poison.decode! 97 | |> Map.get("return_value") 98 | end 99 | 100 | defp extract_return_value(%Response{status_code: _, body: body}) do 101 | body 102 | |> Poison.decode! 
103 | |> Map.get("exception") 104 | |> Map.get("message") 105 | |> Logger.error 106 | 107 | throw(:newrelic_error) 108 | end 109 | 110 | defp hostname do 111 | {:ok, hostname} = :inet.gethostname() 112 | hostname |> to_string 113 | end 114 | 115 | defp log_response(%Response{status_code: status_code}) when status_code in 200..299 do 116 | Logger.info("Successfully submitted to NewRelic") 117 | end 118 | 119 | defp log_response(%Response{status_code: status_code, body: body}) do 120 | Logger.error("Error submitting to NewRelic (HTTP #{status_code}): #{body}") 121 | end 122 | 123 | defp newrelic_params(host, license_key, params) do 124 | url = 125 | @base_url 126 | |> :io_lib.format([host]) 127 | |> to_string 128 | 129 | params = 130 | license_key 131 | |> base_params 132 | |> Map.merge(params) 133 | 134 | {url, params} 135 | end 136 | 137 | defp newrelic_request(host, license_key, params) do 138 | {url, params} = newrelic_params(host, license_key, params) 139 | HTTPoison.get!(url, [], params: params) 140 | end 141 | 142 | defp newrelic_request(host, license_key, body, params) do 143 | {url, params} = newrelic_params(host, license_key, params) 144 | headers = [ 145 | {"Content-Encoding", "identity"}, 146 | {"Content-Type", "application/octet-stream"}, 147 | {"User-Agent", "NewRelic-PythonAgent/#{@agent_version}"} 148 | ] 149 | do_request(url, body, headers, params) 150 | end 151 | 152 | # Request with some retries around error conditions. Sleep defined 153 | # lengths between retries. 154 | defp do_request(url, body, headers, params, count \\ 0) 155 | 156 | defp do_request(url, body, headers, params, count) when count < @max_retries do 157 | case HTTPoison.post(url, body, headers, params: params) do 158 | {:ok, response} -> 159 | response 160 | 161 | {:error, err} -> 162 | Logger.warn "Error talking to New Relic: '#{inspect(err)}'. Retrying." 163 | Process.sleep(Enum.at(@retry_delay, count)) 164 | do_request(url, body, headers, params, count + 1) 165 | end 166 | end 167 | 168 | defp do_request(url, body, headers, params, count) when count >= @max_retries do 169 | case HTTPoison.post(url, body, headers, params: params) do 170 | {:ok, response} -> 171 | response 172 | 173 | {:error, err} -> 174 | Logger.error "Failed to post to New Relic, #{@max_retries} retries exceeded. Giving up" 175 | err 176 | end 177 | end 178 | 179 | defp pid, do: :os.getpid() |> List.to_integer 180 | 181 | defp redirect_host(license_key) do 182 | redirect_host = 183 | @collector 184 | |> newrelic_request(license_key, %{method: :get_redirect_host}) 185 | |> extract_return_value 186 | 187 | {redirect_host, license_key} 188 | end 189 | end 190 | --------------------------------------------------------------------------------
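A minimal sketch of how the pieces above fit together if driven by hand; the license key, application name, and metric values here are placeholders, not working configuration:

```elixir
# Placeholder options; in normal use these arrive via the exometer reporter config.
opts = [
  license_key: "0000000000000000000000000000000000000000",
  application_name: "My App"
]

# One raw metric in the shape produced by Transformer.transform/1:
# [%{name, scope}, [call_count, total, exclusive, min, max, sum_of_squares]]
metrics = [
  [%{name: "proxyHandler/handle/mean", scope: ""}, [84_964, 0, 0, 0, 0, 0]]
]

# Request.request/2 resolves the collector redirect host, connects as an
# agent, then posts the metrics with the :metric_data method.
Exometer.NewrelicReporter.Request.request(metrics, opts)
```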