├── project
│   ├── build.properties
│   ├── sbt-pgp.sbt
│   ├── scalafmt.sbt
│   ├── sbt-headers.sbt
│   ├── sbt-release.sbt
│   ├── sbt-sonatype.sbt
│   ├── sbt-tpolecat.sbt
│   ├── sbt-github-actions.sbt
│   └── Dependencies.scala
├── version.sbt
├── .gitignore
├── core
│   └── src
│       ├── it
│       │   ├── scala
│       │   │   └── fr
│       │   │       └── davit
│       │   │           └── akka
│       │   │               └── http
│       │   │                   └── metrics
│       │   │                       └── core
│       │   │                           ├── TestRegistry.scala
│       │   │                           └── HttpMetricsItSpec.scala
│       │   └── resources
│       │       ├── application.conf
│       │       └── logback-test.xml
│       ├── test
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── core
│       │                               ├── scaladsl
│       │                               │   └── server
│       │                               │       └── HttpMetricsDirectivesSpec.scala
│       │                               ├── TestRegistry.scala
│       │                               ├── MeterStageSpec.scala
│       │                               ├── HttpMetricsSpec.scala
│       │                               └── HttpMetricsRegistrySpec.scala
│       └── main
│           └── scala
│               └── fr
│                   └── davit
│                       └── akka
│                           └── http
│                               └── metrics
│                                   └── core
│                                       ├── HttpMetricsHandler.scala
│                                       ├── Metrics.scala
│                                       ├── HttpMessageLabeler.scala
│                                       ├── scaladsl
│                                       │   ├── server
│                                       │   │   └── HttpMetricsDirectives.scala
│                                       │   └── HttpMetricsServerBuilder.scala
│                                       ├── HttpMetricsNames.scala
│                                       ├── MeterStage.scala
│                                       ├── HttpMetrics.scala
│                                       ├── HttpMetricsSettings.scala
│                                       └── HttpMetricsRegistry.scala
├── datadog
│   └── src
│       ├── it
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── datadog
│       │                               └── DatadogRegistrySpec.scala
│       └── main
│           └── scala
│               └── fr
│                   └── davit
│                       └── akka
│                           └── http
│                               └── metrics
│                                   └── datadog
│                                       ├── DatadogSettings.scala
│                                       ├── DatadogRegistry.scala
│                                       └── StatsDMetrics.scala
├── dropwizard
│   └── src
│       ├── it
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── dropwizard
│       │                               └── DropwizardMetricsItSpec.scala
│       ├── test
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── dropwizard
│       │                               ├── marshalling
│       │                               │   └── DropwizardMarshallersSpec.scala
│       │                               └── DropwizardRegistrySpec.scala
│       └── main
│           └── scala
│               └── fr
│                   └── davit
│                       └── akka
│                           └── http
│                               └── metrics
│                                   └── dropwizard
│                                       ├── marshalling
│                                       │   └── DropwizardMarshallers.scala
│                                       ├── DropwizardSettings.scala
│                                       ├── DropwizardMetrics.scala
│                                       └── DropwizardRegistry.scala
├── graphite
│   └── src
│       ├── it
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── graphite
│       │                               └── GraphiteRegistrySpec.scala
│       └── main
│           └── scala
│               └── fr
│                   └── davit
│                       └── akka
│                           └── http
│                               └── metrics
│                                   └── graphite
│                                       ├── GraphiteSettings.scala
│                                       ├── GraphiteRegistry.scala
│                                       ├── CarbonMetrics.scala
│                                       └── CarbonClient.scala
├── prometheus
│   └── src
│       ├── it
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── prometheus
│       │                               └── PrometheusMetricsItSpec.scala
│       ├── test
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── prometheus
│       │                               ├── marshalling
│       │                               │   └── PrometheusMarshallersSpec.scala
│       │                               └── PrometheusRegistrySpec.scala
│       └── main
│           └── scala
│               └── fr
│                   └── davit
│                       └── akka
│                           └── http
│                               └── metrics
│                                   └── prometheus
│                                       ├── PrometheusConverters.scala
│                                       ├── marshalling
│                                       │   └── PrometheusMarshallers.scala
│                                       ├── PrometheusMetrics.scala
│                                       ├── PrometheusSettings.scala
│                                       └── PrometheusRegistry.scala
├── dropwizard-v5
│   └── src
│       ├── it
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── dropwizard
│       │                               └── DropwizardMetricsItSpec.scala
│       ├── test
│       │   ├── resources
│       │   │   ├── application.conf
│       │   │   └── logback-test.xml
│       │   └── scala
│       │       └── fr
│       │           └── davit
│       │               └── akka
│       │                   └── http
│       │                       └── metrics
│       │                           └── dropwizard
│       │                               ├── marshalling
│       │                               │   └── DropwizardMarshallersSpec.scala
│       │                               └── DropwizardRegistrySpec.scala
│       └── main
│           └── scala
│               └── fr
│                   └── davit
│                       └── akka
│                           └── http
│                               └── metrics
│                                   └── dropwizard
│                                       ├── marshalling
│                                       │   └── DropwizardMarshallers.scala
│                                       ├── DropwizardSettings.scala
│                                       ├── DropwizardRegistry.scala
│                                       └── DropwizardMetrics.scala
├── .scala-steward.conf
├── .scalafmt.conf
├── .github
│   └── workflows
│       ├── ci.yml
│       └── clean.yml
├── CHANGELOG.md
└── LICENSE
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.8.0
2 |
--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
1 | ThisBuild / version := "1.7.2-SNAPSHOT"
2 |
--------------------------------------------------------------------------------
/project/sbt-pgp.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1")
2 |
--------------------------------------------------------------------------------
/project/scalafmt.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0")
2 |
--------------------------------------------------------------------------------
/project/sbt-headers.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.9.0")
2 |
--------------------------------------------------------------------------------
/project/sbt-release.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("com.github.sbt" % "sbt-release" % "1.1.0")
2 |
--------------------------------------------------------------------------------
/project/sbt-sonatype.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.15")
2 |
--------------------------------------------------------------------------------
/project/sbt-tpolecat.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.3.3")
2 |
--------------------------------------------------------------------------------
/project/sbt-github-actions.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.14.2")
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.class
2 | *.log
3 | .idea/
4 | .bloop/
5 | .metals/
6 | out/
7 | target/
8 | project/target/
9 | project/metals.sbt
10 |
--------------------------------------------------------------------------------
/core/src/it/scala/fr/davit/akka/http/metrics/core/TestRegistry.scala:
--------------------------------------------------------------------------------
1 | ../../../../../../../../test/scala/fr/davit/akka/http/metrics/core/TestRegistry.scala
--------------------------------------------------------------------------------
/core/src/it/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/core/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/datadog/src/it/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/dropwizard/src/it/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/dropwizard/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/graphite/src/it/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/prometheus/src/it/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/prometheus/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/dropwizard-v5/src/it/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/dropwizard-v5/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/core/src/it/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-core.it-test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/core/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-core.test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/datadog/src/it/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-datadog.it-test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/graphite/src/it/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-graphite.it-test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/dropwizard-v5/src/it/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-dropwizard.it-test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/dropwizard-v5/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-dropwizard.test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/dropwizard/src/it/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-dropwizard.it-test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/dropwizard/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-dropwizard.test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/prometheus/src/it/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-prometheus.it-test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/prometheus/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
5 |         <file>./target/akka-http-metrics-prometheus.test.log</file>
6 |         <append>false</append>
7 |         <encoder>
8 |             <pattern>%date{HH:mm:ss} %-5level %logger{0} {%class %method} - %msg%n</pattern>
9 |         </encoder>
10 |     </appender>
11 |
12 |     <root level="DEBUG">
13 |         <appender-ref ref="FILE"/>
14 |     </root>
15 | </configuration>
--------------------------------------------------------------------------------
/.scala-steward.conf:
--------------------------------------------------------------------------------
1 | updates.pin = [
2 | # Stay on akka Apache 2.0 license
3 | { groupId = "com.typesafe.akka", artifactId="akka-stream", version = "2.6." }
4 | { groupId = "com.typesafe.akka", artifactId="akka-slf4j", version = "2.6." }
5 | { groupId = "com.typesafe.akka", artifactId="akka-stream-testkit", version = "2.6." }
6 | { groupId = "com.typesafe.akka", artifactId="akka-testkit", version = "2.6." }
7 | { groupId = "com.typesafe.akka", artifactId="akka-http", version = "10.2." }
8 | { groupId = "com.typesafe.akka", artifactId="akka-http-spray-json", version = "10.2." }
9 | { groupId = "com.typesafe.akka", artifactId="akka-http-testkit", version = "10.2." }
10 | ]
11 |
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
1 | version = 3.5.9
2 | project.git = true # only format files tracked by git
3 | maxColumn = 120
4 | runner.dialect = scala213
5 | align.openParenCallSite = false
6 | align.openParenDefnSite = false
7 | # setting 'align = more' tokens explicitly to work with other align options
8 | align.tokens = [
9 | {code = "=", owner = "(Enumerator.Val|Defn.(Va(l|r)|Def|Type))"},
10 | {code = "{", owner = "Template"},
11 | {code = "}", owner = "Template"},
12 | {code = "->", owner = "Term.ApplyInfix"},
13 | {code = "<-", owner = "Enumerator.Generator"},
14 | {code = "=>", owner = "Case"},
15 | {code = "%", owner = "Term.ApplyInfix"},
16 | {code = "%%", owner = "Term.ApplyInfix"},
17 | {code = "%%%", owner = "Term.ApplyInfix"}
18 | ]
19 | assumeStandardLibraryStripMargin = true
20 | rewrite.rules = [
21 | PreferCurlyFors,
22 | SortImports
23 | ]
--------------------------------------------------------------------------------
/core/src/main/scala/fr/davit/akka/http/metrics/core/HttpMetricsHandler.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
20 |
21 | trait HttpMetricsHandler {
22 |
23 | def onRequest(request: HttpRequest): HttpRequest
24 |
25 | def onResponse(request: HttpRequest, response: HttpResponse): HttpResponse
26 |
27 | def onFailure(request: HttpRequest, cause: Throwable): Throwable
28 |
29 | def onConnection(): Unit
30 |
31 | def onDisconnection(): Unit
32 | }
33 |
--------------------------------------------------------------------------------
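Note: the HttpMetricsHandler trait above is the extension point for plugging a metrics backend into the server flow. A minimal sketch of a custom handler follows; the LoggingMetricsHandler name and its println-based behaviour are illustrative only and not part of the library.

import java.util.concurrent.atomic.AtomicLong

import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import fr.davit.akka.http.metrics.core.HttpMetricsHandler

// Hypothetical handler: logs traffic and keeps a simple count of open connections.
class LoggingMetricsHandler extends HttpMetricsHandler {
  private val activeConnections = new AtomicLong()

  override def onRequest(request: HttpRequest): HttpRequest = {
    println(s"request: ${request.method.value} ${request.uri.path}")
    request
  }

  override def onResponse(request: HttpRequest, response: HttpResponse): HttpResponse = {
    println(s"response: ${response.status.intValue}")
    response
  }

  override def onFailure(request: HttpRequest, cause: Throwable): Throwable = {
    println(s"failure: ${cause.getMessage}")
    cause
  }

  override def onConnection(): Unit    = { activeConnections.incrementAndGet(); () }
  override def onDisconnection(): Unit = { activeConnections.decrementAndGet(); () }
}
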
/core/src/main/scala/fr/davit/akka/http/metrics/core/Metrics.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import scala.concurrent.duration.FiniteDuration
20 |
21 | case class Dimension(name: String, label: String)
22 |
23 | trait Counter {
24 | def inc(dimensions: Seq[Dimension] = Seq.empty): Unit
25 | }
26 |
27 | trait Gauge {
28 | def inc(dimensions: Seq[Dimension] = Seq.empty): Unit
29 |
30 | def dec(dimensions: Seq[Dimension] = Seq.empty): Unit
31 | }
32 |
33 | trait Timer {
34 | def observe(duration: FiniteDuration, dimensions: Seq[Dimension] = Seq.empty): Unit
35 | }
36 |
37 | trait Histogram {
38 | def update[T: Numeric](value: T, dimensions: Seq[Dimension] = Seq.empty): Unit
39 | }
40 |
--------------------------------------------------------------------------------
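Note: these four interfaces are all a metrics backend has to provide. Below is a sketch of simple in-memory implementations, handy for tests; the class names are hypothetical and dimensions are accepted but ignored for brevity.

import java.util.concurrent.atomic.AtomicLong

import fr.davit.akka.http.metrics.core.{Counter, Dimension, Gauge}

// Counts increments in memory.
class InMemoryCounter extends Counter {
  private val count = new AtomicLong()

  override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = { count.incrementAndGet(); () }

  def value: Long = count.get()
}

// Tracks a value that can go up and down, e.g. active requests.
class InMemoryGauge extends Gauge {
  private val current = new AtomicLong()

  override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = { current.incrementAndGet(); () }
  override def dec(dimensions: Seq[Dimension] = Seq.empty): Unit = { current.decrementAndGet(); () }

  def value: Long = current.get()
}
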
/dropwizard/src/main/scala/fr/davit/akka/http/metrics/dropwizard/marshalling/DropwizardMarshallers.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard.marshalling
18 |
19 | import java.io.StringWriter
20 |
21 | import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
22 | import akka.http.scaladsl.model.{ContentTypes, HttpEntity}
23 | import com.fasterxml.jackson.databind.ObjectMapper
24 | import fr.davit.akka.http.metrics.dropwizard.DropwizardRegistry
25 |
26 | trait DropwizardMarshallers {
27 |
28 | implicit val registryToEntityMarshaller: ToEntityMarshaller[DropwizardRegistry] = {
29 |
30 | val writer = new ObjectMapper().writer()
31 |
32 | Marshaller.opaque { registry =>
33 | val output = new StringWriter()
34 | try {
35 | writer.writeValue(output, registry.underlying)
36 | HttpEntity(output.toString).withContentType(ContentTypes.`application/json`)
37 | } finally {
38 | output.close()
39 | }
40 | }
41 | }
42 | }
43 |
44 | object DropwizardMarshallers extends DropwizardMarshallers
45 |
--------------------------------------------------------------------------------
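Note: a usage sketch for the marshaller above. With DropwizardMarshallers._ in scope, a DropwizardRegistry can be completed directly as a JSON entity; the "metrics" path and the object name are illustrative.

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import fr.davit.akka.http.metrics.dropwizard.DropwizardRegistry
import fr.davit.akka.http.metrics.dropwizard.marshalling.DropwizardMarshallers._

object DropwizardMetricsEndpoint {
  val registry: DropwizardRegistry = DropwizardRegistry()

  // registryToEntityMarshaller serializes the underlying MetricRegistry to application/json.
  val route: Route = path("metrics") {
    get {
      complete(registry)
    }
  }
}
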
/dropwizard-v5/src/main/scala/fr/davit/akka/http/metrics/dropwizard/marshalling/DropwizardMarshallers.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard.marshalling
18 |
19 | import java.io.StringWriter
20 |
21 | import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
22 | import akka.http.scaladsl.model.{ContentTypes, HttpEntity}
23 | import com.fasterxml.jackson.databind.ObjectMapper
24 | import fr.davit.akka.http.metrics.dropwizard.DropwizardRegistry
25 |
26 | trait DropwizardMarshallers {
27 |
28 | implicit val registryToEntityMarshaller: ToEntityMarshaller[DropwizardRegistry] = {
29 |
30 | val writer = new ObjectMapper().writer()
31 |
32 | Marshaller.opaque { registry =>
33 | val output = new StringWriter()
34 | try {
35 | writer.writeValue(output, registry.underlying)
36 | HttpEntity(output.toString).withContentType(ContentTypes.`application/json`)
37 | } finally {
38 | output.close()
39 | }
40 | }
41 | }
42 | }
43 |
44 | object DropwizardMarshallers extends DropwizardMarshallers
45 |
--------------------------------------------------------------------------------
/prometheus/src/main/scala/fr/davit/akka/http/metrics/prometheus/PrometheusConverters.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.prometheus
18 |
19 | trait PrometheusConverters {
20 |
21 | implicit def convertCounter(counter: io.prometheus.client.Counter): PrometheusCounter =
22 | new PrometheusCounter(counter)
23 |
24 | implicit def convertGauge(gauge: io.prometheus.client.Gauge): PrometheusGauge =
25 | new PrometheusGauge(gauge)
26 |
27 | implicit def convertSummaryTimer(summary: io.prometheus.client.Summary): PrometheusSummaryTimer =
28 | new PrometheusSummaryTimer(summary)
29 |
30 | implicit def convertHistogramTimer(histogram: io.prometheus.client.Histogram): PrometheusHistogramTimer =
31 | new PrometheusHistogramTimer(histogram)
32 |
33 | implicit def convertSummary(summary: io.prometheus.client.Summary): PrometheusSummary =
34 | new PrometheusSummary(summary)
35 |
36 | implicit def convertHistogram(histogram: io.prometheus.client.Histogram): PrometheusHistogram =
37 | new PrometheusHistogram(histogram)
38 |
39 | }
40 |
41 | object PrometheusConverters extends PrometheusConverters
42 |
--------------------------------------------------------------------------------
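Note: a usage sketch for these converters. A collector built with the plain Prometheus simpleclient API is adapted to the core Counter interface by the implicit convertCounter; the metric name and label are illustrative.

import fr.davit.akka.http.metrics.core.{Counter, Dimension}
import fr.davit.akka.http.metrics.prometheus.PrometheusConverters._
import io.prometheus.client.CollectorRegistry

object PrometheusConverterExample {
  private val collectorRegistry = new CollectorRegistry()

  // A plain simpleclient counter, viewed as the library's Counter thanks to the implicit conversion.
  val jobCounter: Counter = io.prometheus.client.Counter
    .build()
    .name("jobs_processed_total")
    .help("Number of processed jobs")
    .labelNames("status")
    .register(collectorRegistry)

  def markDone(): Unit = jobCounter.inc(Seq(Dimension("status", "done")))
}
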
/prometheus/src/main/scala/fr/davit/akka/http/metrics/prometheus/marshalling/PrometheusMarshallers.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.prometheus.marshalling
18 |
19 | import java.io.StringWriter
20 |
21 | import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
22 | import akka.http.scaladsl.model.{ContentType, HttpCharsets, HttpEntity, MediaTypes}
23 | import fr.davit.akka.http.metrics.prometheus.PrometheusRegistry
24 | import io.prometheus.client.exporter.common.TextFormat
25 |
26 | trait PrometheusMarshallers {
27 |
28 | val PrometheusContentType: ContentType = {
29 | MediaTypes.`text/plain` withParams Map("version" -> "0.0.4") withCharset HttpCharsets.`UTF-8`
30 | }
31 |
32 | implicit val marshaller: ToEntityMarshaller[PrometheusRegistry] = {
33 | Marshaller.opaque { registry =>
34 | val output = new StringWriter()
35 | try {
36 | TextFormat.write004(output, registry.underlying.metricFamilySamples)
37 | HttpEntity(output.toString).withContentType(PrometheusContentType)
38 | } finally {
39 | output.close()
40 | }
41 | }
42 | }
43 | }
44 |
45 | object PrometheusMarshallers extends PrometheusMarshallers
46 |
--------------------------------------------------------------------------------
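Note: a usage sketch for the marshaller above, assuming PrometheusRegistry exposes an apply() with default arguments like the other registries in this project; the "metrics" path is illustrative.

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import fr.davit.akka.http.metrics.prometheus.PrometheusRegistry
import fr.davit.akka.http.metrics.prometheus.marshalling.PrometheusMarshallers._

object PrometheusMetricsEndpoint {
  val registry: PrometheusRegistry = PrometheusRegistry()

  // complete(registry) renders the text/plain; version=0.0.4 exposition format via the marshaller above.
  val route: Route = path("metrics")(get(complete(registry)))
}
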
/graphite/src/main/scala/fr/davit/akka/http/metrics/graphite/GraphiteSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.graphite
18 |
19 | import akka.http.scaladsl.model.StatusCodes
20 | import fr.davit.akka.http.metrics.core.HttpMetricsNames.HttpMetricsNamesImpl
21 | import fr.davit.akka.http.metrics.core.HttpMetricsSettings.HttpMetricsSettingsImpl
22 | import fr.davit.akka.http.metrics.core.{HttpMetricsNames, HttpMetricsSettings}
23 |
24 | object GraphiteMetricsNames {
25 |
26 | val default: HttpMetricsNames = HttpMetricsNamesImpl(
27 | requests = "requests",
28 | requestsActive = "requests.active",
29 | requestsFailures = "requests.failures",
30 | requestsSize = "requests.bytes",
31 | responses = "responses",
32 | responsesErrors = "responses.errors",
33 | responsesDuration = "responses.duration",
34 | responsesSize = "responses.bytes",
35 | connections = "connections",
36 | connectionsActive = "connections.active"
37 | )
38 |
39 | }
40 |
41 | object GraphiteSettings {
42 |
43 | val default: HttpMetricsSettings = HttpMetricsSettingsImpl(
44 | "akka.http",
45 | GraphiteMetricsNames.default,
46 | _.status.isInstanceOf[StatusCodes.ServerError],
47 | includeMethodDimension = false,
48 | includePathDimension = false,
49 | includeStatusDimension = false
50 | )
51 |
52 | }
53 |
--------------------------------------------------------------------------------
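Note: a sketch of customized settings built from the same HttpMetricsSettingsImpl constructor call used above; the namespace and the enabled dimensions are illustrative, and any additional constructor parameters are assumed to have defaults.

import akka.http.scaladsl.model.StatusCodes
import fr.davit.akka.http.metrics.core.HttpMetricsSettings
import fr.davit.akka.http.metrics.core.HttpMetricsSettings.HttpMetricsSettingsImpl
import fr.davit.akka.http.metrics.graphite.GraphiteMetricsNames

object CustomGraphiteSettings {
  // Same shape as GraphiteSettings.default, with a custom namespace and method/path/status dimensions enabled.
  val settings: HttpMetricsSettings = HttpMetricsSettingsImpl(
    "my.service",
    GraphiteMetricsNames.default,
    _.status.isInstanceOf[StatusCodes.ServerError],
    includeMethodDimension = true,
    includePathDimension = true,
    includeStatusDimension = true
  )
}
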
/dropwizard-v5/src/main/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import akka.http.scaladsl.model.StatusCodes
20 | import fr.davit.akka.http.metrics.core.HttpMetricsNames.HttpMetricsNamesImpl
21 | import fr.davit.akka.http.metrics.core.{HttpMetricsNames, HttpMetricsSettings}
22 | import fr.davit.akka.http.metrics.core.HttpMetricsSettings.HttpMetricsSettingsImpl
23 |
24 | object DropwizardMetricsNames {
25 |
26 | val default: HttpMetricsNames = HttpMetricsNamesImpl(
27 | requests = "requests",
28 | requestsActive = "requests.active",
29 | requestsFailures = "requests.failures",
30 | requestsSize = "requests.bytes",
31 | responses = "responses",
32 | responsesErrors = "responses.errors",
33 | responsesDuration = "responses.duration",
34 | responsesSize = "responses.bytes",
35 | connections = "connections",
36 | connectionsActive = "connections.active"
37 | )
38 |
39 | }
40 |
41 | object DropwizardSettings {
42 |
43 | val default: HttpMetricsSettings = HttpMetricsSettingsImpl(
44 | "akka.http",
45 | DropwizardMetricsNames.default,
46 | _.status.isInstanceOf[StatusCodes.ServerError],
47 | includeMethodDimension = false,
48 | includePathDimension = false,
49 | includeStatusDimension = false
50 | )
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/dropwizard/src/main/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import akka.http.scaladsl.model.StatusCodes
20 | import fr.davit.akka.http.metrics.core.HttpMetricsNames.HttpMetricsNamesImpl
21 | import fr.davit.akka.http.metrics.core.{HttpMetricsNames, HttpMetricsSettings}
22 | import fr.davit.akka.http.metrics.core.HttpMetricsSettings.HttpMetricsSettingsImpl
23 |
24 | object DropwizardMetricsNames {
25 |
26 | val default: HttpMetricsNames = HttpMetricsNamesImpl(
27 | requests = "requests",
28 | requestsActive = "requests.active",
29 | requestsFailures = "requests.failures",
30 | requestsSize = "requests.bytes",
31 | responses = "responses",
32 | responsesErrors = "responses.errors",
33 | responsesDuration = "responses.duration",
34 | responsesSize = "responses.bytes",
35 | connections = "connections",
36 | connectionsActive = "connections.active"
37 | )
38 |
39 | }
40 |
41 | object DropwizardSettings {
42 |
43 | val default: HttpMetricsSettings = HttpMetricsSettingsImpl(
44 | "akka.http",
45 | DropwizardMetricsNames.default,
46 | _.status.isInstanceOf[StatusCodes.ServerError],
47 | includeMethodDimension = false,
48 | includePathDimension = false,
49 | includeStatusDimension = false
50 | )
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/datadog/src/main/scala/fr/davit/akka/http/metrics/datadog/DatadogSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.datadog
18 |
19 | import akka.http.scaladsl.model.StatusCodes
20 | import fr.davit.akka.http.metrics.core.HttpMetricsNames.HttpMetricsNamesImpl
21 | import fr.davit.akka.http.metrics.core.{HttpMetricsNames, HttpMetricsSettings}
22 | import fr.davit.akka.http.metrics.core.HttpMetricsSettings.HttpMetricsSettingsImpl
23 |
24 | object DatadogMetricsNames {
25 |
26 | val default: HttpMetricsNames = HttpMetricsNamesImpl(
27 | requests = "requests_count",
28 | requestsActive = "requests_active",
29 | requestsFailures = "requests_failures_count",
30 | requestsSize = "requests_bytes",
31 | responses = "responses_count",
32 | responsesErrors = "responses_errors_count",
33 | responsesDuration = "responses_duration",
34 | responsesSize = "responses_bytes",
35 | connections = "connections_count",
36 | connectionsActive = "connections_active"
37 | )
38 |
39 | }
40 |
41 | object DatadogSettings {
42 |
43 | val default: HttpMetricsSettings = HttpMetricsSettingsImpl(
44 | "akka.http",
45 | DatadogMetricsNames.default,
46 | _.status.isInstanceOf[StatusCodes.ServerError],
47 | includeMethodDimension = false,
48 | includePathDimension = false,
49 | includeStatusDimension = false
50 | )
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | # This file was automatically generated by sbt-github-actions using the
2 | # githubWorkflowGenerate task. You should add and commit this file to
3 | # your git repository. It goes without saying that you shouldn't edit
4 | # this file by hand! Instead, if you wish to make changes, you should
5 | # change your sbt build configuration to revise the workflow description
6 | # to meet your needs, then regenerate this file.
7 |
8 | name: Continuous Integration
9 |
10 | on:
11 | pull_request:
12 | branches: [main]
13 | push:
14 | branches: [main]
15 |
16 | env:
17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
18 |
19 | jobs:
20 | build:
21 | name: Build and Test
22 | strategy:
23 | matrix:
24 | os: [ubuntu-latest]
25 | scala: [2.13.10, 2.12.17]
26 | java: [temurin@11]
27 | runs-on: ${{ matrix.os }}
28 | steps:
29 | - name: Checkout current branch (full)
30 | uses: actions/checkout@v2
31 | with:
32 | fetch-depth: 0
33 |
34 | - name: Setup Java (temurin@11)
35 | if: matrix.java == 'temurin@11'
36 | uses: actions/setup-java@v2
37 | with:
38 | distribution: temurin
39 | java-version: 11
40 |
41 | - name: Cache sbt
42 | uses: actions/cache@v2
43 | with:
44 | path: |
45 | ~/.sbt
46 | ~/.ivy2/cache
47 | ~/.coursier/cache/v1
48 | ~/.cache/coursier/v1
49 | ~/AppData/Local/Coursier/Cache/v1
50 | ~/Library/Caches/Coursier/v1
51 | key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }}
52 |
53 | - name: Check that workflows are up to date
54 | run: sbt ++${{ matrix.scala }} githubWorkflowCheck
55 |
56 | - name: Check project
57 | run: sbt ++${{ matrix.scala }} scalafmtCheckAll headerCheckAll
58 |
59 | - name: Build project
60 | run: 'sbt ++${{ matrix.scala }} test it:test'
61 |
--------------------------------------------------------------------------------
/graphite/src/main/scala/fr/davit/akka/http/metrics/graphite/GraphiteRegistry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.graphite
18 |
19 | import fr.davit.akka.http.metrics.core.{HttpMetricsSettings, _}
20 |
21 | object GraphiteRegistry {
22 |
23 | def apply(client: CarbonClient, settings: HttpMetricsSettings = GraphiteSettings.default): GraphiteRegistry = {
24 | new GraphiteRegistry(settings)(client)
25 | }
26 | }
27 |
28 | class GraphiteRegistry(settings: HttpMetricsSettings)(implicit client: CarbonClient)
29 | extends HttpMetricsRegistry(settings) {
30 |
31 | lazy val requests: Counter = new CarbonCounter(settings.namespace, settings.metricsNames.requests)
32 | lazy val requestsActive: Gauge = new CarbonGauge(settings.namespace, settings.metricsNames.requestsActive)
33 | lazy val requestsFailures: Counter = new CarbonCounter(settings.namespace, settings.metricsNames.requestsFailures)
34 | lazy val requestsSize: Histogram = new CarbonHistogram(settings.namespace, settings.metricsNames.requestsSize)
35 | lazy val responses: Counter = new CarbonCounter(settings.namespace, settings.metricsNames.responses)
36 | lazy val responsesErrors: Counter = new CarbonCounter(settings.namespace, settings.metricsNames.responsesErrors)
37 | lazy val responsesDuration: Timer = new CarbonTimer(settings.namespace, settings.metricsNames.responsesDuration)
38 | lazy val responsesSize: Histogram = new CarbonHistogram(settings.namespace, settings.metricsNames.responsesSize)
39 | lazy val connections: Counter = new CarbonCounter(settings.namespace, settings.metricsNames.connections)
40 | lazy val connectionsActive: Gauge = new CarbonGauge(settings.namespace, settings.metricsNames.connectionsActive)
41 | }
42 |
--------------------------------------------------------------------------------
/graphite/src/main/scala/fr/davit/akka/http/metrics/graphite/CarbonMetrics.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.graphite
18 |
19 | import fr.davit.akka.http.metrics.core.{Counter, Dimension, Gauge, Histogram, Timer}
20 |
21 | import scala.concurrent.duration.FiniteDuration
22 |
23 | abstract class CarbonMetrics(namespace: String, name: String) {
24 | protected lazy val metricName: String = s"$namespace.$name"
25 | }
26 |
27 | class CarbonCounter(namespace: String, name: String)(implicit client: CarbonClient)
28 | extends CarbonMetrics(namespace, name)
29 | with Counter {
30 |
31 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
32 | client.publish(metricName, 1, dimensions)
33 | }
34 | }
35 |
36 | class CarbonGauge(namespace: String, name: String)(implicit client: CarbonClient)
37 | extends CarbonMetrics(namespace, name)
38 | with Gauge {
39 |
40 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
41 | client.publish(metricName, 1, dimensions)
42 | }
43 |
44 | override def dec(dimensions: Seq[Dimension] = Seq.empty): Unit = {
45 | client.publish(metricName, -1, dimensions)
46 | }
47 | }
48 |
49 | class CarbonTimer(namespace: String, name: String)(implicit client: CarbonClient)
50 | extends CarbonMetrics(namespace, name)
51 | with Timer {
52 |
53 | override def observe(duration: FiniteDuration, dimensions: Seq[Dimension] = Seq.empty): Unit = {
54 | client.publish(metricName, duration.toMillis, dimensions)
55 | }
56 | }
57 |
58 | class CarbonHistogram(namespace: String, name: String)(implicit client: CarbonClient)
59 | extends CarbonMetrics(namespace, name)
60 | with Histogram {
61 |
62 | override def update[T: Numeric](value: T, dimensions: Seq[Dimension] = Seq.empty): Unit = {
63 | client.publish(metricName, value, dimensions)
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
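Note: a construction sketch for the Graphite backend. It assumes CarbonClient offers a CarbonClient(host, port) factory requiring an implicit ActorSystem, as defined in CarbonClient.scala (not included in this excerpt); host and port are illustrative.

import akka.actor.ActorSystem
import fr.davit.akka.http.metrics.graphite.{CarbonClient, GraphiteRegistry}

object GraphiteExample {
  implicit val system: ActorSystem = ActorSystem("graphite-metrics")

  // Carbon plaintext endpoint of the Graphite deployment (assumed factory signature).
  val client: CarbonClient       = CarbonClient("localhost", 2003)
  val registry: GraphiteRegistry = GraphiteRegistry(client)
}
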
/dropwizard-v5/src/main/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardRegistry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import fr.davit.akka.http.metrics.core.{HttpMetricsSettings, _}
20 | import io.dropwizard.metrics5.MetricRegistry
21 |
22 | object DropwizardRegistry {
23 |
24 | def apply(
25 | registry: MetricRegistry = new MetricRegistry(),
26 | settings: HttpMetricsSettings = DropwizardSettings.default
27 | ): DropwizardRegistry = {
28 | new DropwizardRegistry(settings)(registry)
29 | }
30 | }
31 |
32 | class DropwizardRegistry(settings: HttpMetricsSettings)(implicit val underlying: MetricRegistry)
33 | extends HttpMetricsRegistry(settings) {
34 |
35 | lazy val requests: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.requests)
36 | lazy val requestsActive: Gauge = new DropwizardGauge(settings.namespace, settings.metricsNames.requestsActive)
37 | lazy val requestsFailures: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.requestsFailures)
38 | lazy val requestsSize: Histogram = new DropwizardHistogram(settings.namespace, settings.metricsNames.requestsSize)
39 | lazy val responses: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.responses)
40 | lazy val responsesErrors: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.responsesErrors)
41 | lazy val responsesDuration: Timer = new DropwizardTimer(settings.namespace, settings.metricsNames.responsesDuration)
42 | lazy val responsesSize: Histogram = new DropwizardHistogram(settings.namespace, settings.metricsNames.responsesSize)
43 | lazy val connections: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.connections)
44 | lazy val connectionsActive: Gauge = new DropwizardGauge(settings.namespace, settings.metricsNames.connectionsActive)
45 | }
46 |
--------------------------------------------------------------------------------
/datadog/src/main/scala/fr/davit/akka/http/metrics/datadog/DatadogRegistry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.datadog
18 |
19 | import com.timgroup.statsd.StatsDClient
20 | import fr.davit.akka.http.metrics.core.{HttpMetricsSettings, _}
21 |
22 | object DatadogRegistry {
23 |
24 | def apply(client: StatsDClient, settings: HttpMetricsSettings = DatadogSettings.default): DatadogRegistry = {
25 | new DatadogRegistry(settings)(client)
26 | }
27 | }
28 |
29 | /** see [https://docs.datadoghq.com/developers/faq/what-best-practices-are-recommended-for-naming-metrics-and-tags/]
30 | * @param client
31 | */
32 | class DatadogRegistry(settings: HttpMetricsSettings)(implicit client: StatsDClient)
33 | extends HttpMetricsRegistry(settings) {
34 |
35 | lazy val requests: Counter = new StatsDCounter(settings.namespace, settings.metricsNames.requests)
36 | lazy val requestsActive: Gauge = new StatsDGauge(settings.namespace, settings.metricsNames.requestsActive)
37 | lazy val requestsFailures: Counter = new StatsDCounter(settings.namespace, settings.metricsNames.requestsFailures)
38 | lazy val requestsSize: Histogram = new StatsDHistogram(settings.namespace, settings.metricsNames.requestsSize)
39 | lazy val responses: Counter = new StatsDCounter(settings.namespace, settings.metricsNames.responses)
40 | lazy val responsesErrors: Counter = new StatsDCounter(settings.namespace, settings.metricsNames.responsesErrors)
41 | lazy val responsesDuration: Timer = new StatsDTimer(settings.namespace, settings.metricsNames.responsesDuration)
42 | lazy val responsesSize: Histogram = new StatsDHistogram(settings.namespace, settings.metricsNames.responsesSize)
43 | lazy val connections: Counter = new StatsDCounter(settings.namespace, settings.metricsNames.connections)
44 | lazy val connectionsActive: Gauge = new StatsDGauge(settings.namespace, settings.metricsNames.connectionsActive)
45 | }
46 |
--------------------------------------------------------------------------------
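Note: a construction sketch for the Datadog backend, assuming the NonBlockingStatsDClientBuilder API of java-dogstatsd-client; host and port are illustrative and should point at your local agent.

import com.timgroup.statsd.{NonBlockingStatsDClientBuilder, StatsDClient}
import fr.davit.akka.http.metrics.datadog.DatadogRegistry

object DatadogExample {
  // StatsD client talking to the local Datadog agent (assumed builder API).
  val client: StatsDClient = new NonBlockingStatsDClientBuilder()
    .hostname("localhost")
    .port(8125)
    .build()

  val registry: DatadogRegistry = DatadogRegistry(client)
}
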
/.github/workflows/clean.yml:
--------------------------------------------------------------------------------
1 | # This file was automatically generated by sbt-github-actions using the
2 | # githubWorkflowGenerate task. You should add and commit this file to
3 | # your git repository. It goes without saying that you shouldn't edit
4 | # this file by hand! Instead, if you wish to make changes, you should
5 | # change your sbt build configuration to revise the workflow description
6 | # to meet your needs, then regenerate this file.
7 |
8 | name: Clean
9 |
10 | on: push
11 |
12 | jobs:
13 | delete-artifacts:
14 | name: Delete Artifacts
15 | runs-on: ubuntu-latest
16 | env:
17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
18 | steps:
19 | - name: Delete artifacts
20 | run: |
21 | # Customize those three lines with your repository and credentials:
22 | REPO=${GITHUB_API_URL}/repos/${{ github.repository }}
23 |
24 | # A shortcut to call GitHub API.
25 | ghapi() { curl --silent --location --user _:$GITHUB_TOKEN "$@"; }
26 |
27 | # A temporary file which receives HTTP response headers.
28 | TMPFILE=/tmp/tmp.$$
29 |
30 | # An associative array, key: artifact name, value: number of artifacts of that name.
31 | declare -A ARTCOUNT
32 |
33 | # Process all artifacts on this repository, loop on returned "pages".
34 | URL=$REPO/actions/artifacts
35 | while [[ -n "$URL" ]]; do
36 |
37 | # Get current page, get response headers in a temporary file.
38 | JSON=$(ghapi --dump-header $TMPFILE "$URL")
39 |
40 | # Get URL of next page. Will be empty if we are at the last page.
41 | URL=$(grep '^Link:' "$TMPFILE" | tr ',' '\n' | grep 'rel="next"' | head -1 | sed -e 's/.*<//' -e 's/>.*//')
42 | rm -f $TMPFILE
43 |
44 | # Number of artifacts on this page:
45 | COUNT=$(( $(jq <<<$JSON -r '.artifacts | length') ))
46 |
47 | # Loop on all artifacts on this page.
48 | for ((i=0; $i < $COUNT; i++)); do
49 |
50 | # Get name of artifact and count instances of this name.
51 | name=$(jq <<<$JSON -r ".artifacts[$i].name?")
52 | ARTCOUNT[$name]=$(( $(( ${ARTCOUNT[$name]} )) + 1))
53 |
54 | id=$(jq <<<$JSON -r ".artifacts[$i].id?")
55 | size=$(( $(jq <<<$JSON -r ".artifacts[$i].size_in_bytes?") ))
56 | printf "Deleting '%s' #%d, %'d bytes\n" $name ${ARTCOUNT[$name]} $size
57 | ghapi -X DELETE $REPO/actions/artifacts/$id
58 | done
59 | done
60 |
--------------------------------------------------------------------------------
/dropwizard/src/main/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardMetrics.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import fr.davit.akka.http.metrics.core.{Counter, Dimension, Gauge, Histogram, Timer}
20 | import com.codahale.metrics.MetricRegistry
21 |
22 | import scala.concurrent.duration.FiniteDuration
23 |
24 | abstract class DropwizardMetrics(namespace: String, name: String) {
25 | protected lazy val metricName: String = MetricRegistry.name(namespace, name)
26 | }
27 |
28 | class DropwizardCounter(namespace: String, name: String)(implicit registry: MetricRegistry)
29 | extends DropwizardMetrics(namespace, name)
30 | with Counter {
31 |
32 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
33 | registry.counter(metricName).inc()
34 | }
35 | }
36 |
37 | class DropwizardGauge(namespace: String, name: String)(implicit registry: MetricRegistry)
38 | extends DropwizardMetrics(namespace, name)
39 | with Gauge {
40 |
41 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
42 | registry.counter(metricName).inc()
43 | }
44 |
45 | override def dec(dimensions: Seq[Dimension] = Seq.empty): Unit = {
46 | registry.counter(metricName).dec()
47 | }
48 | }
49 |
50 | class DropwizardTimer(namespace: String, name: String)(implicit registry: MetricRegistry)
51 | extends DropwizardMetrics(namespace, name)
52 | with Timer {
53 |
54 | override def observe(duration: FiniteDuration, dimensions: Seq[Dimension] = Seq.empty): Unit = {
55 | registry.timer(metricName).update(duration.length, duration.unit)
56 | }
57 | }
58 |
59 | class DropwizardHistogram(namespace: String, name: String)(implicit registry: MetricRegistry)
60 | extends DropwizardMetrics(namespace, name)
61 | with Histogram {
62 |
63 | override def update[T](value: T, dimensions: Seq[Dimension] = Seq.empty)(implicit numeric: Numeric[T]): Unit = {
64 | registry.histogram(metricName).update(numeric.toLong(value))
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
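Note: a usage sketch of the metric wrappers above with an implicit MetricRegistry in scope; the namespace and metric names are illustrative. Dimensions are accepted but, as shown above, ignored by the Dropwizard implementations.

import com.codahale.metrics.MetricRegistry
import fr.davit.akka.http.metrics.dropwizard.{DropwizardCounter, DropwizardTimer}

import scala.concurrent.duration._

object DropwizardMetricsExample {
  implicit val registry: MetricRegistry = new MetricRegistry()

  // Metric names are built with MetricRegistry.name, e.g. "my.app.requests".
  val requests: DropwizardCounter = new DropwizardCounter("my.app", "requests")
  val latency: DropwizardTimer    = new DropwizardTimer("my.app", "responses.duration")

  def handle(): Unit = {
    requests.inc()
    latency.observe(150.millis)
  }
}
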
/prometheus/src/main/scala/fr/davit/akka/http/metrics/prometheus/PrometheusMetrics.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.prometheus
18 |
19 | import fr.davit.akka.http.metrics.core._
20 |
21 | import scala.concurrent.duration.FiniteDuration
22 |
23 | class PrometheusCounter(counter: io.prometheus.client.Counter) extends Counter {
24 |
25 | override def inc(dimensions: Seq[Dimension]): Unit = {
26 | counter.labels(dimensions.map(_.label): _*).inc()
27 | }
28 | }
29 |
30 | class PrometheusGauge(gauge: io.prometheus.client.Gauge) extends Gauge {
31 |
32 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
33 | gauge.labels(dimensions.map(_.label): _*).inc()
34 | }
35 |
36 | override def dec(dimensions: Seq[Dimension] = Seq.empty): Unit = {
37 | gauge.labels(dimensions.map(_.label): _*).dec()
38 | }
39 | }
40 |
41 | class PrometheusSummaryTimer(summary: io.prometheus.client.Summary) extends Timer {
42 |
43 | override def observe(duration: FiniteDuration, dimensions: Seq[Dimension] = Seq.empty): Unit = {
44 | summary.labels(dimensions.map(_.label): _*).observe(duration.toMillis.toDouble / 1000.0)
45 | }
46 | }
47 |
48 | class PrometheusHistogramTimer(summary: io.prometheus.client.Histogram) extends Timer {
49 |
50 | override def observe(duration: FiniteDuration, dimensions: Seq[Dimension] = Seq.empty): Unit = {
51 | summary.labels(dimensions.map(_.label): _*).observe(duration.toMillis.toDouble / 1000.0)
52 | }
53 | }
54 |
55 | class PrometheusSummary(summary: io.prometheus.client.Summary) extends Histogram {
56 |
57 | override def update[T](value: T, dimensions: Seq[Dimension] = Seq.empty)(implicit numeric: Numeric[T]): Unit = {
58 | summary.labels(dimensions.map(_.label): _*).observe(numeric.toDouble(value))
59 | }
60 | }
61 |
62 | class PrometheusHistogram(histogram: io.prometheus.client.Histogram) extends Histogram {
63 |
64 | override def update[T](value: T, dimensions: Seq[Dimension] = Seq.empty)(implicit numeric: Numeric[T]): Unit = {
65 | histogram.labels(dimensions.map(_.label): _*).observe(numeric.toDouble(value))
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/core/src/main/scala/fr/davit/akka/http/metrics/core/HttpMessageLabeler.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.http.scaladsl.model._
20 | import fr.davit.akka.http.metrics.core.HttpMessageLabeler.Unlabelled
21 |
22 | object HttpMessageLabeler {
23 | val Unlabelled = "unlabelled"
24 | }
25 |
 26 | /** Creates the labels for a given HTTP dimension. See [[HttpRequestLabeler]] or [[HttpResponseLabeler]].
 27 |   */
28 | sealed trait HttpMessageLabeler {
29 |
30 | /** The dimension name */
31 | def name: String
32 | }
33 |
34 | trait HttpRequestLabeler extends HttpMessageLabeler {
35 |
36 | /** The label for the request */
37 | def label(request: HttpRequest): String
38 |
 39 |   /** The metric [[Dimension]] for the request */
40 | def dimension(request: HttpRequest): Dimension = Dimension(name, label(request))
41 | }
42 | trait HttpResponseLabeler extends HttpMessageLabeler {
43 |
 44 |   /** The label for the response */
 45 |   def label(response: HttpResponse): String
 46 | 
 47 |   /** The metric [[Dimension]] for the response */
48 | def dimension(response: HttpResponse): Dimension = Dimension(name, label(response))
49 | }
50 |
51 | object MethodLabeler extends HttpRequestLabeler {
52 | override def name = "method"
53 | override def label(request: HttpRequest): String = request.method.value
54 | }
55 |
56 | object StatusGroupLabeler extends HttpResponseLabeler {
57 | override def name = "status"
58 | override def label(response: HttpResponse): String = response.status match {
59 | case _: StatusCodes.Success => "2xx"
60 | case _: StatusCodes.Redirection => "3xx"
61 | case _: StatusCodes.ClientError => "4xx"
62 | case _: StatusCodes.ServerError => "5xx"
63 | case _ => "other"
64 | }
65 | }
66 |
67 | trait AttributeLabeler extends HttpResponseLabeler {
68 | lazy val key: AttributeKey[String] = AttributeKey(s"metrics-$name-label")
69 | override def label(response: HttpResponse): String = response.attribute(key).getOrElse(Unlabelled)
70 | }
71 |
72 | object PathLabeler extends AttributeLabeler {
73 | override def name: String = "path"
74 | }
75 |
--------------------------------------------------------------------------------
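
An illustrative extension (hypothetical names, not from the sources) of the labeler traits above:

    import akka.http.scaladsl.model.HttpRequest
    import fr.davit.akka.http.metrics.core.{AttributeLabeler, HttpRequestLabeler}

    // derives a "protocol" dimension directly from the incoming request
    object ProtocolLabeler extends HttpRequestLabeler {
      override def name: String                        = "protocol"
      override def label(request: HttpRequest): String = request.protocol.value
    }

    // a "customer" dimension whose value is attached per response via the attribute key
    object CustomerLabeler extends AttributeLabeler {
      override def name: String = "customer"
    }
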
/dropwizard/src/main/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardRegistry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import fr.davit.akka.http.metrics.core.{HttpMetricsSettings, _}
20 | import com.codahale.metrics.MetricRegistry
21 | import com.typesafe.scalalogging.LazyLogging
22 |
23 | object DropwizardRegistry {
24 |
25 | def apply(
26 | registry: MetricRegistry = new MetricRegistry(),
27 | settings: HttpMetricsSettings = DropwizardSettings.default
28 | ): DropwizardRegistry = {
29 | new DropwizardRegistry(settings)(registry)
30 | }
31 | }
32 |
33 | class DropwizardRegistry(settings: HttpMetricsSettings)(implicit val underlying: MetricRegistry)
34 | extends HttpMetricsRegistry(settings)
35 | with LazyLogging {
36 |
37 | if (
38 | settings.serverDimensions.nonEmpty ||
39 | settings.includeMethodDimension ||
40 | settings.includePathDimension ||
41 | settings.includeStatusDimension
42 | ) {
 43 |     logger.warn("Dropwizard metrics do not support labels. All metric dimensions will be ignored")
44 | }
45 |
46 | lazy val requests: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.requests)
47 | lazy val requestsActive: Gauge = new DropwizardGauge(settings.namespace, settings.metricsNames.requestsActive)
48 | lazy val requestsFailures: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.requestsFailures)
49 | lazy val requestsSize: Histogram = new DropwizardHistogram(settings.namespace, settings.metricsNames.requestsSize)
50 | lazy val responses: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.responses)
51 | lazy val responsesErrors: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.responsesErrors)
52 | lazy val responsesDuration: Timer = new DropwizardTimer(settings.namespace, settings.metricsNames.responsesDuration)
53 | lazy val responsesSize: Histogram = new DropwizardHistogram(settings.namespace, settings.metricsNames.responsesSize)
54 | lazy val connections: Counter = new DropwizardCounter(settings.namespace, settings.metricsNames.connections)
55 | lazy val connectionsActive: Gauge = new DropwizardGauge(settings.namespace, settings.metricsNames.connectionsActive)
56 | }
57 |
--------------------------------------------------------------------------------
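
A wiring sketch (assuming an ActorSystem and a Route are available in the calling code), mirroring the integration spec further down:

    import akka.actor.ActorSystem
    import akka.http.scaladsl.Http
    import akka.http.scaladsl.server.Route
    import com.codahale.metrics.MetricRegistry
    import fr.davit.akka.http.metrics.core.HttpMetrics._
    import fr.davit.akka.http.metrics.dropwizard.{DropwizardRegistry, DropwizardSettings}

    import scala.concurrent.Future

    object DropwizardServerExample {
      // binds the routes with metering enabled; the metrics land in the shared MetricRegistry
      def start(routes: Route)(implicit system: ActorSystem): Future[Http.ServerBinding] = {
        val registry = DropwizardRegistry(new MetricRegistry(), DropwizardSettings.default)
        Http().newMeteredServerAt("localhost", 8080, registry).bindFlow(routes)
      }
    }
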
/datadog/src/main/scala/fr/davit/akka/http/metrics/datadog/StatsDMetrics.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.datadog
18 |
19 | import com.timgroup.statsd.StatsDClient
20 | import fr.davit.akka.http.metrics.core.{Counter, Dimension, Gauge, Histogram, Timer}
21 |
22 | import scala.concurrent.duration.FiniteDuration
23 |
24 | object StatsDMetrics {
25 | def dimensionToTag(dimension: Dimension): String = s"${dimension.name}:${dimension.label}"
26 | }
27 |
28 | abstract class StatsDMetrics(namespace: String, name: String) {
29 | protected lazy val metricName: String = s"$namespace.$name"
30 | }
31 |
32 | class StatsDCounter(namespace: String, name: String)(implicit client: StatsDClient)
 33 |     extends StatsDMetrics(namespace, name)
34 | with Counter {
35 |
36 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
37 | client.increment(metricName, dimensions.map(StatsDMetrics.dimensionToTag): _*)
38 | }
39 | }
40 |
41 | class StatsDGauge(namespace: String, name: String)(implicit client: StatsDClient)
 42 |     extends StatsDMetrics(namespace, name)
43 | with Gauge {
44 |
45 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
46 | client.increment(metricName, dimensions.map(StatsDMetrics.dimensionToTag): _*)
47 | }
48 |
49 | override def dec(dimensions: Seq[Dimension] = Seq.empty): Unit = {
50 | client.decrement(metricName, dimensions.map(StatsDMetrics.dimensionToTag): _*)
51 | }
52 | }
53 |
54 | class StatsDTimer(namespace: String, name: String)(implicit client: StatsDClient)
 55 |     extends StatsDMetrics(namespace, name)
56 | with Timer {
57 |
58 | override def observe(duration: FiniteDuration, dimensions: Seq[Dimension] = Seq.empty): Unit = {
59 | client.distribution(metricName, duration.toMillis, dimensions.map(StatsDMetrics.dimensionToTag): _*)
60 | }
61 | }
62 |
63 | class StatsDHistogram(namespace: String, name: String)(implicit client: StatsDClient)
 64 |     extends StatsDMetrics(namespace, name)
65 | with Histogram {
66 |
67 | override def update[T](value: T, dimensions: Seq[Dimension] = Seq.empty)(implicit numeric: Numeric[T]): Unit = {
68 | client.distribution(metricName, numeric.toDouble(value), dimensions.map(StatsDMetrics.dimensionToTag): _*)
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
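
Illustrative usage (construction of the StatsDClient is omitted; any client from java-dogstatsd-client works). Metrics are published under "namespace.name" and dimensions become datadog-style "name:label" tags:

    import com.timgroup.statsd.StatsDClient
    import fr.davit.akka.http.metrics.core.Dimension
    import fr.davit.akka.http.metrics.datadog.{StatsDCounter, StatsDMetrics}

    object StatsDExample {
      def record()(implicit client: StatsDClient): Unit = {
        val responses = new StatsDCounter("akka.http", "responses") // published as "akka.http.responses"
        responses.inc(Seq(Dimension("status", "2xx")))              // sent with tag "status:2xx"
      }

      // the helper used by every adapter to encode a dimension
      val tag: String = StatsDMetrics.dimensionToTag(Dimension("path", "/api")) // "path:/api"
    }
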
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## Unreleased
4 |
5 | ## v1.7.1 (2022-06-07)
6 |
7 | - Fix prometheus dimension ordering
8 |
9 | ## v1.7.0 (2022-04-11)
10 |
11 | - Update dependencies
12 | - Add support for custom dimensions
  13 | - Change from sequence to varargs API in settings
14 |
15 | ## v1.6.0 (2021-05-07)
16 |
17 | - Update dependencies
  18 | - Throw explicit exception when trace-id is not found by the MeterStage
19 | - Seal and convert routes internally
20 | - Move HttpMetricsServerBuilder to http.metrics.core package
21 | - Remove deprecated HttpMetricsRoute
22 |
23 | ## v1.5.1 (2021-02-16)
24 |
25 | - Update dependencies
26 | - Add API to create HttpMetricsSettings
27 |
28 | ## v1.5.0 (2021-01-12)
29 |
30 | - Update dependencies
  31 | - Split dropwizard into v4 and v5, with v4 as the default
32 | - Fix bug when HttpEntity.Default is used with HTTP/1.0 protocol
33 |
34 | ## 1.4.1 (2020-12-14)
35 |
36 | - Fix regression due to automatic transformation of entity into streams
37 | - Fix regression in meterFunction APIs
38 |
39 | ## 1.4.0 (2020-12-12)
40 |
41 | - Split HttpMetricsHandler API with separated callbacks
42 | - Add requests failures counter for unserved requests
43 | - Compute sizes and durations metrics on end of entity stream
44 | - Remove deprecated API
45 |
46 | ## 1.3.0 (2020-11-09)
47 |
48 | - Fix Metrics BidiFlow closing
  49 | - Add support for custom server-level dimensions
50 |
51 | ## 1.2.0 (2020-08-29)
52 |
53 | - Update to akka-http 10.2
54 | - Update libraries
55 | - Add metrics names to settings
56 | - Streamline metrics, name and doc
57 |
58 | ## 1.1.1 (2020-06-10)
59 |
60 | - Update libraries
61 | - Fix implicit execution context regression
62 |
63 | ## 1.1.0 (2020-04-18)
64 |
65 | - Fix implicits for HTTP/2 API
66 | - Explicit path labelling
67 |
68 | ## 1.0.0 (2020-03-14)
69 |
70 | - Update libraries
71 | - Decorrelate routes from registry
72 | - Add namespace setting
73 | - Add histogram config for prometheus registry
74 | - Add method dimension
75 |
76 | ## 0.6.0 (2019-08-25)
77 |
  78 | - Use static path dimension for unhandled requests
79 | - Add graphite carbon support
80 |
81 | ## 0.5.0 (2019-07-27)
82 |
83 | - Update to akka-http 10.1.9
84 | - Add path label support
85 | - Add async handler api
86 |
87 | ## 0.4.0 (2019-07-06)
88 |
89 | - Add scala 2.13 support
90 | - Add TCP connection metrics
91 |
92 | ## 0.3.0 (2019-04-12)
93 |
94 | - Use status group dimension on responses and duration
95 | - Change response metric names for more consistency
96 |
97 | ## 0.2.1 (2019-03-23)
98 |
99 | - Fix prometheus time conversion [#4](https://github.com/RustedBones/akka-http-metrics/issues/4)
100 |
101 | ## 0.2.0 (2018-12-28)
102 |
103 | - Initial release
104 |
105 | ## [Deprecated] [akka-http-prometheus](https://github.com/RustedBones/akka-http-prometheus)
106 |
 107 | See the original project's [changelog](https://github.com/RustedBones/akka-http-prometheus/blob/master/CHANGELOG.md)
--------------------------------------------------------------------------------
/project/Dependencies.scala:
--------------------------------------------------------------------------------
1 | import sbt._
2 |
3 | object Dependencies {
4 |
5 | object Versions {
6 | val Akka = "2.6.20"
7 | val AkkaHttp = "10.2.10"
8 | val Datadog = "4.1.0"
9 | val Dropwizard = "4.2.14"
10 | val DropwizardV5 = "5.0.0"
11 | val Enumeratum = "1.7.0"
12 | val Logback = "1.2.11"
13 | val Prometheus = "0.16.0"
14 | val ScalaCollectionCompat = "2.9.0"
15 | val ScalaLogging = "3.9.4"
16 | val ScalaMock = "5.2.0"
17 | val ScalaTest = "3.2.14"
18 | }
19 |
20 | val AkkaHttp = "com.typesafe.akka" %% "akka-http" % Versions.AkkaHttp
21 | val Datadog = "com.datadoghq" % "java-dogstatsd-client" % Versions.Datadog
22 | val DropwizardCore = "io.dropwizard.metrics" % "metrics-core" % Versions.Dropwizard
23 | val DropwizardJson = "io.dropwizard.metrics" % "metrics-json" % Versions.Dropwizard
24 | val DropwizardV5Core = "io.dropwizard.metrics5" % "metrics-core" % Versions.DropwizardV5
25 | val DropwizardV5Json = "io.dropwizard.metrics5" % "metrics-json" % Versions.DropwizardV5
26 | val Enumeratum = "com.beachape" %% "enumeratum" % Versions.Enumeratum
27 | val PrometheusCommon = "io.prometheus" % "simpleclient_common" % Versions.Prometheus
28 | val ScalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % Versions.ScalaLogging
29 |
30 | object Provided {
31 | val AkkaStream = "com.typesafe.akka" %% "akka-stream" % Versions.Akka % "provided"
32 | }
33 |
34 | object Test {
35 | val AkkaHttpJson = "com.typesafe.akka" %% "akka-http-spray-json" % Versions.AkkaHttp % "it,test"
36 | val AkkaHttpTestkit = "com.typesafe.akka" %% "akka-http-testkit" % Versions.AkkaHttp % "it,test"
37 | val AkkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % Versions.Akka % "it,test"
38 | val AkkaStreamTestkit = "com.typesafe.akka" %% "akka-stream-testkit" % Versions.Akka % "it,test"
39 | val AkkaTestkit = "com.typesafe.akka" %% "akka-testkit" % Versions.Akka % "it,test"
40 | val DropwizardJvm = "io.dropwizard.metrics" % "metrics-jvm" % Versions.Dropwizard % "it,test"
41 | val DropwizardV5Jvm = "io.dropwizard.metrics5" % "metrics-jvm" % Versions.DropwizardV5 % "it,test"
42 | val Logback = "ch.qos.logback" % "logback-classic" % Versions.Logback % "it,test"
43 | val PrometheusHotspot = "io.prometheus" % "simpleclient_hotspot" % Versions.Prometheus % "it,test"
44 |
45 | val ScalaCollectionCompat =
46 | "org.scala-lang.modules" %% "scala-collection-compat" % Versions.ScalaCollectionCompat % "it,test"
47 | val ScalaMock = "org.scalamock" %% "scalamock" % Versions.ScalaMock % "it,test"
48 | val ScalaTest = "org.scalatest" %% "scalatest" % Versions.ScalaTest % "it,test"
49 | }
50 |
51 | }
52 |
--------------------------------------------------------------------------------
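
A hypothetical build.sbt excerpt (module and dependency selections are assumed, not taken from the sources) consuming the constants above:

    // build.sbt (hypothetical module definition)
    lazy val prometheus = project
      .settings(
        libraryDependencies ++= Seq(
          Dependencies.AkkaHttp,
          Dependencies.PrometheusCommon,
          Dependencies.Provided.AkkaStream,
          Dependencies.Test.ScalaTest
        )
      )
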
/dropwizard/src/test/scala/fr/davit/akka/http/metrics/dropwizard/marshalling/DropwizardMarshallersSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard.marshalling
18 |
19 | import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
20 | import akka.http.scaladsl.testkit.ScalatestRouteTest
21 | import fr.davit.akka.http.metrics.core.Dimension
22 | import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives._
23 | import fr.davit.akka.http.metrics.dropwizard.DropwizardRegistry
24 | import org.scalatest.BeforeAndAfterAll
25 | import org.scalatest.flatspec.AnyFlatSpec
26 | import org.scalatest.matchers.should.Matchers
27 | import spray.json.{DefaultJsonProtocol, JsValue}
28 |
29 | import scala.concurrent.duration._
30 |
31 | class DropwizardMarshallersSpec extends AnyFlatSpec with Matchers with ScalatestRouteTest with BeforeAndAfterAll {
32 |
33 | private case class JsonResponse(metrics: Map[String, JsValue])
34 |
35 | private trait Fixture extends SprayJsonSupport with DefaultJsonProtocol with DropwizardMarshallers {
36 | implicit val metricsFormat = jsonFormat1(JsonResponse)
37 |
38 | val registry = DropwizardRegistry()
39 | registry.underlying.counter("other.metric")
40 | }
41 |
42 | override def afterAll(): Unit = {
43 | cleanUp()
44 | super.afterAll()
45 | }
46 |
47 | "DropwizardMarshallers" should "expose metrics as json format" in new Fixture {
48 | // use metrics so they appear in the report
49 | val dimensions = Seq(Dimension("status", "2xx"))
50 | registry.requests.inc()
51 | registry.requestsActive.inc()
52 | registry.requestsSize.update(10)
53 | registry.responses.inc(dimensions)
54 | registry.responsesErrors.inc()
55 | registry.responsesDuration.observe(1.second, dimensions)
56 | registry.responsesSize.update(10)
57 | registry.connections.inc()
58 | registry.connectionsActive.inc()
59 |
60 | Get() ~> metrics(registry) ~> check {
61 | val json = responseAs[JsonResponse]
62 | // println(json)
63 | json.metrics.keys should contain theSameElementsAs Seq(
64 | "akka.http.requests",
65 | "akka.http.requests.active",
66 | "akka.http.requests.bytes",
67 | "akka.http.responses",
68 | "akka.http.responses.errors",
69 | "akka.http.responses.duration",
70 | "akka.http.responses.bytes",
71 | "akka.http.connections",
72 | "akka.http.connections.active",
73 | "other.metric"
74 | ).toSet
75 | }
76 | }
77 |
78 | }
79 |
--------------------------------------------------------------------------------
/dropwizard-v5/src/test/scala/fr/davit/akka/http/metrics/dropwizard/marshalling/DropwizardMarshallersSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard.marshalling
18 |
19 | import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
20 | import akka.http.scaladsl.testkit.ScalatestRouteTest
21 | import fr.davit.akka.http.metrics.core.Dimension
22 | import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives._
23 | import fr.davit.akka.http.metrics.dropwizard.DropwizardRegistry
24 | import org.scalatest.BeforeAndAfterAll
25 | import org.scalatest.flatspec.AnyFlatSpec
26 | import org.scalatest.matchers.should.Matchers
27 | import spray.json.{DefaultJsonProtocol, JsValue}
28 |
29 | import scala.concurrent.duration._
30 |
31 | class DropwizardMarshallersSpec extends AnyFlatSpec with Matchers with ScalatestRouteTest with BeforeAndAfterAll {
32 |
33 | private case class JsonResponse(metrics: Map[String, JsValue])
34 |
35 | private trait Fixture extends SprayJsonSupport with DefaultJsonProtocol with DropwizardMarshallers {
36 | implicit val metricsFormat = jsonFormat1(JsonResponse)
37 |
38 | val registry = DropwizardRegistry()
39 | registry.underlying.counter("other.metric")
40 | }
41 |
42 | override def afterAll(): Unit = {
43 | cleanUp()
44 | super.afterAll()
45 | }
46 |
47 | "DropwizardMarshallers" should "expose metrics as json format" in new Fixture {
48 | // use metrics so they appear in the report
49 | val dimensions = Seq(Dimension("status", "2xx"))
50 | registry.requests.inc()
51 | registry.requestsActive.inc()
52 | registry.requestsSize.update(10)
53 | registry.responses.inc(dimensions)
54 | registry.responsesErrors.inc()
55 | registry.responsesDuration.observe(1.second, dimensions)
56 | registry.responsesSize.update(10)
57 | registry.connections.inc()
58 | registry.connectionsActive.inc()
59 |
60 | Get() ~> metrics(registry) ~> check {
61 | val json = responseAs[JsonResponse]
62 | // println(json)
63 | json.metrics.keys should contain theSameElementsAs Seq(
64 | "akka.http.requests",
65 | "akka.http.requests.active",
66 | "akka.http.requests.bytes",
67 | "akka.http.responses{status=2xx}",
68 | "akka.http.responses.errors",
69 | "akka.http.responses.duration{status=2xx}",
70 | "akka.http.responses.bytes",
71 | "akka.http.connections",
72 | "akka.http.connections.active",
73 | "other.metric"
74 | ).toSet
75 | }
76 | }
77 |
78 | }
79 |
--------------------------------------------------------------------------------
/dropwizard-v5/src/main/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardMetrics.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import fr.davit.akka.http.metrics.core.{Counter, Dimension, Gauge, Histogram, Timer}
20 | import io.dropwizard.metrics5.{MetricName, MetricRegistry}
21 |
22 | import scala.concurrent.duration.FiniteDuration
23 |
24 | object DropwizardMetrics {
25 |
26 | implicit class RichMetricsName(val metricName: MetricName) extends AnyVal {
27 |
28 | def tagged(dimensions: Seq[Dimension]): MetricName =
29 | metricName.tagged(dimensions.flatMap(d => Seq(d.name, d.label)): _*)
30 |
31 | }
32 | }
33 |
34 | abstract class DropwizardMetrics(namespace: String, name: String) {
35 | protected lazy val metricName: MetricName = MetricName.build(namespace, name)
36 | }
37 |
38 | class DropwizardCounter(namespace: String, name: String)(implicit registry: MetricRegistry)
39 | extends DropwizardMetrics(namespace, name)
40 | with Counter {
41 |
42 | import DropwizardMetrics._
43 |
44 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
45 | registry.counter(metricName.tagged(dimensions)).inc()
46 | }
47 | }
48 |
49 | class DropwizardGauge(namespace: String, name: String)(implicit registry: MetricRegistry)
50 | extends DropwizardMetrics(namespace, name)
51 | with Gauge {
52 |
53 | import DropwizardMetrics._
54 |
55 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
56 | registry.counter(metricName.tagged(dimensions)).inc()
57 | }
58 |
59 | override def dec(dimensions: Seq[Dimension] = Seq.empty): Unit = {
60 | registry.counter(metricName.tagged(dimensions)).dec()
61 | }
62 | }
63 |
64 | class DropwizardTimer(namespace: String, name: String)(implicit registry: MetricRegistry)
65 | extends DropwizardMetrics(namespace, name)
66 | with Timer {
67 |
68 | import DropwizardMetrics._
69 |
70 | override def observe(duration: FiniteDuration, dimensions: Seq[Dimension] = Seq.empty): Unit = {
71 | registry.timer(metricName.tagged(dimensions)).update(duration.length, duration.unit)
72 | }
73 | }
74 |
75 | class DropwizardHistogram(namespace: String, name: String)(implicit registry: MetricRegistry)
76 | extends DropwizardMetrics(namespace, name)
77 | with Histogram {
78 |
79 | import DropwizardMetrics._
80 |
81 | override def update[T](value: T, dimensions: Seq[Dimension] = Seq.empty)(implicit numeric: Numeric[T]): Unit = {
82 | registry.histogram(metricName.tagged(dimensions)).update(numeric.toLong(value))
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
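
A small illustration (not from the sources) of how the RichMetricsName helper above flattens dimensions into metrics5 tags:

    import fr.davit.akka.http.metrics.core.Dimension
    import fr.davit.akka.http.metrics.dropwizard.DropwizardMetrics._
    import io.dropwizard.metrics5.MetricName

    object MetricNameExample extends App {
      val base   = MetricName.build("akka.http", "responses")
      val tagged = base.tagged(Seq(Dimension("status", "2xx"), Dimension("path", "/api")))
      // equivalent to base.tagged("status", "2xx", "path", "/api")
      println(tagged)
    }
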
/graphite/src/main/scala/fr/davit/akka/http/metrics/graphite/CarbonClient.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.graphite
18 |
19 | import java.time.{Clock, Instant}
20 |
21 | import akka.NotUsed
22 | import akka.actor.ActorSystem
23 | import akka.event.Logging
24 | import akka.stream.scaladsl.{Flow, Keep, RestartFlow, Sink, Source, Tcp}
25 | import akka.stream.{OverflowStrategy, QueueOfferResult, RestartSettings}
26 | import akka.util.ByteString
27 | import fr.davit.akka.http.metrics.core.Dimension
28 |
29 | import scala.concurrent.Await
30 | import scala.concurrent.duration.{Duration, _}
31 |
32 | object CarbonClient {
33 |
34 | def apply(host: String, port: Int)(implicit system: ActorSystem): CarbonClient = new CarbonClient(host, port)
35 | }
36 |
37 | class CarbonClient(host: String, port: Int)(implicit system: ActorSystem) extends AutoCloseable {
38 |
39 | private val logger = Logging(system.eventStream, classOf[CarbonClient])
40 | protected val clock: Clock = Clock.systemUTC()
41 |
42 | private def serialize[T](name: String, value: T, dimensions: Seq[Dimension], ts: Instant): ByteString = {
43 | val tags = dimensions.map(d => d.name + "=" + d.label).toList
44 | val taggedMetric = (name :: tags).mkString(";")
45 | ByteString(s"$taggedMetric $value ${ts.getEpochSecond}\n")
46 | }
47 |
48 | private def connection: Flow[ByteString, ByteString, NotUsed] = {
49 | // TODO read from config
50 | val settings = RestartSettings(
51 | minBackoff = 3.seconds,
52 | maxBackoff = 30.seconds,
53 | randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly
54 | )
55 | RestartFlow.withBackoff(settings)(() => Tcp().outgoingConnection(host, port))
56 | }
57 |
58 | private val queue = Source
59 | .queue[ByteString](19, OverflowStrategy.dropHead)
60 | .via(connection)
61 | .toMat(Sink.ignore)(Keep.left)
62 | .run()
63 |
64 | def publish[T](
65 | name: String,
66 | value: T,
67 | dimensions: Seq[Dimension] = Seq.empty,
68 | ts: Instant = Instant
69 | .now(clock)
70 | ): Unit = {
  71 |     // it's reasonable to block until the message is enqueued
72 | Await.result(queue.offer(serialize(name, value, dimensions, ts)), Duration.Inf) match {
73 | case QueueOfferResult.Enqueued => logger.debug("Metric {} enqueued", name)
74 | case QueueOfferResult.Dropped => logger.debug("Metric {} dropped", name)
75 | case QueueOfferResult.Failure(e) => logger.error(e, s"Failed publishing metric $name")
76 | case QueueOfferResult.QueueClosed => throw new Exception("Failed publishing metric to closed carbon client")
77 | }
78 | }
79 |
80 | override def close(): Unit = {
81 | queue.complete()
82 | Await.result(queue.watchCompletion(), Duration.Inf)
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
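
A publishing sketch (host and port are illustrative; 2003 is carbon's conventional plaintext port):

    import akka.actor.ActorSystem
    import fr.davit.akka.http.metrics.core.Dimension
    import fr.davit.akka.http.metrics.graphite.CarbonClient

    object CarbonExample extends App {
      implicit val system: ActorSystem = ActorSystem("carbon-example")

      val client = CarbonClient("localhost", 2003)
      // serialized as "akka.http.requests;method=GET 1 <epoch-seconds>"
      client.publish("akka.http.requests", 1, Seq(Dimension("method", "GET")))
      client.close()
      system.terminate()
    }
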
/core/src/main/scala/fr/davit/akka/http/metrics/core/scaladsl/server/HttpMetricsDirectives.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core.scaladsl.server
18 |
19 | import akka.http.scaladsl.marshalling.ToEntityMarshaller
20 | import akka.http.scaladsl.server.Directives._
21 | import akka.http.scaladsl.server.PathMatcher.{Matched, Unmatched}
22 | import akka.http.scaladsl.server.directives.BasicDirectives.{mapRequestContext, tprovide}
23 | import akka.http.scaladsl.server.directives.RouteDirectives.reject
24 | import akka.http.scaladsl.server.util.Tuple
25 | import akka.http.scaladsl.server.{Directive, Directive0, PathMatcher, StandardRoute}
26 | import fr.davit.akka.http.metrics.core.{AttributeLabeler, HttpMetricsRegistry, PathLabeler}
27 |
28 | trait HttpMetricsDirectives {
29 |
30 | def metrics[T <: HttpMetricsRegistry: ToEntityMarshaller](registry: T): StandardRoute = complete(registry)
31 |
32 | def metricsLabeled(labeler: AttributeLabeler, label: String): Directive0 =
33 | mapResponse(_.addAttribute(labeler.key, label))
34 |
35 | /////////////////////////////////////////////////////////////////////////////
36 | // path
37 | /////////////////////////////////////////////////////////////////////////////
38 | def pathLabeled[L](pm: PathMatcher[L]): Directive[L] =
39 | pathPrefixLabeled(pm ~ PathEnd)
40 |
41 | def pathLabeled[L](pm: PathMatcher[L], label: String): Directive[L] =
42 | pathPrefixLabeled(pm ~ PathEnd, label)
43 |
44 | def pathPrefixLabeled[L](pm: PathMatcher[L]): Directive[L] =
45 | rawPathPrefixLabeled(Slash ~ pm)
46 |
47 | def pathPrefixLabeled[L](pm: PathMatcher[L], label: String): Directive[L] =
48 | rawPathPrefixLabeled(Slash ~ pm, label)
49 |
50 | def rawPathPrefixLabeled[L](pm: PathMatcher[L]): Directive[L] =
51 | rawPathPrefixLabeled(pm, None)
52 |
53 | def rawPathPrefixLabeled[L](pm: PathMatcher[L], label: String): Directive[L] =
54 | rawPathPrefixLabeled(pm, Some(label))
55 |
56 | private def rawPathPrefixLabeled[L](pm: PathMatcher[L], label: Option[String]): Directive[L] = {
57 | implicit val LIsTuple: Tuple[L] = pm.ev
58 | extractRequestContext.flatMap { ctx =>
59 | val pathCandidate = ctx.unmatchedPath.toString
60 | pm(ctx.unmatchedPath) match {
61 | case Matched(rest, values) =>
62 | tprovide(values) & mapRequestContext(_ withUnmatchedPath rest) & mapResponse { response =>
63 | val suffix = response.attribute(PathLabeler.key).getOrElse("")
64 | val pathLabel = label match {
65 | case Some(l) => "/" + l + suffix // pm matches additional slash prefix
66 | case None => pathCandidate.substring(0, pathCandidate.length - rest.charCount) + suffix
67 | }
68 | response.addAttribute(PathLabeler.key, pathLabel)
69 | }
70 | case Unmatched =>
71 | reject
72 | }
73 | }
74 | }
75 | }
76 |
77 | object HttpMetricsDirectives extends HttpMetricsDirectives
78 |
--------------------------------------------------------------------------------
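
A route sketch (names are assumed for illustration) combining the labelling directives with the metrics endpoint, using the Prometheus registry and marshaller as an example backend:

    import akka.http.scaladsl.model.StatusCodes
    import akka.http.scaladsl.server.Directives._
    import akka.http.scaladsl.server.Route
    import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives._
    import fr.davit.akka.http.metrics.prometheus.{PrometheusRegistry, PrometheusSettings}
    import fr.davit.akka.http.metrics.prometheus.marshalling.PrometheusMarshallers._
    import io.prometheus.client.CollectorRegistry

    object RoutesExample {
      val registry: PrometheusRegistry = PrometheusRegistry(new CollectorRegistry(), PrometheusSettings.default)

      // responses from the user endpoint carry the path label "/api/user/:userId"
      val routes: Route =
        pathPrefixLabeled("api") {
          pathLabeled("user" / LongNumber, "user/:userId") { _ =>
            complete(StatusCodes.OK)
          }
        } ~
          path("metrics") {
            metrics(registry)
          }
    }
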
/core/src/test/scala/fr/davit/akka/http/metrics/core/scaladsl/server/HttpMetricsDirectivesSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core.scaladsl.server
18 |
19 | import akka.http.scaladsl.marshalling.PredefinedToEntityMarshallers._
20 | import akka.http.scaladsl.model.StatusCodes
21 | import akka.http.scaladsl.server.Directives._
22 | import akka.http.scaladsl.testkit.ScalatestRouteTest
23 | import fr.davit.akka.http.metrics.core.{AttributeLabeler, PathLabeler, TestRegistry}
24 | import org.scalatest.flatspec.AnyFlatSpec
25 | import org.scalatest.matchers.should.Matchers
26 |
27 | class HttpMetricsDirectivesSpec extends AnyFlatSpec with Matchers with ScalatestRouteTest {
28 |
29 | import HttpMetricsDirectives._
30 |
31 | "HttpMetricsDirectives" should "expose the registry" in {
32 | implicit val marshaller = StringMarshaller.compose[TestRegistry](r => s"active: ${r.requestsActive.value()}")
33 | val registry = new TestRegistry()
34 | registry.requestsActive.inc()
35 |
36 | val route = path("metrics") {
37 | metrics(registry)
38 | }
39 |
40 | Get("/metrics") ~> route ~> check {
41 | responseAs[String] shouldBe "active: 1"
42 | }
43 | }
44 |
45 | it should "put label on custom dimension" in {
46 | object CustomLabeler extends AttributeLabeler {
47 | def name = "dim"
48 | }
49 | val route = metricsLabeled(CustomLabeler, "label") {
50 | complete(StatusCodes.OK)
51 | }
52 |
53 | Get() ~> route ~> check {
54 | response.attribute(CustomLabeler.key) shouldBe Some("label")
55 | }
56 | }
57 |
58 | it should "put label on path" in {
59 | val route = pathPrefixLabeled("api") {
60 | pathPrefix("user" / LongNumber) { _ =>
61 | path("address") {
62 | complete(StatusCodes.OK)
63 | }
64 | }
65 | }
66 |
67 | Get("/api/user/1234/address") ~> route ~> check {
68 | response.attribute(PathLabeler.key) shouldBe Some("/api")
69 | }
70 | }
71 |
72 | it should "combine labelled segments" in {
73 | val route = pathPrefixLabeled("api") {
74 | pathPrefixLabeled("user" / LongNumber, "user/:userId") { _ =>
75 | pathLabeled("address") {
76 | complete(StatusCodes.OK)
77 | }
78 | }
79 | }
80 |
81 | Get("/api/user/1234/address") ~> route ~> check {
82 | response.attribute(PathLabeler.key) shouldBe Some("/api/user/:userId/address")
83 | }
84 | }
85 |
86 | it should "not add extra attribute when label directives are not used" in {
87 | val route = pathPrefix("api") {
88 | pathPrefix("user" / LongNumber) { _ =>
89 | path("address") {
90 | complete(StatusCodes.OK)
91 | }
92 | }
93 | }
94 |
95 | Get("/api/user/1234/address") ~> route ~> check {
96 | response.attribute(PathLabeler.key) shouldBe empty
97 | }
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/core/src/main/scala/fr/davit/akka/http/metrics/core/HttpMetricsNames.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | trait HttpMetricsNames {
20 | def requests: String
21 | def requestsActive: String
22 | def requestsFailures: String
23 | def requestsSize: String
24 | def responses: String
25 | def responsesErrors: String
26 | def responsesDuration: String
27 | def responsesSize: String
28 | def connections: String
29 | def connectionsActive: String
30 |
31 | def withRequests(name: String): HttpMetricsNames
32 | def withRequestsActive(name: String): HttpMetricsNames
33 | def withRequestsFailures(name: String): HttpMetricsNames
34 | def withRequestSize(name: String): HttpMetricsNames
35 | def withResponses(name: String): HttpMetricsNames
36 | def withResponsesErrors(name: String): HttpMetricsNames
37 | def withResponsesDuration(name: String): HttpMetricsNames
38 | def withResponseSize(name: String): HttpMetricsNames
39 | def withConnections(name: String): HttpMetricsNames
40 | def withConnectionsActive(name: String): HttpMetricsNames
41 |
42 | }
43 |
44 | object HttpMetricsNames {
45 |
46 | def apply(
47 | requests: String,
48 | requestsActive: String,
49 | requestsFailures: String,
50 | requestsSize: String,
51 | responses: String,
52 | responsesErrors: String,
53 | responsesDuration: String,
54 | responsesSize: String,
55 | connections: String,
56 | connectionsActive: String
57 | ): HttpMetricsNames = HttpMetricsNamesImpl(
58 | requests,
59 | requestsActive,
60 | requestsFailures,
61 | requestsSize,
62 | responses,
63 | responsesErrors,
64 | responsesDuration,
65 | responsesSize,
66 | connections,
67 | connectionsActive
68 | )
69 |
70 | private[metrics] case class HttpMetricsNamesImpl(
71 | requests: String,
72 | requestsActive: String,
73 | requestsFailures: String,
74 | requestsSize: String,
75 | responses: String,
76 | responsesErrors: String,
77 | responsesDuration: String,
78 | responsesSize: String,
79 | connections: String,
80 | connectionsActive: String
81 | ) extends HttpMetricsNames {
82 | def withRequests(name: String): HttpMetricsNamesImpl = copy(requests = name)
83 | def withRequestsActive(name: String): HttpMetricsNamesImpl = copy(requestsActive = name)
  84 |     def withRequestsFailures(name: String): HttpMetricsNamesImpl = copy(requestsFailures = name)
85 | def withRequestSize(name: String): HttpMetricsNamesImpl = copy(requestsSize = name)
86 | def withResponses(name: String): HttpMetricsNamesImpl = copy(responses = name)
87 | def withResponsesErrors(name: String): HttpMetricsNamesImpl = copy(responsesErrors = name)
88 | def withResponsesDuration(name: String): HttpMetricsNamesImpl = copy(responsesDuration = name)
89 | def withResponseSize(name: String): HttpMetricsNamesImpl = copy(responsesSize = name)
90 | def withConnections(name: String): HttpMetricsNamesImpl = copy(connections = name)
91 | def withConnectionsActive(name: String): HttpMetricsNamesImpl = copy(connectionsActive = name)
92 | }
93 |
94 | }
95 |
--------------------------------------------------------------------------------
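
A sketch of building a full name set with the factory above (the string values are illustrative; each backend interprets them in its own way):

    import fr.davit.akka.http.metrics.core.HttpMetricsNames

    object MetricsNamesExample {
      val names: HttpMetricsNames = HttpMetricsNames(
        requests = "requests",
        requestsActive = "requests.active",
        requestsFailures = "requests.failures",
        requestsSize = "requests.bytes",
        responses = "responses",
        responsesErrors = "responses.errors",
        responsesDuration = "responses.duration",
        responsesSize = "responses.bytes",
        connections = "connections",
        connectionsActive = "connections.active"
      ).withResponsesDuration("responses.latency") // the with* copies return an updated instance
    }
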
/dropwizard/src/it/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardMetricsItSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import java.util.concurrent.TimeUnit
20 | import akka.actor.ActorSystem
21 | import akka.http.scaladsl.Http
22 | import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
23 | import akka.http.scaladsl.model.{HttpRequest, StatusCodes, Uri}
24 | import akka.http.scaladsl.server.Directives._
25 | import akka.http.scaladsl.server.Route
26 | import akka.http.scaladsl.unmarshalling.Unmarshal
27 | import akka.testkit.TestKit
28 | import fr.davit.akka.http.metrics.core.HttpMetrics._
29 | import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives._
30 | import fr.davit.akka.http.metrics.dropwizard.marshalling.DropwizardMarshallers._
31 | import com.codahale.metrics.MetricRegistry
32 | import com.codahale.metrics.jvm.{CachedThreadStatesGaugeSet, GarbageCollectorMetricSet, MemoryUsageGaugeSet}
33 | import org.scalatest.BeforeAndAfterAll
34 | import org.scalatest.concurrent.ScalaFutures
35 | import org.scalatest.flatspec.AnyFlatSpecLike
36 | import org.scalatest.matchers.should.Matchers
37 | import org.scalatest.time.{Millis, Seconds, Span}
38 | import spray.json.{DefaultJsonProtocol, JsValue}
39 |
40 | import scala.concurrent.duration._
41 |
42 | class DropwizardMetricsItSpec
43 | extends TestKit(ActorSystem("DropwizardMetricsItSpec"))
44 | with AnyFlatSpecLike
45 | with Matchers
46 | with ScalaFutures
47 | with BeforeAndAfterAll
48 | with SprayJsonSupport
49 | with DefaultJsonProtocol {
50 |
51 | implicit val defaultPatience = PatienceConfig(timeout = Span(10, Seconds), interval = Span(500, Millis))
52 |
53 | private case class JsonResponse(metrics: Map[String, JsValue])
54 | implicit private val metricsFormat = jsonFormat1(JsonResponse)
55 |
56 | override def afterAll(): Unit = {
57 | Http().shutdownAllConnectionPools()
58 | TestKit.shutdownActorSystem(system)
59 | }
60 |
61 | "DropwizardMetrics" should "expose external metrics" in {
62 | val settings = DropwizardSettings.default
63 | val dropwizard: MetricRegistry = new MetricRegistry()
64 | dropwizard.register("jvm.gc", new GarbageCollectorMetricSet())
65 | dropwizard.register("jvm.threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS))
66 | dropwizard.register("jvm.memory", new MemoryUsageGaugeSet())
67 |
68 | val registry = DropwizardRegistry(dropwizard, settings)
69 |
70 | val route: Route = (get & path("metrics"))(metrics(registry))
71 |
72 | val binding = Http()
73 | .newMeteredServerAt("localhost", 0, registry)
74 | .bindFlow(route)
75 | .futureValue
76 |
77 | val uri = Uri("/metrics")
78 | .withScheme("http")
79 | .withAuthority(binding.localAddress.getHostString, binding.localAddress.getPort)
80 | val request = HttpRequest().withUri(uri)
81 |
82 | val response = Http()
83 | .singleRequest(request)
84 | .futureValue
85 |
86 | response.status shouldBe StatusCodes.OK
87 | val body = Unmarshal(response).to[JsonResponse].futureValue
88 |
89 | body.metrics.keys.filter(_.startsWith("jvm.gc")) should not be empty
90 | body.metrics.keys.filter(_.startsWith("jvm.memory")) should not be empty
91 | body.metrics.keys.filter(_.startsWith("jvm.threads")) should not be empty
92 |
93 | binding.terminate(30.seconds).futureValue
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/dropwizard-v5/src/it/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardMetricsItSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import java.util.concurrent.TimeUnit
20 | import akka.actor.ActorSystem
21 | import akka.http.scaladsl.Http
22 | import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
23 | import akka.http.scaladsl.model.{HttpRequest, StatusCodes, Uri}
24 | import akka.http.scaladsl.server.Directives._
25 | import akka.http.scaladsl.server.Route
26 | import akka.http.scaladsl.unmarshalling.Unmarshal
27 | import akka.testkit.TestKit
28 | import fr.davit.akka.http.metrics.core.HttpMetrics._
29 | import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives._
30 | import fr.davit.akka.http.metrics.dropwizard.marshalling.DropwizardMarshallers._
31 | import io.dropwizard.metrics5.MetricRegistry
32 | import io.dropwizard.metrics5.jvm.{CachedThreadStatesGaugeSet, GarbageCollectorMetricSet, MemoryUsageGaugeSet}
33 | import org.scalatest.BeforeAndAfterAll
34 | import org.scalatest.concurrent.ScalaFutures
35 | import org.scalatest.flatspec.AnyFlatSpecLike
36 | import org.scalatest.matchers.should.Matchers
37 | import org.scalatest.time.{Millis, Seconds, Span}
38 | import spray.json.{DefaultJsonProtocol, JsValue}
39 |
40 | import scala.concurrent.duration._
41 |
42 | class DropwizardMetricsItSpec
43 | extends TestKit(ActorSystem("DropwizardMetricsItSpec"))
44 | with AnyFlatSpecLike
45 | with Matchers
46 | with ScalaFutures
47 | with BeforeAndAfterAll
48 | with SprayJsonSupport
49 | with DefaultJsonProtocol {
50 |
51 | implicit val defaultPatience = PatienceConfig(timeout = Span(10, Seconds), interval = Span(500, Millis))
52 |
53 | private case class JsonResponse(metrics: Map[String, JsValue])
54 | implicit private val metricsFormat = jsonFormat1(JsonResponse)
55 |
56 | override def afterAll(): Unit = {
57 | Http().shutdownAllConnectionPools()
58 | TestKit.shutdownActorSystem(system)
59 | }
60 |
61 | "DropwizardMetrics" should "expose external metrics" in {
62 | val settings = DropwizardSettings.default
63 | val dropwizard: MetricRegistry = new MetricRegistry()
64 | dropwizard.register("jvm.gc", new GarbageCollectorMetricSet())
65 | dropwizard.register("jvm.threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS))
66 | dropwizard.register("jvm.memory", new MemoryUsageGaugeSet())
67 |
68 | val registry = DropwizardRegistry(dropwizard, settings)
69 |
70 | val route: Route = (get & path("metrics"))(metrics(registry))
71 |
72 | val binding = Http()
73 | .newMeteredServerAt("localhost", 0, registry)
74 | .bindFlow(route)
75 | .futureValue
76 |
77 | val uri = Uri("/metrics")
78 | .withScheme("http")
79 | .withAuthority(binding.localAddress.getHostString, binding.localAddress.getPort)
80 | val request = HttpRequest().withUri(uri)
81 |
82 | val response = Http()
83 | .singleRequest(request)
84 | .futureValue
85 |
86 | response.status shouldBe StatusCodes.OK
87 | val body = Unmarshal(response).to[JsonResponse].futureValue
88 |
89 | body.metrics.keys.filter(_.startsWith("jvm.gc")) should not be empty
90 | body.metrics.keys.filter(_.startsWith("jvm.memory")) should not be empty
91 | body.metrics.keys.filter(_.startsWith("jvm.threads")) should not be empty
92 |
93 | binding.terminate(30.seconds).futureValue
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/dropwizard/src/test/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardRegistrySpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import fr.davit.akka.http.metrics.core.Dimension
20 | import org.scalatest.flatspec.AnyFlatSpec
21 | import org.scalatest.matchers.should.Matchers
22 |
23 | import scala.concurrent.duration._
24 | import scala.jdk.CollectionConverters._
25 |
26 | class DropwizardRegistrySpec extends AnyFlatSpec with Matchers {
27 |
28 | val dimensions = Seq(Dimension("status", "2xx"), Dimension("path", "/api"))
29 |
30 | trait Fixture {
31 | val registry = DropwizardRegistry()
32 |
33 | def underlyingCounter(name: String): Long = {
34 | registry.underlying.getCounters.asScala(name).getCount
35 | }
36 |
37 | def underlyingHistogram(name: String): Long = {
38 | registry.underlying.getHistograms.asScala(name).getSnapshot.getValues.sum
39 | }
40 |
41 | def underlyingTimer(name: String): Long = {
42 | registry.underlying.getTimers.asScala(name).getSnapshot.getValues.sum
43 | }
44 |
45 | }
46 |
47 | "DropwizardRegistry" should "set requestsActive metrics in the underlying registry" in new Fixture {
48 | registry.requestsActive.inc()
49 | underlyingCounter("akka.http.requests.active") shouldBe 1L
50 | }
51 |
52 | it should "set requests metrics in the underlying registry" in new Fixture {
53 | registry.requests.inc()
54 | underlyingCounter("akka.http.requests") shouldBe 1L
55 | }
56 |
57 | it should "set requestsSize metrics in the underlying registry" in new Fixture {
58 | registry.requestsSize.update(3)
59 | underlyingHistogram("akka.http.requests.bytes") shouldBe 3L
60 | }
61 |
62 | it should "set responses metrics in the underlying registry" in new Fixture {
63 | registry.responses.inc()
64 | underlyingCounter("akka.http.responses") shouldBe 1L
65 |
66 | registry.responses.inc(dimensions)
67 | underlyingCounter("akka.http.responses") shouldBe 2L
68 | }
69 |
70 | it should "set responsesErrors metrics in the underlying registry" in new Fixture {
71 | registry.responsesErrors.inc()
72 | underlyingCounter("akka.http.responses.errors") shouldBe 1L
73 |
74 | registry.responsesErrors.inc(dimensions)
75 | underlyingCounter("akka.http.responses.errors") shouldBe 2L
76 | }
77 |
78 | it should "set responsesDuration metrics in the underlying registry" in new Fixture {
79 | registry.responsesDuration.observe(3.seconds)
80 | underlyingTimer("akka.http.responses.duration") shouldBe 3000000000L
81 |
82 | registry.responsesDuration.observe(3.seconds, dimensions)
83 | underlyingTimer("akka.http.responses.duration") shouldBe 6000000000L
84 | }
85 |
86 | it should "set responsesSize metrics in the underlying registry" in new Fixture {
87 | registry.responsesSize.update(3)
88 | underlyingHistogram("akka.http.responses.bytes") shouldBe 3L
89 |
90 | registry.responsesSize.update(3, dimensions)
91 | underlyingHistogram("akka.http.responses.bytes") shouldBe 6L
92 | }
93 |
94 | it should "set connectionsActive metrics in the underlying registry" in new Fixture {
95 | registry.connectionsActive.inc()
96 | underlyingCounter("akka.http.connections.active") shouldBe 1L
97 | }
98 |
99 | it should "set connections metrics in the underlying registry" in new Fixture {
100 | registry.connections.inc()
101 | underlyingCounter("akka.http.connections") shouldBe 1L
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/core/src/main/scala/fr/davit/akka/http/metrics/core/MeterStage.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
20 | import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
21 | import akka.stream.{Attributes, BidiShape, Inlet, Outlet}
22 |
23 | import scala.collection.mutable
24 |
25 | object MeterStage {
26 | val PrematureCloseException = new IllegalStateException("Stream completed prematurely")
27 | }
28 |
29 | private[metrics] class MeterStage(metricsHandler: HttpMetricsHandler)
30 | extends GraphStage[BidiShape[HttpRequest, HttpRequest, HttpResponse, HttpResponse]] {
31 |
32 | import MeterStage._
33 |
34 | private val requestIn = Inlet[HttpRequest]("MeterStage.requestIn")
35 | private val requestOut = Outlet[HttpRequest]("MeterStage.requestOut")
36 | private val responseIn = Inlet[HttpResponse]("MeterStage.responseIn")
37 | private val responseOut = Outlet[HttpResponse]("MeterStage.responseOut")
38 |
39 | override def initialAttributes = Attributes.name("MeterStage")
40 |
41 | val shape = new BidiShape(requestIn, requestOut, responseIn, responseOut)
42 |
43 | override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
  44 |     // using a stack here is OK because an HTTP/1 connection must respect request/response ordering
  45 |     // see: https://doc.akka.io/docs/akka-http/current/server-side/low-level-api.html#request-response-cycle
  46 |     // HTTP/2 is served through the handler-function API only
47 | private val pending = mutable.Stack[HttpRequest]()
48 | private var failure = Option.empty[Throwable]
49 |
50 | override def preStart(): Unit = {
51 | metricsHandler.onConnection()
52 | }
53 |
54 | override def postStop(): Unit = {
55 | val cause = failure.getOrElse(PrematureCloseException)
56 | pending.foreach(metricsHandler.onFailure(_, cause))
57 | metricsHandler.onDisconnection()
58 | }
59 |
60 | val requestHandler = new InHandler with OutHandler {
61 |
62 | override def onPush(): Unit = {
63 | val request = metricsHandler.onRequest(grab(requestIn))
64 | pending.push(request)
65 | push(requestOut, request)
66 | }
67 | override def onPull(): Unit = pull(requestIn)
68 |
69 | override def onUpstreamFinish(): Unit = complete(requestOut)
70 | override def onUpstreamFailure(ex: Throwable): Unit = fail(requestOut, ex)
71 | override def onDownstreamFinish(cause: Throwable): Unit = cancel(requestIn)
72 | }
73 |
74 | val responseHandler = new InHandler with OutHandler {
75 |
76 | override def onPush(): Unit = {
77 | val response = grab(responseIn)
78 | val request = pending.pop()
79 | push(responseOut, metricsHandler.onResponse(request, response))
80 | }
81 | override def onPull(): Unit = pull(responseIn)
82 |
83 | override def onUpstreamFinish(): Unit = {
84 | complete(responseOut)
85 | }
86 |
87 | override def onUpstreamFailure(ex: Throwable): Unit = {
88 | failure = Some(ex)
89 | fail(responseOut, ex)
90 | }
91 |
92 | override def onDownstreamFinish(cause: Throwable): Unit = {
93 | failure = Some(cause)
94 | cancel(responseIn)
95 | }
96 | }
97 |
98 | setHandlers(requestIn, requestOut, requestHandler)
99 | setHandlers(responseIn, responseOut, responseHandler)
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/prometheus/src/test/scala/fr/davit/akka/http/metrics/prometheus/marshalling/PrometheusMarshallersSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.prometheus.marshalling
18 |
19 | import akka.http.scaladsl.testkit.ScalatestRouteTest
20 | import fr.davit.akka.http.metrics.core.{Dimension, StatusGroupLabeler}
21 | import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives.metrics
22 | import fr.davit.akka.http.metrics.prometheus.{PrometheusRegistry, PrometheusSettings}
23 | import io.prometheus.client.CollectorRegistry
24 | import org.scalatest.BeforeAndAfterAll
25 | import org.scalatest.flatspec.AnyFlatSpec
26 | import org.scalatest.matchers.should.Matchers
27 |
28 | import scala.concurrent.duration._
29 |
30 | class PrometheusMarshallersSpec extends AnyFlatSpec with Matchers with ScalatestRouteTest with BeforeAndAfterAll {
31 |
32 | trait Fixture extends PrometheusMarshallers {
33 |
34 | val registry = PrometheusRegistry(
35 | new CollectorRegistry(),
36 | PrometheusSettings.default.withIncludeStatusDimension(true)
37 | )
38 |
39 | io.prometheus.client.Counter
40 | .build("other_metric", "An other metric")
41 | .register(registry.underlying)
42 | }
43 |
44 | override def afterAll(): Unit = {
45 | cleanUp()
46 | super.afterAll()
47 | }
48 |
49 |   "PrometheusMarshallers" should "expose metrics in the Prometheus text format" in new Fixture {
50 |     // exercise each metric (including the labeled ones) at least once
51 |     // so that every series shows up in the exported report
52 | val dimensions = Seq(Dimension(StatusGroupLabeler.name, "2xx"))
53 | registry.requests.inc()
54 | registry.requestsActive.inc()
55 | registry.requestsSize.update(10)
56 | registry.responses.inc(dimensions)
57 | registry.responsesErrors.inc(dimensions)
58 | registry.responsesDuration.observe(1.second, dimensions)
59 | registry.responsesSize.update(10, dimensions)
60 | registry.connections.inc()
61 | registry.connectionsActive.inc()
62 |
63 | Get() ~> metrics(registry) ~> check {
64 | response.entity.contentType shouldBe PrometheusMarshallers.PrometheusContentType
65 | val text = responseAs[String]
66 | // println(text)
67 | val metrics = text
68 | .split('\n')
69 | .filterNot(_.startsWith("#"))
70 | .map(_.takeWhile(c => c != ' ' && c != '{'))
71 | .distinct
72 | metrics should contain theSameElementsAs Seq(
73 | "akka_http_requests_total",
74 | "akka_http_requests_active",
75 | "akka_http_requests_created",
76 | "akka_http_requests_size_bytes_bucket",
77 | "akka_http_requests_size_bytes_count",
78 | "akka_http_requests_size_bytes_sum",
79 | "akka_http_requests_size_bytes_created",
80 | "akka_http_responses_total",
81 | "akka_http_responses_created",
82 | "akka_http_responses_errors_total",
83 | "akka_http_responses_errors_created",
84 | "akka_http_responses_duration_seconds_bucket",
85 | "akka_http_responses_duration_seconds_count",
86 | "akka_http_responses_duration_seconds_sum",
87 | "akka_http_responses_duration_seconds_created",
88 | "akka_http_responses_size_bytes_bucket",
89 | "akka_http_responses_size_bytes_count",
90 | "akka_http_responses_size_bytes_sum",
91 | "akka_http_responses_size_bytes_created",
92 | "akka_http_connections_total",
93 | "akka_http_connections_active",
94 | "akka_http_connections_created",
95 | "other_metric_total",
96 | "other_metric_created"
97 | )
98 | }
99 | }
100 | }
101 |
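Illustrative sketch (not part of the repository): exposing a scrape endpoint with the metrics directive and these marshallers, as PrometheusMetricsItSpec does later in this dump; the path and registry are assumptions.

  import akka.http.scaladsl.server.Directives._
  import akka.http.scaladsl.server.Route
  import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives._
  import fr.davit.akka.http.metrics.prometheus.marshalling.PrometheusMarshallers._

  // `registry` is assumed to be a PrometheusRegistry created elsewhere
  val metricsRoute: Route = (get & path("metrics"))(metrics(registry))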
--------------------------------------------------------------------------------
/core/src/main/scala/fr/davit/akka/http/metrics/core/HttpMetrics.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.NotUsed
20 | import akka.actor.ClassicActorSystemProvider
21 | import akka.http.scaladsl.HttpExt
22 | import akka.http.scaladsl.model._
23 | import akka.http.scaladsl.server.{Directives, ExceptionHandler, RejectionHandler, Route}
24 | import akka.http.scaladsl.settings.RoutingSettings
25 | import akka.stream.scaladsl.{BidiFlow, Flow}
26 | import fr.davit.akka.http.metrics.core.scaladsl.HttpMetricsServerBuilder
27 |
28 | import scala.concurrent.{ExecutionContext, Future}
29 | import scala.util.{Failure, Success, Try}
30 |
31 | final class HttpMetrics(private val http: HttpExt) extends AnyVal {
32 |
33 | def newMeteredServerAt(interface: String, port: Int, metricsHandler: HttpMetricsHandler): HttpMetricsServerBuilder =
34 | HttpMetricsServerBuilder(interface, port, metricsHandler, http.system)
35 |
36 | }
37 |
38 | object HttpMetrics {
39 |
40 | implicit def enrichHttp(http: HttpExt): HttpMetrics = new HttpMetrics(http)
41 |
42 | private def markUnhandled(inner: Route): Route = {
43 | Directives.mapResponse(markUnhandled).tapply(_ => inner)
44 | }
45 |
46 | private def markUnhandled(response: HttpResponse): HttpResponse = {
47 | response.addAttribute(PathLabeler.key, "unhandled")
48 | }
49 |
50 |   /** Use this instead of RouteResult.routeToFlow: it seals the route with exception and rejection handlers that
51 |     * mark unhandled responses for metrics labeling
52 |     */
53 | def metricsRouteToFlow(
54 | route: Route
55 | )(implicit system: ClassicActorSystemProvider): Flow[HttpRequest, HttpResponse, NotUsed] =
56 | Flow[HttpRequest].mapAsync(1)(metricsRouteToFunction(route))
57 |
58 |   /** Use this instead of RouteResult.routeToFunction: it seals the route with exception and rejection handlers
59 |     * that mark unhandled responses for metrics labeling
60 |     */
61 | def metricsRouteToFunction(
62 | route: Route
63 | )(implicit system: ClassicActorSystemProvider): HttpRequest => Future[HttpResponse] = {
64 | val routingSettings = RoutingSettings(system)
65 | val exceptionHandler = ExceptionHandler.default(routingSettings).andThen(markUnhandled _)
66 | val rejectionHandler = RejectionHandler.default.mapRejectionResponse(markUnhandled)
67 |
68 | import akka.http.scaladsl.server.directives.ExecutionDirectives._
69 | Route.toFunction {
70 | (handleExceptions(exceptionHandler) & handleRejections(rejectionHandler)) {
71 | route
72 | }
73 | }
74 | }
75 |
76 | def meterFunction(handler: HttpRequest => Future[HttpResponse], metricsHandler: HttpMetricsHandler)(implicit
77 | executionContext: ExecutionContext
78 | ): HttpRequest => Future[HttpResponse] =
79 | (metricsHandler.onRequest _)
80 | .andThen(r => (r, handler(r)))
81 | .andThen { case (req, resp) =>
82 | resp.transform(
83 | r => metricsHandler.onResponse(req, r),
84 | e => metricsHandler.onFailure(req, e)
85 | )
86 | }
87 |
88 | def meterFunctionSync(
89 | handler: HttpRequest => HttpResponse,
90 | metricsHandler: HttpMetricsHandler
91 | ): HttpRequest => HttpResponse =
92 | (metricsHandler.onRequest _)
93 | .andThen(r => (r, Try(handler(r))))
94 | .andThen {
95 | case (req, Success(resp)) =>
96 | metricsHandler.onResponse(req, resp)
97 | case (req, Failure(e)) =>
98 | metricsHandler.onFailure(req, e)
99 | throw e
100 | }
101 |
102 | def meterFlow(
103 | metricsHandler: HttpMetricsHandler
104 | ): BidiFlow[HttpRequest, HttpRequest, HttpResponse, HttpResponse, NotUsed] = BidiFlow
105 | .fromGraph(new MeterStage(metricsHandler))
106 |
107 | }
108 |
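Illustrative sketch (not part of the repository): how the enrichHttp extension above is typically used to bind a metered server, mirroring HttpMetricsItSpec later in this dump; the registry, interface and port are assumptions.

  import akka.actor.ActorSystem
  import akka.http.scaladsl.Http
  import akka.http.scaladsl.server.Directives._
  import fr.davit.akka.http.metrics.core.HttpMetrics._ // brings newMeteredServerAt into scope

  implicit val system: ActorSystem = ActorSystem("example")

  // `registry` is assumed to be any HttpMetricsRegistry implementation (e.g. a PrometheusRegistry)
  val route = path("hello")(complete("world"))
  Http().newMeteredServerAt("localhost", 8080, registry).bindFlow(route)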
--------------------------------------------------------------------------------
/core/src/main/scala/fr/davit/akka/http/metrics/core/HttpMetricsSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.http.scaladsl.model.HttpResponse
20 |
21 | import scala.collection.immutable
22 |
23 | trait HttpMetricsSettings {
24 |
25 | /** Metrics namespace */
26 | def namespace: String
27 |
28 |   /** Names of the individual metrics */
29 | def metricsNames: HttpMetricsNames
30 |
31 |   /** Function that determines whether the HTTP response should be counted as an error */
32 | def defineError: HttpResponse => Boolean
33 |
34 | /** Include the method dimension on metrics */
35 | def includeMethodDimension: Boolean
36 |
37 | /** Include the path dimension on metrics */
38 | def includePathDimension: Boolean
39 |
40 | /** Include the status group dimension on metrics */
41 | def includeStatusDimension: Boolean
42 |
43 | /** Static dimensions to be set on all metrics */
44 | def serverDimensions: immutable.Seq[Dimension]
45 |
46 |   /** Custom labelers producing additional dimensions from requests and responses */
47 | def customDimensions: immutable.Seq[HttpMessageLabeler]
48 |
49 | def withNamespace(namespace: String): HttpMetricsSettings
50 | def withMetricsNames(metricsNames: HttpMetricsNames): HttpMetricsSettings
51 | def withDefineError(fn: HttpResponse => Boolean): HttpMetricsSettings
52 | def withIncludeMethodDimension(include: Boolean): HttpMetricsSettings
53 | def withIncludePathDimension(include: Boolean): HttpMetricsSettings
54 | def withIncludeStatusDimension(include: Boolean): HttpMetricsSettings
55 | def withServerDimensions(dims: Dimension*): HttpMetricsSettings
56 | def withCustomDimensions(labelers: HttpMessageLabeler*): HttpMetricsSettings
57 | }
58 |
59 | object HttpMetricsSettings {
60 |
61 | def apply(
62 | namespace: String,
63 | metricsNames: HttpMetricsNames,
64 | defineError: HttpResponse => Boolean,
65 | includeMethodDimension: Boolean,
66 | includePathDimension: Boolean,
67 | includeStatusDimension: Boolean,
68 | serverDimensions: immutable.Seq[Dimension],
69 | customDimensions: immutable.Seq[HttpMessageLabeler]
70 | ): HttpMetricsSettings = HttpMetricsSettingsImpl(
71 | namespace,
72 | metricsNames,
73 | defineError,
74 | includeMethodDimension,
75 | includePathDimension,
76 | includeStatusDimension,
77 | serverDimensions,
78 | customDimensions
79 | )
80 |
81 | private[metrics] case class HttpMetricsSettingsImpl(
82 | namespace: String,
83 | metricsNames: HttpMetricsNames,
84 | defineError: HttpResponse => Boolean,
85 | includeMethodDimension: Boolean,
86 | includePathDimension: Boolean,
87 | includeStatusDimension: Boolean,
88 | serverDimensions: immutable.Seq[Dimension] = immutable.Seq.empty,
89 | customDimensions: immutable.Seq[HttpMessageLabeler] = immutable.Seq.empty
90 | ) extends HttpMetricsSettings {
91 |
92 | def withNamespace(namespace: String): HttpMetricsSettings = copy(namespace = namespace)
93 | def withMetricsNames(metricsNames: HttpMetricsNames): HttpMetricsSettings = copy(metricsNames = metricsNames)
94 | def withDefineError(fn: HttpResponse => Boolean): HttpMetricsSettings = copy(defineError = fn)
95 | def withIncludeMethodDimension(include: Boolean): HttpMetricsSettings = copy(includeMethodDimension = include)
96 | def withIncludePathDimension(include: Boolean): HttpMetricsSettings = copy(includePathDimension = include)
97 | def withIncludeStatusDimension(include: Boolean): HttpMetricsSettings = copy(includeStatusDimension = include)
98 | def withServerDimensions(dims: Dimension*): HttpMetricsSettings = copy(serverDimensions = dims.toVector)
99 | def withCustomDimensions(dims: HttpMessageLabeler*): HttpMetricsSettings = copy(customDimensions = dims.toVector)
100 |
101 | }
102 | }
103 |
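Illustrative sketch (not part of the repository): tuning a settings instance with the with* combinators declared above, starting from PrometheusSettings.default (shown later in this dump); the namespace and error rule are assumptions.

  import akka.http.scaladsl.model.StatusCodes
  import fr.davit.akka.http.metrics.core.HttpMetricsSettings
  import fr.davit.akka.http.metrics.prometheus.PrometheusSettings

  val settings: HttpMetricsSettings = PrometheusSettings.default
    .withNamespace("com_example_service")
    .withIncludePathDimension(true)
    .withIncludeStatusDimension(true)
    .withDefineError { r =>
      // count 4xx as errors in addition to the default 5xx
      r.status.isInstanceOf[StatusCodes.ClientError] || r.status.isInstanceOf[StatusCodes.ServerError]
    }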
--------------------------------------------------------------------------------
/dropwizard-v5/src/test/scala/fr/davit/akka/http/metrics/dropwizard/DropwizardRegistrySpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.dropwizard
18 |
19 | import fr.davit.akka.http.metrics.core.{Dimension, PathLabeler, StatusGroupLabeler}
20 | import io.dropwizard.metrics5.MetricName
21 | import org.scalatest.flatspec.AnyFlatSpec
22 | import org.scalatest.matchers.should.Matchers
23 |
24 | import scala.concurrent.duration._
25 | import scala.jdk.CollectionConverters._
26 |
27 | class DropwizardRegistrySpec extends AnyFlatSpec with Matchers {
28 |
29 | val dimensions = Seq(Dimension(StatusGroupLabeler.name, "2xx"), Dimension(PathLabeler.name, "/api"))
30 |
31 | trait Fixture {
32 | val registry = DropwizardRegistry()
33 |
34 | def underlyingCounter(name: String, dims: Seq[Dimension] = Seq.empty): Long = {
35 | registry.underlying.getCounters.asScala(metricName(name, dims)).getCount
36 | }
37 |
38 | def underlyingHistogram(name: String, dims: Seq[Dimension] = Seq.empty): Long = {
39 | registry.underlying.getHistograms.asScala(metricName(name, dims)).getSum
40 | }
41 |
42 | def underlyingTimer(name: String, dims: Seq[Dimension] = Seq.empty): Long = {
43 | registry.underlying.getTimers.asScala(metricName(name, dims)).getSum
44 | }
45 |
46 | private def metricName(name: String, dims: Seq[Dimension]): MetricName = {
47 | MetricName.build(name).tagged(dims.map(d => d.name -> d.label).toMap.asJava)
48 | }
49 | }
50 |
51 | "DropwizardRegistry" should "set requestsActive metrics in the underlying registry" in new Fixture {
52 | registry.requestsActive.inc()
53 | underlyingCounter("akka.http.requests.active") shouldBe 1L
54 | }
55 |
56 | it should "set requests metrics in the underlying registry" in new Fixture {
57 | registry.requests.inc()
58 | underlyingCounter("akka.http.requests") shouldBe 1L
59 | }
60 |
61 | it should "set requestsSize metrics in the underlying registry" in new Fixture {
62 | registry.requestsSize.update(3)
63 | underlyingHistogram("akka.http.requests.bytes") shouldBe 3L
64 | }
65 |
66 | it should "set responses metrics in the underlying registry" in new Fixture {
67 | registry.responses.inc()
68 | underlyingCounter("akka.http.responses") shouldBe 1L
69 |
70 | registry.responses.inc(dimensions)
71 | underlyingCounter("akka.http.responses", dimensions) shouldBe 1L
72 | }
73 |
74 | it should "set responsesErrors metrics in the underlying registry" in new Fixture {
75 | registry.responsesErrors.inc()
76 | underlyingCounter("akka.http.responses.errors") shouldBe 1L
77 |
78 | registry.responsesErrors.inc(dimensions)
79 | underlyingCounter("akka.http.responses.errors", dimensions) shouldBe 1L
80 | }
81 |
82 | it should "set responsesDuration metrics in the underlying registry" in new Fixture {
83 | registry.responsesDuration.observe(3.seconds)
84 | underlyingTimer("akka.http.responses.duration") shouldBe 3000000000L
85 |
86 | registry.responsesDuration.observe(3.seconds, dimensions)
87 | underlyingTimer("akka.http.responses.duration", dimensions) shouldBe 3000000000L
88 | }
89 |
90 | it should "set responsesSize metrics in the underlying registry" in new Fixture {
91 | registry.responsesSize.update(3)
92 | underlyingHistogram("akka.http.responses.bytes") shouldBe 3L
93 |
94 | registry.responsesSize.update(3, dimensions)
95 | underlyingHistogram("akka.http.responses.bytes", dimensions) shouldBe 3L
96 | }
97 |
98 | it should "set connectionsActive metrics in the underlying registry" in new Fixture {
99 | registry.connectionsActive.inc()
100 | underlyingCounter("akka.http.connections.active") shouldBe 1L
101 | }
102 |
103 | it should "set connections metrics in the underlying registry" in new Fixture {
104 | registry.connections.inc()
105 | underlyingCounter("akka.http.connections") shouldBe 1L
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/core/src/test/scala/fr/davit/akka/http/metrics/core/TestRegistry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
20 | import akka.http.scaladsl.model.{HttpEntity, StatusCodes}
21 | import fr.davit.akka.http.metrics.core.HttpMetricsNames.HttpMetricsNamesImpl
22 | import fr.davit.akka.http.metrics.core.HttpMetricsSettings.HttpMetricsSettingsImpl
23 |
24 | import scala.collection.mutable
25 | import scala.concurrent.duration.FiniteDuration
26 |
27 | object TestRegistry {
28 |
29 | val settings: HttpMetricsSettings = HttpMetricsSettingsImpl(
30 | "", // not used
31 | HttpMetricsNamesImpl("", "", "", "", "", "", "", "", "", ""), // not used
32 | _.status.isInstanceOf[StatusCodes.ServerError],
33 | includeMethodDimension = false,
34 | includePathDimension = false,
35 | includeStatusDimension = false
36 | )
37 |
38 | implicit val marshaller: ToEntityMarshaller[TestRegistry] = Marshaller.opaque(_ => HttpEntity.Empty)
39 |
40 | private def keyer(dimensions: Seq[Dimension]): String = dimensions
41 | .map(d => d.name + "=" + d.label)
42 | .mkString(":")
43 |
44 | class TestCounter extends Counter {
45 | protected val acc = mutable.Map[String, Long]()
46 |
47 | override def inc(dimensions: Seq[Dimension] = Seq.empty): Unit = {
48 | val key = keyer(dimensions)
49 | acc.get(key) match {
50 | case Some(v) => acc += (key -> (v + 1))
51 | case None => acc += (key -> 1)
52 | }
53 | }
54 |
55 | def value(dimensions: Seq[Dimension] = Seq.empty): Long = acc.getOrElse(keyer(dimensions), 0)
56 | }
57 |
58 | class TestGauge extends TestCounter with Gauge {
59 |
60 | override def dec(dimensions: Seq[Dimension] = Seq.empty): Unit = {
61 | val key = keyer(dimensions)
62 | acc.get(key) match {
63 | case Some(v) => acc += (key -> (v - 1))
64 | case None => acc += (key -> -1)
65 | }
66 | }
67 | }
68 |
69 | class TestTimer extends Timer {
70 | protected val acc = mutable.Map[String, List[FiniteDuration]]()
71 |
72 | override def observe(duration: FiniteDuration, dimensions: Seq[Dimension] = Seq.empty): Unit = {
73 | val key = keyer(dimensions)
74 | acc.get(key) match {
75 | case Some(vs) => acc += (key -> (duration :: vs))
76 | case None => acc += (key -> (duration :: Nil))
77 | }
78 | }
79 |
80 | def values(dimensions: Seq[Dimension] = Seq.empty): List[FiniteDuration] = acc.getOrElse(keyer(dimensions), Nil)
81 | }
82 |
83 | final class TestHistogram extends Histogram {
84 | protected val acc = mutable.Map[String, List[Long]]()
85 |
86 | override def update[T](value: T, dimensions: Seq[Dimension] = Seq.empty)(implicit numeric: Numeric[T]): Unit = {
87 | val key = keyer(dimensions)
88 | acc.get(key) match {
89 | case Some(vs) => acc += (key -> (numeric.toLong(value) :: vs))
90 | case None => acc += (key -> (numeric.toLong(value) :: Nil))
91 | }
92 | }
93 |
94 | def values(dimensions: Seq[Dimension] = Seq.empty): List[Long] = acc.getOrElse(keyer(dimensions), Nil)
95 | }
96 |
97 | }
98 |
99 | final class TestRegistry(settings: HttpMetricsSettings = TestRegistry.settings) extends HttpMetricsRegistry(settings) {
100 |
101 | import TestRegistry._
102 |
103 | override val requests = new TestCounter
104 | override val requestsActive = new TestGauge
105 | override val requestsFailures = new TestCounter
106 | override val requestsSize = new TestHistogram
107 | override val responses = new TestCounter
108 | override val responsesErrors = new TestCounter
109 | override val responsesDuration = new TestTimer
110 | override val responsesSize = new TestHistogram
111 | override val connections = new TestCounter
112 | override val connectionsActive = new TestGauge
113 | }
114 |
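Illustrative sketch (not part of the repository): exercising the in-memory counters directly, in the style of the specs in this dump.

  import fr.davit.akka.http.metrics.core.{Dimension, StatusGroupLabeler}

  val registry   = new TestRegistry()
  val dimensions = Seq(Dimension(StatusGroupLabeler.name, "2xx"))

  registry.requests.inc()
  registry.responses.inc(dimensions)

  registry.requests.value()            // 1
  registry.responses.value(dimensions) // 1
  registry.responses.value()           // 0: the undimensioned series is tracked separately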
--------------------------------------------------------------------------------
/core/src/main/scala/fr/davit/akka/http/metrics/core/scaladsl/HttpMetricsServerBuilder.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core.scaladsl
18 |
19 | import akka.actor.ClassicActorSystemProvider
20 | import akka.event.LoggingAdapter
21 | import akka.http.scaladsl.Http.ServerBinding
22 | import akka.http.scaladsl._
23 | import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
24 | import akka.http.scaladsl.server.Route
25 | import akka.http.scaladsl.settings.ServerSettings
26 | import akka.stream.scaladsl.Source
27 | import akka.stream.{Materializer, SystemMaterializer}
28 | import fr.davit.akka.http.metrics.core.{HttpMetrics, HttpMetricsHandler}
29 |
30 | import scala.annotation.nowarn
31 | import scala.concurrent.Future
32 |
33 | /** Metered server builder
34 |   *
35 |   * Use HttpExt.newMeteredServerAt() to create a builder, customize it with its configuration methods, and then
36 |   * call one of the bind* methods to bind a server.
37 |   *
38 |   * Does not extend akka.http.scaladsl.ServerBuilder: routes must be sealed internally to guarantee proper metrics
39 |   * instrumentation
40 |   */
41 | final case class HttpMetricsServerBuilder(
42 | interface: String,
43 | port: Int,
44 | metricsHandler: HttpMetricsHandler,
45 | context: ConnectionContext,
46 | log: LoggingAdapter,
47 | settings: ServerSettings,
48 | system: ClassicActorSystemProvider,
49 | materializer: Materializer
50 | ) {
51 |
52 | private lazy val http: HttpExt = Http(system)
53 |
54 | def onInterface(newInterface: String): HttpMetricsServerBuilder = copy(interface = newInterface)
55 | def onPort(newPort: Int): HttpMetricsServerBuilder = copy(port = newPort)
56 | def meterTo(metricsHandler: HttpMetricsHandler): HttpMetricsServerBuilder = copy(metricsHandler = metricsHandler)
57 | def logTo(newLog: LoggingAdapter): HttpMetricsServerBuilder = copy(log = newLog)
58 | def withSettings(newSettings: ServerSettings): HttpMetricsServerBuilder = copy(settings = newSettings)
59 | def adaptSettings(f: ServerSettings => ServerSettings): HttpMetricsServerBuilder = copy(settings = f(settings))
60 | def enableHttps(newContext: HttpsConnectionContext): HttpMetricsServerBuilder = copy(context = newContext)
61 | def withMaterializer(newMaterializer: Materializer): HttpMetricsServerBuilder = copy(materializer = newMaterializer)
62 |
63 | @nowarn("msg=deprecated")
64 | def connectionSource(): Source[Http.IncomingConnection, Future[ServerBinding]] =
65 | http
66 | .bind(interface, port, context, settings, log)
67 | .map(c => c.copy(_flow = c._flow.join(HttpMetrics.meterFlow(metricsHandler))))
68 |
69 | @nowarn("msg=deprecated")
70 | def bindFlow(route: Route): Future[ServerBinding] = {
71 | val flow = HttpMetrics.metricsRouteToFlow(route)(system)
72 | val meteredFlow = HttpMetrics.meterFlow(metricsHandler).join(flow)
73 | http.bindAndHandle(
74 | meteredFlow,
75 | interface,
76 | port,
77 | context,
78 | settings,
79 | log
80 | )(materializer)
81 | }
82 |
83 | @nowarn("msg=deprecated")
84 | def bind(route: Route): Future[ServerBinding] = {
85 | val handler = HttpMetrics.metricsRouteToFunction(route)(system)
86 | val meteredHandler = HttpMetrics.meterFunction(handler, metricsHandler)(materializer.executionContext)
87 | http.bindAndHandleAsync(
88 | meteredHandler,
89 | interface,
90 | port,
91 | context,
92 | settings,
93 | parallelism = 0,
94 | log
95 | )(materializer)
96 | }
97 |
98 | @nowarn("msg=deprecated")
99 | def bindSync(handler: HttpRequest => HttpResponse): Future[ServerBinding] = {
100 | val meteredHandler = HttpMetrics.meterFunctionSync(handler, metricsHandler)
101 | http.bindAndHandleSync(
102 | meteredHandler,
103 | interface,
104 | port,
105 | context,
106 | settings,
107 | log
108 | )(materializer)
109 | }
110 | }
111 |
112 | object HttpMetricsServerBuilder {
113 |
114 | def apply(
115 | interface: String,
116 | port: Int,
117 | metricsHandler: HttpMetricsHandler,
118 | system: ClassicActorSystemProvider
119 | ): HttpMetricsServerBuilder =
120 | HttpMetricsServerBuilder(
121 | interface,
122 | port,
123 | metricsHandler,
124 | HttpConnectionContext,
125 | system.classicSystem.log,
126 | ServerSettings(system.classicSystem),
127 | system,
128 | SystemMaterializer(system).materializer
129 | )
130 | }
131 |
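Illustrative sketch (not part of the repository): customizing the builder before binding; `registry`, `route` and `httpsContext` are assumptions.

  import akka.actor.ActorSystem
  import akka.http.scaladsl.Http
  import fr.davit.akka.http.metrics.core.HttpMetrics._

  implicit val system: ActorSystem = ActorSystem("example")

  // `httpsContext` is assumed to be an HttpsConnectionContext built elsewhere
  Http()
    .newMeteredServerAt("0.0.0.0", 8443, registry)
    .enableHttps(httpsContext)
    .logTo(system.log)
    .bind(route)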
--------------------------------------------------------------------------------
/core/src/it/scala/fr/davit/akka/http/metrics/core/HttpMetricsItSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.actor.ActorSystem
20 | import akka.http.scaladsl.Http
21 | import akka.http.scaladsl.model.ws.{BinaryMessage, Message, TextMessage, WebSocketRequest}
22 | import akka.http.scaladsl.model.{HttpRequest, StatusCodes, Uri}
23 | import akka.http.scaladsl.server.Directives._
24 | import akka.http.scaladsl.server.Route
25 | import akka.http.scaladsl.unmarshalling.Unmarshal
26 | import akka.stream.scaladsl.{Flow, Keep, Sink, Source}
27 | import akka.testkit.TestKit
28 | import fr.davit.akka.http.metrics.core.HttpMetrics._
29 | import org.scalatest.BeforeAndAfterAll
30 | import org.scalatest.concurrent.ScalaFutures
31 | import org.scalatest.flatspec.AnyFlatSpecLike
32 | import org.scalatest.matchers.should.Matchers
33 | import org.scalatest.time.{Millis, Seconds, Span}
34 |
35 | import scala.concurrent.duration._
36 |
37 | class HttpMetricsItSpec
38 | extends TestKit(ActorSystem("HttpMetricsItSpec"))
39 | with AnyFlatSpecLike
40 | with Matchers
41 | with ScalaFutures
42 | with BeforeAndAfterAll {
43 |
44 | implicit val defaultPatience = PatienceConfig(timeout = Span(10, Seconds), interval = Span(500, Millis))
45 |
46 | override def afterAll(): Unit = {
47 | Http().shutdownAllConnectionPools()
48 | TestKit.shutdownActorSystem(system)
49 | }
50 |
51 | trait Fixture {
52 |
53 | val settings: HttpMetricsSettings = TestRegistry.settings
54 | .withNamespace("com.example.service")
55 |
56 | val registry = new TestRegistry(settings)
57 |
58 | val greeter: Flow[Message, Message, Any] =
59 | Flow[Message].mapConcat {
60 | case tm: TextMessage =>
61 | TextMessage(Source.single("Hello ") ++ tm.textStream ++ Source.single("!")) :: Nil
62 | case bm: BinaryMessage =>
63 | // ignore binary messages but drain content to avoid the stream being clogged
64 | bm.dataStream.runWith(Sink.ignore)
65 | Nil
66 | }
67 |
68 | val route: Route = {
69 | pathEndOrSingleSlash {
70 | get {
71 | complete("Hello world")
72 | }
73 | } ~ path("greeter") {
74 | get {
75 | handleWebSocketMessages(greeter)
76 | }
77 | }
78 | }
79 | }
80 |
81 | "HttpMetrics" should "record metrics on flow handler" in new Fixture {
82 |
83 | val binding = Http()
84 | .newMeteredServerAt("localhost", 0, registry)
85 | .bindFlow(route)
86 | .futureValue
87 |
88 | val uri = Uri()
89 | .withScheme("http")
90 | .withAuthority(binding.localAddress.getHostString, binding.localAddress.getPort)
91 | val request = HttpRequest().withUri(uri)
92 |
93 | val response = Http()
94 | .singleRequest(request)
95 | .futureValue
96 |
97 | response.status shouldBe StatusCodes.OK
98 | Unmarshal(response).to[String].futureValue shouldBe "Hello world"
99 | registry.connections.value() shouldBe 1
100 | registry.requests.value() shouldBe 1
101 |
102 | binding.terminate(30.seconds).futureValue
103 | }
104 |
105 | it should "record metrics on function handler" in new Fixture {
106 |
107 | val binding = Http()
108 | .newMeteredServerAt("localhost", 0, registry)
109 | .bind(route)
110 | .futureValue
111 |
112 | val uri = Uri()
113 | .withScheme("http")
114 | .withAuthority(binding.localAddress.getHostString, binding.localAddress.getPort)
115 | val request = HttpRequest().withUri(uri)
116 |
117 | val response = Http()
118 | .singleRequest(request)
119 | .futureValue
120 |
121 | response.status shouldBe StatusCodes.OK
122 | Unmarshal(response).to[String].futureValue shouldBe "Hello world"
123 | registry.connections.value() shouldBe 0 // No connection metrics with function handler
124 | registry.requests.value() shouldBe 1
125 |
126 | binding.terminate(30.seconds).futureValue
127 | }
128 |
129 | it should "support web socket" in new Fixture {
130 |
131 | val binding = Http()
132 | .newMeteredServerAt("localhost", 0, registry)
133 | .bindFlow(route)
134 | .futureValue
135 |
136 | val uri = Uri()
137 | .withScheme("ws")
138 | .withAuthority(binding.localAddress.getHostString, binding.localAddress.getPort)
139 | .withPath(Uri.Path("/greeter"))
140 | val request = WebSocketRequest(uri)
141 | val flow = Flow.fromSinkAndSourceMat(Sink.ignore, Source.single(TextMessage("test")))(Keep.left)
142 |
143 | val (response, _) = Http()
144 | .singleWebSocketRequest(request, flow)
145 |
146 | response.futureValue.response.status shouldBe StatusCodes.SwitchingProtocols
147 | registry.connections.value() shouldBe 1
148 | registry.requests.value() shouldBe 1
149 |
150 | binding.terminate(30.seconds).futureValue
151 | }
152 | }
153 |
--------------------------------------------------------------------------------
/datadog/src/it/scala/fr/davit/akka/http/metrics/datadog/DatadogRegistrySpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.datadog
18 |
19 | import akka.actor.ActorSystem
20 | import akka.io.{IO, Udp}
21 | import akka.testkit.{TestKit, TestProbe}
22 | import com.timgroup.statsd.NonBlockingStatsDClientBuilder
23 | import fr.davit.akka.http.metrics.core.{Dimension, PathLabeler, StatusGroupLabeler}
24 | import org.scalatest.BeforeAndAfterAll
25 | import org.scalatest.flatspec.AnyFlatSpecLike
26 | import org.scalatest.matchers.should.Matchers
27 |
28 | import java.net.InetSocketAddress
29 | import scala.concurrent.duration._
30 |
31 | class DatadogRegistrySpec
32 | extends TestKit(ActorSystem("DatadogRegistrySpec"))
33 | with AnyFlatSpecLike
34 | with Matchers
35 | with BeforeAndAfterAll {
36 |
37 | val dimensions = Seq(Dimension(StatusGroupLabeler.name, "2xx"), Dimension(PathLabeler.name, "/api"))
38 |
39 | def withFixture(test: (TestProbe, DatadogRegistry) => Any) = {
40 | val statsd = TestProbe()
41 | statsd.send(IO(Udp), Udp.Bind(statsd.ref, new InetSocketAddress(0)))
42 | val port = statsd.expectMsgType[Udp.Bound].localAddress.getPort
43 | val socket = statsd.sender()
44 | val client = new NonBlockingStatsDClientBuilder()
45 | .hostname("localhost")
46 | .port(port)
47 | .build()
48 | val registry = DatadogRegistry(client)
49 | try {
50 | test(statsd, registry)
51 | } finally {
52 | client.close()
53 | socket ! Udp.Unbind
54 | }
55 | }
56 |
57 | override def afterAll(): Unit = {
58 | shutdown()
59 | super.afterAll()
60 | }
61 |
62 | "DatadogRegistry" should "send requestsActive datagrams to the statsd server" in withFixture { (statsd, registry) =>
63 | registry.requestsActive.inc()
64 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.requests_active:1|c\n"
65 | }
66 |
67 | it should "send requests datagrams to the statsd server" in withFixture { (statsd, registry) =>
68 | registry.requests.inc()
69 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.requests_count:1|c\n"
70 | }
71 |
72 | it should "send requestsSize datagrams to the statsd server" in withFixture { (statsd, registry) =>
73 | registry.requestsSize.update(3)
74 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.requests_bytes:3|d\n"
75 |
76 | registry.requestsSize.update(3, dimensions)
77 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.requests_bytes:3|d|#path:/api,status:2xx\n"
78 | }
79 |
80 | it should "send responses datagrams to the statsd server" in withFixture { (statsd, registry) =>
81 | registry.responses.inc()
82 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_count:1|c\n"
83 |
84 | registry.responses.inc(dimensions)
85 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_count:1|c|#path:/api,status:2xx\n"
86 | }
87 |
88 | it should "send responsesErrors datagrams to the statsd server" in withFixture { (statsd, registry) =>
89 | registry.responsesErrors.inc()
90 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_errors_count:1|c\n"
91 |
92 | registry.responsesErrors.inc(dimensions)
93 | statsd
94 | .expectMsgType[Udp.Received]
95 | .data
96 | .utf8String shouldBe "akka.http.responses_errors_count:1|c|#path:/api,status:2xx\n"
97 | }
98 |
99 | it should "send responsesDuration datagrams to the statsd server" in withFixture { (statsd, registry) =>
100 | registry.responsesDuration.observe(3.seconds)
101 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_duration:3000|d\n"
102 |
103 | registry.responsesDuration.observe(3.seconds, dimensions)
104 | statsd
105 | .expectMsgType[Udp.Received]
106 | .data
107 | .utf8String shouldBe "akka.http.responses_duration:3000|d|#path:/api,status:2xx\n"
108 | }
109 |
110 | it should "send responsesSize datagrams to the statsd server" in withFixture { (statsd, registry) =>
111 | registry.responsesSize.update(3)
112 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_bytes:3|d\n"
113 |
114 | registry.responsesSize.update(3, dimensions)
115 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_bytes:3|d|#path:/api,status:2xx\n"
116 | }
117 |
118 | it should "send connectionsActive datagrams to the statsd server" in withFixture { (statsd, registry) =>
119 | registry.connectionsActive.inc()
120 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.connections_active:1|c\n"
121 | }
122 | it should "send connections datagrams to the statsd server" in withFixture { (statsd, registry) =>
123 | registry.connections.inc()
124 | statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.connections_count:1|c\n"
125 | }
126 | }
127 |
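Illustrative sketch (not part of the repository): wiring a DatadogRegistry against a local agent, mirroring the fixture above; port 8125 is the conventional StatsD port and is an assumption.

  import com.timgroup.statsd.NonBlockingStatsDClientBuilder
  import fr.davit.akka.http.metrics.datadog.DatadogRegistry

  val client = new NonBlockingStatsDClientBuilder()
    .hostname("localhost")
    .port(8125) // adjust to your agent
    .build()
  val registry = DatadogRegistry(client)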
--------------------------------------------------------------------------------
/prometheus/src/main/scala/fr/davit/akka/http/metrics/prometheus/PrometheusSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.prometheus
18 |
19 | import akka.http.scaladsl.model.{HttpResponse, StatusCodes}
20 | import fr.davit.akka.http.metrics.core.HttpMetricsNames.HttpMetricsNamesImpl
21 | import fr.davit.akka.http.metrics.core.{Dimension, HttpMessageLabeler, HttpMetricsNames, HttpMetricsSettings}
22 | import fr.davit.akka.http.metrics.prometheus.Quantiles.Quantile
23 |
24 | import scala.collection.immutable
25 | import scala.concurrent.duration._
26 |
27 | sealed trait HistogramConfig
28 |
29 | sealed trait TimerConfig
30 |
31 | final case class Quantiles(qs: List[Quantile], maxAge: FiniteDuration = 10.minutes, ageBuckets: Int = 5)
32 | extends HistogramConfig
33 | with TimerConfig
34 |
35 | object Quantiles {
36 |
37 | final case class Quantile(percentile: Double, error: Double = 0.001)
38 |
39 | def apply(percentiles: Double*): Quantiles = {
40 | val quantiles = percentiles.map { p =>
41 |       // the higher the percentile, the lower the error
42 | val error = (1 - p) / 10
43 | Quantile(p, error)
44 | }
45 | Quantiles(quantiles.toList)
46 | }
47 | }
48 |
49 | final case class Buckets(bs: List[Double]) extends HistogramConfig with TimerConfig
50 |
51 | object Buckets {
52 | def apply(b: Double*): Buckets = Buckets(b.toList)
53 | }
54 |
55 | object PrometheusMetricsNames {
56 |
57 | val default: HttpMetricsNames = HttpMetricsNamesImpl(
58 | requests = "requests_total",
59 | requestsActive = "requests_active",
60 | requestsFailures = "requests_failures_total",
61 | requestsSize = "requests_size_bytes",
62 | responses = "responses_total",
63 | responsesErrors = "responses_errors_total",
64 | responsesDuration = "responses_duration_seconds",
65 | responsesSize = "responses_size_bytes",
66 | connections = "connections_total",
67 | connectionsActive = "connections_active"
68 | )
69 | }
70 |
71 | final case class PrometheusSettings(
72 | namespace: String,
73 | metricsNames: HttpMetricsNames,
74 | defineError: HttpResponse => Boolean,
75 | includeMethodDimension: Boolean,
76 | includePathDimension: Boolean,
77 | includeStatusDimension: Boolean,
78 | serverDimensions: immutable.Seq[Dimension] = immutable.Seq.empty,
79 | customDimensions: immutable.Seq[HttpMessageLabeler] = immutable.Seq.empty,
80 | receivedBytesConfig: HistogramConfig,
81 | durationConfig: TimerConfig,
82 | sentBytesConfig: HistogramConfig
83 | ) extends HttpMetricsSettings {
84 |
85 | def withNamespace(namespace: String): PrometheusSettings = copy(namespace = namespace)
86 | def withMetricsNames(metricsNames: HttpMetricsNames): PrometheusSettings = copy(metricsNames = metricsNames)
87 |   def withDefineError(fn: HttpResponse => Boolean): PrometheusSettings = copy(defineError = fn)
88 | def withIncludeMethodDimension(include: Boolean): PrometheusSettings = copy(includeMethodDimension = include)
89 | def withIncludePathDimension(include: Boolean): PrometheusSettings = copy(includePathDimension = include)
90 | def withIncludeStatusDimension(include: Boolean): PrometheusSettings = copy(includeStatusDimension = include)
91 | def withServerDimensions(dims: Dimension*): PrometheusSettings = copy(serverDimensions = dims.toVector)
92 | def withCustomDimensions(dims: HttpMessageLabeler*): PrometheusSettings = copy(customDimensions = dims.toVector)
93 | def withReceivedBytesConfig(config: HistogramConfig): PrometheusSettings = copy(receivedBytesConfig = config)
94 | def withDurationConfig(config: TimerConfig): PrometheusSettings = copy(durationConfig = config)
95 | def withSentBytesConfig(config: HistogramConfig): PrometheusSettings = copy(sentBytesConfig = config)
96 | }
97 |
98 | object PrometheusSettings {
99 |
100 |   // generic duration buckets adapted to network latencies, in seconds
101 | val DurationBuckets: Buckets = {
102 |     Buckets(0.005, 0.01, 0.025, 0.05, 0.075, 0.1, 0.25, 0.5, 0.75, 1, 2.5, 5, 7.5, 10)
103 | }
104 |
105 |   // generic buckets adapted to network message sizes
106 | val BytesBuckets: Buckets = {
107 | val buckets = Range(0, 1000, 100) ++ Range(1000, 10000, 1000) ++ Range(10000, 100000, 10000)
108 | Buckets(buckets.map(_.toDouble).toList)
109 | }
110 |
111 | // basic quantiles
112 | val DefaultQuantiles: Quantiles = Quantiles(0.75, 0.95, 0.98, 0.99, 0.999)
113 |
114 | val default: PrometheusSettings = PrometheusSettings(
115 | namespace = "akka_http",
116 | defineError = _.status.isInstanceOf[StatusCodes.ServerError],
117 | includeMethodDimension = false,
118 | includePathDimension = false,
119 | includeStatusDimension = false,
120 | serverDimensions = immutable.Seq.empty[Dimension],
121 | receivedBytesConfig = BytesBuckets,
122 | durationConfig = DurationBuckets,
123 | sentBytesConfig = BytesBuckets,
124 | metricsNames = PrometheusMetricsNames.default
125 | )
126 | }
127 |
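Illustrative sketch (not part of the repository): swapping the histogram and summary configuration defined above; the concrete quantiles and buckets are assumptions.

  import fr.davit.akka.http.metrics.prometheus.{Buckets, PrometheusSettings, Quantiles}

  val settings = PrometheusSettings.default
    .withDurationConfig(Quantiles(0.5, 0.9, 0.99)) // summaries instead of histograms for durations
    .withReceivedBytesConfig(Buckets(0.0, 1024.0, 10240.0, 102400.0))
    .withSentBytesConfig(PrometheusSettings.BytesBuckets) // keep the default buckets for sent bytes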
--------------------------------------------------------------------------------
/prometheus/src/it/scala/fr/davit/akka/http/metrics/prometheus/PrometheusMetricsItSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.prometheus
18 |
19 | import akka.actor.ActorSystem
20 | import akka.http.scaladsl.Http
21 | import akka.http.scaladsl.model.{HttpRequest, StatusCodes, Uri}
22 | import akka.http.scaladsl.server.Directives._
23 | import akka.http.scaladsl.server.Route
24 | import akka.http.scaladsl.unmarshalling.Unmarshal
25 | import akka.testkit.TestKit
26 | import fr.davit.akka.http.metrics.core.HttpMetrics._
27 | import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives._
28 | import fr.davit.akka.http.metrics.prometheus.marshalling.PrometheusMarshallers._
29 | import io.prometheus.client.hotspot.DefaultExports
30 | import org.scalatest.BeforeAndAfterAll
31 | import org.scalatest.concurrent.ScalaFutures
32 | import org.scalatest.flatspec.AnyFlatSpecLike
33 | import org.scalatest.matchers.should.Matchers
34 | import org.scalatest.time.{Millis, Seconds, Span}
35 |
36 | import scala.concurrent.duration._
37 |
38 | class PrometheusMetricsItSpec
39 | extends TestKit(ActorSystem("PrometheusMetricsItSpec"))
40 | with AnyFlatSpecLike
41 | with Matchers
42 | with ScalaFutures
43 | with BeforeAndAfterAll {
44 |
45 | implicit val defaultPatience = PatienceConfig(timeout = Span(10, Seconds), interval = Span(500, Millis))
46 |
47 | override def afterAll(): Unit = {
48 | Http().shutdownAllConnectionPools()
49 | TestKit.shutdownActorSystem(system)
50 | }
51 |
52 | "PrometheusMetrics" should "expose external metrics" in {
53 | val settings = PrometheusSettings.default
54 | .withIncludeMethodDimension(true)
55 | .withIncludePathDimension(true)
56 | .withIncludeStatusDimension(true)
57 | DefaultExports.initialize() // JVM
58 | val registry = PrometheusRegistry(settings = settings)
59 |
60 | val route: Route = (get & path("metrics"))(metrics(registry))
61 |
62 | val binding = Http()
63 | .newMeteredServerAt("localhost", 0, registry)
64 | .bindFlow(route)
65 | .futureValue
66 |
67 | val uri = Uri("/metrics")
68 | .withScheme("http")
69 | .withAuthority(binding.localAddress.getHostString, binding.localAddress.getPort)
70 | val request = HttpRequest().withUri(uri)
71 |
72 | val response = Http()
73 | .singleRequest(request)
74 | .futureValue
75 |
76 | response.status shouldBe StatusCodes.OK
77 | val body = Unmarshal(response).to[String].futureValue
78 |
79 | body
80 | .split('\n')
81 | .filter(_.startsWith("# TYPE ")) should contain allElementsOf Seq(
82 | "# TYPE akka_http_connections_active gauge",
83 | "# TYPE akka_http_connections_created gauge",
84 | "# TYPE akka_http_connections_total counter",
85 | "# TYPE akka_http_requests_active gauge",
86 | "# TYPE akka_http_requests_created gauge",
87 | "# TYPE akka_http_requests_size_bytes histogram",
88 | "# TYPE akka_http_requests_size_bytes_created gauge",
89 | "# TYPE akka_http_requests_total counter",
90 | "# TYPE jvm_buffer_pool_capacity_bytes gauge",
91 | "# TYPE jvm_buffer_pool_used_buffers gauge",
92 | "# TYPE jvm_buffer_pool_used_bytes gauge",
93 | "# TYPE jvm_classes_currently_loaded gauge",
94 | "# TYPE jvm_classes_loaded_total counter",
95 | "# TYPE jvm_classes_unloaded_total counter",
96 | "# TYPE jvm_gc_collection_seconds summary",
97 | "# TYPE jvm_info gauge",
98 | "# TYPE jvm_memory_bytes_committed gauge",
99 | "# TYPE jvm_memory_bytes_init gauge",
100 | "# TYPE jvm_memory_bytes_max gauge",
101 | "# TYPE jvm_memory_bytes_used gauge",
102 | "# TYPE jvm_memory_objects_pending_finalization gauge",
103 | "# TYPE jvm_memory_pool_allocated_bytes_total counter",
104 | "# TYPE jvm_memory_pool_bytes_committed gauge",
105 | "# TYPE jvm_memory_pool_bytes_init gauge",
106 | "# TYPE jvm_memory_pool_bytes_max gauge",
107 | "# TYPE jvm_memory_pool_bytes_used gauge",
108 | "# TYPE jvm_memory_pool_collection_committed_bytes gauge",
109 | "# TYPE jvm_memory_pool_collection_init_bytes gauge",
110 | "# TYPE jvm_memory_pool_collection_max_bytes gauge",
111 | "# TYPE jvm_memory_pool_collection_used_bytes gauge",
112 | "# TYPE jvm_threads_current gauge",
113 | "# TYPE jvm_threads_daemon gauge",
114 | "# TYPE jvm_threads_deadlocked gauge",
115 | "# TYPE jvm_threads_deadlocked_monitor gauge",
116 | "# TYPE jvm_threads_peak gauge",
117 | "# TYPE jvm_threads_started_total counter",
118 | "# TYPE jvm_threads_state gauge",
119 | "# TYPE process_cpu_seconds_total counter",
120 | "# TYPE process_max_fds gauge",
121 | "# TYPE process_open_fds gauge",
122 | "# TYPE process_resident_memory_bytes gauge",
123 | "# TYPE process_start_time_seconds gauge",
124 | "# TYPE process_virtual_memory_bytes gauge"
125 | )
126 |
127 | binding.terminate(30.seconds).futureValue
128 | Http()
129 | }
130 | }
131 |
--------------------------------------------------------------------------------
/graphite/src/it/scala/fr/davit/akka/http/metrics/graphite/GraphiteRegistrySpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.graphite
18 |
19 | import akka.actor.{ActorRef, ActorSystem}
20 | import akka.io.{IO, Tcp}
21 | import akka.testkit.{TestActor, TestKit, TestProbe}
22 | import fr.davit.akka.http.metrics.core.{Dimension, PathLabeler, StatusGroupLabeler}
23 | import org.scalatest.BeforeAndAfterAll
24 | import org.scalatest.flatspec.AnyFlatSpecLike
25 | import org.scalatest.matchers.should.Matchers
26 |
27 | import java.net.InetSocketAddress
28 | import java.time.{Clock, Instant, ZoneId}
29 | import scala.concurrent.duration._
30 |
31 | class GraphiteRegistrySpec
32 | extends TestKit(ActorSystem("GraphiteRegistrySpec"))
33 | with AnyFlatSpecLike
34 | with Matchers
35 | with BeforeAndAfterAll {
36 |
37 | val dimensions = Seq(Dimension(PathLabeler.name, "/api"), Dimension(StatusGroupLabeler.name, "2xx"))
38 | val timestamp = Instant.ofEpochSecond(1234)
39 |
40 | def withFixture(test: (TestProbe, GraphiteRegistry) => Any) = {
41 | val carbon = TestProbe()
42 | val handler = TestProbe()
43 | carbon.send(IO(Tcp), Tcp.Bind(carbon.ref, new InetSocketAddress(0)))
44 | val port = carbon.expectMsgType[Tcp.Bound].localAddress.getPort
45 | val socket = carbon.sender()
46 | carbon.setAutoPilot((sender: ActorRef, msg: Any) =>
47 | msg match {
48 | case _: Tcp.Connected =>
49 | sender ! Tcp.Register(handler.ref)
50 | TestActor.KeepRunning
51 | case _ =>
52 | throw new Exception(s"Unexpected message $msg")
53 | }
54 | )
55 |
56 | val client = new CarbonClient("localhost", port) {
57 | override val clock: Clock = Clock.fixed(timestamp, ZoneId.systemDefault())
58 | }
59 | val registry = GraphiteRegistry(client)
60 | try {
61 | test(handler, registry)
62 | } finally {
63 | // client.close()
64 | socket ! Tcp.Unbind
65 | }
66 | }
67 |
68 | override def afterAll(): Unit = {
69 | shutdown()
70 | super.afterAll()
71 | }
72 |
73 | "GraphiteRegistry" should "send requestsActive datagrams to the carbon server" in withFixture { (carbon, registry) =>
74 | registry.requestsActive.inc()
75 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.requests.active 1 1234\n"
76 | }
77 |
78 | it should "send requests datagrams to the carbon server" in withFixture { (carbon, registry) =>
79 | registry.requests.inc()
80 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.requests 1 1234\n"
81 | }
82 |
83 | it should "send requestsSize datagrams to the carbon server" in withFixture { (carbon, registry) =>
84 | registry.requestsSize.update(3)
85 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.requests.bytes 3 1234\n"
86 |
87 | registry.requestsSize.update(3, dimensions)
88 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.requests.bytes;path=/api;status=2xx 3 1234\n"
89 | }
90 |
91 | it should "send responses datagrams to the carbon server" in withFixture { (carbon, registry) =>
92 | registry.responses.inc()
93 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.responses 1 1234\n"
94 |
95 | registry.responses.inc(dimensions)
96 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.responses;path=/api;status=2xx 1 1234\n"
97 | }
98 |
99 | it should "send responsesErrors datagrams to the carbon server" in withFixture { (carbon, registry) =>
100 | registry.responsesErrors.inc()
101 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.responses.errors 1 1234\n"
102 |
103 | registry.responsesErrors.inc(dimensions)
104 | carbon
105 | .expectMsgType[Tcp.Received]
106 | .data
107 | .utf8String shouldBe "akka.http.responses.errors;path=/api;status=2xx 1 1234\n"
108 | }
109 |
110 | it should "send responsesDuration datagrams to the carbon server" in withFixture { (carbon, registry) =>
111 | registry.responsesDuration.observe(3.seconds)
112 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.responses.duration 3000 1234\n"
113 |
114 | registry.responsesDuration.observe(3.seconds, dimensions)
115 | carbon
116 | .expectMsgType[Tcp.Received]
117 | .data
118 | .utf8String shouldBe "akka.http.responses.duration;path=/api;status=2xx 3000 1234\n"
119 | }
120 |
121 | it should "send responsesSize datagrams to the carbon server" in withFixture { (carbon, registry) =>
122 | registry.responsesSize.update(3)
123 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.responses.bytes 3 1234\n"
124 |
125 | registry.responsesSize.update(3, dimensions)
126 | carbon
127 | .expectMsgType[Tcp.Received]
128 | .data
129 | .utf8String shouldBe "akka.http.responses.bytes;path=/api;status=2xx 3 1234\n"
130 | }
131 |
132 | it should "send connectionsActive datagrams to the carbon server" in withFixture { (carbon, registry) =>
133 | registry.connectionsActive.inc()
134 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.connections.active 1 1234\n"
135 | }
136 | it should "send connections datagrams to the carbon server" in withFixture { (carbon, registry) =>
137 | registry.connections.inc()
138 | carbon.expectMsgType[Tcp.Received].data.utf8String shouldBe "akka.http.connections 1 1234\n"
139 | }
140 |
141 | }
142 |
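Illustrative sketch (not part of the repository): wiring a GraphiteRegistry against a Carbon endpoint, mirroring the fixture above; port 2003 is the conventional Carbon plaintext port, and an ActorSystem in implicit scope is assumed, as in the spec.

  import akka.actor.ActorSystem
  import fr.davit.akka.http.metrics.graphite.{CarbonClient, GraphiteRegistry}

  implicit val system: ActorSystem = ActorSystem("example")

  val client   = new CarbonClient("localhost", 2003)
  val registry = GraphiteRegistry(client)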
--------------------------------------------------------------------------------
/core/src/main/scala/fr/davit/akka/http/metrics/core/HttpMetricsRegistry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.http.scaladsl.model._
20 | import akka.stream.scaladsl.{Flow, Sink}
21 | import akka.util.ByteString
22 |
23 | import scala.concurrent.duration.Deadline
24 |
25 | object HttpMetricsRegistry {
26 | private[metrics] val TraceTimestampKey: AttributeKey[Deadline] = AttributeKey("trace-time")
27 | }
28 |
29 | abstract class HttpMetricsRegistry(settings: HttpMetricsSettings) extends HttpMetricsHandler {
30 |
31 | import HttpMetricsRegistry._
32 |
33 | private val requestLabelers: Seq[HttpRequestLabeler] = {
34 | val builder = Seq.newBuilder[HttpRequestLabeler]
35 | if (settings.includeMethodDimension) builder += MethodLabeler
36 | builder ++= settings.customDimensions.collect { case l: HttpRequestLabeler => l }
37 | builder.result()
38 | }
39 |
40 | private val responseLabelers: Seq[HttpResponseLabeler] = {
41 | val builder = Seq.newBuilder[HttpResponseLabeler]
42 | if (settings.includeStatusDimension) builder += StatusGroupLabeler
43 | if (settings.includePathDimension) builder += PathLabeler
44 | builder ++= settings.customDimensions.collect { case l: HttpResponseLabeler => l }
45 | builder.result()
46 | }
47 |
48 | private def requestDimensions(request: HttpRequest): Seq[Dimension] = {
49 | requestLabelers.map(_.dimension(request))
50 | }
51 |
52 | private def responseDimensions(response: HttpResponse): Seq[Dimension] = {
53 | responseLabelers.map(_.dimension(response))
54 | }
55 |
56 | override def onRequest(request: HttpRequest): HttpRequest = {
57 | val start = Deadline.now
58 | val dims = settings.serverDimensions ++ requestDimensions(request)
59 | requestsActive.inc(dims)
60 | requests.inc(dims)
61 |
62 | val entity = request.entity match {
63 | case data: HttpEntity.Strict =>
64 | requestsSize.update(data.contentLength, dims)
65 | data
66 | case data: HttpEntity.Default =>
67 | requestsSize.update(data.contentLength, dims)
68 | data
69 | case data: HttpEntity.Chunked =>
70 | val collectSizeSink = Flow[ByteString]
71 | .map(_.length)
72 | .fold(0L)(_ + _)
73 | .to(Sink.foreach(size => requestsSize.update(size, dims)))
74 | data.transformDataBytes(Flow[ByteString].alsoTo(collectSizeSink))
75 | }
76 |
77 | // modify the request
78 | request
79 | .addAttribute(TraceTimestampKey, start)
80 | .withEntity(entity)
81 | }
82 |
83 | override def onResponse(request: HttpRequest, response: HttpResponse): HttpResponse = {
84 | val start = request.attribute(TraceTimestampKey).get
85 | val reqDims = settings.serverDimensions ++ requestDimensions(request)
86 | val respDims = reqDims ++ responseDimensions(response)
87 |
88 | requestsActive.dec(reqDims)
89 | responses.inc(respDims)
90 | if (settings.defineError(response)) responsesErrors.inc(respDims)
91 | response.entity match {
92 | case data: HttpEntity.Strict =>
93 | responsesSize.update(data.contentLength, respDims)
94 | responsesDuration.observe(Deadline.now - start, respDims)
95 | response
96 | case data: HttpEntity.Default =>
97 | responsesSize.update(data.contentLength, respDims)
98 | responsesDuration.observe(Deadline.now - start, respDims)
99 | response
100 | case _: HttpEntity.Chunked | _: HttpEntity.CloseDelimited =>
101 | val collectSizeSink = Flow[ByteString]
102 | .map(_.length)
103 | .fold(0L)(_ + _)
104 | .to(Sink.foreach { size =>
105 | responsesSize.update(size, respDims)
106 | responsesDuration.observe(Deadline.now - start, respDims)
107 | })
108 | response.transformEntityDataBytes(Flow[ByteString].alsoTo(collectSizeSink))
109 | }
110 | }
111 |
112 | override def onFailure(request: HttpRequest, cause: Throwable): Throwable = {
113 | val dims = settings.serverDimensions ++ requestDimensions(request)
114 | requestsActive.dec(dims)
115 | requestsFailures.inc(dims)
116 | cause
117 | }
118 |
119 | override def onConnection(): Unit = {
120 | val dims = settings.serverDimensions
121 | connections.inc(dims)
122 | connectionsActive.inc(dims)
123 | }
124 |
125 | override def onDisconnection(): Unit = {
126 | val dims = settings.serverDimensions
127 | connectionsActive.dec(dims)
128 | }
129 |
130 | // --------------------------------------------------------------------------------------------------------------------
131 | // requests
132 | // --------------------------------------------------------------------------------------------------------------------
133 | def requests: Counter
134 |
135 | def requestsActive: Gauge
136 |
137 | def requestsFailures: Counter
138 |
139 | def requestsSize: Histogram
140 |
141 | // --------------------------------------------------------------------------------------------------------------------
142 | // responses
143 | // --------------------------------------------------------------------------------------------------------------------
144 | def responses: Counter
145 |
146 | def responsesErrors: Counter
147 |
148 | def responsesDuration: Timer
149 |
150 | def responsesSize: Histogram
151 |
152 | // --------------------------------------------------------------------------------------------------------------------
153 | // Connections
154 | // --------------------------------------------------------------------------------------------------------------------
155 | def connections: Counter
156 |
157 | def connectionsActive: Gauge
158 | }
159 |
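For chunked entities, the registry above tees the data bytes into a folding sink so the size is recorded only once the whole stream has passed through. A minimal standalone sketch of that pattern; `ChunkedSizeSketch` and `recordSize` are illustrative names standing in for `requestsSize.update` / `responsesSize.update`:

import akka.actor.ActorSystem
import akka.stream.scaladsl.{Flow, Sink, Source}
import akka.util.ByteString

object ChunkedSizeSketch extends App {
  implicit val system: ActorSystem = ActorSystem("chunked-size-sketch")

  // stand-in for the registry's histogram update
  def recordSize(size: Long): Unit = println(s"total entity bytes: $size")

  // sum the chunk sizes as they stream through and emit the total once upstream completes
  val collectSizeSink = Flow[ByteString]
    .map(_.length)
    .fold(0L)(_ + _)
    .to(Sink.foreach(recordSize))

  // `alsoTo` tees the bytes: the real consumer still receives them untouched, the sink only observes
  Source(List("a", "bb", "ccc").map(ByteString.apply))
    .alsoTo(collectSizeSink)
    .runWith(Sink.ignore)
    .onComplete(_ => system.terminate())(system.dispatcher)
}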
--------------------------------------------------------------------------------
/core/src/test/scala/fr/davit/akka/http/metrics/core/MeterStageSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.actor.ActorSystem
20 | import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
21 | import akka.stream.ClosedShape
22 | import akka.stream.scaladsl.{GraphDSL, RunnableGraph}
23 | import akka.stream.testkit.scaladsl.{TestSink, TestSource}
24 | import akka.testkit.TestKit
25 | import org.scalamock.scalatest.MockFactory
26 | import org.scalatest.concurrent.ScalaFutures
27 | import org.scalatest.flatspec.AnyFlatSpecLike
28 | import org.scalatest.matchers.should.Matchers
29 |
30 | class MeterStageSpec
31 | extends TestKit(ActorSystem("MeterStageSpec"))
32 | with AnyFlatSpecLike
33 | with Matchers
34 | with MockFactory
35 | with ScalaFutures {
36 |
37 | val request = HttpRequest()
38 | val response = HttpResponse()
39 |
40 | trait Fixture {
41 | val handler = mock[HttpMetricsHandler]
42 |
43 | (handler.onConnection _)
44 | .expects()
45 | .returns((): Unit)
46 |
47 | val (requestIn, requestOut, responseIn, responseOut) = RunnableGraph
48 | .fromGraph(
49 | GraphDSL.createGraph(
50 | TestSource.probe[HttpRequest],
51 | TestSink.probe[HttpRequest],
52 | TestSource.probe[HttpResponse],
53 | TestSink.probe[HttpResponse]
54 | )((_, _, _, _)) { implicit builder => (reqIn, reqOut, respIn, respOut) =>
55 | import GraphDSL.Implicits._
56 | val meter = builder.add(new MeterStage(handler))
57 |
58 | reqIn ~> meter.in1
59 | meter.out1 ~> reqOut
60 | respIn ~> meter.in2
61 | meter.out2 ~> respOut
62 | ClosedShape
63 | }
64 | )
65 | .run()
66 |
67 | // simulate downstream demand
68 | responseOut.request(1)
69 | requestOut.request(1)
70 | }
71 |
72 | "MeterStage" should "call onConnection on materialization and onDisconnection once terminated" in new Fixture {
73 | (handler.onDisconnection _)
74 | .expects()
75 | .returns((): Unit)
76 |
77 | requestIn.sendComplete()
78 | requestOut.expectComplete()
79 |
80 | responseIn.sendComplete()
81 | responseOut.expectComplete()
82 | }
83 |
84 | it should "call onRequest when a request is offered" in new Fixture {
85 | (handler.onRequest _)
86 | .expects(request)
87 | .returns(request)
88 |
89 | requestIn.sendNext(request)
90 | requestOut.expectNext() shouldBe request
91 |
92 | (handler.onResponse _)
93 | .expects(request, response)
94 | .returns(response)
95 |
96 | responseIn.sendNext(response)
97 | responseOut.expectNext() shouldBe response
98 | }
99 |
100 | it should "flush the stream before stopping" in new Fixture {
101 | (handler.onRequest _)
102 | .expects(request)
103 | .returns(request)
104 |
105 | requestIn.sendNext(request)
106 | requestOut.expectNext() shouldBe request
107 |
108 | // close request side
109 | requestIn.sendComplete()
110 | requestOut.expectComplete()
111 |
112 | // response should still be accepted
113 | (handler.onResponse _)
114 | .expects(request, response)
115 | .returns(response)
116 |
117 | responseIn.sendNext(response)
118 | responseOut.expectNext() shouldBe response
119 | }
120 |
121 | it should "propagate error from request in" in new Fixture {
122 | (handler.onRequest _)
123 | .expects(request)
124 | .returns(request)
125 |
126 | requestIn.sendNext(request)
127 | requestOut.expectNext() shouldBe request
128 |
129 | val error = new Exception("BOOM!")
130 | requestIn.sendError(error)
131 | requestOut.expectError(error)
132 | }
133 |
134 | it should "propagate error from request out" in new Fixture {
135 | (handler.onRequest _)
136 | .expects(request)
137 | .returns(request)
138 |
139 | requestIn.sendNext(request)
140 | requestOut.expectNext() shouldBe request
141 |
142 | val error = new Exception("BOOM!")
143 | requestOut.cancel(error)
144 | requestIn.expectCancellation()
145 | }
146 |
147 | it should "terminate and fail pending" in new Fixture {
148 | (handler.onRequest _)
149 | .expects(request)
150 | .returns(request)
151 |
152 | requestIn.sendNext(request)
153 | requestIn.sendComplete()
154 | requestOut.expectNext() shouldBe request
155 | requestOut.expectComplete()
156 |
157 | (handler.onFailure _)
158 | .expects(request, MeterStage.PrematureCloseException)
159 | .returns(MeterStage.PrematureCloseException)
160 |
161 | responseIn.sendComplete()
162 | responseOut.expectComplete()
163 | }
164 |
165 | it should "propagate error from response in and fail pending" in new Fixture {
166 | (handler.onRequest _)
167 | .expects(request)
168 | .returns(request)
169 |
170 | requestIn.sendNext(request)
171 | requestIn.sendComplete()
172 | requestOut.expectNext() shouldBe request
173 | requestOut.expectComplete()
174 |
175 | val error = new Exception("BOOM!")
176 | (handler.onFailure _)
177 | .expects(request, error)
178 | .returns(error)
179 |
180 | responseIn.sendError(error)
181 | responseOut.expectError(error)
182 | }
183 |
184 | it should "propagate error from response out and fail pending" in new Fixture {
185 | (handler.onRequest _)
186 | .expects(request)
187 | .returns(request)
188 |
189 | requestIn.sendNext(request)
190 | requestIn.sendComplete()
191 | requestOut.expectNext() shouldBe request
192 | requestOut.expectComplete()
193 |
194 | val error = new Exception("BOOM!")
195 | (handler.onFailure _)
196 | .expects(request, error)
197 | .returns(error)
198 |
199 | responseOut.cancel(error)
200 | responseIn.expectCancellation()
201 | }
202 | }
203 |
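The in1/out1 and in2/out2 ports wired in this spec are those of a BidiShape, so outside the test the stage is typically wrapped in a BidiFlow and joined with the server flow, much like `HttpMetrics.meterFlow` is used in HttpMetricsSpec below. A hedged sketch, assuming `MeterStage` exposes a BidiShape and is visible from the `core` package; `MeterStageSketch`, `meterBidi` and `metered` are illustrative names:

package fr.davit.akka.http.metrics.core

import akka.NotUsed
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import akka.stream.scaladsl.{BidiFlow, Flow}

object MeterStageSketch {
  // wrap the stage: requests travel in1 -> out1, responses travel in2 -> out2
  def meterBidi(handler: HttpMetricsHandler): BidiFlow[HttpRequest, HttpRequest, HttpResponse, HttpResponse, NotUsed] =
    BidiFlow.fromGraph(new MeterStage(handler))

  // joined with the server logic this yields a plain request->response flow, metered on both sides
  def metered(
      handler: HttpMetricsHandler,
      serverFlow: Flow[HttpRequest, HttpResponse, Any]
  ): Flow[HttpRequest, HttpResponse, NotUsed] =
    meterBidi(handler).join(serverFlow)
}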
--------------------------------------------------------------------------------
/core/src/test/scala/fr/davit/akka/http/metrics/core/HttpMetricsSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.actor.ActorSystem
20 | import akka.http.scaladsl.marshalling.Marshal
21 | import akka.http.scaladsl.model.{HttpRequest, HttpResponse, StatusCodes}
22 | import akka.http.scaladsl.server.Directives._
23 | import akka.http.scaladsl.server.{RequestContext, RouteResult}
24 | import akka.stream.scaladsl.Keep
25 | import akka.stream.testkit.scaladsl.{TestSink, TestSource}
26 | import akka.testkit.TestKit
27 | import org.scalamock.matchers.ArgCapture.CaptureOne
28 | import org.scalamock.scalatest.MockFactory
29 | import org.scalatest.BeforeAndAfterAll
30 | import org.scalatest.concurrent.ScalaFutures
31 | import org.scalatest.flatspec.AnyFlatSpecLike
32 | import org.scalatest.matchers.should.Matchers
33 |
34 | import scala.concurrent.{ExecutionContext, Future}
35 |
36 | class HttpMetricsSpec
37 | extends TestKit(ActorSystem("HttpMetricsSpec"))
38 | with AnyFlatSpecLike
39 | with Matchers
40 | with ScalaFutures
41 | with MockFactory
42 | with BeforeAndAfterAll {
43 |
44 | implicit val ec: ExecutionContext = system.dispatcher
45 |
46 | abstract class Fixture[T] {
47 | val metricsHandler = mock[HttpMetricsHandler]
48 | val server = mockFunction[RequestContext, Future[RouteResult]]
49 |
50 | (metricsHandler.onConnection _)
51 | .expects()
52 | .returns((): Unit)
53 |
54 | (metricsHandler.onDisconnection _)
55 | .expects()
56 | .returns((): Unit)
57 |
58 | val (source, sink) = TestSource
59 | .probe[HttpRequest]
60 | .via(HttpMetrics.meterFlow(metricsHandler).join(HttpMetrics.metricsRouteToFlow(server)))
61 | .toMat(TestSink.probe[HttpResponse])(Keep.both)
62 | .run()
63 | }
64 |
65 | override def afterAll(): Unit = {
66 | TestKit.shutdownActorSystem(system)
67 | }
68 |
69 | "HttpMetrics" should "provide newMeteredServerAt extension" in {
70 | """
71 | |import akka.http.scaladsl.Http
72 | |import fr.davit.akka.http.metrics.core.HttpMetrics._
73 | |val registry = new TestRegistry(TestRegistry.settings)
74 | |implicit val system: ActorSystem = ActorSystem()
75 | |Http().newMeteredServerAt("localhost", 8080, registry)
76 | """.stripMargin should compile
77 | }
78 |
79 | it should "seal the route and mark unhandled requests" in {
80 | {
81 | val handler = HttpMetrics.metricsRouteToFunction(reject)
82 | val response = handler(HttpRequest()).futureValue
83 | response.attributes(PathLabeler.key) shouldBe "unhandled"
84 | }
85 |
86 | {
87 | val handler = HttpMetrics.metricsRouteToFunction(failWith(new Exception("BOOM!")))
88 | val response = handler(HttpRequest()).futureValue
89 | response.attributes(PathLabeler.key) shouldBe "unhandled"
90 | }
91 | }
92 |
93 | it should "call the metrics handler on connection" in new Fixture {
94 | sink.request(1)
95 | source.sendComplete()
96 | sink.expectComplete()
97 | }
98 |
99 | it should "call the metrics handler on handled requests" in new Fixture {
100 | val request = CaptureOne[HttpRequest]()
101 | val response = CaptureOne[HttpResponse]()
102 | (metricsHandler.onRequest _)
103 | .expects(capture(request))
104 | .onCall { req: HttpRequest => req }
105 |
106 | server
107 | .expects(*)
108 | .onCall(complete(StatusCodes.OK))
109 |
110 | (metricsHandler.onResponse _)
111 | .expects(*, capture(response))
112 | .onCall { (_: HttpRequest, resp: HttpResponse) => resp }
113 |
114 | sink.request(1)
115 | source.sendNext(HttpRequest())
116 | sink.expectNext()
117 |
118 | source.sendComplete()
119 | sink.expectComplete()
120 |
121 | val expected = Marshal(StatusCodes.OK)
122 | .to[HttpResponse]
123 | .futureValue
124 |
125 | response.value shouldBe expected
126 | }
127 |
128 | it should "call the metrics handler on rejected requests" in new Fixture {
129 | val request = CaptureOne[HttpRequest]()
130 | val response = CaptureOne[HttpResponse]()
131 | (metricsHandler.onRequest _)
132 | .expects(capture(request))
133 | .onCall { req: HttpRequest => req }
134 |
135 | server
136 | .expects(*)
137 | .onCall(reject)
138 |
139 | (metricsHandler.onResponse _)
140 | .expects(*, capture(response))
141 | .onCall { (_: HttpRequest, resp: HttpResponse) => resp }
142 |
143 | sink.request(1)
144 | source.sendNext(HttpRequest())
145 | sink.expectNext()
146 |
147 | source.sendComplete()
148 | sink.expectComplete()
149 |
150 | val expected = Marshal(StatusCodes.NotFound -> "The requested resource could not be found.")
151 | .to[HttpResponse]
152 | .futureValue
153 | .addAttribute(PathLabeler.key, "unhandled")
154 | response.value shouldBe expected
155 | }
156 |
157 | it should "call the metrics handler on error requests" in new Fixture {
158 | val request = CaptureOne[HttpRequest]()
159 | val response = CaptureOne[HttpResponse]()
160 | (metricsHandler.onRequest _)
161 | .expects(capture(request))
162 | .onCall { req: HttpRequest => req }
163 |
164 | server
165 | .expects(*)
166 | .onCall(failWith(new Exception("BOOM!")))
167 |
168 | (metricsHandler.onResponse _)
169 | .expects(*, capture(response))
170 | .onCall { (_: HttpRequest, resp: HttpResponse) => resp }
171 |
172 | sink.request(1)
173 | source.sendNext(HttpRequest())
174 | sink.expectNext()
175 |
176 | source.sendComplete()
177 | sink.expectComplete()
178 |
179 | val expected = Marshal(StatusCodes.InternalServerError)
180 | .to[HttpResponse]
181 | .futureValue
182 | .addAttribute(PathLabeler.key, "unhandled")
183 | response.value shouldBe expected
184 | }
185 |
186 | }
187 |
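Spelled out, the `newMeteredServerAt` wiring checked above looks roughly as follows. `MeteredServerSketch` is an illustrative name, `TestRegistry` is the in-memory registry used by these specs, and any concrete `HttpMetricsRegistry` can take its place:

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import fr.davit.akka.http.metrics.core.HttpMetrics._
import fr.davit.akka.http.metrics.core.TestRegistry

object MeteredServerSketch extends App {
  implicit val system: ActorSystem = ActorSystem()

  // any HttpMetricsRegistry implementation can be used here
  val registry = new TestRegistry(TestRegistry.settings)

  // `newMeteredServerAt` mirrors `Http().newServerAt`, but every connection, request and
  // response of the bound server is reported to the registry
  val builder = Http().newMeteredServerAt("localhost", 8080, registry)

  // routes / handler flows are then bound through the returned HttpMetricsServerBuilder
}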
--------------------------------------------------------------------------------
/prometheus/src/main/scala/fr/davit/akka/http/metrics/prometheus/PrometheusRegistry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.prometheus
18 |
19 | import fr.davit.akka.http.metrics.core._
20 | import fr.davit.akka.http.metrics.prometheus.Quantiles.Quantile
21 | import io.prometheus.client.CollectorRegistry
22 |
23 | object PrometheusRegistry {
24 |
25 | implicit private class RichSummaryBuilder(val builder: io.prometheus.client.Summary.Builder) extends AnyVal {
26 |
27 | def quantiles(qs: Quantile*): io.prometheus.client.Summary.Builder = {
28 | qs.foldLeft(builder) { case (b, q) =>
29 | b.quantile(q.percentile, q.error)
30 | }
31 | }
32 |
33 | }
34 |
35 | def apply(
36 | underlying: CollectorRegistry = CollectorRegistry.defaultRegistry,
37 | settings: PrometheusSettings = PrometheusSettings.default
38 | ): PrometheusRegistry = {
39 | new PrometheusRegistry(settings, underlying)
40 | }
41 |
42 | // order dimensions by name
43 | // this helps ensure the same dimensions are given in the same order
44 | // when creating collectors and observing metrics
45 | implicit val DimensionOrdering: Ordering[Dimension] = Ordering.by(_.name)
46 | }
47 |
48 | /** Prometheus registry. For metrics naming conventions, see [[https://prometheus.io/docs/practices/naming/]]
49 | */
50 | class PrometheusRegistry(settings: PrometheusSettings, val underlying: CollectorRegistry)
51 | extends HttpMetricsRegistry(settings) {
52 |
53 | import PrometheusConverters._
54 | import PrometheusRegistry._
55 |
56 | private val methodDimension = if (settings.includeMethodDimension) Some(MethodLabeler.name) else None
57 | private val pathDimension = if (settings.includePathDimension) Some(PathLabeler.name) else None
58 | private val statusDimension = if (settings.includeStatusDimension) Some(StatusGroupLabeler.name) else None
59 |
60 | private[prometheus] val serverDimensions = settings.serverDimensions.map(_.name)
61 |
62 | private val customRequestDimensions = settings.customDimensions.collect { case l: HttpRequestLabeler => l.name }
63 | private[prometheus] val requestsDimensions = (methodDimension ++ customRequestDimensions).toSeq
64 |
65 | private val customResponseDimensions = settings.customDimensions.collect { case l: HttpResponseLabeler => l.name }
66 | private[prometheus] val responsesDimensions = (statusDimension ++ pathDimension ++ customResponseDimensions).toSeq
67 |
68 | lazy val requests: Counter = io.prometheus.client.Counter
69 | .build()
70 | .namespace(settings.namespace)
71 | .name(settings.metricsNames.requests)
72 | .help("Total HTTP requests")
73 | .labelNames(serverDimensions ++ requestsDimensions: _*)
74 | .register(underlying)
75 |
76 | lazy val requestsActive: Gauge = io.prometheus.client.Gauge
77 | .build()
78 | .namespace(settings.namespace)
79 | .name(settings.metricsNames.requestsActive)
80 | .help("Active HTTP requests")
81 | .labelNames(serverDimensions ++ requestsDimensions: _*)
82 | .register(underlying)
83 |
84 | lazy val requestsFailures: Counter = io.prometheus.client.Counter
85 | .build()
86 | .namespace(settings.namespace)
87 | .name(settings.metricsNames.requestsFailures)
88 | .help("Total unserved requests")
89 | .labelNames(serverDimensions ++ requestsDimensions: _*)
90 | .register(underlying)
91 |
92 | lazy val requestsSize: Histogram = {
93 | val help = "HTTP request size"
94 | settings.receivedBytesConfig match {
95 | case Quantiles(qs, maxAge, ageBuckets) =>
96 | io.prometheus.client.Summary
97 | .build()
98 | .namespace(settings.namespace)
99 | .name(settings.metricsNames.requestsSize)
100 | .help(help)
101 | .labelNames(serverDimensions ++ requestsDimensions: _*)
102 | .quantiles(qs: _*)
103 | .maxAgeSeconds(maxAge.toSeconds)
104 | .ageBuckets(ageBuckets)
105 | .register(underlying)
106 |
107 | case Buckets(bs) =>
108 | io.prometheus.client.Histogram
109 | .build()
110 | .namespace(settings.namespace)
111 | .name(settings.metricsNames.requestsSize)
112 | .help(help)
113 | .labelNames(serverDimensions ++ requestsDimensions: _*)
114 | .buckets(bs: _*)
115 | .register(underlying)
116 | }
117 | }
118 |
119 | lazy val responses: Counter = io.prometheus.client.Counter
120 | .build()
121 | .namespace(settings.namespace)
122 | .name(settings.metricsNames.responses)
123 | .help("HTTP responses")
124 | .labelNames(serverDimensions ++ requestsDimensions ++ responsesDimensions: _*)
125 | .register(underlying)
126 |
127 | lazy val responsesErrors: Counter = io.prometheus.client.Counter
128 | .build()
129 | .namespace(settings.namespace)
130 | .name(settings.metricsNames.responsesErrors)
131 | .help("Total HTTP errors")
132 | .labelNames(serverDimensions ++ requestsDimensions ++ responsesDimensions: _*)
133 | .register(underlying)
134 |
135 | lazy val responsesDuration: Timer = {
136 | val help = "HTTP response duration"
137 |
138 | settings.durationConfig match {
139 | case Quantiles(qs, maxAge, ageBuckets) =>
140 | io.prometheus.client.Summary
141 | .build()
142 | .namespace(settings.namespace)
143 | .name(settings.metricsNames.responsesDuration)
144 | .help(help)
145 | .labelNames(serverDimensions ++ requestsDimensions ++ responsesDimensions: _*)
146 | .quantiles(qs: _*)
147 | .maxAgeSeconds(maxAge.toSeconds)
148 | .ageBuckets(ageBuckets)
149 | .register(underlying)
150 | case Buckets(bs) =>
151 | io.prometheus.client.Histogram
152 | .build()
153 | .namespace(settings.namespace)
154 | .name(settings.metricsNames.responsesDuration)
155 | .help(help)
156 | .labelNames(serverDimensions ++ requestsDimensions ++ responsesDimensions: _*)
157 | .buckets(bs: _*)
158 | .register(underlying)
159 | }
160 | }
161 |
162 | lazy val responsesSize: Histogram = {
163 | val help = "HTTP response size"
164 |
165 | settings.sentBytesConfig match {
166 | case Quantiles(qs, maxAge, ageBuckets) =>
167 | io.prometheus.client.Summary
168 | .build()
169 | .namespace(settings.namespace)
170 | .name(settings.metricsNames.responsesSize)
171 | .help(help)
172 | .labelNames(serverDimensions ++ requestsDimensions ++ responsesDimensions: _*)
173 | .quantiles(qs: _*)
174 | .maxAgeSeconds(maxAge.toSeconds)
175 | .ageBuckets(ageBuckets)
176 | .register(underlying)
177 |
178 | case Buckets(bs) =>
179 | io.prometheus.client.Histogram
180 | .build()
181 | .namespace(settings.namespace)
182 | .name(settings.metricsNames.responsesSize)
183 | .help(help)
184 | .labelNames(serverDimensions ++ requestsDimensions ++ responsesDimensions: _*)
185 | .buckets(bs: _*)
186 | .register(underlying)
187 | }
188 | }
189 |
190 | lazy val connections: Counter = io.prometheus.client.Counter
191 | .build()
192 | .namespace(settings.namespace)
193 | .name(settings.metricsNames.connections)
194 | .help("Total TCP connections")
195 | .labelNames(serverDimensions: _*)
196 | .register(underlying)
197 |
198 | lazy val connectionsActive: Gauge = io.prometheus.client.Gauge
199 | .build()
200 | .namespace(settings.namespace)
201 | .name(settings.metricsNames.connectionsActive)
202 | .help("Active TCP connections")
203 | .labelNames(serverDimensions: _*)
204 | .register(underlying)
205 | }
206 |
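In practice a registry is built once from a dedicated CollectorRegistry together with the dimensions it should expose; a short sketch using only settings exercised by the spec below (`PrometheusRegistrySketch` is an illustrative name):

import fr.davit.akka.http.metrics.prometheus.{PrometheusRegistry, PrometheusSettings}
import io.prometheus.client.CollectorRegistry

object PrometheusRegistrySketch {
  // enable the method/path/status dimensions; label names are fixed when each collector is created
  val settings = PrometheusSettings.default
    .withIncludeMethodDimension(true)
    .withIncludePathDimension(true)
    .withIncludeStatusDimension(true)

  // a fresh CollectorRegistry avoids clashes with the default JVM-wide registry
  val registry = PrometheusRegistry(new CollectorRegistry(), settings)
}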
--------------------------------------------------------------------------------
/prometheus/src/test/scala/fr/davit/akka/http/metrics/prometheus/PrometheusRegistrySpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.prometheus
18 |
19 | import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
20 | import fr.davit.akka.http.metrics.core._
21 | import io.prometheus.client.CollectorRegistry
22 | import org.scalatest.flatspec.AnyFlatSpec
23 | import org.scalatest.matchers.should.Matchers
24 |
25 | import scala.concurrent.duration._
26 |
27 | class PrometheusRegistrySpec extends AnyFlatSpec with Matchers {
28 |
29 | object CustomRequestLabeler extends HttpRequestLabeler {
30 | override def name = "custom_request_dim"
31 | def label = "custom_request_label"
32 | override def label(request: HttpRequest): String = label
33 | }
34 |
35 | object CustomResponseLabeler extends HttpResponseLabeler {
36 | override def name = "custom_response_dim"
37 | def label = "custom_response_label"
38 | override def label(response: HttpResponse): String = label
39 | }
40 |
41 | val serverDimensions = List(
42 | Dimension("server_dim", "server_label")
43 | )
44 | val requestsDimensions = List(
45 | Dimension(MethodLabeler.name, "GET"),
46 | Dimension(CustomRequestLabeler.name, CustomRequestLabeler.label)
47 | )
48 | val responsesDimensions = List(
49 | Dimension(StatusGroupLabeler.name, "2xx"),
50 | Dimension(PathLabeler.name, "/api"),
51 | Dimension(CustomResponseLabeler.name, CustomResponseLabeler.label)
52 | )
53 |
54 | trait Fixture {
55 |
56 | val registry = PrometheusRegistry(
57 | new CollectorRegistry(),
58 | PrometheusSettings.default
59 | )
60 |
61 | def underlyingCounterValue(name: String, dims: Seq[Dimension] = Seq.empty): Long = {
62 | registry.underlying.getSampleValue(name, dims.map(_.name).toArray, dims.map(_.label).toArray).toLong
63 | }
64 |
65 | def underlyingHistogramValue(name: String, dims: Seq[Dimension] = Seq.empty): Double = {
66 | registry.underlying.getSampleValue(s"${name}_sum", dims.map(_.name).toArray, dims.map(_.label).toArray)
67 | }
68 | }
69 |
70 | trait DimensionFixture extends Fixture {
71 |
72 | override val registry = PrometheusRegistry(
73 | new CollectorRegistry(),
74 | PrometheusSettings.default
75 | .withIncludeMethodDimension(true)
76 | .withIncludePathDimension(true)
77 | .withIncludeStatusDimension(true)
78 | .withServerDimensions(serverDimensions: _*)
79 | .withCustomDimensions(CustomRequestLabeler, CustomResponseLabeler)
80 | )
81 | }
82 |
83 | trait MetricsNamesFixture extends Fixture {
84 |
85 | override val registry = PrometheusRegistry(
86 | new CollectorRegistry(),
87 | PrometheusSettings.default
88 | .withNamespace("test_server")
89 | .withMetricsNames(
90 | PrometheusMetricsNames.default
91 | .withConnectionsActive("test_connections_active")
92 | .withRequestsActive("test_requests_active")
93 | .withConnections("test_connections_total")
94 | .withResponsesDuration("test_responses_duration_seconds")
95 | .withResponsesErrors("test_responses_errors_total")
96 | .withRequests("test_requests_total")
97 | .withRequestSize("test_requests_size_bytes")
98 | .withResponses("test_responses_total")
99 | .withResponseSize("test_responses_size_bytes")
100 | )
101 | )
102 | }
103 |
104 | "PrometheusRegistry" should "not have any dimensions by default" in new Fixture {
105 | registry.serverDimensions shouldBe empty
106 | registry.requestsDimensions shouldBe empty
107 | registry.responsesDimensions shouldBe empty
108 | }
109 |
110 | it should "add proper dimensions when configured" in new DimensionFixture {
111 | registry.serverDimensions should contain theSameElementsInOrderAs serverDimensions.map(_.name)
112 | registry.requestsDimensions should contain theSameElementsInOrderAs requestsDimensions.map(_.name)
113 | registry.responsesDimensions should contain theSameElementsInOrderAs responsesDimensions.map(_.name)
114 | }
115 |
116 | it should "set requestsActive metrics in the underlying registry" in new Fixture {
117 | registry.requestsActive.inc()
118 | underlyingCounterValue("akka_http_requests_active") shouldBe 1L
119 | }
120 |
121 | it should "set requestsActive metrics in the underlying registry using updated name" in new MetricsNamesFixture {
122 | registry.requestsActive.inc()
123 | underlyingCounterValue("test_server_test_requests_active") shouldBe 1L
124 | }
125 |
126 | it should "set requestsActive metrics in the underlying registry with dimensions" in new DimensionFixture {
127 | val dim = serverDimensions ++ requestsDimensions
128 | registry.requestsActive.inc(dim)
129 | underlyingCounterValue("akka_http_requests_active", dim) shouldBe 1L
130 | }
131 |
132 | it should "set requests metrics in the underlying registry" in new Fixture {
133 | registry.requests.inc()
134 | underlyingCounterValue("akka_http_requests_total") shouldBe 1L
135 | }
136 |
137 | it should "set requests metrics in the underlying registry using updated name" in new MetricsNamesFixture {
138 | registry.requests.inc()
139 | underlyingCounterValue("test_server_test_requests_total") shouldBe 1L
140 | }
141 |
142 | it should "set requests metrics in the underlying registry with dimensions" in new DimensionFixture {
143 | val dims = serverDimensions ++ requestsDimensions
144 | registry.requests.inc(dims)
145 | underlyingCounterValue("akka_http_requests_total", dims) shouldBe 1L
146 | }
147 |
148 | it should "set requestsSize metrics in the underlying registry" in new Fixture {
149 | registry.requestsSize.update(3)
150 | underlyingHistogramValue("akka_http_requests_size_bytes") shouldBe 3L
151 | }
152 |
153 | it should "set requestsSize metrics in the underlying registry using updated name" in new MetricsNamesFixture {
154 | registry.requestsSize.update(3)
155 | underlyingHistogramValue("test_server_test_requests_size_bytes") shouldBe 3L
156 | }
157 |
158 | it should "set requestsSize metrics in the underlying registry with dimensions" in new DimensionFixture {
159 | val dims = serverDimensions ++ requestsDimensions
160 | registry.requestsSize.update(3, dims)
161 | underlyingHistogramValue("akka_http_requests_size_bytes", dims) shouldBe 3L
162 | }
163 |
164 | it should "set responses metrics in the underlying registry" in new Fixture {
165 | registry.responses.inc()
166 | underlyingCounterValue("akka_http_responses_total") shouldBe 1L
167 | }
168 |
169 | it should "set responses metrics in the underlying registry using updated name" in new MetricsNamesFixture {
170 | registry.responses.inc()
171 | underlyingCounterValue("test_server_test_responses_total") shouldBe 1L
172 | }
173 |
174 | it should "set responses metrics in the underlying registry with dimensions" in new DimensionFixture {
175 | val dims = serverDimensions ++ requestsDimensions ++ responsesDimensions
176 | registry.responses.inc(dims)
177 | underlyingCounterValue("akka_http_responses_total", dims) shouldBe 1L
178 | }
179 |
180 | it should "set responsesErrors metrics in the underlying registry" in new Fixture {
181 | registry.responsesErrors.inc()
182 | underlyingCounterValue("akka_http_responses_errors_total") shouldBe 1L
183 | }
184 |
185 | it should "set responsesErrors metrics in the underlying registry using updated name" in new MetricsNamesFixture {
186 | registry.responsesErrors.inc()
187 | underlyingCounterValue("test_server_test_responses_errors_total") shouldBe 1L
188 | }
189 |
190 | it should "set responsesErrors metrics in the underlying registry with dimensions" in new DimensionFixture {
191 | val dims = serverDimensions ++ requestsDimensions ++ responsesDimensions
192 | registry.responsesErrors.inc(dims)
193 | underlyingCounterValue("akka_http_responses_errors_total", dims) shouldBe 1L
194 | }
195 |
196 | it should "set responsesDuration metrics in the underlying registry" in new Fixture {
197 | registry.responsesDuration.observe(3.seconds)
198 | underlyingHistogramValue("akka_http_responses_duration_seconds") shouldBe 3.0
199 | }
200 |
201 | it should "set responsesDuration metrics in the underlying registry using updated name" in new MetricsNamesFixture {
202 | registry.responsesDuration.observe(3.seconds)
203 | underlyingHistogramValue("test_server_test_responses_duration_seconds") shouldBe 3.0
204 | }
205 |
206 | it should "set responsesDuration metrics in the underlying registry with dimension" in new DimensionFixture {
207 | val dims = serverDimensions ++ requestsDimensions ++ responsesDimensions
208 | registry.responsesDuration.observe(3.seconds, dims)
209 | underlyingHistogramValue("akka_http_responses_duration_seconds", dims) shouldBe 3.0
210 | }
211 |
212 | it should "set responsesSize metrics in the underlying registry" in new Fixture {
213 | registry.responsesSize.update(3)
214 | underlyingHistogramValue("akka_http_responses_size_bytes") shouldBe 3L
215 | }
216 |
217 | it should "set responsesSize metrics in the underlying registry using updated name" in new MetricsNamesFixture {
218 | registry.responsesSize.update(3)
219 | underlyingHistogramValue("test_server_test_responses_size_bytes") shouldBe 3L
220 | }
221 |
222 | it should "set responsesSize metrics in the underlying registry with dimensions" in new DimensionFixture {
223 | val dims = serverDimensions ++ requestsDimensions ++ responsesDimensions
224 | registry.responsesSize.update(3, dims)
225 | underlyingHistogramValue("akka_http_responses_size_bytes", dims) shouldBe 3L
226 | }
227 |
228 | it should "set connectionsActive metrics in the underlying registry" in new Fixture {
229 | registry.connectionsActive.inc()
230 | underlyingCounterValue("akka_http_connections_active") shouldBe 1L
231 | }
232 |
233 | it should "set connectionsActive metrics in the underlying registry using updated name" in new MetricsNamesFixture {
234 | registry.connectionsActive.inc()
235 | underlyingCounterValue("test_server_test_connections_active") shouldBe 1L
236 | }
237 |
238 | it should "set connectionsActive metrics in the underlying registry with dimensions" in new DimensionFixture {
239 | val dims = serverDimensions
240 | registry.connectionsActive.inc(dims)
241 | underlyingCounterValue("akka_http_connections_active", dims) shouldBe 1L
242 | }
243 |
244 | it should "set connections metrics in the underlying registry" in new Fixture {
245 | registry.connections.inc()
246 | underlyingCounterValue("akka_http_connections_total") shouldBe 1L
247 | }
248 |
249 | it should "set connections metrics in the underlying registry using updated name" in new MetricsNamesFixture {
250 | registry.connections.inc()
251 | underlyingCounterValue("test_server_test_connections_total") shouldBe 1L
252 | }
253 |
254 | it should "set connections metrics in the underlying registry with dimensions" in new DimensionFixture {
255 | val dims = serverDimensions
256 | registry.connections.inc(dims)
257 | underlyingCounterValue("akka_http_connections_total", dims) shouldBe 1L
258 | }
259 | }
260 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/core/src/test/scala/fr/davit/akka/http/metrics/core/HttpMetricsRegistrySpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Michel Davit
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package fr.davit.akka.http.metrics.core
18 |
19 | import akka.actor.ActorSystem
20 | import akka.http.scaladsl.model._
21 | import akka.http.scaladsl.model.headers.{`User-Agent`, ProductVersion}
22 | import akka.stream.Materializer
23 | import akka.stream.scaladsl.Source
24 | import akka.testkit.TestKit
25 | import akka.util.ByteString
26 | import fr.davit.akka.http.metrics.core.HttpMetricsRegistry.TraceTimestampKey
27 | import org.scalatest.concurrent.ScalaFutures
28 | import org.scalatest.flatspec.AnyFlatSpecLike
29 | import org.scalatest.matchers.should.Matchers
30 |
31 | import scala.concurrent.duration._
32 |
33 | class HttpMetricsRegistrySpec
34 | extends TestKit(ActorSystem("HttpMetricsRegistrySpec"))
35 | with AnyFlatSpecLike
36 | with Matchers
37 | with ScalaFutures {
38 |
39 | implicit val materializer: Materializer = Materializer(system)
40 |
41 | val testDimension = Dimension("dim", "label")
42 | val testRequest = HttpRequest().addAttribute(TraceTimestampKey, Deadline.now)
43 | val testResponse = HttpResponse()
44 |
45 | abstract class Fixture(settings: HttpMetricsSettings = TestRegistry.settings) {
46 | val registry = new TestRegistry(settings)
47 | }
48 |
49 | "HttpMetricsRegistry" should "set the request timestamp" in new Fixture() {
50 | registry.onRequest(HttpRequest()).attribute(TraceTimestampKey) shouldBe defined
51 | }
52 |
53 | it should "compute the number of requests" in new Fixture() {
54 | registry.requests.value() shouldBe 0
55 | registry.onRequest(testRequest)
56 | registry.requests.value() shouldBe 1
57 | registry.onRequest(testRequest)
58 | registry.requests.value() shouldBe 2
59 | }
60 |
61 | it should "compute the number of failures" in new Fixture() {
62 | registry.requestsFailures.value() shouldBe 0
63 | registry.onFailure(testRequest, new Exception("BOOM!"))
64 | registry.requestsFailures.value() shouldBe 1
65 | }
66 |
67 | it should "compute the number of active requests" in new Fixture() {
68 | registry.requestsActive.value() shouldBe 0
69 | registry.onRequest(testRequest)
70 | registry.onRequest(testRequest)
71 | registry.requestsActive.value() shouldBe 2
72 | registry.onResponse(testRequest, testResponse)
73 | registry.onFailure(testRequest, new Exception("BOOM!"))
74 | registry.requestsActive.value() shouldBe 0
75 | }
76 |
77 | it should "compute the requests size" in new Fixture() {
78 | val data = "This is the request content"
79 | val request = HttpRequest(entity = data)
80 | registry.requestsSize.values() shouldBe empty
81 | registry.onRequest(request).discardEntityBytes().future().futureValue
82 | registry.requestsSize.values().head shouldBe data.getBytes.length
83 | }
84 |
85 | it should "compute the requests size for streamed data" in new Fixture() {
86 | val data = Source(List("a", "b", "c")).map(ByteString.apply)
87 | val request = testRequest.withEntity(HttpEntity(ContentTypes.`application/octet-stream`, data))
88 | registry.requestsSize.values() shouldBe empty
89 | registry.onRequest(request).discardEntityBytes().future().futureValue
90 | registry.requestsSize.values().head shouldBe "abc".getBytes.length
91 | }
92 |
93 | it should "not transform strict requests" in new Fixture() {
94 | registry.onRequest(testRequest).entity.isKnownEmpty() shouldBe true
95 | registry.onRequest(testRequest.withEntity("strict data")).entity.isStrict() shouldBe true
96 | }
97 |
98 | it should "compute the number of errors" in new Fixture() {
99 | registry.responsesErrors.value() shouldBe 0
100 | registry.onResponse(testRequest, testResponse.withStatus(StatusCodes.OK))
101 | registry.onResponse(testRequest, testResponse.withStatus(StatusCodes.TemporaryRedirect))
102 | registry.onResponse(testRequest, testResponse.withStatus(StatusCodes.BadRequest))
103 | registry.responsesErrors.value() shouldBe 0
104 | registry.onResponse(testRequest, testResponse.withStatus(StatusCodes.InternalServerError))
105 | registry.responsesErrors.value() shouldBe 1
106 | }
107 |
108 | it should "compute the response size" in new Fixture() {
109 | val data = "This is the response content"
110 | val request = testRequest
111 | val response = HttpResponse(entity = data)
112 | registry.responsesSize.values() shouldBe empty
113 | registry.onResponse(request, response).discardEntityBytes().future().futureValue
114 | registry.responsesSize.values().head shouldBe data.getBytes.length
115 | }
116 |
117 | it should "compute the response size for streamed data" in new Fixture() {
118 | val data = Source(List("a", "b", "c")).map(ByteString.apply)
119 | val response = testResponse.withEntity(HttpEntity(ContentTypes.`application/octet-stream`, data))
120 | registry.responsesSize.values() shouldBe empty
121 | registry.onResponse(testRequest, response).discardEntityBytes().future().futureValue
122 | registry.responsesSize.values().head shouldBe "abc".getBytes.length
123 | }
124 |
125 | it should "compute the response time" in new Fixture() {
126 | val duration = 500.millis
127 | val start = Deadline.now - duration
128 | val request = testRequest.addAttribute(TraceTimestampKey, start)
129 | registry.responsesDuration.values() shouldBe empty
130 | registry.onResponse(request, testResponse).discardEntityBytes().future().futureValue
131 | registry.responsesDuration.values().head should be > duration
132 | }
133 |
134 | it should "not transform strict responses" in new Fixture() {
135 | registry.onResponse(testRequest, testResponse).entity.isKnownEmpty() shouldBe true
136 | registry.onResponse(testRequest, testResponse.withEntity("strict data")).entity.isStrict() shouldBe true
137 | }
138 |
139 | it should "not transform default responses" in new Fixture() {
140 | val data = Source(List("a", "b", "c")).map(ByteString.apply)
141 | val length = "abc".getBytes.length.toLong
142 |
143 | val response = testResponse
144 | .withProtocol(HttpProtocols.`HTTP/1.0`) // HTTP/1.0 does not support chunking; the stream MUST NOT be transformed
145 | .withEntity(HttpEntity.Default(ContentTypes.`application/octet-stream`, length, data))
146 |
147 | registry.onResponse(testRequest, response).entity shouldBe a[HttpEntity.Default]
148 | }
149 |
150 | it should "compute the number of connections" in new Fixture() {
151 | registry.connections.value() shouldBe 0
152 | registry.onConnection()
153 | registry.connections.value() shouldBe 1
154 | registry.onDisconnection()
155 | registry.onConnection()
156 | registry.connections.value() shouldBe 2
157 | }
158 |
159 | it should "compute the number of active connections" in new Fixture() {
160 | registry.connectionsActive.value() shouldBe 0
161 | registry.onConnection()
162 | registry.onConnection()
163 | registry.connectionsActive.value() shouldBe 2
164 | registry.onDisconnection()
165 | registry.onDisconnection()
166 | registry.connectionsActive.value() shouldBe 0
167 | }
168 |
169 | it should "add method dimension when enabled" in new Fixture(
170 | TestRegistry.settings.withIncludeMethodDimension(true)
171 | ) {
172 | registry.onRequest(testRequest)
173 | registry.onResponse(testRequest, testResponse)
174 | registry.onFailure(testRequest, new Exception("BOOM!"))
175 | registry.requests.value(Seq(Dimension(MethodLabeler.name, "GET"))) shouldBe 1
176 | registry.requests.value(Seq(Dimension(MethodLabeler.name, "PUT"))) shouldBe 0
177 | registry.requestsFailures.value(Seq(Dimension(MethodLabeler.name, "GET"))) shouldBe 1
178 | registry.requestsFailures.value(Seq(Dimension(MethodLabeler.name, "PUT"))) shouldBe 0
179 | registry.responses.value(Seq(Dimension(MethodLabeler.name, "GET"))) shouldBe 1
180 | registry.responses.value(Seq(Dimension(MethodLabeler.name, "PUT"))) shouldBe 0
181 | }
182 |
183 | it should "add status code dimension when enabled" in new Fixture(
184 | TestRegistry.settings.withIncludeStatusDimension(true)
185 | ) {
186 | registry.onResponse(testRequest, testResponse)
187 | registry.responses.value(Seq(Dimension(StatusGroupLabeler.name, "2xx"))) shouldBe 1
188 | registry.responses.value(Seq(Dimension(StatusGroupLabeler.name, "3xx"))) shouldBe 0
189 | registry.responses.value(Seq(Dimension(StatusGroupLabeler.name, "4xx"))) shouldBe 0
190 | registry.responses.value(Seq(Dimension(StatusGroupLabeler.name, "5xx"))) shouldBe 0
191 | }
192 |
193 | it should "default label dimension to 'unlabelled' when enabled but not annotated by directives" in new Fixture(
194 | TestRegistry.settings.withIncludePathDimension(true)
195 | ) {
196 | registry.onResponse(testRequest, testResponse)
197 | registry.responses.value(Seq(Dimension(PathLabeler.name, HttpMessageLabeler.Unlabelled))) shouldBe 1
198 | registry.responses.value(Seq(Dimension(PathLabeler.name, "unhandled"))) shouldBe 0
199 | }
200 |
201 | it should "increment proper dimension label" in new Fixture(
202 | TestRegistry.settings.withIncludePathDimension(true)
203 | ) {
204 | val label = "/api"
205 | registry.onResponse(testRequest, testResponse.addAttribute(PathLabeler.key, label))
206 | registry.responses.value(Seq(Dimension(PathLabeler.name, label))) shouldBe 1
207 | registry.responses.value(Seq(Dimension(PathLabeler.name, HttpMessageLabeler.Unlabelled))) shouldBe 0
208 | }
209 |
210 | it should "not increment path dimension label if disabled" in new Fixture(
211 | TestRegistry.settings.withIncludePathDimension(false)
212 | ) {
213 | val label = "/api"
214 | registry.onResponse(testRequest, testResponse.addAttribute(PathLabeler.key, label))
215 | registry.responses.value() shouldBe 1
216 | registry.responses.value(Seq(Dimension("path", label))) shouldBe 0
217 | }
218 |
219 | it should "increment proper server dimension" in new Fixture(
220 | TestRegistry.settings.withServerDimensions(testDimension)
221 | ) {
222 | registry.onConnection()
223 | registry.connections.value(Seq(testDimension)) shouldBe 1
224 |
225 | registry.onRequest(testRequest)
226 | registry.onResponse(testRequest, testResponse)
227 | registry.requests.value(Seq(testDimension)) shouldBe 1
228 | registry.responses.value(Seq(testDimension)) shouldBe 1
229 | }
230 |
231 | // user agent as sent by Vivaldi (https://vivaldi.com), which advertises itself as Chrome
232 | val testUserAgent = `User-Agent`(
233 | ProductVersion("Mozilla", "5.0", "X11; Linux x86_64"),
234 | ProductVersion("AppleWebKit", "537.36", "KHTML, like Gecko"),
235 | ProductVersion("Chrome", "98.0.4758.141"),
236 | ProductVersion("Safari", "537.36")
237 | )
238 | // based on https://developer.mozilla.org/en-US/docs/Web/HTTP/Browser_detection_using_the_user_agent#browser_name
239 | object BrowserLabeler extends HttpRequestLabeler {
240 | override def name: String = "browser"
241 | override def label(request: HttpRequest): String = {
242 | val products = for {
243 | ua <- request.header[`User-Agent`].toSeq
244 | pv <- ua.products
245 | } yield pv.product
246 | if (products.contains("Seamonkey")) "seamonkey"
247 | else if (products.contains("Firefox")) "firefox"
248 | else if (products.contains("Chromium")) "chromium"
249 | else if (products.contains("Chrome")) "chrome"
250 | else if (products.contains("Safari")) "safari"
251 | else if (products.contains("OPR") || products.contains("Opera")) "opera"
252 | else "other"
253 | }
254 | }
255 |
256 | it should "increment proper custom request dimension" in new Fixture(
257 | TestRegistry.settings.withCustomDimensions(BrowserLabeler)
258 | ) {
259 | val agent = Dimension(BrowserLabeler.name, "chrome")
260 | registry.onConnection()
261 | registry.connections.value() shouldBe 1
262 |
263 | val requestWithUA = testRequest.addHeader(testUserAgent)
264 | registry.onRequest(requestWithUA)
265 | registry.onResponse(requestWithUA, testResponse)
266 | registry.requests.value(Seq(agent)) shouldBe 1
267 | registry.responses.value(Seq(agent)) shouldBe 1
268 | }
269 |
270 | object GrpcServiceLabeler extends AttributeLabeler {
271 | def name: String = "grpc-service"
272 | }
273 | it should "increment proper custom response dimension" in new Fixture(
274 | TestRegistry.settings.withCustomDimensions(GrpcServiceLabeler)
275 | ) {
276 | val service = Dimension(GrpcServiceLabeler.name, "health")
277 | registry.onConnection()
278 | registry.connections.value() shouldBe 1
279 |
280 | val responseWithAttribute = testResponse.addAttribute(GrpcServiceLabeler.key, "health")
281 | registry.onRequest(testRequest)
282 | registry.onResponse(testRequest, responseWithAttribute)
283 | registry.requests.value(Seq()) shouldBe 1
284 | registry.responses.value(Seq(service)) shouldBe 1
285 | }
286 | }
287 |
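Custom response dimensions like the one above are driven by an attribute added to the response. A hedged sketch of how a route could attach it, using the plain akka-http `mapResponse` directive for illustration: the route path and the "health" label are made up, `TestRegistry` is the specs' in-memory registry, and the labeler mirrors the one defined in the spec.

package fr.davit.akka.http.metrics.core

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route

object GrpcLabelSketch {
  // same shape as the labeler defined in the spec above
  object GrpcServiceLabeler extends AttributeLabeler {
    def name: String = "grpc-service"
  }

  // registry configured to expose the custom response dimension
  val registry = new TestRegistry(
    TestRegistry.settings.withCustomDimensions(GrpcServiceLabeler)
  )

  // any code path that adds the labeler's attribute to a response feeds the dimension;
  // here the standard `mapResponse` directive attaches it
  val route: Route =
    path("grpc.health.v1.Health" / "Check") {
      mapResponse(_.addAttribute(GrpcServiceLabeler.key, "health")) {
        complete("OK")
      }
    }
}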
--------------------------------------------------------------------------------