├── .gitignore
├── LICENSE
├── README.md
├── pom.xml
├── sample
├── Dockerfile
├── README.md
└── docker-run.sh
└── src
└── main
└── java
└── fr
└── ippon
└── spark
└── metrics
├── SparkReporter.java
└── measures
├── CounterMeasure.java
├── GaugeMeasure.java
├── HistogramMeasure.java
├── Measure.java
├── MeterMeasure.java
├── SnapshotMeasure.java
└── TimerMeasure.java
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | *.iml
3 | *~
4 | target/
5 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2014 Ippon Technologies
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 |
15 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | metrics-spark-reporter
2 | =============
3 |
4 | ## Dropwizard Metrics reporter for Apache Spark Streaming
5 |
6 | This is a reporter for the [Metrics library](https://dropwizard.github.io/metrics/3.1.0/)
7 | of [DropWizard](http://dropwizard.io/),
8 | similar to the [graphite](https://dropwizard.github.io/metrics/3.1.0/manual/graphite/#manual-graphite)
9 | or [ganglia](https://dropwizard.github.io/metrics/3.1.0/manual/ganglia/#manual-ganglia) reporters,
10 | except that it reports to metrics-spark-receiver.
11 |
12 | This reporter is using sockets for sending data to the Spark Streaming Receiver.
13 |
14 | ## Metrics
15 |
16 | The library Metrics provides 5 types of measure :
17 | * [Gauge] (https://dropwizard.github.io/metrics/3.1.0/getting-started/#gauges) :
18 | an instantaneous measurement of a value.
19 | * [Counter] (https://dropwizard.github.io/metrics/3.1.0/getting-started/#counters) :
20 | a gauge for an AtomicLong instance.
21 | * [Meter] (https://dropwizard.github.io/metrics/3.1.0/getting-started/#meters) :
22 | a measure of the rate of events over time.
23 | * [Histogram] (https://dropwizard.github.io/metrics/3.1.0/getting-started/#histograms) :
24 | a measure of the statistical distribution of values in a stream of data.
25 | * [Timer] (https://dropwizard.github.io/metrics/3.1.0/getting-started/#timers) :
26 | a measure of both the rate that a particular piece of code is called and the distribution of its duration.
27 |
28 | ## Configuration
29 |
30 | In order to do a Spark reporting, you need to add the dependency :
31 | ```
32 | <dependency>
33 |     <groupId>fr.ippon.spark.metrics</groupId>
34 |     <artifactId>metrics-spark-reporter</artifactId>
35 |     <version>1.2</version>
36 | </dependency>
37 | ```
38 |
39 | And implement the SparkReporter like :
40 | ```
41 | SparkReporter sparkReporter = SparkReporter.forRegistry(metricRegistry)
42 | .convertRatesTo(TimeUnit.SECONDS)
43 | .convertDurationsTo(TimeUnit.MILLISECONDS)
44 | .build("localhost", 9999);
45 |
46 | sparkReporter.start(10, TimeUnit.SECONDS);
47 | ```
48 |
49 | ## Test
50 |
51 | There are two ways to test this Reporter:
52 |
53 | * With a sample [spark-jhipster] (https://github.com/ahars/spark-jhipster)
54 | * With Docker (in sample/).
55 |
56 |
57 | Test sending data with the [JHipster](http://jhipster.github.io/) sample which reports
58 | to a Spark Streaming app implementing
59 | the java custom receiver [spark-jhipster](https://github.com/ahars/spark-jhipster).
60 |
61 | Send data by launching the JHipster sample with the Maven command :
62 | ```
63 | $ mvn spring-boot:run
64 | ```
65 |
66 | Display metrics received by launching one of those two classes
67 | of [metrics-spark](https://github.com/ahars/metrics-spark) :
68 | * `MetricsToConsole` to display data in the console.
69 | * `MetricsToES` to send data to an ElasticSearch server via Spark in order to use Kibana.
70 |
71 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 | fr.ippon.spark.metrics
8 | metrics-spark-reporter
9 | 1.2
10 | jar
11 |
12 | Apache Spark Integration for Metrics
13 | A reporter for Apache Spark which announces measurements
14 | to a Spark Streaming application.
15 | https://github.com/ippontech/metrics-spark-reporter
16 |
17 |
18 |
19 | The Apache License, Version 2.0
20 | http://www.apache.org/licenses/LICENSE-2.0.txt
21 |
22 |
23 |
24 |
25 |
26 | Antoine Hars
27 | ahars@ippon.fr
28 | Ippon Technologies
29 | http://www.ippon.fr
30 |
31 |
32 |
33 |
34 | scm:git:https://github.com/ippontech/metrics-spark-reporter.git
35 | scm:git:git@github.com:ippontech/metrics-spark-reporter.git
36 | https://github.com/ippontech/metrics-spark-reporter
37 |
38 |
39 |
40 | 1.8
41 | UTF-8
42 | 2.6
43 | 3.1.0
44 | 2.4.4
45 |
46 |
47 |
48 |
49 | joda-time
50 | joda-time
51 | ${joda.time.version}
52 |
53 |
54 | io.dropwizard.metrics
55 | metrics-core
56 | ${dropwizard.metrics.version}
57 |
58 |
59 | com.fasterxml.jackson.core
60 | jackson-databind
61 | ${jackson.core.version}
62 |
63 |
64 |
65 |
66 |
67 | ossrh
68 | https://oss.sonatype.org/content/repositories/snapshots
69 |
70 |
71 | ossrh
72 | https://oss.sonatype.org/service/local/staging/deploy/maven2/
73 |
74 |
75 |
76 |
77 |
78 |
79 | org.apache.maven.plugins
80 | maven-compiler-plugin
81 | 3.2
82 |
83 | ${java.version}
84 | ${java.version}
85 |
86 |
87 |
88 | org.apache.maven.plugins
89 | maven-source-plugin
90 | 2.4
91 |
92 |
93 | attach-sources
94 |
95 | jar
96 |
97 |
98 |
99 |
100 |
101 | org.apache.maven.plugins
102 | maven-javadoc-plugin
103 | 2.10.1
104 |
105 |
106 | attach-javadocs
107 |
108 | jar
109 |
110 |
111 |
112 |
113 |
114 | org.apache.maven.plugins
115 | maven-gpg-plugin
116 | 1.5
117 |
118 |
119 | sign-artifacts
120 | verify
121 |
122 | sign
123 |
124 |
125 |
126 |
127 |
128 | org.sonatype.plugins
129 | nexus-staging-maven-plugin
130 | 1.6.3
131 | true
132 |
133 | ossrh
134 | https://oss.sonatype.org/
135 | true
136 |
137 |
138 |
139 |
140 |
141 |
142 |
--------------------------------------------------------------------------------
/sample/Dockerfile:
--------------------------------------------------------------------------------
1 | # DOCKER-VERSION 1.3.2
2 | FROM ubuntu:14.04
3 | MAINTAINER Antoine Hars
4 |
5 | # make sure the package repository is up to date
6 | RUN echo "deb http://archive.ubuntu.com/ubuntu trusty main universe" > /etc/apt/sources.list
7 | RUN apt-get -y update
8 |
9 | # install python-software-properties (so you can do add-apt-repository)
10 | RUN DEBIAN_FRONTEND=noninteractive apt-get install -y -q python-software-properties software-properties-common
11 |
12 | # install oracle java from PPA & other tools
13 | RUN add-apt-repository ppa:webupd8team/java -y
14 | RUN echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections
15 | RUN apt-get update && apt-get install -y \
16 | sudo \
17 | git \
18 | maven \
19 | oracle-java8-installer \
20 | && apt-get clean
21 |
22 | # Set oracle java as the default java
23 | RUN update-java-alternatives -s java-8-oracle
24 | RUN echo "export JAVA_HOME=/usr/lib/jvm/java-8-oracle" >> ~/.bashrc
25 |
26 | # install the spark-jhipster
27 | RUN git clone https://github.com/ahars/spark-jhipster.git
28 | RUN cd spark-jhipster && \
29 | mvn package
30 |
31 | EXPOSE 8080
32 |
33 | CMD java -jar spark-jhipster/target/spark-jhipster-0.0.1-SNAPSHOT.war
34 |
--------------------------------------------------------------------------------
/sample/README.md:
--------------------------------------------------------------------------------
1 | SparkReporter Sample
2 | =============
3 |
4 | In order to check out this sample, you need [Docker](https://www.docker.com/).
5 |
6 | ## Spark JHipster
7 |
8 | Run the `docker-run.sh` which will build an image of the Dockerfile and run this image.
9 | At the end, it launches the app spark-jhipster that sends metrics of the sample JHipster application into a stream.
10 |
11 | You can use the sample available in [metrics-spark-receiver] (https://github.com/ippontech/metrics-spark-receiver)
12 | to receive and display metrics.
13 |
--------------------------------------------------------------------------------
/sample/docker-run.sh:
--------------------------------------------------------------------------------
1 | sudo docker build --rm -t ippontech/spark-jhipster .
2 | sudo docker run -t -i --rm -p 8080:8080 --name jhipster --link spark:spark ippontech/spark-jhipster
3 |
--------------------------------------------------------------------------------
/src/main/java/fr/ippon/spark/metrics/SparkReporter.java:
--------------------------------------------------------------------------------
1 | package fr.ippon.spark.metrics;
2 |
3 | import com.codahale.metrics.*;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import fr.ippon.spark.metrics.measures.*;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 |
9 | import javax.net.SocketFactory;
10 | import java.io.IOException;
11 | import java.io.PrintWriter;
12 | import java.net.Socket;
13 | import java.util.Map;
14 | import java.util.SortedMap;
15 | import java.util.concurrent.TimeUnit;
16 |
17 | /**
18 | * A reporter which publishes metric values to a Spark Receiver.
19 | *
20 | */
21 | public class SparkReporter extends ScheduledReporter {
22 |
23 | private static final Logger LOGGER = LoggerFactory.getLogger(SparkReporter.class);
24 |
25 | private String sparkHost;
26 | private int sparkPort;
27 | private Socket socket;
28 | private ObjectMapper mapper;
29 | private PrintWriter writer;
30 |
31 | private SparkReporter(MetricRegistry registry, String sparkHost, int sparkPort, TimeUnit rateUnit,
32 | TimeUnit durationUnit, MetricFilter filter) {
33 | super(registry, "spark-reporter", filter, rateUnit, durationUnit);
34 | this.sparkHost = sparkHost;
35 | this.sparkPort = sparkPort;
36 | this.mapper = new ObjectMapper();
37 | }
38 |
39 | /**
40 | * Connect the reporter to a Spark application receiver and do a reporting.
41 | * Two try for each reporting and warn if the connection failed
42 | *
43 | * @param gauges a map of the gauges captured periodically
44 | * @param counters a map of the counters captured periodically
45 | * @param histograms a map of the histograms captured periodically
46 | * @param meters a map of the meters captured periodically
47 | * @param timers a map of the timers captured periodically
48 | */
49 | @Override
50 | public void report(SortedMap gauges,
51 | SortedMap counters,
52 | SortedMap histograms,
53 | SortedMap meters,
54 | SortedMap timers) {
55 | try {
56 | connect();
57 | doReport(gauges, counters, histograms, meters, timers);
58 | } catch (IOException ioe1) {
59 | try {
60 | connect();
61 | doReport(gauges, counters, histograms, meters, timers);
62 | } catch (IOException ioe2) {
63 | LOGGER.warn("Unable to report to Spark : "+ ioe2.getClass().getCanonicalName());
64 | }
65 | }
66 | }
67 |
68 | private void doReport(SortedMap gauges,
69 | SortedMap counters,
70 | SortedMap histograms,
71 | SortedMap meters,
72 | SortedMap timers) throws IOException {
73 |
74 | if (gauges.isEmpty() && counters.isEmpty() && histograms.isEmpty() &&
75 | meters.isEmpty() && timers.isEmpty()) {
76 | return;
77 | }
78 |
79 | if (!gauges.isEmpty()) {
80 | for (Map.Entry entry : gauges.entrySet()) {
81 | reportGauge(entry.getKey(), entry.getValue());
82 | }
83 | }
84 |
85 | if (!counters.isEmpty()) {
86 | for (Map.Entry entry : counters.entrySet()) {
87 | reportCounter(entry.getKey(), entry.getValue());
88 | }
89 | }
90 |
91 | if (!histograms.isEmpty()) {
92 | for (Map.Entry entry : histograms.entrySet()) {
93 | reportHistogram(entry.getKey(), entry.getValue());
94 | }
95 | }
96 |
97 | if (!meters.isEmpty()) {
98 | for (Map.Entry entry : meters.entrySet()) {
99 | reportMetered(entry.getKey(), entry.getValue());
100 | }
101 | }
102 |
103 | if (!timers.isEmpty()) {
104 | for (Map.Entry entry : timers.entrySet()) {
105 | reportTimer(entry.getKey(), entry.getValue());
106 | }
107 | }
108 | }
109 |
110 | private void connect() throws IOException {
111 | if (writer != null && writer.checkError()) {
112 | closeConnection();
113 | }
114 | if (socket == null) {
115 | socket = SocketFactory.getDefault().createSocket(sparkHost, sparkPort);
116 | writer = new PrintWriter(socket.getOutputStream());
117 | }
118 | }
119 |
120 | private void closeConnection() throws IOException {
121 | writer.close();
122 | socket.close();
123 | writer = null;
124 | socket = null;
125 | }
126 |
127 | private void reportGauge(String name, Gauge gauge) throws IOException {
128 | if (this.isANumber(gauge.getValue())) {
129 | writer.println(mapper.writeValueAsString(new GaugeMeasure(name, gauge)));
130 | }
131 | }
132 |
133 | private void reportCounter(String name, Counter counter) throws IOException {
134 | writer.println(mapper.writeValueAsString(new CounterMeasure(name, counter)));
135 | }
136 |
137 | private void reportHistogram(String name, Histogram histogram) throws IOException {
138 | writer.println(mapper.writeValueAsString(new HistogramMeasure(name, histogram)));
139 | }
140 |
141 | private void reportMetered(String name, Metered meter) throws IOException {
142 | writer.println(mapper.writeValueAsString(new MeterMeasure(name, meter)));
143 | }
144 |
145 | private void reportTimer(String name, Timer timer) throws IOException {
146 | writer.println(mapper.writeValueAsString(new TimerMeasure(name, timer)));
147 | }
148 |
149 | private boolean isANumber(Object object) {
150 | if (object instanceof Float || object instanceof Double ||
151 | object instanceof Integer || object instanceof Long) {
152 | return true;
153 | } else {
154 | return false;
155 | }
156 | }
157 |
158 | /**
159 | * Returns a new {@link Builder} for {@link SparkReporter}.
160 | *
161 | * @param registry the registry to report
162 | * @return a {@link Builder} instance for a {@link SparkReporter}
163 | */
164 | public static Builder forRegistry(MetricRegistry registry) {
165 | return new Builder(registry);
166 | }
167 |
168 | /**
169 | * A builder for {@link SparkReporter} instances.
170 | */
171 | public static class Builder {
172 |
173 | private final MetricRegistry registry;
174 | private Clock clock;
175 | private String prefix;
176 | private TimeUnit rateUnit;
177 | private TimeUnit durationUnit;
178 | private MetricFilter filter;
179 |
180 | private Builder(MetricRegistry registry) {
181 | this.registry = registry;
182 | this.clock = Clock.defaultClock();
183 | this.prefix = null;
184 | this.rateUnit = TimeUnit.SECONDS;
185 | this.durationUnit = TimeUnit.MILLISECONDS;
186 | this.filter = MetricFilter.ALL;
187 | }
188 |
189 | /**
190 | * Use the given {@link Clock} instance for the time.
191 | *
192 | * @param clock a {@link Clock} instance
193 | * @return {@code this}
194 | */
195 | public Builder withClock(Clock clock) {
196 | this.clock = clock;
197 | return this;
198 | }
199 |
200 | /**
201 | * Prefix all metric names with the given string.
202 | *
203 | * @param prefix the prefix for all metric names
204 | * @return {@code this}
205 | */
206 | public Builder prefixedWith(String prefix) {
207 | this.prefix = prefix;
208 | return this;
209 | }
210 |
211 | /**
212 | * Convert rates to the given time unit.
213 | *
214 | * @param rateUnit a unit of time
215 | * @return {@code this}
216 | */
217 | public Builder convertRatesTo(TimeUnit rateUnit) {
218 | this.rateUnit = rateUnit;
219 | return this;
220 | }
221 |
222 | /**
223 | * Convert durations to the given time unit.
224 | *
225 | * @param durationUnit a unit of time
226 | * @return {@code this}
227 | */
228 | public Builder convertDurationsTo(TimeUnit durationUnit) {
229 | this.durationUnit = durationUnit;
230 | return this;
231 | }
232 |
233 | /**
234 | * Only report metrics which match the given filter.
235 | *
236 | * @param filter a {@link MetricFilter}
237 | * @return {@code this}
238 | */
239 | public Builder filter(MetricFilter filter) {
240 | this.filter = filter;
241 | return this;
242 | }
243 |
244 | /**
245 | * Build a {@link SparkReporter} with the given properties.
246 | *
247 | * @param sparkHost the host of the Spark application
248 | * @param sparkPort the port of the Spark application
249 | * @return a {@link SparkReporter}
250 | */
251 | public SparkReporter build(String sparkHost, int sparkPort) {
252 | return new SparkReporter(registry, sparkHost, sparkPort, rateUnit, durationUnit, filter);
253 | }
254 | }
255 | }
256 |
--------------------------------------------------------------------------------
/src/main/java/fr/ippon/spark/metrics/measures/CounterMeasure.java:
--------------------------------------------------------------------------------
1 | package fr.ippon.spark.metrics.measures;
2 |
3 | import com.codahale.metrics.Counter;
4 |
5 | public class CounterMeasure extends Measure {
6 |
7 | private Long count;
8 |
9 | public CounterMeasure(String name, Counter counter) {
10 | super(name, "counter");
11 | this.count = counter.getCount();
12 | }
13 |
14 | public Long getCount() {
15 | return count;
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/src/main/java/fr/ippon/spark/metrics/measures/GaugeMeasure.java:
--------------------------------------------------------------------------------
1 | package fr.ippon.spark.metrics.measures;
2 |
3 | import com.codahale.metrics.Gauge;
4 |
5 | public class GaugeMeasure extends Measure {
6 |
7 | private Object value;
8 |
9 | public GaugeMeasure(String name, Gauge gauge) {
10 | super(name, "gauge");
11 | this.value = gauge.getValue();
12 | }
13 |
14 | public Object getValue() {
15 | return value;
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/src/main/java/fr/ippon/spark/metrics/measures/HistogramMeasure.java:
--------------------------------------------------------------------------------
1 | package fr.ippon.spark.metrics.measures;
2 |
3 | import com.codahale.metrics.Histogram;
4 |
5 | public class HistogramMeasure extends SnapshotMeasure {
6 |
7 | private Long count;
8 |
9 | public HistogramMeasure(String name, Histogram histogram) {
10 | super(name, "histogram", histogram.getSnapshot());
11 | this.count = histogram.getCount();
12 | }
13 |
14 | public Long getCount() {
15 | return count;
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/src/main/java/fr/ippon/spark/metrics/measures/Measure.java:
--------------------------------------------------------------------------------
1 | package fr.ippon.spark.metrics.measures;
2 |
3 | import org.joda.time.DateTime;
4 |
5 | import java.io.Serializable;
6 |
7 | public abstract class Measure implements Serializable {
8 |
9 | private String name;
10 | private String metric;
11 | private String timestamp;
12 |
13 | public Measure(String name, String metric) {
14 | this.name = name;
15 | this.metric = metric;
16 | this.timestamp = DateTime.now().toString();
17 | }
18 |
19 | public String getName() {
20 | return name;
21 | }
22 |
23 | public String getMetric() {
24 | return metric;
25 | }
26 |
27 | public String getTimestamp() {
28 | return timestamp;
29 | }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/src/main/java/fr/ippon/spark/metrics/measures/MeterMeasure.java:
--------------------------------------------------------------------------------
1 | package fr.ippon.spark.metrics.measures;
2 |
3 | import com.codahale.metrics.Metered;
4 |
5 | public class MeterMeasure extends Measure {
6 |
7 | private Long count;
8 | private Double m1Rate;
9 | private Double m5Rate;
10 | private Double m15Rate;
11 | private Double meanRate;
12 |
13 | public MeterMeasure(String name, Metered meter) {
14 | super(name, "meter");
15 | this.count = meter.getCount();
16 | this.m1Rate = meter.getOneMinuteRate();
17 | this.m5Rate = meter.getFiveMinuteRate();
18 | this.m15Rate = meter.getFifteenMinuteRate();
19 | this.meanRate = meter.getMeanRate();
20 | }
21 |
22 | public Long getCount() {
23 | return count;
24 | }
25 |
26 | public Double getM1Rate() {
27 | return m1Rate;
28 | }
29 |
30 | public Double getM5Rate() {
31 | return m5Rate;
32 | }
33 |
34 | public Double getM15Rate() {
35 | return m15Rate;
36 | }
37 |
38 | public Double getMeanRate() {
39 | return meanRate;
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/src/main/java/fr/ippon/spark/metrics/measures/SnapshotMeasure.java:
--------------------------------------------------------------------------------
1 | package fr.ippon.spark.metrics.measures;
2 |
3 | import com.codahale.metrics.Snapshot;
4 |
5 | public abstract class SnapshotMeasure extends Measure {
6 |
7 | private Long max;
8 | private Long min;
9 | private Double mean;
10 | private Double median;
11 | private Double p75;
12 | private Double p95;
13 | private Double p98;
14 | private Double p99;
15 | private Double p999;
16 | private Double stdDev;
17 |
18 |
19 | public SnapshotMeasure(String name, String metric, Snapshot snapshot) {
20 | super(name, metric);
21 | this.max = snapshot.getMax();
22 | this.min = snapshot.getMin();
23 | this.mean = snapshot.getMean();
24 | this.median = snapshot.getMedian();
25 | this.p75 = snapshot.get75thPercentile();
26 | this.p95 = snapshot.get95thPercentile();
27 | this.p98 = snapshot.get98thPercentile();
28 | this.p99 = snapshot.get99thPercentile();
29 | this.p999 = snapshot.get999thPercentile();
30 | this.stdDev = snapshot.getStdDev();
31 | }
32 |
33 | public Long getMax() {
34 | return max;
35 | }
36 |
37 | public Long getMin() {
38 | return min;
39 | }
40 |
41 | public Double getMean() {
42 | return mean;
43 | }
44 |
45 | public Double getMedian() {
46 | return median;
47 | }
48 |
49 | public Double getP75() {
50 | return p75;
51 | }
52 |
53 | public Double getP95() {
54 | return p95;
55 | }
56 |
57 | public Double getP98() {
58 | return p98;
59 | }
60 |
61 | public Double getP99() {
62 | return p99;
63 | }
64 |
65 | public Double getP999() {
66 | return p999;
67 | }
68 |
69 | public Double getStdDev() {
70 | return stdDev;
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/src/main/java/fr/ippon/spark/metrics/measures/TimerMeasure.java:
--------------------------------------------------------------------------------
1 | package fr.ippon.spark.metrics.measures;
2 |
3 | import com.codahale.metrics.Timer;
4 |
5 | public class TimerMeasure extends SnapshotMeasure {
6 |
7 | private Long count;
8 | private Double m1Rate;
9 | private Double m5Rate;
10 | private Double m15Rate;
11 | private Double meanRate;
12 |
13 | public TimerMeasure(String name, Timer timer) {
14 | super(name, "timer", timer.getSnapshot());
15 | this.count = timer.getCount();
16 | this.m1Rate = timer.getOneMinuteRate();
17 | this.m5Rate = timer.getFiveMinuteRate();
18 | this.m15Rate = timer.getFifteenMinuteRate();
19 | this.meanRate = timer.getMeanRate();
20 | }
21 |
22 | public Long getCount() {
23 | return count;
24 | }
25 |
26 | public Double getM1Rate() {
27 | return m1Rate;
28 | }
29 |
30 | public Double getM5Rate() {
31 | return m5Rate;
32 | }
33 |
34 | public Double getM15Rate() {
35 | return m15Rate;
36 | }
37 |
38 | public Double getMeanRate() {
39 | return meanRate;
40 | }
41 | }
42 |
--------------------------------------------------------------------------------