├── .gitignore ├── LICENSE.txt ├── README.md ├── pom.xml └── src ├── main └── java │ └── io │ └── github │ └── hengyunabc │ └── metrics │ ├── HostUtil.java │ ├── KafkaReporter.java │ ├── MessageListener.java │ ├── MetricsKafkaConsumer.java │ └── ThreadFactoryBuilder.java └── test ├── java └── io │ └── github │ └── hengyunabc │ └── metrics │ └── test │ ├── AnnotationObject.java │ ├── KafkaReporterSample.java │ ├── MetricsKafkaConsumerSample.java │ └── SpringTest.java └── resources ├── log4j.properties └── spring-test.xml /.gitignore: -------------------------------------------------------------------------------- 1 | .gradle 2 | *.sw? 3 | .#* 4 | *# 5 | *~ 6 | /build 7 | /code 8 | .classpath 9 | .project 10 | .settings 11 | .metadata 12 | .factorypath 13 | .recommenders 14 | bin 15 | build 16 | lib/ 17 | target 18 | .factorypath 19 | .springBeans 20 | interpolated*.xml 21 | dependency-reduced-pom.xml 22 | build.log 23 | _site/ 24 | .*.md.html 25 | manifest.yml 26 | MANIFEST.MF 27 | settings.xml 28 | activemq-data 29 | overridedb.* 30 | *.iml 31 | *.ipr 32 | *.iws 33 | .idea 34 | *.jar 35 | .DS_Store 36 | .factorypath 37 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. 
For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | 204 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # metrics-kafka 2 | Dropwizard Metrics reporter for Kafka. 3 | 4 | 5 | https://github.com/dropwizard/metrics 6 | 7 | Reports JSON metrics data to Kafka; a Kafka consumer can then process the metrics data.
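Each message is a plain JSON string (see the sample payload below), so any JSON library can turn it back into a map for processing. The helper below is only an illustrative sketch — the class name is made up, and it assumes Jackson is on the classpath (it is pulled in transitively by metrics-json); it could be called from the `MessageListener.onMessage(...)` callback shown in the consumer example further down.

```java
import java.io.IOException;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical helper (not part of this library): turns one reported
// message back into a Map so fields like "timers" or "gauges" can be inspected.
public class MetricsMessageParser {
    private static final ObjectMapper mapper = new ObjectMapper();

    @SuppressWarnings("unchecked")
    public static Map<String, Object> parse(String jsonMessage) throws IOException {
        return mapper.readValue(jsonMessage, Map.class);
    }
}
```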
8 | 9 | 10 | ## Example 11 | 12 | ### Environment Setup 13 | 14 | http://kafka.apache.org/082/documentation.html#quickstart 15 | 16 | ### Reporter 17 | 18 | ```java 19 | import java.io.IOException; 20 | import java.util.Properties; 21 | import java.util.Timer; 22 | import java.util.TimerTask; 23 | import java.util.concurrent.TimeUnit; 24 | 25 | import com.codahale.metrics.ConsoleReporter; 26 | import com.codahale.metrics.Histogram; 27 | import com.codahale.metrics.MetricRegistry; 28 | import com.codahale.metrics.Timer.Context; 29 | import com.codahale.metrics.jvm.GarbageCollectorMetricSet; 30 | import com.codahale.metrics.jvm.MemoryUsageGaugeSet; 31 | 32 | import io.github.hengyunabc.metrics.KafkaReporter; 33 | import kafka.producer.ProducerConfig; 34 | 35 | public class KafkaReporterSample { 36 | static final MetricRegistry metrics = new MetricRegistry(); 37 | static public Timer timer = new Timer(); 38 | 39 | public static void main(String args[]) throws IOException, 40 | InterruptedException { 41 | ConsoleReporter reporter = ConsoleReporter.forRegistry(metrics) 42 | .convertRatesTo(TimeUnit.SECONDS) 43 | .convertDurationsTo(TimeUnit.MILLISECONDS).build(); 44 | metrics.register("jvm.mem", new MemoryUsageGaugeSet()); 45 | metrics.register("jvm.gc", new GarbageCollectorMetricSet()); 46 | 47 | final Histogram responseSizes = metrics.histogram("response-sizes"); 48 | final com.codahale.metrics.Timer metricsTimer = metrics 49 | .timer("test-timer"); 50 | 51 | timer.schedule(new TimerTask() { 52 | int i = 100; 53 | 54 | @Override 55 | public void run() { 56 | Context context = metricsTimer.time(); 57 | try { 58 | TimeUnit.MILLISECONDS.sleep(500); 59 | } catch (InterruptedException e) { 60 | // TODO Auto-generated catch block 61 | e.printStackTrace(); 62 | } 63 | responseSizes.update(i++); 64 | context.stop(); 65 | } 66 | 67 | }, 1000, 1000); 68 | 69 | reporter.start(5, TimeUnit.SECONDS); 70 | 71 | String hostName = "localhost"; 72 | String topic = "test-kafka-reporter"; 73 | Properties props = new Properties(); 74 | props.put("metadata.broker.list", "127.0.0.1:9092"); 75 | props.put("serializer.class", "kafka.serializer.StringEncoder"); 76 | props.put("partitioner.class", "kafka.producer.DefaultPartitioner"); 77 | props.put("request.required.acks", "1"); 78 | 79 | String prefix = "test."; 80 | ProducerConfig config = new ProducerConfig(props); 81 | KafkaReporter kafkaReporter = KafkaReporter.forRegistry(metrics) 82 | .config(config).topic(topic).hostName(hostName).prefix(prefix).build(); 83 | 84 | kafkaReporter.start(1, TimeUnit.SECONDS); 85 | 86 | TimeUnit.SECONDS.sleep(500); 87 | } 88 | } 89 | ``` 90 | 91 | The json send to kafka will like this: 92 | ```json 93 | { 94 | "timers": { 95 | "test.test-timer": { 96 | "count": 43, 97 | "max": 505.33599999999996, 98 | "mean": 502.585391215306, 99 | "min": 500.191, 100 | "p50": 502.443, 101 | "p75": 504.046, 102 | "p95": 505.291, 103 | "p98": 505.33599999999996, 104 | "p99": 505.33599999999996, 105 | "p999": 505.33599999999996, 106 | "stddev": 1.6838970975560197, 107 | "m15_rate": 0.8076284847453551, 108 | "m1_rate": 0.8883929708459906, 109 | "m5_rate": 0.8220236458023953, 110 | "mean_rate": 0.9799289583409866, 111 | "duration_units": "milliseconds", 112 | "rate_units": "calls/second" 113 | } 114 | }, 115 | "durationUnit": "milliseconds", 116 | "meters": {}, 117 | "clock": 1453287302764, 118 | "hostName": "localhost", 119 | "rateUnit": "second", 120 | "histograms": { 121 | "test.response-sizes": { 122 | "count": 43, 123 | "max": 142, 124 | "mean": 
123.29413148075862, 125 | "min": 100, 126 | "p50": 124, 127 | "p75": 134, 128 | "p95": 141, 129 | "p98": 142, 130 | "p99": 142, 131 | "p999": 142, 132 | "stddev": 12.28197980813012 133 | } 134 | }, 135 | "counters": {}, 136 | "gauges": { 137 | "test.jvm.mem.pools.Code-Cache.used": { 138 | "value": 769088 139 | }, 140 | "test.jvm.mem.pools.Code-Cache.usage": { 141 | "value": 0.015280405680338541 142 | }, 143 | "test.jvm.mem.heap.committed": { 144 | "value": 128974848 145 | }, 146 | "test.jvm.mem.pools.PS-Old-Gen.usage": { 147 | "value": 0.00048653738839285715 148 | }, 149 | "test.jvm.mem.non-heap.used": { 150 | "value": 17222048 151 | }, 152 | "test.jvm.gc.PS-MarkSweep.count": { 153 | "value": 0 154 | }, 155 | "test.jvm.mem.pools.Code-Cache.init": { 156 | "value": 2555904 157 | }, 158 | "test.jvm.mem.pools.PS-Survivor-Space.usage": { 159 | "value": 0.99683837890625 160 | }, 161 | "test.jvm.mem.pools.PS-Eden-Space.max": { 162 | "value": 705691648 163 | }, 164 | "test.jvm.mem.pools.PS-Perm-Gen.init": { 165 | "value": 22020096 166 | }, 167 | "test.jvm.mem.total.init": { 168 | "value": 158793728 169 | }, 170 | "test.jvm.mem.heap.max": { 171 | "value": 1908932608 172 | }, 173 | "test.jvm.mem.heap.init": { 174 | "value": 134217728 175 | }, 176 | "test.jvm.mem.pools.PS-Eden-Space.usage": { 177 | "value": 0.039622597318878856 178 | }, 179 | "test.jvm.mem.pools.PS-Survivor-Space.used": { 180 | "value": 5226304 181 | }, 182 | "test.jvm.mem.pools.Code-Cache.committed": { 183 | "value": 2555904 184 | }, 185 | "test.jvm.mem.pools.PS-Old-Gen.committed": { 186 | "value": 89128960 187 | }, 188 | "test.jvm.mem.non-heap.max": { 189 | "value": 136314880 190 | }, 191 | "test.jvm.gc.PS-Scavenge.count": { 192 | "value": 1 193 | }, 194 | "test.jvm.mem.pools.PS-Survivor-Space.init": { 195 | "value": 5242880 196 | }, 197 | "test.jvm.mem.pools.PS-Perm-Gen.committed": { 198 | "value": 22020096 199 | }, 200 | "test.jvm.mem.pools.PS-Eden-Space.used": { 201 | "value": 27961336 202 | }, 203 | "test.jvm.mem.pools.PS-Old-Gen.used": { 204 | "value": 696384 205 | }, 206 | "test.jvm.mem.pools.Code-Cache.max": { 207 | "value": 50331648 208 | }, 209 | "test.jvm.mem.pools.PS-Perm-Gen.usage": { 210 | "value": 0.19135079732755336 211 | }, 212 | "test.jvm.mem.total.committed": { 213 | "value": 153550848 214 | }, 215 | "test.jvm.mem.non-heap.init": { 216 | "value": 24576000 217 | }, 218 | "test.jvm.mem.pools.PS-Eden-Space.committed": { 219 | "value": 34603008 220 | }, 221 | "test.jvm.mem.total.max": { 222 | "value": 2045247488 223 | }, 224 | "test.jvm.mem.pools.PS-Survivor-Space.committed": { 225 | "value": 5242880 226 | }, 227 | "test.jvm.gc.PS-MarkSweep.time": { 228 | "value": 0 229 | }, 230 | "test.jvm.mem.heap.used": { 231 | "value": 33884024 232 | }, 233 | "test.jvm.mem.heap.usage": { 234 | "value": 0.017750246319853318 235 | }, 236 | "test.jvm.mem.pools.PS-Perm-Gen.max": { 237 | "value": 85983232 238 | }, 239 | "test.jvm.mem.pools.PS-Survivor-Space.max": { 240 | "value": 5242880 241 | }, 242 | "test.jvm.mem.pools.PS-Old-Gen.init": { 243 | "value": 89128960 244 | }, 245 | "test.jvm.mem.total.used": { 246 | "value": 51106240 247 | }, 248 | "test.jvm.mem.pools.PS-Perm-Gen.used": { 249 | "value": 16453128 250 | }, 251 | "test.jvm.mem.pools.PS-Eden-Space.init": { 252 | "value": 34603008 253 | }, 254 | "test.jvm.mem.non-heap.committed": { 255 | "value": 24576000 256 | }, 257 | "test.jvm.gc.PS-Scavenge.time": { 258 | "value": 19 259 | }, 260 | "test.jvm.mem.pools.PS-Old-Gen.max": { 261 | "value": 1431306240 262 | }, 263 | 
"test.jvm.mem.non-heap.usage": { 264 | "value": 0.12634142362154446 265 | } 266 | }, 267 | "ip": "192.158.1.113" 268 | } 269 | ``` 270 | 271 | ### KafkaConsumer 272 | 273 | ```java 274 | import java.io.IOException; 275 | 276 | import io.github.hengyunabc.metrics.MessageListener; 277 | import io.github.hengyunabc.metrics.MetricsKafkaConsumer; 278 | 279 | public class MetricsKafkaConsumerSample { 280 | 281 | String zookeeper; 282 | String topic; 283 | String group; 284 | 285 | MetricsKafkaConsumer consumer; 286 | 287 | public static void main(String[] args) throws IOException { 288 | 289 | String zookeeper = "localhost:2181"; 290 | String topic = "test-kafka-reporter"; 291 | String group = "consumer-test"; 292 | 293 | MetricsKafkaConsumer consumer = new MetricsKafkaConsumer(); 294 | 295 | consumer = new MetricsKafkaConsumer(); 296 | consumer.setZookeeper(zookeeper); 297 | consumer.setTopic(topic); 298 | consumer.setGroup(group); 299 | consumer.setMessageListener(new MessageListener() { 300 | 301 | @Override 302 | public void onMessage(String message) { 303 | System.err.println(message); 304 | } 305 | }); 306 | consumer.init(); 307 | 308 | System.in.read(); 309 | 310 | consumer.desotry(); 311 | } 312 | } 313 | ``` 314 | 315 | ## Maven dependency 316 | 317 | ```xml 318 | 319 | io.github.hengyunabc 320 | metrics-kafka 321 | 0.0.1 322 | 323 | ``` 324 | 325 | ## Others 326 | 327 | https://github.com/hengyunabc/zabbix-api 328 | 329 | https://github.com/hengyunabc/zabbix-sender 330 | 331 | https://github.com/hengyunabc/metrics-zabbix 332 | 333 | https://github.com/hengyunabc/kafka-zabbix 334 | 335 | ## License 336 | 337 | Apache License V2 338 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | org.sonatype.oss 4 | oss-parent 5 | 9 6 | 7 | 8 | 4.0.0 9 | io.github.hengyunabc 10 | metrics-kafka 11 | 0.0.1 12 | 13 | metrics-kafka 14 | Dropwizard Metrics reporter for kafka. 
15 | https://github.com/hengyunabc/metrics-kafka 16 | 2015 17 | 18 | 19 | 20 | Apache 2 21 | http://www.apache.org/licenses/LICENSE-2.0.txt 22 | repo 23 | A business-friendly OSS license 24 | 25 | 26 | 27 | 28 | 29 | hengyunabc 30 | hengyunabc 31 | hengyunabc@gmail.com 32 | 33 | 34 | 35 | 36 | scm:git:git@github.com:hengyunabc/metrics-kafka.git 37 | scm:git:git@github.com:hengyunabc/metrics-kafka.git 38 | https://github.com/hengyunabc/metrics-kafka 39 | 40 | 41 | 42 | 1.6 43 | 1.6 44 | 1.6 45 | UTF-8 46 | UTF-8 47 | 48 | 3.1.2 49 | 50 | 51 | 1.0.13 52 | 1.7.5 53 | 54 | 55 | 4.11 56 | 57 | 58 | 59 | 60 | 61 | 62 | org.apache.kafka 63 | kafka_2.10 64 | 0.8.2.2 65 | 66 | 67 | org.apache.zookeeper 68 | zookeeper 69 | 70 | 71 | 72 | 73 | 74 | org.apache.zookeeper 75 | zookeeper 76 | 3.4.5 77 | 78 | 79 | com.sun.jmx 80 | jmxri 81 | 82 | 83 | com.sun.jdmk 84 | jmxtools 85 | 86 | 87 | javax.jms 88 | jms 89 | 90 | 91 | 92 | 93 | 94 | io.dropwizard.metrics 95 | metrics-core 96 | ${metrics.version} 97 | 98 | 99 | 100 | io.dropwizard.metrics 101 | metrics-json 102 | ${metrics.version} 103 | 104 | 105 | 106 | io.dropwizard.metrics 107 | metrics-jvm 108 | ${metrics.version} 109 | test 110 | 111 | 112 | 113 | 114 | org.slf4j 115 | slf4j-api 116 | ${slf4j.version} 117 | compile 118 | 119 | 120 | ch.qos.logback 121 | logback-classic 122 | ${logback.version} 123 | runtime 124 | 125 | 126 | 127 | 128 | org.springframework 129 | spring-test 130 | 3.2.3.RELEASE 131 | test 132 | 133 | 134 | org.springframework 135 | spring-core 136 | 3.2.3.RELEASE 137 | test 138 | 139 | 140 | org.springframework 141 | spring-context 142 | 3.2.3.RELEASE 143 | test 144 | 145 | 146 | org.springframework 147 | spring-beans 148 | 3.2.3.RELEASE 149 | test 150 | 151 | 152 | com.ryantenney.metrics 153 | metrics-spring 154 | 3.0.3 155 | test 156 | 157 | 158 | junit 159 | junit 160 | ${junit.version} 161 | test 162 | 163 | 164 | 165 | 166 | 167 | 168 | release-sign-artifacts 169 | 170 | 171 | performRelease 172 | true 173 | 174 | 175 | 176 | 177 | 178 | org.apache.maven.plugins 179 | maven-gpg-plugin 180 | 181 | 182 | sign-artifacts 183 | verify 184 | 185 | sign 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | org.apache.maven.plugins 199 | maven-source-plugin 200 | 201 | 202 | attach-sources 203 | 204 | jar 205 | 206 | 207 | 208 | 209 | 210 | 211 | org.apache.maven.plugins 212 | maven-javadoc-plugin 213 | 214 | -Xdoclint:none 215 | 216 | 217 | 218 | attach-javadocs 219 | 220 | jar 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | -------------------------------------------------------------------------------- /src/main/java/io/github/hengyunabc/metrics/HostUtil.java: -------------------------------------------------------------------------------- 1 | package io.github.hengyunabc.metrics; 2 | 3 | import java.net.InetAddress; 4 | import java.net.NetworkInterface; 5 | import java.net.SocketException; 6 | import java.net.UnknownHostException; 7 | import java.util.Enumeration; 8 | 9 | import org.slf4j.Logger; 10 | import org.slf4j.LoggerFactory; 11 | 12 | /** 13 | * http://stackoverflow.com/questions/7348711/recommended-way-to-get-hostname-in-java 14 | * @author hengyunabc 15 | * 16 | */ 17 | public class HostUtil { 18 | private static final Logger logger = LoggerFactory 19 | .getLogger(HostUtil.class); 20 | 21 | public static void main(String[] args) { 22 | System.err.println(getHostName()); 23 | System.err.println(getHostAddress()); 24 | System.err.println(getNotLoopbackAddress()); 25 | } 26 | 27 | public static 
String getHostName() { 28 | try { 29 | String hostName = InetAddress.getLocalHost().getHostName(); 30 | if (hostName != null && !hostName.isEmpty()) { 31 | return hostName; 32 | } 33 | } catch (UnknownHostException e) { 34 | logger.error("get hostName error!", e); 35 | } 36 | 37 | String host = System.getenv("COMPUTERNAME"); 38 | if (host != null) 39 | return host; 40 | host = System.getenv("HOSTNAME"); 41 | if (host != null) 42 | return host; 43 | 44 | return null; 45 | } 46 | 47 | public static String getNotLoopbackAddress() { 48 | String hostName = null; 49 | Enumeration interfaces; 50 | try { 51 | interfaces = NetworkInterface.getNetworkInterfaces(); 52 | while (interfaces.hasMoreElements()) { 53 | NetworkInterface nic = interfaces.nextElement(); 54 | Enumeration addresses = nic.getInetAddresses(); 55 | while (hostName == null && addresses.hasMoreElements()) { 56 | InetAddress address = addresses.nextElement(); 57 | if (!address.isLoopbackAddress()) { 58 | hostName = address.getHostName(); 59 | } 60 | } 61 | } 62 | } catch (SocketException e) { 63 | logger.error("getNotLoopbackAddress error!", e); 64 | } 65 | return hostName; 66 | } 67 | 68 | public static String getHostAddress() { 69 | try { 70 | return InetAddress.getLocalHost().getHostAddress(); 71 | } catch (UnknownHostException e) { 72 | logger.error("get hostAddress error!", e); 73 | } 74 | 75 | return null; 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- /src/main/java/io/github/hengyunabc/metrics/KafkaReporter.java: -------------------------------------------------------------------------------- 1 | package io.github.hengyunabc.metrics; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import java.util.Map.Entry; 6 | import java.util.SortedMap; 7 | import java.util.concurrent.ExecutorService; 8 | import java.util.concurrent.Executors; 9 | import java.util.concurrent.TimeUnit; 10 | 11 | import org.slf4j.Logger; 12 | import org.slf4j.LoggerFactory; 13 | 14 | import com.codahale.metrics.Counter; 15 | import com.codahale.metrics.Gauge; 16 | import com.codahale.metrics.Histogram; 17 | import com.codahale.metrics.Meter; 18 | import com.codahale.metrics.MetricFilter; 19 | import com.codahale.metrics.MetricRegistry; 20 | import com.codahale.metrics.ScheduledReporter; 21 | import com.codahale.metrics.Timer; 22 | import com.codahale.metrics.json.MetricsModule; 23 | import com.fasterxml.jackson.databind.ObjectMapper; 24 | 25 | import kafka.javaapi.producer.Producer; 26 | import kafka.producer.KeyedMessage; 27 | import kafka.producer.ProducerConfig; 28 | 29 | /** 30 | * 31 | * @author hengyunabc 32 | * 33 | * 34 | */ 35 | public class KafkaReporter extends ScheduledReporter { 36 | private static final Logger logger = LoggerFactory 37 | .getLogger(KafkaReporter.class); 38 | 39 | String topic; 40 | ProducerConfig config; 41 | Producer producer; 42 | ExecutorService kafkaExecutor; 43 | 44 | private String prefix; 45 | private String hostName; 46 | private String ip; 47 | 48 | int count = 0; 49 | 50 | ObjectMapper mapper; 51 | 52 | private KafkaReporter(MetricRegistry registry, String name, 53 | TimeUnit rateUnit, TimeUnit durationUnit, boolean showSamples, MetricFilter filter, 54 | String topic, ProducerConfig config, String prefix, 55 | String hostName, String ip) { 56 | super(registry, name, filter, rateUnit, durationUnit); 57 | this.topic = topic; 58 | this.config = config; 59 | this.prefix = prefix; 60 | this.hostName = hostName; 61 | this.ip = ip; 62 | 63 | 
this.mapper = new ObjectMapper().registerModule(new MetricsModule(rateUnit, 64 | durationUnit, 65 | showSamples)); 66 | 67 | producer = new Producer(config); 68 | 69 | kafkaExecutor = Executors 70 | .newSingleThreadExecutor(new ThreadFactoryBuilder() 71 | .setNameFormat("kafka-producer-%d").build()); 72 | } 73 | 74 | public static Builder forRegistry(MetricRegistry registry) { 75 | return new Builder(registry); 76 | } 77 | 78 | public static class Builder { 79 | private final MetricRegistry registry; 80 | private String name = "kafka-reporter"; 81 | private TimeUnit rateUnit; 82 | private TimeUnit durationUnit; 83 | 84 | private boolean showSamples; 85 | 86 | private MetricFilter filter; 87 | 88 | private String prefix = ""; 89 | private String hostName; 90 | private String ip; 91 | 92 | private String topic; 93 | private ProducerConfig config; 94 | 95 | public Builder(MetricRegistry registry) { 96 | this.registry = registry; 97 | 98 | this.rateUnit = TimeUnit.SECONDS; 99 | this.durationUnit = TimeUnit.MILLISECONDS; 100 | this.filter = MetricFilter.ALL; 101 | } 102 | 103 | /** 104 | * Convert rates to the given time unit. 105 | * 106 | * @param rateUnit 107 | * a unit of time 108 | * @return {@code this} 109 | */ 110 | public Builder convertRatesTo(TimeUnit rateUnit) { 111 | this.rateUnit = rateUnit; 112 | return this; 113 | } 114 | 115 | /** 116 | * Convert durations to the given time unit. 117 | * 118 | * @param durationUnit 119 | * a unit of time 120 | * @return {@code this} 121 | */ 122 | public Builder convertDurationsTo(TimeUnit durationUnit) { 123 | this.durationUnit = durationUnit; 124 | return this; 125 | } 126 | 127 | public Builder showSamples(boolean showSamples) { 128 | this.showSamples = showSamples; 129 | return this; 130 | } 131 | 132 | /** 133 | * Only report metrics which match the given filter. 134 | * 135 | * @param filter 136 | * a {@link MetricFilter} 137 | * @return {@code this} 138 | */ 139 | public Builder filter(MetricFilter filter) { 140 | this.filter = filter; 141 | return this; 142 | } 143 | 144 | /** 145 | * default register name is "kafka-reporter". 146 | * 147 | * @param name 148 | * @return 149 | */ 150 | public Builder name(String name) { 151 | this.name = name; 152 | return this; 153 | } 154 | 155 | public Builder topic(String topic) { 156 | this.topic = topic; 157 | return this; 158 | } 159 | 160 | public Builder config(ProducerConfig config) { 161 | this.config = config; 162 | return this; 163 | } 164 | 165 | public Builder prefix(String prefix) { 166 | this.prefix = prefix; 167 | return this; 168 | } 169 | 170 | public Builder hostName(String hostName) { 171 | this.hostName = hostName; 172 | return this; 173 | } 174 | 175 | public Builder ip(String ip) { 176 | this.ip = ip; 177 | return this; 178 | } 179 | 180 | /** 181 | * Builds a {@link KafkaReporter} with the given properties. 
182 | * 183 | * @return a {@link KafkaReporter} 184 | */ 185 | public KafkaReporter build() { 186 | if (hostName == null) { 187 | hostName = HostUtil.getHostName(); 188 | logger.info(name + " detect hostName: " + hostName); 189 | } 190 | if (ip == null) { 191 | ip = HostUtil.getHostAddress(); 192 | logger.info(name + " detect ip: " + ip); 193 | } 194 | 195 | return new KafkaReporter(registry, name, rateUnit, durationUnit, showSamples, 196 | filter, topic, config, prefix, hostName, ip); 197 | } 198 | } 199 | 200 | private Map addPrefix(SortedMap map){ 201 | Map result = new HashMap(map.size()); 202 | for (Entry entry : map.entrySet()) { 203 | result.put(prefix + entry.getKey(), entry.getValue()); 204 | } 205 | return result; 206 | } 207 | 208 | @SuppressWarnings("rawtypes") 209 | @Override 210 | public void report(SortedMap gauges, 211 | SortedMap counters, 212 | SortedMap histograms, 213 | SortedMap meters, SortedMap timers) { 214 | 215 | final Map result = new HashMap(16); 216 | 217 | result.put("hostName", hostName); 218 | result.put("ip", ip); 219 | result.put("rateUnit", getRateUnit()); 220 | result.put("durationUnit", getDurationUnit()); 221 | 222 | result.put("gauges", addPrefix(gauges)); 223 | result.put("counters", addPrefix(counters)); 224 | result.put("histograms", addPrefix(histograms)); 225 | result.put("meters", addPrefix(meters)); 226 | result.put("timers", addPrefix(timers)); 227 | 228 | result.put("clock", System.currentTimeMillis()); 229 | 230 | kafkaExecutor.execute(new Runnable() { 231 | @Override 232 | public void run() { 233 | try { 234 | KeyedMessage message = new KeyedMessage( 235 | topic, "" + count++, mapper.writeValueAsString(result)); 236 | producer.send(message); 237 | } catch (Exception e) { 238 | logger.error("send metrics to kafka error!", e); 239 | } 240 | } 241 | }); 242 | } 243 | 244 | } 245 | -------------------------------------------------------------------------------- /src/main/java/io/github/hengyunabc/metrics/MessageListener.java: -------------------------------------------------------------------------------- 1 | package io.github.hengyunabc.metrics; 2 | 3 | /** 4 | * not thread safe. 5 | * 6 | * @author hengyunabc 7 | * 8 | */ 9 | public interface MessageListener { 10 | 11 | /** 12 | * receive metrics message, not threadsafe. 
13 | * 14 | * @param jsonStringMessage 15 | */ 16 | public void onMessage(String jsonStringMessage); 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/io/github/hengyunabc/metrics/MetricsKafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package io.github.hengyunabc.metrics; 2 | 3 | import java.util.HashMap; 4 | import java.util.List; 5 | import java.util.Map; 6 | import java.util.Properties; 7 | import java.util.concurrent.ExecutorService; 8 | import java.util.concurrent.Executors; 9 | 10 | import org.slf4j.Logger; 11 | import org.slf4j.LoggerFactory; 12 | 13 | import kafka.consumer.ConsumerConfig; 14 | import kafka.consumer.ConsumerIterator; 15 | import kafka.consumer.KafkaStream; 16 | import kafka.javaapi.consumer.ConsumerConnector; 17 | 18 | public class MetricsKafkaConsumer { 19 | private static final Logger logger = LoggerFactory.getLogger(MetricsKafkaConsumer.class); 20 | String zookeeper; 21 | String group; 22 | String topic; 23 | 24 | int threadNumber = 1; 25 | 26 | int zookeeperSessionTimeoutMs = 4000; 27 | int zookeeperSyncTimeMs = 2000; 28 | int autoCommitIntervalMs = 1000; 29 | 30 | MessageListener messageListener; 31 | 32 | ConsumerConnector consumer; 33 | 34 | ExecutorService executor; 35 | 36 | @SuppressWarnings("rawtypes") 37 | public void init() { 38 | Properties props = new Properties(); 39 | props.put("zookeeper.connect", zookeeper); 40 | props.put("group.id", group); 41 | props.put("zookeeper.session.timeout.ms", "" + zookeeperSessionTimeoutMs); 42 | props.put("zookeeper.sync.time.ms", "" + zookeeperSyncTimeMs); 43 | props.put("auto.commit.interval.ms", "" + autoCommitIntervalMs); 44 | 45 | ConsumerConfig config = new ConsumerConfig(props); 46 | 47 | consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config); 48 | 49 | Map topicCountMap = new HashMap(); 50 | topicCountMap.put(topic, threadNumber); 51 | Map>> consumerMap = consumer.createMessageStreams(topicCountMap); 52 | List> streams = consumerMap.get(topic); 53 | 54 | // now launch all the threads 55 | // 56 | executor = Executors.newFixedThreadPool(threadNumber, 57 | new ThreadFactoryBuilder().setNameFormat("kafka-metrics-consumer-%d").build()); 58 | 59 | for (final KafkaStream stream : streams) { 60 | executor.submit(new Runnable() { 61 | @SuppressWarnings("unchecked") 62 | @Override 63 | public void run() { 64 | 65 | ConsumerIterator it = stream.iterator(); 66 | while (it.hasNext()) { 67 | try { 68 | messageListener.onMessage(new String(it.next().message())); 69 | } catch (RuntimeException e) { 70 | logger.error("consumer kafka metrics message error!", e); 71 | } 72 | } 73 | } 74 | }); 75 | } 76 | 77 | } 78 | 79 | public void desotry() { 80 | try { 81 | if (consumer != null) { 82 | consumer.shutdown(); 83 | } 84 | } finally { 85 | if (executor != null) { 86 | executor.shutdown(); 87 | } 88 | } 89 | } 90 | 91 | public String getZookeeper() { 92 | return zookeeper; 93 | } 94 | 95 | public void setZookeeper(String zookeeper) { 96 | this.zookeeper = zookeeper; 97 | } 98 | 99 | public String getGroup() { 100 | return group; 101 | } 102 | 103 | public void setGroup(String group) { 104 | this.group = group; 105 | } 106 | 107 | public String getTopic() { 108 | return topic; 109 | } 110 | 111 | public void setTopic(String topic) { 112 | this.topic = topic; 113 | } 114 | 115 | public int getThreadNumber() { 116 | return threadNumber; 117 | } 118 | 119 | public void setThreadNumber(int threadNumber) { 
120 | this.threadNumber = threadNumber; 121 | } 122 | 123 | public int getZookeeperSessionTimeoutMs() { 124 | return zookeeperSessionTimeoutMs; 125 | } 126 | 127 | public void setZookeeperSessionTimeoutMs(int zookeeperSessionTimeoutMs) { 128 | this.zookeeperSessionTimeoutMs = zookeeperSessionTimeoutMs; 129 | } 130 | 131 | public int getZookeeperSyncTimeMs() { 132 | return zookeeperSyncTimeMs; 133 | } 134 | 135 | public void setZookeeperSyncTimeMs(int zookeeperSyncTimeMs) { 136 | this.zookeeperSyncTimeMs = zookeeperSyncTimeMs; 137 | } 138 | 139 | public int getAutoCommitIntervalMs() { 140 | return autoCommitIntervalMs; 141 | } 142 | 143 | public void setAutoCommitIntervalMs(int autoCommitIntervalMs) { 144 | this.autoCommitIntervalMs = autoCommitIntervalMs; 145 | } 146 | 147 | public MessageListener getMessageListener() { 148 | return messageListener; 149 | } 150 | 151 | public void setMessageListener(MessageListener messageListener) { 152 | this.messageListener = messageListener; 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /src/main/java/io/github/hengyunabc/metrics/ThreadFactoryBuilder.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2010 The Guava Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.hengyunabc.metrics; 18 | 19 | import java.lang.Thread.UncaughtExceptionHandler; 20 | import java.util.concurrent.Executors; 21 | import java.util.concurrent.ThreadFactory; 22 | import java.util.concurrent.atomic.AtomicLong; 23 | 24 | /** 25 | * From google guava. 26 | * A ThreadFactory builder, providing any combination of these features: 27 | *
<ul> 28 | * <li>whether threads should be marked as {@linkplain Thread#setDaemon daemon} 29 | * threads 30 | * <li>a {@linkplain ThreadFactoryBuilder#setNameFormat naming format} 31 | * <li>a {@linkplain Thread#setPriority thread priority} 32 | * <li>an {@linkplain Thread#setUncaughtExceptionHandler uncaught exception 33 | * handler} 34 | * <li>a {@linkplain ThreadFactory#newThread backing thread factory} 35 | * </ul> 36 | * <p>
37 | * If no backing thread factory is provided, a default backing thread factory is 38 | * used as if by calling {@code setThreadFactory(} 39 | * {@link Executors#defaultThreadFactory()}{@code )}. 40 | * 41 | * @author Kurt Alfred Kluever 42 | * @since 4.0 43 | */ 44 | public final class ThreadFactoryBuilder { 45 | private String nameFormat = null; 46 | private Boolean daemon = null; 47 | private Integer priority = null; 48 | private UncaughtExceptionHandler uncaughtExceptionHandler = null; 49 | private ThreadFactory backingThreadFactory = null; 50 | 51 | /** 52 | * Creates a new {@link ThreadFactory} builder. 53 | */ 54 | public ThreadFactoryBuilder() { 55 | } 56 | 57 | /** 58 | * Sets the naming format to use when naming threads ({@link Thread#setName} 59 | * ) which are created with this ThreadFactory. 60 | * 61 | * @param nameFormat 62 | * a {@link String#format(String, Object...)}-compatible format 63 | * String, to which a unique integer (0, 1, etc.) will be 64 | * supplied as the single parameter. This integer will be unique 65 | * to the built instance of the ThreadFactory and will be 66 | * assigned sequentially. For example, {@code "rpc-pool-%d"} will 67 | * generate thread names like {@code "rpc-pool-0"}, 68 | * {@code "rpc-pool-1"}, {@code "rpc-pool-2"}, etc. 69 | * @return this for the builder pattern 70 | */ 71 | public ThreadFactoryBuilder setNameFormat(String nameFormat) { 72 | String.format(nameFormat, 0); // fail fast if the format is bad or null 73 | this.nameFormat = nameFormat; 74 | return this; 75 | } 76 | 77 | /** 78 | * Sets daemon or not for new threads created with this ThreadFactory. 79 | * 80 | * @param daemon 81 | * whether or not new Threads created with this ThreadFactory 82 | * will be daemon threads 83 | * @return this for the builder pattern 84 | */ 85 | public ThreadFactoryBuilder setDaemon(boolean daemon) { 86 | this.daemon = daemon; 87 | return this; 88 | } 89 | 90 | /** 91 | * Sets the priority for new threads created with this ThreadFactory. 92 | * 93 | * @param priority 94 | * the priority for new Threads created with this ThreadFactory 95 | * @return this for the builder pattern 96 | */ 97 | public ThreadFactoryBuilder setPriority(int priority) { 98 | // Thread#setPriority() already checks for validity. These error 99 | // messages 100 | // are nicer though and will fail-fast. 101 | checkArgument(priority >= Thread.MIN_PRIORITY, "Thread priority (%s) must be >= %s", priority, 102 | Thread.MIN_PRIORITY); 103 | checkArgument(priority <= Thread.MAX_PRIORITY, "Thread priority (%s) must be <= %s", priority, 104 | Thread.MAX_PRIORITY); 105 | this.priority = priority; 106 | return this; 107 | } 108 | 109 | /** 110 | * Sets the {@link UncaughtExceptionHandler} for new threads created with 111 | * this ThreadFactory. 112 | * 113 | * @param uncaughtExceptionHandler 114 | * the uncaught exception handler for new Threads created with 115 | * this ThreadFactory 116 | * @return this for the builder pattern 117 | */ 118 | public ThreadFactoryBuilder setUncaughtExceptionHandler(UncaughtExceptionHandler uncaughtExceptionHandler) { 119 | this.uncaughtExceptionHandler = checkNotNull(uncaughtExceptionHandler); 120 | return this; 121 | } 122 | 123 | /** 124 | * Sets the backing {@link ThreadFactory} for new threads created with this 125 | * ThreadFactory. Threads will be created by invoking #newThread(Runnable) 126 | * on this backing {@link ThreadFactory}. 
127 | * 128 | * @param backingThreadFactory 129 | * the backing {@link ThreadFactory} which will be delegated to 130 | * during thread creation. 131 | * @return this for the builder pattern 132 | * 133 | * @see MoreExecutors 134 | */ 135 | public ThreadFactoryBuilder setThreadFactory(ThreadFactory backingThreadFactory) { 136 | this.backingThreadFactory = checkNotNull(backingThreadFactory); 137 | return this; 138 | } 139 | 140 | /** 141 | * Returns a new thread factory using the options supplied during the 142 | * building process. After building, it is still possible to change the 143 | * options used to build the ThreadFactory and/or build again. State is not 144 | * shared amongst built instances. 145 | * 146 | * @return the fully constructed {@link ThreadFactory} 147 | */ 148 | public ThreadFactory build() { 149 | return build(this); 150 | } 151 | 152 | private static ThreadFactory build(ThreadFactoryBuilder builder) { 153 | final String nameFormat = builder.nameFormat; 154 | final Boolean daemon = builder.daemon; 155 | final Integer priority = builder.priority; 156 | final UncaughtExceptionHandler uncaughtExceptionHandler = builder.uncaughtExceptionHandler; 157 | final ThreadFactory backingThreadFactory = (builder.backingThreadFactory != null) ? builder.backingThreadFactory 158 | : Executors.defaultThreadFactory(); 159 | final AtomicLong count = (nameFormat != null) ? new AtomicLong(0) : null; 160 | return new ThreadFactory() { 161 | @Override 162 | public Thread newThread(Runnable runnable) { 163 | Thread thread = backingThreadFactory.newThread(runnable); 164 | if (nameFormat != null) { 165 | thread.setName(String.format(nameFormat, count.getAndIncrement())); 166 | } 167 | if (daemon != null) { 168 | thread.setDaemon(daemon); 169 | } 170 | if (priority != null) { 171 | thread.setPriority(priority); 172 | } 173 | if (uncaughtExceptionHandler != null) { 174 | thread.setUncaughtExceptionHandler(uncaughtExceptionHandler); 175 | } 176 | return thread; 177 | } 178 | }; 179 | } 180 | 181 | /** 182 | * Substitutes each {@code %s} in {@code template} with an argument. These 183 | * are matched by position: the first {@code %s} gets {@code args[0]}, etc. 184 | * If there are more arguments than placeholders, the unmatched arguments 185 | * will be appended to the end of the formatted message in square braces. 186 | * 187 | * @param template 188 | * a non-null string containing 0 or more {@code %s} 189 | * placeholders. 190 | * @param args 191 | * the arguments to be substituted into the message template. 192 | * Arguments are converted to strings using 193 | * {@link String#valueOf(Object)}. Arguments can be null. 194 | */ 195 | // Note that this is somewhat-improperly used from Verify.java as well. 196 | static String format(String template, Object... 
args) { 197 | template = String.valueOf(template); // null -> "null" 198 | 199 | // start substituting the arguments into the '%s' placeholders 200 | StringBuilder builder = new StringBuilder(template.length() + 16 * args.length); 201 | int templateStart = 0; 202 | int i = 0; 203 | while (i < args.length) { 204 | int placeholderStart = template.indexOf("%s", templateStart); 205 | if (placeholderStart == -1) { 206 | break; 207 | } 208 | builder.append(template.substring(templateStart, placeholderStart)); 209 | builder.append(args[i++]); 210 | templateStart = placeholderStart + 2; 211 | } 212 | builder.append(template.substring(templateStart)); 213 | 214 | // if we run out of placeholders, append the extra args in square braces 215 | if (i < args.length) { 216 | builder.append(" ["); 217 | builder.append(args[i++]); 218 | while (i < args.length) { 219 | builder.append(", "); 220 | builder.append(args[i++]); 221 | } 222 | builder.append(']'); 223 | } 224 | 225 | return builder.toString(); 226 | } 227 | 228 | /** 229 | * Ensures the truth of an expression involving one or more parameters to 230 | * the calling method. 231 | * 232 | * @param expression 233 | * a boolean expression 234 | * @param errorMessageTemplate 235 | * a template for the exception message should the check fail. 236 | * The message is formed by replacing each {@code %s} placeholder 237 | * in the template with an argument. These are matched by 238 | * position - the first {@code %s} gets 239 | * {@code errorMessageArgs[0]}, etc. Unmatched arguments will be 240 | * appended to the formatted message in square braces. Unmatched 241 | * placeholders will be left as-is. 242 | * @param errorMessageArgs 243 | * the arguments to be substituted into the message template. 244 | * Arguments are converted to strings using 245 | * {@link String#valueOf(Object)}. 246 | * @throws IllegalArgumentException 247 | * if {@code expression} is false 248 | * @throws NullPointerException 249 | * if the check fails and either {@code errorMessageTemplate} or 250 | * {@code errorMessageArgs} is null (don't let this happen) 251 | */ 252 | private void checkArgument(boolean expression, String errorMessageTemplate, Object... errorMessageArgs) { 253 | if (!expression) { 254 | throw new IllegalArgumentException(format(errorMessageTemplate, errorMessageArgs)); 255 | } 256 | 257 | } 258 | 259 | /** 260 | * Ensures that an object reference passed as a parameter to the calling 261 | * method is not null. 262 | * 263 | * @param reference 264 | * an object reference 265 | * @return the non-null reference that was validated 266 | * @throws NullPointerException 267 | * if {@code reference} is null 268 | */ 269 | public static T checkNotNull(T reference) { 270 | if (reference == null) { 271 | throw new NullPointerException(); 272 | } 273 | return reference; 274 | } 275 | 276 | } 277 | -------------------------------------------------------------------------------- /src/test/java/io/github/hengyunabc/metrics/test/AnnotationObject.java: -------------------------------------------------------------------------------- 1 | package io.github.hengyunabc.metrics.test; 2 | 3 | import java.util.Random; 4 | import java.util.concurrent.TimeUnit; 5 | 6 | import com.codahale.metrics.annotation.Timed; 7 | import com.ryantenney.metrics.annotation.Counted; 8 | 9 | public class AnnotationObject { 10 | 11 | Random random = new Random(); 12 | 13 | /** 14 | * stat call times and time. 
15 | */ 16 | @Timed 17 | public void call() { 18 | try { 19 | TimeUnit.MILLISECONDS.sleep(random.nextInt(3000)); 20 | } catch (InterruptedException e) { 21 | e.printStackTrace(); 22 | } 23 | } 24 | 25 | /** 26 | * stat user login times. 27 | */ 28 | @Counted 29 | public void userLogin(){ 30 | 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/test/java/io/github/hengyunabc/metrics/test/KafkaReporterSample.java: -------------------------------------------------------------------------------- 1 | package io.github.hengyunabc.metrics.test; 2 | 3 | import java.io.IOException; 4 | import java.util.Properties; 5 | import java.util.Timer; 6 | import java.util.TimerTask; 7 | import java.util.concurrent.TimeUnit; 8 | 9 | import com.codahale.metrics.ConsoleReporter; 10 | import com.codahale.metrics.Histogram; 11 | import com.codahale.metrics.MetricRegistry; 12 | import com.codahale.metrics.Timer.Context; 13 | import com.codahale.metrics.jvm.GarbageCollectorMetricSet; 14 | import com.codahale.metrics.jvm.MemoryUsageGaugeSet; 15 | 16 | import io.github.hengyunabc.metrics.KafkaReporter; 17 | import kafka.producer.ProducerConfig; 18 | 19 | public class KafkaReporterSample { 20 | static final MetricRegistry metrics = new MetricRegistry(); 21 | static public Timer timer = new Timer(); 22 | 23 | public static void main(String args[]) throws IOException, 24 | InterruptedException { 25 | ConsoleReporter reporter = ConsoleReporter.forRegistry(metrics) 26 | .convertRatesTo(TimeUnit.SECONDS) 27 | .convertDurationsTo(TimeUnit.MILLISECONDS).build(); 28 | metrics.register("jvm.mem", new MemoryUsageGaugeSet()); 29 | metrics.register("jvm.gc", new GarbageCollectorMetricSet()); 30 | 31 | final Histogram responseSizes = metrics.histogram("response-sizes"); 32 | final com.codahale.metrics.Timer metricsTimer = metrics 33 | .timer("test-timer"); 34 | 35 | timer.schedule(new TimerTask() { 36 | int i = 100; 37 | 38 | @Override 39 | public void run() { 40 | Context context = metricsTimer.time(); 41 | try { 42 | TimeUnit.MILLISECONDS.sleep(500); 43 | } catch (InterruptedException e) { 44 | // TODO Auto-generated catch block 45 | e.printStackTrace(); 46 | } 47 | responseSizes.update(i++); 48 | context.stop(); 49 | } 50 | 51 | }, 1000, 1000); 52 | 53 | reporter.start(5, TimeUnit.SECONDS); 54 | 55 | String hostName = "localhost"; 56 | String topic = "test-kafka-reporter"; 57 | Properties props = new Properties(); 58 | props.put("metadata.broker.list", "127.0.0.1:9092"); 59 | props.put("serializer.class", "kafka.serializer.StringEncoder"); 60 | props.put("partitioner.class", "kafka.producer.DefaultPartitioner"); 61 | props.put("request.required.acks", "1"); 62 | 63 | String prefix = "test."; 64 | ProducerConfig config = new ProducerConfig(props); 65 | KafkaReporter kafkaReporter = KafkaReporter.forRegistry(metrics) 66 | .config(config).topic(topic).hostName(hostName).prefix(prefix).build(); 67 | 68 | kafkaReporter.start(1, TimeUnit.SECONDS); 69 | 70 | TimeUnit.SECONDS.sleep(500); 71 | } 72 | } -------------------------------------------------------------------------------- /src/test/java/io/github/hengyunabc/metrics/test/MetricsKafkaConsumerSample.java: -------------------------------------------------------------------------------- 1 | package io.github.hengyunabc.metrics.test; 2 | 3 | import java.io.IOException; 4 | 5 | import io.github.hengyunabc.metrics.MessageListener; 6 | import io.github.hengyunabc.metrics.MetricsKafkaConsumer; 7 | 8 | public class 
MetricsKafkaConsumerSample { 9 | 10 | String zookeeper; 11 | String topic; 12 | String group; 13 | 14 | MetricsKafkaConsumer consumer; 15 | 16 | public static void main(String[] args) throws IOException { 17 | 18 | String zookeeper = "localhost:2181"; 19 | String topic = "test-kafka-reporter"; 20 | String group = "consumer-test"; 21 | 22 | MetricsKafkaConsumer consumer = new MetricsKafkaConsumer(); 23 | 24 | consumer = new MetricsKafkaConsumer(); 25 | consumer.setZookeeper(zookeeper); 26 | consumer.setTopic(topic); 27 | consumer.setGroup(group); 28 | consumer.setMessageListener(new MessageListener() { 29 | 30 | @Override 31 | public void onMessage(String message) { 32 | System.err.println(message); 33 | } 34 | }); 35 | consumer.init(); 36 | 37 | System.in.read(); 38 | 39 | consumer.desotry(); 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /src/test/java/io/github/hengyunabc/metrics/test/SpringTest.java: -------------------------------------------------------------------------------- 1 | package io.github.hengyunabc.metrics.test; 2 | 3 | import io.github.hengyunabc.metrics.KafkaReporter; 4 | 5 | import java.util.Properties; 6 | import java.util.concurrent.TimeUnit; 7 | 8 | import kafka.producer.ProducerConfig; 9 | 10 | import org.junit.Before; 11 | import org.junit.Test; 12 | import org.junit.runner.RunWith; 13 | import org.springframework.aop.framework.AopContext; 14 | import org.springframework.beans.factory.annotation.Autowired; 15 | import org.springframework.test.context.ContextConfiguration; 16 | import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; 17 | 18 | import com.codahale.metrics.MetricRegistry; 19 | 20 | @RunWith(SpringJUnit4ClassRunner.class) 21 | @ContextConfiguration("classpath:spring-test.xml") 22 | public class SpringTest { 23 | 24 | @Autowired 25 | AnnotationObject annotationObject; 26 | 27 | @Autowired 28 | MetricRegistry metrics; 29 | 30 | @Before 31 | public void before(){ 32 | startKafkaReporter(); 33 | } 34 | 35 | public void startKafkaReporter(){ 36 | String hostName = "192.168.66.30"; 37 | String topic = "test-kafka-reporter"; 38 | Properties props = new Properties(); 39 | props.put("metadata.broker.list", "192.168.90.147:9091"); 40 | props.put("serializer.class", "kafka.serializer.StringEncoder"); 41 | props.put("partitioner.class", "kafka.producer.DefaultPartitioner"); 42 | props.put("request.required.acks", "1"); 43 | 44 | String prefix = "test."; 45 | ProducerConfig config = new ProducerConfig(props); 46 | KafkaReporter kafkaReporter = KafkaReporter.forRegistry(metrics) 47 | .config(config).topic(topic).hostName(hostName).prefix(prefix).build(); 48 | 49 | kafkaReporter.start(3, TimeUnit.SECONDS); 50 | } 51 | 52 | @Test 53 | public void test() throws InterruptedException{ 54 | Thread t = new Thread(new Runnable() { 55 | @Override 56 | public void run() { 57 | while (true) { 58 | annotationObject.call(); 59 | annotationObject.userLogin(); 60 | } 61 | } 62 | }); 63 | t.start(); 64 | 65 | TimeUnit.SECONDS.sleep(500); 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Root logger option 2 | log4j.rootLogger=INFO, stdout 3 | 4 | # Direct log messages to a log file 5 | log4j.appender.file=org.apache.log4j.RollingFileAppender 6 | log4j.appender.file.File=C:\\logging.log 7 | log4j.appender.file.MaxFileSize=10MB 8 | 
log4j.appender.file.MaxBackupIndex=10 9 | log4j.appender.file.layout=org.apache.log4j.PatternLayout 10 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 11 | 12 | # Direct log messages to stdout 13 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 14 | log4j.appender.stdout.Target=System.out 15 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n -------------------------------------------------------------------------------- /src/test/resources/spring-test.xml: -------------------------------------------------------------------------------- (Spring test context, lines 1-33; XML element markup not preserved in this listing) --------------------------------------------------------------------------------
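For reference, a minimal metrics-spring 3.x context that would satisfy SpringTest above — an autowired MetricRegistry, annotation-driven @Timed/@Counted support, and the AnnotationObject bean — might look like the sketch below. This is an assumption-based reconstruction (element and schema names follow the metrics-spring 3.x documentation), not the original spring-test.xml.

```xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Assumption-based sketch of a metrics-spring test context; not the original file. -->
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:metrics="http://www.ryantenney.com/schema/metrics"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
                           http://www.springframework.org/schema/beans/spring-beans-3.2.xsd
                           http://www.ryantenney.com/schema/metrics
                           http://www.ryantenney.com/schema/metrics/metrics-3.0.xsd">

    <!-- Registry autowired into SpringTest -->
    <metrics:metric-registry id="metrics"/>

    <!-- Enables @Timed / @Counted; CGLIB proxying is assumed because
         AnnotationObject is a concrete class injected without an interface -->
    <metrics:annotation-driven metric-registry="metrics" proxy-target-class="true"/>

    <!-- Bean whose annotated methods are measured -->
    <bean id="annotationObject" class="io.github.hengyunabc.metrics.test.AnnotationObject"/>
</beans>
```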