├── .gitignore ├── LICENSE ├── README.md ├── bin ├── create-topic.sh ├── debug.sh └── read-topic-with-headers.sh ├── docker-compose.yml ├── pom.xml └── src ├── main └── java │ └── com │ └── github │ └── themeetgroup │ └── kafka │ └── connect │ └── rabbitmq │ ├── CommonRabbitMQConnectorConfig.java │ ├── sink │ ├── RabbitMQSinkConnector.java │ ├── RabbitMQSinkConnectorConfig.java │ ├── RabbitMQSinkHeaderParser.java │ └── RabbitMQSinkTask.java │ └── source │ ├── ConnectConsumer.java │ ├── RabbitMQSourceConnector.java │ ├── RabbitMQSourceConnectorConfig.java │ ├── RabbitMQSourceTask.java │ └── data │ ├── BytesSourceMessageConverter.java │ ├── MessageConverter.java │ ├── SourceMessageConverter.java │ ├── SourceRecordBuilder.java │ └── StringSourceMessageConverter.java └── test ├── java └── com │ └── github │ └── themeetgroup │ └── kafka │ └── connect │ └── rabbitmq │ └── source │ └── data │ ├── MessageConverterTest.java │ └── TransformationTest.java └── resources └── logback.xml /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | 3 | # Mobile Tools for Java (J2ME) 4 | .mtj.tmp/ 5 | 6 | # Package Files # 7 | *.jar 8 | *.war 9 | *.ear 10 | 11 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 12 | hs_err_pid* 13 | target 14 | 15 | #Project and IDE files 16 | .project 17 | .classpath 18 | .settings 19 | .okhttpcache 20 | .idea 21 | .vscode 22 | .github 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # Introduction 3 | 4 | # Source Connectors 5 | 6 | 7 | ## RabbitMQSourceConnector 8 | 9 | Connector is used to read from a RabbitMQ Queue or Topic. 10 | 11 | 12 | 13 | 14 | 15 | 16 | ### Configuration 17 | 18 | ##### `kafka.topic` 19 | *Importance:* High 20 | 21 | *Type:* String 22 | 23 | 24 | Kafka topic to write the messages to. 25 | ##### `rabbitmq.queue` 26 | *Importance:* High 27 | 28 | *Type:* List 29 | 30 | 31 | rabbitmq.queue 32 | ##### `rabbitmq.host` 33 | *Importance:* High 34 | 35 | *Type:* String 36 | 37 | *Default Value:* localhost 38 | 39 | 40 | The RabbitMQ host to connect to. See `ConnectionFactory.setHost(java.lang.String) `_ 41 | ##### `rabbitmq.password` 42 | *Importance:* High 43 | 44 | *Type:* String 45 | 46 | *Default Value:* guest 47 | 48 | 49 | The password to authenticate to RabbitMQ with. 
See `ConnectionFactory.setPassword(java.lang.String) `_ 50 | ##### `rabbitmq.username` 51 | *Importance:* High 52 | 53 | *Type:* String 54 | 55 | *Default Value:* guest 56 | 57 | 58 | The username to authenticate to RabbitMQ with. See `ConnectionFactory.setUsername(java.lang.String) `_ 59 | ##### `rabbitmq.virtual.host` 60 | *Importance:* High 61 | 62 | *Type:* String 63 | 64 | *Default Value:* / 65 | 66 | The virtual host to use when connecting to the broker. See `ConnectionFactory.setVirtualHost(java.lang.String) `_ 67 | ##### `message.converter` 68 | *Importance:* Medium 69 | 70 | *Type:* String 71 | 72 | *Default Value:* com.github.themeetgroup.kafka.connect.rabbitmq.source.data.MessageConverter 73 | 74 | Converter to compose the Kafka message. 75 | ##### `rabbitmq.port` 76 | *Importance:* Medium 77 | 78 | *Type:* Int 79 | 80 | *Default Value:* 5672 81 | 82 | 83 | The RabbitMQ port to connect to. See `ConnectionFactory.setPort(int) `_ 84 | ##### `rabbitmq.prefetch.count` 85 | *Importance:* Medium 86 | 87 | *Type:* Int 88 | 89 | *Default Value:* 0 90 | 91 | 92 | Maximum number of messages that the server will deliver, 0 if unlimited. See `Channel.basicQos(int, boolean) `_ 93 | ##### `rabbitmq.prefetch.global` 94 | *Importance:* Medium 95 | 96 | *Type:* Boolean 97 | 98 | *Default Value:* false 99 | 100 | 101 | True if the settings should be applied to the entire channel rather than each consumer. See `Channel.basicQos(int, boolean) `_ 102 | ##### `rabbitmq.automatic.recovery.enabled` 103 | *Importance:* Low 104 | 105 | *Type:* Boolean 106 | 107 | *Default Value:* true 108 | 109 | 110 | Enables or disables automatic connection recovery. See `ConnectionFactory.setAutomaticRecoveryEnabled(boolean) `_ 111 | ##### `rabbitmq.connection.timeout.ms` 112 | *Importance:* Low 113 | 114 | *Type:* Int 115 | 116 | *Default Value:* 60000 117 | 118 | 119 | Connection TCP establishment timeout in milliseconds. zero for infinite. 
See `ConnectionFactory.setConnectionTimeout(int) `_ 120 | ##### `rabbitmq.handshake.timeout.ms` 121 | *Importance:* Low 122 | 123 | *Type:* Int 124 | 125 | *Default Value:* 10000 126 | 127 | 128 | The AMQP0-9-1 protocol handshake timeout, in milliseconds. See `ConnectionFactory.setHandshakeTimeout(int) `_ 129 | ##### `rabbitmq.network.recovery.interval.ms` 130 | *Importance:* Low 131 | 132 | *Type:* Int 133 | 134 | *Default Value:* 10000 135 | 136 | 137 | See `ConnectionFactory.setNetworkRecoveryInterval(long) `_ 138 | ##### `rabbitmq.requested.channel.max` 139 | *Importance:* Low 140 | 141 | *Type:* Int 142 | 143 | *Default Value:* 0 144 | 145 | 146 | Initially requested maximum channel number. Zero for unlimited. See `ConnectionFactory.setRequestedChannelMax(int) `_ 147 | ##### `rabbitmq.requested.frame.max` 148 | *Importance:* Low 149 | 150 | *Type:* Int 151 | 152 | *Default Value:* 0 153 | 154 | 155 | Initially requested maximum frame size, in octets. Zero for unlimited. See `ConnectionFactory.setRequestedFrameMax(int) `_ 156 | ##### `rabbitmq.requested.heartbeat.seconds` 157 | *Importance:* Low 158 | 159 | *Type:* Int 160 | 161 | *Default Value:* 60 162 | 163 | 164 | Set the requested heartbeat timeout. Heartbeat frames will be sent at about 1/2 the timeout interval. If server heartbeat timeout is configured to a non-zero value, this method can only be used to lower the value; otherwise any value provided by the client will be used. See `ConnectionFactory.setRequestedHeartbeat(int) `_ 165 | ##### `rabbitmq.shutdown.timeout.ms` 166 | *Importance:* Low 167 | 168 | *Type:* Int 169 | 170 | *Default Value:* 10000 171 | 172 | 173 | Set the shutdown timeout. This is the amount of time that Consumer implementations have to continue working through deliveries (and other Consumer callbacks) after the connection has closed but before the ConsumerWorkService is torn down. 
If consumers exceed this timeout then any remaining queued deliveries (and other Consumer callbacks, *including* the Consumer's handleShutdownSignal() invocation) will be lost. See `ConnectionFactory.setShutdownTimeout(int) `_ 174 | ##### `rabbitmq.topology.recovery.enabled` 175 | *Importance:* Low 176 | 177 | *Type:* Boolean 178 | 179 | *Default Value:* true 180 | 181 | 182 | Enables or disables topology recovery. See `ConnectionFactory.setTopologyRecoveryEnabled(boolean) `_ 183 | 184 | #### Examples 185 | 186 | ##### Standalone Example 187 | 188 | This configuration is used typically along with [standalone mode](http://docs.confluent.io/current/connect/concepts.html#standalone-workers). 189 | 190 | ```properties 191 | name=RabbitMQSourceConnector1 192 | connector.class=com.github.themeetgroup.kafka.connect.rabbitmq.source.RabbitMQSourceConnector 193 | tasks.max=1 194 | kafka.topic=< Required Configuration > 195 | rabbitmq.queue=< Required Configuration > 196 | ``` 197 | 198 | ##### Distributed Example 199 | 200 | This configuration is used typically along with [distributed mode](http://docs.confluent.io/current/connect/concepts.html#distributed-workers). 201 | Write the following json to `connector.json`, configure all of the required values, and use the command below to 202 | post the configuration to one of the distributed connect worker(s). 203 | 204 | ```json 205 | { 206 | "config" : { 207 | "name" : "RabbitMQSourceConnector1", 208 | "connector.class" : "com.github.themeetgroup.kafka.connect.rabbitmq.source.RabbitMQSourceConnector", 209 | "tasks.max" : "1", 210 | "kafka.topic" : "< Required Configuration >", 211 | "rabbitmq.queue" : "< Required Configuration >" 212 | } 213 | } 214 | ``` 215 | 216 | Use curl to post the configuration to one of the Kafka Connect Workers. Change `http://localhost:8083/` to the endpoint of 217 | one of your Kafka Connect worker(s). 218 | 219 | Create a new instance. 
220 | ```bash 221 | curl -s -X POST -H 'Content-Type: application/json' --data @connector.json http://localhost:8083/connectors 222 | ``` 223 | 224 | Update an existing instance. 225 | ```bash 226 | curl -s -X PUT -H 'Content-Type: application/json' --data @connector.json http://localhost:8083/connectors/TestSinkConnector1/config 227 | ``` 228 | 229 | 230 | 231 | # Sink Connectors 232 | 233 | 234 | ## RabbitMQSinkConnector 235 | 236 | Connector is used to read data from a Kafka topic and publish it on a RabbitMQ exchange and routing key pair. 237 | 238 | 239 | 240 | 241 | 242 | 243 | ### Configuration 244 | 245 | ##### `rabbitmq.exchange` 246 | *Importance:* High 247 | 248 | *Type:* String 249 | 250 | 251 | exchange to publish the messages on. 252 | ##### `rabbitmq.routing.key` 253 | *Importance:* High 254 | 255 | *Type:* String 256 | 257 | 258 | routing key used for publishing the messages. 259 | ##### `topics` 260 | *Importance:* High 261 | 262 | *Type:* String 263 | 264 | 265 | Kafka topic to read the messages from. 266 | ##### `rabbitmq.host` 267 | *Importance:* High 268 | 269 | *Type:* String 270 | 271 | *Default Value:* localhost 272 | 273 | 274 | The RabbitMQ host to connect to. See `ConnectionFactory.setHost(java.lang.String) `_ 275 | ##### `rabbitmq.password` 276 | *Importance:* High 277 | 278 | *Type:* String 279 | 280 | *Default Value:* guest 281 | 282 | 283 | The password to authenticate to RabbitMQ with. See `ConnectionFactory.setPassword(java.lang.String) `_ 284 | ##### `rabbitmq.username` 285 | *Importance:* High 286 | 287 | *Type:* String 288 | 289 | *Default Value:* guest 290 | 291 | 292 | The username to authenticate to RabbitMQ with. See `ConnectionFactory.setUsername(java.lang.String) `_ 293 | ##### `rabbitmq.virtual.host` 294 | *Importance:* High 295 | 296 | *Type:* String 297 | 298 | *Default Value:* / 299 | 300 | 301 | The virtual host to use when connecting to the broker. 
See `ConnectionFactory.setVirtualHost(java.lang.String) `_ 302 | ##### `rabbitmq.port` 303 | *Importance:* Medium 304 | 305 | *Type:* Int 306 | 307 | *Default Value:* 5672 308 | 309 | 310 | The RabbitMQ port to connect to. See `ConnectionFactory.setPort(int) `_ 311 | ##### `rabbitmq.automatic.recovery.enabled` 312 | *Importance:* Low 313 | 314 | *Type:* Boolean 315 | 316 | *Default Value:* true 317 | 318 | 319 | Enables or disables automatic connection recovery. See `ConnectionFactory.setAutomaticRecoveryEnabled(boolean) `_ 320 | ##### `rabbitmq.connection.timeout.ms` 321 | *Importance:* Low 322 | 323 | *Type:* Int 324 | 325 | *Default Value:* 60000 326 | 327 | 328 | Connection TCP establishment timeout in milliseconds. zero for infinite. See `ConnectionFactory.setConnectionTimeout(int) `_ 329 | ##### `rabbitmq.handshake.timeout.ms` 330 | *Importance:* Low 331 | 332 | *Type:* Int 333 | 334 | *Default Value:* 10000 335 | 336 | 337 | The AMQP0-9-1 protocol handshake timeout, in milliseconds. See `ConnectionFactory.setHandshakeTimeout(int) `_ 338 | ##### `rabbitmq.network.recovery.interval.ms` 339 | *Importance:* Low 340 | 341 | *Type:* Int 342 | 343 | *Default Value:* 10000 344 | 345 | 346 | See `ConnectionFactory.setNetworkRecoveryInterval(long) `_ 347 | ##### `rabbitmq.requested.channel.max` 348 | *Importance:* Low 349 | 350 | *Type:* Int 351 | 352 | *Default Value:* 0 353 | 354 | 355 | Initially requested maximum channel number. Zero for unlimited. See `ConnectionFactory.setRequestedChannelMax(int) `_ 356 | ##### `rabbitmq.requested.frame.max` 357 | *Importance:* Low 358 | 359 | *Type:* Int 360 | 361 | *Default Value:* 0 362 | 363 | 364 | Initially requested maximum frame size, in octets. Zero for unlimited. See `ConnectionFactory.setRequestedFrameMax(int) `_ 365 | ##### `rabbitmq.requested.heartbeat.seconds` 366 | *Importance:* Low 367 | 368 | *Type:* Int 369 | 370 | *Default Value:* 60 371 | 372 | 373 | Set the requested heartbeat timeout. 
Heartbeat frames will be sent at about 1/2 the timeout interval. If server heartbeat timeout is configured to a non-zero value, this method can only be used to lower the value; otherwise any value provided by the client will be used. See `ConnectionFactory.setRequestedHeartbeat(int) `_ 374 | ##### `rabbitmq.shutdown.timeout.ms` 375 | *Importance:* Low 376 | 377 | *Type:* Int 378 | 379 | *Default Value:* 10000 380 | 381 | 382 | Set the shutdown timeout. This is the amount of time that Consumer implementations have to continue working through deliveries (and other Consumer callbacks) after the connection has closed but before the ConsumerWorkService is torn down. If consumers exceed this timeout then any remaining queued deliveries (and other Consumer callbacks, *including* the Consumer's handleShutdownSignal() invocation) will be lost. See `ConnectionFactory.setShutdownTimeout(int) `_ 383 | ##### `rabbitmq.topology.recovery.enabled` 384 | *Importance:* Low 385 | 386 | *Type:* Boolean 387 | 388 | *Default Value:* true 389 | 390 | 391 | Enables or disables topology recovery. See `ConnectionFactory.setTopologyRecoveryEnabled(boolean) `_ 392 | 393 | #### Examples 394 | 395 | ##### Standalone Example 396 | 397 | This configuration is used typically along with [standalone mode](http://docs.confluent.io/current/connect/concepts.html#standalone-workers). 398 | 399 | ```properties 400 | name=RabbitMQSinkConnector1 401 | connector.class=com.github.themeetgroup.kafka.connect.rabbitmq.sink.RabbitMQSinkConnector 402 | tasks.max=1 403 | topics=< Required Configuration > 404 | rabbitmq.exchange=< Required Configuration > 405 | rabbitmq.routing.key=< Required Configuration > 406 | 407 | ``` 408 | 409 | ##### Distributed Example 410 | 411 | This configuration is used typically along with [distributed mode](http://docs.confluent.io/current/connect/concepts.html#distributed-workers). 
412 | Write the following json to `connector.json`, configure all of the required values, and use the command below to 413 | post the configuration to one of the distributed connect worker(s). 414 | 415 | ```json 416 | { 417 | "config" : { 418 | "name" : "RabbitMQSinkConnector1", 419 | "connector.class" : "com.github.themeetgroup.kafka.connect.rabbitmq.sink.RabbitMQSinkConnector", 420 | "tasks.max" : "1", 421 | "topics" : "< Required Configuration >", 422 | "rabbitmq.exchange" : "< Required Configuration >", 423 | "rabbitmq.routing.key" : "< Required Configuration >" 424 | } 425 | } 426 | ``` 427 | 428 | Use curl to post the configuration to one of the Kafka Connect Workers. Change `http://localhost:8083/` to the endpoint of 429 | one of your Kafka Connect worker(s). 430 | 431 | Create a new instance. 432 | ```bash 433 | curl -s -X POST -H 'Content-Type: application/json' --data @connector.json http://localhost:8083/connectors 434 | ``` 435 | 436 | Update an existing instance. 437 | ```bash 438 | curl -s -X PUT -H 'Content-Type: application/json' --data @connector.json http://localhost:8083/connectors/TestSinkConnector1/config 439 | ``` 440 | 441 | 442 | -------------------------------------------------------------------------------- /bin/create-topic.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Copyright © 2016 Jeremy Custenborder (jcustenborder@gmail.com) 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | kafka-topics --create --topic rabbitmq.test --bootstrap-server 127.0.0.1:9092 -------------------------------------------------------------------------------- /bin/debug.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Copyright © 2016 Jeremy Custenborder (jcustenborder@gmail.com) 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | 19 | : ${DEBUG_SUSPEND_FLAG:='n'} 20 | export KAFKA_DEBUG='y' 21 | 22 | set -e 23 | 24 | mvn clean package 25 | connect-standalone config/connect-avro-docker.properties config/RabbitMQSourceConnector.properties -------------------------------------------------------------------------------- /bin/read-topic-with-headers.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Copyright © 2016 Jeremy Custenborder (jcustenborder@gmail.com) 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | kafkacat -b localhost:9092 -t rabbitmq.test -C \ 19 | -f '\nKey (%K bytes): %k 20 | Value (%S bytes): %s 21 | Timestamp: %T 22 | Partition: %p 23 | Offset: %o 24 | Headers: %h\n' -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | 17 | version: '2' 18 | services: 19 | zookeeper: 20 | image: confluentinc/cp-zookeeper:6.0.0 21 | hostname: zookeeper 22 | container_name: zookeeper 23 | ports: 24 | - "2181:2181" 25 | environment: 26 | ZOOKEEPER_CLIENT_PORT: 2181 27 | ZOOKEEPER_TICK_TIME: 2000 28 | 29 | broker: 30 | image: confluentinc/cp-server:6.0.0 31 | hostname: broker 32 | container_name: broker 33 | depends_on: 34 | - zookeeper 35 | ports: 36 | - "9092:9092" 37 | - "9101:9101" 38 | environment: 39 | KAFKA_BROKER_ID: 1 40 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' 41 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT 42 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092 43 | KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter 44 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 45 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 46 | KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1 47 | KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1 48 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 49 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 50 | KAFKA_JMX_PORT: 9101 51 | KAFKA_JMX_HOSTNAME: localhost 52 | KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: http://schema-registry:8081 53 | CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:29092 54 | CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1 55 | CONFLUENT_METRICS_ENABLE: 'true' 56 | CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous' 57 | 58 | schema-registry: 59 | image: confluentinc/cp-schema-registry:6.0.0 60 | hostname: schema-registry 61 | container_name: schema-registry 62 | depends_on: 63 | - broker 64 | ports: 65 | - "8081:8081" 66 | environment: 67 | SCHEMA_REGISTRY_HOST_NAME: schema-registry 68 | SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:29092' 69 | rabbitmq: 70 | image: rabbitmq:3-management 71 | ports: 72 | - '15672:15672' 73 | - '5672:5672' -------------------------------------------------------------------------------- /pom.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | 5 | io.confluent 6 | kafka-connect-parent 7 | 4.1.0 8 | 9 | com.themeetgroup.kafka.connect 10 | kafka-connect-rabbitmq 11 | 0.1.0 12 | kafka-connect-rabbitmq 13 | A Kafka Connect connector reading and writing data from RabbitMQ. 14 | https://github.com/themeetgroup/kafka-connect-rabbitmq 15 | 2017 16 | 17 | 18 | 19 | The Apache License, Version 2.0 20 | https://www.apache.org/licenses/LICENSE-2.0 21 | repo 22 | 23 | 24 | 25 | 26 | 27 | jcustenborder 28 | Jeremy Custenborder 29 | https://github.com/jcustenborder 30 | 31 | Committer 32 | 33 | 34 | 35 | insidn 36 | Jan Uyttenhove 37 | https://github.com/insidin 38 | 39 | Committer 40 | 41 | 42 | 43 | 44 | 45 | scm:git:https://github.com/themeetgroup/kafka-connect-rabbitmq.git 46 | scm:git:git@github.com:themeetgroup/kafka-connect-rabbitmq.git 47 | https://github.com/themeetgroup/kafka-connect-rabbitmq 48 | 49 | 50 | 51 | github 52 | https://github.com/themeetgroup/kafka-connect-rabbitmq/issues 53 | 54 | 55 | 56 | 5.10.0 57 | 3.3.0 58 | 59 | 60 | 61 | 62 | com.rabbitmq 63 | amqp-client 64 | ${rabbitmq.version} 65 | 66 | 67 | com.github.jcustenborder.kafka.connect 68 | connect-utils-testing-data 69 | ${connect-utils.version} 70 | 71 | 72 | org.apache.avro 73 | avro 74 | 1.8.2 75 | 76 | 77 | io.confluent 78 | kafka-avro-serializer 79 | 5.1.0 80 | 81 | 82 | 83 | 84 | 85 | org.apache.maven.plugins 86 | maven-javadoc-plugin 87 | 88 | 8 89 | false 90 | 91 | 92 | 93 | io.confluent 94 | kafka-connect-maven-plugin 95 | 0.11.2 96 | 97 | 98 | hub 99 | 100 | kafka-connect 101 | 102 | 103 | themeetgroup 104 | The Meet Group 105 | themeetgroup 106 | kafka-connect-docker 107 | 108 | sink 109 | source 110 | transform 111 | 112 | 113 | RabbitMQ 114 | Messaging 115 | 116 | Kafka Connect RabbitMQ 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | org.apache.maven.plugins 128 | maven-checkstyle-plugin 129 | 3.1.1 130 | 131 
| 132 | 133 | checkstyle 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/CommonRabbitMQConnectorConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.themeetgroup.kafka.connect.rabbitmq; 17 | 18 | import com.rabbitmq.client.ConnectionFactory; 19 | import org.apache.kafka.common.config.AbstractConfig; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | 22 | import java.security.KeyManagementException; 23 | import java.security.NoSuchAlgorithmException; 24 | import java.util.Map; 25 | 26 | public abstract class CommonRabbitMQConnectorConfig extends AbstractConfig { 27 | 28 | public static final String USERNAME_CONFIG = "rabbitmq.username"; 29 | public static final String PASSWORD_CONFIG = "rabbitmq.password"; 30 | public static final String VIRTUAL_HOST_CONFIG = "rabbitmq.virtual.host"; 31 | public static final String REQUESTED_CHANNEL_MAX_CONFIG = "rabbitmq.requested.channel.max"; 32 | public static final String REQUESTED_FRAME_MAX_CONFIG = "rabbitmq.requested.frame.max"; 33 | public static final String CONNECTION_TIMEOUT_CONFIG = "rabbitmq.connection.timeout.ms"; 34 | public static final String HANDSHAKE_TIMEOUT_CONFIG = "rabbitmq.handshake.timeout.ms"; 35 | public static final String SHUTDOWN_TIMEOUT_CONFIG = "rabbitmq.shutdown.timeout.ms"; 36 | public static final String REQUESTED_HEARTBEAT_CONFIG = "rabbitmq.requested.heartbeat.seconds"; 37 | public static final String AUTOMATIC_RECOVERY_ENABLED_CONFIG = "rabbitmq.automatic.recovery.enabled"; 38 | public static final String TOPOLOGY_RECOVERY_ENABLED_CONFIG = "rabbitmq.topology.recovery.enabled"; 39 | public static final String NETWORK_RECOVERY_INTERVAL_CONFIG = "rabbitmq.network.recovery.interval.ms"; 40 | public static final String HOST_CONFIG = "rabbitmq.host"; 41 | public static final String PORT_CONFIG = "rabbitmq.port"; 42 | public static final String USE_SSL = "rabbitmq.ssl"; 43 | static final String HOST_DOC = "The RabbitMQ host to connect to. See `ConnectionFactory.setHost(java.lang.String) `_"; 44 | static final String USERNAME_DOC = "The username to authenticate to RabbitMQ with. 
See `ConnectionFactory.setUsername(java.lang.String) `_"; 45 | static final String PASSWORD_DOC = "The password to authenticate to RabbitMQ with. See `ConnectionFactory.setPassword(java.lang.String) `_"; 46 | static final String VIRTUAL_HOST_DOC = "The virtual host to use when connecting to the broker. See `ConnectionFactory.setVirtualHost(java.lang.String) `_"; 47 | static final String REQUESTED_CHANNEL_MAX_DOC = "Initially requested maximum channel number. Zero for unlimited. See `ConnectionFactory.setRequestedChannelMax(int) `_"; 48 | static final String REQUESTED_FRAME_MAX_DOC = "Initially requested maximum frame size, in octets. Zero for unlimited. See `ConnectionFactory.setRequestedFrameMax(int) `_"; 49 | static final String CONNECTION_TIMEOUT_DOC = "Connection TCP establishment timeout in milliseconds. zero for infinite. See `ConnectionFactory.setConnectionTimeout(int) `_"; 50 | static final String HANDSHAKE_TIMEOUT_DOC = "The AMQP0-9-1 protocol handshake timeout, in milliseconds. See `ConnectionFactory.setHandshakeTimeout(int) `_"; 51 | static final String SHUTDOWN_TIMEOUT_DOC = "Set the shutdown timeout. This is the amount of time that Consumer " + 52 | "implementations have to continue working through deliveries (and other Consumer callbacks) after the connection " + 53 | "has closed but before the ConsumerWorkService is torn down. If consumers exceed this timeout then any remaining " + 54 | "queued deliveries (and other Consumer callbacks, *including* the Consumer's handleShutdownSignal() invocation) " + 55 | "will be lost. " + 56 | "See `ConnectionFactory.setShutdownTimeout(int) `_"; 57 | static final String REQUESTED_HEARTBEAT_DOC = "Set the requested heartbeat timeout. Heartbeat frames will be sent " + 58 | "at about 1/2 the timeout interval. If server heartbeat timeout is configured to a non-zero value, this method " + 59 | "can only be used to lower the value; otherwise any value provided by the client will be used. 
" + 60 | "See `ConnectionFactory.setRequestedHeartbeat(int) `_"; 61 | static final String AUTOMATIC_RECOVERY_ENABLED_DOC = "Enables or disables automatic connection recovery. See `ConnectionFactory.setAutomaticRecoveryEnabled(boolean) `_"; 62 | static final String TOPOLOGY_RECOVERY_ENABLED_DOC = "Enables or disables topology recovery. See `ConnectionFactory.setTopologyRecoveryEnabled(boolean) `_"; 63 | static final String NETWORK_RECOVERY_INTERVAL_DOC = "See `ConnectionFactory.setNetworkRecoveryInterval(long) `_"; 64 | static final String PORT_DOC = "The RabbitMQ port to connect to. See `ConnectionFactory.setPort(int) `_"; 65 | static final String USE_SSL_DOC = "Enable SSL/TLS"; 66 | public final String username; 67 | public final String password; 68 | public final String virtualHost; 69 | public final int requestedChannelMax; 70 | public final int requestedFrameMax; 71 | public final int connectionTimeout; 72 | public final int handshakeTimeout; 73 | public final int shutdownTimeout; 74 | public final int requestedHeartbeat; 75 | public final boolean automaticRecoveryEnabled; 76 | public final boolean topologyRecoveryEnabled; 77 | public final long networkRecoveryInterval; 78 | public final String host; 79 | public final int port; 80 | public final boolean useSsl; 81 | public final ConnectionFactory connectionFactory; 82 | 83 | public CommonRabbitMQConnectorConfig(ConfigDef definition, Map originals) { 84 | super(definition, originals); 85 | this.username = this.getString(USERNAME_CONFIG); 86 | this.password = this.getString(PASSWORD_CONFIG); 87 | this.virtualHost = this.getString(VIRTUAL_HOST_CONFIG); 88 | this.requestedChannelMax = this.getInt(REQUESTED_CHANNEL_MAX_CONFIG); 89 | this.requestedFrameMax = this.getInt(REQUESTED_FRAME_MAX_CONFIG); 90 | this.connectionTimeout = this.getInt(CONNECTION_TIMEOUT_CONFIG); 91 | this.handshakeTimeout = this.getInt(HANDSHAKE_TIMEOUT_CONFIG); 92 | this.shutdownTimeout = this.getInt(SHUTDOWN_TIMEOUT_CONFIG); 93 | 
this.requestedHeartbeat = this.getInt(REQUESTED_HEARTBEAT_CONFIG); 94 | this.automaticRecoveryEnabled = this.getBoolean(AUTOMATIC_RECOVERY_ENABLED_CONFIG); 95 | this.topologyRecoveryEnabled = this.getBoolean(TOPOLOGY_RECOVERY_ENABLED_CONFIG); 96 | this.networkRecoveryInterval = this.getInt(NETWORK_RECOVERY_INTERVAL_CONFIG); 97 | this.host = this.getString(HOST_CONFIG); 98 | this.port = this.getInt(PORT_CONFIG); 99 | this.useSsl = this.getBoolean(USE_SSL); 100 | this.connectionFactory = connectionFactory(); 101 | } 102 | 103 | public static ConfigDef config() { 104 | return new ConfigDef() 105 | .define(HOST_CONFIG, ConfigDef.Type.STRING, ConnectionFactory.DEFAULT_HOST, ConfigDef.Importance.HIGH, HOST_DOC) 106 | .define(USERNAME_CONFIG, ConfigDef.Type.STRING, ConnectionFactory.DEFAULT_USER, ConfigDef.Importance.HIGH, USERNAME_DOC) 107 | .define(PASSWORD_CONFIG, ConfigDef.Type.STRING, ConnectionFactory.DEFAULT_PASS, ConfigDef.Importance.HIGH, PASSWORD_DOC) 108 | .define(VIRTUAL_HOST_CONFIG, ConfigDef.Type.STRING, ConnectionFactory.DEFAULT_VHOST, ConfigDef.Importance.HIGH, VIRTUAL_HOST_DOC) 109 | .define(REQUESTED_CHANNEL_MAX_CONFIG, ConfigDef.Type.INT, ConnectionFactory.DEFAULT_CHANNEL_MAX, ConfigDef.Importance.LOW, REQUESTED_CHANNEL_MAX_DOC) 110 | .define(REQUESTED_FRAME_MAX_CONFIG, ConfigDef.Type.INT, ConnectionFactory.DEFAULT_FRAME_MAX, ConfigDef.Importance.LOW, REQUESTED_FRAME_MAX_DOC) 111 | .define(CONNECTION_TIMEOUT_CONFIG, ConfigDef.Type.INT, ConnectionFactory.DEFAULT_CONNECTION_TIMEOUT, ConfigDef.Importance.LOW, CONNECTION_TIMEOUT_DOC) 112 | .define(HANDSHAKE_TIMEOUT_CONFIG, ConfigDef.Type.INT, ConnectionFactory.DEFAULT_HANDSHAKE_TIMEOUT, ConfigDef.Importance.LOW, HANDSHAKE_TIMEOUT_DOC) 113 | .define(SHUTDOWN_TIMEOUT_CONFIG, ConfigDef.Type.INT, ConnectionFactory.DEFAULT_SHUTDOWN_TIMEOUT, ConfigDef.Importance.LOW, SHUTDOWN_TIMEOUT_DOC) 114 | .define(REQUESTED_HEARTBEAT_CONFIG, ConfigDef.Type.INT, ConnectionFactory.DEFAULT_HEARTBEAT, ConfigDef.Importance.LOW, 
REQUESTED_HEARTBEAT_DOC) 115 | .define(AUTOMATIC_RECOVERY_ENABLED_CONFIG, ConfigDef.Type.BOOLEAN, true, ConfigDef.Importance.LOW, AUTOMATIC_RECOVERY_ENABLED_DOC) 116 | .define(TOPOLOGY_RECOVERY_ENABLED_CONFIG, ConfigDef.Type.BOOLEAN, true, ConfigDef.Importance.LOW, TOPOLOGY_RECOVERY_ENABLED_DOC) 117 | .define(NETWORK_RECOVERY_INTERVAL_CONFIG, ConfigDef.Type.INT, 10000, ConfigDef.Importance.LOW, NETWORK_RECOVERY_INTERVAL_DOC) 118 | .define(PORT_CONFIG, ConfigDef.Type.INT, ConnectionFactory.DEFAULT_AMQP_PORT, ConfigDef.Importance.MEDIUM, PORT_DOC) 119 | .define(USE_SSL, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.HIGH, USE_SSL_DOC); 120 | } 121 | 122 | public final ConnectionFactory connectionFactory() { 123 | ConnectionFactory connectionFactory = new ConnectionFactory(); 124 | 125 | connectionFactory.setHost(this.host); 126 | connectionFactory.setUsername(this.username); 127 | connectionFactory.setPassword(this.password); 128 | connectionFactory.setVirtualHost(this.virtualHost); 129 | connectionFactory.setRequestedChannelMax(this.requestedChannelMax); 130 | connectionFactory.setRequestedFrameMax(this.requestedFrameMax); 131 | connectionFactory.setConnectionTimeout(this.connectionTimeout); 132 | connectionFactory.setHandshakeTimeout(this.handshakeTimeout); 133 | connectionFactory.setShutdownTimeout(this.shutdownTimeout); 134 | connectionFactory.setRequestedHeartbeat(this.requestedHeartbeat); 135 | connectionFactory.setAutomaticRecoveryEnabled(this.automaticRecoveryEnabled); 136 | connectionFactory.setTopologyRecoveryEnabled(this.topologyRecoveryEnabled); 137 | connectionFactory.setNetworkRecoveryInterval(this.networkRecoveryInterval); 138 | 139 | connectionFactory.setPort(this.port); 140 | if (this.useSsl) 141 | try { 142 | connectionFactory.useSslProtocol(); 143 | } catch (NoSuchAlgorithmException | KeyManagementException e) { 144 | e.printStackTrace(); 145 | } 146 | 147 | return connectionFactory; 148 | } 149 | } 150 | 
-------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/sink/RabbitMQSinkConnector.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Kyumars Sheykh Esmaili (kyumarss@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.github.themeetgroup.kafka.connect.rabbitmq.sink; 18 | 19 | import java.util.ArrayList; 20 | import java.util.List; 21 | import java.util.Map; 22 | 23 | import org.apache.kafka.common.config.ConfigDef; 24 | import org.apache.kafka.connect.connector.Task; 25 | import org.apache.kafka.connect.sink.SinkConnector; 26 | 27 | import com.github.jcustenborder.kafka.connect.utils.VersionUtil; 28 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 29 | 30 | @Description("Connector is used to read data from a Kafka topic and publish it on a RabbitMQ exchange and routing key pair.") 31 | public class RabbitMQSinkConnector extends SinkConnector { 32 | Map settings; 33 | RabbitMQSinkConnectorConfig config; 34 | 35 | 36 | 37 | @Override 38 | public ConfigDef config() { 39 | return RabbitMQSinkConnectorConfig.config(); 40 | } 41 | 42 | @Override 43 | public void start(Map settings) { 44 | this.config = new RabbitMQSinkConnectorConfig(settings); 45 | this.settings = settings; 46 | } 47 | 48 | @Override 49 | public void 
stop() { 50 | 51 | } 52 | 53 | @Override 54 | public Class taskClass() { 55 | return RabbitMQSinkTask.class; 56 | } 57 | 58 | @Override 59 | public List> taskConfigs(int maxTasks) { 60 | List> taskConfigs = new ArrayList<>(); 61 | for (int i = 0; i < maxTasks; i++) { 62 | taskConfigs.add(this.settings); 63 | } 64 | return taskConfigs; 65 | 66 | } 67 | 68 | @Override 69 | public String version() { 70 | return VersionUtil.version(this.getClass()); 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/sink/RabbitMQSinkConnectorConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Kyumars Sheykh Esmaili (kyumarss@gmail.com) 3 | *

4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | *

8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | *

10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.github.themeetgroup.kafka.connect.rabbitmq.sink; 18 | 19 | import java.util.Map; 20 | 21 | import com.github.themeetgroup.kafka.connect.rabbitmq.CommonRabbitMQConnectorConfig; 22 | import org.apache.kafka.common.config.ConfigDef; 23 | 24 | import com.github.jcustenborder.kafka.connect.utils.template.StructTemplate; 25 | 26 | public class RabbitMQSinkConnectorConfig extends CommonRabbitMQConnectorConfig { 27 | static final String KAFKA_TOPIC_TEMPLATE = "kafkaTopicTemplate"; 28 | public static final String TOPIC_CONF = "topics"; 29 | static final String TOPIC_DOC = "Kafka topic to read the messages from."; 30 | 31 | //TODO: add the support for queue destinations 32 | 33 | public static final String EXCHANGE_CONF = "rabbitmq.exchange"; 34 | static final String EXCHANGE_DOC = "exchange to publish the messages on."; 35 | 36 | public static final String ROUTING_KEY_CONF = "rabbitmq.routing.key"; 37 | static final String ROUTING_KEY_DOC = "routing key used for publishing the messages."; 38 | 39 | 40 | public static final String HEADER_CONF = "rabbitmq.headers"; 41 | public static final String HEADER_CONF_DOC = "Headers to set for outbounf messages. 
Set with `headername1`:`headervalue1`,`headername2`:`headervalue2`"; 42 | //TODO: include other config variables here 43 | 44 | public final StructTemplate kafkaTopic; 45 | public final String exchange; 46 | public final String routingKey; 47 | 48 | public RabbitMQSinkConnectorConfig(Map settings) { 49 | super(config(), settings); 50 | final String kafkaTopicFormat = this.getString(TOPIC_CONF); 51 | this.kafkaTopic = new StructTemplate(); 52 | this.kafkaTopic.addTemplate(KAFKA_TOPIC_TEMPLATE, kafkaTopicFormat); 53 | this.exchange = this.getString(EXCHANGE_CONF); 54 | this.routingKey = this.getString(ROUTING_KEY_CONF); 55 | } 56 | 57 | public static ConfigDef config() { 58 | return CommonRabbitMQConnectorConfig.config() 59 | .define(TOPIC_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, TOPIC_DOC) 60 | .define(EXCHANGE_CONF, ConfigDef.Type.STRING, "", ConfigDef.Importance.MEDIUM, EXCHANGE_DOC) 61 | .define(ROUTING_KEY_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, ROUTING_KEY_DOC) 62 | .define(HEADER_CONF, ConfigDef.Type.STRING, null, null, ConfigDef.Importance.LOW, HEADER_CONF_DOC); 63 | 64 | 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/sink/RabbitMQSinkHeaderParser.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Kyumars Sheykh Esmaili (kyumarss@gmail.com) 3 | *

4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | *

8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | *

10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.github.themeetgroup.kafka.connect.rabbitmq.sink; 18 | 19 | import com.rabbitmq.client.AMQP; 20 | 21 | import java.util.AbstractMap; 22 | import java.util.Arrays; 23 | import java.util.HashMap; 24 | import java.util.Map; 25 | import java.util.UUID; 26 | import java.util.function.Supplier; 27 | import java.util.stream.Collectors; 28 | 29 | 30 | public class RabbitMQSinkHeaderParser { 31 | private static final String HEADER_SEPARATOR = ","; 32 | private static final String KEY_VALUE_SEPARATOR = ":"; 33 | 34 | 35 | private static final Map> DEFAULT_HEADERS = new HashMap<>(); 36 | 37 | static { 38 | DEFAULT_HEADERS.put("JMSExpiration", () -> 0); 39 | DEFAULT_HEADERS.put("JMSMessageID", () -> UUID.randomUUID().toString()); 40 | DEFAULT_HEADERS.put("JMSPriority", () -> 4); 41 | DEFAULT_HEADERS.put("JMSTimestamp", System::currentTimeMillis); 42 | DEFAULT_HEADERS.put("JMSType", () -> "TextMessage"); 43 | 44 | } 45 | 46 | static AMQP.BasicProperties parse(final String headerConfig) { 47 | final Map headerTemp = DEFAULT_HEADERS.entrySet() 48 | .stream() 49 | .map(entry -> new Pair<>(entry.getKey(), entry.getValue().get())) 50 | .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); 51 | if (headerConfig != null && !headerConfig.isEmpty()) { 52 | final Map headers = Arrays.stream(headerConfig.split(HEADER_SEPARATOR)) 53 | .map(header -> header.split(KEY_VALUE_SEPARATOR)) 54 | .map(Pair::apply) 55 | .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); 56 | headers.forEach((k, v) -> headerTemp.merge(k, v, (o, n) -> n)); 57 | } 58 | return new AMQP.BasicProperties.Builder().headers(headerTemp).build(); 59 | 
} 60 | 61 | private static final class Pair extends AbstractMap.SimpleEntry { 62 | 63 | private Pair(K key, V value) { 64 | super(key, value); 65 | } 66 | 67 | static Pair apply(String[] array2) { 68 | if (array2.length == 2) { 69 | return new Pair<>(array2[0], array2[1]); 70 | } else { 71 | throw new RuntimeException("Wrong header format"); 72 | } 73 | } 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/sink/RabbitMQSinkTask.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Kyumars Sheykh Esmaili (kyumarss@gmail.com) 3 | *

4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | *

8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | *

10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.github.themeetgroup.kafka.connect.rabbitmq.sink; 18 | 19 | import com.github.jcustenborder.kafka.connect.utils.VersionUtil; 20 | import com.rabbitmq.client.Channel; 21 | import com.rabbitmq.client.Connection; 22 | import com.rabbitmq.client.ConnectionFactory; 23 | import org.apache.kafka.connect.errors.ConnectException; 24 | import org.apache.kafka.connect.errors.RetriableException; 25 | import org.apache.kafka.connect.sink.SinkRecord; 26 | import org.apache.kafka.connect.sink.SinkTask; 27 | import org.slf4j.Logger; 28 | import org.slf4j.LoggerFactory; 29 | 30 | import java.io.IOException; 31 | import java.util.Collection; 32 | import java.util.Map; 33 | import java.util.concurrent.TimeoutException; 34 | 35 | import static com.github.themeetgroup.kafka.connect.rabbitmq.sink.RabbitMQSinkConnectorConfig.HEADER_CONF; 36 | 37 | public class RabbitMQSinkTask extends SinkTask { 38 | private static final Logger log = LoggerFactory.getLogger(RabbitMQSinkTask.class); 39 | RabbitMQSinkConnectorConfig config; 40 | 41 | Channel channel; 42 | Connection connection; 43 | 44 | 45 | @Override 46 | public String version() { 47 | return VersionUtil.version(this.getClass()); 48 | } 49 | 50 | @Override 51 | public void put(Collection sinkRecords) { 52 | for (SinkRecord record : sinkRecords) { 53 | log.trace("current sinkRecord value: " + record.value()); 54 | if (!(record.value() instanceof byte[])) { 55 | throw new ConnectException("the value of the record has an invalid type (must be of type byte[])"); 56 | } 57 | try { 58 | channel.basicPublish(this.config.exchange, this.config.routingKey, 59 | 
RabbitMQSinkHeaderParser.parse(config.getString(HEADER_CONF)), (byte[]) record.value()); 60 | } catch (IOException e) { 61 | log.error("There was an error while publishing the outgoing message to RabbitMQ"); 62 | throw new RetriableException(e); 63 | } 64 | } 65 | } 66 | 67 | @Override 68 | public void start(Map settings) { 69 | this.config = new RabbitMQSinkConnectorConfig(settings); 70 | ConnectionFactory connectionFactory = this.config.connectionFactory(); 71 | try { 72 | log.info("Opening connection to {}:{}/{} (SSL: {})", this.config.host, this.config.port, this.config.virtualHost, this.config.useSsl); 73 | this.connection = connectionFactory.newConnection(); 74 | } catch (IOException | TimeoutException e) { 75 | throw new ConnectException(e); 76 | } 77 | 78 | try { 79 | log.info("Creating Channel"); 80 | this.channel = this.connection.createChannel(); 81 | log.info("Declaring queue"); 82 | this.channel.queueDeclare(this.config.routingKey, true, false, false, null); 83 | } catch (IOException e) { 84 | throw new ConnectException(e); 85 | } 86 | } 87 | 88 | @Override 89 | public void stop() { 90 | try { 91 | this.connection.close(); 92 | } catch (IOException e) { 93 | log.error("Exception thrown while closing connection.", e); 94 | } 95 | } 96 | 97 | } 98 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/source/ConnectConsumer.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.themeetgroup.kafka.connect.rabbitmq.source; 17 | 18 | import com.github.themeetgroup.kafka.connect.rabbitmq.source.data.SourceRecordBuilder; 19 | import com.github.jcustenborder.kafka.connect.utils.data.SourceRecordConcurrentLinkedDeque; 20 | import com.rabbitmq.client.AMQP; 21 | import com.rabbitmq.client.Consumer; 22 | import com.rabbitmq.client.Envelope; 23 | import com.rabbitmq.client.ShutdownSignalException; 24 | import org.apache.kafka.connect.source.SourceRecord; 25 | import org.slf4j.Logger; 26 | import org.slf4j.LoggerFactory; 27 | 28 | import java.lang.reflect.InvocationTargetException; 29 | 30 | class ConnectConsumer implements Consumer { 31 | 32 | private static final Logger log = LoggerFactory.getLogger(ConnectConsumer.class); 33 | private final SourceRecordConcurrentLinkedDeque records; 34 | private final SourceRecordBuilder sourceRecordBuilder; 35 | 36 | ConnectConsumer(SourceRecordConcurrentLinkedDeque records, RabbitMQSourceConnectorConfig config) throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException { 37 | this.records = records; 38 | this.sourceRecordBuilder = new SourceRecordBuilder(config); 39 | } 40 | 41 | @Override 42 | public void handleConsumeOk(String s) { 43 | log.trace("handleConsumeOk({})", s); 44 | } 45 | 46 | @Override 47 | public void handleCancelOk(String s) { 48 | log.trace("handleCancelOk({})", s); 49 | } 50 | 51 | @Override 52 | public void handleCancel(String s) { 53 | 
log.trace("handleCancel({})", s); 54 | } 55 | 56 | @Override 57 | public void handleShutdownSignal(String s, ShutdownSignalException e) { 58 | log.trace("handleShutdownSignal({}, {})", s, e); 59 | } 60 | 61 | @Override 62 | public void handleRecoverOk(String s) { 63 | log.trace("handleRecoverOk({})", s); 64 | } 65 | 66 | @Override 67 | public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] bytes) { 68 | log.trace("handleDelivery({})", consumerTag); 69 | 70 | SourceRecord sourceRecord = this.sourceRecordBuilder.sourceRecord(consumerTag, envelope, basicProperties, bytes); 71 | this.records.add(sourceRecord); 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/source/RabbitMQSourceConnector.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.themeetgroup.kafka.connect.rabbitmq.source; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.VersionUtil; 19 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 20 | import com.github.jcustenborder.kafka.connect.utils.config.TaskConfigs; 21 | import org.apache.kafka.common.config.ConfigDef; 22 | import org.apache.kafka.connect.connector.Task; 23 | import org.apache.kafka.connect.source.SourceConnector; 24 | 25 | import java.util.List; 26 | import java.util.Map; 27 | 28 | @Description("Connector is used to read from a RabbitMQ Queue or Topic.") 29 | public class RabbitMQSourceConnector extends SourceConnector { 30 | 31 | private Map settings; 32 | 33 | @Override 34 | public String version() { 35 | return VersionUtil.version(this.getClass()); 36 | } 37 | 38 | @Override 39 | public void start(Map settings) { 40 | this.settings = settings; 41 | } 42 | 43 | @Override 44 | public Class taskClass() { 45 | return RabbitMQSourceTask.class; 46 | } 47 | 48 | @Override 49 | public List> taskConfigs(int maxTasks) { 50 | return TaskConfigs.multiple(this.settings, maxTasks); 51 | } 52 | 53 | @Override 54 | public void stop() { } 55 | 56 | @Override 57 | public ConfigDef config() { 58 | return RabbitMQSourceConnectorConfig.config(); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/source/RabbitMQSourceConnectorConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
package com.github.themeetgroup.kafka.connect.rabbitmq.source;

import com.github.themeetgroup.kafka.connect.rabbitmq.CommonRabbitMQConnectorConfig;
import org.apache.kafka.common.config.ConfigDef;

import java.util.List;
import java.util.Map;

/**
 * Configuration for the RabbitMQ source connector: the target Kafka topic,
 * the RabbitMQ queue(s) to consume, channel prefetch (QoS) settings and an
 * optional custom message converter class.
 */
public class RabbitMQSourceConnectorConfig extends CommonRabbitMQConnectorConfig {

  public static final String TOPIC_CONF = "kafka.topic";
  public static final String TOPIC_DOC = "Kafka topic to write the messages to.";

  public static final String QUEUE_CONF = "rabbitmq.queue";
  public static final String QUEUE_DOC = "RabbitMQ queue(s) to read messages from.";

  public static final String PREFETCH_COUNT_CONF = "rabbitmq.prefetch.count";
  public static final String PREFETCH_COUNT_DOC = "Maximum number of messages that the server will deliver, 0 if unlimited. " +
      "See `Channel.basicQos(int, boolean) `_";

  public static final String PREFETCH_GLOBAL_CONF = "rabbitmq.prefetch.global";
  public static final String PREFETCH_GLOBAL_DOC = "True if the settings should be applied to the entire channel rather " +
      "than each consumer. " +
      "See `Channel.basicQos(int, boolean) `_";

  public static final String MESSAGE_CONVERTER_CLASSNAME_CONF = "message.converter";
  public static final String MESSAGE_CONVERTER_CLASSNAME_DOC = "Converter to compose the Kafka message. Optional, defaults to " +
      "com.github.themeetgroup.kafka.connect.rabbitmq.source.data.MessageConverter";

  /** Kafka topic all consumed messages are written to. */
  public final String kafkaTopic;
  /** RabbitMQ queues this task consumes from. */
  public final List<String> queues;
  /** Channel prefetch count; 0 means unlimited. */
  public final int prefetchCount;
  /** Whether the prefetch setting applies channel-wide rather than per consumer. */
  public final boolean prefetchGlobal;
  /** Fully-qualified class name of the message converter, or null for the default. */
  public final String messageConverter;

  public RabbitMQSourceConnectorConfig(Map<String, String> settings) {
    super(config(), settings);

    this.kafkaTopic = this.getString(TOPIC_CONF);
    this.queues = this.getList(QUEUE_CONF);
    this.prefetchCount = this.getInt(PREFETCH_COUNT_CONF);
    this.prefetchGlobal = this.getBoolean(PREFETCH_GLOBAL_CONF);
    this.messageConverter = this.getString(MESSAGE_CONVERTER_CLASSNAME_CONF);
  }

  /**
   * @return the source connector's {@link ConfigDef}, extending the common
   *     RabbitMQ connection settings with source-specific keys.
   */
  public static ConfigDef config() {
    return CommonRabbitMQConnectorConfig.config()
        .define(TOPIC_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, TOPIC_DOC)
        .define(PREFETCH_COUNT_CONF, ConfigDef.Type.INT, 0, ConfigDef.Importance.MEDIUM, PREFETCH_COUNT_DOC)
        .define(PREFETCH_GLOBAL_CONF, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.MEDIUM, PREFETCH_GLOBAL_DOC)
        .define(QUEUE_CONF, ConfigDef.Type.LIST, ConfigDef.Importance.HIGH, QUEUE_DOC)
        .define(MESSAGE_CONVERTER_CLASSNAME_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.MEDIUM, MESSAGE_CONVERTER_CLASSNAME_DOC);
  }
}
package com.github.themeetgroup.kafka.connect.rabbitmq.source;

import com.github.jcustenborder.kafka.connect.utils.VersionUtil;
import com.github.jcustenborder.kafka.connect.utils.data.SourceRecordConcurrentLinkedDeque;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.errors.RetriableException;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.source.SourceTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;

/**
 * Source task that opens a single AMQP connection/channel, declares and
 * consumes the configured queues, and buffers deliveries (via
 * {@link ConnectConsumer}) until the Connect framework polls them. Messages
 * are acked only after Kafka has committed the corresponding record.
 */
public class RabbitMQSourceTask extends SourceTask {

  private static final Logger log = LoggerFactory.getLogger(RabbitMQSourceTask.class);
  // Filled by the AMQP consumer callback thread, drained by poll().
  private SourceRecordConcurrentLinkedDeque records;
  private Channel channel;
  private Connection connection;

  @Override
  public String version() {
    return VersionUtil.version(this.getClass());
  }

  @Override
  public void start(Map<String, String> settings) {
    RabbitMQSourceConnectorConfig config = new RabbitMQSourceConnectorConfig(settings);
    this.records = new SourceRecordConcurrentLinkedDeque();
    ConnectConsumer consumer;
    try {
      consumer = new ConnectConsumer(this.records, config);
    } catch (Exception e) {
      throw new ConnectException(e);
    }

    ConnectionFactory connectionFactory = config.connectionFactory();
    try {
      log.info("Opening connection to {}:{}/{} (SSL: {})", config.host, config.port, config.virtualHost, config.useSsl);
      this.connection = connectionFactory.newConnection();
    } catch (IOException | TimeoutException e) {
      throw new ConnectException(e);
    }

    try {
      log.info("Creating Channel");
      this.channel = this.connection.createChannel();
      // basicQos is channel-scoped and must be applied BEFORE any consumer is
      // registered; otherwise the broker may push an unbounded backlog to the
      // consumer before the prefetch limit takes effect. Setting it once here
      // also avoids the redundant per-queue calls.
      log.info("Setting channel.basicQos({}, {});", config.prefetchCount, config.prefetchGlobal);
      this.channel.basicQos(config.prefetchCount, config.prefetchGlobal);
      log.info("Declaring queues");
      for (String queue : config.queues) {
        // durable=true, exclusive=false, autoDelete=false, no extra arguments
        this.channel.queueDeclare(queue, true, false, false, null);
      }
    } catch (IOException e) {
      throw new ConnectException(e);
    }

    for (String queue : config.queues) {
      try {
        log.info("Starting consumer");
        this.channel.basicConsume(queue, consumer);
      } catch (IOException ex) {
        throw new ConnectException(ex);
      }
    }
  }

  /**
   * Acks the delivery once Kafka has durably accepted the record. The
   * delivery tag was stashed in the record's source offset by
   * {@code SourceRecordBuilder}.
   */
  @Override
  public void commitRecord(SourceRecord record) {
    Long deliveryTag = (Long) record.sourceOffset().get("deliveryTag");
    try {
      this.channel.basicAck(deliveryTag, false);
    } catch (IOException e) {
      // Let the framework retry the ack rather than failing the task.
      throw new RetriableException(e);
    }
  }

  @Override
  public List<SourceRecord> poll() throws InterruptedException {
    List<SourceRecord> batch = new ArrayList<>(4096);

    // Block (politely) until the consumer thread has buffered at least one record.
    while (!this.records.drain(batch)) {
      Thread.sleep(1000);
    }

    return batch;
  }

  @Override
  public void stop() {
    try {
      // Closing the connection also closes its channels. Guard against a
      // start() that failed before the connection was established.
      if (this.connection != null) {
        this.connection.close();
      }
    } catch (IOException e) {
      log.error("Exception thrown while closing connection.", e);
    }
  }
}
-------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/source/data/BytesSourceMessageConverter.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2020 Jan Uyttenhove (jan@insidin.com) 3 | *

4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | *

8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | *

10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.themeetgroup.kafka.connect.rabbitmq.source.data; 17 | 18 | import com.rabbitmq.client.AMQP; 19 | import com.rabbitmq.client.Envelope; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.header.ConnectHeaders; 22 | import org.apache.kafka.connect.header.Headers; 23 | 24 | import static com.github.themeetgroup.kafka.connect.rabbitmq.source.data.MessageConverter.basicProperties; 25 | 26 | public class BytesSourceMessageConverter implements SourceMessageConverter { 27 | 28 | @Override 29 | public byte[] value(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) { 30 | return body; 31 | } 32 | 33 | @Override 34 | public Schema valueSchema() { 35 | return Schema.BYTES_SCHEMA; 36 | } 37 | 38 | @Override 39 | public String key(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) { 40 | return basicProperties.getMessageId(); 41 | } 42 | 43 | @Override 44 | public Schema keySchema() { 45 | return Schema.OPTIONAL_STRING_SCHEMA; 46 | } 47 | 48 | @Override 49 | public Headers headers(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) { 50 | return new ConnectHeaders().addStruct("amqp", basicProperties(basicProperties)); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/source/data/MessageConverter.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder 
package com.github.themeetgroup.kafka.connect.rabbitmq.source.data;

import com.google.common.collect.ImmutableMap;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.BasicProperties;
import com.rabbitmq.client.Envelope;
import com.rabbitmq.client.LongString;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.data.Timestamp;
import org.apache.kafka.connect.header.ConnectHeaders;
import org.apache.kafka.connect.header.Headers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Default converter: maps a complete AMQP delivery (consumer tag, envelope,
 * basic properties and raw body) onto structured Kafka Connect records. The
 * value is a {@link Struct} mirroring the full delivery, the key is a Struct
 * holding the AMQP messageId.
 */
public class MessageConverter implements SourceMessageConverter<Struct, Struct> {
  private static final Logger log = LoggerFactory.getLogger(MessageConverter.class);
  static final String FIELD_ENVELOPE_DELIVERYTAG = "deliveryTag";
  static final String FIELD_ENVELOPE_ISREDELIVER = "isRedeliver";
  static final String FIELD_ENVELOPE_EXCHANGE = "exchange";
  static final String FIELD_ENVELOPE_ROUTINGKEY = "routingKey";

  static final Schema SCHEMA_ENVELOPE = SchemaBuilder.struct()
      .name("com.github.themeetgroup.kafka.connect.rabbitmq.Envelope")
      .doc("Encapsulates a group of parameters used for AMQP's Basic methods. See " +
          "`Envelope `_")
      .field(FIELD_ENVELOPE_DELIVERYTAG, SchemaBuilder.int64().doc("The delivery tag included in this parameter envelope. See `Envelope.getDeliveryTag() `_").build())
      .field(FIELD_ENVELOPE_ISREDELIVER, SchemaBuilder.bool().doc("The redelivery flag included in this parameter envelope. See `Envelope.isRedeliver() `_").build())
      .field(FIELD_ENVELOPE_EXCHANGE, SchemaBuilder.string().optional().doc("The name of the exchange included in this parameter envelope. See `Envelope.getExchange() `_").build())
      .field(FIELD_ENVELOPE_ROUTINGKEY, SchemaBuilder.string().optional().doc("The routing key included in this parameter envelope. See `Envelope.getRoutingKey() `_").build())
      .build();

  /** Converts an AMQP {@link Envelope} into its Connect Struct form. */
  static Struct envelope(Envelope envelope) {
    return new Struct(SCHEMA_ENVELOPE)
        .put(FIELD_ENVELOPE_DELIVERYTAG, envelope.getDeliveryTag())
        .put(FIELD_ENVELOPE_ISREDELIVER, envelope.isRedeliver())
        .put(FIELD_ENVELOPE_EXCHANGE, envelope.getExchange())
        .put(FIELD_ENVELOPE_ROUTINGKEY, envelope.getRoutingKey());
  }

  // Tagged-union style schema for AMQP header values: "type" names the single
  // populated optional field, one per supported primitive plus timestamp/array.
  static final Schema SCHEMA_HEADER_VALUE;

  static {
    SchemaBuilder builder = SchemaBuilder.struct()
        .name("com.github.themeetgroup.kafka.connect.rabbitmq.BasicProperties.HeaderValue")
        .doc("Used to store the value of a header value. The `type` field stores the type of the data and the corresponding " +
            "field to read the data from.")
        .field("type", SchemaBuilder.string().doc("Used to define the type for the HeaderValue. " +
            "This will define the corresponding field which will contain the value in it's original type.").build()
        )
        .field("timestamp", Timestamp.builder().optional().doc("Storage for when the `type` field is set to `timestamp`. Null otherwise.").build())
        .field("array", SchemaBuilder.array(Schema.STRING_SCHEMA).optional().doc("Storage for when the `type` field is set to `array`. Null otherwise.").build());

    // One optional field per primitive Connect type (complex types handled above).
    for (Schema.Type v : Schema.Type.values()) {
      if (Schema.Type.ARRAY == v || Schema.Type.MAP == v || Schema.Type.STRUCT == v) {
        continue;
      }
      final String doc = String.format("Storage for when the `type` field is set to `%s`. Null otherwise.", v.name().toLowerCase());

      Schema fieldSchema = SchemaBuilder.type(v)
          .doc(doc)
          .optional()
          .build();
      builder.field(v.name().toLowerCase(), fieldSchema);
    }

    SCHEMA_HEADER_VALUE = builder.build();
  }

  static final String FIELD_BASIC_PROPERTIES_CONTENTTYPE = "contentType";
  static final String FIELD_BASIC_PROPERTIES_CONTENTENCODING = "contentEncoding";
  static final String FIELD_BASIC_PROPERTIES_HEADERS = "headers";
  static final String FIELD_BASIC_PROPERTIES_DELIVERYMODE = "deliveryMode";
  static final String FIELD_BASIC_PROPERTIES_PRIORITY = "priority";
  static final String FIELD_BASIC_PROPERTIES_CORRELATIONID = "correlationId";
  static final String FIELD_BASIC_PROPERTIES_REPLYTO = "replyTo";
  static final String FIELD_BASIC_PROPERTIES_EXPIRATION = "expiration";
  static final String FIELD_BASIC_PROPERTIES_MESSAGEID = "messageId";
  static final String FIELD_BASIC_PROPERTIES_TIMESTAMP = "timestamp";
  static final String FIELD_BASIC_PROPERTIES_TYPE = "type";
  static final String FIELD_BASIC_PROPERTIES_USERID = "userId";
  static final String FIELD_BASIC_PROPERTIES_APPID = "appId";

  static final Schema SCHEMA_KEY = SchemaBuilder.struct()
      .name("com.github.themeetgroup.kafka.connect.rabbitmq.MessageKey")
      .doc("Key used for partition assignment in Kafka.")
      .field(
          FIELD_BASIC_PROPERTIES_MESSAGEID,
          SchemaBuilder.string().optional().doc("The value in the messageId field. " +
              "`BasicProperties.getMessageId() `_").build()
      )
      .build();

  static final Schema SCHEMA_BASIC_PROPERTIES = SchemaBuilder.struct()
      .name("com.github.themeetgroup.kafka.connect.rabbitmq.BasicProperties")
      .optional()
      .doc("Corresponds to the `BasicProperties `_")
      .field(
          FIELD_BASIC_PROPERTIES_CONTENTTYPE,
          SchemaBuilder.string().optional().doc("The value in the contentType field. " +
              "See `BasicProperties.getContentType() `_")
              .build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_CONTENTENCODING,
          SchemaBuilder.string().optional().doc("The value in the contentEncoding field. " +
              "See `BasicProperties.getContentEncoding() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_HEADERS,
          SchemaBuilder.map(Schema.STRING_SCHEMA, SCHEMA_HEADER_VALUE).build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_DELIVERYMODE,
          SchemaBuilder.int32().optional().doc("The value in the deliveryMode field. " +
              "`BasicProperties.html.getDeliveryMode() `_ ").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_PRIORITY,
          SchemaBuilder.int32().optional().doc("The value in the priority field. " +
              "`BasicProperties.getPriority() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_CORRELATIONID,
          SchemaBuilder.string().optional().doc("The value in the correlationId field. " +
              "See `BasicProperties.getCorrelationId() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_REPLYTO,
          SchemaBuilder.string().optional().doc("The value in the replyTo field. " +
              "`BasicProperties.getReplyTo() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_EXPIRATION,
          SchemaBuilder.string().optional().doc("The value in the expiration field. " +
              "See `BasicProperties.getExpiration() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_MESSAGEID,
          SchemaBuilder.string().optional().doc("The value in the messageId field. " +
              "`BasicProperties.getMessageId() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_TIMESTAMP, Timestamp.builder().optional().doc("The value in the timestamp field. " +
              "`BasicProperties.getTimestamp() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_TYPE, SchemaBuilder.string().optional().doc("The value in the type field. " +
              "`BasicProperties.getType() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_USERID,
          SchemaBuilder.string().optional().doc("The value in the userId field. " +
              "`BasicProperties.getUserId() `_").build()
      )
      .field(
          FIELD_BASIC_PROPERTIES_APPID,
          SchemaBuilder.string().optional().doc("The value in the appId field. " +
              "`BasicProperties.getAppId() `_").build()
      )
      .build();

  // Maps a Java header-value class to the SCHEMA_HEADER_VALUE field that stores it.
  static final Map<Class<?>, String> FIELD_LOOKUP;

  static {
    Map<Class<?>, String> fieldLookup = new HashMap<>();
    fieldLookup.put(String.class, Schema.Type.STRING.name().toLowerCase());
    fieldLookup.put(Byte.class, Schema.Type.INT8.name().toLowerCase());
    fieldLookup.put(Short.class, Schema.Type.INT16.name().toLowerCase());
    fieldLookup.put(Integer.class, Schema.Type.INT32.name().toLowerCase());
    fieldLookup.put(Long.class, Schema.Type.INT64.name().toLowerCase());
    fieldLookup.put(Float.class, Schema.Type.FLOAT32.name().toLowerCase());
    fieldLookup.put(Double.class, Schema.Type.FLOAT64.name().toLowerCase());
    fieldLookup.put(Boolean.class, Schema.Type.BOOLEAN.name().toLowerCase());
    fieldLookup.put(ArrayList.class, Schema.Type.ARRAY.name().toLowerCase());
    fieldLookup.put(Date.class, "timestamp");
    FIELD_LOOKUP = ImmutableMap.copyOf(fieldLookup);
  }

  /**
   * Converts the AMQP headers map into Connect Structs, normalizing
   * {@link LongString} values (and lists of them) to plain Strings. Headers
   * with unsupported value types are logged and skipped.
   */
  static Map<String, Struct> headers(BasicProperties basicProperties) {
    Map<String, Object> input = basicProperties.getHeaders();
    Map<String, Struct> results = new LinkedHashMap<>();
    if (null != input) {
      for (Map.Entry<String, Object> kvp : input.entrySet()) {
        log.trace("headers() - key = '{}' value= '{}'", kvp.getKey(), kvp.getValue());
        final String field;
        final Object headerValue;

        if (kvp.getValue() instanceof LongString) {
          headerValue = kvp.getValue().toString();
        } else if (kvp.getValue() instanceof List) {
          // RabbitMQ delivers string arrays as List<LongString>; convert each element.
          @SuppressWarnings("unchecked")
          final List<LongString> list = (List<LongString>) kvp.getValue();
          final List<String> values = new ArrayList<>(list.size());
          for (LongString l : list) {
            values.add(l.toString());
          }
          headerValue = values;
        } else {
          headerValue = kvp.getValue();
        }

        if (!FIELD_LOOKUP.containsKey(headerValue.getClass())) {
          log.warn(
              String.format("Could not determine the type for field '%s' type '%s', skipping", kvp.getKey(), headerValue.getClass().getName())
          );
          continue;
        } else {
          field = FIELD_LOOKUP.get(headerValue.getClass());
        }

        log.trace("headers() - Storing value for header in field = '{}' as {}", field, headerValue);

        Struct value = new Struct(SCHEMA_HEADER_VALUE)
            .put("type", field)
            .put(field, headerValue);
        results.put(kvp.getKey(), value);
      }
    }
    return results;
  }

  /**
   * Converts AMQP {@link BasicProperties} into its Connect Struct form.
   *
   * @return null when {@code basicProperties} is null (the schema is optional).
   */
  static Struct basicProperties(BasicProperties basicProperties) {
    if (null == basicProperties) {
      log.trace("basicProperties() - basicProperties is null.");
      return null;
    }

    Map<String, Struct> headers = headers(basicProperties);
    return new Struct(SCHEMA_BASIC_PROPERTIES)
        .put(FIELD_BASIC_PROPERTIES_CONTENTTYPE, basicProperties.getContentType())
        .put(FIELD_BASIC_PROPERTIES_CONTENTENCODING, basicProperties.getContentEncoding())
        .put(FIELD_BASIC_PROPERTIES_HEADERS, headers)
        .put(FIELD_BASIC_PROPERTIES_DELIVERYMODE, basicProperties.getDeliveryMode())
        .put(FIELD_BASIC_PROPERTIES_PRIORITY, basicProperties.getPriority())
        .put(FIELD_BASIC_PROPERTIES_CORRELATIONID, basicProperties.getCorrelationId())
        .put(FIELD_BASIC_PROPERTIES_REPLYTO, basicProperties.getReplyTo())
        .put(FIELD_BASIC_PROPERTIES_EXPIRATION, basicProperties.getExpiration())
        .put(FIELD_BASIC_PROPERTIES_MESSAGEID, basicProperties.getMessageId())
        .put(FIELD_BASIC_PROPERTIES_TIMESTAMP, basicProperties.getTimestamp())
        .put(FIELD_BASIC_PROPERTIES_TYPE, basicProperties.getType())
        .put(FIELD_BASIC_PROPERTIES_USERID, basicProperties.getUserId())
        .put(FIELD_BASIC_PROPERTIES_APPID, basicProperties.getAppId());
  }

  static final String FIELD_MESSAGE_CONSUMERTAG = "consumerTag";
  static final String FIELD_MESSAGE_ENVELOPE = "envelope";
  static final String FIELD_MESSAGE_BASICPROPERTIES = "basicProperties";
  static final String FIELD_MESSAGE_BODY = "body";

  static final Schema SCHEMA_VALUE = SchemaBuilder.struct()
      .name("com.github.themeetgroup.kafka.connect.rabbitmq.Message")
      .doc("Message as it is delivered to the `RabbitMQ Consumer `_ ")
      .field(FIELD_MESSAGE_CONSUMERTAG, SchemaBuilder.string().doc("The consumer tag associated with the consumer").build())
      .field(FIELD_MESSAGE_ENVELOPE, SCHEMA_ENVELOPE)
      .field(FIELD_MESSAGE_BASICPROPERTIES, SCHEMA_BASIC_PROPERTIES)
      .field(FIELD_MESSAGE_BODY, SchemaBuilder.bytes().doc("The value body (opaque, client-specific byte array)").build())
      .build();

  @Override
  public Struct value(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) {
    return new Struct(valueSchema())
        .put(FIELD_MESSAGE_CONSUMERTAG, consumerTag)
        .put(FIELD_MESSAGE_ENVELOPE, envelope(envelope))
        .put(FIELD_MESSAGE_BASICPROPERTIES, basicProperties(basicProperties))
        .put(FIELD_MESSAGE_BODY, body);
  }

  @Override
  public Schema valueSchema() {
    return SCHEMA_VALUE;
  }

  @Override
  public Struct key(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) {
    return new Struct(keySchema())
        .put(FIELD_BASIC_PROPERTIES_MESSAGEID, basicProperties.getMessageId());
  }

  @Override
  public Schema keySchema() {
    return SCHEMA_KEY;
  }

  @Override
  public Headers headers(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) {
    // The full properties already live inside the value; no extra headers here.
    return new ConnectHeaders();
  }
}
15 | */ 16 | package com.github.themeetgroup.kafka.connect.rabbitmq.source.data; 17 | 18 | import com.rabbitmq.client.AMQP; 19 | import com.rabbitmq.client.Envelope; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.header.Headers; 22 | 23 | public interface SourceMessageConverter { 24 | 25 | V value(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body); 26 | 27 | Schema valueSchema(); 28 | 29 | K key(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body); 30 | 31 | Schema keySchema(); 32 | 33 | Headers headers(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body); 34 | 35 | } 36 | -------------------------------------------------------------------------------- /src/main/java/com/github/themeetgroup/kafka/connect/rabbitmq/source/data/SourceRecordBuilder.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
package com.github.themeetgroup.kafka.connect.rabbitmq.source.data;

import com.github.themeetgroup.kafka.connect.rabbitmq.source.RabbitMQSourceConnectorConfig;
import com.google.common.collect.ImmutableMap;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Envelope;
import org.apache.kafka.common.utils.SystemTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.header.Headers;
import org.apache.kafka.connect.source.SourceRecord;

import java.lang.reflect.InvocationTargetException;

/**
 * Assembles Kafka Connect {@link SourceRecord}s from AMQP deliveries using a
 * (possibly user-supplied) {@link SourceMessageConverter}. The routing key is
 * used as the source partition and the delivery tag as the source offset, so
 * {@code RabbitMQSourceTask.commitRecord} can ack the right message.
 */
public class SourceRecordBuilder {

  private final Time time = new SystemTime();
  private final SourceMessageConverter<?, ?> messageConverter;
  private final RabbitMQSourceConnectorConfig config;

  /**
   * @param config connector configuration; {@code config.messageConverter} may
   *     name a custom converter class, otherwise {@link MessageConverter} is used.
   * @throws ClassNotFoundException and friends when the configured converter
   *     class cannot be loaded or reflectively instantiated.
   */
  public SourceRecordBuilder(RabbitMQSourceConnectorConfig config) throws ClassNotFoundException, IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException {
    this.config = config;
    String messageConverterClassName = config.messageConverter;
    this.messageConverter = messageConverterClassName == null ?
        new MessageConverter() :
        (SourceMessageConverter<?, ?>) (Class.forName(messageConverterClassName).getConstructor().newInstance());
  }

  /**
   * Builds one SourceRecord for the given delivery. The record timestamp is
   * the AMQP timestamp property when present, otherwise the current time.
   */
  public SourceRecord sourceRecord(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] bytes) {
    Object key = this.messageConverter.key(consumerTag, envelope, basicProperties, bytes);
    Schema keySchema = this.messageConverter.keySchema();
    Object value = this.messageConverter.value(consumerTag, envelope, basicProperties, bytes);
    Schema valueSchema = this.messageConverter.valueSchema();
    Headers headers = this.messageConverter.headers(consumerTag, envelope, basicProperties, bytes);
    String topic = this.config.kafkaTopic;

    // basicProperties itself may be null (converters tolerate that); guard it
    // here too so we don't NPE while deriving the record timestamp.
    long timestamp = (null == basicProperties || null == basicProperties.getTimestamp())
        ? this.time.milliseconds()
        : basicProperties.getTimestamp().getTime();

    return new SourceRecord(
        ImmutableMap.of("routingKey", envelope.getRoutingKey()),
        ImmutableMap.of("deliveryTag", envelope.getDeliveryTag()),
        topic,
        null,
        keySchema,
        key,
        valueSchema,
        value,
        timestamp,
        headers
    );
  }
}
package com.github.themeetgroup.kafka.connect.rabbitmq.source.data;

import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Envelope;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.header.ConnectHeaders;
import org.apache.kafka.connect.header.Headers;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

import static com.github.themeetgroup.kafka.connect.rabbitmq.source.data.MessageConverter.basicProperties;

/**
 * {@link SourceMessageConverter} that decodes the AMQP message body into a
 * String record value and uses the AMQP messageId as the record key. The AMQP
 * basic properties are carried along as an "amqp" record header.
 */
public class StringSourceMessageConverter implements SourceMessageConverter<String, String> {

  /** Charset used to decode the message body. */
  private final Charset charset;

  public StringSourceMessageConverter() {
    this.charset = StandardCharsets.UTF_8; // todo make configurable
  }

  @Override
  public String value(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) {
    return new String(body, charset);
  }

  @Override
  public Schema valueSchema() {
    return Schema.STRING_SCHEMA;
  }

  @Override
  public String key(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) {
    // May be null — the key schema is optional for exactly that reason.
    return basicProperties.getMessageId();
  }

  @Override
  public Schema keySchema() {
    return Schema.OPTIONAL_STRING_SCHEMA;
  }

  @Override
  public Headers headers(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] body) {
    // MessageConverter.basicProperties returns null for null input; adding a
    // null struct to ConnectHeaders would NPE, so emit empty headers instead.
    Struct amqp = basicProperties(basicProperties);
    if (amqp == null) {
      return new ConnectHeaders();
    }
    return new ConnectHeaders().addStruct("amqp", amqp);
  }

}
Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.themeetgroup.kafka.connect.rabbitmq.source.data; 17 | 18 | import com.google.common.collect.ImmutableMap; 19 | import com.rabbitmq.client.BasicProperties; 20 | import com.rabbitmq.client.Envelope; 21 | import com.rabbitmq.client.LongString; 22 | import com.rabbitmq.client.impl.LongStringHelper; 23 | import org.apache.kafka.connect.data.Schema; 24 | import org.apache.kafka.connect.data.Struct; 25 | import org.junit.jupiter.api.DynamicTest; 26 | import org.junit.jupiter.api.Test; 27 | import org.junit.jupiter.api.TestFactory; 28 | 29 | import java.util.ArrayList; 30 | import java.util.Arrays; 31 | import java.util.Date; 32 | import java.util.List; 33 | import java.util.Map; 34 | import java.util.stream.Stream; 35 | 36 | import static com.github.jcustenborder.kafka.connect.utils.AssertStruct.assertStruct; 37 | import static org.junit.jupiter.api.Assertions.assertEquals; 38 | import static org.junit.jupiter.api.Assertions.assertNotNull; 39 | import static org.junit.jupiter.api.Assertions.assertNull; 40 | import static org.junit.jupiter.api.Assertions.assertTrue; 41 | import static org.junit.jupiter.api.DynamicTest.dynamicTest; 42 | import static org.mockito.Mockito.mock; 43 | import static org.mockito.Mockito.only; 44 | import static org.mockito.Mockito.verify; 45 | import static 
org.mockito.Mockito.when; 46 | 47 | public class MessageConverterTest { 48 | 49 | 50 | void assertField(final Object expected, final Struct struct, final String fieldName) { 51 | assertEquals(expected, struct.get(fieldName), fieldName + " does not match."); 52 | } 53 | 54 | @Test 55 | public void envelope() { 56 | final Envelope input = new Envelope( 57 | 13246312L, 58 | true, 59 | "exchange", 60 | "routingKey" 61 | ); 62 | 63 | final Struct actual = MessageConverter.envelope(input); 64 | assertNotNull(actual, "actual should not be null."); 65 | assertField(input.getDeliveryTag(), actual, MessageConverter.FIELD_ENVELOPE_DELIVERYTAG); 66 | assertField(input.getExchange(), actual, MessageConverter.FIELD_ENVELOPE_EXCHANGE); 67 | assertField(input.getRoutingKey(), actual, MessageConverter.FIELD_ENVELOPE_ROUTINGKEY); 68 | assertField(input.isRedeliver(), actual, MessageConverter.FIELD_ENVELOPE_ISREDELIVER); 69 | } 70 | 71 | @Test 72 | public void basicPropertiesNull() { 73 | Struct basicProperties = MessageConverter.basicProperties(null); 74 | assertNull(basicProperties); 75 | } 76 | 77 | 78 | Struct struct(Schema.Type type, Object value) { 79 | final String t = type.getName().toLowerCase(); 80 | return new Struct(MessageConverter.SCHEMA_HEADER_VALUE) 81 | .put("type", t) 82 | .put(t, value); 83 | } 84 | 85 | static class HeaderTestCase { 86 | public final Object input; 87 | public final String type; 88 | public final Object expected; 89 | 90 | private HeaderTestCase(Object input, String type, Object expected) { 91 | this.input = input; 92 | this.type = type; 93 | this.expected = expected; 94 | } 95 | 96 | Struct expectedStruct() { 97 | final String field = this.type.toString().toLowerCase(); 98 | return new Struct(MessageConverter.SCHEMA_HEADER_VALUE) 99 | .put("type", field) 100 | .put(field, this.expected); 101 | } 102 | 103 | @Override 104 | public String toString() { 105 | return String.format("%s - %s", this.type, this.input.getClass().getName()); 106 | } 107 | 
108 | public static final HeaderTestCase of(Object input, String type, Object expected) { 109 | return new HeaderTestCase(input, type, expected); 110 | } 111 | } 112 | 113 | @TestFactory 114 | public Stream headers() { 115 | final List listHeader = new ArrayList<>(); 116 | listHeader.add(LongStringHelper.asLongString("1")); 117 | listHeader.add(LongStringHelper.asLongString("2")); 118 | listHeader.add(LongStringHelper.asLongString("3")); 119 | listHeader.add(LongStringHelper.asLongString("4")); 120 | final List listHeaderValue = new ArrayList<>(); 121 | listHeaderValue.add("1"); 122 | listHeaderValue.add("2"); 123 | listHeaderValue.add("3"); 124 | listHeaderValue.add("4"); 125 | final List tests = Arrays.asList( 126 | HeaderTestCase.of(Byte.valueOf("1"), Schema.Type.INT8.toString().toLowerCase(), Byte.valueOf("1")), 127 | HeaderTestCase.of(Short.valueOf("1"), Schema.Type.INT16.toString().toLowerCase(), Short.valueOf("1")), 128 | HeaderTestCase.of(Integer.valueOf("1"), Schema.Type.INT32.toString().toLowerCase(), Integer.valueOf("1")), 129 | HeaderTestCase.of(Long.valueOf("1"), Schema.Type.INT64.toString().toLowerCase(), Long.valueOf("1")), 130 | HeaderTestCase.of(Float.valueOf("1"), Schema.Type.FLOAT32.toString().toLowerCase(), Float.valueOf("1")), 131 | HeaderTestCase.of(Double.valueOf("1"), Schema.Type.FLOAT64.toString().toLowerCase(), Double.valueOf("1")), 132 | HeaderTestCase.of("1", Schema.Type.STRING.toString().toLowerCase(), "1"), 133 | HeaderTestCase.of(LongStringHelper.asLongString("1"), Schema.Type.STRING.toString().toLowerCase(), "1"), 134 | HeaderTestCase.of(new Date(1500691965123L), "timestamp", new Date(1500691965123L)), 135 | HeaderTestCase.of(listHeader, "array", listHeaderValue) 136 | ); 137 | 138 | return tests.stream().map(test -> dynamicTest(test.toString(), () -> { 139 | final Map INPUT_HEADERS = ImmutableMap.of("input", test.input); 140 | BasicProperties basicProperties = mock(BasicProperties.class); 141 | 
when(basicProperties.getHeaders()).thenReturn(INPUT_HEADERS); 142 | final Map actual = MessageConverter.headers(basicProperties); 143 | verify(basicProperties, only()).getHeaders(); 144 | assertNotNull(actual, "actual should not be null."); 145 | assertTrue(actual.containsKey("input"), "actual should contain key 'input'"); 146 | Struct actualStruct = actual.get("input"); 147 | actualStruct.validate(); 148 | assertStruct(test.expectedStruct(), actualStruct); 149 | })); 150 | } 151 | 152 | 153 | } 154 | -------------------------------------------------------------------------------- /src/test/java/com/github/themeetgroup/kafka/connect/rabbitmq/source/data/TransformationTest.java: -------------------------------------------------------------------------------- 1 | package com.github.themeetgroup.kafka.connect.rabbitmq.source.data; 2 | 3 | import org.apache.kafka.connect.sink.SinkRecord; 4 | import org.apache.kafka.connect.transforms.Transformation; 5 | import org.junit.jupiter.api.BeforeEach; 6 | 7 | public abstract class TransformationTest { 8 | final boolean isKey; 9 | final static String TOPIC = "test"; 10 | 11 | 12 | protected TransformationTest(boolean isKey) { 13 | this.isKey = isKey; 14 | } 15 | 16 | protected abstract Transformation create(); 17 | 18 | Transformation transformation; 19 | 20 | @BeforeEach 21 | public void before() { 22 | this.transformation = create(); 23 | } 24 | 25 | 26 | } -------------------------------------------------------------------------------- /src/test/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n 5 | 6 | 7 | 8 | 9 | 10 | 11 | --------------------------------------------------------------------------------