├── .editorconfig
├── .gitignore
├── LICENSE
├── README.md
├── bin
└── debug.sh
├── config
├── AwsLambdaSinkConnector.properties
├── connect-avro-docker.properties
└── connect-json-docker.properties
├── docker-compose.yml
├── pom.xml
└── src
├── main
├── assembly
│ └── package.xml
├── java
│ └── com
│ │ └── tm
│ │ └── kafka
│ │ └── connect
│ │ └── aws
│ │ └── lambda
│ │ ├── AwsLambdaSinkConnector.java
│ │ ├── AwsLambdaSinkConnectorConfig.java
│ │ ├── AwsLambdaSinkTask.java
│ │ ├── ConfigurationAWSCredentialsProvider.java
│ │ ├── VersionUtil.java
│ │ └── converter
│ │ ├── DefaultPayloadConverter.java
│ │ ├── JsonPayloadConverter.java
│ │ └── SinkRecordToPayloadConverter.java
└── resources
│ └── logback.xml
└── test
├── java
└── com
│ └── tm
│ └── kafka
│ └── connect
│ └── aws
│ └── lambda
│ ├── AwsLambdaSinkConnectorConfigTest.java
│ ├── AwsLambdaSinkConnectorTest.java
│ └── AwsLambdaSinkTaskTest.java
└── resources
└── logback.xml
/.editorconfig:
--------------------------------------------------------------------------------
1 | # top-most EditorConfig file
2 | root = true
3 |
4 | # Unix-style newlines with a newline ending every file
5 | [*]
6 | end_of_line = lf
7 | insert_final_newline = true
8 | indent_style = space
9 | indent_size = 2
10 |
11 | # Matches multiple files with brace expansion notation
12 | # Set default charset
13 | [*.{js,py}]
14 | charset = utf-8
15 |
16 | # 4 space indentation
17 | [*.py]
18 | indent_style = space
19 | indent_size = 2
20 |
21 | # Tab indentation (no size specified)
22 | [Makefile]
23 | indent_style = space
24 |
25 | # Indentation override for all JS under lib directory
26 | [lib/**.js]
27 | indent_style = space
28 | indent_size = 2
29 |
30 | # Matches the exact files either package.json or .travis.yml
31 | [{package.json,.travis.yml}]
32 | indent_style = space
33 | indent_size = 2
34 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | *~
3 |
4 | # Build products
5 | target/
6 | build/
7 |
8 | # IntelliJ data
9 | *.iml
10 | .idea/
11 | .ipr
12 |
13 | # Eclipse
14 | .classpath
15 | .project
16 | .settings/
17 |
18 | # Documentation build output
19 | /docs/_build
20 |
21 | .DS_Store
22 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Kafka Connect AWS Lambda connector
2 | ==================================
3 |
4 | # Running in development
5 |
6 | The [docker-compose.yml](docker-compose.yml) that is included in this repository is based on the Confluent Platform Docker
7 | images. Take a look at the [quickstart](http://docs.confluent.io/3.0.1/cp-docker-images/docs/quickstart.html#getting-started-with-docker-client)
8 | for the Docker images.
9 |
10 | Examples using the different payload converters:
11 |
12 | ```
13 | docker-compose up -d
14 | ```
15 |
16 | With plain json messages:
17 |
18 | ```
19 | # docker exec -it kafkaconnectawslambda_kafka_1 bash
20 | # cd /data
21 | # ./bin/debug.sh config/connect-json-docker.properties config/AwsLambdaSinkConnector.properties
22 | ```
23 |
24 | ```
25 | # docker exec -it kafkaconnectawslambda_connect_1 bash
26 | # kafka-console-producer --broker-list kafka:9092 --topic aws-lambda-topic \
27 | --property parse.key='true' --property key.separator=':'
28 | > K2:{"f1":"A7"}
29 | ```
30 |
31 | With JsonPayloadConverter the Lambda function sees:
32 |
33 | ```
34 | { schema: null, payload: { f1: 'A7' } }
35 | ```
36 |
37 | With DefaultPayloadConverter the Lambda function sees:
38 |
39 | ```
40 | { kafkaOffset: 34,
41 | timestampType: 'CREATE_TIME',
42 | topic: 'aws-lambda-topic',
43 | kafkaPartition: 0,
44 | keySchema: { type: 'STRING', optional: true },
45 | key: 'K2',
46 | value: { f1: 'A7' },
47 | timestamp: 1518315606220 }
48 | ```
49 |
50 | With schema-registry and Avro messages:
51 | ```
52 | # docker exec -it kafkaconnectawslambda_kafka_1 bash
53 | # cd /data
54 | # ./bin/debug.sh config/connect-avro-docker.properties config/AwsLambdaSinkConnector.properties
55 | ```
56 |
57 | ```
58 | # docker exec -it kafkaconnectawslambda_connect_1 bash
59 | # kafka-avro-console-producer --broker-list kafka:9092 --topic aws-lambda-topic \
60 | --property value.schema='{"type":"record","name":"test","fields":[{"name":"f1","type":"string"}]}' \
61 | --property schema.registry.url='http://schema_registry:8081/'
62 | {"f1":"AZ"}
63 | ```
64 |
65 | With JsonPayloadConverter the Lambda function sees:
66 |
67 | ```
68 | { schema:
69 | { type: 'struct',
70 | fields: [ [Object] ],
71 | optional: false,
72 | name: 'test',
73 | version: 1 },
74 | payload: { f1: 'AZ' } }
75 | ```
76 |
77 |
78 | With DefaultPayloadConverter the Lambda function sees:
79 |
80 | ```
81 | { kafkaOffset: 35,
82 | timestampType: 'CREATE_TIME',
83 | topic: 'aws-lambda-topic',
84 | kafkaPartition: 0,
85 | keySchema: { type: 'STRING', optional: true },
86 | valueSchema:
87 | { type: 'STRUCT',
88 | optional: false,
89 | fields: [ [Object] ],
90 | fieldsByName: { f1: [Object] },
91 | name: 'test',
92 | version: 1 },
93 | value:
94 | { schema:
95 | { type: 'STRUCT',
96 | optional: false,
97 | fields: [Object],
98 | fieldsByName: [Object],
99 | name: 'test',
100 | version: 1 },
101 | values: [ 'AZ' ] },
102 | timestamp: 1518315749655 }
103 | ```
104 |
105 | Start the connector with debugging enabled. This will wait for a debugger to attach.
106 |
107 | ```
108 | export SUSPEND='y'
109 | ./bin/debug.sh config/connect-json-docker.properties config/AwsLambdaSinkConnector.properties
110 | ```
111 |
112 |
--------------------------------------------------------------------------------
/bin/debug.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 |
3 |
4 | : ${SUSPEND:='n'}
5 |
6 | set -e
7 |
8 | export KAFKA_JMX_OPTS="-Xdebug -agentlib:jdwp=transport=dt_socket,server=y,suspend=${SUSPEND},address=5005"
9 | export CLASSPATH="$(find target/kafka-connect-aws-lambda-1.0-package/share/java -type f -name '*.jar' | tr '\n' ':')"
10 |
11 | # connect-standalone config/connect-json-docker.properties config/AwsLambdaSinkConnector.properties
12 |
13 | connect-standalone $1 $2
14 |
--------------------------------------------------------------------------------
/config/AwsLambdaSinkConnector.properties:
--------------------------------------------------------------------------------
1 |
2 | name=AwsLambdaSinkConnector
3 | topics=aws-lambda-topic
4 | tasks.max=1
5 | connector.class=com.tm.kafka.connect.aws.lambda.AwsLambdaSinkConnector
6 |
7 | aws.region=us-west-2
8 | aws.function.name=kafka-aws-lambda-test
9 | aws.lambda.payload.converter.class=com.tm.kafka.connect.aws.lambda.converter.JsonPayloadConverter
10 | # aws.lambda.payload.converter.class=com.tm.kafka.connect.aws.lambda.converter.DefaultPayloadConverter
11 | # retry.backoff.ms=5000
12 | # aws.lambda.invoke.async=RequestResponse
13 | # aws.lambda.invoke.async=Event
14 | # aws.lambda.invoke.async=DryRun
15 |
16 | # aws.credentials.provider.class=com.amazonaws.auth.DefaultAWSCredentialsProviderChain
17 | aws.credentials.provider.class=com.tm.kafka.connect.aws.lambda.ConfigurationAWSCredentialsProvider
18 | aws.credentials.provider.aws.access.key.id=${file:/root/.aws/credentials:aws_access_key_id}
19 | aws.credentials.provider.aws.secret.access.key=${file:/root/.aws/credentials:aws_secret_access_key}
20 |
--------------------------------------------------------------------------------
/config/connect-avro-docker.properties:
--------------------------------------------------------------------------------
1 | # Sample configuration for a standalone Kafka Connect worker that uses Avro serialization and
 2 | # integrates with the Schema Registry. This sample configuration assumes a local installation of
3 | # Confluent Platform with all services running on their default ports.
4 | # Bootstrap Kafka servers. If multiple servers are specified, they should be comma-separated.
5 | bootstrap.servers=kafka:9092
6 | # The converters specify the format of data in Kafka and how to translate it into Connect data.
7 | # Every Connect user will need to configure these based on the format they want their data in
8 | # when loaded from or stored into Kafka
9 | key.converter=org.apache.kafka.connect.storage.StringConverter
10 | key.converter.schemas.enable=false
11 | key.converter.schema.registry.url=http://schema_registry:8081/
12 | value.converter=io.confluent.connect.avro.AvroConverter
13 | value.converter.schemas.enable=true
14 | value.converter.schema.registry.url=http://schema_registry:8081/
15 |
16 | # The internal converter used for offsets and config data is configurable and must be specified,
17 | # but most users will always want to use the built-in default. Offset and config data is never
18 | # visible outside of Connect in this format.
19 | internal.key.converter=org.apache.kafka.connect.json.JsonConverter
20 | internal.value.converter=org.apache.kafka.connect.json.JsonConverter
21 | internal.key.converter.schemas.enable=true
22 | internal.value.converter.schemas.enable=true
23 | # Local storage file for offset data
24 | offset.storage.file.filename=/tmp/connect.offsets
25 | # Confluent Control Center Integration -- uncomment these lines to enable Kafka client interceptors
26 | # that will report audit data that can be displayed and analyzed in Confluent Control Center
27 | # producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor
28 | # consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor
29 |
--------------------------------------------------------------------------------
/config/connect-json-docker.properties:
--------------------------------------------------------------------------------
 1 | # Sample configuration for a standalone Kafka Connect worker that uses JSON serialization and
 2 | # integrates with the Schema Registry. This sample configuration assumes a local installation of
3 | # Confluent Platform with all services running on their default ports.
4 | # Bootstrap Kafka servers. If multiple servers are specified, they should be comma-separated.
5 | bootstrap.servers=kafka:9092
6 | # The converters specify the format of data in Kafka and how to translate it into Connect data.
7 | # Every Connect user will need to configure these based on the format they want their data in
8 | # when loaded from or stored into Kafka
9 | key.converter=org.apache.kafka.connect.storage.StringConverter
10 | key.converter.schemas.enable=false
11 | value.converter=org.apache.kafka.connect.json.JsonConverter
12 | value.converter.schemas.enable=false
13 |
14 | # The internal converter used for offsets and config data is configurable and must be specified,
15 | # but most users will always want to use the built-in default. Offset and config data is never
16 | # visible outside of Connect in this format.
17 | internal.key.converter=org.apache.kafka.connect.json.JsonConverter
18 | internal.value.converter=org.apache.kafka.connect.json.JsonConverter
19 | internal.key.converter.schemas.enable=false
20 | internal.value.converter.schemas.enable=false
21 | # Local storage file for offset data
22 | offset.storage.file.filename=/tmp/connect.offsets
23 | # Confluent Control Center Integration -- uncomment these lines to enable Kafka client interceptors
24 | # that will report audit data that can be displayed and analyzed in Confluent Control Center
25 | # producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor
26 | # consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor
27 |
28 | config.providers=file
29 | config.providers.file.class=org.apache.kafka.common.config.provider.FileConfigProvider
30 | config.providers.file.param.secrets=/root/.aws/credentials
31 | config.reload.action=restart
32 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "2"
2 | services:
3 |
4 | zookeeper:
5 | image: confluentinc/cp-zookeeper:5.0.1
6 | container_name: zookeeper
7 | environment:
8 | ZOOKEEPER_CLIENT_PORT: 2181
9 | zk_id: "1"
10 |
11 | kafka:
12 | hostname: kafka
13 | image: confluentinc/cp-kafka:5.0.1
14 | container_name: kafka
15 | links:
16 | - zookeeper
17 | ports:
18 | - "9092:9092"
19 | environment:
20 | KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
21 | KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://:9092"
22 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
23 | volumes:
24 | - ./:/data
25 | - ~/.aws:/root/.aws
26 |
27 | schema-registry:
28 | hostname: schema-registry
29 | image: confluentinc/cp-schema-registry:5.0.1
30 | container_name: schema-registry
31 | links:
32 | - kafka
33 | - zookeeper
34 | ports:
35 | - "8081:8081"
36 | environment:
37 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: "zookeeper:2181"
38 | SCHEMA_REGISTRY_HOST_NAME: schema-registry
39 |
40 | # connect:
41 | # hostname: connect
42 | # image: confluentinc/cp-kafka-connect:5.0.1
43 | # container_name: connect
44 | # depends_on:
45 | # - zookeeper
46 | # - kafka
47 | # - schema-registry
48 | # ports:
49 | # - "8083:8083"
50 | # environment:
51 | # CONNECT_BOOTSTRAP_SERVERS: 'kafka:9092'
52 | # CONNECT_REST_ADVERTISED_HOST_NAME: connect
53 | # CONNECT_REST_PORT: 8083
54 | # CONNECT_GROUP_ID: compose-connect-group
55 | # CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
56 | # CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
57 | # CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
58 | # CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
59 | # CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
60 | # CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
61 | # CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
62 | # CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
63 | # CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
64 | # CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
65 | # CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
66 | # CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
67 | # CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
68 | # CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
69 | # CONNECT_PLUGIN_PATH: /usr/share/java
70 | # volumes:
71 | # - ./:/data
72 | # - ~/.aws:/root/.aws
73 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 |
5 | com.tm.kafka
6 | kafka-connect-aws-lambda
7 | 1.0
8 | jar
9 |
10 | kafka-connect-aws-lambda
11 | A Kafka Connect Connector for kafka-connect-aws-lambda
12 |
13 |
14 | 2.0.1
15 | 2.12.1
16 | 5.0.1
17 | http://packages.confluent.io/maven/
18 |
19 |
20 |
21 |
22 |
23 | org.apache.kafka
24 | connect-api
25 | ${kafka.version}
26 | provided
27 |
28 |
29 | org.apache.kafka
30 | connect-json
31 | ${kafka.version}
32 | provided
33 |
34 |
35 |
36 | io.confluent
37 | kafka-connect-avro-converter
38 | ${confluent.version}
39 |
40 |
41 | org.slf4j
42 | slf4j-log4j12
43 |
44 |
45 |
46 |
47 |
48 | com.amazonaws
49 | aws-java-sdk-lambda
50 | 1.11.452
51 |
52 |
53 |
54 | org.apache.avro
55 | avro
56 | 1.8.2
57 |
58 |
59 |
60 | com.google.code.gson
61 | gson
62 | 2.8.5
63 |
64 |
65 |
66 | junit
67 | junit
68 | 4.13.1
69 | test
70 |
71 |
72 | ch.qos.logback
73 | logback-classic
74 | 1.2.3
75 | test
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 | com.fasterxml.jackson.core
84 | jackson-core
85 | ${jackson.version}
86 |
87 |
88 | com.fasterxml.jackson.core
89 | jackson-annotations
90 | ${jackson.version}
91 |
92 |
93 | com.fasterxml.jackson.core
94 | jackson-databind
95 | ${jackson.version}
96 |
97 |
98 | com.fasterxml.jackson.dataformat
99 | jackson-dataformat-cbor
100 | ${jackson.version}
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 | confluent
109 | Confluent
110 | ${confluent.maven.repo}
111 |
112 |
113 |
114 |
115 |
116 |
117 | org.codehaus.mojo
118 | versions-maven-plugin
119 | 2.7
120 |
121 |
122 | org.apache.maven.plugins
123 | maven-enforcer-plugin
124 | 3.0.0-M2
125 |
126 |
127 | enforce-versions
128 |
129 | enforce
130 |
131 |
132 |
133 |
134 | 3.0.5
135 |
136 |
137 | 1.8
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 | org.apache.maven.plugins
146 | maven-jar-plugin
147 | 3.1.0
148 |
149 |
150 |
151 | true
152 | true
153 |
154 |
155 |
156 |
157 |
158 | org.apache.maven.plugins
159 | maven-compiler-plugin
160 | 3.8.0
161 | true
162 |
163 | 1.8
164 | 1.8
165 |
166 |
167 |
168 | maven-assembly-plugin
169 | 3.1.0
170 |
171 |
172 | src/main/assembly/package.xml
173 |
174 |
175 |
176 |
177 | make-assembly
178 | package
179 |
180 | single
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 | src/main/resources
189 | true
190 |
191 |
192 |
193 |
194 |
--------------------------------------------------------------------------------
/src/main/assembly/package.xml:
--------------------------------------------------------------------------------
1 |
5 |
6 | package
7 |
8 | dir
9 |
10 | false
11 |
12 |
13 | ${project.basedir}
14 | share/doc/${project.name}/
15 |
16 | README*
17 | LICENSE*
18 | NOTICE*
19 | licenses/
20 |
21 |
22 |
23 | ${project.basedir}/config
24 | etc/${project.name}
25 |
26 | *
27 |
28 |
29 |
30 |
31 |
32 | share/java/${project.name}
33 | true
34 | true
35 |
36 | org.apache.kafka:connect-api
37 |
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/src/main/java/com/tm/kafka/connect/aws/lambda/AwsLambdaSinkConnector.java:
--------------------------------------------------------------------------------
1 | package com.tm.kafka.connect.aws.lambda;
2 |
3 | import org.apache.kafka.common.config.ConfigDef;
4 | import org.apache.kafka.connect.connector.Task;
5 | import org.apache.kafka.connect.sink.SinkConnector;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 |
9 | import java.util.ArrayList;
10 | import java.util.HashMap;
11 | import java.util.List;
12 | import java.util.Map;
13 |
14 | public class AwsLambdaSinkConnector extends SinkConnector {
15 | private static Logger log = LoggerFactory.getLogger(AwsLambdaSinkConnector.class);
16 | private AwsLambdaSinkConnectorConfig config;
17 |
18 | @Override
19 | public String version() {
20 | return VersionUtil.getVersion();
21 | }
22 |
23 | @Override
 24 |   public void start(Map<String, String> map) {
25 | config = new AwsLambdaSinkConnectorConfig(map);
26 | }
27 |
28 | @Override
 29 |   public Class<? extends Task> taskClass() {
30 | return AwsLambdaSinkTask.class;
31 | }
32 |
33 | @Override
34 | public List