├── Dockerfile
├── .vscode
└── settings.json
├── assets
└── HTML5_Badge_256.png
├── .envrc
├── create_connector_ccloud.sh
├── src
├── test
│ ├── java
│ │ └── com
│ │ │ └── github
│ │ │ └── cjmatta
│ │ │ └── kafka
│ │ │ └── connect
│ │ │ ├── .DS_Store
│ │ │ ├── ServerSentEventsSourceTaskTest.java
│ │ │ ├── ServerSentEventsSourceConnectorTest.java
│ │ │ ├── ServerSentEventsSourceTaskIT.java
│ │ │ └── sse
│ │ │ └── ServerSentEventClientTest.java
│ └── resources
│ │ └── logback.xml
└── main
│ ├── assembly
│ └── package.xml
│ └── java
│ └── com
│ └── github
│ └── cjmatta
│ └── kafka
│ └── connect
│ └── sse
│ ├── ServerSentEvent.java
│ ├── ServerSentEventsSourceConnector.java
│ ├── ServerSentEventsSourceTask.java
│ ├── ServerSentEventsSourceConnectorConfig.java
│ └── ServerSentEventClient.java
├── config
├── MySourceConnector.properties
├── wikipedia-connector.json
├── kafka-connect-sse.properties
├── connect-avro-docker.properties
└── wikimedia.recentchange.schema.json
├── bin
├── debug.sh
├── submit_config.sh
└── submit_config_extract_field.sh
├── Dockerfile.connect
├── upload-to-confluent-cloud.sh
├── wikipedia-ccloud-config.json
├── recentchange.json
├── .gitignore
├── .github
└── copilot-instructions.md
├── test-local.sh
├── IMPROVEMENT_PLAN.md
├── wikipedia-edit-connect-schema.json
├── docker-compose.yml
├── README.md
├── pom.xml
├── LICENSE
└── manage-connector.sh
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM confluentinc/cp-kafka-connect:latest
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "java.configuration.updateBuildConfiguration": "automatic"
3 | }
--------------------------------------------------------------------------------
/assets/HTML5_Badge_256.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cjmatta/kafka-connect-sse/HEAD/assets/HTML5_Badge_256.png
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
1 | export KAFKA_API_KEY="op://Employee/CCloud SSE API Key/username"
2 | export KAFKA_API_SECRET="op://Employee/CCloud SSE API Key/credential"
--------------------------------------------------------------------------------
/create_connector_ccloud.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | confluent connect cluster create --config-file wikipedia-ccloud-config.json --cluster lkc-zm1p10
4 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cjmatta/kafka/connect/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cjmatta/kafka-connect-sse/HEAD/src/test/java/com/github/cjmatta/kafka/connect/.DS_Store
--------------------------------------------------------------------------------
/config/MySourceConnector.properties:
--------------------------------------------------------------------------------
1 | name=MySourceConnector
2 | tasks.max=1
3 | connector.class=com.github.cjmatta.kafka.connect.sse.ServerSentEventsSourceConnector
4 | topic=wikipedia.sse
5 | sse.uri=https://stream.wikimedia.org/v2/stream/recentchange
--------------------------------------------------------------------------------
/bin/debug.sh:
--------------------------------------------------------------------------------
1 |
2 | #!/usr/bin/env bash
3 |
4 | : ${SUSPEND:='n'}
5 |
6 | set -e
7 |
8 | mvn clean package
9 | export KAFKA_JMX_OPTS="-Xdebug -agentlib:jdwp=transport=dt_socket,server=y,suspend=${SUSPEND},address=5005"
10 |
11 | connect-standalone config/connect-avro-docker.properties config/MySourceConnector.properties
12 |
--------------------------------------------------------------------------------
/Dockerfile.connect:
--------------------------------------------------------------------------------
1 | FROM confluentinc/cp-kafka-connect:7.3.0
2 |
3 | # Copy the connector ZIP file
4 | COPY target/components/packages/cjmatta-kafka-connect-sse-1.4.zip /tmp/
5 |
6 | # Install the connector using confluent-hub (installs to /usr/share/confluent-hub-components)
7 | RUN confluent-hub install --no-prompt /tmp/cjmatta-kafka-connect-sse-1.4.zip
8 |
9 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cjmatta/kafka/connect/ServerSentEventsSourceTaskTest.java:
--------------------------------------------------------------------------------
1 | package com.github.cjmatta.kafka.connect;
2 |
3 | import org.junit.jupiter.api.AfterAll;
4 | import org.junit.jupiter.api.BeforeAll;
5 | import org.junit.jupiter.api.BeforeEach;
6 | import org.junit.jupiter.api.Test;
7 |
8 | public class ServerSentEventsSourceTaskTest {
9 | @Test
10 | public void test() {
11 | // Congrats on a passing test!
12 | }
13 | }
--------------------------------------------------------------------------------
/src/test/java/com/github/cjmatta/kafka/connect/ServerSentEventsSourceConnectorTest.java:
--------------------------------------------------------------------------------
1 | package com.github.cjmatta.kafka.connect;
2 |
3 | import org.junit.jupiter.api.AfterAll;
4 | import org.junit.jupiter.api.BeforeAll;
5 | import org.junit.jupiter.api.BeforeEach;
6 | import org.junit.jupiter.api.Test;
7 |
8 | public class ServerSentEventsSourceConnectorTest {
9 | @Test
10 | public void test() {
11 | // Congrats on a passing test!
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <configuration>
2 |   <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
3 |     <encoder>
4 |       <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
5 |     </encoder>
6 |   </appender>
7 |   <root level="debug">
8 |     <appender-ref ref="STDOUT"/>
9 |   </root>
10 | </configuration>
--------------------------------------------------------------------------------
/upload-to-confluent-cloud.sh:
--------------------------------------------------------------------------------
1 | confluent connect custom-plugin create "kafka-connect-sse" \
2 | --plugin-file ~/Documents/Projects/kafka-connect-sse/target/components/packages/cjmatta-kafka-connect-sse-1.3.zip \
3 | --connector-class com.github.cjmatta.kafka.connect.sse.ServerSentEventsSourceConnector \
4 | --description "A Kafka Connect source connector for Server Sent Events" \
5 | --documentation-link https://github.com/cjmatta/kafka-connect-sse \
6 | --connector-type Source \
7 | --sensitive-properties http.basic.auth.password \
8 | --cloud aws
--------------------------------------------------------------------------------
/config/wikipedia-connector.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "wikipedia-sse-connector",
3 | "config": {
4 | "connector.class": "com.github.cjmatta.kafka.connect.sse.ServerSentEventsSourceConnector",
5 | "tasks.max": "1",
6 | "sse.uri": "https://stream.wikimedia.org/v2/stream/recentchange",
7 | "topic": "wikipedia-changes",
8 | "http.header.User-Agent": "KafkaConnectSSE-Testing/1.4 (https://github.com/cjmatta/kafka-connect-sse)",
9 | "compression.enabled": "true",
10 | "rate.limit.requests.per.second": "10",
11 | "retry.backoff.initial.ms": "2000",
12 | "retry.backoff.max.ms": "30000",
13 | "retry.max.attempts": "10"
14 | }
15 | }
16 |
17 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cjmatta/kafka/connect/ServerSentEventsSourceTaskIT.java:
--------------------------------------------------------------------------------
1 | package com.github.cjmatta.kafka.connect;
2 |
3 | import org.junit.jupiter.api.AfterAll;
4 | import org.junit.jupiter.api.BeforeAll;
5 | import org.junit.jupiter.api.BeforeEach;
6 | import org.junit.jupiter.api.Test;
7 |
8 | /**
9 | * This test can be used for integration testing with the system you are integrating with. For example
10 | * take a look at https://github.com/jcustenborder/docker-compose-junit-extension to launch docker
11 | * containers for your testing.
12 | */
13 | public class ServerSentEventsSourceTaskIT {
14 | @Test
15 | public void test() {
16 | // Congrats on a passing test!
17 | }
18 | }
--------------------------------------------------------------------------------
/bin/submit_config.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | CONNECT_HOST=localhost
4 |
5 | if [[ $1 ]];then
6 | CONNECT_HOST=$1
7 | fi
8 |
9 | HEADER="Content-Type: application/json"
10 | DATA=$( cat << EOF
11 | {
12 | "name": "wikipedia-sse-1",
13 | "config": {
14 | "connector.class": "com.github.cjmatta.kafka.connect.sse.ServerSentEventsSourceConnector",
15 | "topic": "wikipedia.sse",
16 | "sse.uri": "https://stream.wikimedia.org/v2/stream/recentchange",
17 | "value.converter": "io.confluent.connect.avro.AvroConverter",
18 | "value.converter.schema.registry.url": "http://localhost:8081",
19 | "tasks.max": "1"
20 | }
21 | }
22 | EOF
23 | )
24 |
25 | echo "curl -X POST -H \"${HEADER}\" --data \"${DATA}\" http://${CONNECT_HOST}:8083/connectors"
26 | curl -X POST -H "${HEADER}" --data "${DATA}" http://${CONNECT_HOST}:8083/connectors
27 | echo
28 |
--------------------------------------------------------------------------------
/wikipedia-ccloud-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Wikipedia SSE",
3 | "config": {
4 | "connector.class": "com.github.cjmatta.kafka.connect.sse.ServerSentEventsSourceConnector",
5 | "kafka.auth.mode": "KAFKA_API_KEY",
6 |     "kafka.api.key": "${KAFKA_API_KEY}",
7 |     "kafka.api.secret": "${KAFKA_API_SECRET}",
8 | "tasks.max": "1",
9 | "confluent.custom.plugin.id": "ccp-q2kxd2",
10 | "confluent.connector.type": "CUSTOM",
11 | "confluent.custom.connection.endpoints": "stream.wikimedia.org:443",
12 | "confluent.custom.schema.registry.auto": "true",
13 | "key.converter": "io.confluent.connect.json.JsonSchemaConverter",
14 | "sse.uri": "https://stream.wikimedia.org/v2/stream/recentchange",
15 | "topic": "wikimedia-raw",
16 | "value.converter": "io.confluent.connect.json.JsonSchemaConverter"
17 | }
18 | }
--------------------------------------------------------------------------------
/config/kafka-connect-sse.properties:
--------------------------------------------------------------------------------
1 | name=sse-source-connector
2 | tasks.max=1
3 | connector.class=com.github.cjmatta.kafka.connect.sse.ServerSentEventsSourceConnector
4 | topic=wikipedia-sse-test123
5 | sse.uri=https://stream.wikimedia.org/v2/stream/recentchange
6 | errors.tolerance=all
7 | errors.deadletterqueue.topic.name=wikipedia.dlq
8 | #transforms=ExtractField
9 | transforms=ExtractField,parseJSON
10 | transforms.ExtractField.type=org.apache.kafka.connect.transforms.ExtractField$Value
11 | transforms.ExtractField.field=data
12 | transforms.parseJSON.type=com.github.jcustenborder.kafka.connect.json.FromJson$Value
13 | transforms.parseJSON.json.exclude.locations=#/properties/log_params,#/properties/$schema,#/$schema
14 | transforms.parseJSON.json.schema.location=Url
15 | transforms.parseJSON.json.schema.url=file:/Users/chris/Documents/Projects/kafka-connect-sse/config/wikimedia.recentchange.schema.json
16 | transforms.parseJSON.json.schema.validation.enabled=false
--------------------------------------------------------------------------------
/bin/submit_config_extract_field.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | CONNECT_HOST=localhost
4 |
5 | if [[ $1 ]];then
6 | CONNECT_HOST=$1
7 | fi
8 |
9 | HEADER="Content-Type: application/json"
10 | DATA=$( cat << EOF
11 | {
12 | "name": "wikipedia-sse-1",
13 | "config": {
14 | "connector.class": "com.github.cjmatta.kafka.connect.sse.ServerSentEventsSourceConnector",
15 | "topic": "wikipedia-raw",
16 | "sse.uri": "https://stream.wikimedia.org/v2/stream/recentchange",
17 | "transforms": "extractjson",
18 | "transforms.extractjson.type": "org.apache.kafka.connect.transforms.ExtractField$Value",
19 | "transforms.extractjson.field": "data",
20 | "value.converter": "io.confluent.connect.avro.AvroConverter",
21 | "value.converter.schema.registry.url": "http://localhost:8081",
22 | "tasks.max": "1"
23 | }
24 | }
25 | EOF
26 | )
27 |
28 | echo "curl -X POST -H \"${HEADER}\" --data \"${DATA}\" http://${CONNECT_HOST}:8083/connectors"
29 | curl -X POST -H "${HEADER}" --data "${DATA}" http://${CONNECT_HOST}:8083/connectors
30 | echo
31 |
--------------------------------------------------------------------------------
/recentchange.json:
--------------------------------------------------------------------------------
1 | {
2 | "bot": false,
3 | "comment": "/* wbsetdescription-add:1|en */ Qing dynasty person CBDB = 56896, #quickstatements; [[:toollabs:quickstatements/#/batch/8857|batch #8857]] by [[User:Tagishsimon|]]",
4 | "id": 910619087,
5 | "length": {
6 | "new": 2697,
7 | "old": 2606
8 | },
9 | "meta": {
10 | "domain": "www.wikidata.org",
11 | "dt": "2019-03-04T13:30:32+00:00",
12 | "id": "afa46939-3e81-11e9-bfb6-1866da99521a",
13 | "request_id": "a30366d2-3ff6-4402-84c6-17aae8690056",
14 | "schema_uri": "mediawiki/recentchange/2",
15 | "topic": "eqiad.mediawiki.recentchange",
16 | "uri": "https://www.wikidata.org/wiki/Q45553520",
17 | "partition": 0,
18 | "offset": 1429042371
19 | },
20 | "minor": false,
21 | "namespace": 0,
22 | "parsedcomment": " Qing dynasty person CBDB = 56896, #quickstatements; batch #8857 by User:Tagishsimon",
23 | "patrolled": true,
24 | "revision": {
25 | "new": 874176646,
26 | "old": 850556419
27 | },
28 | "server_name": "www.wikidata.org",
29 | "server_script_path": "/w",
30 | "server_url": "https://www.wikidata.org",
31 | "timestamp": 1551706232,
32 | "title": "Q45553520",
33 | "type": "edit",
34 | "user": "Tagishsimon",
35 | "wiki": "wikidatawiki"
36 | }
--------------------------------------------------------------------------------
/src/main/assembly/package.xml:
--------------------------------------------------------------------------------
1 | <assembly xmlns="http://maven.apache.org/ASSEMBLY/2.0.0"
2 |           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |           xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.0.0
4 |                               http://maven.apache.org/xsd/assembly-2.0.0.xsd">
5 |   <id>package</id>
6 |   <formats>
7 |     <format>dir</format>
8 |   </formats>
9 |   <includeBaseDirectory>false</includeBaseDirectory>
10 |   <fileSets>
11 |     <fileSet>
12 |       <directory>${project.basedir}</directory>
13 |       <outputDirectory>share/doc/${project.name}/</outputDirectory>
14 |       <includes>
15 |         <include>README*</include>
16 |         <include>LICENSE*</include>
17 |         <include>NOTICE*</include>
18 |         <include>licenses/</include>
19 |       </includes>
20 |     </fileSet>
21 |     <fileSet>
22 |       <directory>${project.basedir}/config</directory>
23 |       <outputDirectory>etc/${project.name}</outputDirectory>
24 |       <includes>
25 |         <include>*</include>
26 |       </includes>
27 |     </fileSet>
28 |   </fileSets>
29 |   <dependencySets>
30 |     <dependencySet>
31 |       <outputDirectory>share/java/${project.name}</outputDirectory>
32 |       <useProjectArtifact>true</useProjectArtifact>
33 |       <useTransitiveFiltering>true</useTransitiveFiltering>
34 |       <excludes>
35 |         <exclude>org.apache.kafka:connect-api</exclude>
36 |       </excludes>
37 |     </dependencySet>
38 |   </dependencySets>
39 | </assembly>
--------------------------------------------------------------------------------
/src/main/java/com/github/cjmatta/kafka/connect/sse/ServerSentEvent.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright © 2019 Christopher Matta (chris.matta@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | *
16 | */
17 |
18 | package com.github.cjmatta.kafka.connect.sse;
19 |
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.apache.kafka.connect.data.SchemaBuilder;
22 | import org.apache.kafka.connect.data.Struct;
23 |
24 | public class ServerSentEvent extends Struct {
25 | public static final String EVENT = "event";
26 | public static final String ID = "id";
27 | public static final String DATA = "data";
28 |
29 | final public static Schema SCHEMA = SchemaBuilder.struct()
30 | .name("com.github.cjmatta.kafka.connect.sse.ServerSentEvent")
31 | .doc("Server Sent Event Message")
32 | .field(EVENT, SchemaBuilder.string().doc("The event class of this event").required().build())
33 | .field(ID, SchemaBuilder.string().doc("The event ID").optional().build())
34 | .field(DATA, SchemaBuilder.string().doc("The event data payload").required().build());
35 |
36 | public ServerSentEvent(String event, String id, String data) {
37 | super(SCHEMA);
38 | this.put(EVENT, event)
39 | .put(ID, id)
40 | .put(DATA, data);
41 | }
42 |
43 | @Override
44 | public String toString() {
45 | return String.format("[event]=%s [id]=%s [data]=%s",
46 | this.get(EVENT),
47 | this.get(ID),
48 | this.get(DATA)
49 | );
50 | }
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/config/connect-avro-docker.properties:
--------------------------------------------------------------------------------
1 | # Sample configuration for a standalone Kafka Connect worker that uses Avro serialization and
2 | # integrates with the Schema Registry. This sample configuration assumes a local installation of
3 | # Confluent Platform with all services running on their default ports.
4 | # Bootstrap Kafka servers. If multiple servers are specified, they should be comma-separated.
5 | bootstrap.servers=kafka:9092
6 | # The converters specify the format of data in Kafka and how to translate it into Connect data.
7 | # Every Connect user will need to configure these based on the format they want their data in
8 | # when loaded from or stored into Kafka
9 | key.converter=io.confluent.connect.avro.AvroConverter
10 | key.converter.schema.registry.url=http://schema-registry:8081
11 | value.converter=io.confluent.connect.avro.AvroConverter
12 | value.converter.schema.registry.url=http://schema-registry:8081
13 | # The internal converter used for offsets and config data is configurable and must be specified,
14 | # but most users will always want to use the built-in default. Offset and config data is never
15 | # visible outside of Connect in this format.
16 | internal.key.converter=org.apache.kafka.connect.json.JsonConverter
17 | internal.value.converter=org.apache.kafka.connect.json.JsonConverter
18 | internal.key.converter.schemas.enable=false
19 | internal.value.converter.schemas.enable=false
20 | # Local storage file for offset data
21 | offset.storage.file.filename=/tmp/connect.offsets
22 |
23 | # Confluent Control Center Integration -- uncomment these lines to enable Kafka client interceptors
24 | # that will report audit data that can be displayed and analyzed in Confluent Control Center
25 | # producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor
26 | # consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor
27 |
28 | # Load our plugin from the output path.
29 | plugin.path=target/kafka-connect-target,/Users/chris/Downloads/confluent/share/confluent-hub-components
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
2 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
3 |
4 | # User-specific stuff
5 | .idea/**/workspace.xml
6 | .idea/**/tasks.xml
7 | .idea/**/usage.statistics.xml
8 | .idea/**/dictionaries
9 | .idea/**/shelf
10 |
11 | # AWS User-specific
12 | .idea/**/aws.xml
13 |
14 | # Generated files
15 | .idea/**/contentModel.xml
16 |
17 | # Sensitive or high-churn files
18 | .idea/**/dataSources/
19 | .idea/**/dataSources.ids
20 | .idea/**/dataSources.local.xml
21 | .idea/**/sqlDataSources.xml
22 | .idea/**/dynamic.xml
23 | .idea/**/uiDesigner.xml
24 | .idea/**/dbnavigator.xml
25 |
26 | # Gradle
27 | .idea/**/gradle.xml
28 | .idea/**/libraries
29 |
30 | # Gradle and Maven with auto-import
31 | # When using Gradle or Maven with auto-import, you should exclude module files,
32 | # since they will be recreated, and may cause churn. Uncomment if using
33 | # auto-import.
34 | # .idea/artifacts
35 | # .idea/compiler.xml
36 | # .idea/jarRepositories.xml
37 | # .idea/modules.xml
38 | # .idea/*.iml
39 | # .idea/modules
40 | # *.iml
41 | # *.ipr
42 |
43 | # CMake
44 | cmake-build-*/
45 |
46 | # Mongo Explorer plugin
47 | .idea/**/mongoSettings.xml
48 |
49 | # File-based project format
50 | *.iws
51 |
52 | # IntelliJ
53 | out/
54 |
55 | # mpeltonen/sbt-idea plugin
56 | .idea_modules/
57 |
58 | # JIRA plugin
59 | atlassian-ide-plugin.xml
60 |
61 | # Cursive Clojure plugin
62 | .idea/replstate.xml
63 |
64 | # SonarLint plugin
65 | .idea/sonarlint/
66 |
67 | # Crashlytics plugin (for Android Studio and IntelliJ)
68 | com_crashlytics_export_strings.xml
69 | crashlytics.properties
70 | crashlytics-build.properties
71 | fabric.properties
72 |
73 | # Editor-based Rest Client
74 | .idea/httpRequests
75 |
76 | # Android studio 3.1+ serialized cache file
77 | .idea/caches/build_file_checksums.ser
78 |
79 | # Compiled class file
80 | *.class
81 |
82 | # Log file
83 | *.log
84 |
85 | # BlueJ files
86 | *.ctxt
87 |
88 | # Mobile Tools for Java (J2ME)
89 | .mtj.tmp/
90 |
91 | # Package Files #
92 | *.jar
93 | *.war
94 | *.nar
95 | *.ear
96 | *.zip
97 | *.tar.gz
98 | *.rar
99 |
100 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
101 | hs_err_pid*
102 | replay_pid*
103 |
104 | target
105 | .idea*
106 | .settings*
107 | .project*
108 | *.code-workspace
109 |
110 | .classpath
111 |
112 | # macOS
113 | .DS_Store
114 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cjmatta/kafka/connect/sse/ServerSentEventsSourceConnector.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright © 2019 Christopher Matta (chris.matta@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | **/
16 |
17 | package com.github.cjmatta.kafka.connect.sse;
18 |
19 | import com.github.jcustenborder.kafka.connect.utils.VersionUtil;
20 | import com.github.jcustenborder.kafka.connect.utils.config.Description;
21 | import com.github.jcustenborder.kafka.connect.utils.config.TaskConfigs;
22 | import com.github.jcustenborder.kafka.connect.utils.config.Title;
23 | import org.apache.kafka.common.config.ConfigDef;
24 | import org.apache.kafka.connect.connector.Task;
25 | import org.apache.kafka.connect.source.SourceConnector;
26 | import org.slf4j.Logger;
27 | import org.slf4j.LoggerFactory;
28 |
29 | import java.util.List;
30 | import java.util.Map;
31 |
32 | @Description("Kafka Connect source connector for Server Sent Events")
33 | @Title("Kafka Connect Server Sent Events") //This is the display name that will show up in the documentation.
34 | public class ServerSentEventsSourceConnector extends SourceConnector {
35 | /*
36 | Your connector should never use System.out for logging. All of your classes should use slf4j
37 | for logging
38 | */
39 | private static Logger log = LoggerFactory.getLogger(ServerSentEventsSourceConnector.class);
40 | private ServerSentEventsSourceConnectorConfig config;
41 | Map settings;
42 |
43 | @Override
44 | public String version() {
45 | return VersionUtil.version(this.getClass());
46 | }
47 |
48 | @Override
49 | public void start(Map map) {
50 | log.info("Starting Server Sent Events Source Connector");
51 | config = new ServerSentEventsSourceConnectorConfig(map);
52 | this.settings = map;
53 | }
54 |
55 | @Override
56 |   public Class<? extends Task> taskClass() {
57 | return ServerSentEventsSourceTask.class;
58 | }
59 |
60 | @Override
61 | public List