├── settings.gradle
├── .gitignore
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── .travis.yml
├── config
│   ├── FluentdSinkConnector.properties
│   └── FluentdSourceConnector.properties
├── src
│   ├── main
│   │   ├── java
│   │   │   └── org
│   │   │       └── fluentd
│   │   │           └── kafka
│   │   │               ├── RecordConverter.java
│   │   │               ├── FluentdConnectorConfigError.java
│   │   │               ├── VersionUtil.java
│   │   │               ├── SchemalessRecordConverter.java
│   │   │               ├── RawJsonStringRecordConverter.java
│   │   │               ├── FluentdEventRecord.java
│   │   │               ├── FluentdSourceConnector.java
│   │   │               ├── FluentdSinkConnector.java
│   │   │               ├── SinkRecordConverter.java
│   │   │               ├── FluentdSinkTask.java
│   │   │               ├── SchemafulRecordConverter.java
│   │   │               ├── FluentdSinkConnectorConfig.java
│   │   │               ├── MessagePackConverter.java
│   │   │               ├── FluentdSourceTask.java
│   │   │               └── FluentdSourceConnectorConfig.java
│   │   └── assembly
│   │       └── package.xml
│   └── test
│       └── java
│           └── org
│               └── fluentd
│                   └── kafka
│                       ├── FluentdSinkConnectorConfigTest.java
│                       ├── FluentdSourceConnectorConfigTest.java
│                       ├── FluentdSinkTaskTest.java
│                       ├── FluentdSinkConnectorTest.java
│                       ├── FluentdSourceTaskTest.java
│                       ├── MessagePackConverterTest.java
│                       └── FluentdSourceConnectorTest.java
├── gradlew.bat
├── gradlew
├── README.md
└── LICENSE.txt
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'kafka-connect-fluentd'
2 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.gradle/
2 | /build/
3 | /bin/
4 | /out/
5 | /.idea/
6 | 
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fluent/kafka-connect-fluentd/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: java
2 | sudo: false
3 | jdk:
4 |   - oraclejdk8
5 | 
6 | cache:
7 |   directories:
8 |     - $HOME/.gradle
9 | 
10 | script:
11 |   - ./gradlew check
12 | 
--------------------------------------------------------------------------------
/config/FluentdSinkConnector.properties:
--------------------------------------------------------------------------------
1 | name=FluentdSinkConnector
2 | topics=dummy.log
3 | tasks.max=1
4 | connector.class=org.fluentd.kafka.FluentdSinkConnector
5 | fluentd.connect=localhost:24225
6 | 
7 | 
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Thu May 02 19:05:22 JST 2019
2 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip
3 | distributionBase=GRADLE_USER_HOME
4 | distributionPath=wrapper/dists
5 | zipStorePath=wrapper/dists
6 | zipStoreBase=GRADLE_USER_HOME
7 | 
--------------------------------------------------------------------------------
/config/FluentdSourceConnector.properties:
--------------------------------------------------------------------------------
1 | name=FluentdSourceConnector
2 | tasks.max=1
3 | connector.class=org.fluentd.kafka.FluentdSourceConnector
4 | fluentd.port=24224
5 | fluentd.bind=0.0.0.0
6 | # fluentd.transport=tls
7 | # fluentd.keystore.path=/path/to/influent-server.jks
8 | # fluentd.keystore.password=password
9 | # fluentd.key.password=password
10 | 
11 | fluentd.worker.pool.size=1
12 | fluentd.counter.enabled=true
13 | 
14 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/RecordConverter.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.apache.kafka.connect.data.Schema;
20 | 
21 | public interface RecordConverter {
22 |     FluentdEventRecord convert(Schema schema, Object value);
23 | }
24 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/FluentdConnectorConfigError.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | public class FluentdConnectorConfigError extends Throwable {
20 |     public FluentdConnectorConfigError(String message) {
21 |         // Store the message where Throwable#getMessage() can return it.
22 |         super(message);
23 |     }
24 | }
25 | 
--------------------------------------------------------------------------------
/src/test/java/org/fluentd/kafka/FluentdSinkConnectorConfigTest.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.junit.Test;
20 | 
21 | public class FluentdSinkConnectorConfigTest {
22 |     @Test
23 |     public void doc() {
24 |         System.out.println(FluentdSinkConnectorConfig.conf().toRst());
25 |     }
26 | }
27 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/VersionUtil.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | /**
20 |  * Created by jeremy on 5/3/16.
21 |  */
22 | class VersionUtil {
23 |     public static String getVersion() {
24 |         try {
25 |             String version = VersionUtil.class.getPackage().getImplementationVersion();
26 |             // getImplementationVersion() returns null when the class is not
27 |             // loaded from a packaged jar; fall back to the placeholder version
28 |             // instead of returning null.
29 |             return version != null ? version : "0.0.0.0";
30 |         } catch (Exception ex) {
31 |             return "0.0.0.0";
32 |         }
33 |     }
34 | }
35 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/SchemalessRecordConverter.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.apache.kafka.connect.data.Schema;
20 | import org.apache.kafka.connect.data.Struct;
21 | 
22 | import java.util.LinkedHashMap;
23 | import java.util.Map;
24 | 
25 | public class SchemalessRecordConverter implements RecordConverter {
26 |     @Override
27 |     public FluentdEventRecord convert(Schema schema, Object value) {
28 |         if (value instanceof Map) {
29 |             return convertMap((Map<?, ?>) value);
30 |         }
31 |         // TODO support other types
32 | 
33 |         return null;
34 |     }
35 | 
36 |     private FluentdEventRecord convertMap(Map<?, ?> map) {
37 |         Map<String, Object> record = new LinkedHashMap<>();
38 |         map.forEach((key, val) -> {
39 |             record.put(key.toString(), val);
40 |         });
41 |         return new FluentdEventRecord(null, record);
42 |     }
43 | }
44 | 
--------------------------------------------------------------------------------
/src/main/assembly/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
3 |           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |           xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2
5 |           http://maven.apache.org/xsd/assembly-1.1.2.xsd">
6 |     <id>package</id>
7 |     <formats>
8 |         <format>dir</format>
9 |     </formats>
10 |     <includeBaseDirectory>false</includeBaseDirectory>
11 |     <fileSets>
12 |         <fileSet>
13 |             <directory>${project.basedir}</directory>
14 |             <outputDirectory>share/doc/${project.name}/</outputDirectory>
15 |             <includes>
16 |                 <include>README*</include>
17 |                 <include>LICENSE*</include>
18 |                 <include>NOTICE*</include>
19 |                 <include>licenses/</include>
20 |             </includes>
21 |         </fileSet>
22 |         <fileSet>
23 |             <directory>${project.basedir}/config</directory>
24 |             <outputDirectory>etc/${project.name}</outputDirectory>
25 |             <includes>
26 |                 <include>*</include>
27 |             </includes>
28 |         </fileSet>
29 |     </fileSets>
30 |     <dependencySets>
31 |         <dependencySet>
32 |             <outputDirectory>share/java/${project.name}</outputDirectory>
33 |             <useProjectArtifact>true</useProjectArtifact>
34 |             <useTransitiveFiltering>true</useTransitiveFiltering>
35 |             <excludes>
36 |                 <exclude>org.apache.kafka:connect-api</exclude>
37 |             </excludes>
38 |         </dependencySet>
39 |     </dependencySets>
40 | </assembly>
41 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/RawJsonStringRecordConverter.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import com.fasterxml.jackson.databind.ObjectMapper;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.apache.kafka.connect.errors.DataException;
22 | 
23 | import java.io.IOException;
24 | import java.util.LinkedHashMap;
25 | import java.util.Map;
26 | 
27 | public class RawJsonStringRecordConverter implements RecordConverter {
28 |     @Override
29 |     @SuppressWarnings("unchecked")
30 |     public FluentdEventRecord convert(Schema schema, Object value) {
31 |         if (value == null) {
32 |             return null;
33 |         }
34 | 
35 |         Map<String, Object> record;
36 | 
37 |         try {
38 |             record = new ObjectMapper().readValue((String) value, LinkedHashMap.class);
39 |         } catch (IOException e) {
40 |             throw new DataException(e);
41 |         }
42 |         return new FluentdEventRecord(null, record);
43 |     }
44 | }
45 | 
--------------------------------------------------------------------------------
/src/test/java/org/fluentd/kafka/FluentdSourceConnectorConfigTest.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.junit.Test;
20 | import static org.junit.Assert.*;
21 | 
22 | import java.net.SocketAddress;
23 | import java.util.HashMap;
24 | import java.util.Map;
25 | 
26 | public class FluentdSourceConnectorConfigTest {
27 |     @Test
28 |     public void doc() {
29 |         System.out.println(FluentdSourceConnectorConfig.conf().toRst());
30 |     }
31 | 
32 |     @Test
33 |     public void defaultValues() {
34 |         Map<String, String> parsedConfig = new HashMap<>();
35 |         FluentdSourceConnectorConfig conf = new FluentdSourceConnectorConfig(parsedConfig);
36 |         assertEquals(24224, conf.getFluentdPort());
37 |         assertEquals("0.0.0.0", conf.getFluentdBind());
38 |     }
39 | 
40 |     @Test
41 |     public void configValues() {
42 |         Map<String, String> parsedConfig = new HashMap<>();
43 |         parsedConfig.put(FluentdSourceConnectorConfig.FLUENTD_PORT, "24225");
44 |         parsedConfig.put(FluentdSourceConnectorConfig.FLUENTD_BIND, "127.0.0.1");
45 |         FluentdSourceConnectorConfig conf = new FluentdSourceConnectorConfig(parsedConfig);
46 |         assertEquals(24225, conf.getFluentdPort());
47 |         assertEquals("127.0.0.1", conf.getFluentdBind());
48 |     }
49 | }
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/FluentdEventRecord.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.komamitsu.fluency.EventTime;
20 | 
21 | import java.util.Map;
22 | 
23 | public class FluentdEventRecord {
24 |     private String tag;
25 |     private EventTime eventTime;
26 |     private Long timestamp;
27 |     private Map<String, Object> data;
28 | 
29 |     public FluentdEventRecord(String tag, EventTime eventTime, Map<String, Object> data) {
30 |         this.tag = tag;
31 |         this.eventTime = eventTime;
32 |         this.data = data;
33 |     }
34 | 
35 |     public FluentdEventRecord(String tag, Long timestamp, Map<String, Object> data) {
36 |         this.tag = tag;
37 |         this.timestamp = timestamp;
38 |         this.data = data;
39 |     }
40 | 
41 |     public FluentdEventRecord(String tag, Map<String, Object> data) {
42 |         this.tag = tag;
43 |         this.timestamp = System.currentTimeMillis() / 1000;
44 |         this.data = data;
45 |     }
46 | 
47 |     public String getTag() {
48 |         return tag;
49 |     }
50 | 
51 |     public EventTime getEventTime() {
52 |         return eventTime;
53 |     }
54 | 
55 |     public Long getTimestamp() {
56 |         return timestamp;
57 |     }
58 | 
59 |     public Map<String, Object> getData() {
60 |         return data;
61 |     }
62 | 
63 |     public void setTag(String tag) {
64 |         this.tag = tag;
65 |     }
66 | 
67 |     public void setEventTime(EventTime eventTime) {
68 |         this.eventTime = eventTime;
69 |     }
70 | 
71 |     public void setTimestamp(Long timestamp) {
72 |         this.timestamp = timestamp;
73 |     }
74 | 
75 |     public String toString() {
76 |         return "FluentdEventRecord{tag=" + getTag() + " eventTime=" + getEventTime() + " data=" + getData() + "}";
77 |     }
78 | }
79 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/FluentdSourceConnector.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import java.util.List;
20 | import java.util.Map;
21 | import java.util.HashMap;
22 | import java.util.ArrayList;
23 | import java.util.concurrent.CompletableFuture;
24 | 
25 | import org.apache.kafka.common.config.ConfigDef;
26 | import org.apache.kafka.connect.connector.Task;
27 | import org.apache.kafka.connect.errors.ConnectException;
28 | import org.apache.kafka.connect.source.SourceConnector;
29 | import org.slf4j.Logger;
30 | import org.slf4j.LoggerFactory;
31 | 
32 | import influent.forward.ForwardCallback;
33 | import influent.forward.ForwardServer;
34 | import influent.EventEntry;
35 | 
36 | public class FluentdSourceConnector extends SourceConnector {
37 |     private static Logger log = LoggerFactory.getLogger(FluentdSourceConnector.class);
38 |     private Map<String, String> properties;
39 | 
40 |     @Override
41 |     public String version() {
42 |         return VersionUtil.getVersion();
43 |     }
44 | 
45 |     @Override
46 |     public void start(Map<String, String> properties) {
47 |         this.properties = properties;
48 |     }
49 | 
50 |     @Override
51 |     public Class<? extends Task> taskClass() {
52 |         return FluentdSourceTask.class;
53 |     }
54 | 
55 |     @Override
56 |     public List<Map<String, String>> taskConfigs(int taskMax) {
57 |         //TODO: Define the individual task configurations that will be executed.
58 |         List<Map<String, String>> taskConfigs = new ArrayList<>();
59 |         for (int i = 0; i < taskMax; ++i) {
60 |             taskConfigs.add(this.properties);
61 |         }
62 |         return taskConfigs;
63 |     }
64 | 
65 |     @Override
66 |     public void stop() {
67 |         //TODO: Do things that are necessary to stop your connector.
68 |     }
69 | 
70 |     @Override
71 |     public ConfigDef config() {
72 |         return FluentdSourceConnectorConfig.conf();
73 |     }
74 | }
75 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/FluentdSinkConnector.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import java.util.ArrayList;
20 | import java.util.HashMap;
21 | import java.util.List;
22 | import java.util.Map;
23 | 
24 | import org.apache.kafka.common.config.ConfigDef;
25 | import org.apache.kafka.connect.connector.Task;
26 | import org.apache.kafka.connect.errors.ConnectException;
27 | import org.apache.kafka.connect.sink.SinkConnector;
28 | import org.slf4j.Logger;
29 | import org.slf4j.LoggerFactory;
30 | 
31 | public class FluentdSinkConnector extends SinkConnector {
32 |     private static Logger log = LoggerFactory.getLogger(FluentdSinkConnector.class);
33 |     private FluentdSinkConnectorConfig config;
34 |     private Map<String, String> properties;
35 | 
36 |     @Override
37 |     public String version() {
38 |         return VersionUtil.getVersion();
39 |     }
40 | 
41 |     @Override
42 |     public void start(Map<String, String> properties) {
43 |         this.properties = properties;
44 | 
45 |         //TODO: Add things you need to do to setup your connector.
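        // Hedged sketch (added for illustration, not in the original source):
        // one plausible piece of connector-level setup here is to validate the
        // supplied properties eagerly, so that a malformed "fluentd.connect"
        // value fails the whole connector at start() time instead of failing
        // separately in every task, e.g.:
        //
        //     new FluentdSinkConnectorConfig(properties).getFluentdConnectAddresses();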
46 | 
47 |         /**
48 |          * This will be executed once per connector. This can be used to handle connector level setup.
49 |          */
50 | 
51 |     }
52 | 
53 |     @Override
54 |     public Class<? extends Task> taskClass() {
55 |         return FluentdSinkTask.class;
56 |     }
57 | 
58 |     @Override
59 |     public List<Map<String, String>> taskConfigs(int maxTasks) {
60 |         //TODO: Define the individual task configurations that will be executed.
61 |         List<Map<String, String>> taskConfigs = new ArrayList<>();
62 |         for (int i = 0; i < maxTasks; ++i) {
63 |             taskConfigs.add(this.properties);
64 |         }
65 |         return taskConfigs;
66 |     }
67 | 
68 |     @Override
69 |     public void stop() {
70 |         //TODO: Do things that are necessary to stop your connector.
71 |     }
72 | 
73 |     @Override
74 |     public ConfigDef config() {
75 |         return FluentdSinkConnectorConfig.conf();
76 |     }
77 | }
78 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/SinkRecordConverter.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.apache.kafka.connect.data.Schema;
20 | import org.apache.kafka.connect.data.Struct;
21 | import org.apache.kafka.connect.errors.DataException;
22 | import org.apache.kafka.connect.sink.SinkRecord;
23 | import org.komamitsu.fluency.EventTime;
24 | import org.slf4j.Logger;
25 | import org.slf4j.LoggerFactory;
26 | 
27 | import java.util.Map;
28 | 
29 | public class SinkRecordConverter {
30 |     private static Logger logger = LoggerFactory.getLogger(SinkRecordConverter.class);
31 | 
32 |     private final FluentdSinkConnectorConfig config;
33 | 
34 |     private RecordConverter schemafulRecordConverter = new SchemafulRecordConverter();
35 |     private RecordConverter schemalessRecordConverter = new SchemalessRecordConverter();
36 |     private RecordConverter rawJsonStringRecordConverter = new RawJsonStringRecordConverter();
37 | 
38 |     public SinkRecordConverter(final FluentdSinkConnectorConfig config) {
39 |         this.config = config;
40 |     }
41 | 
42 |     public FluentdEventRecord convert(SinkRecord sinkRecord) {
43 |         logger.debug("SinkRecord: {}", sinkRecord);
44 |         FluentdEventRecord eventRecord = null;
45 | 
46 |         if (sinkRecord.value() != null) {
47 |             eventRecord = getRecordConverter(sinkRecord.valueSchema(), sinkRecord.value())
48 |                     .convert(sinkRecord.valueSchema(), sinkRecord.value());
49 |         }
50 |         if (eventRecord == null) {
51 |             // Null values (e.g. tombstone records) yield no Fluentd event;
52 |             // callers must skip a null result instead of hitting an NPE here.
53 |             return null;
54 |         }
55 |         eventRecord.setTag(sinkRecord.topic());
56 | 
57 |         if (sinkRecord.timestamp() != null) {
58 |             // Records without a timestamp keep the current time already set by
59 |             // the FluentdEventRecord constructor.
60 |             if (config.getFluentdClientTimestampInteger()) {
61 |                 eventRecord.setTimestamp(sinkRecord.timestamp() / 1000);
62 |             } else {
63 |                 eventRecord.setEventTime(EventTime.fromEpochMilli(sinkRecord.timestamp()));
64 |             }
65 |         }
66 | 
67 |         return eventRecord;
68 |     }
69 | 
70 |     private RecordConverter getRecordConverter(Schema schema, Object data) {
71 |         if (schema != null && data instanceof Struct) {
72 |             logger.debug("Schemaful converter");
73 |             return schemafulRecordConverter;
74 |         }
75 | 
76 |         if (data instanceof Map) {
77 |             logger.debug("Schemaless converter");
78 |             return schemalessRecordConverter;
79 |         }
80 | 
81 |         if (data instanceof String) {
82 |             logger.debug("Raw converter");
83 |             return rawJsonStringRecordConverter;
84 |         }
85 | 
86 |         throw new DataException("No converter found due to unexpected object type " + data.getClass().getName());
87 |     }
88 | }
89 | 
--------------------------------------------------------------------------------
/src/test/java/org/fluentd/kafka/FluentdSinkTaskTest.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import influent.EventEntry;
20 | import influent.forward.ForwardCallback;
21 | import influent.forward.ForwardServer;
22 | import org.apache.kafka.common.record.TimestampType;
23 | import org.apache.kafka.connect.data.Schema;
24 | import org.apache.kafka.connect.sink.SinkRecord;
25 | import org.apache.kafka.connect.sink.SinkTaskContext;
26 | import org.junit.After;
27 | import org.junit.Assert;
28 | import org.junit.Before;
29 | import org.junit.Test;
30 | import org.powermock.api.easymock.PowerMock;
31 | 
32 | import java.util.Collections;
33 | import java.util.HashMap;
34 | import java.util.Map;
35 | import java.util.concurrent.CompletableFuture;
36 | import java.util.concurrent.ConcurrentLinkedDeque;
37 | import java.util.concurrent.TimeUnit;
38 | 
39 | public class FluentdSinkTaskTest {
40 |     private ForwardServer server;
41 |     private final ConcurrentLinkedDeque<EventEntry> queue = new ConcurrentLinkedDeque<>();
42 | 
43 |     @Before
44 |     public void setup() {
45 |         ForwardCallback callback = ForwardCallback.of(stream -> {
46 |             queue.addAll(stream.getEntries());
47 |             return CompletableFuture.completedFuture(null);
48 |         });
49 |         ForwardServer.Builder builder = new ForwardServer.Builder(callback);
50 |         server = builder.build();
51 |         server.start();
52 |     }
53 | 
54 |     @After
55 |     public void teardown() {
56 |         queue.clear();
57 |         server.shutdown();
58 |     }
59 | 
60 |     @Test
61 |     public void test() throws InterruptedException {
62 |         Map<String, String> sinkProperties = new HashMap<>();
63 |         FluentdSinkTask task = new FluentdSinkTask();
64 |         task.initialize(PowerMock.createMock(SinkTaskContext.class));
65 |         //sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_MAX_BUFFER_BYTES, "100000");
66 |         task.start(sinkProperties);
67 |         final String topic = "testtopic";
68 |         final String value = "{\"message\":\"This is a test message\"}";
69 |         SinkRecord sinkRecord = new SinkRecord(
70 |                 topic,
71 |                 1,
72 |                 Schema.STRING_SCHEMA,
73 |                 topic,
74 |                 null,
75 |                 value,
76 |                 0,
77 |                 System.currentTimeMillis(),
78 |                 TimestampType.NO_TIMESTAMP_TYPE
79 |         );
80 |         task.put(Collections.singleton(sinkRecord));
81 |         TimeUnit.SECONDS.sleep(1);
82 |         EventEntry eventEntry = queue.poll();
83 |         Assert.assertNotNull(eventEntry);
84 |         Assert.assertEquals(value, eventEntry.getRecord().asMapValue().toJson());
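        // Hedged sketch (added for illustration, not in the original test): the
        // one-second sleep above is a timing heuristic and can flake on slow
        // machines. A sturdier variant polls the queue with a deadline before
        // calling poll(), e.g.:
        //
        //     long deadline = System.currentTimeMillis() + 5000;
        //     while (queue.isEmpty() && System.currentTimeMillis() < deadline) {
        //         TimeUnit.MILLISECONDS.sleep(50);
        //     }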
} 87 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem http://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 33 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 34 | 35 | @rem Find java.exe 36 | if defined JAVA_HOME goto findJavaFromJavaHome 37 | 38 | set JAVA_EXE=java.exe 39 | %JAVA_EXE% -version >NUL 2>&1 40 | if "%ERRORLEVEL%" == "0" goto init 41 | 42 | echo. 43 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 44 | echo. 45 | echo Please set the JAVA_HOME variable in your environment to match the 46 | echo location of your Java installation. 47 | 48 | goto fail 49 | 50 | :findJavaFromJavaHome 51 | set JAVA_HOME=%JAVA_HOME:"=% 52 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 53 | 54 | if exist "%JAVA_EXE%" goto init 55 | 56 | echo. 57 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 58 | echo. 59 | echo Please set the JAVA_HOME variable in your environment to match the 60 | echo location of your Java installation. 61 | 62 | goto fail 63 | 64 | :init 65 | @rem Get command-line arguments, handling Windows variants 66 | 67 | if not "%OS%" == "Windows_NT" goto win9xME_args 68 | 69 | :win9xME_args 70 | @rem Slurp the command line arguments. 71 | set CMD_LINE_ARGS= 72 | set _SKIP=2 73 | 74 | :win9xME_args_slurp 75 | if "x%~1" == "x" goto execute 76 | 77 | set CMD_LINE_ARGS=%* 78 | 79 | :execute 80 | @rem Setup the command line 81 | 82 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 83 | 84 | @rem Execute Gradle 85 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 86 | 87 | :end 88 | @rem End local scope for the variables with windows NT shell 89 | if "%ERRORLEVEL%"=="0" goto mainEnd 90 | 91 | :fail 92 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 93 | rem the _cmd.exe /c_ return code! 
94 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
95 | exit /b 1
96 | 
97 | :mainEnd
98 | if "%OS%"=="Windows_NT" endlocal
99 | 
100 | :omega
101 | 
--------------------------------------------------------------------------------
/src/test/java/org/fluentd/kafka/FluentdSinkConnectorTest.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.apache.kafka.connect.connector.ConnectorContext;
20 | import org.junit.Assert;
21 | import org.junit.Before;
22 | import org.junit.Test;
23 | import org.powermock.api.easymock.PowerMock;
24 | 
25 | import java.util.HashMap;
26 | import java.util.List;
27 | import java.util.Map;
28 | 
29 | public class FluentdSinkConnectorTest {
30 |     private FluentdSinkConnector connector;
31 |     private ConnectorContext context;
32 | 
33 |     @Before
34 |     public void setup() {
35 |         connector = new FluentdSinkConnector();
36 |         context = PowerMock.createMock(ConnectorContext.class);
37 |         connector.initialize(context);
38 |     }
39 | 
40 |     @Test
41 |     public void testTaskConfig() {
42 |         PowerMock.replayAll();
43 |         connector.start(buildSinkProperties());
44 |         List<Map<String, String>> taskConfigs = connector.taskConfigs(1);
45 |         Assert.assertEquals("localhost:24225", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CONNECT));
46 |         Assert.assertEquals("100", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_MAX_BUFFER_BYTES));
47 |         Assert.assertEquals("10", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_BUFFER_CHUNK_INITIAL_BYTES));
48 |         Assert.assertEquals("10", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_BUFFER_CHUNK_RETENTION_BYTES));
49 |         Assert.assertEquals("500", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_FLUSH_INTERVAL));
50 |         Assert.assertEquals("true", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_ACK_RESPONSE_MODE));
51 |         Assert.assertEquals("/tmp/fluency", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_FILE_BACKUP_DIR));
52 |         Assert.assertEquals("120", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_WAIT_UNTIL_BUFFER_FLUSHED));
53 |         Assert.assertEquals("120", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_WAIT_UNTIL_FLUSHER_TERMINATED));
54 |         Assert.assertEquals("true", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_JVM_HEAP_BUFFER_MODE));
55 |         Assert.assertEquals("true", taskConfigs.get(0).get(FluentdSinkConnectorConfig.FLUENTD_CLIENT_TIMESTAMP_INTEGER));
56 |         PowerMock.verifyAll();
57 |     }
58 | 
59 |     private Map<String, String> buildSinkProperties() {
60 |         Map<String, String> sinkProperties = new HashMap<>();
61 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CONNECT, "localhost:24225");
62 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_MAX_BUFFER_BYTES, "100");
63 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_BUFFER_CHUNK_INITIAL_BYTES, "10");
64 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_BUFFER_CHUNK_RETENTION_BYTES, "10");
65 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_FLUSH_INTERVAL, "500");
66 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_ACK_RESPONSE_MODE, "true");
67 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_FILE_BACKUP_DIR, "/tmp/fluency");
68 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_WAIT_UNTIL_BUFFER_FLUSHED, "120");
69 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_WAIT_UNTIL_FLUSHER_TERMINATED, "120");
70 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_JVM_HEAP_BUFFER_MODE, "true");
71 |         sinkProperties.put(FluentdSinkConnectorConfig.FLUENTD_CLIENT_TIMESTAMP_INTEGER, "true");
72 |         return sinkProperties;
73 |     }
74 | }
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/FluentdSinkTask.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
20 | import org.apache.kafka.common.TopicPartition;
21 | import org.apache.kafka.connect.errors.ConnectException;
22 | import org.apache.kafka.connect.sink.SinkRecord;
23 | import org.apache.kafka.connect.sink.SinkTask;
24 | import org.komamitsu.fluency.Fluency;
25 | import org.slf4j.Logger;
26 | import org.slf4j.LoggerFactory;
27 | 
28 | import java.io.IOException;
29 | import java.util.Collection;
30 | import java.util.Map;
31 | 
32 | public class FluentdSinkTask extends SinkTask {
33 |     private static final Logger log = LoggerFactory.getLogger(FluentdSinkTask.class);
34 |     private Fluency fluency;
35 |     private SinkRecordConverter converter;
36 | 
37 |     @Override
38 |     public String version() {
39 |         return VersionUtil.getVersion();
40 |     }
41 | 
42 |     @Override
43 |     public void start(Map<String, String> properties) {
44 |         //TODO: Create resources like database or api connections here.
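        // Hedged note (added, not in the original source): Fluency is the
        // Fluentd client library this sink writes through. The fluentd.client.*
        // settings built below default to null, and unset values are assumed to
        // fall back to Fluency's own defaults; only properties explicitly set
        // in the worker configuration override them.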
45 |         FluentdSinkConnectorConfig config = new FluentdSinkConnectorConfig(properties);
46 |         Fluency.Config fluencyConfig = new Fluency.Config()
47 |                 .setMaxBufferSize(config.getFluentdClientMaxBufferSize())
48 |                 .setBufferChunkInitialSize(config.getFluentdClientBufferChunkInitialSize())
49 |                 .setBufferChunkRetentionSize(config.getFluentdClientBufferChunkRetentionSize())
50 |                 .setFlushIntervalMillis(config.getFluentdClientFlushInterval())
51 |                 .setAckResponseMode(config.getFluentdClientAckResponseMode())
52 |                 .setFileBackupDir(config.getFluentdClientFileBackupDir())
53 |                 .setWaitUntilBufferFlushed(config.getFluentdClientWaitUntilBufferFlushed())
54 |                 .setWaitUntilFlusherTerminated(config.getFluentdClientWaitUntilFlusherTerminated())
55 |                 .setJvmHeapBufferMode(config.getFluentdClientJvmHeapBufferMode());
56 |         try {
57 |             fluency = Fluency.defaultFluency(config.getFluentdConnectAddresses(), fluencyConfig);
58 |         } catch (IOException e) {
59 |             throw new ConnectException(e);
60 |         }
61 |         converter = new SinkRecordConverter(config);
62 |     }
63 | 
64 |     @Override
65 |     public void put(Collection<SinkRecord> collection) {
66 |         collection.forEach(sinkRecord -> {
67 |             log.debug("key: {}, value: {}, class: {}, schema: {}",
68 |                     sinkRecord.key(),
69 |                     sinkRecord.value(),
70 |                     sinkRecord.value() == null ? null : sinkRecord.value().getClass().getCanonicalName(),
71 |                     sinkRecord.valueSchema());
72 |             // TODO fluency.emit(sinkRecord.key(), record);
73 |             FluentdEventRecord eventRecord = converter.convert(sinkRecord);
74 |             if (eventRecord == null) {
75 |                 // Tombstone or otherwise empty record: nothing to forward.
76 |                 return;
77 |             }
78 |             log.info("{}", eventRecord);
79 |             try {
80 |                 if (eventRecord.getEventTime() != null) {
81 |                     fluency.emit(eventRecord.getTag(), eventRecord.getEventTime(), eventRecord.getData());
82 |                 } else if (eventRecord.getTimestamp() != null) {
83 |                     fluency.emit(eventRecord.getTag(), eventRecord.getTimestamp(), eventRecord.getData());
84 |                 } else {
85 |                     fluency.emit(eventRecord.getTag(), eventRecord.getData());
86 |                 }
87 |             } catch (IOException e) {
88 |                 // Propagate the failure to the framework instead of silently
89 |                 // printing a stack trace and dropping the record.
90 |                 throw new ConnectException(e);
91 |             }
92 |         });
93 |     }
94 | 
95 |     @Override
96 |     public void flush(Map<TopicPartition, OffsetAndMetadata> map) {
97 |         try {
98 |             fluency.flush();
99 |         } catch (IOException e) {
100 |             throw new ConnectException(e);
101 |         }
102 |     }
103 | 
104 |     @Override
105 |     public void stop() {
106 |         try {
107 |             fluency.waitUntilAllBufferFlushed(3);
108 |         } catch (InterruptedException e) {
109 |             throw new ConnectException(e);
110 |         }
111 |     }
112 | 
113 | }
114 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/SchemafulRecordConverter.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.apache.kafka.connect.data.Date;
20 | import org.apache.kafka.connect.data.Decimal;
21 | import org.apache.kafka.connect.data.Field;
22 | import org.apache.kafka.connect.data.Schema;
23 | import org.apache.kafka.connect.data.Struct;
24 | import org.apache.kafka.connect.data.Time;
25 | import org.apache.kafka.connect.data.Timestamp;
26 | import org.apache.kafka.connect.errors.DataException;
27 | 
28 | import java.util.ArrayList;
29 | import java.util.Arrays;
30 | import java.util.HashSet;
31 | import java.util.LinkedHashMap;
32 | import java.util.List;
33 | import java.util.Map;
34 | import java.util.Set;
35 | 
36 | public class SchemafulRecordConverter implements RecordConverter {
37 | 
38 |     private final Set<String> LOGICAL_TYPE_NAMES = new HashSet<>(
39 |             Arrays.asList(
40 |                     Date.LOGICAL_NAME,
41 |                     Decimal.LOGICAL_NAME,
42 |                     Time.LOGICAL_NAME,
43 |                     Timestamp.LOGICAL_NAME)
44 |     );
45 | 
46 |     @Override
47 |     public FluentdEventRecord convert(Schema schema, Object value) {
48 |         Map<String, Object> map = toMap(schema, (Struct) value);
49 |         Map<String, Object> record = new LinkedHashMap<>();
50 |         map.forEach((key, val) -> record.put(key, val));
51 | 
52 |         return new FluentdEventRecord(null, record);
53 |     }
54 | 
55 |     private Map<String, Object> toMap(Schema schema, Object value) {
56 |         Map<String, Object> map = new LinkedHashMap<>();
57 |         schema.fields().forEach(field -> processField(map, (Struct) value, field));
58 |         return map;
59 |     }
60 | 
61 |     private void processField(Map<String, Object> map, Struct struct, Field field) {
62 |         if (isSupportedLogicalType(field.schema())) {
63 |             map.put(field.name(), struct.get(field));
64 |             // Logical types are already converted by struct.get(); do not fall
65 |             // through to the raw-type handling below.
66 |             return;
67 |         }
68 | 
69 |         switch (field.schema().type()) {
70 |             case BOOLEAN:
71 |             case FLOAT32:
72 |             case FLOAT64:
73 |             case INT8:
74 |             case INT16:
75 |             case INT32:
76 |             case INT64:
77 |             case BYTES:
78 |             case STRING: {
79 |                 map.put(field.name(), struct.get(field));
80 |                 break;
81 |             }
82 |             case ARRAY: {
83 |                 List<Object> list = new ArrayList<>();
84 |                 struct.getArray(field.name()).forEach(element -> {
85 |                     list.add(processValue(field.schema().valueSchema(), element));
86 |                 });
87 |                 map.put(field.name(), list);
88 |                 break;
89 |             }
90 |             case MAP: {
91 |                 Map<Object, Object> innerMap = new LinkedHashMap<>();
92 |                 struct.getMap(field.name()).forEach((key, value) -> {
93 |                     innerMap.put(processValue(field.schema().keySchema(), key),
94 |                             processValue(field.schema().valueSchema(), value));
95 |                 });
96 |                 map.put(field.name(), innerMap);
97 |                 break;
98 |             }
99 |             case STRUCT: {
100 |                 Struct s = struct.getStruct(field.name());
101 |                 Map<String, Object> innerMap = new LinkedHashMap<>();
102 |                 s.schema().fields().forEach(f -> processField(innerMap, s, f));
103 |                 // Attach the converted nested struct to the parent map.
104 |                 map.put(field.name(), innerMap);
105 |                 break;
106 |             }
107 |             default:
108 |                 throw new DataException("Unknown schema type: " + field.schema().type());
109 |         }
110 |     }
111 | 
112 |     private Object processValue(Schema schema, Object value) {
113 |         switch (schema.type()) {
114 |             case BOOLEAN:
115 |             case FLOAT32:
116 |             case FLOAT64:
117 |             case INT8:
118 |             case INT16:
119 |             case INT32:
120 |             case INT64:
121 |             case BYTES:
122 |             case STRING:
123 |                 return value;
124 |             case MAP:
125 |             case ARRAY:
126 |             case STRUCT:
127 |                 // Nested containers inside arrays/maps are not supported yet;
128 |                 // fail fast instead of returning the exception as a value.
129 |                 throw new DataException("Unsupported schema type: " + schema.type());
130 |             default:
131 |                 throw new DataException("Unknown schema type: " + schema.type());
132 |         }
133 |     }
134 | 
135 |     private boolean isSupportedLogicalType(Schema schema) {
136 |         if (schema == null) {
137 |             return false;
138 |         }
139 |         if (schema.name() == null) {
140 |             return false;
141 |         }
142 | 
143 |         return LOGICAL_TYPE_NAMES.contains(schema.name());
144 |     }
145 | }
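
A hedged usage sketch (added for illustration; not part of the repository). It shows how the SchemafulRecordConverter above flattens a schemaful Connect Struct into the plain Map that FluentdSinkTask hands to Fluency. The class name SchemafulConverterExample is invented for the example; it sits in the connector's own package so the package-level classes resolve.

package org.fluentd.kafka;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class SchemafulConverterExample {
    public static void main(String[] args) {
        // A schema with two primitive fields, mirroring the types handled in
        // processField() above.
        Schema schema = SchemaBuilder.struct()
                .field("message", Schema.STRING_SCHEMA)
                .field("count", Schema.INT64_SCHEMA)
                .build();
        Struct value = new Struct(schema)
                .put("message", "hello")
                .put("count", 42L);

        FluentdEventRecord record = new SchemafulRecordConverter().convert(schema, value);
        // Prints {message=hello, count=42}; field order is preserved because
        // the converter collects fields into a LinkedHashMap.
        System.out.println(record.getData());
    }
}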
--------------------------------------------------------------------------------
/src/test/java/org/fluentd/kafka/FluentdSourceTaskTest.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.apache.kafka.connect.source.SourceRecord;
20 | import org.junit.After;
21 | import org.junit.Before;
22 | import org.junit.Test;
23 | import org.komamitsu.fluency.Fluency;
24 | 
25 | import java.io.IOException;
26 | import java.util.ArrayList;
27 | import java.util.HashMap;
28 | import java.util.List;
29 | import java.util.Map;
30 | import java.util.concurrent.TimeUnit;
31 | 
32 | import static org.hamcrest.MatcherAssert.assertThat;
33 | import static org.hamcrest.core.IsCollectionContaining.hasItems;
34 | import static org.junit.Assert.assertEquals;
35 | import static org.junit.Assert.assertNull;
36 | 
37 | public class FluentdSourceTaskTest {
38 |     private FluentdSourceTask task;
39 |     private Fluency fluency;
40 | 
41 |     @Before
42 |     public void setUp() throws IOException {
43 |         task = new FluentdSourceTask();
44 |         fluency = Fluency.defaultFluency();
45 |     }
46 | 
47 |     @After
48 |     public void tearDown() throws InterruptedException {
49 |         task.stop();
50 |         Thread.sleep(500);
51 |         assert(!task.isReporterRunning());
52 |     }
53 | 
54 |     @Test
55 |     public void oneRecord() throws InterruptedException, IOException {
56 |         Map<String, String> config = new HashMap<>();
57 |         config.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "false");
58 |         config.put(FluentdSourceConnectorConfig.FLUENTD_COUNTER_ENABLED, "true");
59 |         task.start(config);
60 |         while (!task.isReporterRunning()) {
61 |             TimeUnit.MILLISECONDS.sleep(10);
62 |         }
63 |         assert(task.isReporterRunning());
64 |         Map<String, Object> record = new HashMap<>();
65 |         record.put("message", "This is a test message");
66 |         fluency.emit("test", record);
67 |         Thread.sleep(1000);
68 |         List<SourceRecord> sourceRecords = task.poll();
69 |         assertEquals(1, sourceRecords.size());
70 |         SourceRecord sourceRecord = sourceRecords.get(0);
71 |         assertNull(sourceRecord.key());
72 |         assertNull(sourceRecord.valueSchema());
73 |         assertEquals(record, sourceRecord.value());
74 |     }
75 | 
76 |     @Test
77 |     public void oneRecordWithNullValue() throws InterruptedException, IOException {
78 |         Map<String, String> config = new HashMap<>();
79 |         config.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "false");
80 |         task.start(config);
81 |         assert(!task.isReporterRunning());
82 |         Map<String, Object> record = new HashMap<>();
83 |         record.put("message", null);
84 |         fluency.emit("test", record);
85 |         Thread.sleep(1000);
86 |         List<SourceRecord> sourceRecords = task.poll();
87 |         assertEquals(1, sourceRecords.size());
88 |         SourceRecord sourceRecord = sourceRecords.get(0);
89 |         assertNull(sourceRecord.key());
90 |         assertNull(sourceRecord.valueSchema());
91 |         assertEquals(record, sourceRecord.value());
92 |     }
93 | 
94 |     @Test
95 |     public void nestedRecord() throws IOException, InterruptedException {
96 |         Map<String, String> config = new HashMap<>();
97 |         config.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "false");
98 |         task.start(config);
99 |         Map<String, Object> version = new HashMap<>();
100 |         version.put("stable", 0.12);
101 |         version.put("unstable", 0.14);
102 |         List<String> versions = new ArrayList<>();
103 |         versions.add("v0.12");
104 |         versions.add("v0.14");
105 |         Map<String, Object> record = new HashMap<>();
106 |         record.put("versions", versions);
107 |         record.put("version", version);
108 |         // {"versions": ["v0.12", "v0.14"], "version": {"stable": 0.12, "unstable": 0.14}}
109 |         fluency.emit("test", record);
110 |         Thread.sleep(1000);
111 |         List<SourceRecord> sourceRecords = task.poll();
112 |         assertEquals(1, sourceRecords.size());
113 |         SourceRecord sourceRecord = sourceRecords.get(0);
114 |         assertNull(sourceRecord.key());
115 |         Map<?, ?> value = (Map<?, ?>) sourceRecord.value();
116 |         assertThat((List<String>) value.get("versions"), hasItems("v0.12", "v0.14"));
117 |         assertEquals(value.get("version"), version);
118 |     }
119 | 
120 |     @Test
121 |     public void multipleRecords() throws InterruptedException, IOException {
122 |         Map<String, String> config = new HashMap<>();
123 |         config.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "false");
124 |         task.start(config);
125 |         Map<String, Object> record1 = new HashMap<>();
126 |         record1.put("message", "This is a test message1");
127 |         Map<String, Object> record2 = new HashMap<>();
128 |         record2.put("message", "This is a test message2");
129 |         fluency.emit("test", record1);
130 |         fluency.emit("test", record2);
131 |         Thread.sleep(1000);
132 |         List<SourceRecord> sourceRecords = task.poll();
133 |         assertEquals(2, sourceRecords.size());
134 |         assertNull(sourceRecords.get(0).valueSchema());
135 |         Map<?, ?> value1 = (Map<?, ?>) sourceRecords.get(0).value();
136 |         assertEquals("This is a test message1", value1.get("message"));
137 |         assertNull(sourceRecords.get(1).valueSchema());
138 |         Map<?, ?> value2 = (Map<?, ?>) sourceRecords.get(1).value();
139 |         assertEquals("This is a test message2", value2.get("message"));
140 |     }
141 | }
142 | 
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | #      http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 | 
19 | ##############################################################################
20 | ##
21 | ##  Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 | 
25 | # Attempt to set APP_HOME
26 | # Resolve links: $0 may be a link
27 | PRG="$0"
28 | # Need this for relative symlinks.
29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | # Determine the Java command to use to start the JVM. 86 | if [ -n "$JAVA_HOME" ] ; then 87 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 88 | # IBM's JDK on AIX uses strange locations for the executables 89 | JAVACMD="$JAVA_HOME/jre/sh/java" 90 | else 91 | JAVACMD="$JAVA_HOME/bin/java" 92 | fi 93 | if [ ! -x "$JAVACMD" ] ; then 94 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 95 | 96 | Please set the JAVA_HOME variable in your environment to match the 97 | location of your Java installation." 98 | fi 99 | else 100 | JAVACMD="java" 101 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 102 | 103 | Please set the JAVA_HOME variable in your environment to match the 104 | location of your Java installation." 105 | fi 106 | 107 | # Increase the maximum file descriptors if we can. 108 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 109 | MAX_FD_LIMIT=`ulimit -H -n` 110 | if [ $? -eq 0 ] ; then 111 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 112 | MAX_FD="$MAX_FD_LIMIT" 113 | fi 114 | ulimit -n $MAX_FD 115 | if [ $? 
-ne 0 ] ; then 116 | warn "Could not set maximum file descriptor limit: $MAX_FD" 117 | fi 118 | else 119 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 120 | fi 121 | fi 122 | 123 | # For Darwin, add options to specify how the application appears in the dock 124 | if $darwin; then 125 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 126 | fi 127 | 128 | # For Cygwin, switch paths to Windows format before running java 129 | if $cygwin ; then 130 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 131 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 132 | JAVACMD=`cygpath --unix "$JAVACMD"` 133 | 134 | # We build the pattern for arguments to be converted via cygpath 135 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 136 | SEP="" 137 | for dir in $ROOTDIRSRAW ; do 138 | ROOTDIRS="$ROOTDIRS$SEP$dir" 139 | SEP="|" 140 | done 141 | OURCYGPATTERN="(^($ROOTDIRS))" 142 | # Add a user-defined pattern to the cygpath arguments 143 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 144 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 145 | fi 146 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 147 | i=0 148 | for arg in "$@" ; do 149 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 150 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 151 | 152 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 153 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 154 | else 155 | eval `echo args$i`="\"$arg\"" 156 | fi 157 | i=$((i+1)) 158 | done 159 | case $i in 160 | (0) set -- ;; 161 | (1) set -- "$args0" ;; 162 | (2) set -- "$args0" "$args1" ;; 163 | (3) set -- "$args0" "$args1" "$args2" ;; 164 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 165 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 166 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 167 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 168 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 169 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 170 | esac 171 | fi 172 | 173 | # Escape application args 174 | save () { 175 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 176 | echo " " 177 | } 178 | APP_ARGS=$(save "$@") 179 | 180 | # Collect all arguments for the java command, following the shell quoting and substitution rules 181 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 182 | 183 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 184 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 185 | cd "$(dirname "$0")" 186 | fi 187 | 188 | exec "$JAVACMD" "$@" 189 | -------------------------------------------------------------------------------- /src/main/java/org/fluentd/kafka/FluentdSinkConnectorConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2017 ClearCode Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  **/
16 | 
17 | package org.fluentd.kafka;
18 | 
19 | import org.apache.kafka.common.config.AbstractConfig;
20 | import org.apache.kafka.common.config.ConfigDef;
21 | import org.apache.kafka.common.config.ConfigDef.Type;
22 | import org.apache.kafka.common.config.ConfigDef.Importance;
23 | 
24 | import java.net.InetSocketAddress;
25 | import java.util.ArrayList;
26 | import java.util.List;
27 | import java.util.Map;
28 | 
29 | 
30 | public class FluentdSinkConnectorConfig extends AbstractConfig {
31 | 
32 |     static final String FLUENTD_CONNECT = "fluentd.connect";
33 |     static final String FLUENTD_CLIENT_MAX_BUFFER_BYTES = "fluentd.client.max.buffer.bytes";
34 |     static final String FLUENTD_CLIENT_BUFFER_CHUNK_INITIAL_BYTES = "fluentd.client.buffer.chunk.initial.bytes";
35 |     static final String FLUENTD_CLIENT_BUFFER_CHUNK_RETENTION_BYTES = "fluentd.client.buffer.chunk.retention.bytes";
36 |     static final String FLUENTD_CLIENT_FLUSH_INTERVAL = "fluentd.client.flush.interval";
37 |     static final String FLUENTD_CLIENT_ACK_RESPONSE_MODE = "fluentd.client.ack.response.mode";
38 |     static final String FLUENTD_CLIENT_FILE_BACKUP_DIR = "fluentd.client.file.backup.dir";
39 |     static final String FLUENTD_CLIENT_WAIT_UNTIL_BUFFER_FLUSHED = "fluentd.client.wait.until.buffer.flushed";
40 |     static final String FLUENTD_CLIENT_WAIT_UNTIL_FLUSHER_TERMINATED = "fluentd.client.wait.until.flusher.terminated";
41 |     static final String FLUENTD_CLIENT_JVM_HEAP_BUFFER_MODE = "fluentd.client.jvm.heap.buffer.mode";
42 |     // static final String FLUENTD_CLIENT_SENDER_ERROR_HANDLER = "fluentd.client.sender.error.handler";
43 |     // static final String FLUENTD_CLIENT_TCP_HEART_BEAT = "fluentd.client.tcp.heart.beat";
44 |     static final String FLUENTD_CLIENT_TIMESTAMP_INTEGER = "fluentd.client.timestamp.integer";
45 | 
46 |     public FluentdSinkConnectorConfig(ConfigDef config, Map<String, String> parsedConfig) {
47 |         super(config, parsedConfig);
48 |     }
49 | 
50 |     public FluentdSinkConnectorConfig(Map<String, String> parsedConfig) {
51 |         this(conf(), parsedConfig);
52 |     }
53 | 
54 |     public static ConfigDef conf() {
55 |         return new ConfigDef()
56 |                 .define(FLUENTD_CONNECT, Type.STRING, "localhost:24224", Importance.HIGH,
57 |                         "Connection specs for Fluentd")
58 |                 .define(FLUENTD_CLIENT_MAX_BUFFER_BYTES, Type.LONG, null, Importance.MEDIUM,
59 |                         "Max buffer size.")
60 |                 .define(FLUENTD_CLIENT_BUFFER_CHUNK_INITIAL_BYTES, Type.INT, null, Importance.MEDIUM,
61 |                         "Initial size of buffer chunk. Default: 1048576 (1MiB)")
62 |                 .define(FLUENTD_CLIENT_BUFFER_CHUNK_RETENTION_BYTES, Type.INT, null, Importance.MEDIUM,
63 |                         "Retention size of buffer chunk. Default: 4194304 (4MiB)")
64 |                 .define(FLUENTD_CLIENT_FLUSH_INTERVAL, Type.INT, null, Importance.MEDIUM,
65 |                         "Buffer flush interval in msec. Default: 600 (msec)")
66 |                 .define(FLUENTD_CLIENT_ACK_RESPONSE_MODE, Type.BOOLEAN, false, Importance.MEDIUM,
67 |                         "Enable/Disable ack response mode. Default: false")
68 |                 .define(FLUENTD_CLIENT_FILE_BACKUP_DIR, Type.STRING, null, Importance.MEDIUM,
69 |                         "Enable file backup mode if a backup directory path is specified. Default: null")
70 |                 .define(FLUENTD_CLIENT_WAIT_UNTIL_BUFFER_FLUSHED, Type.INT, null, Importance.MEDIUM,
71 |                         "Max wait until all buffers are flushed, in seconds. Default: 60 (sec)")
72 |                 .define(FLUENTD_CLIENT_WAIT_UNTIL_FLUSHER_TERMINATED, Type.INT, null, Importance.MEDIUM,
73 |                         "Max wait until the flusher is terminated, in seconds. Default: 60 (sec)")
74 |                 .define(FLUENTD_CLIENT_JVM_HEAP_BUFFER_MODE, Type.BOOLEAN, false, Importance.MEDIUM,
75 |                         "If true, use JVM heap memory for the buffer pool. Default: false")
76 |                 .define(FLUENTD_CLIENT_TIMESTAMP_INTEGER, Type.BOOLEAN, false, Importance.MEDIUM,
77 |                         "If true, use an integer timestamp. Default: false");
78 |     }
79 | 
80 |     public String getFluentdConnect() {
81 |         return getString(FLUENTD_CONNECT);
82 |     }
83 | 
84 |     public List<InetSocketAddress> getFluentdConnectAddresses() {
85 |         List<InetSocketAddress> addresses = new ArrayList<>();
86 |         for (String address : getString(FLUENTD_CONNECT).split(",")) {
87 |             String[] parsed = address.split(":");
88 |             String host = parsed[0];
89 |             int port = Integer.parseInt(parsed[1]);
90 |             addresses.add(new InetSocketAddress(host, port));
91 |         }
92 |         return addresses;
93 |     }
94 | 
95 |     public Long getFluentdClientMaxBufferSize() {
96 |         return getLong(FLUENTD_CLIENT_MAX_BUFFER_BYTES);
97 |     }
98 | 
99 |     public Integer getFluentdClientBufferChunkInitialSize() {
100 |         return getInt(FLUENTD_CLIENT_BUFFER_CHUNK_INITIAL_BYTES);
101 |     }
102 | 
103 |     public Integer getFluentdClientBufferChunkRetentionSize() {
104 |         return getInt(FLUENTD_CLIENT_BUFFER_CHUNK_RETENTION_BYTES);
105 |     }
106 | 
107 |     public Integer getFluentdClientFlushInterval() {
108 |         return getInt(FLUENTD_CLIENT_FLUSH_INTERVAL);
109 |     }
110 | 
111 |     public boolean getFluentdClientAckResponseMode() {
112 |         return getBoolean(FLUENTD_CLIENT_ACK_RESPONSE_MODE);
113 |     }
114 | 
115 |     public String getFluentdClientFileBackupDir() {
116 |         return getString(FLUENTD_CLIENT_FILE_BACKUP_DIR);
117 |     }
118 | 
119 |     public Integer getFluentdClientWaitUntilBufferFlushed() {
120 |         return getInt(FLUENTD_CLIENT_WAIT_UNTIL_BUFFER_FLUSHED);
121 |     }
122 | 
123 |     public Integer getFluentdClientWaitUntilFlusherTerminated() {
124 |         return getInt(FLUENTD_CLIENT_WAIT_UNTIL_FLUSHER_TERMINATED);
125 |     }
126 | 
127 |     public boolean getFluentdClientJvmHeapBufferMode() {
128 |         return getBoolean(FLUENTD_CLIENT_JVM_HEAP_BUFFER_MODE);
129 |     }
130 | 
131 |     public boolean getFluentdClientTimestampInteger() {
132 |         return getBoolean(FLUENTD_CLIENT_TIMESTAMP_INTEGER);
133 |     }
134 | }
135 | 
--------------------------------------------------------------------------------
/src/main/java/org/fluentd/kafka/MessagePackConverter.java:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright 2017 ClearCode Inc.
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
-------------------------------------------------------------------------------- /src/main/java/org/fluentd/kafka/MessagePackConverter.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2017 ClearCode Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | **/ 16 | package org.fluentd.kafka; 17 | 18 | import com.fasterxml.jackson.databind.ObjectMapper; 19 | import influent.EventEntry; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.data.SchemaAndValue; 22 | import org.apache.kafka.connect.data.SchemaBuilder; 23 | import org.apache.kafka.connect.data.Struct; 24 | import org.apache.kafka.connect.source.SourceRecord; 25 | import org.msgpack.value.Value; 26 | import org.slf4j.Logger; 27 | import org.slf4j.LoggerFactory; 28 | 29 | import java.io.IOException; 30 | import java.time.Instant; 31 | import java.util.*; 32 | 33 | public class MessagePackConverter { 34 | static final Logger log = LoggerFactory.getLogger(MessagePackConverter.class); 35 | private final FluentdSourceConnectorConfig config; 36 | 37 | public MessagePackConverter(final FluentdSourceConnectorConfig config) { 38 | this.config = config; 39 | } 40 | 41 | public SourceRecord convert(String topic, String tag, Long timestamp, EventEntry entry) { 42 | if (config.isFluentdSchemasEnable()) { 43 | SchemaAndValue schemaAndValue = convert(topic, entry); 44 | return new SourceRecord( 45 | null, 46 | null, 47 | topic, 48 | null, 49 | Schema.STRING_SCHEMA, 50 | tag, 51 | schemaAndValue.schema(), 52 | schemaAndValue.value(), 53 | timestamp 54 | ); 55 | } else { 56 | Object record; 57 | try { 58 | record = new ObjectMapper().readValue(entry.getRecord().toJson(), LinkedHashMap.class); 59 | } catch (IOException e) { 60 | record = entry.getRecord().toJson(); 61 | } 62 | return new SourceRecord( 63 | null, 64 | null, 65 | topic, 66 | null, 67 | null, 68 | null, 69 | null, 70 | record, 71 | timestamp 72 | ); 73 | } 74 | } 75 | 76 | public SourceRecord convert(String topic, String tag, Instant timestamp, EventEntry entry) { 77 | return convert(topic, tag, timestamp.toEpochMilli(), entry); 78 | } 79 | 80 | private SchemaAndValue convert(String topic, EventEntry entry) { 81 | return convert(topic, entry.getRecord()); 82 | } 83 | 84 | private SchemaAndValue convert(String name, Value value) { 85 | switch (value.getValueType()) { 86 | case STRING: 87 | return new SchemaAndValue(Schema.STRING_SCHEMA, value.asStringValue().asString()); 88 | case NIL: 89 | return new SchemaAndValue(Schema.OPTIONAL_STRING_SCHEMA, null); 90 | case BOOLEAN: 91 | return new SchemaAndValue(Schema.BOOLEAN_SCHEMA, value.asBooleanValue().getBoolean()); 92 | case INTEGER: 93 | return new SchemaAndValue(Schema.INT64_SCHEMA, value.asIntegerValue().toLong()); 94 | case FLOAT: 95 | // MessagePack does not let us distinguish float32 from float64 here, so every float is widened to double. 
96 | return new SchemaAndValue(Schema.FLOAT64_SCHEMA, value.asFloatValue().toDouble()); 97 | case BINARY: 98 | return new SchemaAndValue(Schema.BYTES_SCHEMA, value.asBinaryValue().asByteArray()); 99 | case MAP: { 100 | if (config.getFluentdSchemasMapField() != null && config.getFluentdSchemasMapField().contains(name)) { 101 | Map<Value, Value> map = value.asMapValue().map(); 102 | Map<String, Object> converted = new TreeMap<>(); 103 | Schema valueSchema = null; 104 | for (Map.Entry<Value, Value> entry : map.entrySet()) { 105 | Value k = entry.getKey(); 106 | Value v = entry.getValue(); 107 | String keyString = k.asStringValue().asString(); 108 | SchemaAndValue schemaAndValue = convert(keyString, v); 109 | if (valueSchema == null) { 110 | valueSchema = schemaAndValue.schema(); 111 | } 112 | converted.put(keyString, schemaAndValue.value()); 113 | } 114 | Schema schema = SchemaBuilder.map(Schema.STRING_SCHEMA, valueSchema).name(name).build(); 115 | return new SchemaAndValue(schema, converted); 116 | } else { 117 | SchemaBuilder builder = SchemaBuilder.struct().name(name); 118 | Map<Value, Value> map = value.asMapValue().map(); 119 | Map<String, SchemaAndValue> fields = new TreeMap<>(); 120 | map.forEach((k, v) -> { 121 | String n = k.asStringValue().asString(); 122 | fields.put(n, convert(n, v)); 123 | }); 124 | fields.forEach((k, v) -> { 125 | builder.field(k, v.schema()); 126 | }); 127 | Schema schema = builder.build(); 128 | Struct struct = new Struct(schema); 129 | fields.forEach((k, v) -> { 130 | struct.put(k, v.value()); 131 | }); 132 | return new SchemaAndValue(schema, struct); 133 | } 134 | } 135 | case ARRAY: { 136 | List<Value> array = value.asArrayValue().list(); 137 | SchemaAndValue sv = convert(name, array.get(0)); 138 | List<Object> values = new ArrayList<>(); 139 | // The element schema is taken from the first element, so a non-empty, homogeneous array is assumed. 140 | array.forEach(val -> values.add(convert(null, val).value())); 141 | Schema schema = SchemaBuilder.array(sv.schema()).optional().build(); 142 | return new SchemaAndValue(schema, values); 143 | } 144 | default: 145 | return SchemaAndValue.NULL; 146 | } 147 | } 148 | } 149 | 
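The `fluentd.schemas.map_field` branch above is easy to miss, so here is a small sketch of the difference it makes: a field named there is emitted with a Connect MAP schema (value schema taken from its first entry) instead of a STRUCT. The field name `attrs` and the scratch class are illustrative only:

```java
package org.fluentd.kafka;

import java.time.Instant;
import java.util.HashMap;
import java.util.Map;

import influent.EventEntry;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;
import org.msgpack.value.ValueFactory;

public class MapFieldExample {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "true");
        // Without this line the "attrs" field would become a STRUCT instead.
        props.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_MAP_FIELD, "attrs");

        MessagePackConverter converter =
                new MessagePackConverter(new FluentdSourceConnectorConfig(props));
        EventEntry entry = EventEntry.of(
                Instant.now(),
                ValueFactory.newMap(
                        ValueFactory.newString("attrs"),
                        ValueFactory.newMap(
                                ValueFactory.newString("color"),
                                ValueFactory.newString("blue"))));

        SourceRecord record = converter.convert("topic", "tag", 0L, entry);
        Schema attrsSchema = record.valueSchema().field("attrs").schema();
        System.out.println(attrsSchema.type()); // MAP rather than STRUCT
    }
}
```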
-------------------------------------------------------------------------------- /src/test/java/org/fluentd/kafka/MessagePackConverterTest.java: -------------------------------------------------------------------------------- 1 | package org.fluentd.kafka; 2 | 3 | import influent.EventEntry; 4 | import org.apache.kafka.connect.data.Schema; 5 | import org.apache.kafka.connect.data.Struct; 6 | import org.apache.kafka.connect.source.SourceRecord; 7 | import org.junit.Assert; 8 | import org.junit.Test; 9 | import org.msgpack.value.ValueFactory; 10 | 11 | import java.time.Instant; 12 | import java.util.HashMap; 13 | import java.util.List; 14 | import java.util.Map; 15 | 16 | import static org.junit.Assert.assertEquals; 17 | import static org.junit.Assert.assertNull; 18 | import static org.junit.Assert.assertTrue; 19 | 20 | public class MessagePackConverterTest { 21 | @Test 22 | public void simpleKeyValue() { 23 | Map<String, String> map = new HashMap<>(); 24 | map.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "true"); 25 | FluentdSourceConnectorConfig config = new FluentdSourceConnectorConfig(map); 26 | EventEntry eventEntry = EventEntry.of( 27 | Instant.now(), 28 | ValueFactory.newMap( 29 | ValueFactory.newString("message"), 30 | ValueFactory.newString("This is a message."), 31 | ValueFactory.newString("flag"), 32 | ValueFactory.newBoolean(true))); 33 | 34 | MessagePackConverter converter = new MessagePackConverter(config); 35 | SourceRecord sourceRecord = converter.convert("topic", "tag", 0L, eventEntry); 36 | 37 | assertEquals(Schema.STRING_SCHEMA, sourceRecord.keySchema()); 38 | assertEquals("tag", sourceRecord.key()); 39 | assertEquals("topic", sourceRecord.valueSchema().name()); 40 | Struct struct = (Struct) sourceRecord.value(); 41 | assertEquals("This is a message.", struct.get("message")); 42 | assertTrue(struct.getBoolean("flag")); 43 | } 44 | 45 | @Test 46 | public void nullValue() { 47 | Map<String, String> map = new HashMap<>(); 48 | map.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "true"); 49 | FluentdSourceConnectorConfig config = new FluentdSourceConnectorConfig(map); 50 | EventEntry eventEntry = EventEntry.of( 51 | Instant.now(), 52 | ValueFactory.newMap( 53 | ValueFactory.newString("message"), 54 | ValueFactory.newNil())); 55 | 56 | MessagePackConverter converter = new MessagePackConverter(config); 57 | SourceRecord sourceRecord = converter.convert("topic", "tag", 0L, eventEntry); 58 | 59 | assertEquals(Schema.STRING_SCHEMA, sourceRecord.keySchema()); 60 | assertEquals("tag", sourceRecord.key()); 61 | assertEquals("topic", sourceRecord.valueSchema().name()); 62 | Struct struct = (Struct) sourceRecord.value(); 63 | assertNull(struct.get("message")); 64 | } 65 | 66 | @Test 67 | public void nestedMap() { 68 | Map<String, String> map = new HashMap<>(); 69 | map.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "true"); 70 | FluentdSourceConnectorConfig config = new FluentdSourceConnectorConfig(map); 71 | EventEntry eventEntry = EventEntry.of( 72 | Instant.now(), 73 | ValueFactory.newMap( 74 | ValueFactory.newString("message"), 75 | ValueFactory.newString("This is a message."), 76 | ValueFactory.newString("nested"), 77 | ValueFactory.newMap( 78 | ValueFactory.newString("key"), 79 | ValueFactory.newInteger(42) 80 | ))); 81 | 82 | MessagePackConverter converter = new MessagePackConverter(config); 83 | SourceRecord sourceRecord = converter.convert("topic", "tag", 0L, eventEntry); 84 | 85 | assertEquals(Schema.STRING_SCHEMA, sourceRecord.keySchema()); 86 | assertEquals("tag", sourceRecord.key()); 87 | assertEquals("topic", sourceRecord.valueSchema().name()); 88 | Struct struct = (Struct) sourceRecord.value(); 89 | assertEquals("This is a message.", struct.get("message")); 90 | Struct nested = struct.getStruct("nested"); 91 | Long expected = 42L; 92 | assertEquals(expected, nested.getInt64("key")); 93 | } 94 | 95 | @Test 96 | public void nestedArray() { 97 | Map<String, String> map = new HashMap<>(); 98 | map.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "true"); 99 | FluentdSourceConnectorConfig config = new FluentdSourceConnectorConfig(map); 100 | EventEntry eventEntry = EventEntry.of( 101 | Instant.now(), 102 | ValueFactory.newMap( 103 | ValueFactory.newString("message"), 104 | ValueFactory.newString("This is a message."), 105 | ValueFactory.newString("nested"), 106 | ValueFactory.newArray( 107 | ValueFactory.newFloat(3.14f), 108 | ValueFactory.newFloat(42.195f) 109 | ))); 110 | 111 | MessagePackConverter converter = new MessagePackConverter(config); 112 | SourceRecord sourceRecord = converter.convert("topic", "tag", 0L, eventEntry); 113 | 114 | assertEquals(Schema.STRING_SCHEMA, sourceRecord.keySchema()); 115 | assertEquals("tag", sourceRecord.key()); 116 | assertEquals("topic", sourceRecord.valueSchema().name()); 117 | Struct struct = (Struct) sourceRecord.value(); 118 | assertEquals("This is a message.", struct.get("message")); 119 | List<Double> list = struct.getArray("nested"); 120 | assertEquals(list.get(0), 3.14, 0.01); 121 | assertEquals(list.get(1), 42.195, 0.01); 122 | } 123 | 124 | 
@Test 125 | public void schemalessKeyValue() { 126 | Map<String, String> map = new HashMap<>(); 127 | map.put(FluentdSourceConnectorConfig.FLUENTD_SCHEMAS_ENABLE, "false"); 128 | FluentdSourceConnectorConfig config = new FluentdSourceConnectorConfig(map); 129 | EventEntry eventEntry = EventEntry.of( 130 | Instant.now(), 131 | ValueFactory.newMap( 132 | ValueFactory.newString("message"), 133 | ValueFactory.newString("This is a message."))); 134 | 135 | MessagePackConverter converter = new MessagePackConverter(config); 136 | SourceRecord sourceRecord = converter.convert("topic", "tag", 0L, eventEntry); 137 | 138 | Assert.assertNull(sourceRecord.keySchema()); 139 | Assert.assertNull(sourceRecord.key()); 140 | Assert.assertNull(sourceRecord.valueSchema()); 141 | Map<?, ?> value = (Map<?, ?>) sourceRecord.value(); 142 | Assert.assertEquals("This is a message.", value.get("message")); 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /src/test/java/org/fluentd/kafka/FluentdSourceConnectorTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2017 ClearCode Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | **/ 16 | 17 | package org.fluentd.kafka; 18 | 19 | import org.apache.kafka.connect.connector.ConnectorContext; 20 | import org.junit.Assert; 21 | import org.junit.Before; 22 | import org.junit.Test; 23 | import org.powermock.api.easymock.PowerMock; 24 | 25 | import java.util.HashMap; 26 | import java.util.List; 27 | import java.util.Map; 28 | 29 | public class FluentdSourceConnectorTest { 30 | private FluentdSourceConnector connector; 31 | private ConnectorContext context; 32 | 33 | @Before 34 | public void setup() { 35 | connector = new FluentdSourceConnector(); 36 | context = PowerMock.createMock(ConnectorContext.class); 37 | connector.initialize(context); 38 | } 39 | 40 | @Test 41 | public void testSingleTask() { 42 | PowerMock.replayAll(); 43 | connector.start(buildSourceProperties()); 44 | List<Map<String, String>> taskConfigs = connector.taskConfigs(1); 45 | Assert.assertEquals(1, taskConfigs.size()); 46 | Assert.assertEquals("24225", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_PORT)); 47 | Assert.assertEquals("127.0.0.1", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_BIND)); 48 | Assert.assertEquals("100", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_CHUNK_SIZE_LIMIT)); 49 | Assert.assertEquals("200", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_BACKLOG)); 50 | Assert.assertEquals("300", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_SEND_BUFFER_BYTES)); 51 | Assert.assertEquals("400", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_RECEVE_BUFFER_BYTES)); 52 | Assert.assertEquals("false", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_KEEP_ALIVE_ENABLED)); 53 | Assert.assertEquals("false", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_TCP_NO_DELAY_ENABLED)); 54 | 
Assert.assertEquals("2", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_WORKER_POOL_SIZE)); 55 | Assert.assertEquals("tls", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_TRANSPORT)); 56 | Assert.assertEquals("TLSv1.1,TLSv1.2", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_TLS_VERSIONS)); 57 | Assert.assertEquals("AES,DES,RSA", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_TLS_CIPHERS)); 58 | Assert.assertEquals("/tmp/keystore.jks", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_KEYSTORE_PATH)); 59 | Assert.assertEquals("password1", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_KEYSTORE_PASSWORD)); 60 | Assert.assertEquals("password2", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_KEY_PASSWORD)); 61 | Assert.assertEquals("topic", taskConfigs.get(0).get(FluentdSourceConnectorConfig.KAFKA_TOPIC)); 62 | PowerMock.verifyAll(); 63 | } 64 | 65 | @Test 66 | public void testTaskConfigs() { 67 | PowerMock.replayAll(); 68 | connector.start(buildSourceProperties()); 69 | List> taskConfigs = connector.taskConfigs(10); 70 | Assert.assertEquals(10, taskConfigs.size()); 71 | Assert.assertEquals("24225", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_PORT)); 72 | Assert.assertEquals("127.0.0.1", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_BIND)); 73 | Assert.assertEquals("100", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_CHUNK_SIZE_LIMIT)); 74 | Assert.assertEquals("200", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_BACKLOG)); 75 | Assert.assertEquals("300", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_SEND_BUFFER_BYTES)); 76 | Assert.assertEquals("400", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_RECEVE_BUFFER_BYTES)); 77 | Assert.assertEquals("false", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_KEEP_ALIVE_ENABLED)); 78 | Assert.assertEquals("false", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_TCP_NO_DELAY_ENABLED)); 79 | Assert.assertEquals("2", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_WORKER_POOL_SIZE)); 80 | Assert.assertEquals("tls", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_TRANSPORT)); 81 | Assert.assertEquals("TLSv1.1,TLSv1.2", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_TLS_VERSIONS)); 82 | Assert.assertEquals("AES,DES,RSA", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_TLS_CIPHERS)); 83 | Assert.assertEquals("/tmp/keystore.jks", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_KEYSTORE_PATH)); 84 | Assert.assertEquals("password1", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_KEYSTORE_PASSWORD)); 85 | Assert.assertEquals("password2", taskConfigs.get(0).get(FluentdSourceConnectorConfig.FLUENTD_KEY_PASSWORD)); 86 | Assert.assertEquals("topic", taskConfigs.get(0).get(FluentdSourceConnectorConfig.KAFKA_TOPIC)); 87 | PowerMock.verifyAll(); 88 | } 89 | 90 | private Map buildSourceProperties() { 91 | final Map sourceProperties = new HashMap<>(); 92 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_PORT, "24225"); 93 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_BIND, "127.0.0.1"); 94 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_CHUNK_SIZE_LIMIT, "100"); 95 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_BACKLOG, "200"); 96 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_SEND_BUFFER_BYTES, "300"); 97 | 
sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_RECEVE_BUFFER_BYTES, "400"); 98 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_KEEP_ALIVE_ENABLED, "false"); 99 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_TCP_NO_DELAY_ENABLED, "false"); 100 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_WORKER_POOL_SIZE, "2"); 101 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_TRANSPORT, "tls"); 102 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_TLS_VERSIONS, "TLSv1.1,TLSv1.2"); 103 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_TLS_CIPHERS, "AES,DES,RSA"); 104 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_KEYSTORE_PATH, "/tmp/keystore.jks"); 105 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_KEYSTORE_PASSWORD, "password1"); 106 | sourceProperties.put(FluentdSourceConnectorConfig.FLUENTD_KEY_PASSWORD, "password2"); 107 | sourceProperties.put(FluentdSourceConnectorConfig.KAFKA_TOPIC, "topic"); 108 | return sourceProperties; 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/main/java/org/fluentd/kafka/FluentdSourceTask.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2017 ClearCode Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | **/ 16 | 17 | package org.fluentd.kafka; 18 | 19 | import influent.forward.ForwardCallback; 20 | import influent.forward.ForwardServer; 21 | import org.apache.kafka.connect.errors.ConnectException; 22 | import org.apache.kafka.connect.source.SourceRecord; 23 | import org.apache.kafka.connect.source.SourceTask; 24 | import org.slf4j.Logger; 25 | import org.slf4j.LoggerFactory; 26 | 27 | import java.io.IOException; 28 | import java.net.ServerSocket; 29 | import java.util.ArrayList; 30 | import java.util.List; 31 | import java.util.Map; 32 | import java.util.concurrent.CompletableFuture; 33 | import java.util.concurrent.ConcurrentLinkedDeque; 34 | import java.util.concurrent.atomic.AtomicBoolean; 35 | import java.util.concurrent.atomic.AtomicLong; 36 | 37 | public class FluentdSourceTask extends SourceTask { 38 | static final Logger log = LoggerFactory.getLogger(FluentdSourceTask.class); 39 | private FluentdSourceConnectorConfig config; 40 | private ForwardServer server; 41 | private final ConcurrentLinkedDeque<SourceRecord> queue = new ConcurrentLinkedDeque<>(); 42 | private static final Object startLock = new Object(); 43 | 44 | private static final class Reporter implements Runnable { 45 | private final AtomicLong counter = new AtomicLong(); 46 | private final AtomicBoolean isActive = new AtomicBoolean(false); 47 | 48 | void add(final int up) { 49 | counter.addAndGet(up); 50 | } 51 | 52 | void stop() { 53 | isActive.set(false); 54 | } 55 | 56 | @Override 57 | public void run() { 58 | isActive.set(true); 59 | long lastChecked = System.currentTimeMillis(); 60 | while (isActive.get()) { 61 | try { 62 | Thread.sleep(100); 63 | } catch (final InterruptedException e) { 64 | break; 65 | } 66 | final long now = System.currentTimeMillis(); 67 | if (now - lastChecked >= 1000) { 68 | lastChecked = now; 69 | final long current = counter.getAndSet(0); 70 | log.info("{} requests/sec", current); 71 | } 72 | } 73 | } 74 | } 75 | 76 | private static final Reporter reporter = new Reporter(); 77 | 78 | @Override 79 | public String version() { 80 | return VersionUtil.getVersion(); 81 | } 82 | 83 | @Override 84 | public void start(Map<String, String> properties) { 85 | config = new FluentdSourceConnectorConfig(properties); 86 | MessagePackConverter converter = new MessagePackConverter(config); 87 | ForwardCallback callback = ForwardCallback.of(stream -> { 88 | if (config.getFluentdCounterEnabled()) { 89 | reporter.add(stream.getEntries().size()); 90 | } 91 | stream.getEntries().forEach(entry -> { 92 | String topic = config.getFluentdStaticTopic(); 93 | String tag = stream.getTag().getName(); 94 | if (topic == null) { 95 | topic = tag; 96 | } 97 | Long timestamp = entry.getTime().toEpochMilli(); 98 | SourceRecord sourceRecord = converter.convert( 99 | topic, 100 | tag, 101 | timestamp, 102 | entry 103 | ); 104 | queue.add(sourceRecord); 105 | }); 106 | // TODO complete this future when SourceTask#commit finishes 107 | return CompletableFuture.completedFuture(null); 108 | }); 109 | try { 110 | if (!config.getFluentdTransport().equals("tcp") && 111 | !config.getFluentdTransport().equals("tls")) { 112 | String message = FluentdSourceConnectorConfig.FLUENTD_TRANSPORT + 113 | " must be \"tcp\" or \"tls\""; 114 | throw new FluentdConnectorConfigError(message); 115 | } 116 | ForwardServer.Builder builder = new ForwardServer 117 | .Builder(callback) 118 | .localAddress(config.getLocalAddress()) 119 | .chunkSizeLimit(config.getFluentdChunkSizeLimit()) 120 | .backlog(config.getFluentdBacklog()) 121 | 
.keepAliveEnabled(config.getFluentdKeepAliveEnabled()) 122 | .tcpNoDelayEnabled(config.getFluentdTcpNoDeleyEnabled()) 123 | .sslEnabled(config.getFluentdTransport().equals("tls")) 124 | .tlsVersions(config.getFluentdTlsVersions().toArray(new String[0])) 125 | .keystorePath(config.getFluentdKeystorePath()) 126 | .keystorePassword(config.getFluentdKeystorePassword()) 127 | .keyPassword(config.getFluentdKeyPassword()); 128 | if (config.getFluentdSendBufferSize() != 0) { 129 | builder.sendBufferSize(config.getFluentdSendBufferSize()); 130 | } 131 | if (config.getFluentdReceveBufferSize() != 0) { 132 | builder.receiveBufferSize(config.getFluentdReceveBufferSize()); 133 | } 134 | if (config.getFluentdWorkerPoolSize() != 0) { 135 | builder.workerPoolSize(config.getFluentdWorkerPoolSize()); 136 | } 137 | 138 | synchronized (startLock) { 139 | if (!isFluentdPortAvailable(config.getFluentdPort())) { 140 | log.info("influent server is already running"); 141 | return; 142 | } 143 | server = builder.build(); 144 | } 145 | } catch (FluentdConnectorConfigError ex) { 146 | throw new ConnectException(ex); 147 | } 148 | 149 | server.start(); 150 | if (config.getFluentdCounterEnabled()) { 151 | new Thread(reporter).start(); 152 | } 153 | } 154 | 155 | @Override 156 | public List<SourceRecord> poll() throws InterruptedException { 157 | List<SourceRecord> records = new ArrayList<>(); 158 | while (!queue.isEmpty()) { 159 | SourceRecord record = this.queue.poll(); 160 | log.debug("{}", record); 161 | if (record != null) { 162 | records.add(record); 163 | } 164 | } 165 | if (records.isEmpty()) { 166 | synchronized (this) { 167 | this.wait(1000); 168 | } 169 | } 170 | return records; 171 | } 172 | 173 | @Override 174 | public void stop() { 175 | if (config.getFluentdCounterEnabled()) { 176 | reporter.stop(); 177 | } 178 | server.shutdown(); 179 | } 180 | 181 | public boolean isReporterRunning() { 182 | return config.getFluentdCounterEnabled() && reporter.isActive.get(); 183 | } 184 | 185 | private boolean isFluentdPortAvailable(int port) { 186 | try (ServerSocket ss = new ServerSocket(port)) { 187 | ss.setReuseAddress(true); 188 | return true; 189 | } catch (IOException ignored) { 190 | } 191 | 192 | return false; 193 | } 194 | } 195 | 
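Outside the Connect runtime, the task lifecycle above (`start`/`poll`/`stop`) can be exercised with a small stand-alone harness. This is only an illustrative sketch; in production the Connect worker drives these calls:

```java
package org.fluentd.kafka;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.kafka.connect.source.SourceRecord;

public class SourceTaskHarness {
    public static void main(String[] args) throws InterruptedException {
        Map<String, String> props = new HashMap<>();
        props.put(FluentdSourceConnectorConfig.FLUENTD_PORT, "24224");
        props.put(FluentdSourceConnectorConfig.FLUENTD_BIND, "127.0.0.1");

        FluentdSourceTask task = new FluentdSourceTask();
        task.start(props); // boots the Influent ForwardServer on 127.0.0.1:24224

        try {
            // poll() drains whatever the forward callback has queued and waits
            // up to one second when the queue is empty, so this loop is cheap.
            for (int i = 0; i < 10; i++) {
                List<SourceRecord> records = task.poll();
                records.forEach(record -> System.out.println(record.value()));
            }
        } finally {
            task.stop(); // stops the reporter (if enabled) and shuts the server down
        }
    }
}
```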
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kafka Connect Fluentd Connector 2 | 3 | [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Ffluent%2Fkafka-connect-fluentd.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Ffluent%2Fkafka-connect-fluentd?ref=badge_shield) 4 | [![Build Status](https://travis-ci.org/fluent/kafka-connect-fluentd.svg?branch=master)](https://travis-ci.org/fluent/kafka-connect-fluentd) 5 | 6 | kafka-connect-fluentd is a [Kafka Connector](http://kafka.apache.org/documentation.html#connect) for copying data between Kafka and [Fluentd](https://www.fluentd.org/). 7 | kafka-connect-fluentd can be an alternative to [fluent-plugin-kafka](https://github.com/fluent/fluent-plugin-kafka). 8 | 9 | ## Installation 10 | 11 | You can download and install kafka-connect-fluentd from [Maven Central](https://search.maven.org/#search%7Cga%7C1%7Ca%3A%22kafka-connect-fluentd%22). 12 | 13 | For more details, see [Installing Plugins](https://docs.confluent.io/current/connect/userguide.html#installing-plugins). 14 | 15 | ## Running Kafka and Kafka Connector Workers 16 | 17 | See [Running Workers](https://docs.confluent.io/current/connect/userguide.html#running-workers). 18 | 19 | Run Apache Kafka: 20 | 21 | ``` 22 | (on terminal 1) 23 | $ ./bin/zookeeper-server-start.sh config/zookeeper.properties 24 | (on terminal 2) 25 | $ ./bin/kafka-server-start.sh config/server.properties 26 | ``` 27 | 28 | Run kafka-connect-fluentd (FluentdSourceConnector/FluentdSinkConnector): 29 | 30 | ``` 31 | (on terminal 3) 32 | $ bin/connect-standalone.sh config/connect-standalone.properties \ 33 | /path/to/kafka-connect-fluentd/config/FluentdSourceConnector.properties \ 34 | /path/to/kafka-connect-fluentd/config/FluentdSinkConnector.properties 35 | ``` 36 | 37 | **NOTE:** Copy the jar file to `CLASSPATH` or change `plugin.path` in connect-standalone.properties. 38 | Use the same `topics` in FluentdSourceConnector.properties and FluentdSinkConnector.properties. 39 | 40 | FluentdSourceConnector.properties: 41 | 42 | ``` 43 | name=FluentdSourceConnector 44 | tasks.max=1 45 | connector.class=org.fluentd.kafka.FluentdSourceConnector 46 | fluentd.port=24224 47 | fluentd.bind=0.0.0.0 48 | 49 | fluentd.worker.pool.size=1 50 | fluentd.counter.enabled=true 51 | ``` 52 | 53 | FluentdSinkConnector.properties: 54 | 55 | ``` 56 | name=FluentdSinkConnector 57 | topics=fluentd-test 58 | tasks.max=1 59 | connector.class=org.fluentd.kafka.FluentdSinkConnector 60 | fluentd.connect=localhost:24225 61 | ``` 62 | 63 | Setup Fluentd: 64 | 65 | See also [Fluentd Installation](https://docs.fluentd.org/v1.0/categories/installation). 66 | 67 | ``` 68 | (on terminal 4) 69 | $ git clone https://github.com/fluent/fluentd.git 70 | $ cd fluentd 71 | $ bundle install 72 | ``` 73 | 74 | Run Fluentd: 75 | 76 | ``` 77 | (on terminal 4) 78 | $ bundle exec fluentd -c fluent.conf 79 | ``` 80 | 81 | fluent.conf: 82 | 83 | ```aconf 84 | <source> 85 | @type forward 86 | port 24225 87 | </source> 88 | <match fluentd-test> 89 | @type stdout 90 | </match> 91 | ``` 92 | 93 | And emit records: 94 | 95 | ``` 96 | (on terminal 5) 97 | $ cd fluentd 98 | $ echo '{"message": "Hi, Kafka connect!"}' | bundle exec fluent-cat fluentd-test --time-as-integer 99 | ``` 100 | 101 | **NOTE:** Specify a tag that matches `topics` in FluentdSourceConnector.properties and FluentdSinkConnector.properties. 102 | 103 | See also [kafka-connect benchmark scripts](https://github.com/fluent/fluentd-benchmark/tree/master/kafka-connect). 104 | 105 | ### Configuration for FluentdSourceConnector 106 | 107 | See also [Influent](https://github.com/okumin/influent). 108 | 109 | * fluentd.port 110 | * Port number to listen. Default: `24224` 111 | * fluentd.bind 112 | * Bind address to listen. Default: `0.0.0.0` 113 | * fluentd.chunk.size.limit 114 | * Allowable chunk size. Default: `Long.MAX_VALUE` 115 | * fluentd.backlog 116 | * The maximum number of pending connections for a server. Default: `0` 117 | * fluentd.send.buffer.bytes 118 | * `SO_SNDBUF` for forward connection. `0` means system default value. Default: `0` 119 | * fluentd.receve.buffer.bytes 120 | * `SO_RCVBUF` for forward connection. `0` means system default value. Default: `0` 121 | * fluentd.keep.alive.enabled 122 | * If `true`, `SO_KEEPALIVE` is enabled. Default: `true` 123 | * fluentd.tcp.no.delay.enabled 124 | * If `true`, `TCP_NODELAY` is enabled. Default: `true` 125 | * fluentd.worker.pool.size 126 | * Event loop pool size. `0` means auto. Default: `0` 127 | * fluentd.transport 128 | * Set Fluentd transport protocol to `tcp` or `tls`. 
Default: `tcp` 129 | * fluentd.tls.versions 130 | * TLS version. `TLS`, `TLSv1`, `TLSv1.1` or `TLSv1.2`. Default: `TLSv1.2` 131 | * fluentd.tls.ciphers 132 | * Cipher suites 133 | * fluentd.keystore.path 134 | * Path to keystore 135 | * fluentd.keystore.password 136 | * Password for keystore 137 | * fluentd.key.password 138 | * Password for key 139 | * kafka.topic 140 | * Topic for Kafka. `null` means using Fluentd's tag for topic dynamically. Default: `null` 141 | * fluentd.schemas.enable 142 | * Enable schemas for messages. Default: `true` 143 | * fluentd.counter.enabled 144 | * **For developer only** Enable counter for messages/sec. Default: `false` 145 | 146 | ### Configuration for FluentdSinkConnector 147 | 148 | See also [Fluency](https://github.com/komamitsu/fluency). 149 | 150 | * fluentd.connect 151 | * Connection specs for Fluentd. Default: localhost:24224 152 | * fluentd.client.max.buffer.bytes 153 | * Max buffer size. 154 | * fluentd.client.buffer.chunk.initial.bytes 155 | * Initial size of buffer chunk. Default: 1048576 (1MiB) 156 | * fluentd.client.buffer.chunk.retention.bytes 157 | * Retention size of buffer chunk. Default: 4194304 (4MiB) 158 | * fluentd.client.flush.interval 159 | * Buffer flush interval in msec. Default: 600(msec) 160 | * fluentd.client.ack.response.mode 161 | * Enable/Disable ack response mode. Default: false 162 | * fluentd.client.file.backup.dir 163 | * Enable file backup mode by specifying a backup directory path. Default: null 164 | * fluentd.client.wait.until.buffer.flushed 165 | * Max wait until all buffers are flushed in sec. Default: 60(sec) 166 | * fluentd.client.wait.until.flusher.terminated 167 | * Max wait until the flusher is terminated in sec. Default: 60(sec) 168 | * fluentd.client.jvm.heap.buffer.mode 169 | * If true use JVM heap memory for buffer pool. Default: false 170 | * fluentd.client.timestamp.integer 171 | * If true, use integer timestamp (unix timestamp). 
Default: false 172 | 173 | **NOTE:** Fluency doesn't support SSL/TLS yet. 174 | 175 | ### Example of SSL/TLS support with Fluentd 176 | 177 | FluentdSourceConnector.properties 178 | 179 | ``` 180 | name=FluentdSourceConnector 181 | tasks.max=1 182 | connector.class=org.fluentd.kafka.FluentdSourceConnector 183 | fluentd.port=24224 184 | fluentd.bind=0.0.0.0 185 | fluentd.transport=tls 186 | fluentd.keystore.path=/path/to/influent-server.jks 187 | fluentd.keystore.password=password-for-keystore 188 | fluentd.key.password=password-for-key 189 | ``` 190 | 191 | fluent.conf 192 | 193 | ```aconf 194 | <source> 195 | @type dummy 196 | dummy {"message": "this is test"} 197 | tag test 198 | </source> 199 | 200 | <filter test> 201 | @type stdout 202 | </filter> 203 | <match test> 204 | @type forward 205 | transport tls 206 | tls_cert_path /path/to/ca_cert.pem 207 | # tls_verify_hostname false # for test 208 | heartbeat_type none 209 | <server> 210 | # first server 211 | host 127.0.0.1 212 | port 24224 213 | </server> 214 | <buffer> 215 | flush_interval 1 216 | </buffer> 217 | </match> 218 | ``` 219 | 220 | Run kafka-connect-fluentd and then run Fluentd with the above configuration: 221 | 222 | ```text 223 | (on terminal 1) 224 | $ ./bin/zookeeper-server-start.sh config/zookeeper.properties 225 | (on terminal 2) 226 | $ ./bin/kafka-server-start.sh config/server.properties 227 | (on terminal 3) 228 | $ bin/connect-standalone.sh config/connect-standalone.properties \ 229 | /path/to/kafka-connect-fluentd/config/FluentdSourceConnector.properties \ 230 | /path/to/connect-file-sink.properties 231 | (on terminal 4) 232 | $ fluentd -c fluent.conf 233 | ``` 234 | 235 | ## License 236 | 237 | Apache License, Version 2.0 238 | 
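The TLS keys in the properties example above map one-to-one onto `FluentdSourceConnectorConfig` (the next file). A minimal sketch, with placeholder paths and passwords, of how those settings are read back:

```java
package org.fluentd.kafka;

import java.util.HashMap;
import java.util.Map;

public class TlsConfigExample {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put(FluentdSourceConnectorConfig.FLUENTD_TRANSPORT, "tls");
        props.put(FluentdSourceConnectorConfig.FLUENTD_KEYSTORE_PATH, "/path/to/influent-server.jks");
        props.put(FluentdSourceConnectorConfig.FLUENTD_KEYSTORE_PASSWORD, "password-for-keystore");
        props.put(FluentdSourceConnectorConfig.FLUENTD_KEY_PASSWORD, "password-for-key");

        FluentdSourceConnectorConfig config = new FluentdSourceConnectorConfig(props);
        System.out.println(config.getFluentdTransport());   // tls
        System.out.println(config.getFluentdTlsVersions()); // [TLSv1.2] (the default)
        System.out.println(config.getFluentdKeystorePath());
    }
}
```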
-------------------------------------------------------------------------------- /src/main/java/org/fluentd/kafka/FluentdSourceConnectorConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2017 ClearCode Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | **/ 16 | 17 | package org.fluentd.kafka; 18 | 19 | import org.apache.kafka.common.config.AbstractConfig; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | import org.apache.kafka.common.config.ConfigDef.Type; 22 | import org.apache.kafka.common.config.ConfigDef.Importance; 23 | 24 | import java.net.InetAddress; 25 | import java.net.InetSocketAddress; 26 | import java.net.SocketAddress; 27 | import java.net.UnknownHostException; 28 | import java.util.Collections; 29 | import java.util.List; 30 | import java.util.Map; 31 | 32 | 33 | public class FluentdSourceConnectorConfig extends AbstractConfig { 34 | 35 | public static final String FLUENTD_PORT = "fluentd.port"; 36 | public static final String FLUENTD_BIND = "fluentd.bind"; 37 | public static final String FLUENTD_CHUNK_SIZE_LIMIT = "fluentd.chunk.size.limit"; 38 | public static final String FLUENTD_BACKLOG = "fluentd.backlog"; 39 | public static final String FLUENTD_SEND_BUFFER_BYTES = "fluentd.send.buffer.bytes"; 40 | public static final String FLUENTD_RECEVE_BUFFER_BYTES = "fluentd.receve.buffer.bytes"; 41 | public static final String FLUENTD_KEEP_ALIVE_ENABLED = "fluentd.keep.alive.enabled"; 42 | public static final String FLUENTD_TCP_NO_DELAY_ENABLED = "fluentd.tcp.no.delay.enabled"; 43 | public static final String FLUENTD_WORKER_POOL_SIZE = "fluentd.worker.pool.size"; 44 | public static final String FLUENTD_TRANSPORT = "fluentd.transport"; 45 | public static final String FLUENTD_TLS_VERSIONS = "fluentd.tls.versions"; 46 | public static final String FLUENTD_TLS_CIPHERS = "fluentd.tls.ciphers"; 47 | public static final String FLUENTD_KEYSTORE_PATH = "fluentd.keystore.path"; 48 | public static final String FLUENTD_KEYSTORE_PASSWORD = "fluentd.keystore.password"; 49 | public static final String FLUENTD_KEY_PASSWORD = "fluentd.key.password"; 50 | 51 | public static final String KAFKA_TOPIC = "kafka.topic"; 52 | public static final String FLUENTD_SCHEMAS_ENABLE = "fluentd.schemas.enable"; 53 | public static final String FLUENTD_SCHEMAS_MAP_FIELD = "fluentd.schemas.map_field"; 54 | public static final String FLUENTD_COUNTER_ENABLED = "fluentd.counter.enabled"; 55 | 56 | public FluentdSourceConnectorConfig(ConfigDef config, Map<String, String> parsedConfig) { 57 | super(config, parsedConfig); 58 | } 59 | 60 | public FluentdSourceConnectorConfig(Map<String, String> parsedConfig) { 61 | this(conf(), parsedConfig); 62 | } 63 | 64 | public static ConfigDef conf() { 65 | return new ConfigDef() 66 | .define(FLUENTD_PORT, Type.INT, 24224, Importance.HIGH, 67 | "Port number to listen. Default: 24224") 68 | .define(FLUENTD_BIND, Type.STRING, "0.0.0.0", Importance.HIGH, 69 | "Bind address to listen. Default: 0.0.0.0") 70 | .define(FLUENTD_CHUNK_SIZE_LIMIT, Type.LONG, Long.MAX_VALUE, Importance.MEDIUM, 71 | "Allowable chunk size. Default: Long.MAX_VALUE") 72 | .define(FLUENTD_BACKLOG, Type.INT, 0, Importance.MEDIUM, 73 | "The maximum number of pending connections for a server. Default: 0") 74 | .define(FLUENTD_SEND_BUFFER_BYTES, Type.INT, 0, Importance.MEDIUM, 75 | "SO_SNDBUF for forward connection. 0 means system default value. Default: 0") 76 | .define(FLUENTD_RECEVE_BUFFER_BYTES, Type.INT, 0, Importance.MEDIUM, 77 | "SO_RCVBUF for forward connection. 0 means system default value. Default: 0") 78 | .define(FLUENTD_KEEP_ALIVE_ENABLED, Type.BOOLEAN, true, Importance.MEDIUM, 79 | "If true SO_KEEPALIVE is enabled. Default: true") 80 | .define(FLUENTD_TCP_NO_DELAY_ENABLED, Type.BOOLEAN, true, Importance.MEDIUM, 81 | "If true TCP_NODELAY is enabled. 
Default: true") 82 | .define(FLUENTD_WORKER_POOL_SIZE, Type.INT, 0, Importance.MEDIUM, 83 | "Event loop pool size. 0 means auto. Default: 0") 84 | .define(FLUENTD_TRANSPORT, Type.STRING, "tcp", Importance.MEDIUM, "tcp or tls") 85 | .define(FLUENTD_TLS_VERSIONS, Type.LIST, "TLSv1.2", Importance.MEDIUM, 86 | "TLS version. \"TLS\", \"TLSv1\", \"TLSv1.1\" or \"TLSv1.2\". Default: TLSv1.2") 87 | .define(FLUENTD_TLS_CIPHERS, Type.LIST, null, Importance.MEDIUM, "Cipher suites") 88 | .define(FLUENTD_KEYSTORE_PATH, Type.STRING, null, Importance.MEDIUM, 89 | "Path to keystore") 90 | .define(FLUENTD_KEYSTORE_PASSWORD, Type.STRING, null, Importance.MEDIUM, 91 | "Password for keystore") 92 | .define(FLUENTD_KEY_PASSWORD, Type.STRING, null, Importance.MEDIUM, 93 | "Password for key") 94 | .define(KAFKA_TOPIC, Type.STRING, null, Importance.MEDIUM, 95 | "Topic for Kafka. null means using Fluentd's tag for topic dynamically. Default: null") 96 | .define(FLUENTD_SCHEMAS_ENABLE, Type.BOOLEAN, true, Importance.MEDIUM, 97 | "Enable schemas for messages. Default: true") 98 | .define(FLUENTD_SCHEMAS_MAP_FIELD, Type.LIST, null, Importance.MEDIUM, 99 | "Field name treated as MAP type. Default: null") 100 | .define(FLUENTD_COUNTER_ENABLED, Type.BOOLEAN, false, Importance.MEDIUM, 101 | "Enable counter for messages/sec. Default: false"); 102 | } 103 | 104 | public int getFluentdPort() { 105 | return getInt(FLUENTD_PORT); 106 | } 107 | 108 | public String getFluentdBind() { 109 | return getString(FLUENTD_BIND); 110 | } 111 | 112 | public SocketAddress getLocalAddress() throws FluentdConnectorConfigError { 113 | try { 114 | return new InetSocketAddress(InetAddress.getByName(getFluentdBind()), getFluentdPort()); 115 | } catch (UnknownHostException ex) { 116 | throw new FluentdConnectorConfigError(ex.getMessage()); 117 | } 118 | } 119 | 120 | public Long getFluentdChunkSizeLimit() { 121 | return getLong(FLUENTD_CHUNK_SIZE_LIMIT); 122 | } 123 | 124 | public int getFluentdBacklog() { 125 | return getInt(FLUENTD_BACKLOG); 126 | } 127 | 128 | public int getFluentdSendBufferSize() { 129 | return getInt(FLUENTD_SEND_BUFFER_BYTES); 130 | } 131 | 132 | public int getFluentdReceveBufferSize() { 133 | return getInt(FLUENTD_RECEVE_BUFFER_BYTES); 134 | } 135 | 136 | public boolean getFluentdKeepAliveEnabled() { 137 | return getBoolean(FLUENTD_KEEP_ALIVE_ENABLED); 138 | } 139 | 140 | public boolean getFluentdTcpNoDeleyEnabled() { 141 | return getBoolean(FLUENTD_TCP_NO_DELAY_ENABLED); 142 | } 143 | 144 | public int getFluentdWorkerPoolSize() { 145 | return getInt(FLUENTD_WORKER_POOL_SIZE); 146 | } 147 | 148 | public String getFluentdTransport() { 149 | return getString(FLUENTD_TRANSPORT); 150 | } 151 | 152 | public List getFluentdTlsVersions() { 153 | return getList(FLUENTD_TLS_VERSIONS); 154 | } 155 | 156 | public String getFluentdKeystorePath() { 157 | return getString(FLUENTD_KEYSTORE_PATH); 158 | } 159 | 160 | public String getFluentdKeystorePassword() { 161 | return getString(FLUENTD_KEYSTORE_PASSWORD); 162 | } 163 | 164 | public String getFluentdKeyPassword() { 165 | return getString(FLUENTD_KEY_PASSWORD); 166 | } 167 | 168 | public String getFluentdStaticTopic() { 169 | return getString(KAFKA_TOPIC); 170 | } 171 | 172 | public boolean isFluentdSchemasEnable() { 173 | return getBoolean(FLUENTD_SCHEMAS_ENABLE); 174 | } 175 | 176 | public List getFluentdSchemasMapField() { 177 | return getList(FLUENTD_SCHEMAS_MAP_FIELD); 178 | } 179 | 180 | public boolean getFluentdCounterEnabled() { 181 | return 
getBoolean(FLUENTD_COUNTER_ENABLED); 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 
62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 
180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | --------------------------------------------------------------------------------