├── .gitignore
├── amazon-kinesis-connector-flink
│   └── src
│       ├── main
│       │   ├── resources
│       │   │   ├── software
│       │   │   │   └── amazon
│       │   │   │       └── kinesis
│       │   │   │           └── shaded
│       │   │   │               └── software
│       │   │   │                   └── amazon
│       │   │   │                       └── awssdk
│       │   │   │                           └── global
│       │   │   │                               └── handlers
│       │   │   │                                   └── execution.interceptors
│       │   │   └── META-INF
│       │   │       ├── services
│       │   │       │   ├── software.amazon.kinesis.shaded.software.amazon.awssdk.http.SdkHttpService
│       │   │       │   └── org.apache.flink.table.factories.Factory
│       │   │       └── licenses
│       │   │           └── LICENSE.protobuf
│       │   └── java
│       │       └── software
│       │           └── amazon
│       │               └── kinesis
│       │                   └── connectors
│       │                       └── flink
│       │                           ├── util
│       │                           │   ├── TimeoutLatch.java
│       │                           │   ├── KinesisStateUtil.java
│       │                           │   ├── BeanDeserializerModifierForIgnorables.java
│       │                           │   └── WatermarkTracker.java
│       │                           ├── RandomKinesisPartitioner.java
│       │                           ├── FlinkKinesisException.java
│       │                           ├── serialization
│       │                           │   ├── DynamoDBStreamsSchema.java
│       │                           │   ├── KinesisSerializationSchema.java
│       │                           │   ├── KinesisDeserializationSchemaWrapper.java
│       │                           │   └── KinesisDeserializationSchema.java
│       │                           ├── KinesisPartitioner.java
│       │                           ├── internals
│       │                           │   ├── publisher
│       │                           │   │   ├── RecordPublisherFactory.java
│       │                           │   │   ├── RecordPublisher.java
│       │                           │   │   ├── polling
│       │                           │   │   │   ├── PollingRecordPublisherConfiguration.java
│       │                           │   │   │   └── PollingRecordPublisherFactory.java
│       │                           │   │   ├── RecordBatch.java
│       │                           │   │   └── fanout
│       │                           │   │       └── FanOutRecordPublisherFactory.java
│       │                           │   └── DynamoDBStreamsDataFetcher.java
│       │                           ├── metrics
│       │                           │   ├── KinesisConsumerMetricConstants.java
│       │                           │   ├── PollingRecordPublisherMetricsReporter.java
│       │                           │   └── ShardConsumerMetricsReporter.java
│       │                           ├── config
│       │                           │   └── ProducerConfigConstants.java
│       │                           ├── proxy
│       │                           │   ├── FullJitterBackoff.java
│       │                           │   ├── KinesisProxyV2Interface.java
│       │                           │   ├── GetShardListResult.java
│       │                           │   ├── KinesisProxyV2Factory.java
│       │                           │   └── KinesisProxyInterface.java
│       │                           ├── KinesisShardAssigner.java
│       │                           ├── FixedKinesisPartitioner.java
│       │                           ├── model
│       │                           │   ├── DynamoDBStreamsShardHandle.java
│       │                           │   ├── SentinelSequenceNumber.java
│       │                           │   ├── KinesisStreamShardState.java
│       │                           │   ├── SequenceNumber.java
│       │                           │   └── StreamShardHandle.java
│       │                           ├── table
│       │                           │   └── RowDataKinesisDeserializationSchema.java
│       │                           └── FlinkDynamoDBStreamsConsumer.java
│       └── test
│           ├── resources
│           │   ├── profile
│           │   ├── META-INF
│           │   │   └── services
│           │   │       └── org.apache.flink.table.factories.Factory
│           │   └── log4j2-test.properties
│           └── java
│               └── software
│                   └── amazon
│                       └── kinesis
│                           └── connectors
│                               └── flink
│                                   ├── testutils
│                                   │   ├── KinesisShardIdGenerator.java
│                                   │   ├── TestableFlinkKinesisConsumer.java
│                                   │   ├── AlwaysThrowsDeserializationSchema.java
│                                   │   ├── TestSourceContext.java
│                                   │   ├── TableOptionsBuilder.java
│                                   │   ├── TestRuntimeContext.java
│                                   │   ├── TestableKinesisDataFetcherForShardConsumerException.java
│                                   │   └── KinesisEventsGeneratorProducerThread.java
│                                   ├── model
│                                   │   ├── SentinelSequenceNumberTest.java
│                                   │   ├── StreamShardHandleTest.java
│                                   │   ├── StartingPositionTest.java
│                                   │   └── DynamoDBStreamsShardHandleTest.java
│                                   ├── examples
│                                   │   ├── ConsumeFromKinesis.java
│                                   │   ├── ConsumeFromDynamoDBStreams.java
│                                   │   └── ProduceIntoKinesis.java
│                                   ├── KinesisConsumerTest.java
│                                   ├── metrics
│                                   │   ├── PollingRecordPublisherMetricsReporterTest.java
│                                   │   └── ShardConsumerMetricsReporterTest.java
│                                   ├── internals
│                                   │   ├── DynamoDBStreamsDataFetcherTest.java
│                                   │   └── publisher
│                                   │       ├── polling
│                                   │       │   ├── PollingRecordPublisherFactoryTest.java
│                                   │       │   └── PollingRecordPublisherConfigurationTest.java
│                                   │       └── RecordBatchTest.java
│                                   ├── util
│                                   │   ├── JobManagerWatermarkTrackerTest.java
│                                   │   ├── WatermarkTrackerTest.java
│                                   │   └── StreamConsumerRegistrarUtilTest.java
│                                   ├── manualtests
│                                   │   └── ManualProducerTest.java
│                                   └── proxy
│                                       └── KinesisProxyV2FactoryTest.java
├── CODE_OF_CONDUCT.md
├── amazon-kinesis-sql-connector-flink
│   ├── src
│   │   └── main
│   │       └── resources
│   │           └── META-INF
│   │               └── NOTICE
│   └── pom.xml
├── tools
│   └── maven
│       └── suppressions.xml
├── CONTRIBUTING.md
└── pom.xml
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .idea
3 | target
4 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/resources/software/amazon/kinesis/shaded/software/amazon/awssdk/global/handlers/execution.interceptors:
--------------------------------------------------------------------------------
1 | software.amazon.kinesis.shaded.software.amazon.awssdk.core.internal.interceptor.HttpChecksumRequiredInterceptor
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/resources/profile:
--------------------------------------------------------------------------------
1 | [default]
2 | aws_access_key_id=11111111111111111111
3 | aws_secret_access_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCY1111111111
4 |
5 | [foo]
6 | aws_access_key_id=22222222222222222222
7 | aws_secret_access_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCY2222222222
8 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/resources/META-INF/services/software.amazon.kinesis.shaded.software.amazon.awssdk.http.SdkHttpService:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License").
5 | # You may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | software.amazon.kinesis.shaded.software.amazon.awssdk.http.apache.ApacheSdkHttpService
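Note: the file above is a standard Java ServiceLoader provider-configuration file. Its file name is the (shaded) service interface, and its single entry names the implementation to load, here the Apache-based HTTP client. A minimal sketch of the lookup mechanism, written against the unshaded AWS SDK v2 interface purely for illustration (not code from this repository):

import java.util.ServiceLoader;

import software.amazon.awssdk.http.SdkHttpService;

public final class HttpServiceDiscovery {
    public static void main(String[] args) {
        // ServiceLoader scans META-INF/services/<interface-name> on the classpath and
        // instantiates every implementation listed there (for this connector, the shaded
        // ApacheSdkHttpService named in the file above).
        for (SdkHttpService service : ServiceLoader.load(SdkHttpService.class)) {
            System.out.println("Discovered SdkHttpService: " + service.getClass().getName());
        }
    }
}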
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to You under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | software.amazon.kinesis.connectors.flink.testutils.TestFormatFactory
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to You under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | software.amazon.kinesis.connectors.flink.table.KinesisDynamicTableFactory
17 |
--------------------------------------------------------------------------------
/amazon-kinesis-sql-connector-flink/src/main/resources/META-INF/NOTICE:
--------------------------------------------------------------------------------
1 | // ------------------------------------------------------------------
2 | // NOTICE file corresponding to the section 4d of The Apache License,
3 | // ------------------------------------------------------------------
4 |
5 | Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
6 |
7 | flink-sql-connector-kinesis
8 | Copyright 2014-2021 The Apache Software Foundation
9 |
10 | This product includes software developed at
11 | The Apache Software Foundation (http://www.apache.org/).
12 |
13 | This project bundles the following dependencies under the Apache Software License 2.0. (http://www.apache.org/licenses/LICENSE-2.0.txt)
14 |
15 | - joda-time:joda-time:2.5
16 | - commons-io:commons-io:2.4
17 | - commons-lang:commons-lang:2.6
18 | - commons-logging:commons-logging:1.1.3
19 | - commons-codec:commons-codec:1.10
20 | - org.apache.commons:commons-lang3:3.3.2
21 | - com.google.guava:guava:30.0-jre
22 | - com.google.guava:failureaccess:1.0.1
23 | - com.fasterxml.jackson.core:jackson-annotations:2.12.7
24 | - com.fasterxml.jackson.core:jackson-databind:2.12.7
25 | - com.fasterxml.jackson.core:jackson-core:2.12.7
26 | - com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.7
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/testutils/KinesisShardIdGenerator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.testutils;
21 |
22 | /**
23 | * A generator for Kinesis shard IDs.
24 | *
25 |  * <p>Kinesis shard ids are in the form of: shardId-\d{12}
26 | */
27 | public class KinesisShardIdGenerator {
28 | public static String generateFromShardOrder(int order) {
29 | return String.format("shardId-%012d", order);
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/tools/maven/suppressions.xml:
--------------------------------------------------------------------------------
1 |
2 |
22 |
23 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/resources/log4j2-test.properties:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # Licensed to the Apache Software Foundation (ASF) under one
3 | # or more contributor license agreements. See the NOTICE file
4 | # distributed with this work for additional information
5 | # regarding copyright ownership. The ASF licenses this file
6 | # to you under the Apache License, Version 2.0 (the
7 | # "License"); you may not use this file except in compliance
8 | # with the License. You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | ################################################################################
18 |
19 | # Set root logger level to OFF to not flood build logs
20 | # set manually to INFO for debugging purposes
21 | rootLogger.level = OFF
22 | rootLogger.appenderRef.test.ref = TestLogger
23 |
24 | appender.testlogger.name = TestLogger
25 | appender.testlogger.type = CONSOLE
26 | appender.testlogger.target = SYSTEM_ERR
27 | appender.testlogger.layout.type = PatternLayout
28 | appender.testlogger.layout.pattern = %-4r [%t] %-5p %c %x - %m%n
29 |
30 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/model/SentinelSequenceNumberTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.model;
21 |
22 | import org.junit.Test;
23 |
24 | import static org.junit.Assert.assertTrue;
25 |
26 | /**
27 | * Tests for {@link SentinelSequenceNumber}.
28 | */
29 | public class SentinelSequenceNumberTest {
30 |
31 | @Test
32 | public void allSentinelNumbersAreRecognized() {
33 | for (SentinelSequenceNumber sentinel : SentinelSequenceNumber.values()) {
34 | assertTrue(SentinelSequenceNumber.isSentinelSequenceNumber(sentinel.get()));
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/util/TimeoutLatch.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one
5 | * or more contributor license agreements. See the NOTICE file
6 | * distributed with this work for additional information
7 | * regarding copyright ownership. The ASF licenses this file
8 | * to you under the Apache License, Version 2.0 (the
9 | * "License"); you may not use this file except in compliance
10 | * with the License. You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing, software
15 | * distributed under the License is distributed on an "AS IS" BASIS,
16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | * See the License for the specific language governing permissions and
18 | * limitations under the License.
19 | */
20 |
21 | package software.amazon.kinesis.connectors.flink.util;
22 |
23 | import org.apache.flink.annotation.Internal;
24 |
25 | /**
26 | * Internal use.
27 | */
28 | @Internal
29 | public class TimeoutLatch {
30 |
31 | private final Object lock = new Object();
32 | private volatile boolean waiting;
33 |
34 | public void await(long timeout) throws InterruptedException {
35 | synchronized (lock) {
36 | waiting = true;
37 | lock.wait(timeout);
38 | }
39 | }
40 |
41 | public void trigger() {
42 | if (waiting) {
43 | synchronized (lock) {
44 | waiting = false;
45 | lock.notifyAll();
46 | }
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/RandomKinesisPartitioner.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one
5 | * or more contributor license agreements. See the NOTICE file
6 | * distributed with this work for additional information
7 | * regarding copyright ownership. The ASF licenses this file
8 | * to you under the Apache License, Version 2.0 (the
9 | * "License"); you may not use this file except in compliance
10 | * with the License. You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing, software
15 | * distributed under the License is distributed on an "AS IS" BASIS,
16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | * See the License for the specific language governing permissions and
18 | * limitations under the License.
19 | */
20 |
21 | package software.amazon.kinesis.connectors.flink;
22 |
23 | import org.apache.flink.annotation.PublicEvolving;
24 |
25 | import java.util.UUID;
26 |
27 | /**
28 | * A {@link KinesisPartitioner} that maps an arbitrary input {@code element} to a random partition
29 | * ID.
30 | *
31 |  * @param <T> The input element type.
32 | */
33 | @PublicEvolving
34 | public final class RandomKinesisPartitioner<T> extends KinesisPartitioner<T> {
35 | @Override
36 | public String getPartitionId(T element) {
37 | return UUID.randomUUID().toString();
38 | }
39 |
40 | @Override
41 | public boolean equals(Object o) {
42 | return o instanceof RandomKinesisPartitioner;
43 | }
44 |
45 | @Override
46 | public int hashCode() {
47 | return RandomKinesisPartitioner.class.hashCode();
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/model/StreamShardHandleTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.model;
21 |
22 | import org.junit.Test;
23 |
24 | import static org.hamcrest.Matchers.equalTo;
25 | import static org.hamcrest.Matchers.greaterThan;
26 | import static org.hamcrest.Matchers.lessThan;
27 | import static org.junit.Assert.assertThat;
28 |
29 | /**
30 | * Test for methods in the {@link StreamShardHandle} class.
31 | */
32 | public class StreamShardHandleTest {
33 | @Test
34 | public void testCompareShardIds() {
35 | assertThat(StreamShardHandle.compareShardIds("shardId-000000000001", "shardId-000000000010"), lessThan(0));
36 | assertThat(StreamShardHandle.compareShardIds("shardId-000000000010", "shardId-000000000010"), equalTo(0));
37 | assertThat(StreamShardHandle.compareShardIds("shardId-000000000015", "shardId-000000000010"), greaterThan(0));
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/resources/META-INF/licenses/LICENSE.protobuf:
--------------------------------------------------------------------------------
1 | Copyright 2008 Google Inc. All rights reserved.
2 |
3 | Redistribution and use in source and binary forms, with or without
4 | modification, are permitted provided that the following conditions are
5 | met:
6 |
7 | * Redistributions of source code must retain the above copyright
8 | notice, this list of conditions and the following disclaimer.
9 | * Redistributions in binary form must reproduce the above
10 | copyright notice, this list of conditions and the following disclaimer
11 | in the documentation and/or other materials provided with the
12 | distribution.
13 | * Neither the name of Google Inc. nor the names of its
14 | contributors may be used to endorse or promote products derived from
15 | this software without specific prior written permission.
16 |
17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 |
29 | Code generated by the Protocol Buffer compiler is owned by the owner
30 | of the input file used when generating it. This code is not
31 | standalone and requires a support library to be linked with it. This
32 | support library is itself covered by the above license.
33 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/FlinkKinesisException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink;
21 |
22 | import org.apache.flink.annotation.Internal;
23 |
24 | /**
25 | * A {@link RuntimeException} wrapper indicating the exception was thrown from this connector.
26 | * This class is abstract, semantic subclasses should be created to indicate the type of exception.
27 | */
28 | @Internal
29 | public abstract class FlinkKinesisException extends RuntimeException {
30 |
31 | public FlinkKinesisException(final String message) {
32 | super(message);
33 | }
34 |
35 | public FlinkKinesisException(final String message, final Throwable cause) {
36 | super(message, cause);
37 | }
38 |
39 | /**
40 | * A semantic {@link RuntimeException} thrown to indicate timeout errors in the Kinesis connector.
41 | */
42 | @Internal
43 | public static class FlinkKinesisTimeoutException extends FlinkKinesisException {
44 |
45 | public FlinkKinesisTimeoutException(String message) {
46 | super(message);
47 | }
48 | }
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/serialization/DynamoDBStreamsSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.serialization;
21 |
22 | import org.apache.flink.api.common.typeinfo.TypeInformation;
23 |
24 | import com.amazonaws.services.dynamodbv2.model.Record;
25 | import com.amazonaws.services.dynamodbv2.streamsadapter.model.RecordObjectMapper;
26 | import com.fasterxml.jackson.databind.ObjectMapper;
27 |
28 | import java.io.IOException;
29 |
30 | /**
31 | * Schema used for deserializing DynamoDB streams records.
32 | */
33 | public class DynamoDBStreamsSchema implements KinesisDeserializationSchema<Record> {
34 | private static final ObjectMapper MAPPER = new RecordObjectMapper();
35 |
36 | @Override
37 | public Record deserialize(byte[] message, String partitionKey, String seqNum,
38 | long approxArrivalTimestamp, String stream, String shardId) throws IOException {
39 | return MAPPER.readValue(message, Record.class);
40 | }
41 |
42 | @Override
43 | public TypeInformation<Record> getProducedType() {
44 | return TypeInformation.of(Record.class);
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/testutils/TestableFlinkKinesisConsumer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.testutils;
21 |
22 | import org.apache.flink.api.common.functions.RuntimeContext;
23 | import org.apache.flink.api.common.serialization.SimpleStringSchema;
24 | import org.apache.flink.streaming.util.MockStreamingRuntimeContext;
25 |
26 | import software.amazon.kinesis.connectors.flink.FlinkKinesisConsumer;
27 |
28 | import java.util.Properties;
29 |
30 | /**
31 | * Extension of the {@link FlinkKinesisConsumer} for testing.
32 | */
33 | public class TestableFlinkKinesisConsumer extends FlinkKinesisConsumer<String> {
34 |
35 | private final RuntimeContext mockedRuntimeCtx;
36 |
37 | public TestableFlinkKinesisConsumer(String fakeStream,
38 | Properties fakeConfiguration,
39 | final int totalNumOfConsumerSubtasks,
40 | final int indexOfThisConsumerSubtask) {
41 | super(fakeStream, new SimpleStringSchema(), fakeConfiguration);
42 |
43 | this.mockedRuntimeCtx = new MockStreamingRuntimeContext(true, totalNumOfConsumerSubtasks, indexOfThisConsumerSubtask);
44 | }
45 |
46 | @Override
47 | public RuntimeContext getRuntimeContext() {
48 | return this.mockedRuntimeCtx;
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/KinesisPartitioner.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink;
21 |
22 | import org.apache.flink.annotation.PublicEvolving;
23 |
24 | import java.io.Serializable;
25 |
26 | /**
27 | * An interface for partitioning records.
28 | *
29 |  * @param <T> record type
30 | */
31 | @PublicEvolving
32 | public abstract class KinesisPartitioner<T> implements Serializable {
33 |
34 | private static final long serialVersionUID = -7467294664702189780L;
35 |
36 | /**
37 | * Return a partition id based on the input.
38 | *
39 | * @param element Element to partition
40 | *
41 | * @return A string representing the partition id
42 | */
43 | public abstract String getPartitionId(T element);
44 |
45 | /**
46 | * Optional method for setting an explicit hash key.
47 | *
48 | * @param element Element to get the hash key for
49 | *
50 | * @return the hash key for the element
51 | */
52 | public String getExplicitHashKey(T element) {
53 | return null;
54 | }
55 |
56 | /**
57 | * Optional initializer.
58 | *
59 | * @param indexOfThisSubtask Index of this partitioner instance
60 | * @param numberOfParallelSubtasks Total number of parallel instances
61 | */
62 | public void initialize(int indexOfThisSubtask, int numberOfParallelSubtasks) {
63 | }
64 | }
65 |
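As the contract above shows, only getPartitionId(T) is mandatory; getExplicitHashKey and initialize are optional hooks. A minimal sketch of a custom partitioner (hypothetical, not part of this repository) that keys CSV-style String records by their first field:

import software.amazon.kinesis.connectors.flink.KinesisPartitioner;

public class FirstFieldPartitioner extends KinesisPartitioner<String> {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPartitionId(String element) {
        // Everything before the first comma becomes the partition key, so records
        // sharing that prefix hash to the same shard.
        int comma = element.indexOf(',');
        return comma >= 0 ? element.substring(0, comma) : element;
    }
}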
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/internals/publisher/RecordPublisherFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.internals.publisher;
21 |
22 | import org.apache.flink.annotation.Internal;
23 | import org.apache.flink.metrics.MetricGroup;
24 |
25 | import software.amazon.kinesis.connectors.flink.model.StartingPosition;
26 | import software.amazon.kinesis.connectors.flink.model.StreamShardHandle;
27 |
28 | import java.util.Properties;
29 |
30 | /**
31 | * A factory interface used to create instances of {@link RecordPublisher}.
32 | */
33 | @Internal
34 | public interface RecordPublisherFactory {
35 |
36 | /**
37 | * Create a {@link RecordPublisher}.
38 | *
39 | * @param startingPosition the position in the shard to start consuming records from
40 | * @param consumerConfig the properties used to configure the {@link RecordPublisher}.
41 | * @param metricGroup the {@link MetricGroup} used to report metrics to
42 | * @param streamShardHandle the stream shard in which to consume from
43 | * @return the constructed {@link RecordPublisher}
44 | */
45 | RecordPublisher create(
46 | StartingPosition startingPosition,
47 | Properties consumerConfig,
48 | MetricGroup metricGroup,
49 | StreamShardHandle streamShardHandle) throws InterruptedException;
50 |
51 | /**
52 | * Destroy any open resources used by the factory.
53 | */
54 | default void close() {
55 | // Do nothing by default
56 | }
57 |
58 | }
59 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/metrics/KinesisConsumerMetricConstants.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one
5 | * or more contributor license agreements. See the NOTICE file
6 | * distributed with this work for additional information
7 | * regarding copyright ownership. The ASF licenses this file
8 | * to you under the Apache License, Version 2.0 (the
9 | * "License"); you may not use this file except in compliance
10 | * with the License. You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing, software
15 | * distributed under the License is distributed on an "AS IS" BASIS,
16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | * See the License for the specific language governing permissions and
18 | * limitations under the License.
19 | */
20 |
21 | package software.amazon.kinesis.connectors.flink.metrics;
22 |
23 | import org.apache.flink.annotation.Internal;
24 |
25 | /**
26 | * A collection of consumer metric related constant names.
27 | *
28 |  * <p>The names must not be changed, as that would break backwards compatibility for the consumer metrics.
29 | */
30 | @Internal
31 | public class KinesisConsumerMetricConstants {
32 |
33 | public static final String KINESIS_CONSUMER_METRICS_GROUP = "KinesisConsumer";
34 |
35 | public static final String STREAM_METRICS_GROUP = "stream";
36 | public static final String SHARD_METRICS_GROUP = "shardId";
37 |
38 | public static final String MILLIS_BEHIND_LATEST_GAUGE = "millisBehindLatest";
39 | public static final String SLEEP_TIME_MILLIS = "sleepTimeMillis";
40 | public static final String MAX_RECORDS_PER_FETCH = "maxNumberOfRecordsPerFetch";
41 | public static final String NUM_AGGREGATED_RECORDS_PER_FETCH = "numberOfAggregatedRecordsPerFetch";
42 | public static final String NUM_DEAGGREGATED_RECORDS_PER_FETCH = "numberOfDeaggregatedRecordsPerFetch";
43 | public static final String AVG_RECORD_SIZE_BYTES = "averageRecordSizeBytes";
44 | public static final String RUNTIME_LOOP_NANOS = "runLoopTimeNanos";
45 | public static final String LOOP_FREQUENCY_HZ = "loopFrequencyHz";
46 | public static final String BYTES_PER_READ = "bytesRequestedPerFetch";
47 |
48 | }
49 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/config/ProducerConfigConstants.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.config;
21 |
22 | import software.amazon.kinesis.connectors.flink.FlinkKinesisProducer;
23 |
24 | /**
25 | * Optional producer specific configuration keys for {@link FlinkKinesisProducer}.
26 | *
27 | * @deprecated This class is deprecated in favor of the official AWS Kinesis producer configuration keys.
28 |  * See the official AWS Kinesis Producer Library configuration documentation
29 |  * for the full list of available configs.
30 | * For configuring the region and credentials, please use the keys in {@link AWSConfigConstants}.
31 | */
32 | @Deprecated
33 | public class ProducerConfigConstants extends AWSConfigConstants {
34 |
35 | /**
36 | * Deprecated key.
37 | *
38 | * @deprecated This is deprecated in favor of the official AWS Kinesis producer configuration keys.
39 | * Please use {@code CollectionMaxCount} instead.
40 | **/
41 | @Deprecated
42 | public static final String COLLECTION_MAX_COUNT = "aws.producer.collectionMaxCount";
43 |
44 | /**
45 | * Deprecated key.
46 | *
47 | * @deprecated This is deprecated in favor of the official AWS Kinesis producer configuration keys.
48 | * Please use {@code AggregationMaxCount} instead.
49 | **/
50 | @Deprecated
51 | public static final String AGGREGATION_MAX_COUNT = "aws.producer.aggregationMaxCount";
52 | }
53 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/testutils/AlwaysThrowsDeserializationSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.testutils;
21 |
22 | import org.apache.flink.api.common.serialization.DeserializationSchema;
23 | import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
24 | import org.apache.flink.api.common.typeinfo.TypeInformation;
25 | import org.apache.flink.core.testutils.OneShotLatch;
26 |
27 | import java.io.IOException;
28 |
29 | /**
30 | * A DeserializationSchema which always throws an exception when the deserialize method is called. Also supports
31 | * waiting on a latch until at least one exception has been thrown.
32 | */
33 | public class AlwaysThrowsDeserializationSchema implements DeserializationSchema<String> {
34 | public static final String EXCEPTION_MESSAGE = "This method always throws an exception.";
35 |
36 | public transient OneShotLatch isExceptionThrown = new OneShotLatch();
37 |
38 | @Override
39 | public String deserialize(final byte[] bytes) throws IOException {
40 | isExceptionThrown.trigger();
41 | throw new RuntimeException(EXCEPTION_MESSAGE);
42 | }
43 |
44 | @Override
45 | public boolean isEndOfStream(final String s) {
46 | return false;
47 | }
48 |
49 | @Override
50 | public TypeInformation<String> getProducedType() {
51 | return BasicTypeInfo.STRING_TYPE_INFO;
52 | }
53 |
54 | private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
55 | in.defaultReadObject();
56 | this.isExceptionThrown = new OneShotLatch();
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/testutils/TestSourceContext.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.testutils;
21 |
22 | import org.apache.flink.streaming.api.functions.source.SourceFunction;
23 | import org.apache.flink.streaming.api.watermark.Watermark;
24 | import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
25 |
26 | import java.util.concurrent.ConcurrentLinkedQueue;
27 |
28 | /**
29 | * A testable {@link SourceFunction.SourceContext}.
30 | */
31 | public class TestSourceContext<T> implements SourceFunction.SourceContext<T> {
32 |
33 | private final Object checkpointLock = new Object();
34 |
35 | private ConcurrentLinkedQueue<StreamRecord<T>> collectedOutputs = new ConcurrentLinkedQueue<>();
36 |
37 | @Override
38 | public void collect(T element) {
39 | this.collectedOutputs.add(new StreamRecord<>(element));
40 | }
41 |
42 | @Override
43 | public void collectWithTimestamp(T element, long timestamp) {
44 | this.collectedOutputs.add(new StreamRecord<>(element, timestamp));
45 | }
46 |
47 | @Override
48 | public void emitWatermark(Watermark mark) {
49 | throw new UnsupportedOperationException();
50 | }
51 |
52 | @Override
53 | public void markAsTemporarilyIdle() {}
54 |
55 | @Override
56 | public Object getCheckpointLock() {
57 | return checkpointLock;
58 | }
59 |
60 | @Override
61 | public void close() {}
62 |
63 | public StreamRecord<T> removeLatestOutput() {
64 | return collectedOutputs.poll();
65 | }
66 |
67 | public ConcurrentLinkedQueue<StreamRecord<T>> getCollectedOutputs() {
68 | return collectedOutputs;
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/examples/ConsumeFromKinesis.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.examples;
21 |
22 | import org.apache.flink.api.common.serialization.SimpleStringSchema;
23 | import org.apache.flink.api.java.utils.ParameterTool;
24 | import org.apache.flink.streaming.api.datastream.DataStream;
25 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
26 |
27 | import software.amazon.kinesis.connectors.flink.FlinkKinesisConsumer;
28 | import software.amazon.kinesis.connectors.flink.config.ConsumerConfigConstants;
29 |
30 | import java.util.Properties;
31 |
32 | /**
33 | * This is an example on how to consume data from Kinesis.
34 | */
35 | public class ConsumeFromKinesis {
36 |
37 | public static void main(String[] args) throws Exception {
38 | ParameterTool pt = ParameterTool.fromArgs(args);
39 |
40 | StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
41 | see.setParallelism(1);
42 |
43 | Properties kinesisConsumerConfig = new Properties();
44 | kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_REGION, pt.getRequired("region"));
45 | kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_ACCESS_KEY_ID, pt.getRequired("accesskey"));
46 | kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_SECRET_ACCESS_KEY, pt.getRequired("secretkey"));
47 |
48 | DataStream<String> kinesis = see.addSource(new FlinkKinesisConsumer<>(
49 | "flink-test",
50 | new SimpleStringSchema(),
51 | kinesisConsumerConfig));
52 |
53 | kinesis.print();
54 |
55 | see.execute();
56 | }
57 |
58 | }
59 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/internals/publisher/RecordPublisher.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.internals.publisher;
21 |
22 | import org.apache.flink.annotation.Internal;
23 |
24 | import software.amazon.kinesis.connectors.flink.model.SequenceNumber;
25 |
26 | /**
27 | * A {@code RecordPublisher} will consume records from an external stream and deliver them to the registered subscriber.
28 | */
29 | @Internal
30 | public interface RecordPublisher {
31 |
32 | /**
33 | * Run the record publisher. Records will be consumed from the stream and published to the consumer.
34 | * The number of batches retrieved by a single invocation will vary based on implementation.
35 | *
36 | * @param recordBatchConsumer the record batch consumer in which to output records
37 | * @return a status enum to represent whether a shard has been fully consumed
38 | * @throws InterruptedException
39 | */
40 | RecordPublisherRunResult run(RecordBatchConsumer recordBatchConsumer) throws InterruptedException;
41 |
42 | /**
43 | * A status enum to represent whether a shard has been fully consumed.
44 | */
45 | enum RecordPublisherRunResult {
46 | /** There are no more records to consume from this shard. */
47 | COMPLETE,
48 |
49 | /** There are more records to consume from this shard. */
50 | INCOMPLETE,
51 |
52 | /** The record publisher has been cancelled. */
53 | CANCELLED
54 | }
55 |
56 | /**
57 | * An interface used to collect record batches, and reply with the latest consumed sequence number.
58 | */
59 | interface RecordBatchConsumer {
60 |
61 | SequenceNumber accept(RecordBatch recordBatch);
62 | }
63 | }
64 |
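The run contract above is pull-based: each invocation hands batches to the RecordBatchConsumer, which replies with the sequence number consumed up to, and the caller re-invokes run until COMPLETE (or stops on CANCELLED). A hedged sketch of such a driver loop; the class and its fields are hypothetical, the real logic lives in the connector's shard consumer:

import software.amazon.kinesis.connectors.flink.internals.publisher.RecordBatch;
import software.amazon.kinesis.connectors.flink.internals.publisher.RecordPublisher;
import software.amazon.kinesis.connectors.flink.internals.publisher.RecordPublisher.RecordPublisherRunResult;
import software.amazon.kinesis.connectors.flink.model.SequenceNumber;

public final class PublisherDriver {

    private SequenceNumber lastConsumed;

    public PublisherDriver(SequenceNumber startingSequenceNumber) {
        this.lastConsumed = startingSequenceNumber;
    }

    /** Re-invokes run() until the publisher reports the shard as fully consumed. */
    public void drain(RecordPublisher publisher) throws InterruptedException {
        RecordPublisherRunResult result;
        do {
            result = publisher.run(this::consume);
        } while (result == RecordPublisherRunResult.INCOMPLETE);
    }

    /** RecordBatchConsumer callback: emit the batch, then report the last consumed sequence number. */
    private SequenceNumber consume(RecordBatch batch) {
        // Real code would deserialize and emit the batch's records here and advance
        // lastConsumed to the sequence number of the final record it processed.
        return lastConsumed;
    }
}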
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/serialization/KinesisSerializationSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.serialization;
21 |
22 | import org.apache.flink.annotation.PublicEvolving;
23 | import org.apache.flink.api.common.serialization.SerializationSchema.InitializationContext;
24 |
25 | import java.io.Serializable;
26 | import java.nio.ByteBuffer;
27 |
28 | /**
29 | * Kinesis-specific serialization schema, allowing users to specify a target stream based
30 | * on a record's contents.
31 |  * @param <T> the element type
32 | */
33 | @PublicEvolving
34 | public interface KinesisSerializationSchema<T> extends Serializable {
35 | /**
36 | * Initialization method for the schema. It is called before the actual working methods
37 | * {@link #serialize(Object)} and thus suitable for one time setup work.
38 | *
39 |  * <p>The provided {@link InitializationContext} can be used to access additional features such
40 | * as e.g. registering user metrics.
41 | *
42 | * @param context Contextual information that can be used during initialization.
43 | */
44 | default void open(InitializationContext context) throws Exception {
45 | }
46 |
47 | /**
48 | * Serialize the given element into a ByteBuffer.
49 | *
50 | * @param element The element to serialize
51 | * @return Serialized representation of the element
52 | */
53 | ByteBuffer serialize(T element);
54 |
55 | /**
56 | * Optional method to determine the target stream based on the element.
57 | * Return null to use the default stream
58 | *
59 | * @param element The element to determine the target stream from
60 | * @return target stream name
61 | */
62 | String getTargetStream(T element);
63 | }
64 |
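A minimal sketch (not part of this repository) implementing the interface above for String records: serialize to UTF-8 bytes, and return null from getTargetStream so the producer's default stream is used:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import software.amazon.kinesis.connectors.flink.serialization.KinesisSerializationSchema;

public class Utf8KinesisSerializationSchema implements KinesisSerializationSchema<String> {

    private static final long serialVersionUID = 1L;

    @Override
    public ByteBuffer serialize(String element) {
        // The producer sends the ByteBuffer's contents as the record payload.
        return ByteBuffer.wrap(element.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public String getTargetStream(String element) {
        // Null means "use the default stream configured on the producer".
        return null;
    }
}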
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/proxy/FullJitterBackoff.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.proxy;
21 |
22 | import org.apache.flink.annotation.Internal;
23 |
24 | import java.util.Random;
25 |
26 | /**
27 | * Used to calculate full jitter backoff sleep durations.
28 |  * @see <a href="https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/">
29 |  * Exponential Backoff and Jitter</a>
30 | *
31 | */
32 | @Internal
33 | public class FullJitterBackoff {
34 |
35 | /** Random seed used to calculate backoff jitter for Kinesis operations. */
36 | private final Random seed = new Random();
37 |
38 | /**
39 | * Calculates the sleep time for full jitter based on the given parameters.
40 | *
41 | * @param baseMillis the base backoff time in milliseconds
42 | * @param maxMillis the maximum backoff time in milliseconds
43 | * @param power the power constant for exponential backoff
44 | * @param attempt the attempt number
45 | * @return the time to wait before trying again
46 | */
47 | public long calculateFullJitterBackoff(long baseMillis, long maxMillis, double power, int attempt) {
48 | long exponentialBackoff = (long) Math.min(maxMillis, baseMillis * Math.pow(power, attempt));
49 | return (long) (seed.nextDouble() * exponentialBackoff);
50 | }
51 |
52 | /**
53 | * Puts the current thread to sleep for the specified number of millis.
54 | * Simply delegates to {@link Thread#sleep}.
55 | *
56 | * @param millisToSleep the number of milliseconds to sleep for
57 | * @throws InterruptedException
58 | */
59 | public void sleep(long millisToSleep) throws InterruptedException {
60 | Thread.sleep(millisToSleep);
61 | }
62 |
63 | }
64 |
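The formula above draws a uniformly random sleep in [0, min(maxMillis, baseMillis * power^attempt)). A small usage sketch with illustrative constants (these are not the connector's configured defaults):

import software.amazon.kinesis.connectors.flink.proxy.FullJitterBackoff;

public class FullJitterBackoffExample {
    public static void main(String[] args) throws InterruptedException {
        FullJitterBackoff backoff = new FullJitterBackoff();
        long baseMillis = 200;    // base backoff
        long maxMillis = 10_000;  // cap on the exponential term
        double power = 1.5;       // exponential growth factor

        for (int attempt = 1; attempt <= 5; attempt++) {
            // Uniformly random value in [0, min(maxMillis, baseMillis * power^attempt)).
            long sleepMillis = backoff.calculateFullJitterBackoff(baseMillis, maxMillis, power, attempt);
            System.out.printf("attempt %d: sleeping %d ms%n", attempt, sleepMillis);
            backoff.sleep(sleepMillis);
        }
    }
}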
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/testutils/TableOptionsBuilder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one
5 | * or more contributor license agreements. See the NOTICE file
6 | * distributed with this work for additional information
7 | * regarding copyright ownership. The ASF licenses this file
8 | * to you under the Apache License, Version 2.0 (the
9 | * "License"); you may not use this file except in compliance
10 | * with the License. You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing, software
15 | * distributed under the License is distributed on an "AS IS" BASIS,
16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | * See the License for the specific language governing permissions and
18 | * limitations under the License.
19 | */
20 |
21 | package software.amazon.kinesis.connectors.flink.testutils;
22 |
23 | import org.apache.flink.configuration.ConfigOption;
24 | import org.apache.flink.table.factories.FactoryUtil;
25 |
26 | import java.util.HashMap;
27 | import java.util.Map;
28 |
29 | /**
30 | * Convenience syntax for constructing option maps for testing
31 | * {@link org.apache.flink.table.factories.DynamicTableFactory} implementations.
32 | * <p>Back-ported from Flink upstream, branch 1.12.0-rc3, {@code org.apache.flink.table.factories.TableOptionsBuilder}.
33 | */
34 | public class TableOptionsBuilder {
35 | private final Map<String, String> options;
36 | private final String connector;
37 | private final String format;
38 |
39 | public TableOptionsBuilder(String connector, String format) {
40 | this.options = new HashMap<>();
41 | this.connector = connector;
42 | this.format = format;
43 | }
44 |
45 | public TableOptionsBuilder withTableOption(ConfigOption<?> option, String value) {
46 | return withTableOption(option.key(), value);
47 | }
48 |
49 | public TableOptionsBuilder withFormatOption(ConfigOption<?> option, String value) {
50 | return withFormatOption(format + "." + option.key(), value);
51 | }
52 |
53 | public TableOptionsBuilder withTableOption(String key, String value) {
54 | options.put(key, value);
55 | return this;
56 | }
57 |
58 | public TableOptionsBuilder withFormatOption(String key, String value) {
59 | options.put(key, value);
60 | return this;
61 | }
62 |
63 | public Map<String, String> build() {
64 | withTableOption(FactoryUtil.CONNECTOR, connector);
65 | withTableOption(FactoryUtil.FORMAT, format);
66 | return options;
67 | }
68 | }
69 |
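[Illustrative sketch, not part of the source tree: building an option map for a factory test; the connector/format identifiers and option keys are hypothetical.]

    Map<String, String> options = new TableOptionsBuilder("kinesis", "json")
            .withTableOption("stream", "example-stream")              // stored as-is
            .withFormatOption("json.fail-on-missing-field", "false")  // string overload expects the fully prefixed key
            .build();                                                 // adds the 'connector' and 'format' entries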
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/KinesisShardAssigner.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink;
21 |
22 | import org.apache.flink.annotation.PublicEvolving;
23 |
24 | import software.amazon.kinesis.connectors.flink.model.StreamShardHandle;
25 |
26 | import java.io.Serializable;
27 |
28 | /**
29 | * Utility to map Kinesis shards to Flink subtask indices. Users can implement this interface to optimize
30 | * distribution of shards over subtasks. See {@link #assign(StreamShardHandle, int)} for details.
31 | */
32 | @PublicEvolving
33 | public interface KinesisShardAssigner extends Serializable {
34 |
35 | /**
36 | * Returns the index of the target subtask that a specific shard should be
37 | * assigned to. For return values outside the subtask range, modulus operation will
38 | * be applied automatically, hence it is also valid to just return a hash code.
39 | *
40 | * <p>The resulting distribution of shards should have the following contract:
41 | * <ul>
42 | *     <li>1. Uniform distribution across subtasks</li>
43 | *     <li>2. Deterministic, calls for a given shard always return same index.</li>
44 | * </ul>
45 | *
46 | * <p>The above contract is crucial and cannot be broken. Consumer subtasks rely on this
47 | * contract to filter out shards that they should not subscribe to, guaranteeing
48 | * that each shard of a stream will always be assigned to one subtask in a
49 | * uniformly distributed manner.
50 | *
51 | * @param shard the shard to determine the target subtask for
52 | * @param numParallelSubtasks total number of subtasks
53 | * @return target subtask index; if it falls outside the subtask range, the modulus operation will be applied
54 | */
55 | int assign(StreamShardHandle shard, int numParallelSubtasks);
56 | }
57 |
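[Illustrative sketch, not part of the source tree: a deterministic, roughly uniform assigner expressed as a lambda, assuming StreamShardHandle exposes the shard id via getShard().getShardId() as in the upstream Flink connector.]

    KinesisShardAssigner hashAssigner = (StreamShardHandle shard, int numParallelSubtasks) ->
            // the connector applies the modulus, so returning a hash code is sufficient
            shard.getShard().getShardId().hashCode();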
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/internals/publisher/polling/PollingRecordPublisherConfiguration.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.internals.publisher.polling;
21 |
22 | import org.apache.flink.annotation.Internal;
23 |
24 | import software.amazon.kinesis.connectors.flink.config.ConsumerConfigConstants;
25 |
26 | import java.util.Properties;
27 |
28 | /**
29 | * A configuration class for {@link PollingRecordPublisher} instantiated from a properties map.
30 | */
31 | @Internal
32 | public class PollingRecordPublisherConfiguration {
33 |
34 | private final boolean adaptiveReads;
35 |
36 | private final int maxNumberOfRecordsPerFetch;
37 |
38 | private final long fetchIntervalMillis;
39 |
40 | public PollingRecordPublisherConfiguration(final Properties consumerConfig) {
41 | this.maxNumberOfRecordsPerFetch = Integer.parseInt(consumerConfig.getProperty(
42 | ConsumerConfigConstants.SHARD_GETRECORDS_MAX,
43 | Integer.toString(ConsumerConfigConstants.DEFAULT_SHARD_GETRECORDS_MAX)));
44 |
45 | this.fetchIntervalMillis = Long.parseLong(consumerConfig.getProperty(
46 | ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS,
47 | Long.toString(ConsumerConfigConstants.DEFAULT_SHARD_GETRECORDS_INTERVAL_MILLIS)));
48 |
49 | this.adaptiveReads = Boolean.parseBoolean(consumerConfig.getProperty(
50 | ConsumerConfigConstants.SHARD_USE_ADAPTIVE_READS,
51 | Boolean.toString(ConsumerConfigConstants.DEFAULT_SHARD_USE_ADAPTIVE_READS)));
52 | }
53 |
54 | public boolean isAdaptiveReads() {
55 | return adaptiveReads;
56 | }
57 |
58 | public int getMaxNumberOfRecordsPerFetch() {
59 | return maxNumberOfRecordsPerFetch;
60 | }
61 |
62 | public long getFetchIntervalMillis() {
63 | return fetchIntervalMillis;
64 | }
65 | }
66 |
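[Illustrative sketch, not part of the source tree: the three consumer properties this class reads, set to example values instead of relying on the defaults.]

    Properties consumerConfig = new Properties();
    consumerConfig.setProperty(ConsumerConfigConstants.SHARD_GETRECORDS_MAX, "5000");
    consumerConfig.setProperty(ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS, "250");
    consumerConfig.setProperty(ConsumerConfigConstants.SHARD_USE_ADAPTIVE_READS, "true");

    PollingRecordPublisherConfiguration configuration = new PollingRecordPublisherConfiguration(consumerConfig);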
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/util/KinesisStateUtil.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License").
5 | * You may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package software.amazon.kinesis.connectors.flink.util;
18 |
19 | import org.apache.flink.api.common.ExecutionConfig;
20 | import org.apache.flink.api.common.typeinfo.TypeInformation;
21 | import org.apache.flink.api.common.typeutils.TypeSerializer;
22 | import org.apache.flink.api.java.tuple.Tuple2;
23 | import org.apache.flink.api.java.typeutils.runtime.TupleSerializer;
24 | import org.apache.flink.api.java.typeutils.runtime.kryo.KryoSerializer;
25 |
26 | import software.amazon.kinesis.connectors.flink.model.SequenceNumber;
27 | import software.amazon.kinesis.connectors.flink.model.StreamShardMetadata;
28 |
29 | /**
30 | * Utilities for Flink Kinesis connector state management.
31 | */
32 | public class KinesisStateUtil {
33 |
34 | /**
35 | * To prevent instantiation of class.
36 | */
37 | private KinesisStateUtil() {
38 | }
39 |
40 | /**
41 | * Creates state serializer for kinesis shard sequence number.
42 | * An explicit state serializer with KryoSerializer is needed because otherwise users
43 | * cannot use 'disableGenericTypes' with the Kinesis consumer; see FLINK-24943 for details.
44 | *
45 | * @return state serializer
46 | */
47 | public static TupleSerializer<Tuple2<StreamShardMetadata, SequenceNumber>> createShardsStateSerializer(ExecutionConfig executionConfig) {
48 | // explicit serializer will keep the compatibility with GenericTypeInformation and allow to disableGenericTypes for users
49 | TypeSerializer<?>[] fieldSerializers = new TypeSerializer<?>[]{
50 | TypeInformation.of(StreamShardMetadata.class).createSerializer(executionConfig),
51 | new KryoSerializer<>(SequenceNumber.class, executionConfig)
52 | };
53 | @SuppressWarnings("unchecked")
54 | Class<Tuple2<StreamShardMetadata, SequenceNumber>> tupleClass = (Class<Tuple2<StreamShardMetadata, SequenceNumber>>) (Class<?>) Tuple2.class;
55 | return new TupleSerializer<>(tupleClass, fieldSerializers);
56 | }
57 | }
58 |
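[Illustrative sketch, not part of the source tree: creating the explicit shard-state serializer so that state registration keeps working after generic types are disabled.]

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableGenericTypes(); // would otherwise reject implicitly Kryo-backed generic state types

    TupleSerializer<Tuple2<StreamShardMetadata, SequenceNumber>> shardStateSerializer =
            KinesisStateUtil.createShardsStateSerializer(env.getConfig());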
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/examples/ConsumeFromDynamoDBStreams.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package software.amazon.kinesis.connectors.flink.examples;
21 |
22 | import org.apache.flink.api.common.serialization.SimpleStringSchema;
23 | import org.apache.flink.api.java.utils.ParameterTool;
24 | import org.apache.flink.streaming.api.datastream.DataStream;
25 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
26 |
27 | import software.amazon.kinesis.connectors.flink.FlinkDynamoDBStreamsConsumer;
28 | import software.amazon.kinesis.connectors.flink.config.ConsumerConfigConstants;
29 |
30 | import java.util.Properties;
31 |
32 | /**
33 | * Sample command-line program of consuming data from a single DynamoDB stream.
34 | */
35 | public class ConsumeFromDynamoDBStreams {
36 | private static final String DYNAMODB_STREAM_NAME = "stream";
37 |
38 | public static void main(String[] args) throws Exception {
39 | ParameterTool pt = ParameterTool.fromArgs(args);
40 |
41 | StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
42 | see.setParallelism(1);
43 |
44 | Properties dynamodbStreamsConsumerConfig = new Properties();
45 | final String streamName = pt.getRequired(DYNAMODB_STREAM_NAME);
46 | dynamodbStreamsConsumerConfig.setProperty(
47 | ConsumerConfigConstants.AWS_REGION, pt.getRequired("region"));
48 | dynamodbStreamsConsumerConfig.setProperty(
49 | ConsumerConfigConstants.AWS_ACCESS_KEY_ID, pt.getRequired("accesskey"));
50 | dynamodbStreamsConsumerConfig.setProperty(
51 | ConsumerConfigConstants.AWS_SECRET_ACCESS_KEY, pt.getRequired("secretkey"));
52 |
53 | DataStream<String> dynamodbStreams = see.addSource(new FlinkDynamoDBStreamsConsumer<>(
54 | streamName,
55 | new SimpleStringSchema(),
56 | dynamodbStreamsConsumerConfig));
57 |
58 | dynamodbStreams.print();
59 |
60 | see.execute();
61 | }
62 |
63 | }
64 |
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/main/java/software/amazon/kinesis/connectors/flink/FixedKinesisPartitioner.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one
5 | * or more contributor license agreements. See the NOTICE file
6 | * distributed with this work for additional information
7 | * regarding copyright ownership. The ASF licenses this file
8 | * to you under the Apache License, Version 2.0 (the
9 | * "License"); you may not use this file except in compliance
10 | * with the License. You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing, software
15 | * distributed under the License is distributed on an "AS IS" BASIS,
16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | * See the License for the specific language governing permissions and
18 | * limitations under the License.
19 | */
20 |
21 | package software.amazon.kinesis.connectors.flink;
22 |
23 | import org.apache.flink.annotation.PublicEvolving;
24 | import org.apache.flink.util.Preconditions;
25 |
26 | import java.util.Objects;
27 |
28 | /**
29 | * A partitioner ensuring that each internal Flink partition ends up in the same Kinesis partition.
30 | *
31 | * <p>This is achieved by using the index of the producer task as a {@code PartitionKey}.
32 | */
33 | @PublicEvolving
34 | public final class FixedKinesisPartitioner<T> extends KinesisPartitioner<T> {
35 |
36 | private static final long serialVersionUID = 1L;
37 |
38 | private int indexOfThisSubtask = 0;
39 |
40 | @Override
41 | public void initialize(int indexOfThisSubtask, int numberOfParallelSubtasks) {
42 | Preconditions.checkArgument(
43 | indexOfThisSubtask >= 0,
44 | "Id of this subtask cannot be negative.");
45 | Preconditions.checkArgument(
46 | numberOfParallelSubtasks > 0,
47 | "Number of subtasks must be larger than 0.");
48 |
49 | this.indexOfThisSubtask = indexOfThisSubtask;
50 | }
51 |
52 | @Override
53 | public String getPartitionId(T record) {
54 | return String.valueOf(indexOfThisSubtask);
55 | }
56 |
57 | // --------------------------------------------------------------------------------------------
58 | // Value semantics for equals and hashCode
59 | // --------------------------------------------------------------------------------------------
60 |
61 | @Override
62 | public boolean equals(Object o) {
63 | if (this == o) {
64 | return true;
65 | }
66 | if (o == null || getClass() != o.getClass()) {
67 | return false;
68 | }
69 | final FixedKinesisPartitioner<?> that = (FixedKinesisPartitioner<?>) o;
70 | return Objects.equals(this.indexOfThisSubtask, that.indexOfThisSubtask);
71 | }
72 |
73 | @Override
74 | public int hashCode() {
75 | return Objects.hash(
76 | FixedKinesisPartitioner.class.hashCode(),
77 | indexOfThisSubtask);
78 | }
79 | }
80 |
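[Illustrative sketch, not part of the source tree: attaching the partitioner to a producer, assuming this fork provides FlinkKinesisProducer#setCustomPartitioner as in the upstream Flink connector; producerConfig is a hypothetical Properties instance.]

    FlinkKinesisProducer<String> producer =
            new FlinkKinesisProducer<>(new SimpleStringSchema(), producerConfig);
    producer.setCustomPartitioner(new FixedKinesisPartitioner<>()); // every record from one subtask shares a PartitionKey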
--------------------------------------------------------------------------------
/amazon-kinesis-connector-flink/src/test/java/software/amazon/kinesis/connectors/flink/KinesisConsumerTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * This file has been modified from the original.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one
5 | * or more contributor license agreements. See the NOTICE file
6 | * distributed with this work for additional information
7 | * regarding copyright ownership. The ASF licenses this file
8 | * to you under the Apache License, Version 2.0 (the
9 | * "License"); you may not use this file except in compliance
10 | * with the License. You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing, software
15 | * distributed under the License is distributed on an "AS IS" BASIS,
16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | * See the License for the specific language governing permissions and
18 | * limitations under the License.
19 | */
20 |
21 | package software.amazon.kinesis.connectors.flink;
22 |
23 | import org.apache.flink.api.common.serialization.DeserializationSchema;
24 | import org.apache.flink.api.common.typeinfo.TypeInformation;
25 | import org.apache.flink.util.Collector;
26 |
27 | import org.junit.Rule;
28 | import org.junit.Test;
29 | import org.junit.rules.ExpectedException;
30 |
31 | import java.io.IOException;
32 | import java.util.Properties;
33 |
34 | /**
35 | * Tests for {@link FlinkKinesisConsumer}. In contrast to tests in {@link FlinkKinesisConsumerTest} it does not
36 | * use power mock, which makes it possible to use e.g. the {@link ExpectedException}.
37 | */
38 | public class KinesisConsumerTest {
39 |
40 | @Rule
41 | public ExpectedException thrown = ExpectedException.none();
42 |
43 | @Test
44 | public void testKinesisConsumerThrowsExceptionIfSchemaImplementsCollector() {
45 | DeserializationSchema