opsMapper, KuduFailureHandler failureHandler) {
63 | this.tableInfo = checkNotNull(tableInfo, "tableInfo cannot be null");
64 | this.writerConfig = checkNotNull(writerConfig, "config cannot be null");
65 | this.opsMapper = checkNotNull(opsMapper, "opsMapper cannot be null");
66 | this.failureHandler = checkNotNull(failureHandler, "failureHandler cannot be null");
67 | }
68 |
69 | @Override
70 | public void configure(Configuration parameters) {
71 | }
72 |
73 | @Override
74 | public void open(int taskNumber, int numTasks) throws IOException {
75 | kuduWriter = new KuduWriter<>(tableInfo, writerConfig, opsMapper, failureHandler);
76 | }
77 |
78 | @Override
79 | public void writeRecord(IN row) throws IOException {
80 | kuduWriter.write(row);
81 | }
82 |
83 | @Override
84 | public void close() throws IOException {
85 | if (kuduWriter != null) {
86 | kuduWriter.close();
87 | }
88 | }
89 |
90 | @Override
91 | public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception {
92 | kuduWriter.flushAndCheckErrors();
93 | }
94 |
95 | @Override
96 | public void initializeState(FunctionInitializationContext functionInitializationContext) throws Exception {
97 |
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/format/KuduRowDataInputFormat.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.format;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 | import org.apache.flink.api.common.typeinfo.TypeInformation;
21 | import org.colloh.flink.kudu.connector.internal.KuduFilterInfo;
22 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
23 | import org.colloh.flink.kudu.connector.internal.convertor.RowResultConvertor;
24 | import org.colloh.flink.kudu.connector.internal.reader.KuduReaderConfig;
25 | import org.colloh.flink.kudu.connector.table.catalog.KuduCatalog;
26 | import org.apache.flink.table.data.RowData;
27 | import org.apache.flink.types.Row;
28 |
29 | import java.util.List;
30 |
31 | import static org.apache.flink.util.Preconditions.checkNotNull;
32 |
33 | /**
34 | * Input format for reading the contents of a Kudu table (defined by the provided {@link KuduTableInfo}) in both batch
35 | * and stream programs. Rows of the Kudu table are mapped to {@link RowData} instances that can be converted to other
36 | * data types later if necessary.
37 | *
38 | * For programmatic access to the schema of the input rows, users can use the {@link KuduCatalog},
39 | * or override the column order manually by providing a list of projected column names.
40 | */
41 | @PublicEvolving
42 | public class KuduRowDataInputFormat extends BaseKuduInputFormat<RowData> {
43 |
44 | public KuduRowDataInputFormat(KuduReaderConfig readerConfig, RowResultConvertor<RowData> rowResultConvertor, KuduTableInfo tableInfo) {
45 | super(readerConfig, rowResultConvertor, tableInfo);
46 | }
47 |
48 | public KuduRowDataInputFormat(KuduReaderConfig readerConfig, RowResultConvertor<RowData> rowResultConvertor, KuduTableInfo tableInfo, List<String> tableProjections) {
49 | super(readerConfig, rowResultConvertor, tableInfo, tableProjections);
50 | }
51 |
52 | public KuduRowDataInputFormat(KuduReaderConfig readerConfig, RowResultConvertor<RowData> rowResultConvertor, KuduTableInfo tableInfo, List<KuduFilterInfo> tableFilters, List<String> tableProjections) {
53 | super(readerConfig, rowResultConvertor, tableInfo, tableFilters, tableProjections);
54 | }
55 |
56 | @Override
57 | public TypeInformation<RowData> getProducedType() {
58 | return TypeInformation.of(RowData.class);
59 | }
60 | }
61 |
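// Usage sketch (illustrative, not part of the repository): builds the input
// format for an existing table. The master address "localhost:7051" and the
// table name "orders" are assumptions.

import org.colloh.flink.kudu.connector.format.KuduRowDataInputFormat;
import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
import org.colloh.flink.kudu.connector.internal.convertor.RowResultRowDataConvertor;
import org.colloh.flink.kudu.connector.internal.reader.KuduReaderConfig;

class KuduRowDataInputFormatSketch {
    static KuduRowDataInputFormat build() {
        KuduReaderConfig readerConfig = KuduReaderConfig.Builder
                .setMasters("localhost:7051") // assumed Kudu master address
                .build();
        // All columns of "orders" are read and emitted as RowData records.
        return new KuduRowDataInputFormat(
                readerConfig, new RowResultRowDataConvertor(), KuduTableInfo.forTable("orders"));
    }
}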
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/format/KuduRowInputFormat.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.format;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 | import org.apache.flink.api.common.typeinfo.TypeInformation;
21 | import org.colloh.flink.kudu.connector.internal.KuduFilterInfo;
22 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
23 | import org.colloh.flink.kudu.connector.internal.convertor.RowResultConvertor;
24 | import org.colloh.flink.kudu.connector.internal.reader.KuduReaderConfig;
25 | import org.colloh.flink.kudu.connector.table.catalog.KuduCatalog;
26 | import org.apache.flink.types.Row;
27 |
28 | import java.util.List;
29 |
30 | /**
31 | * Input format for reading the contents of a Kudu table (defined by the provided {@link KuduTableInfo}) in both batch
32 | * and stream programs. Rows of the Kudu table are mapped to {@link Row} instances that can be converted to other
33 | * data types later if necessary.
34 | *
35 | * For programmatic access to the schema of the input rows, users can use the {@link KuduCatalog},
36 | * or override the column order manually by providing a list of projected column names.
37 | */
38 | @PublicEvolving
39 | public class KuduRowInputFormat extends BaseKuduInputFormat<Row> {
40 |
41 | public KuduRowInputFormat(KuduReaderConfig readerConfig, RowResultConvertor<Row> rowResultConvertor, KuduTableInfo tableInfo) {
42 | super(readerConfig, rowResultConvertor, tableInfo);
43 | }
44 |
45 | public KuduRowInputFormat(KuduReaderConfig readerConfig, RowResultConvertor<Row> rowResultConvertor, KuduTableInfo tableInfo, List<String> tableProjections) {
46 | super(readerConfig, rowResultConvertor, tableInfo, tableProjections);
47 | }
48 |
49 | public KuduRowInputFormat(KuduReaderConfig readerConfig, RowResultConvertor<Row> rowResultConvertor, KuduTableInfo tableInfo, List<KuduFilterInfo> tableFilters, List<String> tableProjections) {
50 | super(readerConfig, rowResultConvertor, tableInfo, tableFilters, tableProjections);
51 | }
52 |
53 | @Override
54 | public TypeInformation<Row> getProducedType() {
55 | return TypeInformation.of(Row.class);
56 | }
57 | }
58 |
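// Usage sketch (illustrative): same pattern as the RowData variant above, but
// producing Row records and projecting a subset of columns. The column names
// "id" and "amount" are assumptions.

import java.util.Arrays;
import org.colloh.flink.kudu.connector.format.KuduRowInputFormat;
import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
import org.colloh.flink.kudu.connector.internal.convertor.RowResultRowConvertor;
import org.colloh.flink.kudu.connector.internal.reader.KuduReaderConfig;

class KuduRowInputFormatSketch {
    static KuduRowInputFormat build() {
        KuduReaderConfig readerConfig = KuduReaderConfig.Builder
                .setMasters("localhost:7051")
                .build();
        // Only "id" and "amount" are scanned; their order defines the Row field order.
        return new KuduRowInputFormat(
                readerConfig, new RowResultRowConvertor(), KuduTableInfo.forTable("orders"),
                Arrays.asList("id", "amount"));
    }
}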
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/ColumnSchemasFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package org.colloh.flink.kudu.connector.internal;
19 |
20 | import org.apache.flink.annotation.PublicEvolving;
21 | import org.apache.kudu.ColumnSchema;
22 |
23 | import java.io.Serializable;
24 | import java.util.List;
25 |
26 | /**
27 | * Factory for creating {@link ColumnSchema}s to be used when creating a table that
28 | * does not currently exist in Kudu. Usable through {@link KuduTableInfo#createTableIfNotExists}.
29 | *
30 | * This factory implementation must be Serializable as it will be used directly in the Flink sources
31 | * and sinks.
32 | */
33 | @PublicEvolving
34 | public interface ColumnSchemasFactory extends Serializable {
35 |
36 | /**
37 | * Creates the columns of the Kudu table that will be used during the createTable operation.
38 | *
39 | * @return List of columns.
40 | */
41 | List<ColumnSchema> getColumnSchemas();
42 |
43 | }
44 |
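// Sketch of a ColumnSchemasFactory (illustrative): the interface has a single
// abstract method, so a serializable lambda suffices. The column names and
// types below are assumptions.

import java.util.Arrays;
import org.apache.kudu.ColumnSchema;
import org.apache.kudu.Type;
import org.colloh.flink.kudu.connector.internal.ColumnSchemasFactory;

class ColumnSchemasFactorySketch {
    static final ColumnSchemasFactory ORDERS_SCHEMA = () -> Arrays.asList(
            // "id" is the table's primary key column.
            new ColumnSchema.ColumnSchemaBuilder("id", Type.INT32).key(true).build(),
            new ColumnSchema.ColumnSchemaBuilder("amount", Type.DOUBLE).build());
}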
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/CreateTableOptionsFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package org.colloh.flink.kudu.connector.internal;
19 |
20 | import org.apache.flink.annotation.PublicEvolving;
21 |
22 | import org.apache.kudu.client.CreateTableOptions;
23 |
24 | import java.io.Serializable;
25 |
26 | /**
27 | * Factory for creating {@link CreateTableOptions} to be used when creating a table that
28 | * does not currently exist in Kudu. Usable through {@link KuduTableInfo#createTableIfNotExists}.
29 | *
30 | * This factory implementation must be Serializable as it will be used directly in the Flink sources
31 | * and sinks.
32 | */
33 | @PublicEvolving
34 | public interface CreateTableOptionsFactory extends Serializable {
35 |
36 | /**
37 | * Creates the {@link CreateTableOptions} that will be used during the createTable operation.
38 | *
39 | * @return CreateTableOptions for creating the table.
40 | */
41 | CreateTableOptions getCreateTableOptions();
42 |
43 | }
44 |
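// Sketch of a CreateTableOptionsFactory (illustrative): hash-partitions on the
// assumed key column "id" and uses a replication factor of 1, which suits a
// single-node test cluster.

import java.util.Collections;
import org.apache.kudu.client.CreateTableOptions;
import org.colloh.flink.kudu.connector.internal.CreateTableOptionsFactory;

class CreateTableOptionsFactorySketch {
    static final CreateTableOptionsFactory ORDERS_OPTIONS = () -> new CreateTableOptions()
            .setNumReplicas(1)
            .addHashPartitions(Collections.singletonList("id"), 4); // 4 hash buckets
}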
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/KuduTableInfo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 |
21 | import org.apache.commons.lang3.Validate;
22 | import org.apache.kudu.Schema;
23 | import org.apache.kudu.client.CreateTableOptions;
24 |
25 | import java.io.Serializable;
26 |
27 | /**
28 | * Describes which table should be used in sources and sinks along with specifications
29 | * on how to create it if it does not exist.
30 | *
31 | * For sources and sinks reading from already existing tables, simply use {@link KuduTableInfo#forTable(String)};
32 | * if you want the system to create the table when it does not exist, specify the column and options
33 | * factories through {@link KuduTableInfo#createTableIfNotExists}.
34 | */
35 | @PublicEvolving
36 | public class KuduTableInfo implements Serializable {
37 |
38 | private final String name;
39 | private CreateTableOptionsFactory createTableOptionsFactory = null;
40 | private ColumnSchemasFactory schemasFactory = null;
41 |
42 | private KuduTableInfo(String name) {
43 | this.name = Validate.notNull(name);
44 | }
45 |
46 | /**
47 | * Creates a new {@link KuduTableInfo} that is sufficient for reading/writing to existing Kudu Tables.
48 | * For creating new tables call {@link #createTableIfNotExists} afterwards.
49 | *
50 | * @param name Table name in Kudu
51 | * @return KuduTableInfo for the given table name
52 | */
53 | public static KuduTableInfo forTable(String name) {
54 | return new KuduTableInfo(name);
55 | }
56 |
57 | /**
58 | * Defines table parameters to be used when creating the Kudu table if it does not exist (read or write)
59 | *
60 | * @param schemasFactory factory for defining columns
61 | * @param createTableOptionsFactory factory for defining create table options
62 | * @return KuduTableInfo that will create tables that do not exist with the given settings.
63 | */
64 | public KuduTableInfo createTableIfNotExists(ColumnSchemasFactory schemasFactory, CreateTableOptionsFactory createTableOptionsFactory) {
65 | this.createTableOptionsFactory = Validate.notNull(createTableOptionsFactory);
66 | this.schemasFactory = Validate.notNull(schemasFactory);
67 | return this;
68 | }
69 |
70 | /**
71 | * Returns the {@link Schema} of the table. Only works if {@link #createTableIfNotExists} was specified; otherwise it throws an error.
72 | *
73 | * @return Schema of the target table.
74 | */
75 | public Schema getSchema() {
76 | if (!getCreateTableIfNotExists()) {
77 | throw new RuntimeException("Cannot access schema for KuduTableInfo. Use createTableIfNotExists to specify the columns.");
78 | }
79 |
80 | return new Schema(schemasFactory.getColumnSchemas());
81 | }
82 |
83 | /**
84 | * @return Name of the table.
85 | */
86 | public String getName() {
87 | return name;
88 | }
89 |
90 | /**
91 | * @return True if table creation is enabled if target table does not exist.
92 | */
93 | public boolean getCreateTableIfNotExists() {
94 | return createTableOptionsFactory != null;
95 | }
96 |
97 | /**
98 | * @return CreateTableOptions if {@link #createTableIfNotExists} was specified.
99 | */
100 | public CreateTableOptions getCreateTableOptions() {
101 | if (!getCreateTableIfNotExists()) {
102 | throw new RuntimeException("Cannot access CreateTableOptions for KuduTableInfo. Use createTableIfNotExists to specify.");
103 | }
104 | return createTableOptionsFactory.getCreateTableOptions();
105 | }
106 | }
107 |
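// Putting the pieces together (illustrative sketch): a KuduTableInfo that opens
// the "orders" table if it exists and otherwise creates it, reusing the factory
// sketches shown after ColumnSchemasFactory and CreateTableOptionsFactory above.

class KuduTableInfoSketch {
    static KuduTableInfo ordersTable() {
        return KuduTableInfo.forTable("orders")
                .createTableIfNotExists(
                        ColumnSchemasFactorySketch.ORDERS_SCHEMA,
                        CreateTableOptionsFactorySketch.ORDERS_OPTIONS);
    }
}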
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/convertor/RowResultConvertor.java:
--------------------------------------------------------------------------------
1 | package org.colloh.flink.kudu.connector.internal.convertor;
2 |
3 | import org.apache.kudu.client.RowResult;
4 |
5 | import java.io.Serializable;
6 |
7 | /**
8 | * @fileName: RowResultConvertor.java
9 | * @description: Row converter
10 | * @author: by echo huang
11 | * @date: 2021/3/3 3:14 PM
12 | */
13 | public interface RowResultConvertor<T> extends Serializable {
14 |
15 | /**
16 | * Converts a Kudu RowResult into the corresponding type T
17 | *
18 | * @param row a Kudu record
19 | * @return {@link T}
20 | */
21 | T convertor(RowResult row);
22 | }
23 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/convertor/RowResultRowConvertor.java:
--------------------------------------------------------------------------------
1 | package org.colloh.flink.kudu.connector.internal.convertor;
2 |
3 | import org.apache.flink.types.Row;
4 | import org.apache.kudu.Schema;
5 | import org.apache.kudu.client.RowResult;
6 |
7 | /**
8 | * @fileName: RowResultRowConvertor.java
9 | * @description: Converts a RowResult into a Row
10 | * @author: by echo huang
11 | * @date: 2021/3/3 3:16 PM
12 | */
13 | public class RowResultRowConvertor implements RowResultConvertor<Row> {
14 | @Override
15 | public Row convertor(RowResult row) {
16 | Schema schema = row.getColumnProjection();
17 |
18 | Row values = new Row(schema.getColumnCount());
19 | schema.getColumns().forEach(column -> {
20 | String name = column.getName();
21 | if (row.isNull(name)) {
22 | return;
23 | }
24 | int pos = schema.getColumnIndex(name);
25 | values.setField(pos, row.getObject(name));
26 | });
27 | return values;
28 |
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/convertor/RowResultRowDataConvertor.java:
--------------------------------------------------------------------------------
1 | package org.colloh.flink.kudu.connector.internal.convertor;
2 |
3 | import org.apache.flink.table.data.DecimalData;
4 | import org.apache.flink.table.data.GenericRowData;
5 | import org.apache.flink.table.data.RowData;
6 | import org.apache.flink.table.data.StringData;
7 | import org.apache.flink.table.data.TimestampData;
8 | import org.apache.kudu.Schema;
9 | import org.apache.kudu.Type;
10 | import org.apache.kudu.client.RowResult;
11 |
12 | import java.math.BigDecimal;
13 | import java.util.Objects;
14 |
15 | /**
16 | * @fileName: RowResultRowDataConvertor.java
17 | * @description: Converts a RowResult into RowData
18 | * @author: by echo huang
19 | * @date: 2021/3/3 3:16 PM
20 | */
21 | public class RowResultRowDataConvertor implements RowResultConvertor<RowData> {
22 | @Override
23 | public RowData convertor(RowResult row) {
24 | Schema schema = row.getColumnProjection();
25 | GenericRowData values = new GenericRowData(schema.getColumnCount());
26 | schema.getColumns().forEach(column -> {
27 | String name = column.getName();
28 | Type type = column.getType();
29 | int pos = schema.getColumnIndex(name);
30 | if (Objects.isNull(type)) {
31 | throw new IllegalArgumentException("columnName:" + name);
32 | }
33 | if (row.isNull(name)) {
34 | return;
35 | }
36 | switch (type) {
37 | case DECIMAL:
38 | BigDecimal decimal = row.getDecimal(name);
39 | values.setField(pos, DecimalData.fromBigDecimal(decimal, decimal.precision(), decimal.scale()));
40 | break;
41 | case UNIXTIME_MICROS:
42 | values.setField(pos, TimestampData.fromTimestamp(row.getTimestamp(name)));
43 | break;
44 | case DOUBLE:
45 | values.setField(pos, row.getDouble(name));
46 | break;
47 | case STRING:
48 | Object value = row.getObject(name);
49 | values.setField(pos, StringData.fromString(Objects.nonNull(value) ? value.toString() : ""));
50 | break;
51 | case BINARY:
52 | values.setField(pos, row.getBinary(name));
53 | break;
54 | case FLOAT:
55 | values.setField(pos, row.getFloat(name));
56 | break;
57 | case INT64:
58 | values.setField(pos, row.getLong(name));
59 | break;
60 | case INT32:
61 | case INT16:
62 | case INT8:
63 | values.setField(pos, row.getObject(name)); // getObject yields Integer/Short/Byte to match the column type
64 | break;
65 | case BOOL:
66 | values.setField(pos, row.getBoolean(name));
67 | break;
68 | default:
69 | throw new IllegalArgumentException("columnName: " + name + ", type: " + type.getName() + " is not supported!");
70 | }
71 | });
72 | return values;
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/failure/DefaultKuduFailureHandler.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.failure;
18 |
19 | import org.apache.kudu.client.RowError;
20 |
21 | import java.io.IOException;
22 | import java.util.List;
23 | import java.util.stream.Collectors;
24 |
25 | /**
26 | * Default failure handling logic that doesn't do any handling but throws
27 | * an error.
28 | */
29 | public class DefaultKuduFailureHandler implements KuduFailureHandler {
30 |
31 | @Override
32 | public void onFailure(List<RowError> failure) throws IOException {
33 | String errors = failure.stream()
34 | .map(error -> error.toString() + System.lineSeparator())
35 | .collect(Collectors.joining());
36 |
37 | throw new IOException("Error while sending value. \n " + errors);
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/failure/KuduFailureHandler.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.failure;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 |
21 | import org.colloh.flink.kudu.connector.internal.writer.KuduWriter;
22 | import org.apache.kudu.client.RowError;
23 |
24 | import java.io.IOException;
25 | import java.io.Serializable;
26 | import java.util.List;
27 |
28 | /**
29 | * Custom handling logic for errors resulting from trying to execute Kudu operations in the
30 | * {@link KuduWriter}
31 | */
32 | @PublicEvolving
33 | public interface KuduFailureHandler extends Serializable {
34 |
35 | /**
36 | * Handle a failed {@link List} of {@link RowError}s.
37 | *
38 | * @param failure the row errors that caused the failure
39 | * @throws IOException if the sink should fail on this failure; the implementation should rethrow the throwable or a custom one
40 | */
41 | void onFailure(List<RowError> failure) throws IOException;
42 |
43 | /**
44 | * Handle a ClassCastException. Default implementation rethrows the exception.
45 | *
46 | * @param e the cause of failure
47 | * @throws IOException if the casting failed
48 | */
49 | default void onTypeMismatch(ClassCastException e) throws IOException {
50 | throw new IOException("Class casting failed \n", e);
51 | }
52 | }
53 |
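// Sketch of a custom KuduFailureHandler (illustrative): unlike
// DefaultKuduFailureHandler, it logs row errors and lets the job continue.

import java.io.IOException;
import java.util.List;
import org.apache.kudu.client.RowError;
import org.colloh.flink.kudu.connector.internal.failure.KuduFailureHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class LoggingKuduFailureHandler implements KuduFailureHandler {
    private static final Logger LOG = LoggerFactory.getLogger(LoggingKuduFailureHandler.class);

    @Override
    public void onFailure(List<RowError> failure) throws IOException {
        // Log and swallow; rethrow an IOException here to fail the sink instead.
        failure.forEach(error -> LOG.warn("Dropping failed Kudu operation: {}", error));
    }
}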
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/metrics/IntegerGauge.java:
--------------------------------------------------------------------------------
1 | package org.colloh.flink.kudu.connector.internal.metrics;
2 |
3 | import org.apache.flink.metrics.Gauge;
4 |
5 | /**
6 | * @fileName: IntegerGauge.java
7 | * @description: Gauge exposing an Integer count
8 | * @author: by echo huang
9 | * @date: 2020/12/31 4:28 PM
10 | */
11 | public class IntegerGauge implements Gauge<Integer> {
12 | private Integer count;
13 |
14 | public IntegerGauge() {
15 | }
16 |
17 | public IntegerGauge(Integer count) {
18 | this.count = count;
19 | }
20 |
21 | @Override
22 | public Integer getValue() {
23 | return this.count;
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/reader/KuduInputSplit.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.reader;
18 |
19 | import org.apache.flink.annotation.Internal;
20 | import org.apache.flink.core.io.LocatableInputSplit;
21 |
22 | @Internal
23 | public class KuduInputSplit extends LocatableInputSplit {
24 |
25 | private final byte[] scanToken;
26 |
27 | /**
28 | * Creates a new KuduInputSplit
29 | *
30 | * @param splitNumber the number of the input split
31 | * @param hostnames The names of the hosts storing the data this input split refers to.
32 | */
33 | public KuduInputSplit(byte[] scanToken, final int splitNumber, final String[] hostnames) {
34 | super(splitNumber, hostnames);
35 |
36 | this.scanToken = scanToken;
37 | }
38 |
39 | public byte[] getScanToken() {
40 | return scanToken;
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/reader/KuduReaderConfig.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.reader;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 |
21 | import org.apache.commons.lang3.builder.ToStringBuilder;
22 | import org.colloh.flink.kudu.connector.format.KuduRowDataInputFormat;
23 |
24 | import java.io.Serializable;
25 |
26 | import static org.apache.flink.util.Preconditions.checkNotNull;
27 |
28 | /**
29 | * Configuration used by {@link KuduRowDataInputFormat}. Specifies connection and other necessary properties.
30 | */
31 | @PublicEvolving
32 | public class KuduReaderConfig implements Serializable {
33 |
34 | private final String masters;
35 | private final int rowLimit;
36 |
37 | private KuduReaderConfig(
38 | String masters,
39 | int rowLimit) {
40 |
41 | this.masters = checkNotNull(masters, "Kudu masters cannot be null");
42 | this.rowLimit = checkNotNull(rowLimit, "Kudu rowLimit cannot be null");
43 | }
44 |
45 | public String getMasters() {
46 | return masters;
47 | }
48 |
49 | public int getRowLimit() {
50 | return rowLimit;
51 | }
52 |
53 | @Override
54 | public String toString() {
55 | return new ToStringBuilder(this)
56 | .append("masters", masters)
57 | .append("rowLimit", rowLimit)
58 | .toString();
59 | }
60 |
61 | /**
62 | * Builder for the {@link KuduReaderConfig}.
63 | */
64 | public static class Builder {
65 | private static final int DEFAULT_ROW_LIMIT = 0;
66 |
67 | private final String masters;
68 | private final int rowLimit;
69 |
70 | private Builder(String masters) {
71 | this(masters, DEFAULT_ROW_LIMIT);
72 | }
73 |
74 | private Builder(String masters, Integer rowLimit) {
75 | this.masters = masters;
76 | this.rowLimit = rowLimit;
77 | }
78 |
79 | public static Builder setMasters(String masters) {
80 | return new Builder(masters);
81 | }
82 |
83 | public Builder setRowLimit(int rowLimit) {
84 | return new Builder(masters, rowLimit);
85 | }
86 |
87 | public KuduReaderConfig build() {
88 | return new KuduReaderConfig(
89 | masters,
90 | rowLimit);
91 | }
92 | }
93 | }
94 |
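// Builder usage sketch (illustrative): the master address is an assumption, and
// the row limit of 1000 is a hypothetical cap on the number of rows read.

class KuduReaderConfigSketch {
    static final KuduReaderConfig CONFIG = KuduReaderConfig.Builder
            .setMasters("localhost:7051")
            .setRowLimit(1000)
            .build();
}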
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/reader/KuduReaderIterator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.reader;
18 |
19 | import org.apache.flink.annotation.Internal;
20 | import org.colloh.flink.kudu.connector.internal.convertor.RowResultConvertor;
21 | import org.apache.kudu.client.KuduException;
22 | import org.apache.kudu.client.KuduScanner;
23 | import org.apache.kudu.client.RowResult;
24 | import org.apache.kudu.client.RowResultIterator;
25 |
26 | import java.io.Serializable;
27 |
28 | @Internal
29 | public class KuduReaderIterator<T> implements Serializable {
30 |
31 | private final KuduScanner scanner;
32 | private final RowResultConvertor<T> rowResultConvertor;
33 | private RowResultIterator rowIterator;
34 |
35 | public KuduReaderIterator(KuduScanner scanner, RowResultConvertor<T> rowResultConvertor) throws KuduException {
36 | this.scanner = scanner;
37 | this.rowResultConvertor = rowResultConvertor;
38 | nextRows();
39 | }
40 |
41 | public void close() throws KuduException {
42 | scanner.close();
43 | }
44 |
45 | public boolean hasNext() throws KuduException {
46 | if (rowIterator.hasNext()) {
47 | return true;
48 | } else if (scanner.hasMoreRows()) {
49 | nextRows();
50 | return true;
51 | } else {
52 | return false;
53 | }
54 | }
55 |
56 | public T next() {
57 | RowResult row = this.rowIterator.next();
58 | return rowResultConvertor.convertor(row);
59 | }
60 |
61 | private void nextRows() throws KuduException {
62 | this.rowIterator = scanner.nextRows();
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/utils/KuduTypeUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package org.colloh.flink.kudu.connector.internal.utils;
19 |
20 | import org.apache.flink.table.api.DataTypes;
21 | import org.apache.flink.table.types.AtomicDataType;
22 | import org.apache.flink.table.types.DataType;
23 | import org.apache.flink.table.types.logical.BigIntType;
24 | import org.apache.flink.table.types.logical.BooleanType;
25 | import org.apache.flink.table.types.logical.DecimalType;
26 | import org.apache.flink.table.types.logical.DoubleType;
27 | import org.apache.flink.table.types.logical.FloatType;
28 | import org.apache.flink.table.types.logical.IntType;
29 | import org.apache.flink.table.types.logical.LogicalType;
30 | import org.apache.flink.table.types.logical.SmallIntType;
31 | import org.apache.flink.table.types.logical.TimestampType;
32 | import org.apache.flink.table.types.logical.TinyIntType;
33 | import org.apache.flink.table.types.logical.VarBinaryType;
34 | import org.apache.flink.table.types.logical.VarCharType;
35 | import org.apache.flink.table.types.logical.utils.LogicalTypeDefaultVisitor;
36 |
37 | import org.apache.kudu.ColumnTypeAttributes;
38 | import org.apache.kudu.Type;
39 |
40 | import java.sql.Timestamp;
41 |
42 | import static org.apache.flink.util.Preconditions.checkNotNull;
43 |
44 | public class KuduTypeUtils {
45 |
46 | public static DataType toFlinkType(Type type, ColumnTypeAttributes typeAttributes) {
47 | switch (type) {
48 | case STRING:
49 | return DataTypes.STRING();
50 | case FLOAT:
51 | return DataTypes.FLOAT();
52 | case INT8:
53 | return DataTypes.TINYINT();
54 | case INT16:
55 | return DataTypes.SMALLINT();
56 | case INT32:
57 | return DataTypes.INT();
58 | case INT64:
59 | return DataTypes.BIGINT();
60 | case DOUBLE:
61 | return DataTypes.DOUBLE();
62 | case DECIMAL:
63 | return DataTypes.DECIMAL(typeAttributes.getPrecision(), typeAttributes.getScale());
64 | case BOOL:
65 | return DataTypes.BOOLEAN();
66 | case BINARY:
67 | return DataTypes.BYTES();
68 | case UNIXTIME_MICROS:
69 | return new AtomicDataType(new TimestampType(3), Timestamp.class);
70 |
71 | default:
72 | throw new IllegalArgumentException("Illegal var type: " + type);
73 | }
74 | }
75 |
76 | public static Type toKuduType(DataType dataType) {
77 | checkNotNull(dataType, "type cannot be null");
78 | LogicalType logicalType = dataType.getLogicalType();
79 | return logicalType.accept(new KuduTypeLogicalTypeVisitor(dataType));
80 | }
81 |
82 | private static class KuduTypeLogicalTypeVisitor extends LogicalTypeDefaultVisitor<Type> {
83 |
84 | private final DataType dataType;
85 |
86 | KuduTypeLogicalTypeVisitor(DataType dataType) {
87 | this.dataType = dataType;
88 | }
89 |
90 | @Override
91 | public Type visit(BooleanType booleanType) {
92 | return Type.BOOL;
93 | }
94 |
95 | @Override
96 | public Type visit(TinyIntType tinyIntType) {
97 | return Type.INT8;
98 | }
99 |
100 | @Override
101 | public Type visit(SmallIntType smallIntType) {
102 | return Type.INT16;
103 | }
104 |
105 | @Override
106 | public Type visit(IntType intType) {
107 | return Type.INT32;
108 | }
109 |
110 | @Override
111 | public Type visit(BigIntType bigIntType) {
112 | return Type.INT64;
113 | }
114 |
115 | @Override
116 | public Type visit(FloatType floatType) {
117 | return Type.FLOAT;
118 | }
119 |
120 | @Override
121 | public Type visit(DoubleType doubleType) {
122 | return Type.DOUBLE;
123 | }
124 |
125 | @Override
126 | public Type visit(DecimalType decimalType) {
127 | return Type.DECIMAL;
128 | }
129 |
130 | @Override
131 | public Type visit(TimestampType timestampType) {
132 | return Type.UNIXTIME_MICROS;
133 | }
134 |
135 | @Override
136 | public Type visit(VarCharType varCharType) {
137 | return Type.STRING;
138 | }
139 |
140 | @Override
141 | public Type visit(VarBinaryType varBinaryType) {
142 | return Type.BINARY;
143 | }
144 |
145 | @Override
146 | protected Type defaultMethod(LogicalType logicalType) {
147 | throw new UnsupportedOperationException(
148 | String.format("Flink doesn't support converting type %s to Kudu type yet.", dataType.toString()));
149 | }
150 |
151 | }
152 | }
153 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/AbstractSingleOperationMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 | import org.apache.flink.table.data.binary.BinaryStringData;
21 | import org.apache.kudu.ColumnSchema;
22 | import org.apache.kudu.client.Delete;
23 | import org.apache.kudu.client.KuduTable;
24 | import org.apache.kudu.client.Operation;
25 | import org.apache.kudu.client.PartialRow;
26 |
27 | import java.util.Collections;
28 | import java.util.List;
29 | import java.util.Optional;
30 |
31 | /**
32 | * Base implementation for {@link KuduOperationMapper}s that have one-to-one input to
33 | * Kudu operation mapping. It requires a fixed table schema to be provided at construction
34 | * time and only requires users to implement a getter for a specific column index (relative
35 | * to the ones provided in the constructor).
36 | *
37 | * Supports either a fixed operation type per record, specified through the {@link KuduOperation}, or a
38 | * custom implementation for creating the base {@link Operation} through the
39 | * {@link #createBaseOperation(Object, KuduTable)} method.
40 | *
41 | * @param <T> Input type
42 | */
43 | @PublicEvolving
44 | public abstract class AbstractSingleOperationMapper<T> implements KuduOperationMapper<T> {
45 |
46 | protected final String[] columnNames;
47 | private final KuduOperation operation;
48 |
49 | protected AbstractSingleOperationMapper(String[] columnNames) {
50 | this(columnNames, null);
51 | }
52 |
53 | public AbstractSingleOperationMapper(String[] columnNames, KuduOperation operation) {
54 | this.columnNames = columnNames;
55 | this.operation = operation;
56 | }
57 |
58 | /**
59 | * Returns the object corresponding to the given column index.
60 | *
61 | * @param input Input element
62 | * @param i Column index
63 | * @return Column value
64 | */
65 | public abstract Object getField(T input, int i);
66 |
67 | public Optional<Operation> createBaseOperation(T input, KuduTable table) {
68 | if (operation == null) {
69 | throw new UnsupportedOperationException("createBaseOperation must be overridden if no operation specified" +
70 | " in constructor");
71 | }
72 | switch (operation) {
73 | case INSERT:
74 | return Optional.of(table.newInsert());
75 | case UPDATE:
76 | return Optional.of(table.newUpdate());
77 | case UPSERT:
78 | return Optional.of(table.newUpsert());
79 | case DELETE:
80 | return Optional.of(table.newDelete());
81 | default:
82 | throw new RuntimeException("Unknown operation " + operation);
83 | }
84 | }
85 |
86 | @Override
87 | public List<Operation> createOperations(T input, KuduTable table) {
88 | Optional operationOpt = createBaseOperation(input, table);
89 | if (!operationOpt.isPresent()) {
90 | return Collections.emptyList();
91 | }
92 |
93 | Operation operation = operationOpt.get();
94 | PartialRow partialRow = operation.getRow();
95 | for (int i = 0; i < columnNames.length; i++) {
96 | Object field = getField(input, i);
97 | partialRow.addObject(columnNames[i], field);
98 | }
99 |
100 | return Collections.singletonList(operation);
101 | }
102 |
103 | public enum KuduOperation {
104 | INSERT,
105 | UPDATE,
106 | UPSERT,
107 | DELETE
108 | }
109 | }
110 |
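// Sketch of a concrete mapper (illustrative): maps Flink Row records one-to-one
// to Kudu UPSERT operations via positional field access. The column array must
// match the Row's field order.

import org.apache.flink.types.Row;

class RowUpsertOperationMapper extends AbstractSingleOperationMapper<Row> {

    RowUpsertOperationMapper(String[] columnNames) {
        // A fixed UPSERT per record, so createBaseOperation needs no override.
        super(columnNames, KuduOperation.UPSERT);
    }

    @Override
    public Object getField(Row input, int i) {
        return input.getField(i);
    }
}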
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/KuduOperationMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 |
21 | import org.apache.kudu.client.KuduTable;
22 | import org.apache.kudu.client.Operation;
23 |
24 | import java.io.Serializable;
25 | import java.util.List;
26 |
27 | /**
28 | * Encapsulates the logic of mapping input records (of a DataStream) to operations
29 | * executed in Kudu. By allowing a list of operations to be returned, it gives
30 | * implementers the flexibility to provide more sophisticated logic.
31 | *
32 | * @param <T> Type of the input data
33 | */
34 | @PublicEvolving
35 | public interface KuduOperationMapper<T> extends Serializable {
36 |
37 | /**
38 | * Create a list of operations to be executed by the {@link KuduWriter} for the
39 | * current input
40 | *
41 | * @param input input element
42 | * @param table table for which the operations should be created
43 | * @return List of operations to be executed on the table
44 | */
45 | List<Operation> createOperations(T input, KuduTable table);
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/KuduWriter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.Internal;
20 | import org.apache.flink.annotation.VisibleForTesting;
21 | import org.apache.kudu.ColumnSchema;
22 | import org.apache.kudu.client.Delete;
23 | import org.apache.kudu.client.KuduPredicate;
24 | import org.apache.kudu.client.KuduScanner;
25 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
26 | import org.colloh.flink.kudu.connector.internal.failure.DefaultKuduFailureHandler;
27 | import org.colloh.flink.kudu.connector.internal.failure.KuduFailureHandler;
28 |
29 | import org.apache.kudu.client.DeleteTableResponse;
30 | import org.apache.kudu.client.KuduClient;
31 | import org.apache.kudu.client.KuduSession;
32 | import org.apache.kudu.client.KuduTable;
33 | import org.apache.kudu.client.Operation;
34 | import org.apache.kudu.client.OperationResponse;
35 | import org.apache.kudu.client.RowError;
36 | import org.slf4j.Logger;
37 | import org.slf4j.LoggerFactory;
38 |
39 | import java.io.IOException;
40 | import java.util.Arrays;
41 | import java.util.List;
42 |
43 | /**
44 | * todo write kudu table config
45 | *
46 | * @param <T> type of the records written to Kudu
47 | */
48 | @Internal
49 | public class KuduWriter<T> implements AutoCloseable {
50 |
51 | private final Logger log = LoggerFactory.getLogger(getClass());
52 |
53 | private final KuduTableInfo tableInfo;
54 | private final KuduWriterConfig writerConfig;
55 | private final KuduFailureHandler failureHandler;
56 | private final KuduOperationMapper<T> operationMapper;
57 |
58 | private final transient KuduClient client;
59 | private final transient KuduSession session;
60 | private final transient KuduTable table;
61 |
62 | public KuduWriter(KuduTableInfo tableInfo, KuduWriterConfig writerConfig,
63 | KuduOperationMapper<T> operationMapper) throws IOException {
64 | this(tableInfo, writerConfig, operationMapper, new DefaultKuduFailureHandler());
65 | }
66 |
67 | public KuduWriter(KuduTableInfo tableInfo, KuduWriterConfig writerConfig,
68 | KuduOperationMapper<T> operationMapper,
69 | KuduFailureHandler failureHandler) throws IOException {
70 | this.tableInfo = tableInfo;
71 | this.writerConfig = writerConfig;
72 | this.failureHandler = failureHandler;
73 |
74 | this.client = obtainClient();
75 | // ignore operations are supported starting with Kudu 1.14
76 | // this.client.supportsIgnoreOperations();
77 | this.session = obtainSession();
78 | this.table = obtainTable();
79 | this.operationMapper = operationMapper;
80 | }
81 |
82 | private KuduClient obtainClient() {
83 | return new KuduClient.KuduClientBuilder(writerConfig.getMasters())
84 | .build();
85 | }
86 |
87 | private KuduSession obtainSession() {
88 | KuduSession session = client.newSession();
89 | session.setFlushMode(writerConfig.getFlushMode());
90 | return session;
91 | }
92 |
93 | private KuduTable obtainTable() throws IOException {
94 | String tableName = tableInfo.getName();
95 | if (client.tableExists(tableName)) {
96 | return client.openTable(tableName);
97 | }
98 | if (tableInfo.getCreateTableIfNotExists()) {
99 | return client.createTable(tableName, tableInfo.getSchema(), tableInfo.getCreateTableOptions());
100 | }
101 | throw new RuntimeException("Table " + tableName + " does not exist.");
102 | }
103 |
104 | public void write(T input) throws IOException {
105 | checkAsyncErrors();
106 |
107 | for (Operation operation : operationMapper.createOperations(input, table)) {
108 | checkErrors(session.apply(operation));
109 | }
110 | }
111 |
112 | public void flushAndCheckErrors() throws IOException {
113 | checkAsyncErrors();
114 | flush();
115 | checkAsyncErrors();
116 | }
117 |
118 | @VisibleForTesting
119 | public DeleteTableResponse deleteTable() throws IOException {
120 | String tableName = table.getName();
121 | return client.deleteTable(tableName);
122 | }
123 |
124 | @Override
125 | public void close() throws IOException {
126 | try {
127 | flushAndCheckErrors();
128 | } finally {
129 | try {
130 | if (session != null) {
131 | session.close();
132 | }
133 | } catch (Exception e) {
134 | log.error("Error while closing session.", e);
135 | }
136 | try {
137 | if (client != null) {
138 | client.close();
139 | }
140 | } catch (Exception e) {
141 | log.error("Error while closing client.", e);
142 | }
143 | }
144 | }
145 |
146 | private void flush() throws IOException {
147 | session.flush();
148 | }
149 |
150 | private void checkErrors(OperationResponse response) throws IOException {
151 | if (response != null && response.hasRowError()) {
152 | failureHandler.onFailure(Arrays.asList(response.getRowError()));
153 | } else {
154 | checkAsyncErrors();
155 | }
156 | }
157 |
158 | private void checkAsyncErrors() throws IOException {
159 | if (session.countPendingErrors() == 0) {
160 | return;
161 | }
162 |
163 | List<RowError> errors = Arrays.asList(session.getPendingErrors().getRowErrors());
164 | failureHandler.onFailure(errors);
165 | }
166 | }
167 |
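// Usage sketch (illustrative): writing one record directly, e.g. from a test.
// It reuses the RowUpsertOperationMapper sketch shown after
// AbstractSingleOperationMapper; table and column names are assumptions.

import java.io.IOException;
import org.apache.flink.types.Row;
import org.colloh.flink.kudu.connector.internal.KuduTableInfo;

class KuduWriterSketch {
    static void writeOne(KuduWriterConfig config, Row record) throws IOException {
        KuduWriter<Row> writer = new KuduWriter<>(
                KuduTableInfo.forTable("orders"),
                config,
                new RowUpsertOperationMapper(new String[]{"id", "amount"}));
        try {
            writer.write(record);
            writer.flushAndCheckErrors(); // surface async errors eagerly
        } finally {
            writer.close(); // flushes again and releases the session and client
        }
    }
}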
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/KuduWriterConfig.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 |
21 | import org.apache.commons.lang3.builder.ToStringBuilder;
22 | import org.colloh.flink.kudu.connector.format.KuduOutputFormat;
23 | import org.colloh.flink.kudu.connector.table.sink.KuduSink;
24 |
25 | import java.io.Serializable;
26 |
27 | import static org.apache.flink.util.Preconditions.checkNotNull;
28 | import static org.apache.kudu.client.SessionConfiguration.FlushMode;
29 |
30 | /**
31 | * Configuration used by {@link KuduSink} and {@link KuduOutputFormat}.
32 | * Specifies connection and other necessary properties.
33 | */
34 | @PublicEvolving
35 | public class KuduWriterConfig implements Serializable {
36 |
37 | private final String masters;
38 | private final FlushMode flushMode;
39 |
40 | private KuduWriterConfig(
41 | String masters,
42 | FlushMode flushMode) {
43 |
44 | this.masters = checkNotNull(masters, "Kudu masters cannot be null");
45 | this.flushMode = checkNotNull(flushMode, "Kudu flush mode cannot be null");
46 | }
47 |
48 | public String getMasters() {
49 | return masters;
50 | }
51 |
52 | public FlushMode getFlushMode() {
53 | return flushMode;
54 | }
55 |
56 | @Override
57 | public String toString() {
58 | return new ToStringBuilder(this)
59 | .append("masters", masters)
60 | .append("flushMode", flushMode)
61 | .toString();
62 | }
63 |
64 | /**
65 | * Builder for the {@link KuduWriterConfig}.
66 | */
67 | public static class Builder {
68 | private final String masters;
69 | private FlushMode flushMode = FlushMode.AUTO_FLUSH_BACKGROUND;
70 |
71 | private Builder(String masters) {
72 | this.masters = masters;
73 | }
74 |
75 | public static Builder setMasters(String masters) {
76 | return new Builder(masters);
77 | }
78 |
79 | public Builder setConsistency(FlushMode flushMode) {
80 | this.flushMode = flushMode;
81 | return this;
82 | }
83 |
84 | public Builder setEventualConsistency() {
85 | return setConsistency(FlushMode.AUTO_FLUSH_BACKGROUND);
86 | }
87 |
88 | public Builder setStrongConsistency() {
89 | return setConsistency(FlushMode.AUTO_FLUSH_SYNC);
90 | }
91 |
92 | public KuduWriterConfig build() {
93 | return new KuduWriterConfig(
94 | masters,
95 | flushMode);
96 | }
97 | }
98 | }
99 |
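// Builder usage sketch (illustrative): AUTO_FLUSH_SYNC (strong consistency)
// trades throughput for per-record acknowledgement, while the default
// AUTO_FLUSH_BACKGROUND batches operations in the background.

class KuduWriterConfigSketch {
    static final KuduWriterConfig STRONG = KuduWriterConfig.Builder
            .setMasters("localhost:7051") // assumed master address
            .setStrongConsistency()       // FlushMode.AUTO_FLUSH_SYNC
            .build();
}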
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/PojoOperationMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 |
21 | import java.lang.reflect.Field;
22 | import java.util.ArrayList;
23 | import java.util.Arrays;
24 | import java.util.HashMap;
25 | import java.util.List;
26 | import java.util.Map;
27 |
28 | @PublicEvolving
29 | public class PojoOperationMapper<T> extends AbstractSingleOperationMapper<T> {
30 |
31 | private final transient Field[] fields;
32 |
33 | protected PojoOperationMapper(Class<T> pojoClass, String[] columnNames) {
34 | this(pojoClass, columnNames, null);
35 | }
36 |
37 | public PojoOperationMapper(Class<T> pojoClass, String[] columnNames, KuduOperation operation) {
38 | super(columnNames, operation);
39 | fields = initFields(pojoClass, columnNames);
40 | }
41 |
42 | public static List<Field> getAllFields(List<Field> fields, Class<?> type) {
43 | fields.addAll(Arrays.asList(type.getDeclaredFields()));
44 |
45 | if (type.getSuperclass() != null) {
46 | getAllFields(fields, type.getSuperclass());
47 | }
48 |
49 | return fields;
50 | }
51 |
52 | private Field[] initFields(Class<T> pojoClass, String[] columnNames) {
53 | Map<String, Field> allFields = new HashMap<>();
54 | getAllFields(new ArrayList<>(), pojoClass).stream().forEach(f -> {
55 | if (!allFields.containsKey(f.getName())) {
56 | allFields.put(f.getName(), f);
57 | }
58 | });
59 |
60 | Field[] fields = new Field[columnNames.length];
61 |
62 | for (int i = 0; i < columnNames.length; i++) {
63 | Field f = allFields.get(columnNames[i]);
64 | if (f == null) {
65 | throw new RuntimeException("Cannot find field " + columnNames[i] + ". List of detected fields: " + allFields.keySet());
66 | }
67 | f.setAccessible(true);
68 | fields[i] = f;
69 | }
70 |
71 | return fields;
72 | }
73 |
74 | @Override
75 | public Object getField(T input, int i) {
76 | try {
77 | return fields[i].get(input);
78 | } catch (IllegalAccessException e) {
79 | throw new RuntimeException("This is a bug");
80 | }
81 | }
82 | }
83 |
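PojoOperationMapper resolves each column name to a declared field of the POJO class (walking superclasses via getAllFields) and reads values reflectively. A short sketch with a hypothetical Book POJO; only the public constructor and the KuduOperation enum used elsewhere in this repo are assumed:

    public class Book {
        public int id;
        public String title;
    }

    // Column order determines which field backs each position.
    PojoOperationMapper<Book> mapper = new PojoOperationMapper<>(
            Book.class,
            new String[]{"id", "title"},
            AbstractSingleOperationMapper.KuduOperation.INSERT);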
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/RowDataUpsertOperationMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.Internal;
20 | import org.apache.flink.table.api.TableSchema;
21 | import org.apache.flink.table.data.DecimalData;
22 | import org.apache.flink.table.data.RowData;
23 | import org.apache.flink.table.data.StringData;
24 | import org.apache.flink.table.types.DataType;
25 | import org.apache.flink.table.types.logical.DecimalType;
26 | import org.apache.flink.table.types.logical.LogicalType;
27 | import org.apache.flink.types.RowKind;
28 | import org.apache.kudu.client.KuduTable;
29 | import org.apache.kudu.client.Operation;
30 | import org.slf4j.Logger;
31 | import org.slf4j.LoggerFactory;
32 |
33 | import java.util.Arrays;
34 | import java.util.Optional;
35 |
36 | import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getPrecision;
37 |
38 | @Internal
39 | public class RowDataUpsertOperationMapper extends AbstractSingleOperationMapper<RowData> {
40 |
41 | private static final Logger LOG = LoggerFactory.getLogger(RowDataUpsertOperationMapper.class);
42 |
43 |
44 | private static final int MIN_TIME_PRECISION = 0;
45 | private static final int MAX_TIME_PRECISION = 3;
46 | private static final int MIN_TIMESTAMP_PRECISION = 0;
47 | private static final int MAX_TIMESTAMP_PRECISION = 6;
48 |
49 | private final LogicalType[] logicalTypes;
50 |
51 | public RowDataUpsertOperationMapper(TableSchema schema) {
52 | super(schema.getFieldNames());
53 | logicalTypes = Arrays.stream(schema.getFieldDataTypes())
54 | .map(DataType::getLogicalType)
55 | .toArray(LogicalType[]::new);
56 | }
57 |
58 | @Override
59 | public Object getField(RowData input, int i) {
60 | return getFieldValue(input, i);
61 | }
62 |
63 | public Object getFieldValue(RowData input, int i) {
64 | if (input == null || input.isNullAt(i)) {
65 | return null;
66 | }
67 | LogicalType fieldType = logicalTypes[i];
68 | switch (fieldType.getTypeRoot()) {
69 | case CHAR:
70 | case VARCHAR: {
71 | StringData data = input.getString(i);
72 | if (data != null) {
73 | return data.toString();
74 | }
75 | return null;
76 | }
77 | case BOOLEAN:
78 | return input.getBoolean(i);
79 | case BINARY:
80 | case VARBINARY:
81 | return input.getBinary(i);
82 | case DECIMAL: {
83 | DecimalType decimalType = (DecimalType) fieldType;
84 | final int precision = decimalType.getPrecision();
85 | final int scale = decimalType.getScale();
86 | DecimalData data = input.getDecimal(i, precision, scale);
87 | if (data != null) {
88 | return data.toBigDecimal();
89 | } else {
90 | return null;
91 | }
92 | }
93 | case TINYINT:
94 | return input.getByte(i);
95 | case SMALLINT:
96 | return input.getShort(i);
97 | case INTEGER:
98 | case DATE:
99 | case INTERVAL_YEAR_MONTH:
100 | return input.getInt(i);
101 | case TIME_WITHOUT_TIME_ZONE:
102 | final int timePrecision = getPrecision(fieldType);
103 | if (timePrecision < MIN_TIME_PRECISION || timePrecision > MAX_TIME_PRECISION) {
104 | throw new UnsupportedOperationException(
105 | String.format("The precision %s of TIME type is out of the range [%s, %s] supported by " +
106 | "HBase connector", timePrecision, MIN_TIME_PRECISION, MAX_TIME_PRECISION));
107 | }
108 | return input.getInt(i);
109 | case BIGINT:
110 | case INTERVAL_DAY_TIME:
111 | return input.getLong(i);
112 | case FLOAT:
113 | return input.getFloat(i);
114 | case DOUBLE:
115 | return input.getDouble(i);
116 | case TIMESTAMP_WITHOUT_TIME_ZONE:
117 | case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
118 | final int timestampPrecision = getPrecision(fieldType);
119 | if (timestampPrecision < MIN_TIMESTAMP_PRECISION || timestampPrecision > MAX_TIMESTAMP_PRECISION) {
120 | throw new UnsupportedOperationException(
121 | String.format("The precision %s of TIMESTAMP type is out of the range [%s, %s] supported " +
122 | "by " +
123 | "kudu connector", timestampPrecision, MIN_TIMESTAMP_PRECISION,
124 | MAX_TIMESTAMP_PRECISION));
125 | }
126 | return input.getTimestamp(i, timestampPrecision).toTimestamp();
127 | default:
128 | throw new UnsupportedOperationException("Unsupported type: " + fieldType);
129 | }
130 | }
131 |
132 | @Override
133 | public Optional<Operation> createBaseOperation(RowData input, KuduTable table) {
134 | boolean isUpsert = input.getRowKind() == RowKind.INSERT
135 | || input.getRowKind() == RowKind.UPDATE_AFTER;
136 | return Optional.of(isUpsert ? table.newUpsert() : table.newDeleteIgnore());
137 | }
138 | }
139 |
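As createBaseOperation above shows, the RowKind of the incoming RowData decides between an upsert and a delete-ignore. A hedged sketch using Flink's GenericRowData to illustrate both paths:

    // INSERT / UPDATE_AFTER row kinds map to table.newUpsert().
    GenericRowData upsertRow = GenericRowData.ofKind(
            RowKind.INSERT, 1, StringData.fromString("a"));
    // Other row kinds (e.g. DELETE) map to table.newDeleteIgnore().
    GenericRowData deleteRow = GenericRowData.ofKind(
            RowKind.DELETE, 1, StringData.fromString("a"));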
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/RowOperationMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 | import org.apache.flink.types.Row;
21 |
22 | @PublicEvolving
23 | public class RowOperationMapper extends AbstractSingleOperationMapper<Row> {
24 |
25 | protected RowOperationMapper(String[] columnNames) {
26 | super(columnNames);
27 | }
28 |
29 | public RowOperationMapper(String[] columnNames, KuduOperation operation) {
30 | super(columnNames, operation);
31 | }
32 |
33 | @Override
34 | public Object getField(Row input, int i) {
35 | return input.getField(i);
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/TupleOperationMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 | import org.apache.flink.api.java.tuple.Tuple;
21 |
22 | @PublicEvolving
23 | public class TupleOperationMapper<T extends Tuple> extends AbstractSingleOperationMapper<T> {
24 |
25 | protected TupleOperationMapper(String[] columnNames) {
26 | super(columnNames);
27 | }
28 |
29 | public TupleOperationMapper(String[] columnNames, KuduOperation operation) {
30 | super(columnNames, operation);
31 | }
32 |
33 | @Override
34 | public Object getField(T input, int i) {
35 | return input.getField(i);
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/internal/writer/UpsertOperationMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.internal.writer;
18 |
19 | import org.apache.flink.annotation.Internal;
20 | import org.apache.flink.api.java.tuple.Tuple2;
21 | import org.apache.flink.types.Row;
22 | import org.apache.kudu.client.KuduTable;
23 | import org.apache.kudu.client.Operation;
24 |
25 | import java.util.Optional;
26 |
27 | @Internal
28 | public class UpsertOperationMapper extends AbstractSingleOperationMapper<Tuple2<Boolean, Row>> {
29 |
30 | public UpsertOperationMapper(String[] columnNames) {
31 | super(columnNames);
32 | }
33 |
34 | @Override
35 | public Object getField(Tuple2<Boolean, Row> input, int i) {
36 | return input.f1.getField(i);
37 | }
38 |
39 | @Override
40 | public Optional<Operation> createBaseOperation(Tuple2<Boolean, Row> input, KuduTable table) {
41 | return Optional.of(input.f0 ? table.newUpsert() : table.newDelete());
42 | }
43 | }
44 |
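The Boolean in the Tuple2 carries Flink's legacy upsert-stream encoding: true marks an upsert record, false a deletion. A two-line sketch:

    Tuple2<Boolean, Row> upsert = Tuple2.of(true, Row.of(1, "title"));   // -> table.newUpsert()
    Tuple2<Boolean, Row> delete = Tuple2.of(false, Row.of(1, "title"));  // -> table.newDelete()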
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/table/catalog/KuduCatalogFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package org.colloh.flink.kudu.connector.table.catalog;
20 |
21 | import org.apache.flink.annotation.Internal;
22 | import org.apache.flink.table.catalog.Catalog;
23 | import org.apache.flink.table.descriptors.DescriptorProperties;
24 | import org.apache.flink.table.factories.CatalogFactory;
25 | import org.slf4j.Logger;
26 | import org.slf4j.LoggerFactory;
27 |
28 | import java.util.ArrayList;
29 | import java.util.HashMap;
30 | import java.util.List;
31 | import java.util.Map;
32 |
33 | import static org.colloh.flink.kudu.connector.table.KuduDynamicTableSourceSinkFactory.KUDU;
34 | import static org.colloh.flink.kudu.connector.table.KuduDynamicTableSourceSinkFactory.KUDU_MASTERS;
35 | import static org.apache.flink.table.descriptors.CatalogDescriptorValidator.CATALOG_PROPERTY_VERSION;
36 | import static org.apache.flink.table.descriptors.CatalogDescriptorValidator.CATALOG_TYPE;
37 |
38 | /**
39 | * Factory for {@link KuduCatalog}.
40 | */
41 | @Internal
42 | public class KuduCatalogFactory implements CatalogFactory {
43 |
44 | private static final Logger LOG = LoggerFactory.getLogger(KuduCatalogFactory.class);
45 |
46 | @Override
47 | public Map<String, String> requiredContext() {
48 | Map<String, String> context = new HashMap<>();
49 | context.put(CATALOG_TYPE, KUDU);
50 | context.put(CATALOG_PROPERTY_VERSION, "1"); // backwards compatibility
51 | return context;
52 | }
53 |
54 | @Override
55 | public List<String> supportedProperties() {
56 | List<String> properties = new ArrayList<>();
57 |
58 | properties.add(KUDU_MASTERS.key());
59 |
60 | return properties;
61 | }
62 |
63 | @Override
64 | public Catalog createCatalog(String name, Map<String, String> properties) {
65 | final DescriptorProperties descriptorProperties = getValidatedProperties(properties);
66 | return new KuduCatalog(name,
67 | descriptorProperties.getString(KUDU_MASTERS.key()));
68 | }
69 |
70 | private DescriptorProperties getValidatedProperties(Map<String, String> properties) {
71 | final DescriptorProperties descriptorProperties = new DescriptorProperties(true);
72 | descriptorProperties.putProperties(properties);
73 | descriptorProperties.validateString(KUDU_MASTERS.key(), false);
74 | return descriptorProperties;
75 | }
76 |
77 | }
78 |
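A sketch of driving the factory directly; the exact property keys come from CatalogDescriptorValidator and KuduDynamicTableSourceSinkFactory, and "kudu.masters" is an assumption about what KUDU_MASTERS.key() resolves to:

    Map<String, String> props = new HashMap<>();
    props.put("type", "kudu");                     // CATALOG_TYPE -> KUDU
    props.put("kudu.masters", "kudu-master:7051"); // assumed key for KUDU_MASTERS; placeholder address
    Catalog catalog = new KuduCatalogFactory().createCatalog("my_kudu", props);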
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/table/lookup/KuduLookupOptions.java:
--------------------------------------------------------------------------------
1 | package org.colloh.flink.kudu.connector.table.lookup;
2 |
3 | /**
4 | * @fileName: KuduLookupOptions.java
5 | * @description: Kudu lookup config options
6 | * @author: echo huang
7 | * @date: 2020/12/31 2:09 PM
8 | */
9 | public class KuduLookupOptions {
10 | private final long cacheMaxSize;
11 | private final long cacheExpireMs;
12 | private final int maxRetryTimes;
13 |
14 | public static Builder builder() {
15 | return new Builder();
16 | }
17 |
18 | public KuduLookupOptions(long cacheMaxSize, long cacheExpireMs, int maxRetryTimes) {
19 | this.cacheMaxSize = cacheMaxSize;
20 | this.cacheExpireMs = cacheExpireMs;
21 | this.maxRetryTimes = maxRetryTimes;
22 | }
23 |
24 | public long getCacheMaxSize() {
25 | return cacheMaxSize;
26 | }
27 |
28 |
29 | public long getCacheExpireMs() {
30 | return cacheExpireMs;
31 | }
32 |
33 |
34 | public int getMaxRetryTimes() {
35 | return maxRetryTimes;
36 | }
37 |
38 |
39 | public static final class Builder {
40 | private long cacheMaxSize;
41 | private long cacheExpireMs;
42 | private int maxRetryTimes;
43 |
44 | public static Builder options() {
45 | return new Builder();
46 | }
47 |
48 | public Builder withCacheMaxSize(long cacheMaxSize) {
49 | this.cacheMaxSize = cacheMaxSize;
50 | return this;
51 | }
52 |
53 | public Builder withCacheExpireMs(long cacheExpireMs) {
54 | this.cacheExpireMs = cacheExpireMs;
55 | return this;
56 | }
57 |
58 | public Builder withMaxRetryTimes(int maxRetryTimes) {
59 | this.maxRetryTimes = maxRetryTimes;
60 | return this;
61 | }
62 |
63 | public KuduLookupOptions build() {
64 | return new KuduLookupOptions(cacheMaxSize, cacheExpireMs, maxRetryTimes);
65 | }
66 | }
67 | }
68 |
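A minimal sketch of the fluent builder above; the cache and retry values are illustrative only:

    KuduLookupOptions lookupOptions = KuduLookupOptions.builder()
            .withCacheMaxSize(10_000L)   // max cached lookup keys
            .withCacheExpireMs(60_000L)  // cache entry TTL
            .withMaxRetryTimes(3)
            .build();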
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/table/lookup/RowDataLookupFunction.java:
--------------------------------------------------------------------------------
1 | package org.colloh.flink.kudu.connector.table.lookup;
2 |
3 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
4 | import org.colloh.flink.kudu.connector.internal.convertor.RowResultRowDataConvertor;
5 | import org.colloh.flink.kudu.connector.internal.reader.KuduReaderConfig;
6 | import org.apache.flink.table.data.GenericRowData;
7 | import org.apache.flink.table.data.RowData;
8 |
9 | /**
10 | * @fileName: RowDataLookupFunction.java
11 | * @description: Kudu lookup function support (RowData)
12 | * @author: echo huang
13 | * @date: 2020/12/29 2:22 PM
14 | */
15 | public class RowDataLookupFunction extends BaseKuduLookupFunction<RowData> {
16 | private static final long serialVersionUID = 1L;
17 |
18 |
19 | private RowDataLookupFunction(String[] keyNames, KuduTableInfo tableInfo, KuduReaderConfig kuduReaderConfig, String[] projectedFields, KuduLookupOptions kuduLookupOptions) {
20 | super(keyNames, new RowResultRowDataConvertor(), tableInfo, kuduReaderConfig, projectedFields, kuduLookupOptions);
21 | }
22 |
23 | @Override
24 | public RowData buildCacheKey(Object... keys) {
25 | return GenericRowData.of(keys);
26 | }
27 |
28 | public static class Builder {
29 | private KuduTableInfo tableInfo;
30 | private KuduReaderConfig kuduReaderConfig;
31 | private String[] keyNames;
32 | private String[] projectedFields;
33 | private KuduLookupOptions kuduLookupOptions;
34 |
35 | public static Builder options() {
36 | return new Builder();
37 | }
38 |
39 | public Builder tableInfo(KuduTableInfo tableInfo) {
40 | this.tableInfo = tableInfo;
41 | return this;
42 | }
43 |
44 | public Builder kuduReaderConfig(KuduReaderConfig kuduReaderConfig) {
45 | this.kuduReaderConfig = kuduReaderConfig;
46 | return this;
47 | }
48 |
49 | public Builder keyNames(String[] keyNames) {
50 | this.keyNames = keyNames;
51 | return this;
52 | }
53 |
54 | public Builder projectedFields(String[] projectedFields) {
55 | this.projectedFields = projectedFields;
56 | return this;
57 | }
58 |
59 | public Builder kuduLookupOptions(KuduLookupOptions kuduLookupOptions) {
60 | this.kuduLookupOptions = kuduLookupOptions;
61 | return this;
62 | }
63 |
64 | public RowDataLookupFunction build() {
65 | return new RowDataLookupFunction(keyNames, tableInfo, kuduReaderConfig, projectedFields, kuduLookupOptions);
66 | }
67 | }
68 | }
69 |
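Wiring the builder above, reusing the lookupOptions sketch from KuduLookupOptions; KuduTableInfo.forTable is an assumption about the internal API, and the master address is a placeholder:

    KuduReaderConfig readerConfig = KuduReaderConfig.Builder
            .setMasters("kudu-master:7051")
            .build();

    RowDataLookupFunction lookup = RowDataLookupFunction.Builder.options()
            .tableInfo(KuduTableInfo.forTable("books"))  // assumed factory method
            .kuduReaderConfig(readerConfig)
            .keyNames(new String[]{"id"})
            .projectedFields(new String[]{"id", "title"})
            .kuduLookupOptions(lookupOptions)
            .build();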
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/table/lookup/RowLookupFunction.java:
--------------------------------------------------------------------------------
1 | package org.colloh.flink.kudu.connector.table.lookup;
2 |
3 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
4 | import org.colloh.flink.kudu.connector.internal.convertor.RowResultRowConvertor;
5 | import org.colloh.flink.kudu.connector.internal.reader.KuduReaderConfig;
6 | import org.apache.flink.types.Row;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 |
10 | /**
11 | * @fileName: RowLookupFunction.java
12 | * @description: Kudu lookup function support (Row)
13 | * @author: echo huang
14 | * @date: 2020/12/29 2:22 PM
15 | */
16 | public class RowLookupFunction extends BaseKuduLookupFunction<Row> {
17 | private static final Logger LOG = LoggerFactory.getLogger(RowLookupFunction.class);
18 | private static final long serialVersionUID = 1L;
19 |
20 | private RowLookupFunction(String[] keyNames, KuduTableInfo tableInfo, KuduReaderConfig kuduReaderConfig, String[] projectedFields, KuduLookupOptions kuduLookupOptions) {
21 | super(keyNames, new RowResultRowConvertor(), tableInfo, kuduReaderConfig, projectedFields, kuduLookupOptions);
22 | }
23 |
24 | @Override
25 | public Row buildCacheKey(Object... keys) {
26 | return Row.of(keys);
27 | }
28 |
29 | public static class Builder {
30 | private KuduTableInfo tableInfo;
31 | private KuduReaderConfig kuduReaderConfig;
32 | private String[] keyNames;
33 | private String[] projectedFields;
34 | private KuduLookupOptions kuduLookupOptions;
35 |
36 | public static Builder options() {
37 | return new Builder();
38 | }
39 |
40 | public Builder tableInfo(KuduTableInfo tableInfo) {
41 | this.tableInfo = tableInfo;
42 | return this;
43 | }
44 |
45 | public Builder kuduReaderConfig(KuduReaderConfig kuduReaderConfig) {
46 | this.kuduReaderConfig = kuduReaderConfig;
47 | return this;
48 | }
49 |
50 | public Builder keyNames(String[] keyNames) {
51 | this.keyNames = keyNames;
52 | return this;
53 | }
54 |
55 | public Builder projectedFields(String[] projectedFields) {
56 | this.projectedFields = projectedFields;
57 | return this;
58 | }
59 |
60 | public Builder kuduLookupOptions(KuduLookupOptions kuduLookupOptions) {
61 | this.kuduLookupOptions = kuduLookupOptions;
62 | return this;
63 | }
64 |
65 | public RowLookupFunction build() {
66 | return new RowLookupFunction(keyNames, tableInfo, kuduReaderConfig, projectedFields, kuduLookupOptions);
67 | }
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/table/sink/KuduDynamicTableSink.java:
--------------------------------------------------------------------------------
1 | package org.colloh.flink.kudu.connector.table.sink;
2 |
3 | import org.apache.flink.table.api.TableSchema;
4 | import org.apache.flink.table.connector.ChangelogMode;
5 | import org.apache.flink.table.connector.sink.DynamicTableSink;
6 | import org.apache.flink.table.connector.sink.SinkFunctionProvider;
7 | import org.apache.flink.table.data.RowData;
8 | import org.apache.flink.types.RowKind;
9 | import org.apache.flink.util.Preconditions;
10 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
11 | import org.colloh.flink.kudu.connector.internal.writer.KuduWriterConfig;
12 | import org.colloh.flink.kudu.connector.internal.writer.RowDataUpsertOperationMapper;
13 |
14 | import java.util.Objects;
15 |
16 | /**
17 | * @fileName: KuduDynamicTableSink.java
18 | * @description: dynamic table sink for Kudu
19 | * @author: echo huang
20 | * @date: 2021/3/1 4:47 PM
21 | */
22 | public class KuduDynamicTableSink implements DynamicTableSink {
23 | private final KuduWriterConfig.Builder writerConfigBuilder;
24 | private final TableSchema flinkSchema;
25 | private final KuduTableInfo tableInfo;
26 |
27 | public KuduDynamicTableSink(KuduWriterConfig.Builder writerConfigBuilder, TableSchema flinkSchema, KuduTableInfo tableInfo) {
28 | this.writerConfigBuilder = writerConfigBuilder;
29 | this.flinkSchema = flinkSchema;
30 | this.tableInfo = tableInfo;
31 | }
32 |
33 | @Override
34 | public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
35 | this.validatePrimaryKey(requestedMode);
36 | return ChangelogMode.newBuilder().addContainedKind(RowKind.INSERT).addContainedKind(RowKind.DELETE).addContainedKind(RowKind.UPDATE_AFTER).build();
37 | }
38 |
39 | private void validatePrimaryKey(ChangelogMode requestedMode) {
40 | Preconditions.checkState(ChangelogMode.insertOnly().equals(requestedMode) || this.tableInfo.getSchema().getPrimaryKeyColumnCount() != 0, "Please declare a primary key for the sink table when the query contains update/delete records.");
41 | }
42 |
43 | @Override
44 | public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
45 | KuduSink<RowData> upsertKuduSink = new KuduSink<>(writerConfigBuilder.build(), tableInfo, new RowDataUpsertOperationMapper(flinkSchema));
46 | return SinkFunctionProvider.of(upsertKuduSink);
47 | }
48 |
49 | @Override
50 | public DynamicTableSink copy() {
51 | return new KuduDynamicTableSink(this.writerConfigBuilder, this.flinkSchema, this.tableInfo);
52 | }
53 |
54 | @Override
55 | public String asSummaryString() {
56 | return "kudu";
57 | }
58 |
59 | @Override
60 | public boolean equals(Object o) {
61 | if (this == o) return true;
62 | if (o == null || getClass() != o.getClass()) return false;
63 | KuduDynamicTableSink that = (KuduDynamicTableSink) o;
64 | return Objects.equals(writerConfigBuilder, that.writerConfigBuilder) && Objects.equals(flinkSchema, that.flinkSchema) && Objects.equals(tableInfo, that.tableInfo);
65 | }
66 |
67 | @Override
68 | public int hashCode() {
69 | return Objects.hash(writerConfigBuilder, flinkSchema, tableInfo);
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/table/sink/KuduSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.kudu.connector.table.sink;
18 |
19 | import org.apache.flink.annotation.PublicEvolving;
20 | import org.apache.flink.configuration.Configuration;
21 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
22 | import org.colloh.flink.kudu.connector.internal.failure.DefaultKuduFailureHandler;
23 | import org.colloh.flink.kudu.connector.internal.failure.KuduFailureHandler;
24 | import org.colloh.flink.kudu.connector.internal.writer.AbstractSingleOperationMapper;
25 | import org.colloh.flink.kudu.connector.internal.writer.KuduOperationMapper;
26 | import org.colloh.flink.kudu.connector.internal.writer.KuduWriter;
27 | import org.colloh.flink.kudu.connector.internal.writer.KuduWriterConfig;
28 | import org.apache.flink.runtime.state.FunctionInitializationContext;
29 | import org.apache.flink.runtime.state.FunctionSnapshotContext;
30 | import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
31 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
32 | import org.slf4j.Logger;
33 | import org.slf4j.LoggerFactory;
34 |
35 | import static org.apache.flink.util.Preconditions.checkNotNull;
36 |
37 | /**
38 | * Streaming Sink that executes Kudu operations based on the incoming elements.
39 | * The target Kudu table is defined in the {@link KuduTableInfo} object together with parameters for table
40 | * creation in case the table does not exist.
41 | *
42 | * Incoming records are mapped to Kudu table operations using the provided {@link KuduOperationMapper} logic,
43 | * while failures resulting from the operations are handled by the {@link KuduFailureHandler} instance.
44 | *
45 | * @param <IN> Type of the input records
46 | */
47 | @PublicEvolving
48 | public class KuduSink<IN> extends RichSinkFunction<IN> implements CheckpointedFunction {
49 |
50 | private final Logger log = LoggerFactory.getLogger(getClass());
51 |
52 | private final KuduTableInfo tableInfo;
53 | private final KuduWriterConfig writerConfig;
54 | private final KuduFailureHandler failureHandler;
55 | private final KuduOperationMapper<IN> opsMapper;
56 | private transient KuduWriter<IN> kuduWriter;
57 |
58 | /**
59 | * Creates a new {@link KuduSink} that will execute operations against the specified Kudu table (defined in
60 | * {@link KuduTableInfo})
61 | * for the incoming stream elements.
62 | *
63 | * @param writerConfig Writer configuration
64 | * @param tableInfo Table information for the target table
65 | * @param opsMapper Mapping logic from inputs to Kudu operations
66 | */
67 | public KuduSink(KuduWriterConfig writerConfig, KuduTableInfo tableInfo, KuduOperationMapper<IN> opsMapper) {
68 | this(writerConfig, tableInfo, opsMapper, new DefaultKuduFailureHandler());
69 | }
70 |
71 | /**
72 | * Creates a new {@link KuduSink} that will execute operations against the specified Kudu table (defined in
73 | * {@link KuduTableInfo})
74 | * for the incoming stream elements.
75 | *
76 | * @param writerConfig Writer configuration
77 | * @param tableInfo Table information for the target table
78 | * @param opsMapper Mapping logic from inputs to Kudu operations
79 | * @param failureHandler Custom failure handler instance
80 | */
81 | public KuduSink(KuduWriterConfig writerConfig, KuduTableInfo tableInfo, KuduOperationMapper<IN> opsMapper,
82 | KuduFailureHandler failureHandler) {
83 | this.tableInfo = checkNotNull(tableInfo, "tableInfo must not be null");
84 | this.writerConfig = checkNotNull(writerConfig, "config must not be null");
85 | this.opsMapper = checkNotNull(opsMapper, "opsMapper must not be null");
86 | this.failureHandler = checkNotNull(failureHandler, "failureHandler must not be null");
87 | }
88 |
89 | @Override
90 | public void open(Configuration parameters) throws Exception {
91 | kuduWriter = new KuduWriter<>(tableInfo, writerConfig, opsMapper, failureHandler);
92 | }
93 |
94 | @Override
95 | public void invoke(IN value) throws Exception {
96 | try {
97 | kuduWriter.write(value);
98 | } catch (ClassCastException e) {
99 | failureHandler.onTypeMismatch(e);
100 | }
101 | }
102 |
103 | @Override
104 | public void close() throws Exception {
105 | if (kuduWriter != null) {
106 | kuduWriter.close();
107 | }
108 | }
109 |
110 | @Override
111 | public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception {
112 | kuduWriter.flushAndCheckErrors();
113 | }
114 |
115 | @Override
116 | public void initializeState(FunctionInitializationContext functionInitializationContext) throws Exception {
117 | }
118 |
119 | }
120 |
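A hedged end-to-end sketch of wiring KuduSink into a DataStream job; KuduTableInfo.forTable is an assumption about the internal API, and the master address is a placeholder:

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    KuduWriterConfig writerConfig = KuduWriterConfig.Builder
            .setMasters("kudu-master:7051")
            .build();

    env.fromElements(Row.of(1001, "Java for dummies"))
       .returns(Types.ROW(Types.INT, Types.STRING))
       .addSink(new KuduSink<>(writerConfig, KuduTableInfo.forTable("books"),  // forTable assumed
               new RowOperationMapper(new String[]{"id", "title"},
                       AbstractSingleOperationMapper.KuduOperation.UPSERT)));
    env.execute("kudu-sink-sketch");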
--------------------------------------------------------------------------------
/src/main/java/org/colloh/flink/kudu/connector/table/sink/KuduTableSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package org.colloh.flink.kudu.connector.table.sink;
19 |
20 | import org.apache.flink.api.common.typeinfo.TypeInformation;
21 | import org.apache.flink.api.java.tuple.Tuple2;
22 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
23 | import org.colloh.flink.kudu.connector.internal.writer.KuduWriterConfig;
24 | import org.colloh.flink.kudu.connector.internal.writer.UpsertOperationMapper;
25 | import org.apache.flink.streaming.api.datastream.DataStream;
26 | import org.apache.flink.streaming.api.datastream.DataStreamSink;
27 | import org.apache.flink.table.api.TableSchema;
28 | import org.apache.flink.table.sinks.TableSink;
29 | import org.apache.flink.table.sinks.UpsertStreamTableSink;
30 | import org.apache.flink.table.utils.TableConnectorUtils;
31 | import org.apache.flink.types.Row;
32 |
33 | public class KuduTableSink implements UpsertStreamTableSink<Row> {
34 |
35 | private final KuduWriterConfig.Builder writerConfigBuilder;
36 | private final TableSchema flinkSchema;
37 | private final KuduTableInfo tableInfo;
38 |
39 | public KuduTableSink(KuduWriterConfig.Builder configBuilder, KuduTableInfo tableInfo, TableSchema flinkSchema) {
40 | this.writerConfigBuilder = configBuilder;
41 | this.tableInfo = tableInfo;
42 | this.flinkSchema = flinkSchema;
43 | }
44 |
45 | @Override
46 | public void setKeyFields(String[] keyFields) { /* this has no effect */}
47 |
48 | @Override
49 | public void setIsAppendOnly(Boolean isAppendOnly) { /* this has no effect */}
50 |
51 | @Override
52 | public TypeInformation<Row> getRecordType() {
53 | return flinkSchema.toRowType();
54 | }
55 |
56 | @Override
57 | public DataStreamSink<?> consumeDataStream(DataStream<Tuple2<Boolean, Row>> dataStreamTuple) {
58 | KuduSink<Tuple2<Boolean, Row>> upsertKuduSink = new KuduSink<>(writerConfigBuilder.build(), tableInfo, new UpsertOperationMapper(getTableSchema().getFieldNames()));
59 |
60 | return dataStreamTuple
61 | .addSink(upsertKuduSink)
62 | .setParallelism(dataStreamTuple.getParallelism())
63 | .name(TableConnectorUtils.generateRuntimeName(this.getClass(), getTableSchema().getFieldNames()));
64 | }
65 |
66 | @Override
67 | public TableSink<Tuple2<Boolean, Row>> configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
68 | return new KuduTableSink(writerConfigBuilder, tableInfo, flinkSchema);
69 | }
70 |
71 | @Override
72 | public TableSchema getTableSchema() {
73 | return flinkSchema;
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory:
--------------------------------------------------------------------------------
1 | org.colloh.flink.kudu.connector.table.KuduDynamicTableSourceSinkFactory
--------------------------------------------------------------------------------
/src/main/resources/META-INF/services/org.apache.flink.table.factories.TableFactory:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to You under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | org.colloh.flink.kudu.connector.table.KuduTableFactory
17 | org.colloh.flink.kudu.connector.table.catalog.KuduCatalogFactory
18 |
--------------------------------------------------------------------------------
/src/test/java/org/colloh/flink/connectors/kudu/batch/KuduInputFormatTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.connectors.kudu.batch;
18 |
19 | import org.colloh.flink.kudu.connector.KuduTestBase;
20 | import org.colloh.flink.kudu.connector.format.KuduRowInputFormat;
21 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
22 | import org.colloh.flink.kudu.connector.internal.convertor.RowResultRowConvertor;
23 | import org.colloh.flink.kudu.connector.internal.reader.KuduInputSplit;
24 | import org.colloh.flink.kudu.connector.internal.reader.KuduReaderConfig;
25 | import org.apache.flink.types.Row;
26 | import org.junit.jupiter.api.Assertions;
27 | import org.junit.jupiter.api.Test;
28 |
29 | import java.util.ArrayList;
30 | import java.util.Arrays;
31 | import java.util.List;
32 |
33 | class KuduInputFormatTest extends KuduTestBase {
34 |
35 | @Test
36 | void testInvalidKuduMaster() {
37 | KuduTableInfo tableInfo = booksTableInfo("books", false);
38 | Assertions.assertThrows(NullPointerException.class, () -> new KuduRowInputFormat(null, new RowResultRowConvertor(), tableInfo));
39 | }
40 |
41 | @Test
42 | void testInvalidTableInfo() {
43 | String masterAddresses = harness.getMasterAddressesAsString();
44 | KuduReaderConfig readerConfig = KuduReaderConfig.Builder.setMasters(masterAddresses).build();
45 | Assertions.assertThrows(NullPointerException.class, () -> new KuduRowInputFormat(readerConfig, new RowResultRowConvertor(), null));
46 | }
47 |
48 | @Test
49 | void testInputFormat() throws Exception {
50 | KuduTableInfo tableInfo = booksTableInfo("books", true);
51 | setUpDatabase(tableInfo);
52 |
53 | List<Row> rows = readRows(tableInfo);
54 | Assertions.assertEquals(5, rows.size());
55 |
56 | cleanDatabase(tableInfo);
57 | }
58 |
59 | @Test
60 | void testInputFormatWithProjection() throws Exception {
61 | KuduTableInfo tableInfo = booksTableInfo("books", true);
62 | setUpDatabase(tableInfo);
63 |
64 | List<Row> rows = readRows(tableInfo, "title", "id");
65 | Assertions.assertEquals(5, rows.size());
66 |
67 | for (Row row : rows) {
68 | Assertions.assertEquals(2, row.getArity());
69 | }
70 |
71 | cleanDatabase(tableInfo);
72 | }
73 |
74 | private List<Row> readRows(KuduTableInfo tableInfo, String... fieldProjection) throws Exception {
75 | String masterAddresses = harness.getMasterAddressesAsString();
76 | KuduReaderConfig readerConfig = KuduReaderConfig.Builder.setMasters(masterAddresses).build();
77 | KuduRowInputFormat inputFormat = new KuduRowInputFormat(readerConfig, new RowResultRowConvertor(), tableInfo, new ArrayList<>(),
78 | fieldProjection == null ? null : Arrays.asList(fieldProjection));
79 |
80 | KuduInputSplit[] splits = inputFormat.createInputSplits(1);
81 | List<Row> rows = new ArrayList<>();
82 | for (KuduInputSplit split : splits) {
83 | inputFormat.open(split);
84 | while (!inputFormat.reachedEnd()) {
85 | Row row = inputFormat.nextRecord(new Row(5));
86 | if (row != null) {
87 | rows.add(row);
88 | }
89 | }
90 | }
91 | inputFormat.close();
92 |
93 | return rows;
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/src/test/java/org/colloh/flink/connectors/kudu/batch/KuduOutputFormatTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.connectors.kudu.batch;
18 |
19 | import org.colloh.flink.kudu.connector.KuduTestBase;
20 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
21 | import org.colloh.flink.kudu.connector.format.KuduOutputFormat;
22 | import org.colloh.flink.kudu.connector.internal.writer.AbstractSingleOperationMapper;
23 | import org.colloh.flink.kudu.connector.internal.writer.KuduWriterConfig;
24 | import org.colloh.flink.kudu.connector.internal.writer.RowOperationMapper;
25 | import org.apache.flink.types.Row;
26 |
27 | import org.junit.jupiter.api.Assertions;
28 | import org.junit.jupiter.api.Test;
29 |
30 | import java.util.List;
31 | import java.util.UUID;
32 |
33 | class KuduOutputFormatTest extends KuduTestBase {
34 |
35 | @Test
36 | void testInvalidKuduMaster() {
37 | KuduTableInfo tableInfo = booksTableInfo(UUID.randomUUID().toString(), false);
38 | Assertions.assertThrows(NullPointerException.class, () -> new KuduOutputFormat<>(null, tableInfo, null));
39 | }
40 |
41 | @Test
42 | void testInvalidTableInfo() {
43 | String masterAddresses = harness.getMasterAddressesAsString();
44 | KuduWriterConfig writerConfig = KuduWriterConfig.Builder.setMasters(masterAddresses).build();
45 | Assertions.assertThrows(NullPointerException.class, () -> new KuduOutputFormat<>(writerConfig, null, null));
46 | }
47 |
48 | @Test
49 | void testNotTableExist() {
50 | String masterAddresses = harness.getMasterAddressesAsString();
51 | KuduTableInfo tableInfo = booksTableInfo(UUID.randomUUID().toString(), false);
52 | KuduWriterConfig writerConfig = KuduWriterConfig.Builder.setMasters(masterAddresses).build();
53 | KuduOutputFormat<Row> outputFormat = new KuduOutputFormat<>(writerConfig, tableInfo, new RowOperationMapper(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT));
54 | Assertions.assertThrows(RuntimeException.class, () -> outputFormat.open(0, 1));
55 | }
56 |
57 | @Test
58 | void testOutputWithStrongConsistency() throws Exception {
59 | String masterAddresses = harness.getMasterAddressesAsString();
60 |
61 | KuduTableInfo tableInfo = booksTableInfo(UUID.randomUUID().toString(), true);
62 | KuduWriterConfig writerConfig = KuduWriterConfig.Builder
63 | .setMasters(masterAddresses)
64 | .setStrongConsistency()
65 | .build();
66 | KuduOutputFormat<Row> outputFormat = new KuduOutputFormat<>(writerConfig, tableInfo, new RowOperationMapper(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT));
67 |
68 | outputFormat.open(0, 1);
69 |
70 | for (Row kuduRow : booksDataRow()) {
71 | outputFormat.writeRecord(kuduRow);
72 | }
73 | outputFormat.close();
74 |
75 | List<Row> rows = readRows(tableInfo);
76 | Assertions.assertEquals(5, rows.size());
77 | kuduRowsTest(rows);
78 |
79 | cleanDatabase(tableInfo);
80 | }
81 |
82 | @Test
83 | void testOutputWithEventualConsistency() throws Exception {
84 | String masterAddresses = harness.getMasterAddressesAsString();
85 |
86 | KuduTableInfo tableInfo = booksTableInfo(UUID.randomUUID().toString(), true);
87 | KuduWriterConfig writerConfig = KuduWriterConfig.Builder
88 | .setMasters(masterAddresses)
89 | .setEventualConsistency()
90 | .build();
91 | KuduOutputFormat<Row> outputFormat = new KuduOutputFormat<>(writerConfig, tableInfo, new RowOperationMapper(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT));
92 |
93 | outputFormat.open(0, 1);
94 |
95 | for (Row kuduRow : booksDataRow()) {
96 | outputFormat.writeRecord(kuduRow);
97 | }
98 |
99 | // sleep to allow eventual consistency to finish
100 | Thread.sleep(1000);
101 |
102 | outputFormat.close();
103 |
104 | List<Row> rows = readRows(tableInfo);
105 | Assertions.assertEquals(5, rows.size());
106 | kuduRowsTest(rows);
107 |
108 | cleanDatabase(tableInfo);
109 | }
110 |
111 | }
112 |
--------------------------------------------------------------------------------
/src/test/java/org/colloh/flink/connectors/kudu/table/KuduDynamicTableSourceITCase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.connectors.kudu.table;
18 |
19 | import org.colloh.flink.kudu.connector.internal.KuduTableInfo;
20 | import org.colloh.flink.kudu.connector.KuduTestBase;
21 | import org.colloh.flink.kudu.connector.table.catalog.KuduCatalog;
22 | import org.colloh.flink.kudu.connector.table.source.KuduDynamicTableSource;
23 | import org.apache.flink.table.api.TableEnvironment;
24 | import org.apache.flink.types.Row;
25 | import org.apache.flink.util.CloseableIterator;
26 | import org.junit.jupiter.api.BeforeEach;
27 | import org.junit.jupiter.api.Test;
28 |
29 | import java.util.ArrayList;
30 | import java.util.List;
31 |
32 | import static org.junit.jupiter.api.Assertions.assertEquals;
33 |
34 | /**
35 | * Integration tests for {@link KuduDynamicTableSource}.
36 | */
37 | public class KuduDynamicTableSourceITCase extends KuduTestBase {
38 | private TableEnvironment tableEnv;
39 | private KuduCatalog catalog;
40 |
41 | @BeforeEach
42 | public void init() {
43 | KuduTableInfo tableInfo = booksTableInfo("books", true);
44 | setUpDatabase(tableInfo);
45 | tableEnv = KuduTableTestUtils.createTableEnvWithBlinkPlannerBatchMode();
46 | catalog = new KuduCatalog(harness.getMasterAddressesAsString());
47 | tableEnv.registerCatalog("kudu", catalog);
48 | tableEnv.useCatalog("kudu");
49 | }
50 |
51 | @Test
52 | void testFullBatchScan() throws Exception {
53 | CloseableIterator<Row> it = tableEnv.executeSql("select * from books order by id").collect();
54 | List<Row> results = new ArrayList<>();
55 | it.forEachRemaining(results::add);
56 | assertEquals(5, results.size());
57 | assertEquals("1001,Java for dummies,Tan Ah Teck,11.11,11", results.get(0).toString());
58 | tableEnv.sqlUpdate("DROP TABLE books");
59 | }
60 |
61 | @Test
62 | void testScanWithProjectionAndFilter() throws Exception {
63 | // title of the books with id in (1003, 1004) and quantity < 40
64 | CloseableIterator<Row> it = tableEnv.executeSql("SELECT title FROM books WHERE id IN (1003, 1004) and quantity < 40").collect();
65 | List<Row> results = new ArrayList<>();
66 | it.forEachRemaining(results::add);
67 | assertEquals(1, results.size());
68 | assertEquals("More Java for more dummies", results.get(0).toString());
69 | tableEnv.sqlUpdate("DROP TABLE books");
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/src/test/java/org/colloh/flink/connectors/kudu/table/KuduTableTestUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.connectors.kudu.table;
18 |
19 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
20 | import org.apache.flink.table.api.EnvironmentSettings;
21 | import org.apache.flink.table.api.TableEnvironment;
22 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
23 |
24 | import static org.apache.flink.table.api.config.ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM;
25 |
26 | public class KuduTableTestUtils {
27 |
28 | public static StreamTableEnvironment createTableEnvWithBlinkPlannerStreamingMode(StreamExecutionEnvironment env) {
29 | EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
30 | StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
31 | tableEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM.key(), 1);
32 | return tableEnv;
33 | }
34 |
35 | public static TableEnvironment createTableEnvWithBlinkPlannerBatchMode() {
36 | EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build();
37 | TableEnvironment tableEnv = TableEnvironment.create(settings);
38 | tableEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM.key(), 1);
39 | return tableEnv;
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/src/test/java/org/colloh/flink/connectors/kudu/writer/AbstractOperationTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.connectors.kudu.writer;
18 |
19 | import org.colloh.flink.kudu.connector.KuduTestBase;
20 |
21 | import org.apache.kudu.Schema;
22 | import org.apache.kudu.client.Delete;
23 | import org.apache.kudu.client.Insert;
24 | import org.apache.kudu.client.KuduTable;
25 | import org.apache.kudu.client.PartialRow;
26 | import org.apache.kudu.client.Update;
27 | import org.apache.kudu.client.Upsert;
28 | import org.junit.jupiter.api.BeforeEach;
29 | import org.mockito.Mock;
30 | import org.mockito.MockitoAnnotations;
31 |
32 | import static org.mockito.Mockito.when;
33 |
34 | public abstract class AbstractOperationTest {
35 |
36 | public static final Schema tableSchema = KuduTestBase.booksTableInfo("test_table", true).getSchema();
37 | @Mock
38 | Insert mockInsert;
39 | @Mock
40 | Upsert mockUpsert;
41 | @Mock
42 | Update mockUpdate;
43 | @Mock
44 | Delete mockDelete;
45 | @Mock
46 | KuduTable mockTable;
47 | @Mock
48 | PartialRow mockPartialRow;
49 |
50 | @BeforeEach
51 | public void setup() {
52 | MockitoAnnotations.initMocks(this);
53 | when(mockInsert.getRow()).thenReturn(mockPartialRow);
54 | when(mockUpsert.getRow()).thenReturn(mockPartialRow);
55 | when(mockUpdate.getRow()).thenReturn(mockPartialRow);
56 | when(mockDelete.getRow()).thenReturn(mockPartialRow);
57 | when(mockTable.newInsert()).thenReturn(mockInsert);
58 | when(mockTable.newUpsert()).thenReturn(mockUpsert);
59 | when(mockTable.newUpdate()).thenReturn(mockUpdate);
60 | when(mockTable.newDelete()).thenReturn(mockDelete);
61 | when(mockTable.getSchema()).thenReturn(tableSchema);
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/src/test/java/org/colloh/flink/connectors/kudu/writer/PojoOperationMapperTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package org.colloh.flink.connectors.kudu.writer;
19 |
20 | import org.colloh.flink.kudu.connector.KuduTestBase.BookInfo;
21 | import org.colloh.flink.kudu.connector.KuduTestBase;
22 | import org.colloh.flink.kudu.connector.internal.writer.AbstractSingleOperationMapper;
23 | import org.colloh.flink.kudu.connector.internal.writer.PojoOperationMapper;
24 |
25 | import org.apache.kudu.client.Operation;
26 | import org.apache.kudu.client.PartialRow;
27 | import org.junit.jupiter.api.Test;
28 | import org.mockito.Mockito;
29 |
30 | import java.util.List;
31 |
32 | import static org.junit.Assert.assertEquals;
33 |
34 | public class PojoOperationMapperTest extends AbstractOperationTest {
35 |
36 | @Test
37 | void testPojoMapper() {
38 |
39 | PojoOperationMapper<BookInfo> mapper = new PojoOperationMapper<>(BookInfo.class, KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT);
40 |
41 | BookInfo bookInfo = KuduTestBase.booksDataPojo().get(0);
42 |
43 | assertEquals(bookInfo.id, mapper.getField(bookInfo, 0));
44 | assertEquals(bookInfo.title, mapper.getField(bookInfo, 1));
45 | assertEquals(bookInfo.author, mapper.getField(bookInfo, 2));
46 | assertEquals(bookInfo.price, mapper.getField(bookInfo, 3));
47 | assertEquals(bookInfo.quantity, mapper.getField(bookInfo, 4));
48 |
49 | List<Operation> operations = mapper.createOperations(bookInfo, mockTable);
50 | assertEquals(1, operations.size());
51 |
52 | PartialRow row = operations.get(0).getRow();
53 | Mockito.verify(row, Mockito.times(1)).addObject("id", bookInfo.id);
54 | Mockito.verify(row, Mockito.times(1)).addObject("quantity", bookInfo.quantity);
55 |
56 | Mockito.verify(row, Mockito.times(1)).addObject("title", bookInfo.title);
57 | Mockito.verify(row, Mockito.times(1)).addObject("author", bookInfo.author);
58 |
59 | Mockito.verify(row, Mockito.times(1)).addObject("price", bookInfo.price);
60 | }
61 |
62 | @Test
63 | public void testFieldInheritance() {
64 | PojoOperationMapper<Second> mapper = new PojoOperationMapper<>(Second.class, new String[]{"s1", "i1", "i2"}, AbstractSingleOperationMapper.KuduOperation.INSERT);
65 | Second s = new Second();
66 | assertEquals("s1", mapper.getField(s, 0));
67 | assertEquals(1, mapper.getField(s, 1));
68 | assertEquals(2, mapper.getField(s, 2));
69 | }
70 |
71 | private static class First {
72 | private final int i1 = 1;
73 | public int i2 = 2;
74 | private final String s1 = "ignore";
75 | }
76 |
77 | private static class Second extends First {
78 | private final String s1 = "s1";
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/src/test/java/org/colloh/flink/connectors/kudu/writer/RowOperationMapperTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.connectors.kudu.writer;
18 |
19 | import org.colloh.flink.kudu.connector.KuduTestBase;
20 | import org.colloh.flink.kudu.connector.internal.writer.AbstractSingleOperationMapper;
21 | import org.colloh.flink.kudu.connector.internal.writer.RowOperationMapper;
22 | import org.apache.flink.types.Row;
23 |
24 | import org.apache.kudu.client.Operation;
25 | import org.junit.jupiter.api.Assertions;
26 | import org.junit.jupiter.api.Test;
27 |
28 | import java.util.List;
29 |
30 | import static org.junit.jupiter.api.Assertions.assertEquals;
31 | import static org.mockito.Mockito.verify;
32 |
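/**
 * Unit tests for {@link RowOperationMapper}: field extraction from Flink
 * {@link Row} values and the Kudu operation created for each write mode.
 */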
33 | public class RowOperationMapperTest extends AbstractOperationTest {
34 |
35 | @Test
36 | void testGetField() {
37 | RowOperationMapper mapper = new RowOperationMapper(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT);
38 | Row inputRow = KuduTestBase.booksDataRow().get(0);
39 |
40 | for (int i = 0; i < inputRow.getArity(); i++) {
41 | Assertions.assertEquals(inputRow.getField(i), mapper.getField(inputRow, i));
42 | }
43 | }
44 |
45 | @Test
46 | void testCorrectOperationInsert() {
47 | RowOperationMapper mapper = new RowOperationMapper(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT);
48 | Row inputRow = KuduTestBase.booksDataRow().get(0);
49 |
50 | List<Operation> operations = mapper.createOperations(inputRow, mockTable);
51 |
52 | assertEquals(1, operations.size());
53 | verify(mockTable).newInsert();
54 | }
55 |
56 | @Test
57 | void testCorrectOperationUpsert() {
58 | RowOperationMapper mapper = new RowOperationMapper(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.UPSERT);
59 | Row inputRow = KuduTestBase.booksDataRow().get(0);
60 |
61 | List<Operation> operations = mapper.createOperations(inputRow, mockTable);
62 |
63 | assertEquals(1, operations.size());
64 | verify(mockTable).newUpsert();
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/src/test/java/org/colloh/flink/connectors/kudu/writer/TupleOperationMapperTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.colloh.flink.connectors.kudu.writer;
18 |
19 | import org.apache.flink.api.java.tuple.Tuple5;
20 | import org.colloh.flink.kudu.connector.KuduTestBase;
21 | import org.colloh.flink.kudu.connector.internal.writer.AbstractSingleOperationMapper;
22 | import org.colloh.flink.kudu.connector.internal.writer.TupleOperationMapper;
23 |
24 | import org.apache.kudu.client.Operation;
25 | import org.junit.jupiter.api.Assertions;
26 | import org.junit.jupiter.api.Test;
27 |
28 | import java.util.List;
29 |
30 | import static org.junit.jupiter.api.Assertions.assertEquals;
31 | import static org.mockito.Mockito.verify;
32 |
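/**
 * Unit tests for {@link TupleOperationMapper}: field extraction from Flink
 * tuples and the Kudu operation created for each write mode.
 */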
33 | public class TupleOperationMapperTest extends AbstractOperationTest {
34 | @Test
35 | void testGetField() {
36 | TupleOperationMapper<Tuple5<Integer, String, String, Double, Integer>> mapper =
37 | new TupleOperationMapper<>(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT);
38 | Tuple5<Integer, String, String, Double, Integer> inputTuple = KuduTestBase.booksDataTuple().get(0);
39 |
40 | for (int i = 0; i < inputTuple.getArity(); i++) {
41 | Assertions.assertEquals(inputTuple.getField(i), mapper.getField(inputTuple, i));
42 | }
43 | }
44 |
45 | @Test
46 | void testCorrectOperationInsert() {
47 | TupleOperationMapper<Tuple5<Integer, String, String, Double, Integer>> mapper =
48 | new TupleOperationMapper<>(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT);
49 | Tuple5<Integer, String, String, Double, Integer> inputTuple = KuduTestBase.booksDataTuple().get(0);
50 |
51 | List<Operation> operations = mapper.createOperations(inputTuple, mockTable);
52 |
53 | assertEquals(1, operations.size());
54 | verify(mockTable).newInsert();
55 | }
56 |
57 | @Test
58 | void testCorrectOperationUpsert() {
59 | TupleOperationMapper<Tuple5<Integer, String, String, Double, Integer>> mapper =
60 | new TupleOperationMapper<>(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.UPSERT);
61 | Tuple5<Integer, String, String, Double, Integer> inputTuple = KuduTestBase.booksDataTuple().get(0);
62 |
63 | List<Operation> operations = mapper.createOperations(inputTuple, mockTable);
64 |
65 | assertEquals(1, operations.size());
66 | verify(mockTable).newUpsert();
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/src/test/resources/log4j2-test.properties:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # Licensed to the Apache Software Foundation (ASF) under one
3 | # or more contributor license agreements. See the NOTICE file
4 | # distributed with this work for additional information
5 | # regarding copyright ownership. The ASF licenses this file
6 | # to you under the Apache License, Version 2.0 (the
7 | # "License"); you may not use this file except in compliance
8 | # with the License. You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | ################################################################################
18 | # The root logger level is OFF by default so test runs do not flood build logs;
19 | # raise it to INFO manually when debugging.
20 | rootLogger.level=OFF
21 | rootLogger.appenderRef.test.ref=TestLogger
22 | appender.testlogger.name=TestLogger
23 | appender.testlogger.type=CONSOLE
24 | appender.testlogger.target=SYSTEM_ERR
25 | appender.testlogger.layout.type=PatternLayout
26 | appender.testlogger.layout.pattern=%-4r [%t] %-5p %c %x - %m%n
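# Example (hypothetical logger name, adjust to the package you are debugging):
# enable DEBUG output for a single package while the root logger stays OFF.
# logger.kudu.name=org.colloh.flink.kudu.connector
# logger.kudu.level=DEBUG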
27 |
--------------------------------------------------------------------------------