├── .gitignore ├── README.md ├── flink-examples-1.10 ├── pom.xml └── src │ └── main │ └── java │ └── flink │ └── examples │ └── sql │ └── _07 │ └── query │ └── _06_joins │ └── _02_interval_joins │ └── _01_outer_join │ ├── WindowJoinFunction$46.java │ └── _06_Interval_Outer_Joins_EventTime_Test.java ├── flink-examples-1.12 ├── .gitignore ├── pom.xml └── src │ └── main │ └── java │ └── flink │ └── examples │ ├── datastream │ └── _07 │ │ └── query │ │ └── _04_window │ │ └── _04_TumbleWindowTest.java │ └── sql │ └── _07 │ └── query │ └── _04_window_agg │ ├── _04_TumbleWindowTest.java │ ├── _04_TumbleWindowTest_GroupingWindowAggsHandler$59.java │ ├── _04_TumbleWindowTest_KeyProjection$69.java │ └── _04_TumbleWindowTest_WatermarkGenerator$6.java ├── flink-examples-1.13 ├── .gitignore ├── pom.xml └── src │ ├── main │ ├── java │ │ └── flink │ │ │ ├── core │ │ │ └── source │ │ │ │ ├── JaninoUtils.java │ │ │ │ └── SourceFactory.java │ │ │ └── examples │ │ │ ├── FlinkEnvUtils.java │ │ │ ├── JacksonUtils.java │ │ │ ├── datastream │ │ │ ├── _01 │ │ │ │ └── bytedance │ │ │ │ │ └── split │ │ │ │ │ ├── codegen │ │ │ │ │ ├── JaninoUtils.java │ │ │ │ │ └── benchmark │ │ │ │ │ │ └── Benchmark.java │ │ │ │ │ ├── job │ │ │ │ │ ├── SplitExampleJob.java │ │ │ │ │ └── start.sh │ │ │ │ │ ├── kafka │ │ │ │ │ ├── KafkaProducerCenter.java │ │ │ │ │ └── demo │ │ │ │ │ │ ├── Application.java │ │ │ │ │ │ ├── ConsumerThread.java │ │ │ │ │ │ └── ProducerThread.java │ │ │ │ │ ├── model │ │ │ │ │ ├── ClientLogSink.java │ │ │ │ │ ├── ClientLogSource.java │ │ │ │ │ ├── DynamicProducerRule.java │ │ │ │ │ └── Evaluable.java │ │ │ │ │ └── zkconfigcenter │ │ │ │ │ ├── ZkBasedConfigCenter.java │ │ │ │ │ ├── new.json │ │ │ │ │ └── old.json │ │ │ ├── _02 │ │ │ │ ├── DataStreamTest.java │ │ │ │ └── DataStreamTest1.java │ │ │ ├── _03 │ │ │ │ ├── enums_state │ │ │ │ │ ├── EnumsStateTest.java │ │ │ │ │ └── SenerioTest.java │ │ │ │ └── state │ │ │ │ │ ├── StateExamplesTest.java │ │ │ │ │ ├── _01_broadcast_state │ │ │ │ │ └── BroadcastStateTest.java │ │ │ │ │ ├── _03_rocksdb │ │ │ │ │ ├── CreateStateBackendTest.java │ │ │ │ │ ├── GettingStartDemo.java │ │ │ │ │ ├── Rocksdb_OperatorAndKeyedState_StateStorageDIr_Test.java │ │ │ │ │ ├── keyed_state │ │ │ │ │ │ ├── RocksBackendKeyedMapStateTest.java │ │ │ │ │ │ └── RocksBackendKeyedValueStateTest.java │ │ │ │ │ └── operator_state │ │ │ │ │ │ ├── KeyedStreamOperatorListStateTest.java │ │ │ │ │ │ └── RocksBackendOperatorListStateTest.java │ │ │ │ │ ├── _04_filesystem │ │ │ │ │ ├── keyed_state │ │ │ │ │ │ └── FsStateBackendKeyedMapStateTest.java │ │ │ │ │ └── operator_state │ │ │ │ │ │ └── FsStateBackendOperatorListStateTest.java │ │ │ │ │ └── _05_memory │ │ │ │ │ └── keyed_state │ │ │ │ │ └── MemoryStateBackendKeyedMapStateTest.java │ │ │ ├── _04 │ │ │ │ └── keyed_co_process │ │ │ │ │ ├── HashMapTest.java │ │ │ │ │ └── _04_KeyedCoProcessFunctionTest.java │ │ │ ├── _05_ken │ │ │ │ └── _01_watermark │ │ │ │ │ └── WatermarkTest.java │ │ │ ├── _06_test │ │ │ │ └── _01_event_proctime │ │ │ │ │ ├── OneJobWIthProcAndEventTimeWIndowTest.java │ │ │ │ │ └── OneJobWIthTimerTest.java │ │ │ ├── _07_lambda_error │ │ │ │ └── LambdaErrorTest.java │ │ │ ├── _08_late_record │ │ │ │ └── LatenessTest.java │ │ │ ├── _09_join │ │ │ │ ├── _01_window_join │ │ │ │ │ └── _01_Window_Join_Test.java │ │ │ │ └── _02_connect │ │ │ │ │ └── _01_Connect_Test.java │ │ │ └── _10_agg │ │ │ │ └── AggTest.java │ │ │ ├── practice │ │ │ └── _01 │ │ │ │ └── dau │ │ │ │ └── _01_DataStream_Session_Window.java │ │ │ ├── question │ │ │ ├── 
datastream │ │ │ │ └── _01 │ │ │ │ │ └── kryo_protobuf_no_more_bytes_left │ │ │ │ │ └── KryoProtobufNoMoreBytesLeftTest.java │ │ │ └── sql │ │ │ │ └── _01 │ │ │ │ └── lots_source_fields_poor_performance │ │ │ │ ├── EmbeddedKafka.java │ │ │ │ ├── _01_DataGenSourceTest.java │ │ │ │ └── _01_JsonSourceTest.java │ │ │ ├── runtime │ │ │ ├── _01 │ │ │ │ └── future │ │ │ │ │ ├── CompletableFutureTest.java │ │ │ │ │ ├── CompletableFutureTest4.java │ │ │ │ │ ├── CompletableFuture_AnyOf_Test3.java │ │ │ │ │ ├── CompletableFuture_ThenApplyAsync_Test2.java │ │ │ │ │ ├── CompletableFuture_ThenComposeAsync_Test2.java │ │ │ │ │ └── FutureTest.java │ │ │ └── _04 │ │ │ │ └── statebackend │ │ │ │ └── CancelAndRestoreWithCheckpointTest.java │ │ │ └── sql │ │ │ ├── _01 │ │ │ └── countdistincterror │ │ │ │ ├── CountDistinctErrorTest.java │ │ │ │ ├── CountDistinctErrorTest2.java │ │ │ │ ├── CountDistinctErrorTest3.java │ │ │ │ └── udf │ │ │ │ ├── Mod_UDF.java │ │ │ │ ├── StatusMapper1_UDF.java │ │ │ │ └── StatusMapper_UDF.java │ │ │ ├── _02 │ │ │ └── timezone │ │ │ │ ├── TimeZoneTest.java │ │ │ │ ├── TimeZoneTest2.java │ │ │ │ └── TimeZoneTest3.java │ │ │ ├── _03 │ │ │ └── source_sink │ │ │ │ ├── CreateViewTest.java │ │ │ │ ├── DataStreamSourceEventTimeTest.java │ │ │ │ ├── DataStreamSourceProcessingTimeTest.java │ │ │ │ ├── KafkaSourceTest.java │ │ │ │ ├── RedisLookupTest.java │ │ │ │ ├── RedisSinkTest.java │ │ │ │ ├── SocketSourceTest.java │ │ │ │ ├── TableApiKafkaSourceTest.java │ │ │ │ ├── UpsertKafkaSinkProtobufFormatSupportTest.java │ │ │ │ ├── UpsertKafkaSinkTest.java │ │ │ │ ├── UserDefinedSourceTest.java │ │ │ │ ├── abilities │ │ │ │ ├── sink │ │ │ │ │ ├── Abilities_SinkFunction.java │ │ │ │ │ ├── Abilities_TableSink.java │ │ │ │ │ ├── Abilities_TableSinkFactory.java │ │ │ │ │ └── _01_SupportsWritingMetadata_Test.java │ │ │ │ └── source │ │ │ │ │ ├── Abilities_SourceFunction.java │ │ │ │ │ ├── Abilities_TableSource.java │ │ │ │ │ ├── Abilities_TableSourceFactory.java │ │ │ │ │ ├── _01_SupportsFilterPushDown_Test.java │ │ │ │ │ ├── _02_SupportsLimitPushDown_Test.java │ │ │ │ │ ├── _03_SupportsPartitionPushDown_Test.java │ │ │ │ │ ├── _04_SupportsProjectionPushDown_JDBC_Test.java │ │ │ │ │ ├── _04_SupportsProjectionPushDown_Test.java │ │ │ │ │ ├── _05_SupportsReadingMetadata_Test.java │ │ │ │ │ ├── _06_SupportsWatermarkPushDown_Test.java │ │ │ │ │ ├── _07_SupportsSourceWatermark_Test.java │ │ │ │ │ └── before │ │ │ │ │ ├── Before_Abilities_SourceFunction.java │ │ │ │ │ ├── Before_Abilities_TableSource.java │ │ │ │ │ ├── Before_Abilities_TableSourceFactory.java │ │ │ │ │ ├── _01_Before_SupportsFilterPushDown_Test.java │ │ │ │ │ ├── _02_Before_SupportsLimitPushDown_Test.java │ │ │ │ │ ├── _03_Before_SupportsPartitionPushDown_Test.java │ │ │ │ │ ├── _04_Before_SupportsProjectionPushDown_Test.java │ │ │ │ │ ├── _05_Before_SupportsReadingMetadata_Test.java │ │ │ │ │ ├── _06_Before_SupportsWatermarkPushDown_Test.java │ │ │ │ │ └── _07_Before_SupportsSourceWatermark_Test.java │ │ │ │ ├── ddl │ │ │ │ └── TableApiDDLTest.java │ │ │ │ └── table │ │ │ │ ├── redis │ │ │ │ ├── container │ │ │ │ │ ├── RedisCommandsContainer.java │ │ │ │ │ ├── RedisCommandsContainerBuilder.java │ │ │ │ │ └── RedisContainer.java │ │ │ │ ├── demo │ │ │ │ │ └── RedisDemo.java │ │ │ │ ├── mapper │ │ │ │ │ ├── LookupRedisMapper.java │ │ │ │ │ ├── RedisCommand.java │ │ │ │ │ ├── RedisCommandDescription.java │ │ │ │ │ └── SetRedisMapper.java │ │ │ │ ├── options │ │ │ │ │ ├── RedisLookupOptions.java │ │ │ │ │ ├── RedisOptions.java │ │ │ │ │ └── 
RedisWriteOptions.java │ │ │ │ ├── v1 │ │ │ │ │ ├── RedisDynamicTableFactory.java │ │ │ │ │ ├── sink │ │ │ │ │ │ └── RedisDynamicTableSink.java │ │ │ │ │ └── source │ │ │ │ │ │ ├── RedisDynamicTableSource.java │ │ │ │ │ │ └── RedisRowDataLookupFunction.java │ │ │ │ └── v2 │ │ │ │ │ ├── RedisDynamicTableFactory.java │ │ │ │ │ ├── sink │ │ │ │ │ └── RedisDynamicTableSink.java │ │ │ │ │ └── source │ │ │ │ │ ├── RedisDynamicTableSource.java │ │ │ │ │ ├── RedisRowDataBatchLookupFunction.java │ │ │ │ │ └── RedisRowDataLookupFunction.java │ │ │ │ ├── socket │ │ │ │ ├── SocketDynamicTableFactory.java │ │ │ │ ├── SocketDynamicTableSource.java │ │ │ │ └── SocketSourceFunction.java │ │ │ │ └── user_defined │ │ │ │ ├── UserDefinedDynamicTableFactory.java │ │ │ │ ├── UserDefinedDynamicTableSource.java │ │ │ │ └── UserDefinedSource.java │ │ │ ├── _04 │ │ │ └── type │ │ │ │ ├── BlinkPlannerTest.java │ │ │ │ ├── JavaEnvTest.java │ │ │ │ └── OldPlannerTest.java │ │ │ ├── _05 │ │ │ └── format │ │ │ │ └── formats │ │ │ │ ├── ProtobufFormatTest.java │ │ │ │ ├── SocketWriteTest.java │ │ │ │ ├── csv │ │ │ │ ├── ChangelogCsvDeserializer.java │ │ │ │ ├── ChangelogCsvFormat.java │ │ │ │ └── ChangelogCsvFormatFactory.java │ │ │ │ ├── protobuf │ │ │ │ ├── descriptors │ │ │ │ │ ├── Protobuf.java │ │ │ │ │ └── ProtobufValidator.java │ │ │ │ ├── row │ │ │ │ │ ├── ProtobufDeserializationSchema.java │ │ │ │ │ ├── ProtobufRowDeserializationSchema.java │ │ │ │ │ ├── ProtobufRowFormatFactory.java │ │ │ │ │ ├── ProtobufRowSerializationSchema.java │ │ │ │ │ ├── ProtobufSerializationSchema.java │ │ │ │ │ ├── ProtobufUtils.java │ │ │ │ │ └── typeutils │ │ │ │ │ │ └── ProtobufSchemaConverter.java │ │ │ │ └── rowdata │ │ │ │ │ ├── ProtobufFormatFactory.java │ │ │ │ │ ├── ProtobufOptions.java │ │ │ │ │ ├── ProtobufRowDataDeserializationSchema.java │ │ │ │ │ ├── ProtobufRowDataSerializationSchema.java │ │ │ │ │ ├── ProtobufToRowDataConverters.java │ │ │ │ │ └── RowDataToProtobufConverters.java │ │ │ │ └── utils │ │ │ │ ├── MoreRunnables.java │ │ │ │ ├── MoreSuppliers.java │ │ │ │ ├── ThrowableRunable.java │ │ │ │ └── ThrowableSupplier.java │ │ │ ├── _06 │ │ │ └── calcite │ │ │ │ ├── CalciteTest.java │ │ │ │ ├── ParserTest.java │ │ │ │ └── javacc │ │ │ │ ├── JavaccCodeGenTest.java │ │ │ │ ├── Simple1Test.java │ │ │ │ └── generatedcode │ │ │ │ ├── ParseException.java │ │ │ │ ├── Simple1.java │ │ │ │ ├── Simple1Constants.java │ │ │ │ ├── Simple1TokenManager.java │ │ │ │ ├── SimpleCharStream.java │ │ │ │ ├── Token.java │ │ │ │ └── TokenMgrError.java │ │ │ ├── _07 │ │ │ └── query │ │ │ │ ├── _01_select_where │ │ │ │ ├── SelectWhereHiveDialect.java │ │ │ │ ├── SelectWhereTest.java │ │ │ │ ├── SelectWhereTest2.java │ │ │ │ ├── SelectWhereTest3.java │ │ │ │ ├── SelectWhereTest4.java │ │ │ │ ├── SelectWhereTest5.java │ │ │ │ └── StreamExecCalc$10.java │ │ │ │ ├── _02_select_distinct │ │ │ │ ├── GroupAggsHandler$5.java │ │ │ │ ├── KeyProjection$0.java │ │ │ │ ├── SelectDistinctTest.java │ │ │ │ └── SelectDistinctTest2.java │ │ │ │ ├── _03_group_agg │ │ │ │ ├── _01_group_agg │ │ │ │ │ ├── GroupAggMiniBatchTest.java │ │ │ │ │ ├── GroupAggTest.java │ │ │ │ │ └── GroupAggsHandler$39.java │ │ │ │ ├── _02_count_distinct │ │ │ │ │ ├── CountDistinctGroupAggTest.java │ │ │ │ │ └── GroupAggsHandler$17.java │ │ │ │ ├── _03_grouping_sets │ │ │ │ │ ├── GroupingSetsEqualsGroupAggUnionAllGroupAggTest2.java │ │ │ │ │ ├── GroupingSetsGroupAggTest.java │ │ │ │ │ ├── GroupingSetsGroupAggTest2.java │ │ │ │ │ └── StreamExecExpand$20.java │ │ │ │ ├── _04_cube │ │ 
│ │ │ ├── CubeGroupAggTest.java │ │ │ │ │ └── CubeGroupAggTest2.java │ │ │ │ └── _05_rollup │ │ │ │ │ ├── RollUpGroupAggTest.java │ │ │ │ │ └── RollUpGroupAggTest2.java │ │ │ │ ├── _04_window_agg │ │ │ │ ├── _01_tumble_window │ │ │ │ │ ├── TumbleWindow2GroupAggTest.java │ │ │ │ │ ├── TumbleWindowTest.java │ │ │ │ │ ├── TumbleWindowTest2.java │ │ │ │ │ ├── TumbleWindowTest3.java │ │ │ │ │ ├── TumbleWindowTest4.java │ │ │ │ │ ├── TumbleWindowTest5.java │ │ │ │ │ ├── global_agg │ │ │ │ │ │ ├── GlobalWindowAggsHandler$232.java │ │ │ │ │ │ ├── LocalWindowAggsHandler$162.java │ │ │ │ │ │ └── StateWindowAggsHandler$300.java │ │ │ │ │ └── local_agg │ │ │ │ │ │ ├── KeyProjection$89.java │ │ │ │ │ │ └── LocalWindowAggsHandler$88.java │ │ │ │ ├── _02_cumulate_window │ │ │ │ │ ├── CumulateWindowGroupingSetsBigintTest.java │ │ │ │ │ ├── CumulateWindowGroupingSetsTest.java │ │ │ │ │ ├── CumulateWindowTest.java │ │ │ │ │ ├── TumbleWindowEarlyFireTest.java │ │ │ │ │ ├── cumulate │ │ │ │ │ │ ├── global_agg │ │ │ │ │ │ │ ├── GlobalWindowAggsHandler$232.java │ │ │ │ │ │ │ ├── KeyProjection$301.java │ │ │ │ │ │ │ ├── LocalWindowAggsHandler$162.java │ │ │ │ │ │ │ └── StateWindowAggsHandler$300.java │ │ │ │ │ │ └── local_agg │ │ │ │ │ │ │ ├── KeyProjection$89.java │ │ │ │ │ │ │ └── LocalWindowAggsHandler$88.java │ │ │ │ │ └── earlyfire │ │ │ │ │ │ ├── GroupAggsHandler$210.java │ │ │ │ │ │ └── GroupingWindowAggsHandler$57.java │ │ │ │ └── _03_hop_window │ │ │ │ │ └── HopWindowGroupWindowAggTest.java │ │ │ │ ├── _05_over │ │ │ │ ├── _01_row_number │ │ │ │ │ ├── RowNumberOrderByBigintTest.java │ │ │ │ │ ├── RowNumberOrderByStringTest.java │ │ │ │ │ ├── RowNumberOrderByUnixTimestampTest.java │ │ │ │ │ ├── RowNumberWithoutPartitionKeyTest.java │ │ │ │ │ ├── RowNumberWithoutRowNumberEqual1Test.java │ │ │ │ │ └── Scalar_UDF.java │ │ │ │ └── _02_agg │ │ │ │ │ ├── RangeIntervalProctimeTest.java │ │ │ │ │ ├── RangeIntervalRowtimeAscendingTest.java │ │ │ │ │ ├── RangeIntervalRowtimeBoundedOutOfOrdernessTest.java │ │ │ │ │ ├── RangeIntervalRowtimeStrictlyAscendingTest.java │ │ │ │ │ └── RowIntervalTest.java │ │ │ │ ├── _06_joins │ │ │ │ ├── _01_regular_joins │ │ │ │ │ ├── _01_inner_join │ │ │ │ │ │ ├── ConditionFunction$4.java │ │ │ │ │ │ ├── _01_InnerJoinsTest.java │ │ │ │ │ │ └── _02_InnerJoinsOnNotEqualTest.java │ │ │ │ │ └── _02_outer_join │ │ │ │ │ │ ├── _01_LeftJoinsTest.java │ │ │ │ │ │ ├── _02_RightJoinsTest.java │ │ │ │ │ │ └── _03_FullJoinsTest.java │ │ │ │ ├── _02_interval_joins │ │ │ │ │ ├── _01_proctime │ │ │ │ │ │ ├── Interval_Full_Joins_ProcesingTime_Test.java │ │ │ │ │ │ ├── Interval_Inner_Joins_ProcesingTime_Test.java │ │ │ │ │ │ ├── Interval_Left_Joins_ProcesingTime_Test.java │ │ │ │ │ │ └── Interval_Right_Joins_ProcesingTime_Test.java │ │ │ │ │ └── _02_row_time │ │ │ │ │ │ ├── Interval_Full_JoinsOnNotEqual_EventTime_Test.java │ │ │ │ │ │ ├── Interval_Full_Joins_EventTime_Test.java │ │ │ │ │ │ ├── Interval_Inner_Joins_EventTime_Test.java │ │ │ │ │ │ ├── Interval_Left_Joins_EventTime_Test.java │ │ │ │ │ │ └── Interval_Right_Joins_EventTime_Test.java │ │ │ │ ├── _03_temporal_join │ │ │ │ │ ├── _01_proctime │ │ │ │ │ │ └── Temporal_Join_ProcesingTime_Test.java │ │ │ │ │ └── _02_row_time │ │ │ │ │ │ └── Temporal_Join_EventTime_Test.java │ │ │ │ ├── _04_lookup_join │ │ │ │ │ └── _01_redis │ │ │ │ │ │ ├── RedisBatchLookupTest2.java │ │ │ │ │ │ ├── RedisDemo.java │ │ │ │ │ │ ├── RedisLookupTest.java │ │ │ │ │ │ ├── RedisLookupTest2.java │ │ │ │ │ │ └── pipeline │ │ │ │ │ │ ├── BatchJoinTableFuncCollector$8.java 
│ │ │ │ │ │ ├── BatchLookupFunction$4.java │ │ │ │ │ │ ├── JoinTableFuncCollector$8.java │ │ │ │ │ │ ├── JoinTableFuncCollector$9.java │ │ │ │ │ │ ├── LookupFunction$4.java │ │ │ │ │ │ ├── LookupFunction$5.java │ │ │ │ │ │ └── T1.java │ │ │ │ ├── _05_array_expansion │ │ │ │ │ └── _01_ArrayExpansionTest.java │ │ │ │ └── _06_table_function │ │ │ │ │ └── _01_inner_join │ │ │ │ │ ├── TableFunctionInnerJoin_Test.java │ │ │ │ │ └── TableFunctionInnerJoin_WithEmptyTableFunction_Test.java │ │ │ │ ├── _07_deduplication │ │ │ │ ├── DeduplicationProcessingTimeTest.java │ │ │ │ ├── DeduplicationProcessingTimeTest1.java │ │ │ │ └── DeduplicationRowTimeTest.java │ │ │ │ ├── _08_datastream_trans │ │ │ │ ├── AlertExample.java │ │ │ │ ├── AlertExampleRetract.java │ │ │ │ ├── AlertExampleRetractError.java │ │ │ │ ├── RetractExample.java │ │ │ │ └── Test.java │ │ │ │ ├── _09_set_operations │ │ │ │ ├── Except_Test.java │ │ │ │ ├── Exist_Test.java │ │ │ │ ├── In_Test.java │ │ │ │ ├── Intersect_Test.java │ │ │ │ ├── UnionAll_Test.java │ │ │ │ └── Union_Test.java │ │ │ │ ├── _10_order_by │ │ │ │ ├── OrderBy_with_time_attr_Test.java │ │ │ │ └── OrderBy_without_time_attr_Test.java │ │ │ │ ├── _11_limit │ │ │ │ └── Limit_Test.java │ │ │ │ ├── _12_topn │ │ │ │ └── TopN_Test.java │ │ │ │ ├── _13_window_topn │ │ │ │ └── WindowTopN_Test.java │ │ │ │ ├── _14_retract │ │ │ │ └── Retract_Test.java │ │ │ │ ├── _15_exec_options │ │ │ │ ├── Default_Parallelism_Test.java │ │ │ │ ├── Idle_Timeout_Test.java │ │ │ │ └── State_Ttl_Test.java │ │ │ │ ├── _16_optimizer_options │ │ │ │ ├── Agg_OnePhase_Strategy_window_Test.java │ │ │ │ ├── Agg_TwoPhase_Strategy_unbounded_Test.java │ │ │ │ ├── Agg_TwoPhase_Strategy_window_Test.java │ │ │ │ ├── DistinctAgg_Split_One_Distinct_Key_Test.java │ │ │ │ └── DistinctAgg_Split_Two_Distinct_Key_Test.java │ │ │ │ ├── _17_table_options │ │ │ │ ├── Dml_Syc_False_Test.java │ │ │ │ ├── Dml_Syc_True_Test.java │ │ │ │ └── TimeZone_window_Test.java │ │ │ │ └── _18_performance_tuning │ │ │ │ └── Count_Distinct_Filter_Test.java │ │ │ ├── _08 │ │ │ └── batch │ │ │ │ ├── Utils.java │ │ │ │ ├── _01_ddl │ │ │ │ └── HiveDDLTest.java │ │ │ │ ├── _02_dml │ │ │ │ ├── HiveDMLBetweenAndTest.java │ │ │ │ ├── HiveDMLTest.java │ │ │ │ ├── HiveTest2.java │ │ │ │ ├── _01_hive_dialect │ │ │ │ │ └── HiveDMLTest.java │ │ │ │ ├── _02_with_as │ │ │ │ │ └── HIveWIthAsTest.java │ │ │ │ ├── _03_substr │ │ │ │ │ └── HiveSubstrTest.java │ │ │ │ ├── _04_tumble_window │ │ │ │ │ ├── Test.java │ │ │ │ │ ├── Test1.java │ │ │ │ │ ├── Test2_BIGINT_SOURCE.java │ │ │ │ │ ├── Test3.java │ │ │ │ │ └── Test5.java │ │ │ │ ├── _05_batch_to_datastream │ │ │ │ │ └── Test.java │ │ │ │ └── _06_select_where │ │ │ │ │ └── Test.java │ │ │ │ ├── _03_hive_udf │ │ │ │ ├── HiveModuleV2.java │ │ │ │ ├── HiveUDFRegistryTest.java │ │ │ │ ├── HiveUDFRegistryUnloadTest.java │ │ │ │ ├── _01_GenericUDAFResolver2 │ │ │ │ │ ├── HiveUDAF_hive_module_registry_Test.java │ │ │ │ │ ├── HiveUDAF_sql_registry_create_function_Test.java │ │ │ │ │ ├── HiveUDAF_sql_registry_create_temporary_function_Test.java │ │ │ │ │ └── TestHiveUDAF.java │ │ │ │ ├── _02_GenericUDTF │ │ │ │ │ ├── HiveUDTF_hive_module_registry_Test.java │ │ │ │ │ ├── HiveUDTF_sql_registry_create_function_Test.java │ │ │ │ │ ├── HiveUDTF_sql_registry_create_temporary_function_Test.java │ │ │ │ │ └── TestHiveUDTF.java │ │ │ │ ├── _03_built_in_udf │ │ │ │ │ ├── _01_get_json_object │ │ │ │ │ │ └── HiveUDF_get_json_object_Test.java │ │ │ │ │ └── _02_rlike │ │ │ │ │ │ └── HiveUDF_rlike_Test.java │ │ │ │ └── 
_04_GenericUDF │ │ │ │ │ ├── HiveUDF_hive_module_registry_Test.java │ │ │ │ │ ├── HiveUDF_sql_registry_create_function_Test.java │ │ │ │ │ ├── HiveUDF_sql_registry_create_temporary_function_Test.java │ │ │ │ │ └── TestGenericUDF.java │ │ │ │ ├── _04_flink_udf │ │ │ │ ├── FlinkUDAF_Test.java │ │ │ │ ├── FlinkUDF_Test.java │ │ │ │ └── FlinkUDTF_Test.java │ │ │ │ └── _05_test │ │ │ │ └── _01_batch_to_datastream │ │ │ │ └── Test.java │ │ │ ├── _09 │ │ │ └── udf │ │ │ │ ├── _01_hive_udf │ │ │ │ └── _01_GenericUDF │ │ │ │ │ ├── HiveUDF_sql_registry_create_function_Test.java │ │ │ │ │ ├── HiveUDF_sql_registry_create_function_with_hive_catalog_Test.java │ │ │ │ │ ├── HiveUDF_sql_registry_create_temporary_function_Test.java │ │ │ │ │ ├── HiveUDF_sql_registry_create_temporary_function_with_hive_catalog_Test.java │ │ │ │ │ └── TestGenericUDF.java │ │ │ │ ├── _02_stream_hive_udf │ │ │ │ ├── HiveUDF_Error_Test.java │ │ │ │ ├── HiveUDF_create_temporary_error_Test.java │ │ │ │ ├── HiveUDF_hive_module_registry_Test.java │ │ │ │ ├── HiveUDF_load_first_Test.java │ │ │ │ ├── HiveUDF_load_second_Test.java │ │ │ │ ├── TestGenericUDF.java │ │ │ │ └── UserDefinedSource.java │ │ │ │ ├── _03_advanced_type_inference │ │ │ │ ├── AdvancedFunctionsExample.java │ │ │ │ ├── InternalRowMergerFunction.java │ │ │ │ └── LastDatedValueFunction.java │ │ │ │ ├── _04_udf │ │ │ │ └── UDAF_Test.java │ │ │ │ └── _05_scalar_function │ │ │ │ ├── ExplodeUDTF.java │ │ │ │ ├── ExplodeUDTFV2.java │ │ │ │ ├── GetMapValue.java │ │ │ │ ├── GetSetValue.java │ │ │ │ ├── ScalarFunctionTest.java │ │ │ │ ├── ScalarFunctionTest2.java │ │ │ │ ├── SetStringUDF.java │ │ │ │ └── TableFunctionTest2.java │ │ │ ├── _10_share │ │ │ └── A.java │ │ │ ├── _11_explain │ │ │ └── Explain_Test.java │ │ │ └── _12_data_type │ │ │ ├── _01_interval │ │ │ ├── Timestamp3_Interval_To_Test.java │ │ │ └── Timestamp_ltz3_Interval_To_Test.java │ │ │ ├── _02_user_defined │ │ │ ├── User.java │ │ │ ├── UserDefinedDataTypes_Test.java │ │ │ ├── UserDefinedDataTypes_Test2.java │ │ │ └── UserScalarFunction.java │ │ │ └── _03_raw │ │ │ ├── RawScalarFunction.java │ │ │ └── Raw_DataTypes_Test2.java │ ├── javacc │ │ └── Simple1.jj │ ├── proto │ │ ├── source.proto │ │ └── test.proto │ ├── resources │ │ └── META-INF │ │ │ └── services │ │ │ └── org.apache.flink.table.factories.Factory │ └── scala │ │ └── flink │ │ └── examples │ │ └── sql │ │ └── _04 │ │ └── type │ │ └── TableFunc0.scala │ └── test │ ├── java │ └── flink │ │ └── examples │ │ └── sql │ │ ├── _05 │ │ └── format │ │ │ └── formats │ │ │ └── protobuf │ │ │ ├── row │ │ │ ├── ProtobufRowDeserializationSchemaTest.java │ │ │ └── ProtobufRowSerializationSchemaTest.java │ │ │ └── rowdata │ │ │ ├── ProtobufRowDataDeserializationSchemaTest.java │ │ │ └── ProtobufRowDataSerializationSchemaTest.java │ │ ├── _06 │ │ └── calcite │ │ │ └── CalciteTest.java │ │ └── _07 │ │ └── query │ │ └── _06_joins │ │ └── JaninoCompileTest.java │ ├── proto │ └── person.proto │ └── scala │ ├── ScalaEnv.scala │ └── TableFunc0.scala ├── flink-examples-1.14 ├── pom.xml └── src │ └── main │ └── java │ └── flink │ └── examples │ └── sql │ └── _08 │ └── batch │ ├── HiveModuleV2.java │ └── Test.java ├── flink-examples-1.8 ├── .gitignore └── pom.xml └── pom.xml /.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/** 5 | #**/src/test/** 6 | .idea/ 7 | *.iml 8 | *.DS_Store 9 | 10 | ### IntelliJ IDEA ### 11 | .idea 12 | *.iws 13 | *.ipr 14 | 15 
| -------------------------------------------------------------------------------- /flink-examples-1.10/src/main/java/flink/examples/sql/_07/query/_06_joins/_02_interval_joins/_01_outer_join/WindowJoinFunction$46.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._06_joins._02_interval_joins._01_outer_join; 2 | 3 | 4 | public class WindowJoinFunction$46 5 | extends org.apache.flink.api.common.functions.RichFlatJoinFunction { 6 | 7 | final org.apache.flink.table.dataformat.JoinedRow joinedRow = new org.apache.flink.table.dataformat.JoinedRow(); 8 | 9 | public WindowJoinFunction$46(Object[] references) throws Exception { 10 | 11 | } 12 | 13 | 14 | @Override 15 | public void open(org.apache.flink.configuration.Configuration parameters) throws Exception { 16 | 17 | } 18 | 19 | @Override 20 | public void join(Object _in1, Object _in2, org.apache.flink.util.Collector c) throws Exception { 21 | org.apache.flink.table.dataformat.BaseRow in1 = (org.apache.flink.table.dataformat.BaseRow) _in1; 22 | org.apache.flink.table.dataformat.BaseRow in2 = (org.apache.flink.table.dataformat.BaseRow) _in2; 23 | 24 | int result$40; 25 | boolean isNull$40; 26 | int field$41; 27 | boolean isNull$41; 28 | int result$42; 29 | boolean isNull$42; 30 | int field$43; 31 | boolean isNull$43; 32 | boolean isNull$44; 33 | boolean result$45; 34 | result$40 = -1; 35 | isNull$40 = true; 36 | if (in1 != null) { 37 | isNull$41 = in1.isNullAt(0); 38 | field$41 = -1; 39 | if (!isNull$41) { 40 | field$41 = in1.getInt(0); 41 | } 42 | result$40 = field$41; 43 | isNull$40 = isNull$41; 44 | } 45 | result$42 = -1; 46 | isNull$42 = true; 47 | if (in2 != null) { 48 | isNull$43 = in2.isNullAt(0); 49 | field$43 = -1; 50 | if (!isNull$43) { 51 | field$43 = in2.getInt(0); 52 | } 53 | result$42 = field$43; 54 | isNull$42 = isNull$43; 55 | } 56 | 57 | 58 | isNull$44 = isNull$40 || isNull$42; 59 | result$45 = false; 60 | if (!isNull$44) { 61 | 62 | result$45 = result$40 == result$42; 63 | 64 | } 65 | 66 | if (result$45) { 67 | 68 | joinedRow.replace(in1, in2); 69 | c.collect(joinedRow); 70 | } 71 | } 72 | 73 | @Override 74 | public void close() throws Exception { 75 | 76 | } 77 | } -------------------------------------------------------------------------------- /flink-examples-1.12/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/** 5 | #**/src/test/** 6 | .idea/ 7 | *.iml 8 | *.DS_Store 9 | 10 | ### IntelliJ IDEA ### 11 | .idea 12 | *.iws 13 | *.ipr 14 | 15 | -------------------------------------------------------------------------------- /flink-examples-1.12/src/main/java/flink/examples/sql/_07/query/_04_window_agg/_04_TumbleWindowTest_KeyProjection$69.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._04_window_agg; 2 | 3 | 4 | public final class _04_TumbleWindowTest_KeyProjection$69 implements 5 | org.apache.flink.table.runtime.generated.Projection { 7 | 8 | org.apache.flink.table.data.binary.BinaryRowData out = new org.apache.flink.table.data.binary.BinaryRowData(2); 9 | org.apache.flink.table.data.writer.BinaryRowWriter outWriter = 10 | new org.apache.flink.table.data.writer.BinaryRowWriter(out); 11 | 12 | public _04_TumbleWindowTest_KeyProjection$69(Object[] references) throws Exception { 13 | 14 | } 15 | 16 | @Override 17 | public 
org.apache.flink.table.data.binary.BinaryRowData apply(org.apache.flink.table.data.RowData in1) { 18 | int field$70; 19 | boolean isNull$70; 20 | org.apache.flink.table.data.binary.BinaryStringData field$71; 21 | boolean isNull$71; 22 | outWriter.reset(); 23 | isNull$70 = in1.isNullAt(0); 24 | field$70 = -1; 25 | if (!isNull$70) { 26 | field$70 = in1.getInt(0); 27 | } 28 | if (isNull$70) { 29 | outWriter.setNullAt(0); 30 | } else { 31 | outWriter.writeInt(0, field$70); 32 | } 33 | 34 | isNull$71 = in1.isNullAt(1); 35 | field$71 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8; 36 | if (!isNull$71) { 37 | field$71 = ((org.apache.flink.table.data.binary.BinaryStringData) in1.getString(1)); 38 | } 39 | if (isNull$71) { 40 | outWriter.setNullAt(1); 41 | } else { 42 | outWriter.writeString(1, field$71); 43 | } 44 | 45 | outWriter.complete(); 46 | 47 | 48 | return out; 49 | } 50 | } -------------------------------------------------------------------------------- /flink-examples-1.12/src/main/java/flink/examples/sql/_07/query/_04_window_agg/_04_TumbleWindowTest_WatermarkGenerator$6.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._04_window_agg; 2 | 3 | 4 | public final class _04_TumbleWindowTest_WatermarkGenerator$6 5 | extends org.apache.flink.table.runtime.generated.WatermarkGenerator { 6 | 7 | 8 | public _04_TumbleWindowTest_WatermarkGenerator$6(Object[] references) throws Exception { 9 | 10 | } 11 | 12 | @Override 13 | public void open(org.apache.flink.configuration.Configuration parameters) throws Exception { 14 | 15 | } 16 | 17 | @Override 18 | public Long currentWatermark(org.apache.flink.table.data.RowData row) throws Exception { 19 | 20 | org.apache.flink.table.data.TimestampData field$7; 21 | boolean isNull$7; 22 | boolean isNull$8; 23 | org.apache.flink.table.data.TimestampData result$9; 24 | isNull$7 = row.isNullAt(3); 25 | field$7 = null; 26 | if (!isNull$7) { 27 | field$7 = row.getTimestamp(3, 3); 28 | } 29 | 30 | 31 | isNull$8 = isNull$7 || false; 32 | result$9 = null; 33 | if (!isNull$8) { 34 | 35 | result$9 = org.apache.flink.table.data.TimestampData 36 | .fromEpochMillis(field$7.getMillisecond() - ((long) 5000L), field$7.getNanoOfMillisecond()); 37 | 38 | } 39 | 40 | if (isNull$8) { 41 | return null; 42 | } else { 43 | return result$9.getMillisecond(); 44 | } 45 | } 46 | 47 | @Override 48 | public void close() throws Exception { 49 | 50 | } 51 | } -------------------------------------------------------------------------------- /flink-examples-1.13/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/** 5 | #**/src/test/** 6 | .idea/ 7 | *.iml 8 | *.DS_Store 9 | 10 | ### IntelliJ IDEA ### 11 | .idea 12 | *.iws 13 | *.ipr 14 | 15 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/core/source/JaninoUtils.java: -------------------------------------------------------------------------------- 1 | package flink.core.source; 2 | 3 | import org.codehaus.janino.SimpleCompiler; 4 | 5 | import lombok.extern.slf4j.Slf4j; 6 | 7 | 8 | @Slf4j 9 | public class JaninoUtils { 10 | 11 | private static final SimpleCompiler COMPILER = new SimpleCompiler(); 12 | 13 | static { 14 | COMPILER.setParentClassLoader(JaninoUtils.class.getClassLoader()); 15 | } 16 | 17 | public static Class genClass(String className, String 
code, Class clazz) throws Exception { 18 | 19 | COMPILER.cook(code); 20 | 21 | System.out.println("生成的代码:\n" + code); 22 | 23 | return (Class) COMPILER.getClassLoader().loadClass(className); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/codegen/JaninoUtils.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._01.bytedance.split.codegen; 2 | 3 | import org.codehaus.janino.SimpleCompiler; 4 | 5 | import flink.examples.datastream._01.bytedance.split.model.Evaluable; 6 | import lombok.extern.slf4j.Slf4j; 7 | 8 | 9 | @Slf4j 10 | public class JaninoUtils { 11 | 12 | private static final SimpleCompiler COMPILER = new SimpleCompiler(); 13 | 14 | static { 15 | COMPILER.setParentClassLoader(JaninoUtils.class.getClassLoader()); 16 | } 17 | 18 | public static Class genCodeAndGetClazz(Long id, String topic, String condition) throws Exception { 19 | 20 | String className = "CodeGen_" + topic + "_" + id; 21 | 22 | String code = "import org.apache.commons.lang3.ArrayUtils;\n" 23 | + "\n" 24 | + "public class " + className + " implements flink.examples.datastream._01.bytedance.split.model.Evaluable {\n" 25 | + " \n" 26 | + " @Override\n" 27 | + " public boolean eval(flink.examples.datastream._01.bytedance.split.model.ClientLogSource clientLogSource) {\n" 28 | + " \n" 29 | + " return " + condition + ";\n" 30 | + " }\n" 31 | + "}\n"; 32 | 33 | COMPILER.cook(code); 34 | 35 | System.out.println("生成的代码:\n" + code); 36 | 37 | return (Class) COMPILER.getClassLoader().loadClass(className); 38 | } 39 | 40 | public static void main(String[] args) throws Exception { 41 | Class c = genCodeAndGetClazz(1L, "topic", "1==1"); 42 | 43 | System.out.println(1); 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/job/start.sh: -------------------------------------------------------------------------------- 1 | # 1.kafka 初始化 2 | 3 | cd /kafka-bin-目录 4 | 5 | # 启动 kafka server 6 | ./kafka-server-start /usr/local/etc/kafka/server.properties & 7 | 8 | # 创建 3 个 topic 9 | kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic tuzisir 10 | 11 | kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic tuzisir1 12 | 13 | kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic tuzisir2 14 | 15 | # 启动一个 console consumer 16 | 17 | kafka-console-consumer --bootstrap-server localhost:9092 --topic tuzisir --from-beginning 18 | 19 | # 2.zk 初始化 20 | 21 | cd /zk-bin-目录 22 | 23 | zkServer start 24 | 25 | zkCli -server 127.0.0.1:2181 26 | 27 | # zkCli 中需要执行的命令 28 | create /kafka-config {"1":{"condition":"1==1","targetTopic":"tuzisir1"},"2":{"condition":"1!=1","targetTopic":"tuzisir2"}} 29 | 30 | get /kafka-config 31 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/kafka/demo/Application.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._01.bytedance.split.kafka.demo; 2 | 3 | 4 | public class Application { 5 | 6 | private String topicName = "tuzisir"; 7 | private String consumerGrp = "consumerGrp"; 8 | 
private String brokerUrl = "localhost:9092"; 9 | 10 | public static void main(String[] args) throws InterruptedException { 11 | 12 | 13 | System.out.println(1); 14 | 15 | Application application = new Application(); 16 | new Thread(new ProducerThread(application), "Producer : ").start(); 17 | new Thread(new ConsumerThread(application), "Consumer1 : ").start(); 18 | 19 | //for multiple consumers in same group, start new consumer threads 20 | //new Thread(new ConsumerThread(application), "Consumer2 : ").start(); 21 | } 22 | 23 | public String getTopicName() { 24 | return topicName; 25 | } 26 | 27 | public String getConsumerGrp() { 28 | return consumerGrp; 29 | } 30 | 31 | public String getBrokerUrl() { 32 | return brokerUrl; 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/kafka/demo/ConsumerThread.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._01.bytedance.split.kafka.demo; 2 | 3 | import java.time.Duration; 4 | import java.util.Collections; 5 | import java.util.Properties; 6 | 7 | import org.apache.kafka.clients.consumer.Consumer; 8 | import org.apache.kafka.clients.consumer.ConsumerRecord; 9 | import org.apache.kafka.clients.consumer.ConsumerRecords; 10 | import org.apache.kafka.clients.consumer.KafkaConsumer; 11 | 12 | 13 | public class ConsumerThread implements Runnable { 14 | 15 | private Consumer consumer; 16 | 17 | public ConsumerThread(Application application) { 18 | Properties props = new Properties(); 19 | props.put("bootstrap.servers", application.getBrokerUrl()); 20 | props.put("group.id", application.getConsumerGrp()); 21 | props.put("enable.auto.commit", "true"); 22 | props.put("auto.commit.interval.ms", "1000"); 23 | props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 24 | props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 25 | //props.put("auto.offset.reset", "earliest"); 26 | consumer = new KafkaConsumer<>(props); 27 | consumer.subscribe(Collections.singletonList(application.getTopicName())); 28 | } 29 | 30 | @Override 31 | public void run() { 32 | String threadName = Thread.currentThread().getName(); 33 | int noMessageToFetch = 1; 34 | while (noMessageToFetch < 3) { 35 | System.out.println(threadName + "poll start.."); 36 | final ConsumerRecords consumerRecords = consumer.poll(Duration.ofSeconds(1)); 37 | System.out.println(threadName + "records polled : " + consumerRecords.count()); 38 | if (consumerRecords.count() == 0) { 39 | noMessageToFetch++; 40 | continue; 41 | } 42 | for (ConsumerRecord record : consumerRecords) { 43 | System.out.printf(threadName + "offset = %d, key = %s, value = %s, partition =%d%n", 44 | record.offset(), record.key(), record.value(), record.partition()); 45 | } 46 | consumer.commitAsync(); 47 | } 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/kafka/demo/ProducerThread.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._01.bytedance.split.kafka.demo; 2 | 3 | import java.util.Properties; 4 | import java.util.concurrent.ExecutionException; 5 | 6 | import org.apache.kafka.clients.producer.KafkaProducer; 7 | import 
org.apache.kafka.clients.producer.Producer; 8 | import org.apache.kafka.clients.producer.ProducerRecord; 9 | import org.apache.kafka.clients.producer.RecordMetadata; 10 | 11 | 12 | public class ProducerThread implements Runnable { 13 | 14 | private Producer producer; 15 | private String topicName; 16 | 17 | public ProducerThread(Application application) { 18 | this.topicName = application.getTopicName(); 19 | Properties props = new Properties(); 20 | props.put("bootstrap.servers", application.getBrokerUrl()); 21 | props.put("acks", "all"); 22 | props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 23 | props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 24 | producer = new KafkaProducer<>(props); 25 | } 26 | 27 | @Override 28 | public void run() { 29 | String threadName = Thread.currentThread().getName(); 30 | for (int index = 1; index < 100; index++) { 31 | final ProducerRecord record = new ProducerRecord<>(topicName, 32 | Integer.toString(index), Integer.toString(index)); 33 | try { 34 | RecordMetadata metadata = producer.send(record).get(); 35 | System.out 36 | .println(threadName + "Record sent with key " + index + " to partition " + metadata.partition() 37 | + " with offset " + metadata.offset()); 38 | } catch (ExecutionException e) { 39 | System.out.println(threadName + "Error in sending record :" + e); 40 | throw new RuntimeException(e); 41 | } catch (InterruptedException e) { 42 | System.out.println(threadName + "Error in sending record : " + e); 43 | throw new RuntimeException(e); 44 | } catch (Exception e) { 45 | System.out.println(threadName + "Error in sending record : " + e); 46 | throw new RuntimeException(e); 47 | } 48 | } 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/model/ClientLogSink.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._01.bytedance.split.model; 2 | 3 | import lombok.Builder; 4 | import lombok.Data; 5 | 6 | 7 | @Data 8 | @Builder 9 | public class ClientLogSink { 10 | private int id; 11 | private int price; 12 | private long timestamp; 13 | 14 | } 15 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/model/ClientLogSource.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._01.bytedance.split.model; 2 | 3 | import lombok.Builder; 4 | import lombok.Data; 5 | 6 | 7 | @Data 8 | @Builder 9 | public class ClientLogSource { 10 | 11 | private int id; 12 | private int price; 13 | private long timestamp; 14 | private String date; 15 | private String page; 16 | 17 | } 18 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/model/DynamicProducerRule.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._01.bytedance.split.model; 2 | 3 | 4 | import flink.examples.datastream._01.bytedance.split.codegen.JaninoUtils; 5 | import lombok.Builder; 6 | import lombok.Data; 7 | 8 | 9 | @Data 10 | @Builder 11 | public class DynamicProducerRule implements Evaluable { 12 | 13 | private String condition; 14 | 15 | private String targetTopic; 
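// The `evaluable` field below is populated by init(id): JaninoUtils compiles the `condition` string
// into a generated Evaluable implementation at runtime, and eval() simply delegates to that compiled class.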
16 | 17 | private Evaluable evaluable; 18 | 19 | public void init(Long id) { 20 | try { 21 | Class clazz = JaninoUtils.genCodeAndGetClazz(id, targetTopic, condition); 22 | this.evaluable = clazz.newInstance(); 23 | } catch (Exception e) { 24 | throw new RuntimeException(e); 25 | } 26 | } 27 | 28 | @Override 29 | public boolean eval(ClientLogSource clientLogSource) { 30 | return this.evaluable.eval(clientLogSource); 31 | } 32 | 33 | public static void main(String[] args) throws Exception { 34 | String condition = "String.valueOf(sourceModel.getId())==\"1\""; 35 | 36 | DynamicProducerRule dynamicProducerRule = DynamicProducerRule 37 | .builder() 38 | .condition(condition) 39 | .targetTopic("t") 40 | .build(); 41 | 42 | dynamicProducerRule.init(1L); 43 | 44 | boolean b = dynamicProducerRule.eval(ClientLogSource.builder().id(1).build()); 45 | 46 | System.out.println(); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/model/Evaluable.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._01.bytedance.split.model; 2 | 3 | 4 | public interface Evaluable { 5 | 6 | boolean eval(ClientLogSource clientLogSource); 7 | 8 | } 9 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/zkconfigcenter/new.json: -------------------------------------------------------------------------------- 1 | {"1":{"condition":"1==1","targetTopic":"tuzisir1"},"2":{"condition":"1!=1","targetTopic":"tuzisir2"},"3":{"condition":"1==1","targetTopic":"tuzisir"}} -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_01/bytedance/split/zkconfigcenter/old.json: -------------------------------------------------------------------------------- 1 | {"1":{"condition":"1==1","targetTopic":"tuzisir1"},"2":{"condition":"1!=1","targetTopic":"tuzisir2"}} -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_03/enums_state/EnumsStateTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._03.enums_state; 2 | 3 | import org.apache.flink.api.common.typeinfo.TypeInformation; 4 | import org.apache.flink.api.common.typeutils.base.EnumSerializer; 5 | import org.apache.flink.configuration.Configuration; 6 | import org.apache.flink.core.memory.DataOutputSerializer; 7 | import org.apache.flink.streaming.api.TimeCharacteristic; 8 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 9 | 10 | 11 | public class EnumsStateTest { 12 | 13 | 14 | public static void main(String[] args) throws Exception { 15 | StreamExecutionEnvironment env = 16 | StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 17 | 18 | env.setParallelism(1); 19 | 20 | env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime); 21 | 22 | TypeInformation t = TypeInformation.of(StateTestEnums.class); 23 | 24 | EnumSerializer e = (EnumSerializer) t.createSerializer(env.getConfig()); 25 | 26 | DataOutputSerializer d = new DataOutputSerializer(10000); 27 | 28 | e.serialize(StateTestEnums.A, d); 29 | 30 | env.execute(); 31 | } 32 | 33 | enum StateTestEnums { 34 | A, 35 | B, 
36 | C 37 | ; 38 | } 39 | 40 | } 41 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_03/state/_03_rocksdb/GettingStartDemo.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._03.state._03_rocksdb; 2 | 3 | import org.rocksdb.Options; 4 | import org.rocksdb.RocksDB; 5 | import org.rocksdb.RocksDBException; 6 | 7 | public class GettingStartDemo { 8 | 9 | // 因为RocksDB是由C++编写的,在Java中使用首先需要加载Native库 10 | static { 11 | // Loads the necessary library files. 12 | // Calling this method twice will have no effect. 13 | // By default the method extracts the shared library for loading at 14 | // java.io.tmpdir, however, you can override this temporary location by 15 | // setting the environment variable ROCKSDB_SHAREDLIB_DIR. 16 | // 默认这个方法会加压一个共享库到java.io.tmpdir 17 | RocksDB.loadLibrary(); 18 | } 19 | 20 | public static void main(String[] args) throws RocksDBException { 21 | // 1. 打开数据库 22 | // 1.1 创建数据库配置 23 | Options dbOpt = new Options(); 24 | // 1.2 配置当数据库不存在时自动创建 25 | dbOpt.setCreateIfMissing(true); 26 | // 1.3 打开数据库。因为RocksDB默认是保存在本地磁盘,所以需要指定位置 27 | RocksDB rdb = RocksDB.open(dbOpt, "./data/rocksdb"); 28 | // 2. 写入数据 29 | // 2.1 RocksDB都是以字节流的方式写入数据库中,所以我们需要将字符串转换为字节流再写入。这点类似于HBase 30 | byte[] key = "zhangsan".getBytes(); 31 | byte[] value = "20".getBytes(); 32 | // 2.2 调用put方法写入数据 33 | rdb.put(key, value); 34 | System.out.println("写入数据到RocksDB完成!"); 35 | // 3. 调用delete方法读取数据 36 | System.out.println("从RocksDB读取key = " + new String(key) + "的value为" + new String(rdb.get(key))); 37 | // 4. 移除数据 38 | rdb.delete(key); 39 | // 关闭资源 40 | rdb.close(); 41 | dbOpt.close(); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_04/keyed_co_process/HashMapTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._04.keyed_co_process; 2 | 3 | 4 | import java.util.HashMap; 5 | import java.util.Map.Entry; 6 | 7 | public class HashMapTest { 8 | 9 | public static void main(String[] args) { 10 | HashMap hashMap = new HashMap<>(); 11 | 12 | hashMap.put("1", "2"); 13 | hashMap.put("2", "2"); 14 | hashMap.put("3", "2"); 15 | hashMap.put("4", "2"); 16 | hashMap.put("5", "2"); 17 | 18 | for (Entry e : hashMap.entrySet()) { 19 | hashMap.remove(e.getKey()); 20 | } 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/datastream/_07_lambda_error/LambdaErrorTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.datastream._07_lambda_error; 2 | 3 | import org.apache.flink.streaming.api.functions.source.SourceFunction; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | import lombok.Builder; 8 | import lombok.Data; 9 | 10 | 11 | public class LambdaErrorTest { 12 | 13 | public static void main(String[] args) throws Exception { 14 | 15 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 16 | 17 | flinkEnv.env().setParallelism(1); 18 | 19 | flinkEnv.env() 20 | .addSource(new SourceFunction() { 21 | 22 | private volatile boolean isCancel = false; 23 | 24 | private SinkModel s; 25 | 26 | @Override 27 | public void run(SourceContext ctx) throws Exception { 28 | 
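// The loop below emits a synthetic SourceModel record every 100 ms until cancel() flips the volatile isCancel flag.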
while (!isCancel) { 29 | // xxx 日志上报逻辑 30 | ctx.collect( 31 | SourceModel 32 | .builder() 33 | .page("Shopping-Cart") 34 | .userId(1) 35 | .time(System.currentTimeMillis()) 36 | .build() 37 | ); 38 | Thread.sleep(100); 39 | } 40 | } 41 | 42 | @Override 43 | public void cancel() { 44 | this.isCancel = true; 45 | } 46 | }) 47 | .print(); 48 | 49 | flinkEnv.env().execute(); 50 | } 51 | 52 | @Data 53 | @Builder 54 | private static class SourceModel { 55 | private long userId; 56 | private String page; 57 | private long time; 58 | } 59 | 60 | @Data 61 | @Builder 62 | private static class MiddleModel { 63 | private long uv; 64 | private long time; 65 | } 66 | 67 | @Data 68 | @Builder 69 | private static class SinkModel { 70 | private long uv; 71 | private long time; 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/question/datastream/_01/kryo_protobuf_no_more_bytes_left/KryoProtobufNoMoreBytesLeftTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.question.datastream._01.kryo_protobuf_no_more_bytes_left; 2 | 3 | import java.lang.reflect.Method; 4 | 5 | import com.esotericsoftware.kryo.Kryo; 6 | import com.esotericsoftware.kryo.io.Input; 7 | import com.esotericsoftware.kryo.io.Output; 8 | import com.google.protobuf.Message; 9 | import com.sun.tools.javac.util.Assert; 10 | import com.twitter.chill.protobuf.ProtobufSerializer; 11 | 12 | import flink.examples.datastream._04.keyed_co_process.protobuf.Source; 13 | 14 | public class KryoProtobufNoMoreBytesLeftTest { 15 | 16 | public static void main(String[] args) throws Exception { 17 | 18 | Source source = Source 19 | .newBuilder() 20 | .build(); 21 | 22 | byte[] bytes = source.toByteArray(); 23 | 24 | byte[] buffer = new byte[300]; 25 | 26 | Kryo kryo = newKryo(); 27 | 28 | Output output = new Output(buffer); 29 | 30 | // ser 31 | 32 | ProtobufSerializer protobufSerializer = new ProtobufSerializer(); 33 | 34 | protobufSerializer.write(kryo, output, source); 35 | 36 | 37 | // deser 38 | 39 | Input input = new Input(buffer); 40 | 41 | Class c = (Class) Source.getDefaultInstance().getClass(); 42 | 43 | Message m = protobufSerializer.read(kryo, input, (Class) c); 44 | 45 | testGetParse(); 46 | 47 | } 48 | 49 | private static void testGetParse() throws Exception { 50 | 51 | ProtobufSerializerV2 protobufSerializerV2 = new ProtobufSerializerV2(); 52 | 53 | Method m = protobufSerializerV2.getParse(Source.class); 54 | 55 | 56 | Source s = (Source) m.invoke(null, Source.newBuilder().setName("antigeneral").build().toByteArray()); 57 | 58 | Assert.check("antigeneral".equals(s.getName())); 59 | } 60 | 61 | private static class ProtobufSerializerV2 extends ProtobufSerializer { 62 | @Override 63 | public Method getParse(Class cls) throws Exception { 64 | return super.getParse(cls); 65 | } 66 | } 67 | 68 | private static Kryo newKryo() { 69 | Kryo kryo = new Kryo(); 70 | 71 | kryo.addDefaultSerializer(Source.class, ProtobufSerializerV2.class); 72 | 73 | return kryo; 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/question/sql/_01/lots_source_fields_poor_performance/EmbeddedKafka.java: -------------------------------------------------------------------------------- 1 | //package flink.examples.question.sql._01.lots_source_fields_poor_performance; 2 | // 3 | //import static 
net.mguenther.kafka.junit.ObserveKeyValues.on; 4 | //import static net.mguenther.kafka.junit.SendValues.to; 5 | // 6 | //import lombok.SneakyThrows; 7 | //import net.mguenther.kafka.junit.EmbeddedKafkaCluster; 8 | //import net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig; 9 | // 10 | //public class EmbeddedKafka { 11 | // 12 | // public static void main(String[] args) { 13 | // EmbeddedKafkaCluster kafkaCluster = 14 | // EmbeddedKafkaCluster.provisionWith(EmbeddedKafkaClusterConfig.defaultClusterConfig()); 15 | // kafkaCluster.start(); 16 | // 17 | // new Thread(new Runnable() { 18 | // @SneakyThrows 19 | // @Override 20 | // public void run() { 21 | // while (true) { 22 | // kafkaCluster.send(to("test-topic", "a", "b", "c")); 23 | // Thread.sleep(1000); 24 | // } 25 | // } 26 | // }).start(); 27 | // 28 | // 29 | // new Thread(new Runnable() { 30 | // @SneakyThrows 31 | // @Override 32 | // public void run() { 33 | // while (true) { 34 | // kafkaCluster.observe(on("test-topic", 3)) 35 | // .forEach(a -> System.out.println(a.getValue())); 36 | // } 37 | // } 38 | // }).start(); 39 | // 40 | // 41 | // } 42 | // 43 | //} 44 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/runtime/_01/future/CompletableFutureTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.runtime._01.future; 2 | 3 | import java.util.concurrent.CompletableFuture; 4 | 5 | 6 | public class CompletableFutureTest { 7 | 8 | public static void main(String[] args) throws Exception { 9 | // 创建异步执行任务: 10 | CompletableFuture cf = CompletableFuture.supplyAsync(CompletableFutureTest::fetchPrice); 11 | // 如果执行成功: 12 | cf.thenAccept((result) -> { 13 | System.out.println("price: " + result); 14 | }); 15 | // 如果执行异常: 16 | cf.exceptionally((e) -> { 17 | e.printStackTrace(); 18 | return null; 19 | }); 20 | // 主线程不要立刻结束,否则CompletableFuture默认使用的线程池会立刻关闭: 21 | Thread.sleep(200); 22 | } 23 | 24 | static Double fetchPrice() { 25 | try { 26 | Thread.sleep(100); 27 | } catch (InterruptedException e) { 28 | } 29 | if (false) { 30 | throw new RuntimeException("fetch price failed!"); 31 | } 32 | return 5 + Math.random() * 20; 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/runtime/_01/future/CompletableFutureTest4.java: -------------------------------------------------------------------------------- 1 | package flink.examples.runtime._01.future; 2 | 3 | import java.util.concurrent.CompletableFuture; 4 | 5 | 6 | public class CompletableFutureTest4 { 7 | 8 | public static void main(String[] args) throws Exception { 9 | // 第一个任务: 10 | CompletableFuture cfQuery = CompletableFuture.supplyAsync(() -> { 11 | return queryCode("中国石油"); 12 | }); 13 | // cfQuery成功后继续执行下一个任务: 14 | CompletableFuture cfFetch = cfQuery.thenApplyAsync((code) -> { 15 | return fetchPrice(code); 16 | }); 17 | // cfFetch成功后打印结果: 18 | cfFetch.thenAccept((result) -> { 19 | System.out.println("price: " + result); 20 | }); 21 | // 主线程不要立刻结束,否则CompletableFuture默认使用的线程池会立刻关闭: 22 | Thread.sleep(2000); 23 | } 24 | 25 | static String queryCode(String name) { 26 | try { 27 | Thread.sleep(100); 28 | } catch (InterruptedException e) { 29 | } 30 | return name; 31 | } 32 | 33 | static String fetchPrice(String code) { 34 | try { 35 | Thread.sleep(100); 36 | } catch (InterruptedException e) { 37 | } 38 | return code + ":" + 5 + Math.random() * 
20; 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/runtime/_01/future/CompletableFuture_AnyOf_Test3.java: -------------------------------------------------------------------------------- 1 | package flink.examples.runtime._01.future; 2 | 3 | import java.util.concurrent.CompletableFuture; 4 | 5 | 6 | public class CompletableFuture_AnyOf_Test3 { 7 | 8 | public static void main(String[] args) throws Exception { 9 | // 两个CompletableFuture执行异步查询: 10 | CompletableFuture cfQueryFromSina = CompletableFuture.supplyAsync(() -> { 11 | return queryCode("中国石油", "https://finance.sina.com.cn/code/"); 12 | }); 13 | CompletableFuture cfQueryFrom163 = CompletableFuture.supplyAsync(() -> { 14 | return queryCode("中国石油", "https://money.163.com/code/"); 15 | }); 16 | 17 | // 用anyOf合并为一个新的CompletableFuture: 18 | CompletableFuture cfQuery = CompletableFuture.anyOf(cfQueryFromSina, cfQueryFrom163); 19 | 20 | // 两个CompletableFuture执行异步查询: 21 | CompletableFuture cfFetchFromSina = cfQuery.thenApplyAsync((code) -> { 22 | return fetchPrice((String) code, "https://finance.sina.com.cn/price/"); 23 | }); 24 | CompletableFuture cfFetchFrom163 = cfQuery.thenApplyAsync((code) -> { 25 | return fetchPrice((String) code, "https://money.163.com/price/"); 26 | }); 27 | 28 | // 用anyOf合并为一个新的CompletableFuture: 29 | CompletableFuture cfFetch = CompletableFuture.anyOf(cfFetchFromSina, cfFetchFrom163); 30 | 31 | // 最终结果: 32 | cfFetch.thenAccept((result) -> { 33 | System.out.println("price: " + result); 34 | }); 35 | // 主线程不要立刻结束,否则CompletableFuture默认使用的线程池会立刻关闭: 36 | Thread.sleep(200); 37 | } 38 | 39 | static String queryCode(String name, String url) { 40 | System.out.println("query code from " + url + "..."); 41 | try { 42 | Thread.sleep((long) (Math.random() * 100)); 43 | } catch (InterruptedException e) { 44 | } 45 | return "601857"; 46 | } 47 | 48 | static Double fetchPrice(String code, String url) { 49 | System.out.println("query price from " + url + "..."); 50 | try { 51 | Thread.sleep((long) (Math.random() * 100)); 52 | } catch (InterruptedException e) { 53 | } 54 | return 5 + Math.random() * 20; 55 | } 56 | 57 | } 58 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/runtime/_01/future/CompletableFuture_ThenApplyAsync_Test2.java: -------------------------------------------------------------------------------- 1 | package flink.examples.runtime._01.future; 2 | 3 | import java.util.concurrent.CompletableFuture; 4 | 5 | 6 | public class CompletableFuture_ThenApplyAsync_Test2 { 7 | 8 | public static void main(String[] args) throws Exception { 9 | // 第一个任务: 10 | CompletableFuture cfQuery = CompletableFuture.supplyAsync(() -> { 11 | return queryCode("中国石油"); 12 | }); 13 | // cfQuery成功后继续执行下一个任务: 14 | CompletableFuture cfFetch = cfQuery.thenApplyAsync((code) -> { 15 | return fetchPrice(code); 16 | }); 17 | // cfFetch成功后打印结果: 18 | cfFetch.thenAccept((result) -> { 19 | System.out.println("price: " + result); 20 | }); 21 | // 主线程不要立刻结束,否则CompletableFuture默认使用的线程池会立刻关闭: 22 | Thread.sleep(2000); 23 | } 24 | 25 | static String queryCode(String name) { 26 | try { 27 | Thread.sleep(100); 28 | } catch (InterruptedException e) { 29 | } 30 | return name; 31 | } 32 | 33 | static String fetchPrice(String code) { 34 | try { 35 | Thread.sleep(100); 36 | } catch (InterruptedException e) { 37 | } 38 | return code + ":" + 5 + Math.random() * 20; 39 | } 40 | 41 | } 
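The test above chains the second stage with thenApplyAsync, while the next file does the same with thenComposeAsync. A minimal standalone sketch of the type-level difference follows (the class name is made up and not part of this repository): thenApplyAsync takes a callback that returns a plain value, whereas thenComposeAsync takes a callback that returns another CompletableFuture and flattens it.

import java.util.concurrent.CompletableFuture;

public class ApplyVsComposeSketch {

    public static void main(String[] args) throws Exception {
        CompletableFuture<String> cfQuery = CompletableFuture.supplyAsync(() -> "601857");

        // thenApplyAsync: the callback returns a String, so the result is CompletableFuture<String>
        CompletableFuture<String> applied = cfQuery.thenApplyAsync(code -> code + ": price");

        // thenComposeAsync: the callback returns a CompletableFuture<String>, which is flattened
        CompletableFuture<String> composed =
                cfQuery.thenComposeAsync(code -> CompletableFuture.supplyAsync(() -> code + ": price"));

        System.out.println(applied.get());
        System.out.println(composed.get());
    }
}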
42 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/runtime/_01/future/CompletableFuture_ThenComposeAsync_Test2.java: -------------------------------------------------------------------------------- 1 | package flink.examples.runtime._01.future; 2 | 3 | import java.util.concurrent.CompletableFuture; 4 | 5 | 6 | public class CompletableFuture_ThenComposeAsync_Test2 { 7 | 8 | public static void main(String[] args) throws Exception { 9 | // 第一个任务: 10 | CompletableFuture cfQuery = CompletableFuture.supplyAsync(() -> { 11 | return queryCode("中国石油"); 12 | }); 13 | // cfQuery成功后继续执行下一个任务: 14 | CompletableFuture cfFetch = cfQuery.thenComposeAsync((code) -> { 15 | return CompletableFuture.supplyAsync(() -> fetchPrice(code)); 16 | }); 17 | // cfFetch成功后打印结果: 18 | cfFetch.thenAccept((result) -> { 19 | System.out.println("price: " + result); 20 | }); 21 | // 主线程不要立刻结束,否则CompletableFuture默认使用的线程池会立刻关闭: 22 | Thread.sleep(2000); 23 | } 24 | 25 | static String queryCode(String name) { 26 | try { 27 | Thread.sleep(100); 28 | } catch (InterruptedException e) { 29 | } 30 | return name; 31 | } 32 | 33 | static String fetchPrice(String code) { 34 | try { 35 | Thread.sleep(100); 36 | } catch (InterruptedException e) { 37 | } 38 | return code + ":" + 5 + Math.random() * 20; 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/runtime/_01/future/FutureTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.runtime._01.future; 2 | 3 | import java.util.concurrent.Callable; 4 | import java.util.concurrent.ExecutionException; 5 | import java.util.concurrent.ExecutorService; 6 | import java.util.concurrent.Executors; 7 | import java.util.concurrent.Future; 8 | import java.util.concurrent.TimeoutException; 9 | 10 | 11 | public class FutureTest { 12 | 13 | public static void main(String[] args) throws ExecutionException, InterruptedException, TimeoutException { 14 | 15 | ExecutorService executor = Executors.newFixedThreadPool(4); 16 | // 定义任务: 17 | Callable task = new Task(); 18 | // 提交任务并获得Future: 19 | Future future = executor.submit(task); 20 | 21 | // 从Future获取异步执行返回的结果: 22 | 23 | String result = future.get(); 24 | System.out.println(result); 25 | executor.shutdown(); 26 | 27 | } 28 | 29 | private static class Task implements Callable { 30 | public String call() throws Exception { 31 | Thread.sleep(1000); 32 | return "1"; 33 | } 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_01/countdistincterror/udf/Mod_UDF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._01.countdistincterror.udf; 2 | 3 | import org.apache.flink.table.functions.ScalarFunction; 4 | 5 | 6 | public class Mod_UDF extends ScalarFunction { 7 | 8 | public int eval(long id, int remainder) { 9 | return (int) (id % remainder); 10 | } 11 | 12 | } 13 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_01/countdistincterror/udf/StatusMapper1_UDF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._01.countdistincterror.udf; 2 | 3 | import org.apache.flink.table.functions.ScalarFunction; 4 
| 5 | 6 | public class StatusMapper1_UDF extends ScalarFunction { 7 | 8 | private int i = 0; 9 | 10 | public String eval(String status) { 11 | 12 | if (i == 5) { 13 | i++; 14 | return "等级4"; 15 | } else { 16 | i++; 17 | if ("1".equals(status)) { 18 | return "等级1"; 19 | } else if ("2".equals(status)) { 20 | return "等级2"; 21 | } else if ("3".equals(status)) { 22 | return "等级3"; 23 | } 24 | } 25 | return "未知"; 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_01/countdistincterror/udf/StatusMapper_UDF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._01.countdistincterror.udf; 2 | 3 | import org.apache.flink.table.functions.TableFunction; 4 | 5 | 6 | public class StatusMapper_UDF extends TableFunction { 7 | 8 | private int i = 0; 9 | 10 | public void eval(String status) throws InterruptedException { 11 | 12 | if (i == 6) { 13 | Thread.sleep(2000L); 14 | } 15 | 16 | if (i == 5) { 17 | collect("等级4"); 18 | } else { 19 | if ("1".equals(status)) { 20 | collect("等级1"); 21 | } else if ("2".equals(status)) { 22 | collect("等级2"); 23 | } else if ("3".equals(status)) { 24 | collect("等级3"); 25 | } 26 | } 27 | i++; 28 | } 29 | 30 | } 31 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/KafkaSourceTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink; 2 | 3 | import org.apache.flink.configuration.Configuration; 4 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 5 | import org.apache.flink.table.api.EnvironmentSettings; 6 | import org.apache.flink.table.api.Table; 7 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 8 | import org.apache.flink.types.Row; 9 | 10 | 11 | public class KafkaSourceTest { 12 | 13 | public static void main(String[] args) throws Exception { 14 | 15 | StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 16 | 17 | env.setParallelism(1); 18 | 19 | EnvironmentSettings settings = EnvironmentSettings 20 | .newInstance() 21 | .useBlinkPlanner() 22 | .inStreamingMode() 23 | .build(); 24 | 25 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings); 26 | 27 | tEnv.executeSql( 28 | "CREATE TABLE KafkaSourceTable (\n" 29 | + " `f0` STRING,\n" 30 | + " `f1` STRING\n" 31 | + ") WITH (\n" 32 | + " 'connector' = 'kafka',\n" 33 | + " 'topic' = 'topic',\n" 34 | + " 'properties.bootstrap.servers' = 'localhost:9092',\n" 35 | + " 'properties.group.id' = 'testGroup',\n" 36 | + " 'format' = 'json'\n" 37 | + ")" 38 | ); 39 | 40 | Table t = tEnv.sqlQuery("SELECT * FROM KafkaSourceTable"); 41 | 42 | tEnv.toAppendStream(t, Row.class).print(); 43 | 44 | env.execute(); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/UserDefinedSourceTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class UserDefinedSourceTest { 9 | 10 | public static void main(String[] args) throws 
Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " `name` STRING\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'user_defined',\n" 23 | + " 'format' = 'json',\n" 24 | + " 'class.name' = 'flink.examples.sql._03.source_sink.table.user_defined.UserDefinedSource'\n" 25 | + ");\n" 26 | + "\n" 27 | + "CREATE TABLE sink_table (\n" 28 | + " user_id BIGINT,\n" 29 | + " name STRING\n" 30 | + ") WITH (\n" 31 | + " 'connector' = 'print'\n" 32 | + ");\n" 33 | + "\n" 34 | + "INSERT INTO sink_table\n" 35 | + "SELECT\n" 36 | + " *\n" 37 | + "FROM source_table;"; 38 | 39 | Arrays.stream(sql.split(";")) 40 | .forEach(flinkEnv.streamTEnv()::executeSql); 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/sink/Abilities_SinkFunction.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.sink; 2 | 3 | import org.apache.flink.api.common.functions.util.PrintSinkOutputWriter; 4 | import org.apache.flink.configuration.Configuration; 5 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 6 | import org.apache.flink.streaming.api.functions.sink.SinkFunction; 7 | import org.apache.flink.streaming.api.operators.StreamingRuntimeContext; 8 | import org.apache.flink.table.connector.sink.DynamicTableSink.DataStructureConverter; 9 | import org.apache.flink.table.data.RowData; 10 | 11 | public class Abilities_SinkFunction extends RichSinkFunction { 12 | 13 | private static final long serialVersionUID = 1L; 14 | 15 | private final DataStructureConverter converter; 16 | private final PrintSinkOutputWriter writer; 17 | 18 | public Abilities_SinkFunction( 19 | DataStructureConverter converter, String printIdentifier, boolean stdErr) { 20 | this.converter = converter; 21 | this.writer = new PrintSinkOutputWriter<>(printIdentifier, stdErr); 22 | } 23 | 24 | @Override 25 | public void open(Configuration parameters) throws Exception { 26 | super.open(parameters); 27 | StreamingRuntimeContext context = (StreamingRuntimeContext) getRuntimeContext(); 28 | writer.open(context.getIndexOfThisSubtask(), context.getNumberOfParallelSubtasks()); 29 | } 30 | 31 | @Override 32 | public void invoke(RowData value, SinkFunction.Context context) { 33 | Object data = converter.toExternal(value); 34 | assert data != null; 35 | writer.write(data.toString()); 36 | } 37 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/sink/_01_SupportsWritingMetadata_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.sink; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _01_SupportsWritingMetadata_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | 
flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " flink_read_timestamp BIGINT METADATA VIRTUAL,\n" 21 | + " `name` STRING\n" 22 | + ") WITH (\n" 23 | + " 'connector' = 'supports_reading_metadata_user_defined',\n" 24 | + " 'format' = 'json',\n" 25 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.Abilities_SourceFunction'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE sink_table (\n" 29 | + " user_id BIGINT,\n" 30 | + " flink_write_timestamp BIGINT METADATA,\n" 31 | + " name STRING\n" 32 | + ") WITH (\n" 33 | + " 'connector' = 'abilities_print'\n" 34 | + ");\n" 35 | + "\n" 36 | + "INSERT INTO sink_table\n" 37 | + "SELECT\n" 38 | + " user_id\n" 39 | + " , flink_read_timestamp as flink_write_timestamp\n" 40 | + " , name\n" 41 | + "FROM source_table"; 42 | 43 | Arrays.stream(sql.split(";")) 44 | .forEach(flinkEnv.streamTEnv()::executeSql); 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/Abilities_SourceFunction.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source; 2 | 3 | import org.apache.flink.api.common.serialization.DeserializationSchema; 4 | import org.apache.flink.streaming.api.functions.source.RichSourceFunction; 5 | import org.apache.flink.streaming.api.watermark.Watermark; 6 | import org.apache.flink.table.data.RowData; 7 | 8 | import com.google.common.collect.ImmutableMap; 9 | 10 | import flink.examples.JacksonUtils; 11 | 12 | public class Abilities_SourceFunction extends RichSourceFunction { 13 | 14 | private DeserializationSchema dser; 15 | 16 | private long limit = -1; 17 | 18 | private volatile boolean isCancel = false; 19 | 20 | private boolean enableSourceWatermark = false; 21 | 22 | public Abilities_SourceFunction(DeserializationSchema dser) { 23 | this.dser = dser; 24 | } 25 | 26 | public Abilities_SourceFunction(DeserializationSchema dser, long limit) { 27 | this.dser = dser; 28 | this.limit = limit; 29 | } 30 | 31 | public Abilities_SourceFunction(DeserializationSchema dser, boolean enableSourceWatermark) { 32 | this.dser = dser; 33 | this.enableSourceWatermark = enableSourceWatermark; 34 | } 35 | 36 | @Override 37 | public void run(SourceContext ctx) throws Exception { 38 | int i = 0; 39 | while (!this.isCancel) { 40 | 41 | long currentTimeMills = System.currentTimeMillis(); 42 | 43 | ctx.collect(this.dser.deserialize( 44 | JacksonUtils.bean2Json(ImmutableMap.of( 45 | "user_id", 11111L + i 46 | , "name", "antigeneral" 47 | , "flink_read_timestamp", currentTimeMills + "")).getBytes() 48 | )); 49 | Thread.sleep(1000); 50 | i++; 51 | 52 | if (limit >= 0 && i > limit) { 53 | this.isCancel = true; 54 | } 55 | 56 | if (enableSourceWatermark) { 57 | ctx.emitWatermark(new Watermark(currentTimeMills)); 58 | } 59 | } 60 | } 61 | 62 | @Override 63 | public void cancel() { 64 | this.isCancel = true; 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/_01_SupportsFilterPushDown_Test.java: 
-------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _01_SupportsFilterPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " `name` STRING\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'supports_reading_metadata_user_defined',\n" 23 | + " 'format' = 'json',\n" 24 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.Abilities_SourceFunction'\n" 25 | + ");\n" 26 | + "\n" 27 | + "CREATE TABLE sink_table (\n" 28 | + " user_id BIGINT,\n" 29 | + " name STRING\n" 30 | + ") WITH (\n" 31 | + " 'connector' = 'print'\n" 32 | + ");\n" 33 | + "\n" 34 | + "INSERT INTO sink_table\n" 35 | + "SELECT\n" 36 | + " *\n" 37 | + "FROM source_table\n" 38 | + "WHERE user_id > 3333\n"; 39 | 40 | Arrays.stream(sql.split(";")) 41 | .forEach(flinkEnv.streamTEnv()::executeSql); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/_02_SupportsLimitPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _02_SupportsLimitPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " flink_read_timestamp BIGINT\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'supports_reading_metadata_user_defined',\n" 23 | + " 'format' = 'json',\n" 24 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.Abilities_SourceFunction'\n" 25 | + ");\n" 26 | + "\n" 27 | + "CREATE TABLE sink_table (\n" 28 | + " user_id BIGINT,\n" 29 | + " flink_read_timestamp BIGINT\n" 30 | + ") WITH (\n" 31 | + " 'connector' = 'print'\n" 32 | + ");\n" 33 | + "\n" 34 | + "INSERT INTO sink_table\n" 35 | + "SELECT\n" 36 | + " *\n" 37 | + "FROM source_table\n" 38 | + "LIMIT 100"; 39 | 40 | Arrays.stream(sql.split(";")) 41 | .forEach(flinkEnv.streamTEnv()::executeSql); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/_03_SupportsPartitionPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source; 2 | 3 | import java.util.Arrays; 4 | 5 | import 
flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _03_SupportsPartitionPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " flink_read_timestamp BIGINT METADATA VIRTUAL,\n" 21 | + " `name` STRING\n" 22 | + ") WITH (\n" 23 | + " 'connector' = 'supports_reading_metadata_user_defined',\n" 24 | + " 'format' = 'json',\n" 25 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.Abilities_SourceFunction'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE sink_table (\n" 29 | + " user_id BIGINT,\n" 30 | + " flink_read_timestamp BIGINT,\n" 31 | + " name STRING\n" 32 | + ") WITH (\n" 33 | + " 'connector' = 'print'\n" 34 | + ");\n" 35 | + "\n" 36 | + "INSERT INTO sink_table\n" 37 | + "SELECT\n" 38 | + " *\n" 39 | + "FROM source_table\n" 40 | + "LIMIT 100"; 41 | 42 | Arrays.stream(sql.split(";")) 43 | .forEach(flinkEnv.streamTEnv()::executeSql); 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/_04_SupportsProjectionPushDown_JDBC_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _04_SupportsProjectionPushDown_JDBC_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | String sql = "CREATE TABLE source_table_1 (\n" 17 | + " id DECIMAL(20, 0),\n" 18 | + " name STRING,\n" 19 | + " owner STRING\n" 20 | + ") WITH (\n" 21 | + " 'connector' = 'jdbc',\n" 22 | + " 'url' = 'jdbc:mysql://localhost:3306/user_profile',\n" 23 | + " 'username' = 'root',\n" 24 | + " 'password' = 'root123456',\n" 25 | + " 'table-name' = 'user_test'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE sink_table_2 (\n" 29 | + " id DECIMAL(20, 0),\n" 30 | + " name STRING\n" 31 | + ") WITH (\n" 32 | + " 'connector' = 'print'\n" 33 | + ");\n" 34 | + "\n" 35 | + "INSERT INTO sink_table_2\n" 36 | + "SELECT\n" 37 | + " id\n" 38 | + " , name\n" 39 | + "FROM source_table_1\n"; 40 | 41 | Arrays.stream(sql.split(";")) 42 | .forEach(flinkEnv.streamTEnv()::executeSql); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/_04_SupportsProjectionPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _04_SupportsProjectionPushDown_Test { 9 | 10 | public static void main(String[] args) throws 
Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " `name1` STRING,\n" 21 | + " `name2` STRING,\n" 22 | + " `name3` STRING\n" 23 | + ") WITH (\n" 24 | + " 'connector' = 'supports_reading_metadata_user_defined',\n" 25 | + " 'format' = 'json',\n" 26 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.Abilities_SourceFunction'\n" 27 | + ");\n" 28 | + "\n" 29 | + "CREATE TABLE sink_table (\n" 30 | + " user_id BIGINT,\n" 31 | + " name STRING\n" 32 | + ") WITH (\n" 33 | + " 'connector' = 'print'\n" 34 | + ");\n" 35 | + "\n" 36 | + "INSERT INTO sink_table\n" 37 | + "SELECT\n" 38 | + " user_id\n" 39 | + " , name1 as name\n" 40 | + "FROM source_table"; 41 | 42 | Arrays.stream(sql.split(";")) 43 | .forEach(flinkEnv.streamTEnv()::executeSql); 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/_05_SupportsReadingMetadata_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _05_SupportsReadingMetadata_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " flink_read_timestamp BIGINT METADATA VIRTUAL,\n" 21 | + " `name` STRING\n" 22 | + ") WITH (\n" 23 | + " 'connector' = 'supports_reading_metadata_user_defined',\n" 24 | + " 'format' = 'json',\n" 25 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.Abilities_SourceFunction'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE sink_table (\n" 29 | + " user_id BIGINT,\n" 30 | + " flink_read_timestamp BIGINT,\n" 31 | + " name STRING\n" 32 | + ") WITH (\n" 33 | + " 'connector' = 'print'\n" 34 | + ");\n" 35 | + "\n" 36 | + "INSERT INTO sink_table\n" 37 | + "SELECT\n" 38 | + " *\n" 39 | + "FROM source_table"; 40 | 41 | Arrays.stream(sql.split(";")) 42 | .forEach(flinkEnv.streamTEnv()::executeSql); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/_06_SupportsWatermarkPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _06_SupportsWatermarkPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | 
flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " flink_read_timestamp BIGINT METADATA VIRTUAL,\n" 21 | + " time_ltz AS TO_TIMESTAMP_LTZ(flink_read_timestamp, 3),\n" 22 | + " `name` STRING,\n" 23 | + " WATERMARK FOR time_ltz AS time_ltz - INTERVAL '5' SECOND\n" 24 | + ") WITH (\n" 25 | + " 'connector' = 'supports_reading_metadata_user_defined',\n" 26 | + " 'format' = 'json',\n" 27 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.Abilities_SourceFunction'\n" 28 | + ");\n" 29 | + "\n" 30 | + "CREATE TABLE sink_table (\n" 31 | + " user_id BIGINT,\n" 32 | + " flink_read_timestamp BIGINT,\n" 33 | + " name STRING\n" 34 | + ") WITH (\n" 35 | + " 'connector' = 'print'\n" 36 | + ");\n" 37 | + "\n" 38 | + "INSERT INTO sink_table\n" 39 | + "SELECT\n" 40 | + " user_id,\n" 41 | + " flink_read_timestamp,\n" 42 | + " name\n" 43 | + "FROM source_table"; 44 | 45 | Arrays.stream(sql.split(";")) 46 | .forEach(flinkEnv.streamTEnv()::executeSql); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/before/Before_Abilities_SourceFunction.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source.before; 2 | 3 | import org.apache.flink.api.common.serialization.DeserializationSchema; 4 | import org.apache.flink.streaming.api.functions.source.RichSourceFunction; 5 | import org.apache.flink.streaming.api.watermark.Watermark; 6 | import org.apache.flink.table.data.RowData; 7 | 8 | import com.google.common.collect.ImmutableMap; 9 | 10 | import flink.examples.JacksonUtils; 11 | 12 | public class Before_Abilities_SourceFunction extends RichSourceFunction { 13 | 14 | private DeserializationSchema dser; 15 | 16 | private long limit = -1; 17 | 18 | private volatile boolean isCancel = false; 19 | 20 | private boolean enableSourceWatermark = false; 21 | 22 | public Before_Abilities_SourceFunction(DeserializationSchema dser) { 23 | this.dser = dser; 24 | } 25 | 26 | public Before_Abilities_SourceFunction(DeserializationSchema dser, long limit) { 27 | this.dser = dser; 28 | this.limit = limit; 29 | } 30 | 31 | public Before_Abilities_SourceFunction(DeserializationSchema dser, boolean enableSourceWatermark) { 32 | this.dser = dser; 33 | this.enableSourceWatermark = enableSourceWatermark; 34 | } 35 | 36 | @Override 37 | public void run(SourceContext ctx) throws Exception { 38 | int i = 0; 39 | while (!this.isCancel) { 40 | 41 | long currentTimeMills = System.currentTimeMillis(); 42 | 43 | ctx.collect(this.dser.deserialize( 44 | JacksonUtils.bean2Json(ImmutableMap.of( 45 | "user_id", 11111L + i 46 | , "name", "antigeneral" 47 | , "flink_read_timestamp", currentTimeMills + "")).getBytes() 48 | )); 49 | Thread.sleep(1000); 50 | i++; 51 | 52 | if (limit >= 0 && i > limit) { 53 | this.isCancel = true; 54 | } 55 | 56 | if (enableSourceWatermark) { 57 | ctx.emitWatermark(new Watermark(currentTimeMills)); 58 | } 59 | } 60 | } 61 | 62 | @Override 63 | public void cancel() { 64 | this.isCancel = true; 65 | } 66 | } 67 | -------------------------------------------------------------------------------- 
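Note on the watermark push down pair above: _06_SupportsWatermarkPushDown_Test only differs from the "before" variant in that its connector hands the planner's watermark strategy over to the runtime source. A minimal sketch of the table-source side of that handshake follows; it assumes the Flink 1.13 SupportsWatermarkPushDown interface, and the class name WatermarkPushDownSourceSketch is illustrative only, not a file in this repository.

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.connector.source.SourceFunctionProvider;
import org.apache.flink.table.connector.source.abilities.SupportsWatermarkPushDown;
import org.apache.flink.table.data.RowData;

public class WatermarkPushDownSourceSketch implements ScanTableSource, SupportsWatermarkPushDown {

    private final SourceFunction<RowData> sourceFunction;

    // Strategy pushed down by the planner; stays null when nothing is pushed down.
    private WatermarkStrategy<RowData> watermarkStrategy;

    public WatermarkPushDownSourceSketch(SourceFunction<RowData> sourceFunction) {
        this.sourceFunction = sourceFunction;
    }

    @Override
    public void applyWatermark(WatermarkStrategy<RowData> watermarkStrategy) {
        // Called by the planner when the WATERMARK clause of the DDL can be evaluated inside the source.
        this.watermarkStrategy = watermarkStrategy;
    }

    @Override
    public ChangelogMode getChangelogMode() {
        return ChangelogMode.insertOnly();
    }

    @Override
    public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
        // A real connector would build a source that actually emits watermarks from watermarkStrategy
        // (as Abilities_SourceFunction does via ctx.emitWatermark); this sketch only shows where the
        // pushed-down strategy becomes available.
        return SourceFunctionProvider.of(sourceFunction, false);
    }

    @Override
    public DynamicTableSource copy() {
        WatermarkPushDownSourceSketch copy = new WatermarkPushDownSourceSketch(sourceFunction);
        copy.watermarkStrategy = watermarkStrategy;
        return copy;
    }

    @Override
    public String asSummaryString() {
        return "watermark-push-down-sketch";
    }
}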
/flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/before/_01_Before_SupportsFilterPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source.before; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _01_Before_SupportsFilterPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " `name` STRING\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'before_supports_reading_metadata_user_defined',\n" 23 | + " 'format' = 'json',\n" 24 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.before.Before_Abilities_SourceFunction'\n" 25 | + ");\n" 26 | + "\n" 27 | + "CREATE TABLE sink_table (\n" 28 | + " user_id BIGINT,\n" 29 | + " name STRING\n" 30 | + ") WITH (\n" 31 | + " 'connector' = 'print'\n" 32 | + ");\n" 33 | + "\n" 34 | + "INSERT INTO sink_table\n" 35 | + "SELECT\n" 36 | + " *\n" 37 | + "FROM source_table\n" 38 | + "WHERE user_id > 3333\n"; 39 | 40 | Arrays.stream(sql.split(";")) 41 | .forEach(flinkEnv.streamTEnv()::executeSql); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/before/_02_Before_SupportsLimitPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source.before; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _02_Before_SupportsLimitPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " flink_read_timestamp BIGINT\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'before_supports_reading_metadata_user_defined',\n" 23 | + " 'format' = 'json',\n" 24 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.before.Before_Abilities_SourceFunction'\n" 25 | + ");\n" 26 | + "\n" 27 | + "CREATE TABLE sink_table (\n" 28 | + " user_id BIGINT,\n" 29 | + " flink_read_timestamp BIGINT\n" 30 | + ") WITH (\n" 31 | + " 'connector' = 'print'\n" 32 | + ");\n" 33 | + "\n" 34 | + "INSERT INTO sink_table\n" 35 | + "SELECT\n" 36 | + " *\n" 37 | + "FROM source_table\n" 38 | + "LIMIT 100"; 39 | 40 | Arrays.stream(sql.split(";")) 41 | .forEach(flinkEnv.streamTEnv()::executeSql); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- 
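Both filter push down tests (_01_SupportsFilterPushDown_Test earlier and _01_Before_SupportsFilterPushDown_Test above) run the same WHERE user_id > 3333; whether the predicate reaches the source depends on SupportsFilterPushDown. A hedged sketch of that handshake, assuming the Flink 1.13 interface (the class name is hypothetical and, in a real connector, these methods sit on the ScanTableSource implementation):

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.table.connector.source.abilities.SupportsFilterPushDown;
import org.apache.flink.table.expressions.ResolvedExpression;

public class FilterPushDownSketch implements SupportsFilterPushDown {

    // Filters this source has promised to evaluate itself (e.g. user_id > 3333).
    private final List<ResolvedExpression> pushedFilters = new ArrayList<>();

    @Override
    public Result applyFilters(List<ResolvedExpression> filters) {
        List<ResolvedExpression> accepted = new ArrayList<>();
        List<ResolvedExpression> remaining = new ArrayList<>();
        for (ResolvedExpression filter : filters) {
            // A real connector inspects the expression tree here and accepts only the predicates
            // it can evaluate; this sketch conservatively accepts none of them.
            remaining.add(filter);
        }
        pushedFilters.addAll(accepted);
        // Accepted filters may be dropped from the plan; remaining ones are still applied by Flink.
        return Result.of(accepted, remaining);
    }
}

Whether push down actually happened can be checked by printing flinkEnv.streamTEnv().explainSql(...) for the query and seeing which predicates remain outside the TableSourceScan node.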
/flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/before/_03_Before_SupportsPartitionPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source.before; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _03_Before_SupportsPartitionPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " flink_read_timestamp BIGINT METADATA VIRTUAL,\n" 21 | + " `name` STRING\n" 22 | + ") WITH (\n" 23 | + " 'connector' = 'before_supports_reading_metadata_user_defined',\n" 24 | + " 'format' = 'json',\n" 25 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.before.Before_Abilities_SourceFunction'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE sink_table (\n" 29 | + " user_id BIGINT,\n" 30 | + " flink_read_timestamp BIGINT,\n" 31 | + " name STRING\n" 32 | + ") WITH (\n" 33 | + " 'connector' = 'print'\n" 34 | + ");\n" 35 | + "\n" 36 | + "INSERT INTO sink_table\n" 37 | + "SELECT\n" 38 | + " *\n" 39 | + "FROM source_table\n" 40 | + "LIMIT 100"; 41 | 42 | Arrays.stream(sql.split(";")) 43 | .forEach(flinkEnv.streamTEnv()::executeSql); 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/before/_04_Before_SupportsProjectionPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source.before; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _04_Before_SupportsProjectionPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " `name1` STRING,\n" 21 | + " `name2` STRING,\n" 22 | + " `name3` STRING\n" 23 | + ") WITH (\n" 24 | + " 'connector' = 'before_supports_reading_metadata_user_defined',\n" 25 | + " 'format' = 'json',\n" 26 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.before.Before_Abilities_SourceFunction'\n" 27 | + ");\n" 28 | + "\n" 29 | + "CREATE TABLE sink_table (\n" 30 | + " user_id BIGINT,\n" 31 | + " name STRING\n" 32 | + ") WITH (\n" 33 | + " 'connector' = 'print'\n" 34 | + ");\n" 35 | + "\n" 36 | + "INSERT INTO sink_table\n" 37 | + "SELECT\n" 38 | + " user_id\n" 39 | + " , name1 as name\n" 40 | + "FROM source_table"; 41 | 42 | Arrays.stream(sql.split(";")) 43 | .forEach(flinkEnv.streamTEnv()::executeSql); 44 | } 45 | 46 | } 47 | 
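The projection push down pair (_04_SupportsProjectionPushDown_Test and the "before" variant above) reads only user_id and name1 out of four declared columns. The ability-specific part of the connector is small; a hedged sketch of just those two methods, assuming the Flink 1.13 SupportsProjectionPushDown interface (class name hypothetical; in a real connector they live on the ScanTableSource implementation):

import org.apache.flink.table.connector.source.abilities.SupportsProjectionPushDown;

public class ProjectionPushDownSketch implements SupportsProjectionPushDown {

    // Indices of the physical columns the query actually reads, e.g. {{0}, {1}} when only
    // user_id and name1 are referenced out of (user_id, name1, name2, name3).
    private int[][] projectedFields;

    @Override
    public boolean supportsNestedProjection() {
        // false: only top-level columns are pruned; nested ROW fields would still be read in full.
        return false;
    }

    @Override
    public void applyProjection(int[][] projectedFields) {
        // The planner hands over the referenced column indices; the connector should narrow its
        // produced row type (and, here, the JSON deserializer) to exactly these fields.
        this.projectedFields = projectedFields;
    }
}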
-------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/before/_05_Before_SupportsReadingMetadata_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source.before; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _05_Before_SupportsReadingMetadata_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " `name` STRING\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'before_supports_reading_metadata_user_defined',\n" 23 | + " 'format' = 'json',\n" 24 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.before.Before_Abilities_SourceFunction'\n" 25 | + ");\n" 26 | + "\n" 27 | + "CREATE TABLE sink_table (\n" 28 | + " user_id BIGINT,\n" 29 | + " name STRING\n" 30 | + ") WITH (\n" 31 | + " 'connector' = 'print'\n" 32 | + ");\n" 33 | + "\n" 34 | + "INSERT INTO sink_table\n" 35 | + "SELECT\n" 36 | + " *\n" 37 | + "FROM source_table"; 38 | 39 | Arrays.stream(sql.split(";")) 40 | .forEach(flinkEnv.streamTEnv()::executeSql); 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/abilities/source/before/_06_Before_SupportsWatermarkPushDown_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.abilities.source.before; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class _06_Before_SupportsWatermarkPushDown_Test { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 13 | 14 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "1.13.5 用户自定义 SOURCE 案例"); 15 | 16 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("state.backend", "rocksdb"); 17 | 18 | String sql = "CREATE TABLE source_table (\n" 19 | + " user_id BIGINT,\n" 20 | + " time_ltz AS cast(CURRENT_TIMESTAMP as TIMESTAMP(3)),\n" 21 | + " `name` STRING,\n" 22 | + " WATERMARK FOR time_ltz AS time_ltz - INTERVAL '5' SECOND\n" 23 | + ") WITH (\n" 24 | + " 'connector' = 'before_supports_reading_metadata_user_defined',\n" 25 | + " 'format' = 'json',\n" 26 | + " 'class.name' = 'flink.examples.sql._03.source_sink.abilities.source.before.Before_Abilities_SourceFunction'\n" 27 | + ");\n" 28 | + "\n" 29 | + "CREATE TABLE sink_table (\n" 30 | + " user_id BIGINT,\n" 31 | + " name STRING\n" 32 | + ") WITH (\n" 33 | + " 'connector' = 'print'\n" 34 | + ");\n" 35 | + "\n" 36 | + "INSERT INTO sink_table\n" 37 | + "SELECT\n" 38 | + " user_id,\n" 39 | + " name\n" 40 | + "FROM source_table"; 41 | 42 | Arrays.stream(sql.split(";")) 43 | .forEach(flinkEnv.streamTEnv()::executeSql); 44 | } 45 | 46 | } 47 | 
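_05_Before_SupportsReadingMetadata_Test above has to drop the flink_read_timestamp BIGINT METADATA VIRTUAL column because its connector cannot serve metadata. A hedged sketch of the two methods a connector adds for that, assuming the Flink 1.13 SupportsReadingMetadata interface (class name hypothetical):

import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata;
import org.apache.flink.table.types.DataType;

public class ReadingMetadataSketch implements SupportsReadingMetadata {

    // Metadata keys the planner asked for, in the order they must be appended to each row.
    private List<String> metadataKeys = Collections.emptyList();

    // Full produced row type including the requested metadata columns at the end.
    private DataType producedDataType;

    @Override
    public Map<String, DataType> listReadableMetadata() {
        // Declares which metadata columns this source can serve; the DDL's
        // "flink_read_timestamp BIGINT METADATA VIRTUAL" must match one of these keys.
        return Collections.singletonMap("flink_read_timestamp", DataTypes.BIGINT());
    }

    @Override
    public void applyReadableMetadata(List<String> metadataKeys, DataType producedDataType) {
        // Only metadata columns actually referenced by the query are requested here;
        // the runtime source must emit them as trailing fields of every RowData.
        this.metadataKeys = metadataKeys;
        this.producedDataType = producedDataType;
    }
}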
-------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/table/redis/container/RedisCommandsContainer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | package flink.examples.sql._03.source_sink.table.redis.container; 18 | 19 | import java.io.Closeable; 20 | import java.io.Serializable; 21 | import java.util.List; 22 | 23 | /** 24 | * The container for all available Redis commands. 25 | */ 26 | public interface RedisCommandsContainer extends Closeable, Serializable { 27 | 28 | void open() throws Exception; 29 | 30 | byte[] get(byte[] key); 31 | 32 | List multiGet(List key); 33 | 34 | byte[] hget(byte[] key, byte[] hashField); 35 | 36 | } 37 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/table/redis/demo/RedisDemo.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.table.redis.demo; 2 | 3 | import java.util.HashMap; 4 | 5 | import com.google.gson.Gson; 6 | 7 | import redis.clients.jedis.Jedis; 8 | import redis.clients.jedis.JedisPool; 9 | 10 | /** 11 | * redis 安装:https://blog.csdn.net/realize_dream/article/details/106227622 12 | * redis java client:https://www.cnblogs.com/chenyanbin/p/12088796.html 13 | */ 14 | public class RedisDemo { 15 | 16 | public static void main(String[] args) { 17 | singleConnect(); 18 | poolConnect(); 19 | } 20 | 21 | public static void singleConnect() { 22 | // jedis单实例连接 23 | Jedis jedis = new Jedis("127.0.0.1", 6379); 24 | String result = jedis.get("a"); 25 | 26 | HashMap h = new HashMap<>(); 27 | 28 | h.put("name", "namehhh"); 29 | h.put("name1", "namehhh111"); 30 | h.put("score", 3L); 31 | 32 | String s = new Gson().toJson(h); 33 | 34 | jedis.set("a", s); 35 | 36 | System.out.println(result); 37 | jedis.close(); 38 | } 39 | 40 | public static void poolConnect() { 41 | //jedis连接池 42 | JedisPool pool = new JedisPool("127.0.0.1", 6379); 43 | Jedis jedis = pool.getResource(); 44 | String result = jedis.get("a"); 45 | System.out.println(result); 46 | jedis.close(); 47 | pool.close(); 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/table/redis/mapper/LookupRedisMapper.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.table.redis.mapper; 2 | 3 | 4 | import java.io.IOException; 5 | 6 | import 
org.apache.flink.api.common.serialization.AbstractDeserializationSchema; 7 | import org.apache.flink.api.common.serialization.DeserializationSchema; 8 | import org.apache.flink.api.common.serialization.SerializationSchema; 9 | import org.apache.flink.table.data.RowData; 10 | 11 | import com.google.common.base.Joiner; 12 | 13 | 14 | public class LookupRedisMapper extends AbstractDeserializationSchema implements SerializationSchema { 15 | 16 | 17 | private DeserializationSchema valueDeserializationSchema; 18 | 19 | public LookupRedisMapper(DeserializationSchema valueDeserializationSchema) { 20 | 21 | this.valueDeserializationSchema = valueDeserializationSchema; 22 | 23 | } 24 | 25 | public RedisCommandDescription getCommandDescription() { 26 | return new RedisCommandDescription(RedisCommand.GET); 27 | } 28 | 29 | @Override 30 | public RowData deserialize(byte[] message) { 31 | try { 32 | return this.valueDeserializationSchema.deserialize(message); 33 | } catch (IOException e) { 34 | throw new RuntimeException(e); 35 | } 36 | } 37 | 38 | @Override 39 | public byte[] serialize(Object[] element) { 40 | return Joiner.on(":").join(element).getBytes(); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/table/redis/mapper/RedisCommand.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.table.redis.mapper; 2 | 3 | import org.apache.flink.streaming.connectors.redis.common.mapper.RedisDataType; 4 | 5 | 6 | public enum RedisCommand { 7 | 8 | GET(RedisDataType.STRING), 9 | 10 | HGET(RedisDataType.HASH), 11 | 12 | ; 13 | 14 | private RedisDataType redisDataType; 15 | 16 | RedisCommand(RedisDataType redisDataType) { 17 | this.redisDataType = redisDataType; 18 | } 19 | 20 | public RedisDataType getRedisDataType() { 21 | return redisDataType; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/table/redis/mapper/RedisCommandDescription.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.table.redis.mapper; 2 | 3 | import org.apache.flink.streaming.connectors.redis.common.mapper.RedisDataType; 4 | 5 | 6 | public class RedisCommandDescription { 7 | 8 | private static final long serialVersionUID = 1L; 9 | 10 | private RedisCommand redisCommand; 11 | 12 | private String additionalKey; 13 | 14 | public RedisCommandDescription(RedisCommand redisCommand, String additionalKey) { 15 | 16 | this.redisCommand = redisCommand; 17 | this.additionalKey = additionalKey; 18 | 19 | if (redisCommand.getRedisDataType() == RedisDataType.HASH) { 20 | if (additionalKey == null) { 21 | throw new IllegalArgumentException("Hash should have additional key"); 22 | } 23 | } 24 | } 25 | 26 | public RedisCommandDescription(RedisCommand redisCommand) { 27 | 28 | this(redisCommand, null); 29 | } 30 | 31 | public RedisCommand getRedisCommand() { 32 | return redisCommand; 33 | } 34 | 35 | public String getAdditionalKey() { 36 | return additionalKey; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/table/redis/mapper/SetRedisMapper.java: -------------------------------------------------------------------------------- 1 | package 
flink.examples.sql._03.source_sink.table.redis.mapper; 2 | 3 | import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand; 4 | import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription; 5 | import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper; 6 | import org.apache.flink.table.data.RowData; 7 | 8 | 9 | public class SetRedisMapper implements RedisMapper { 10 | 11 | @Override 12 | public RedisCommandDescription getCommandDescription() { 13 | return new RedisCommandDescription(RedisCommand.SET); 14 | } 15 | 16 | @Override 17 | public String getKeyFromData(RowData data) { 18 | return data.getString(0).toString(); 19 | } 20 | 21 | @Override 22 | public String getValueFromData(RowData data) { 23 | return data.getString(1).toString(); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_03/source_sink/table/user_defined/UserDefinedSource.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._03.source_sink.table.user_defined; 2 | 3 | import org.apache.flink.api.common.serialization.DeserializationSchema; 4 | import org.apache.flink.streaming.api.functions.source.RichSourceFunction; 5 | import org.apache.flink.table.data.RowData; 6 | 7 | import com.google.common.collect.ImmutableMap; 8 | 9 | import flink.examples.JacksonUtils; 10 | 11 | public class UserDefinedSource extends RichSourceFunction { 12 | 13 | private DeserializationSchema dser; 14 | 15 | private volatile boolean isCancel; 16 | 17 | public UserDefinedSource(DeserializationSchema dser) { 18 | this.dser = dser; 19 | } 20 | 21 | @Override 22 | public void run(SourceContext ctx) throws Exception { 23 | while (!this.isCancel) { 24 | ctx.collect(this.dser.deserialize( 25 | JacksonUtils.bean2Json(ImmutableMap.of("user_id", 1111L, "name", "antigeneral")).getBytes() 26 | )); 27 | Thread.sleep(1000); 28 | } 29 | } 30 | 31 | @Override 32 | public void cancel() { 33 | this.isCancel = true; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/ProtobufFormatTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats; 2 | 3 | import org.apache.flink.configuration.Configuration; 4 | import org.apache.flink.streaming.api.TimeCharacteristic; 5 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 6 | import org.apache.flink.table.api.EnvironmentSettings; 7 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 8 | 9 | /** 10 | * nc -lk 9999 11 | */ 12 | public class ProtobufFormatTest { 13 | 14 | public static void main(String[] args) throws Exception { 15 | 16 | StreamExecutionEnvironment env = 17 | StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 18 | 19 | env.setParallelism(1); 20 | 21 | EnvironmentSettings settings = EnvironmentSettings 22 | .newInstance() 23 | .useBlinkPlanner() 24 | .inStreamingMode().build(); 25 | 26 | env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime); 27 | 28 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings); 29 | 30 | String sourceTableSql = "CREATE TABLE protobuf_source (" 31 | + " name STRING\n" 32 | + " , names ARRAY\n" 33 | + " , si_map MAP\n" 34 | + ")\n" 35 | + "WITH (\n" 
36 | + " 'connector' = 'socket',\n" 37 | + " 'hostname' = 'localhost',\n" 38 | + " 'port' = '9999',\n" 39 | + " 'format' = 'protobuf',\n" 40 | + " 'protobuf.class-name' = 'flink.examples.sql._04.format.formats.protobuf.Test'\n" 41 | + ")"; 42 | 43 | String sinkTableSql = "CREATE TABLE print_sink (\n" 44 | + " name STRING\n" 45 | + " , names ARRAY\n" 46 | + " , si_map MAP\n" 47 | + ") WITH (\n" 48 | + " 'connector' = 'print'\n" 49 | + ")"; 50 | 51 | String selectSql = "INSERT INTO print_sink\n" 52 | + "SELECT *\n" 53 | + "FROM protobuf_source\n"; 54 | 55 | tEnv.executeSql(sourceTableSql); 56 | tEnv.executeSql(sinkTableSql); 57 | tEnv.executeSql(selectSql); 58 | 59 | env.execute(); 60 | } 61 | 62 | 63 | } 64 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/SocketWriteTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats; 2 | 3 | import java.io.IOException; 4 | import java.net.ServerSocket; 5 | import java.net.Socket; 6 | import java.util.Map; 7 | 8 | import com.google.common.collect.ImmutableMap; 9 | 10 | import flink.examples.JacksonUtils; 11 | import flink.examples.sql._05.format.formats.protobuf.Test; 12 | 13 | 14 | public class SocketWriteTest { 15 | 16 | 17 | public static void main(String[] args) throws IOException, InterruptedException { 18 | 19 | ServerSocket serversocket = new ServerSocket(9999); 20 | 21 | final Socket socket = serversocket.accept(); 22 | 23 | int i = 0; 24 | 25 | while (true) { 26 | 27 | Map map = ImmutableMap.of("key1", 1, "地图", i); 28 | 29 | Test test = Test.newBuilder() 30 | .setName("姓名" + i) 31 | .addNames("姓名列表" + i) 32 | .putAllSiMap(map) 33 | .build(); 34 | 35 | System.out.println(JacksonUtils.bean2Json(test)); 36 | byte[] b = test.toByteArray(); 37 | 38 | socket.getOutputStream().write(b); 39 | 40 | socket.getOutputStream().flush(); 41 | i++; 42 | 43 | if (i == 10) { 44 | break; 45 | } 46 | 47 | Thread.sleep(500); 48 | } 49 | 50 | socket.close(); 51 | serversocket.close(); 52 | 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/csv/ChangelogCsvFormat.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.csv; 2 | 3 | import java.util.List; 4 | 5 | import org.apache.flink.api.common.serialization.DeserializationSchema; 6 | import org.apache.flink.api.common.typeinfo.TypeInformation; 7 | import org.apache.flink.table.connector.ChangelogMode; 8 | import org.apache.flink.table.connector.format.DecodingFormat; 9 | import org.apache.flink.table.connector.source.DynamicTableSource; 10 | import org.apache.flink.table.connector.source.DynamicTableSource.DataStructureConverter; 11 | import org.apache.flink.table.data.RowData; 12 | import org.apache.flink.table.types.DataType; 13 | import org.apache.flink.table.types.logical.LogicalType; 14 | import org.apache.flink.types.RowKind; 15 | 16 | 17 | public class ChangelogCsvFormat implements DecodingFormat> { 18 | 19 | private final String columnDelimiter; 20 | 21 | public ChangelogCsvFormat(String columnDelimiter) { 22 | this.columnDelimiter = columnDelimiter; 23 | } 24 | 25 | @Override 26 | @SuppressWarnings("unchecked") 27 | public DeserializationSchema createRuntimeDecoder( 28 | DynamicTableSource.Context context, 29 | 
DataType producedDataType) { 30 | // create type information for the DeserializationSchema 31 | final TypeInformation<RowData> producedTypeInfo = context.createTypeInformation( 32 | producedDataType); 33 | 34 | // most of the code in DeserializationSchema will not work on internal data structures 35 | // create a converter for conversion at the end 36 | final DataStructureConverter converter = context.createDataStructureConverter(producedDataType); 37 | 38 | // use logical types during runtime for parsing 39 | final List<LogicalType> parsingTypes = producedDataType.getLogicalType().getChildren(); 40 | 41 | // create runtime class 42 | return new ChangelogCsvDeserializer(parsingTypes, converter, producedTypeInfo, columnDelimiter); 43 | } 44 | 45 | @Override 46 | public ChangelogMode getChangelogMode() { 47 | // define that this format can produce INSERT and DELETE rows 48 | return ChangelogMode.newBuilder() 49 | .addContainedKind(RowKind.INSERT) 50 | .addContainedKind(RowKind.DELETE) 51 | .build(); 52 | } 53 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/csv/ChangelogCsvFormatFactory.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.csv; 2 | 3 | import java.util.Collections; 4 | import java.util.HashSet; 5 | import java.util.Set; 6 | 7 | import org.apache.flink.api.common.serialization.DeserializationSchema; 8 | import org.apache.flink.configuration.ConfigOption; 9 | import org.apache.flink.configuration.ConfigOptions; 10 | import org.apache.flink.configuration.ReadableConfig; 11 | import org.apache.flink.table.connector.format.DecodingFormat; 12 | import org.apache.flink.table.data.RowData; 13 | import org.apache.flink.table.factories.DeserializationFormatFactory; 14 | import org.apache.flink.table.factories.DynamicTableFactory; 15 | import org.apache.flink.table.factories.FactoryUtil; 16 | 17 | 18 | public class ChangelogCsvFormatFactory implements DeserializationFormatFactory { 19 | 20 | // define all options statically 21 | public static final ConfigOption<String> COLUMN_DELIMITER = ConfigOptions.key("column-delimiter") 22 | .stringType() 23 | .defaultValue("|"); 24 | 25 | @Override 26 | public String factoryIdentifier() { 27 | return "changelog-csv"; 28 | } 29 | 30 | @Override 31 | public Set<ConfigOption<?>> requiredOptions() { 32 | return Collections.emptySet(); 33 | } 34 | 35 | @Override 36 | public Set<ConfigOption<?>> optionalOptions() { 37 | final Set<ConfigOption<?>> options = new HashSet<>(); 38 | options.add(COLUMN_DELIMITER); 39 | return options; 40 | } 41 | 42 | @Override 43 | public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat( 44 | DynamicTableFactory.Context context, 45 | ReadableConfig formatOptions) { 46 | // either implement your custom validation logic here ...
47 | // or use the provided helper method 48 | FactoryUtil.validateFactoryOptions(this, formatOptions); 49 | 50 | // get the validated options 51 | final String columnDelimiter = formatOptions.get(COLUMN_DELIMITER); 52 | 53 | // create and return the format 54 | return new ChangelogCsvFormat(columnDelimiter); 55 | } 56 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/protobuf/descriptors/Protobuf.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.protobuf.descriptors; 2 | 3 | import java.util.Map; 4 | 5 | import org.apache.flink.annotation.PublicEvolving; 6 | import org.apache.flink.table.descriptors.DescriptorProperties; 7 | import org.apache.flink.table.descriptors.FormatDescriptor; 8 | import org.apache.flink.util.Preconditions; 9 | 10 | import com.google.protobuf.Message; 11 | 12 | /** 13 | * Format descriptor for Apache Protobuf messages. 14 | */ 15 | @PublicEvolving 16 | public class Protobuf extends FormatDescriptor { 17 | 18 | private Class messageClass; 19 | private String protobufDescriptorHttpGetUrl; 20 | 21 | public Protobuf() { 22 | super(ProtobufValidator.FORMAT_TYPE_VALUE, 1); 23 | } 24 | 25 | /** 26 | * Sets the class of the Protobuf message. 27 | * 28 | * @param messageClass class of the Protobuf message. 29 | */ 30 | public Protobuf messageClass(Class messageClass) { 31 | Preconditions.checkNotNull(messageClass); 32 | this.messageClass = messageClass; 33 | return this; 34 | } 35 | 36 | /** 37 | * Sets the Protobuf for protobuf messages. 38 | * 39 | * @param protobufDescriptorHttpGetUrl protobuf descriptor http get url 40 | */ 41 | public Protobuf protobufDescriptorHttpGetUrl(String protobufDescriptorHttpGetUrl) { 42 | Preconditions.checkNotNull(protobufDescriptorHttpGetUrl); 43 | this.protobufDescriptorHttpGetUrl = protobufDescriptorHttpGetUrl; 44 | return this; 45 | } 46 | 47 | @Override 48 | protected Map toFormatProperties() { 49 | final DescriptorProperties properties = new DescriptorProperties(); 50 | 51 | if (null != messageClass) { 52 | properties.putClass(ProtobufValidator.FORMAT_MESSAGE_CLASS, messageClass); 53 | } 54 | if (null != protobufDescriptorHttpGetUrl) { 55 | properties.putString(ProtobufValidator.FORMAT_PROTOBUF_DESCRIPTOR_HTTP_GET_URL, protobufDescriptorHttpGetUrl); 56 | } 57 | 58 | return properties.asMap(); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/protobuf/descriptors/ProtobufValidator.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.protobuf.descriptors; 2 | 3 | import org.apache.flink.table.api.ValidationException; 4 | import org.apache.flink.table.descriptors.DescriptorProperties; 5 | import org.apache.flink.table.descriptors.FormatDescriptorValidator; 6 | 7 | /** 8 | * Validator for {@link Protobuf}. 
9 | */ 10 | public class ProtobufValidator extends FormatDescriptorValidator { 11 | 12 | public static final String FORMAT_TYPE_VALUE = "protobuf"; 13 | public static final String FORMAT_MESSAGE_CLASS = "format.message-class"; 14 | public static final String FORMAT_PROTOBUF_DESCRIPTOR_HTTP_GET_URL = "format.protobuf-descriptor-http-get-url"; 15 | 16 | @Override 17 | public void validate(DescriptorProperties properties) { 18 | super.validate(properties); 19 | final boolean hasMessageClass = properties.containsKey(FORMAT_MESSAGE_CLASS); 20 | final boolean hasProtobufDescriptorHttpGetUrl = properties.containsKey(FORMAT_PROTOBUF_DESCRIPTOR_HTTP_GET_URL); 21 | if (hasMessageClass && hasProtobufDescriptorHttpGetUrl) { 22 | throw new ValidationException("A definition of both a Protobuf message class and Protobuf get descriptor http url is not allowed."); 23 | } else if (hasMessageClass) { 24 | properties.validateString(FORMAT_MESSAGE_CLASS, false, 1); 25 | } else if (hasProtobufDescriptorHttpGetUrl) { 26 | properties.validateString(FORMAT_PROTOBUF_DESCRIPTOR_HTTP_GET_URL, false, 1); 27 | } else { 28 | throw new ValidationException("A definition of an Protobuf message class or Protobuf get descriptor http url is required."); 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/protobuf/row/ProtobufSerializationSchema.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.protobuf.row; 2 | 3 | import org.apache.flink.api.common.serialization.SerializationSchema; 4 | 5 | import com.google.protobuf.Message; 6 | 7 | public class ProtobufSerializationSchema implements SerializationSchema { 8 | 9 | @Override 10 | public byte[] serialize(T t) { 11 | return t.toByteArray(); 12 | } 13 | 14 | } 15 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/protobuf/rowdata/ProtobufOptions.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.protobuf.rowdata; 2 | 3 | import org.apache.flink.configuration.ConfigOption; 4 | import org.apache.flink.configuration.ConfigOptions; 5 | 6 | 7 | public class ProtobufOptions { 8 | 9 | public static final ConfigOption PROTOBUF_CLASS_NAME = 10 | ConfigOptions.key("class-name") 11 | .stringType() 12 | .noDefaultValue() 13 | .withDescription( 14 | "Optional flag to specify whether to fail if a field is missing or not, false by default."); 15 | 16 | public static final ConfigOption PROTOBUF_DESCRIPTOR_FILE = 17 | ConfigOptions.key("descriptor-file") 18 | .stringType() 19 | .noDefaultValue() 20 | .withDescription( 21 | "Optional flag to skip fields and rows with parse errors instead of failing;\n" 22 | + "fields are set to null in case of errors, false by default."); 23 | 24 | } 25 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/protobuf/rowdata/ProtobufRowDataSerializationSchema.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.protobuf.rowdata; 2 | 3 | import org.apache.flink.api.common.serialization.SerializationSchema; 4 | import org.apache.flink.table.data.RowData; 5 | 6 | 7 | public class 
ProtobufRowDataSerializationSchema implements SerializationSchema<RowData> { 8 | @Override 9 | public byte[] serialize(RowData element) { 10 | return new byte[0]; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/protobuf/rowdata/RowDataToProtobufConverters.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.protobuf.rowdata; 2 | 3 | 4 | public class RowDataToProtobufConverters { 5 | } 6 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/utils/MoreRunnables.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.utils; 2 | 3 | public class MoreRunnables { 4 | 5 | 6 | public static <EXCEPTION extends Throwable> void throwing(ThrowableRunable<EXCEPTION> throwableRunable) { 7 | try { 8 | throwableRunable.run(); 9 | } catch (Throwable e) { 10 | throw new RuntimeException(e); 11 | } 12 | } 13 | 14 | } 15 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/utils/MoreSuppliers.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.utils; 2 | 3 | 4 | public class MoreSuppliers { 5 | 6 | private MoreSuppliers() { 7 | throw new UnsupportedOperationException(); 8 | } 9 | 10 | public static <OUT, EXCEPTION extends Throwable> OUT throwing(ThrowableSupplier<OUT, EXCEPTION> throwableSupplier) { 11 | try { 12 | return throwableSupplier.get(); 13 | } catch (Throwable e) { 14 | throw new RuntimeException(e); 15 | } 16 | } 17 | 18 | } 19 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/utils/ThrowableRunable.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.utils; 2 | 3 | @FunctionalInterface 4 | public interface ThrowableRunable<EXCEPTION extends Throwable> { 5 | 6 | void run() throws EXCEPTION; 7 | 8 | } 9 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_05/format/formats/utils/ThrowableSupplier.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._05.format.formats.utils; 2 | 3 | @FunctionalInterface 4 | public interface ThrowableSupplier<OUT, EXCEPTION extends Throwable> { 5 | 6 | OUT get() throws EXCEPTION; 7 | 8 | } 9 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_06/calcite/CalciteTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._06.calcite; 2 | 3 | import org.apache.calcite.sql.SqlNode; 4 | import org.apache.calcite.sql.parser.SqlParseException; 5 | import org.apache.calcite.sql.parser.SqlParser; 6 | 7 | 8 | public class CalciteTest { 9 | 10 | public static void main(String[] args) throws SqlParseException { 11 | SqlParser parser = SqlParser.create("select c,d from source where a = '6'", SqlParser.Config.DEFAULT); 12 | SqlNode sqlNode = parser.parseStmt(); 13 | 14 | System.out.println(sqlNode); 15 | } 16 | 17 | } 18 | 
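CalciteTest above parses the statement and prints the AST's default rendering. As a minimal sketch of what the parser hands back (not part of the repository; the class name is made up, and it assumes the Calcite version bundled with Flink 1.13), the root node of a plain SELECT is a SqlSelect whose clauses can be inspected individually and then unparsed back into SQL text:

import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.dialect.CalciteSqlDialect;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;

public class CalciteInspectSketch {

    public static void main(String[] args) throws SqlParseException {
        SqlParser parser = SqlParser.create("select c, d from source where a = '6'", SqlParser.Config.DEFAULT);
        SqlNode sqlNode = parser.parseStmt();

        // For a plain SELECT, the root of the AST is a SqlSelect.
        if (sqlNode instanceof SqlSelect) {
            SqlSelect select = (SqlSelect) sqlNode;
            System.out.println("select list: " + select.getSelectList()); // c, d
            System.out.println("from:        " + select.getFrom());       // source
            System.out.println("where:       " + select.getWhere());      // a = '6'
        }

        // The AST can also be unparsed back into SQL text.
        System.out.println(sqlNode.toSqlString(CalciteSqlDialect.DEFAULT).getSql());
    }
}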
-------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_06/calcite/javacc/JavaccCodeGenTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._06.calcite.javacc; 2 | 3 | 4 | 5 | public class JavaccCodeGenTest { 6 | 7 | public static void main(String[] args) throws Exception { 8 | // version(); 9 | javacc(); 10 | } 11 | 12 | private static void version() throws Exception { 13 | org.javacc.parser.Main.main(new String[] {"-version"}); 14 | } 15 | 16 | private static void javacc() throws Exception { 17 | 18 | String path = ClassLoader.getSystemResources("Simple1.jj").nextElement().getPath(); 19 | 20 | org.javacc.parser.Main.main(new String[] {path}); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_06/calcite/javacc/Simple1Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._06.calcite.javacc; 2 | 3 | 4 | import flink.examples.sql._06.calcite.javacc.generatedcode.Simple1; 5 | 6 | 7 | public class Simple1Test { 8 | 9 | public static void main(String[] args) throws Exception { 10 | Simple1.main(args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_06/calcite/javacc/generatedcode/Simple1Constants.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._06.calcite.javacc.generatedcode;/* Generated By:JavaCC: Do not edit this line. Simple1Constants.java */ 2 | 3 | /** 4 | * Token literal values and constants. 5 | * Generated by org.javacc.parser.OtherFilesGen#start() 6 | */ 7 | public interface Simple1Constants { 8 | 9 | /** End of File. */ 10 | int EOF = 0; 11 | 12 | /** Lexical state. */ 13 | int DEFAULT = 0; 14 | 15 | /** Literal token values. 
*/ 16 | String[] tokenImage = { 17 | "", 18 | "\"\\n\"", 19 | "\"\\r\"", 20 | "\"{\"", 21 | "\"}\"", 22 | }; 23 | 24 | } 25 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_01_select_where/SelectWhereTest5.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._01_select_where; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class SelectWhereTest5 { 10 | 11 | 12 | public static void main(String[] args) throws Exception { 13 | 14 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 15 | 16 | String sql = "CREATE TABLE Orders (\n" 17 | + " order_id BIGINT NOT NULL,\n" 18 | + " name STRING,\n" 19 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 20 | + " WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'datagen',\n" 23 | + " 'rows-per-second' = '10',\n" 24 | + " 'fields.name.length' = '1',\n" 25 | + " 'fields.order_id.min' = '1',\n" 26 | + " 'fields.order_id.max' = '10'\n" 27 | + ");\n" 28 | + "\n" 29 | + "CREATE TABLE target_table (\n" 30 | + " order_id BIGINT NOT NULL,\n" 31 | + " name STRING,\n" 32 | + " row_time timestamp(3)\n" 33 | + ") WITH (\n" 34 | + " 'connector' = 'print'\n" 35 | + ");\n" 36 | + "\n" 37 | + "INSERT INTO target_table\n" 38 | + "SELECT * FROM Orders\n" 39 | + "Where order_id > 3"; 40 | 41 | Arrays.stream(sql.split(";")) 42 | .forEach(flinkEnv.streamTEnv()::executeSql); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_02_select_distinct/KeyProjection$0.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._02_select_distinct; 2 | 3 | 4 | public class KeyProjection$0 implements 5 | org.apache.flink.table.runtime.generated.Projection { 7 | 8 | org.apache.flink.table.data.binary.BinaryRowData out = new org.apache.flink.table.data.binary.BinaryRowData(1); 9 | org.apache.flink.table.data.writer.BinaryRowWriter outWriter = 10 | new org.apache.flink.table.data.writer.BinaryRowWriter(out); 11 | 12 | public KeyProjection$0(Object[] references) throws Exception { 13 | 14 | } 15 | 16 | @Override 17 | public org.apache.flink.table.data.binary.BinaryRowData apply(org.apache.flink.table.data.RowData in1) { 18 | org.apache.flink.table.data.binary.BinaryStringData field$1; 19 | boolean isNull$1; 20 | 21 | 22 | outWriter.reset(); 23 | 24 | isNull$1 = in1.isNullAt(0); 25 | field$1 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8; 26 | if (!isNull$1) { 27 | field$1 = ((org.apache.flink.table.data.binary.BinaryStringData) in1.getString(0)); 28 | } 29 | if (isNull$1) { 30 | outWriter.setNullAt(0); 31 | } else { 32 | outWriter.writeString(0, field$1); 33 | } 34 | 35 | outWriter.complete(); 36 | 37 | 38 | return out; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_04_window_agg/_01_tumble_window/local_agg/KeyProjection$89.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._04_window_agg._01_tumble_window.local_agg; 2 | 3 | 4 | public class KeyProjection$89 
implements 5 | org.apache.flink.table.runtime.generated.Projection { 7 | 8 | org.apache.flink.table.data.binary.BinaryRowData out = new org.apache.flink.table.data.binary.BinaryRowData(2); 9 | org.apache.flink.table.data.writer.BinaryRowWriter outWriter = 10 | new org.apache.flink.table.data.writer.BinaryRowWriter(out); 11 | 12 | public KeyProjection$89(Object[] references) throws Exception { 13 | 14 | } 15 | 16 | @Override 17 | public org.apache.flink.table.data.binary.BinaryRowData apply(org.apache.flink.table.data.RowData in1) { 18 | org.apache.flink.table.data.binary.BinaryStringData field$90; 19 | boolean isNull$90; 20 | int field$91; 21 | boolean isNull$91; 22 | 23 | 24 | outWriter.reset(); 25 | 26 | isNull$90 = in1.isNullAt(0); 27 | field$90 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8; 28 | if (!isNull$90) { 29 | field$90 = ((org.apache.flink.table.data.binary.BinaryStringData) in1.getString(0)); 30 | } 31 | if (isNull$90) { 32 | outWriter.setNullAt(0); 33 | } else { 34 | outWriter.writeString(0, field$90); 35 | } 36 | 37 | 38 | isNull$91 = in1.isNullAt(1); 39 | field$91 = -1; 40 | if (!isNull$91) { 41 | field$91 = in1.getInt(1); 42 | } 43 | if (isNull$91) { 44 | outWriter.setNullAt(1); 45 | } else { 46 | outWriter.writeInt(1, field$91); 47 | } 48 | 49 | outWriter.complete(); 50 | 51 | 52 | return out; 53 | } 54 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_04_window_agg/_02_cumulate_window/cumulate/global_agg/KeyProjection$301.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._04_window_agg._02_cumulate_window.cumulate.global_agg; 2 | 3 | 4 | public class KeyProjection$301 implements 5 | org.apache.flink.table.runtime.generated.Projection { 7 | 8 | org.apache.flink.table.data.binary.BinaryRowData out = new org.apache.flink.table.data.binary.BinaryRowData(2); 9 | org.apache.flink.table.data.writer.BinaryRowWriter outWriter = 10 | new org.apache.flink.table.data.writer.BinaryRowWriter(out); 11 | 12 | public KeyProjection$301(Object[] references) throws Exception { 13 | 14 | } 15 | 16 | @Override 17 | public org.apache.flink.table.data.binary.BinaryRowData apply(org.apache.flink.table.data.RowData in1) { 18 | org.apache.flink.table.data.binary.BinaryStringData field$302; 19 | boolean isNull$302; 20 | int field$303; 21 | boolean isNull$303; 22 | 23 | 24 | outWriter.reset(); 25 | 26 | isNull$302 = in1.isNullAt(0); 27 | field$302 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8; 28 | if (!isNull$302) { 29 | field$302 = ((org.apache.flink.table.data.binary.BinaryStringData) in1.getString(0)); 30 | } 31 | if (isNull$302) { 32 | outWriter.setNullAt(0); 33 | } else { 34 | outWriter.writeString(0, field$302); 35 | } 36 | 37 | 38 | isNull$303 = in1.isNullAt(1); 39 | field$303 = -1; 40 | if (!isNull$303) { 41 | field$303 = in1.getInt(1); 42 | } 43 | if (isNull$303) { 44 | outWriter.setNullAt(1); 45 | } else { 46 | outWriter.writeInt(1, field$303); 47 | } 48 | 49 | outWriter.complete(); 50 | 51 | 52 | return out; 53 | } 54 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_04_window_agg/_02_cumulate_window/cumulate/local_agg/KeyProjection$89.java: -------------------------------------------------------------------------------- 1 | package 
flink.examples.sql._07.query._04_window_agg._02_cumulate_window.cumulate.local_agg; 2 | 3 | 4 | public class KeyProjection$89 implements 5 | org.apache.flink.table.runtime.generated.Projection { 7 | 8 | org.apache.flink.table.data.binary.BinaryRowData out = new org.apache.flink.table.data.binary.BinaryRowData(2); 9 | org.apache.flink.table.data.writer.BinaryRowWriter outWriter = 10 | new org.apache.flink.table.data.writer.BinaryRowWriter(out); 11 | 12 | public KeyProjection$89(Object[] references) throws Exception { 13 | 14 | } 15 | 16 | @Override 17 | public org.apache.flink.table.data.binary.BinaryRowData apply(org.apache.flink.table.data.RowData in1) { 18 | org.apache.flink.table.data.binary.BinaryStringData field$90; 19 | boolean isNull$90; 20 | int field$91; 21 | boolean isNull$91; 22 | 23 | 24 | outWriter.reset(); 25 | 26 | isNull$90 = in1.isNullAt(0); 27 | field$90 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8; 28 | if (!isNull$90) { 29 | field$90 = ((org.apache.flink.table.data.binary.BinaryStringData) in1.getString(0)); 30 | } 31 | if (isNull$90) { 32 | outWriter.setNullAt(0); 33 | } else { 34 | outWriter.writeString(0, field$90); 35 | } 36 | 37 | 38 | isNull$91 = in1.isNullAt(1); 39 | field$91 = -1; 40 | if (!isNull$91) { 41 | field$91 = in1.getInt(1); 42 | } 43 | if (isNull$91) { 44 | outWriter.setNullAt(1); 45 | } else { 46 | outWriter.writeInt(1, field$91); 47 | } 48 | 49 | outWriter.complete(); 50 | 51 | 52 | return out; 53 | } 54 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_05_over/_01_row_number/Scalar_UDF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._05_over._01_row_number; 2 | 3 | import org.apache.flink.table.functions.FunctionContext; 4 | import org.apache.flink.table.functions.ScalarFunction; 5 | 6 | 7 | public class Scalar_UDF extends ScalarFunction { 8 | 9 | @Override 10 | public void open(FunctionContext context) throws Exception { 11 | super.open(context); 12 | 13 | 14 | } 15 | 16 | public int eval(Long id, int remainder) { 17 | return (int) (id % remainder); 18 | } 19 | 20 | } 21 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_05_over/_02_agg/RangeIntervalProctimeTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._05_over._02_agg; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class RangeIntervalProctimeTest { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(new String[]{"--enable.hive.module.v2", "false"}); 13 | 14 | flinkEnv.env().setParallelism(1); 15 | 16 | String sql = "CREATE TABLE source_table (\n" 17 | + " order_id BIGINT,\n" 18 | + " product BIGINT,\n" 19 | + " amount BIGINT,\n" 20 | + " order_time as PROCTIME()\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'datagen',\n" 23 | + " 'rows-per-second' = '1',\n" 24 | + " 'fields.order_id.min' = '1',\n" 25 | + " 'fields.order_id.max' = '2',\n" 26 | + " 'fields.amount.min' = '1',\n" 27 | + " 'fields.amount.max' = '10',\n" 28 | + " 'fields.product.min' = '1',\n" 29 | + " 'fields.product.max' = '2'\n" 30 | + ");\n" 31 | + "\n" 32 | + "CREATE TABLE sink_table 
(\n" 33 | + " product BIGINT,\n" 34 | + " order_time TIMESTAMP(3),\n" 35 | + " amount BIGINT,\n" 36 | + " one_hour_prod_amount_sum BIGINT\n" 37 | + ") WITH (\n" 38 | + " 'connector' = 'print'\n" 39 | + ");\n" 40 | + "\n" 41 | + "INSERT INTO sink_table\n" 42 | + "SELECT product, order_time, amount,\n" 43 | + " SUM(amount) OVER (\n" 44 | + " PARTITION BY product\n" 45 | + " ORDER BY order_time\n" 46 | + " RANGE BETWEEN INTERVAL '1' HOUR PRECEDING AND CURRENT ROW\n" 47 | + " ) AS one_hour_prod_amount_sum\n" 48 | + "FROM source_table"; 49 | 50 | Arrays.stream(sql.split(";")) 51 | .forEach(flinkEnv.streamTEnv()::executeSql); 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_05_over/_02_agg/RangeIntervalRowtimeAscendingTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._05_over._02_agg; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class RangeIntervalRowtimeAscendingTest { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(new String[]{"--enable.hive.module.v2", "false"}); 13 | 14 | flinkEnv.env().setParallelism(1); 15 | 16 | String sql = "CREATE TABLE source_table (\n" 17 | + " order_id BIGINT,\n" 18 | + " product BIGINT,\n" 19 | + " amount BIGINT,\n" 20 | + " order_time as cast(CURRENT_TIMESTAMP as TIMESTAMP(3)),\n" 21 | + " WATERMARK FOR order_time AS order_time - INTERVAL '0.001' SECOND\n" 22 | + ") WITH (\n" 23 | + " 'connector' = 'datagen',\n" 24 | + " 'rows-per-second' = '1',\n" 25 | + " 'fields.order_id.min' = '1',\n" 26 | + " 'fields.order_id.max' = '2',\n" 27 | + " 'fields.amount.min' = '1',\n" 28 | + " 'fields.amount.max' = '10',\n" 29 | + " 'fields.product.min' = '1',\n" 30 | + " 'fields.product.max' = '2'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE sink_table (\n" 34 | + " product BIGINT,\n" 35 | + " order_time TIMESTAMP(3),\n" 36 | + " amount BIGINT,\n" 37 | + " one_hour_prod_amount_sum BIGINT\n" 38 | + ") WITH (\n" 39 | + " 'connector' = 'print'\n" 40 | + ");\n" 41 | + "\n" 42 | + "INSERT INTO sink_table\n" 43 | + "SELECT product, order_time, amount,\n" 44 | + " SUM(amount) OVER (\n" 45 | + " PARTITION BY product\n" 46 | + " ORDER BY order_time\n" 47 | + " RANGE BETWEEN INTERVAL '1' HOUR PRECEDING AND CURRENT ROW\n" 48 | + " ) AS one_hour_prod_amount_sum\n" 49 | + "FROM source_table"; 50 | 51 | Arrays.stream(sql.split(";")) 52 | .forEach(flinkEnv.streamTEnv()::executeSql); 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_05_over/_02_agg/RangeIntervalRowtimeBoundedOutOfOrdernessTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._05_over._02_agg; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class RangeIntervalRowtimeBoundedOutOfOrdernessTest { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(new String[]{"--enable.hive.module.v2", "false"}); 13 | 14 | flinkEnv.env().setParallelism(1); 15 | 16 | String sql = "CREATE TABLE source_table 
(\n" 17 | + " order_id BIGINT,\n" 18 | + " product BIGINT,\n" 19 | + " amount BIGINT,\n" 20 | + " order_time as cast(CURRENT_TIMESTAMP as TIMESTAMP(3)),\n" 21 | + " WATERMARK FOR order_time AS order_time - INTERVAL '10' SECOND\n" 22 | + ") WITH (\n" 23 | + " 'connector' = 'datagen',\n" 24 | + " 'rows-per-second' = '1',\n" 25 | + " 'fields.order_id.min' = '1',\n" 26 | + " 'fields.order_id.max' = '2',\n" 27 | + " 'fields.amount.min' = '1',\n" 28 | + " 'fields.amount.max' = '10',\n" 29 | + " 'fields.product.min' = '1',\n" 30 | + " 'fields.product.max' = '2'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE sink_table (\n" 34 | + " product BIGINT,\n" 35 | + " order_time TIMESTAMP(3),\n" 36 | + " amount BIGINT,\n" 37 | + " one_hour_prod_amount_sum BIGINT\n" 38 | + ") WITH (\n" 39 | + " 'connector' = 'print'\n" 40 | + ");\n" 41 | + "\n" 42 | + "INSERT INTO sink_table\n" 43 | + "SELECT product, order_time, amount,\n" 44 | + " SUM(amount) OVER (\n" 45 | + " PARTITION BY product\n" 46 | + " ORDER BY order_time\n" 47 | + " RANGE BETWEEN INTERVAL '1' HOUR PRECEDING AND CURRENT ROW\n" 48 | + " ) AS one_hour_prod_amount_sum\n" 49 | + "FROM source_table"; 50 | 51 | Arrays.stream(sql.split(";")) 52 | .forEach(flinkEnv.streamTEnv()::executeSql); 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_05_over/_02_agg/RangeIntervalRowtimeStrictlyAscendingTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._05_over._02_agg; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class RangeIntervalRowtimeStrictlyAscendingTest { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(new String[]{"--enable.hive.module.v2", "false"}); 13 | 14 | flinkEnv.env().setParallelism(1); 15 | 16 | String sql = "CREATE TABLE source_table (\n" 17 | + " order_id BIGINT,\n" 18 | + " product BIGINT,\n" 19 | + " amount BIGINT,\n" 20 | + " order_time as cast(CURRENT_TIMESTAMP as TIMESTAMP(3)),\n" 21 | + " WATERMARK FOR order_time AS order_time\n" 22 | + ") WITH (\n" 23 | + " 'connector' = 'datagen',\n" 24 | + " 'rows-per-second' = '1',\n" 25 | + " 'fields.order_id.min' = '1',\n" 26 | + " 'fields.order_id.max' = '2',\n" 27 | + " 'fields.amount.min' = '1',\n" 28 | + " 'fields.amount.max' = '10',\n" 29 | + " 'fields.product.min' = '1',\n" 30 | + " 'fields.product.max' = '2'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE sink_table (\n" 34 | + " product BIGINT,\n" 35 | + " order_time TIMESTAMP(3),\n" 36 | + " amount BIGINT,\n" 37 | + " one_hour_prod_amount_sum BIGINT\n" 38 | + ") WITH (\n" 39 | + " 'connector' = 'print'\n" 40 | + ");\n" 41 | + "\n" 42 | + "INSERT INTO sink_table\n" 43 | + "SELECT product, order_time, amount,\n" 44 | + " SUM(amount) OVER (\n" 45 | + " PARTITION BY product\n" 46 | + " ORDER BY order_time\n" 47 | + " RANGE BETWEEN INTERVAL '1' HOUR PRECEDING AND CURRENT ROW\n" 48 | + " ) AS one_hour_prod_amount_sum\n" 49 | + "FROM source_table"; 50 | 51 | Arrays.stream(sql.split(";")) 52 | .forEach(flinkEnv.streamTEnv()::executeSql); 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_05_over/_02_agg/RowIntervalTest.java: 
-------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._05_over._02_agg; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | public class RowIntervalTest { 9 | 10 | public static void main(String[] args) throws Exception { 11 | 12 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(new String[]{"--enable.hive.module.v2", "false"}); 13 | 14 | flinkEnv.env().setParallelism(1); 15 | 16 | String sql = "CREATE TABLE source_table (\n" 17 | + " order_id BIGINT,\n" 18 | + " product BIGINT,\n" 19 | + " amount BIGINT,\n" 20 | + " order_time as cast(CURRENT_TIMESTAMP as TIMESTAMP(3)),\n" 21 | + " WATERMARK FOR order_time AS order_time - INTERVAL '0.001' SECOND\n" 22 | + ") WITH (\n" 23 | + " 'connector' = 'datagen',\n" 24 | + " 'rows-per-second' = '1',\n" 25 | + " 'fields.order_id.min' = '1',\n" 26 | + " 'fields.order_id.max' = '2',\n" 27 | + " 'fields.amount.min' = '1',\n" 28 | + " 'fields.amount.max' = '2',\n" 29 | + " 'fields.product.min' = '1',\n" 30 | + " 'fields.product.max' = '2'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE sink_table (\n" 34 | + " product BIGINT,\n" 35 | + " order_time TIMESTAMP(3),\n" 36 | + " amount BIGINT,\n" 37 | + " one_hour_prod_amount_sum BIGINT\n" 38 | + ") WITH (\n" 39 | + " 'connector' = 'print'\n" 40 | + ");\n" 41 | + "\n" 42 | + "INSERT INTO sink_table\n" 43 | + "SELECT product, order_time, amount,\n" 44 | + " SUM(amount) OVER (\n" 45 | + " PARTITION BY product\n" 46 | + " ORDER BY order_time\n" 47 | + " ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n" 48 | + " ) AS one_hour_prod_amount_sum\n" 49 | + "FROM source_table"; 50 | 51 | Arrays.stream(sql.split(";")) 52 | .forEach(flinkEnv.streamTEnv()::executeSql); 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_06_joins/_01_regular_joins/_01_inner_join/ConditionFunction$4.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._06_joins._01_regular_joins._01_inner_join; 2 | 3 | 4 | public class ConditionFunction$4 extends org.apache.flink.api.common.functions.AbstractRichFunction 5 | implements org.apache.flink.table.runtime.generated.JoinCondition { 6 | 7 | 8 | public ConditionFunction$4(Object[] references) throws Exception { 9 | } 10 | 11 | 12 | @Override 13 | public void open(org.apache.flink.configuration.Configuration parameters) throws Exception { 14 | 15 | } 16 | 17 | @Override 18 | public boolean apply(org.apache.flink.table.data.RowData in1, org.apache.flink.table.data.RowData in2) { 19 | 20 | 21 | return true; 22 | } 23 | 24 | @Override 25 | public void close() throws Exception { 26 | super.close(); 27 | 28 | } 29 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_08_datastream_trans/AlertExampleRetract.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._08_datastream_trans; 2 | 3 | import org.apache.flink.api.common.functions.FlatMapFunction; 4 | import org.apache.flink.api.java.tuple.Tuple2; 5 | import org.apache.flink.table.api.Table; 6 | import org.apache.flink.types.Row; 7 | import org.apache.flink.util.Collector; 8 | 9 | import flink.examples.FlinkEnvUtils; 10 | import 
flink.examples.FlinkEnvUtils.FlinkEnv; 11 | import lombok.extern.slf4j.Slf4j; 12 | 13 | @Slf4j 14 | public class AlertExampleRetract { 15 | 16 | public static void main(String[] args) throws Exception { 17 | 18 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 19 | 20 | String createTableSql = "CREATE TABLE source_table (\n" 21 | + " id BIGINT,\n" 22 | + " money BIGINT,\n" 23 | + " `time` as cast(CURRENT_TIMESTAMP as bigint) * 1000\n" 24 | + ") WITH (\n" 25 | + " 'connector' = 'datagen',\n" 26 | + " 'rows-per-second' = '1',\n" 27 | + " 'fields.id.min' = '1',\n" 28 | + " 'fields.id.max' = '100000',\n" 29 | + " 'fields.money.min' = '1',\n" 30 | + " 'fields.money.max' = '100000'\n" 31 | + ")\n"; 32 | 33 | String querySql = "SELECT max(`time`), \n" 34 | + " sum(money) as sum_money\n" 35 | + "FROM source_table\n" 36 | + "GROUP BY (`time` + 8 * 3600 * 1000) / (24 * 3600 * 1000)"; 37 | 38 | flinkEnv.streamTEnv().executeSql(createTableSql); 39 | 40 | Table resultTable = flinkEnv.streamTEnv().sqlQuery(querySql); 41 | 42 | flinkEnv.streamTEnv() 43 | .toRetractStream(resultTable, Row.class) 44 | .flatMap(new FlatMapFunction, Object>() { 45 | @Override 46 | public void flatMap(Tuple2 value, Collector out) throws Exception { 47 | long l = Long.parseLong(String.valueOf(value.f1.getField("sum_money"))); 48 | 49 | if (l > 10000L) { 50 | log.info("报警,超过 1w"); 51 | } 52 | } 53 | }); 54 | 55 | flinkEnv.env().execute(); 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_08_datastream_trans/AlertExampleRetractError.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._08_datastream_trans; 2 | 3 | import org.apache.flink.api.common.functions.FlatMapFunction; 4 | import org.apache.flink.table.api.Table; 5 | import org.apache.flink.types.Row; 6 | import org.apache.flink.util.Collector; 7 | 8 | import flink.examples.FlinkEnvUtils; 9 | import flink.examples.FlinkEnvUtils.FlinkEnv; 10 | import lombok.extern.slf4j.Slf4j; 11 | 12 | @Slf4j 13 | public class AlertExampleRetractError { 14 | 15 | public static void main(String[] args) throws Exception { 16 | 17 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 18 | 19 | String createTableSql = "CREATE TABLE source_table (\n" 20 | + " id BIGINT,\n" 21 | + " money BIGINT,\n" 22 | + " `time` as cast(CURRENT_TIMESTAMP as bigint) * 1000\n" 23 | + ") WITH (\n" 24 | + " 'connector' = 'datagen',\n" 25 | + " 'rows-per-second' = '1',\n" 26 | + " 'fields.id.min' = '1',\n" 27 | + " 'fields.id.max' = '100000',\n" 28 | + " 'fields.money.min' = '1',\n" 29 | + " 'fields.money.max' = '100000'\n" 30 | + ")\n"; 31 | 32 | String querySql = "SELECT max(`time`), \n" 33 | + " sum(money) as sum_money\n" 34 | + "FROM source_table\n" 35 | + "GROUP BY (`time` + 8 * 3600 * 1000) / (24 * 3600 * 1000)"; 36 | 37 | flinkEnv.streamTEnv().executeSql(createTableSql); 38 | 39 | Table resultTable = flinkEnv.streamTEnv().sqlQuery(querySql); 40 | 41 | flinkEnv.streamTEnv() 42 | .toDataStream(resultTable, Row.class) 43 | .flatMap(new FlatMapFunction() { 44 | @Override 45 | public void flatMap(Row value, Collector out) throws Exception { 46 | long l = Long.parseLong(String.valueOf(value.getField("sum_money"))); 47 | 48 | if (l > 10000L) { 49 | log.info("报警,超过 1w"); 50 | } 51 | } 52 | }); 53 | 54 | flinkEnv.env().execute(); 55 | } 56 | 57 | } 58 | 
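Taken together, AlertExampleRetract and AlertExampleRetractError show why the GROUP BY result must be consumed as a retract stream: the Error variant is expected to fail because toDataStream only supports insert-only results, while toRetractStream delivers every update as a (retract, accumulate) pair. Note that in AlertExampleRetract both halves of that pair reach the flatMap, so a single update can log the alert twice. Below is a minimal sketch of the alert branch that reacts only to accumulate messages; it assumes the same flinkEnv, resultTable, log field and imports as AlertExampleRetract and is not part of the repository.

// Sketch only: drop-in replacement for the flatMap in AlertExampleRetract's main().
flinkEnv.streamTEnv()
        .toRetractStream(resultTable, Row.class)
        .flatMap(new FlatMapFunction<Tuple2<Boolean, Row>, Object>() {
            @Override
            public void flatMap(Tuple2<Boolean, Row> value, Collector<Object> out) throws Exception {
                if (!value.f0) {
                    // f0 == false marks the retraction of a previously emitted row; skip it so
                    // each GROUP BY update triggers at most one alert.
                    return;
                }
                long sumMoney = Long.parseLong(String.valueOf(value.f1.getField("sum_money")));
                if (sumMoney > 10000L) {
                    log.info("Alert: daily sum_money exceeds 10,000");
                }
            }
        });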
-------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_08_datastream_trans/RetractExample.java: -------------------------------------------------------------------------------- 1 | //package flink.examples.sql._07.query._08_datastream_trans; 2 | // 3 | //import org.apache.flink.api.java.tuple.Tuple2; 4 | //import org.apache.flink.streaming.api.datastream.DataStream; 5 | //import org.apache.flink.table.api.Table; 6 | //import org.apache.flink.types.Row; 7 | // 8 | //import flink.examples.FlinkEnvUtils; 9 | //import flink.examples.FlinkEnvUtils.FlinkEnv; 10 | //import lombok.extern.slf4j.Slf4j; 11 | // 12 | //@Slf4j 13 | //public class RetractExample { 14 | // 15 | // public static void main(String[] args) throws Exception { 16 | // 17 | // FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 18 | // 19 | // String createTableSql = "CREATE TABLE source_table (\n" 20 | // + " id BIGINT,\n" 21 | // + " money BIGINT,\n" 22 | // + " `time` as cast(CURRENT_TIMESTAMP as bigint) * 1000\n" 23 | // + ") WITH (\n" 24 | // + " 'connector' = 'datagen',\n" 25 | // + " 'rows-per-second' = '1',\n" 26 | // + " 'fields.id.min' = '1',\n" 27 | // + " 'fields.id.max' = '100000',\n" 28 | // + " 'fields.money.min' = '1',\n" 29 | // + " 'fields.money.max' = '100000'\n" 30 | // + ")\n"; 31 | // 32 | // String querySql = "SELECT max(`time`), \n" 33 | // + " sum(money) as sum_money\n" 34 | // + "FROM source_table\n" 35 | // + "GROUP BY (`time` + 8 * 3600 * 1000) / (24 * 3600 * 1000)"; 36 | // 37 | // flinkEnv.streamTEnv().executeSql(createTableSql); 38 | // 39 | // Table resultTable = flinkEnv.streamTEnv().sqlQuery(querySql); 40 | // 41 | // DataStream> d = flinkEnv.streamTEnv() 42 | // .toChangelogStream(resultTable, Row.class); 43 | // 44 | // flinkEnv.streamTEnv().from 45 | // 46 | // flinkEnv.env().execute(); 47 | // } 48 | // 49 | //} 50 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_09_set_operations/Except_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._09_set_operations; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class Except_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table_1 (\n" 16 | + " user_id BIGINT NOT NULL\n" 17 | + ") WITH (\n" 18 | + " 'connector' = 'datagen',\n" 19 | + " 'rows-per-second' = '10',\n" 20 | + " 'fields.user_id.min' = '1',\n" 21 | + " 'fields.user_id.max' = '10'\n" 22 | + ");\n" 23 | + "\n" 24 | + "CREATE TABLE source_table_2 (\n" 25 | + " user_id BIGINT NOT NULL\n" 26 | + ") WITH (\n" 27 | + " 'connector' = 'datagen',\n" 28 | + " 'rows-per-second' = '10',\n" 29 | + " 'fields.user_id.min' = '1',\n" 30 | + " 'fields.user_id.max' = '10'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE sink_table (\n" 34 | + " user_id BIGINT\n" 35 | + ") WITH (\n" 36 | + " 'connector' = 'print'\n" 37 | + ");\n" 38 | + "\n" 39 | + "INSERT INTO sink_table\n" 40 | + "SELECT user_id\n" 41 | + "FROM source_table_1\n" 42 | + "Except\n" 43 | + "SELECT user_id\n" 44 | + "FROM source_table_2\n"; 45 | 46 | Arrays.stream(sql.split(";")) 47 | .forEach(flinkEnv.streamTEnv()::executeSql); 48 | } 
49 | 50 | 51 | } 52 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_09_set_operations/Exist_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._09_set_operations; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class Exist_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table_1 (\n" 16 | + " user_id BIGINT NOT NULL\n" 17 | + ") WITH (\n" 18 | + " 'connector' = 'datagen',\n" 19 | + " 'rows-per-second' = '10',\n" 20 | + " 'fields.user_id.min' = '1',\n" 21 | + " 'fields.user_id.max' = '10'\n" 22 | + ");\n" 23 | + "\n" 24 | + "CREATE TABLE source_table_2 (\n" 25 | + " user_id BIGINT NOT NULL\n" 26 | + ") WITH (\n" 27 | + " 'connector' = 'datagen',\n" 28 | + " 'rows-per-second' = '10',\n" 29 | + " 'fields.user_id.min' = '1',\n" 30 | + " 'fields.user_id.max' = '10'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE sink_table (\n" 34 | + " user_id BIGINT\n" 35 | + ") WITH (\n" 36 | + " 'connector' = 'print'\n" 37 | + ");\n" 38 | + "\n" 39 | + "INSERT INTO sink_table\n" 40 | + "SELECT user_id\n" 41 | + "FROM source_table_1\n" 42 | + "WHERE user_id EXISTS (\n" 43 | + " SELECT user_id\n" 44 | + " FROM source_table_2\n" 45 | + ")\n"; 46 | 47 | Arrays.stream(sql.split(";")) 48 | .forEach(flinkEnv.streamTEnv()::executeSql); 49 | } 50 | 51 | 52 | } 53 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_09_set_operations/In_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._09_set_operations; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class In_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table_1 (\n" 16 | + " user_id BIGINT NOT NULL\n" 17 | + ") WITH (\n" 18 | + " 'connector' = 'datagen',\n" 19 | + " 'rows-per-second' = '10',\n" 20 | + " 'fields.user_id.min' = '1',\n" 21 | + " 'fields.user_id.max' = '10'\n" 22 | + ");\n" 23 | + "\n" 24 | + "CREATE TABLE source_table_2 (\n" 25 | + " user_id BIGINT NOT NULL\n" 26 | + ") WITH (\n" 27 | + " 'connector' = 'datagen',\n" 28 | + " 'rows-per-second' = '10',\n" 29 | + " 'fields.user_id.min' = '1',\n" 30 | + " 'fields.user_id.max' = '10'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE sink_table (\n" 34 | + " user_id BIGINT\n" 35 | + ") WITH (\n" 36 | + " 'connector' = 'print'\n" 37 | + ");\n" 38 | + "\n" 39 | + "INSERT INTO sink_table\n" 40 | + "SELECT user_id\n" 41 | + "FROM source_table_1\n" 42 | + "WHERE user_id in (\n" 43 | + " SELECT user_id\n" 44 | + " FROM source_table_2\n" 45 | + ")\n"; 46 | 47 | Arrays.stream(sql.split(";")) 48 | .forEach(flinkEnv.streamTEnv()::executeSql); 49 | } 50 | 51 | 52 | } 53 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_09_set_operations/Intersect_Test.java: 
-------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._09_set_operations; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class Intersect_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table_1 (\n" 16 | + " user_id BIGINT NOT NULL\n" 17 | + ") WITH (\n" 18 | + " 'connector' = 'datagen',\n" 19 | + " 'rows-per-second' = '10',\n" 20 | + " 'fields.user_id.min' = '1',\n" 21 | + " 'fields.user_id.max' = '10'\n" 22 | + ");\n" 23 | + "\n" 24 | + "CREATE TABLE source_table_2 (\n" 25 | + " user_id BIGINT NOT NULL\n" 26 | + ") WITH (\n" 27 | + " 'connector' = 'datagen',\n" 28 | + " 'rows-per-second' = '10',\n" 29 | + " 'fields.user_id.min' = '1',\n" 30 | + " 'fields.user_id.max' = '10'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE sink_table (\n" 34 | + " user_id BIGINT\n" 35 | + ") WITH (\n" 36 | + " 'connector' = 'print'\n" 37 | + ");\n" 38 | + "\n" 39 | + "INSERT INTO sink_table\n" 40 | + "SELECT user_id\n" 41 | + "FROM source_table_1\n" 42 | + "INTERSECT\n" 43 | + "SELECT user_id\n" 44 | + "FROM source_table_2\n"; 45 | 46 | Arrays.stream(sql.split(";")) 47 | .forEach(flinkEnv.streamTEnv()::executeSql); 48 | } 49 | 50 | 51 | } 52 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_09_set_operations/UnionAll_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._09_set_operations; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class UnionAll_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table_1 (\n" 16 | + " user_id BIGINT NOT NULL,\n" 17 | + " name STRING,\n" 18 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 19 | + " WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" 20 | + ") WITH (\n" 21 | + " 'connector' = 'datagen',\n" 22 | + " 'rows-per-second' = '10',\n" 23 | + " 'fields.name.length' = '1',\n" 24 | + " 'fields.user_id.min' = '1',\n" 25 | + " 'fields.user_id.max' = '10'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE source_table_2 (\n" 29 | + " user_id BIGINT NOT NULL,\n" 30 | + " name STRING,\n" 31 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 32 | + " WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" 33 | + ") WITH (\n" 34 | + " 'connector' = 'datagen',\n" 35 | + " 'rows-per-second' = '10',\n" 36 | + " 'fields.name.length' = '1',\n" 37 | + " 'fields.user_id.min' = '1',\n" 38 | + " 'fields.user_id.max' = '10'\n" 39 | + ");\n" 40 | + "\n" 41 | + "CREATE TABLE sink_table (\n" 42 | + " user_id BIGINT\n" 43 | + ") WITH (\n" 44 | + " 'connector' = 'print'\n" 45 | + ");\n" 46 | + "\n" 47 | + "INSERT INTO sink_table\n" 48 | + "SELECT user_id\n" 49 | + "FROM source_table_1\n" 50 | + "UNION ALL\n" 51 | + "SELECT user_id\n" 52 | + "FROM source_table_2\n"; 53 | 54 | Arrays.stream(sql.split(";")) 55 | .forEach(flinkEnv.streamTEnv()::executeSql); 56 | } 57 | 58 | 59 | } 60 | 
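UnionAll_Test above and Union_Test below differ only in the operator: UNION ALL appends the two inputs as-is, while UNION additionally removes duplicates, which in streaming mode is backed by state and yields an updating result. A small self-contained sketch with literal rows (not part of the repository; the class name and the VALUES data are made up, and it reuses the repo's FlinkEnvUtils helper) makes the difference visible:

import flink.examples.FlinkEnvUtils;
import flink.examples.FlinkEnvUtils.FlinkEnv;

public class UnionSemanticsSketch {

    public static void main(String[] args) throws Exception {

        FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args);

        // UNION ALL keeps duplicates: emits 1, 2, 2, 2, 3 (order may vary).
        flinkEnv.streamTEnv().executeSql(
                "SELECT user_id FROM (VALUES (1), (2), (2)) AS t1(user_id)\n"
                        + "UNION ALL\n"
                        + "SELECT user_id FROM (VALUES (2), (3)) AS t2(user_id)")
                .print();

        // UNION deduplicates: emits 1, 2, 3 (order may vary); on an unbounded input this
        // would be an updating result maintained in state.
        flinkEnv.streamTEnv().executeSql(
                "SELECT user_id FROM (VALUES (1), (2), (2)) AS t1(user_id)\n"
                        + "UNION\n"
                        + "SELECT user_id FROM (VALUES (2), (3)) AS t2(user_id)")
                .print();
    }

}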
-------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_09_set_operations/Union_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._09_set_operations; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class Union_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table_1 (\n" 16 | + " user_id BIGINT NOT NULL,\n" 17 | + " name STRING,\n" 18 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 19 | + " WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" 20 | + ") WITH (\n" 21 | + " 'connector' = 'datagen',\n" 22 | + " 'rows-per-second' = '10',\n" 23 | + " 'fields.name.length' = '1',\n" 24 | + " 'fields.user_id.min' = '1',\n" 25 | + " 'fields.user_id.max' = '10'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE source_table_2 (\n" 29 | + " user_id BIGINT NOT NULL,\n" 30 | + " name STRING,\n" 31 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 32 | + " WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" 33 | + ") WITH (\n" 34 | + " 'connector' = 'datagen',\n" 35 | + " 'rows-per-second' = '10',\n" 36 | + " 'fields.name.length' = '1',\n" 37 | + " 'fields.user_id.min' = '1',\n" 38 | + " 'fields.user_id.max' = '10'\n" 39 | + ");\n" 40 | + "\n" 41 | + "CREATE TABLE sink_table (\n" 42 | + " user_id BIGINT\n" 43 | + ") WITH (\n" 44 | + " 'connector' = 'print'\n" 45 | + ");\n" 46 | + "\n" 47 | + "INSERT INTO sink_table\n" 48 | + "SELECT user_id\n" 49 | + "FROM source_table_1\n" 50 | + "UNION\n" 51 | + "SELECT user_id\n" 52 | + "FROM source_table_2\n"; 53 | 54 | Arrays.stream(sql.split(";")) 55 | .forEach(flinkEnv.streamTEnv()::executeSql); 56 | } 57 | 58 | 59 | } 60 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_10_order_by/OrderBy_with_time_attr_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._10_order_by; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class OrderBy_with_time_attr_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table_1 (\n" 16 | + " user_id BIGINT NOT NULL,\n" 17 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 18 | + " WATERMARK FOR row_time AS row_time\n" 19 | + ") WITH (\n" 20 | + " 'connector' = 'datagen',\n" 21 | + " 'rows-per-second' = '10',\n" 22 | + " 'fields.user_id.min' = '1',\n" 23 | + " 'fields.user_id.max' = '10'\n" 24 | + ");\n" 25 | + "\n" 26 | + "CREATE TABLE sink_table (\n" 27 | + " user_id BIGINT\n" 28 | + ") WITH (\n" 29 | + " 'connector' = 'print'\n" 30 | + ");\n" 31 | + "\n" 32 | + "INSERT INTO sink_table\n" 33 | + "SELECT user_id\n" 34 | + "FROM source_table_1\n" 35 | + "Order By row_time, user_id desc\n"; 36 | 37 | Arrays.stream(sql.split(";")) 38 | .forEach(flinkEnv.streamTEnv()::executeSql); 39 | } 40 | 41 | 42 | } 
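OrderBy_with_time_attr_Test works because the leading ORDER BY key is an ascending time attribute (row_time); additional keys such as user_id DESC are only allowed after it, and on an unbounded stream the planner rejects a sort whose leading key is a regular column. A sketch of the accepted and rejected variants is shown below (not part of the repository; the class name is made up and the table definition is trimmed down from the test above):

import flink.examples.FlinkEnvUtils;
import flink.examples.FlinkEnvUtils.FlinkEnv;

public class OrderByTimeAttrSketch {

    public static void main(String[] args) throws Exception {

        FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args);

        flinkEnv.streamTEnv().executeSql(
                "CREATE TABLE source_table_1 (\n"
                        + "  user_id BIGINT NOT NULL,\n"
                        + "  row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n"
                        + "  WATERMARK FOR row_time AS row_time\n"
                        + ") WITH ('connector' = 'datagen')");

        // Accepted: the leading sort key is the ascending time attribute.
        System.out.println(flinkEnv.streamTEnv().explainSql(
                "SELECT user_id FROM source_table_1 ORDER BY row_time, user_id DESC"));

        try {
            // Rejected on an unbounded stream: the leading sort key is a regular column.
            flinkEnv.streamTEnv().explainSql(
                    "SELECT user_id FROM source_table_1 ORDER BY user_id DESC");
        } catch (Exception e) {
            System.out.println("rejected by the planner: " + e.getMessage());
        }
    }

}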
-------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_11_limit/Limit_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._11_limit; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class Limit_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table_1 (\n" 16 | + " user_id BIGINT NOT NULL,\n" 17 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 18 | + " WATERMARK FOR row_time AS row_time\n" 19 | + ") WITH (\n" 20 | + " 'connector' = 'datagen',\n" 21 | + " 'rows-per-second' = '10',\n" 22 | + " 'fields.user_id.min' = '1',\n" 23 | + " 'fields.user_id.max' = '10'\n" 24 | + ");\n" 25 | + "\n" 26 | + "CREATE TABLE sink_table (\n" 27 | + " user_id BIGINT\n" 28 | + ") WITH (\n" 29 | + " 'connector' = 'print'\n" 30 | + ");\n" 31 | + "\n" 32 | + "INSERT INTO sink_table\n" 33 | + "SELECT user_id\n" 34 | + "FROM source_table_1\n" 35 | + "Limit 3\n"; 36 | 37 | Arrays.stream(sql.split(";")) 38 | .forEach(flinkEnv.streamTEnv()::executeSql); 39 | } 40 | 41 | 42 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_07/query/_12_topn/TopN_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._07.query._12_topn; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class TopN_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | String sql = "CREATE TABLE source_table (\n" 16 | + " name BIGINT NOT NULL,\n" 17 | + " search_cnt BIGINT NOT NULL,\n" 18 | + " key BIGINT NOT NULL,\n" 19 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 20 | + " WATERMARK FOR row_time AS row_time\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'datagen',\n" 23 | + " 'rows-per-second' = '10',\n" 24 | + " 'fields.name.min' = '1',\n" 25 | + " 'fields.name.max' = '10',\n" 26 | + " 'fields.key.min' = '1',\n" 27 | + " 'fields.key.max' = '2',\n" 28 | + " 'fields.search_cnt.min' = '1000',\n" 29 | + " 'fields.search_cnt.max' = '10000'\n" 30 | + ");\n" 31 | + "\n" 32 | + "CREATE TABLE sink_table (\n" 33 | + " key BIGINT,\n" 34 | + " name BIGINT,\n" 35 | + " search_cnt BIGINT,\n" 36 | + " `timestamp` TIMESTAMP(3)\n" 37 | + ") WITH (\n" 38 | + " 'connector' = 'print'\n" 39 | + ");\n" 40 | + "\n" 41 | + "INSERT INTO sink_table\n" 42 | + "SELECT key, name, search_cnt, row_time as `timestamp`\n" 43 | + "FROM (\n" 44 | + " SELECT key, name, search_cnt, row_time, \n" 45 | + " ROW_NUMBER() OVER (PARTITION BY key\n" 46 | + " ORDER BY search_cnt desc) AS rownum\n" 47 | + " FROM source_table)\n" 48 | + "WHERE rownum <= 100\n"; 49 | 50 | Arrays.stream(sql.split(";")) 51 | .forEach(flinkEnv.streamTEnv()::executeSql); 52 | } 53 | 54 | 55 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/Utils.java: -------------------------------------------------------------------------------- 1 | package 
flink.examples.sql._08.batch; 2 | 3 | import java.util.regex.Pattern; 4 | 5 | public class Utils { 6 | 7 | public static String format(String sql) { 8 | 9 | // https://blog.csdn.net/qq_21383435/article/details/82286132 10 | 11 | Pattern p = Pattern.compile("(?ms)('(?:''|[^'])*')|--.*?$|/\\*.*?\\*/|#.*?$|"); 12 | return p.matcher(sql).replaceAll("$1"); 13 | } 14 | 15 | } 16 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_01_GenericUDAFResolver2/HiveUDAF_hive_module_registry_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._01_GenericUDAFResolver2; 2 | 3 | import java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDAF_hive_module_registry_Test { 18 | 19 | public static void main(String[] args) throws IOException { 20 | 21 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 22 | 23 | // TODO 可以成功执行没有任何问题 24 | flinkEnv.hiveModuleV2().registryHiveUDF("test_hive_udaf", TestHiveUDAF.class.getName()); 25 | 26 | String sql3 = "select test_hive_udaf(user_id)\n" 27 | + " , count(1) as part_pv\n" 28 | + " , max(order_amount) as part_max\n" 29 | + " , min(order_amount) as part_min\n" 30 | + " from hive_table\n" 31 | + " where p_date between '20210920' and '20210920'\n" 32 | + " group by 0"; 33 | 34 | flinkEnv.batchTEnv() 35 | .executeSql(sql3) 36 | .print(); 37 | } 38 | 39 | } 40 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_01_GenericUDAFResolver2/HiveUDAF_sql_registry_create_function_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._01_GenericUDAFResolver2; 2 | 3 | import java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDAF_sql_registry_create_function_Test { 18 | 19 | public static void main(String[] args) throws ClassNotFoundException, IOException { 20 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 21 | 22 | // TODO sql 执行创建 hive udaf 可以正常执行,create function 执行完成之后就会被注册到 hive catalog 中 23 | 24 | String sql2 = "CREATE FUNCTION test_hive_udaf as 'flink.examples.sql._08.batch._03_hive_udf._01_GenericUDAFResolver2.TestHiveUDAF'"; 25 | 26 | String sql3 = "select default.test_hive_udaf(user_id, '20210920')\n" 27 | + " , count(1) as part_pv\n" 28 | + " , max(order_amount) as part_max\n" 29 | + " , min(order_amount) as part_min\n" 30 | + " from hive_table\n" 31 | + " where p_date between '20210920' and '20210920'\n" 32 | + " group by 0"; 33 | 34 | flinkEnv.batchTEnv().executeSql(sql2); 35 | flinkEnv.batchTEnv().executeSql(sql3) 36 | .print(); 37 | } 38 | 39 | } 40 | 
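The two tests above register the same Hive UDAF in different ways: through the repository's custom hiveModuleV2() helper, and through CREATE FUNCTION, which persists the function into the current Hive catalog. The sketch below (not part of the repository; catalog name, database, hive-conf path and Hive version are placeholders, and this is presumably close to what the FlinkEnvUtils helper sets up) shows the equivalent setup with Flink's built-in HiveCatalog and HiveModule. By contrast, the CREATE TEMPORARY FUNCTION variant shown in the next class creates an in-memory (inline) catalog function, which, per the comments in these tests, is what makes it fail for the Hive UDAF and UDTF cases.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.table.module.hive.HiveModule;

public class HiveFunctionRegistrationSketch {

    public static void main(String[] args) {

        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build());

        // Placeholders: catalog name, default database and hive-site.xml directory must match
        // the local Hive installation; the Hive version must match the libraries on the classpath.
        HiveCatalog hiveCatalog = new HiveCatalog("myhive", "default", "/usr/local/hive/conf");
        tEnv.registerCatalog("myhive", hiveCatalog);
        tEnv.useCatalog("myhive");

        // Makes Hive built-in functions and Hive UDF semantics available to the planner.
        tEnv.loadModule("hive", new HiveModule("3.1.2"));

        // Persisted into the Hive catalog, so it survives the session and can be referenced
        // as default.test_hive_udaf exactly as in the tests above.
        tEnv.executeSql(
                "CREATE FUNCTION test_hive_udaf AS "
                        + "'flink.examples.sql._08.batch._03_hive_udf._01_GenericUDAFResolver2.TestHiveUDAF'");
    }

}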
-------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_01_GenericUDAFResolver2/HiveUDAF_sql_registry_create_temporary_function_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._01_GenericUDAFResolver2; 2 | 3 | import java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDAF_sql_registry_create_temporary_function_Test { 18 | 19 | public static void main(String[] args) throws ClassNotFoundException, IOException { 20 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 21 | 22 | // TODO sql 执行创建 hive udtf 会报错 23 | // java.lang.UnsupportedOperationException: This CatalogFunction is a InlineCatalogFunction. This method should not be called. 24 | // 因为 CREATE TEMPORARY FUNCTION 使用的是 inline catalog 25 | 26 | String sql2 = "CREATE TEMPORARY FUNCTION test_hive_udaf as 'flink.examples.sql._08.batch._03_hive_udf._01_GenericUDAFResolver2.TestHiveUDAF'"; 27 | 28 | String sql3 = "select test_hive_udaf(user_id, '20210920')\n" 29 | + " , count(1) as part_pv\n" 30 | + " , max(order_amount) as part_max\n" 31 | + " , min(order_amount) as part_min\n" 32 | + " from hive_table\n" 33 | + " where p_date between '20210920' and '20210920'\n" 34 | + " group by 0"; 35 | 36 | flinkEnv.batchTEnv().executeSql(sql2); 37 | flinkEnv.batchTEnv().executeSql(sql3) 38 | .print(); 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_02_GenericUDTF/HiveUDTF_hive_module_registry_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._02_GenericUDTF; 2 | 3 | import java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDTF_hive_module_registry_Test { 18 | 19 | public static void main(String[] args) throws IOException { 20 | 21 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 22 | 23 | // TODO 可以成功执行没有任何问题 24 | flinkEnv.hiveModuleV2().registryHiveUDF("test_hive_udtf", TestHiveUDTF.class.getName()); 25 | 26 | String sql3 = "select test_hive_udtf(user_id) as (a)\n" 27 | + " from hive_table\n" 28 | + " where p_date between '20210920' and '20210920'\n"; 29 | 30 | flinkEnv.batchTEnv() 31 | .executeSql(sql3) 32 | .print(); 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_02_GenericUDTF/HiveUDTF_sql_registry_create_function_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._02_GenericUDTF; 2 | 3 | import 
java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDTF_sql_registry_create_function_Test { 18 | 19 | public static void main(String[] args) throws ClassNotFoundException, IOException { 20 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 21 | 22 | // String sql = "drop function default.test_hive_udtf"; 23 | 24 | // TODO sql 执行正常,create function 使用的是 hive catalog 没有任何问题 25 | String sql2 = "CREATE FUNCTION test_hive_udtf as 'flink.examples.sql._08.batch._03_hive_udf._02_GenericUDTF.TestHiveUDTF'"; 26 | 27 | String sql3 = "select default.test_hive_udtf(user_id)\n" 28 | + " from hive_table\n" 29 | + " where p_date between '20210920' and '20210920'\n"; 30 | 31 | // flinkEnv.batchTEnv().executeSql(sql); 32 | flinkEnv.batchTEnv().executeSql(sql2); 33 | flinkEnv.batchTEnv().executeSql(sql3) 34 | .print(); 35 | } 36 | 37 | } 38 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_02_GenericUDTF/HiveUDTF_sql_registry_create_temporary_function_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._02_GenericUDTF; 2 | 3 | import java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDTF_sql_registry_create_temporary_function_Test { 18 | 19 | public static void main(String[] args) throws ClassNotFoundException, IOException { 20 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 21 | 22 | // TODO sql 执行创建 hive udtf 会报错 23 | // Caused by: java.lang.UnsupportedOperationException: This CatalogFunction is a InlineCatalogFunction. This method should not be called. 
24 | String sql2 = "CREATE TEMPORARY FUNCTION test_hive_udtf as 'flink.examples.sql._08.batch._03_hive_udf._02_GenericUDTF.TestHiveUDTF'"; 25 | 26 | String sql3 = "select default.test_hive_udtf(user_id)\n" 27 | + " from hive_table\n" 28 | + " where p_date between '20210920' and '20210920'\n"; 29 | 30 | flinkEnv.batchTEnv().executeSql(sql2); 31 | flinkEnv.batchTEnv().executeSql(sql3) 32 | .print(); 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_02_GenericUDTF/TestHiveUDTF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._02_GenericUDTF; 2 | 3 | import java.util.ArrayList; 4 | 5 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 6 | import org.apache.hadoop.hive.ql.metadata.HiveException; 7 | import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; 8 | import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; 9 | import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; 10 | import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; 11 | import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; 12 | 13 | public class TestHiveUDTF extends GenericUDTF { 14 | 15 | @Override 16 | public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException { 17 | ArrayList<String> fieldNames = new ArrayList<String>() {{ 18 | add("column1"); 19 | }}; 20 | ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>() {{ 21 | add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); 22 | }}; 23 | 24 | return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs); 25 | } 26 | 27 | @Override 28 | public void process(Object[] objects) throws HiveException { 29 | 30 | forward(objects[0]); 31 | forward(objects[0]); 32 | 33 | } 34 | 35 | @Override 36 | public void close() throws HiveException { 37 | 38 | } 39 | 40 | 41 | } 42 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_04_GenericUDF/HiveUDF_hive_module_registry_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._04_GenericUDF; 2 | 3 | import java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * Start hadoop: /usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * Start hive metastore: $HIVE_HOME/bin/hive --service metastore & 15 | * hive cli: $HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDF_hive_module_registry_Test { 18 | 19 | public static void main(String[] args) throws IOException { 20 | 21 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 22 | 23 | // TODO Executes fine: the function registered through the hive module can be used directly 24 | flinkEnv.hiveModuleV2().registryHiveUDF("test_hive_udf", TestGenericUDF.class.getName()); 25 | 26 | String sql3 = "select test_hive_udf(user_id)\n" 27 | + " from hive_table\n" 28 | + " where p_date between '20210920' and '20210920'\n"; 29 | 30 | flinkEnv.batchTEnv() 31 | .executeSql(sql3) 32 | .print(); 33 | } 34 | 35 | } 36 | --------------------------------------------------------------------------------
/flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_04_GenericUDF/HiveUDF_sql_registry_create_function_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._04_GenericUDF; 2 | 3 | import java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDF_sql_registry_create_function_Test { 18 | 19 | public static void main(String[] args) throws ClassNotFoundException, IOException { 20 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 21 | 22 | // TODO sql 执行创建 hive udf 可以正常执行,create function 执行完成之后就会被注册到 hive catalog 中 23 | String sql2 = "CREATE FUNCTION test_hive_udf as 'flink.examples.sql._08.batch._03_hive_udf._04_GenericUDF.TestGenericUDF'"; 24 | 25 | String sql3 = "select test_hive_udf(user_id)\n" 26 | + " from hive_table\n" 27 | + " where p_date between '20210920' and '20210920'\n"; 28 | 29 | flinkEnv.batchTEnv().executeSql(sql2); 30 | flinkEnv.batchTEnv().executeSql(sql3) 31 | .print(); 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_04_GenericUDF/HiveUDF_sql_registry_create_temporary_function_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._04_GenericUDF; 2 | 3 | import java.io.IOException; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDF_sql_registry_create_temporary_function_Test { 18 | 19 | public static void main(String[] args) throws ClassNotFoundException, IOException { 20 | FlinkEnv flinkEnv = FlinkEnvUtils.getBatchTableEnv(args); 21 | 22 | // TODO sql 执行创建 hive udf 可以正常执行 23 | String sql2 = "CREATE TEMPORARY FUNCTION test_hive_udf as 'flink.examples.sql._08.batch._03_hive_udf._04_GenericUDF.TestGenericUDF'"; 24 | 25 | String sql3 = "select test_hive_udf(user_id)\n" 26 | + " from hive_table\n" 27 | + " where p_date between '20210920' and '20210920'\n"; 28 | 29 | flinkEnv.batchTEnv().executeSql(sql2); 30 | flinkEnv.batchTEnv().executeSql(sql3) 31 | .print(); 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_03_hive_udf/_04_GenericUDF/TestGenericUDF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._03_hive_udf._04_GenericUDF; 2 | 3 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 4 | import org.apache.hadoop.hive.ql.metadata.HiveException; 5 | import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; 6 | import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; 7 | import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; 8 | import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; 9 | import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; 10 | import org.apache.hadoop.io.Text; 11 | 12 | public class TestGenericUDF extends GenericUDF { 13 | 14 | private transient StringObjectInspector soi = null; 15 | 16 | private transient StringObjectInspector soi1 = null; 17 | 18 | @Override 19 | public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { 20 | PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) arguments[0]; 21 | soi = (StringObjectInspector) primitiveObjectInspector; 22 | return PrimitiveObjectInspectorFactory 23 | .getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING); 24 | } 25 | 26 | @Override 27 | public Object evaluate(DeferredObject[] arguments) throws HiveException { 28 | return new Text("UNKNOWN"); 29 | } 30 | 31 | @Override 32 | public String getDisplayString(String[] children) { 33 | return "test"; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_04_flink_udf/FlinkUDAF_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._04_flink_udf; 2 | 3 | public class FlinkUDAF_Test { 4 | } 5 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_04_flink_udf/FlinkUDF_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._04_flink_udf; 2 | 3 | public class FlinkUDF_Test { 4 | } 5 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_04_flink_udf/FlinkUDTF_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._04_flink_udf; 2 | 3 | public class FlinkUDTF_Test { 4 | } 5 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_08/batch/_05_test/_01_batch_to_datastream/Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._08.batch._05_test._01_batch_to_datastream; 2 | 3 | import java.util.concurrent.TimeUnit; 4 | 5 | import org.apache.flink.api.common.restartstrategy.RestartStrategies; 6 | import org.apache.flink.api.java.utils.ParameterTool; 7 | import org.apache.flink.configuration.Configuration; 8 | import org.apache.flink.streaming.api.CheckpointingMode; 9 | import org.apache.flink.streaming.api.environment.CheckpointConfig; 10 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 11 | import org.apache.flink.table.api.EnvironmentSettings; 12 | import org.apache.flink.table.api.TableEnvironment; 13 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 14 | 15 | public class Test { 16 | 17 | public static void main(String[] args) { 18 | StreamExecutionEnvironment env = 19 | StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 20 | 21 | ParameterTool parameterTool = ParameterTool.fromArgs(args); 22 | 23 | env.setRestartStrategy(RestartStrategies.failureRateRestart(6, org.apache.flink.api.common.time.Time 24 | 
.of(10L, TimeUnit.MINUTES), org.apache.flink.api.common.time.Time.of(5L, TimeUnit.SECONDS))); 25 | env.getConfig().setGlobalJobParameters(parameterTool); 26 | env.setParallelism(1); 27 | 28 | // ck 设置 29 | env.getCheckpointConfig().setFailOnCheckpointingErrors(false); 30 | env.enableCheckpointing(30 * 1000L, CheckpointingMode.EXACTLY_ONCE); 31 | env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3L); 32 | env.getCheckpointConfig() 33 | .enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); 34 | 35 | EnvironmentSettings settings = EnvironmentSettings 36 | .newInstance() 37 | .useBlinkPlanner() 38 | .inBatchMode() 39 | .build(); 40 | 41 | TableEnvironment tEnv = TableEnvironment.create(settings); 42 | 43 | // TODO 这一行会抛出异常 44 | StreamTableEnvironment t1Env = StreamTableEnvironment.create(env, settings); 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_01_hive_udf/_01_GenericUDF/TestGenericUDF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._01_hive_udf._01_GenericUDF; 2 | 3 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 4 | import org.apache.hadoop.hive.ql.metadata.HiveException; 5 | import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; 6 | import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; 7 | import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; 8 | import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; 9 | import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; 10 | import org.apache.hadoop.io.Text; 11 | 12 | public class TestGenericUDF extends GenericUDF { 13 | 14 | private transient StringObjectInspector soi = null; 15 | 16 | private transient StringObjectInspector soi1 = null; 17 | 18 | @Override 19 | public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { 20 | PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) arguments[0]; 21 | soi = (StringObjectInspector) primitiveObjectInspector; 22 | return PrimitiveObjectInspectorFactory 23 | .getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING); 24 | } 25 | 26 | @Override 27 | public Object evaluate(DeferredObject[] arguments) throws HiveException { 28 | return new Text("UNKNOWN"); 29 | } 30 | 31 | @Override 32 | public String getDisplayString(String[] children) { 33 | return "test"; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_02_stream_hive_udf/HiveUDF_Error_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._02_stream_hive_udf; 2 | 3 | import flink.examples.FlinkEnvUtils; 4 | import flink.examples.FlinkEnvUtils.FlinkEnv; 5 | 6 | 7 | /** 8 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 9 | * http://localhost:9870/ 10 | * http://localhost:8088/cluster 11 | * 12 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 13 | * hive cli:$HIVE_HOME/bin/hive 14 | */ 15 | public class HiveUDF_Error_Test { 16 | 17 | public static void main(String[] args) throws Exception { 18 | 19 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(new String[] 
{"--enable.hive.module.v2", "false"}); 20 | 21 | String sql = "CREATE TABLE source_table (\n" 22 | + " user_id BIGINT,\n" 23 | + " `params` STRING\n" 24 | + ") WITH (\n" 25 | + " 'connector' = 'user_defined',\n" 26 | + " 'format' = 'json',\n" 27 | + " 'class.name' = 'flink.examples.sql._09.udf._02_stream_hive_udf.UserDefinedSource'\n" 28 | + ");\n" 29 | + "\n" 30 | + "CREATE TABLE sink_table (\n" 31 | + " user_id BIGINT,\n" 32 | + " `log_id` STRING\n" 33 | + ") WITH (\n" 34 | + " 'connector' = 'print'\n" 35 | + ");\n" 36 | + "\n" 37 | + "insert into sink_table\n" 38 | + "select user_id,\n" 39 | + " get_json_object(params, '$.log_id') as log_id\n" 40 | + "from source_table\n"; 41 | 42 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "Hive UDF 测试案例"); 43 | 44 | for (String innerSql : sql.split(";")) { 45 | 46 | flinkEnv.streamTEnv().executeSql(innerSql); 47 | } 48 | 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_02_stream_hive_udf/HiveUDF_create_temporary_error_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._02_stream_hive_udf; 2 | 3 | import flink.examples.FlinkEnvUtils; 4 | import flink.examples.FlinkEnvUtils.FlinkEnv; 5 | 6 | 7 | /** 8 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 9 | * http://localhost:9870/ 10 | * http://localhost:8088/cluster 11 | * 12 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 13 | * hive cli:$HIVE_HOME/bin/hive 14 | */ 15 | public class HiveUDF_create_temporary_error_Test { 16 | 17 | public static void main(String[] args) throws Exception { 18 | 19 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 20 | 21 | String sql = "CREATE TEMPORARY FUNCTION test_hive_udf as 'flink.examples.sql._09.udf._02_stream_hive_udf.TestGenericUDF';\n" 22 | + "\n" 23 | + "CREATE TABLE source_table (\n" 24 | + " user_id BIGINT,\n" 25 | + " `params` STRING\n" 26 | + ") WITH (\n" 27 | + " 'connector' = 'user_defined',\n" 28 | + " 'format' = 'json',\n" 29 | + " 'class.name' = 'flink.examples.sql._09.udf._02_stream_hive_udf.UserDefinedSource'\n" 30 | + ");\n" 31 | + "\n" 32 | + "CREATE TABLE sink_table (\n" 33 | + " user_id BIGINT,\n" 34 | + " `log_id` STRING\n" 35 | + ") WITH (\n" 36 | + " 'connector' = 'print'\n" 37 | + ");\n" 38 | + "\n" 39 | + "insert into sink_table\n" 40 | + "select user_id,\n" 41 | + " test_hive_udf(params) as log_id\n" 42 | + "from source_table\n"; 43 | 44 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "Hive UDF 测试案例"); 45 | 46 | for (String innerSql : sql.split(";")) { 47 | 48 | flinkEnv.streamTEnv().executeSql(innerSql); 49 | } 50 | 51 | } 52 | 53 | } 54 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_02_stream_hive_udf/HiveUDF_hive_module_registry_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._02_stream_hive_udf; 2 | 3 | import flink.examples.FlinkEnvUtils; 4 | import flink.examples.FlinkEnvUtils.FlinkEnv; 5 | 6 | public class HiveUDF_hive_module_registry_Test { 7 | 8 | public static void main(String[] args) throws Exception { 9 | 10 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 11 | 12 | String sql = "CREATE TABLE source_table (\n" 13 | + " user_id BIGINT,\n" 14 | + 
" `params` STRING\n" 15 | + ") WITH (\n" 16 | + " 'connector' = 'user_defined',\n" 17 | + " 'format' = 'json',\n" 18 | + " 'class.name' = 'flink.examples.sql._09.udf._02_stream_hive_udf.UserDefinedSource'\n" 19 | + ");\n" 20 | + "\n" 21 | + "CREATE TABLE sink_table (\n" 22 | + " user_id BIGINT,\n" 23 | + " `log_id` STRING\n" 24 | + ") WITH (\n" 25 | + " 'connector' = 'print'\n" 26 | + ");\n" 27 | + "\n" 28 | + "insert into sink_table\n" 29 | + "select user_id,\n" 30 | + " test_hive_udf(params) as log_id\n" 31 | + "from source_table\n"; 32 | 33 | flinkEnv.hiveModuleV2() 34 | .registryHiveUDF( 35 | "test_hive_udf" 36 | , TestGenericUDF.class.getName()); 37 | 38 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "Hive UDF 测试案例"); 39 | 40 | for (String innerSql : sql.split(";")) { 41 | 42 | flinkEnv.streamTEnv().executeSql(innerSql); 43 | } 44 | 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_02_stream_hive_udf/HiveUDF_load_first_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._02_stream_hive_udf; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | /** 10 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 11 | * http://localhost:9870/ 12 | * http://localhost:8088/cluster 13 | * 14 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 15 | * hive cli:$HIVE_HOME/bin/hive 16 | */ 17 | public class HiveUDF_load_first_Test { 18 | 19 | public static void main(String[] args) throws Exception { 20 | 21 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 22 | 23 | String sql = "CREATE TABLE source_table (\n" 24 | + " user_id BIGINT,\n" 25 | + " `params` STRING\n" 26 | + ") WITH (\n" 27 | + " 'connector' = 'user_defined',\n" 28 | + " 'format' = 'json',\n" 29 | + " 'class.name' = 'flink.examples.sql._09.udf._02_stream_hive_udf.UserDefinedSource'\n" 30 | + ");\n" 31 | + "\n" 32 | + "CREATE TABLE sink_table (\n" 33 | + " user_id BIGINT,\n" 34 | + " `log_id` STRING\n" 35 | + ") WITH (\n" 36 | + " 'connector' = 'print'\n" 37 | + ");\n" 38 | + "\n" 39 | + "insert into sink_table\n" 40 | + "select user_id,\n" 41 | + " get_json_object(params, '$.log_id') as log_id\n" 42 | + "from source_table\n"; 43 | 44 | Arrays.stream(flinkEnv.streamTEnv().listModules()).forEach(System.out::println); 45 | 46 | Arrays.stream(flinkEnv.streamTEnv().listFunctions()).forEach(System.out::println); 47 | 48 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "Hive UDF 测试案例"); 49 | 50 | for (String innerSql : sql.split(";")) { 51 | 52 | flinkEnv.streamTEnv().executeSql(innerSql); 53 | } 54 | 55 | } 56 | 57 | } 58 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_02_stream_hive_udf/HiveUDF_load_second_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._02_stream_hive_udf; 2 | 3 | import flink.examples.FlinkEnvUtils; 4 | import flink.examples.FlinkEnvUtils.FlinkEnv; 5 | 6 | 7 | /** 8 | * hadoop 启动:/usr/local/Cellar/hadoop/3.2.1/sbin/start-all.sh 9 | * http://localhost:9870/ 10 | * http://localhost:8088/cluster 11 | * 12 | * hive 启动:$HIVE_HOME/bin/hive --service metastore & 13 | * hive 
cli:$HIVE_HOME/bin/hive 14 | */ 15 | public class HiveUDF_load_second_Test { 16 | 17 | public static void main(String[] args) throws Exception { 18 | 19 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(new String[] {"--enable.hive.module.load-first", "false"}); 20 | 21 | String sql = "CREATE TABLE source_table (\n" 22 | + " user_id BIGINT,\n" 23 | + " `params` STRING\n" 24 | + ") WITH (\n" 25 | + " 'connector' = 'user_defined',\n" 26 | + " 'format' = 'json',\n" 27 | + " 'class.name' = 'flink.examples.sql._09.udf._02_stream_hive_udf.UserDefinedSource'\n" 28 | + ");\n" 29 | + "\n" 30 | + "CREATE TABLE sink_table (\n" 31 | + " user_id BIGINT,\n" 32 | + " `log_id` STRING\n" 33 | + ") WITH (\n" 34 | + " 'connector' = 'print'\n" 35 | + ");\n" 36 | + "\n" 37 | + "insert into sink_table\n" 38 | + "select user_id,\n" 39 | + " get_json_object(params, '$.log_id') as log_id\n" 40 | + "from source_table\n"; 41 | 42 | flinkEnv.streamTEnv().getConfig().getConfiguration().setString("pipeline.name", "Hive UDF 测试案例"); 43 | 44 | for (String innerSql : sql.split(";")) { 45 | 46 | flinkEnv.streamTEnv().executeSql(innerSql); 47 | } 48 | 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_02_stream_hive_udf/TestGenericUDF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._02_stream_hive_udf; 2 | 3 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 4 | import org.apache.hadoop.hive.ql.metadata.HiveException; 5 | import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; 6 | import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; 7 | import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; 8 | import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; 9 | import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; 10 | import org.apache.hadoop.io.Text; 11 | 12 | public class TestGenericUDF extends GenericUDF { 13 | 14 | private transient StringObjectInspector soi = null; 15 | 16 | private transient StringObjectInspector soi1 = null; 17 | 18 | @Override 19 | public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { 20 | PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) arguments[0]; 21 | soi = (StringObjectInspector) primitiveObjectInspector; 22 | return PrimitiveObjectInspectorFactory 23 | .getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING); 24 | } 25 | 26 | @Override 27 | public Object evaluate(DeferredObject[] arguments) throws HiveException { 28 | return new Text("UNKNOWN"); 29 | } 30 | 31 | @Override 32 | public String getDisplayString(String[] children) { 33 | return "test"; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_02_stream_hive_udf/UserDefinedSource.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._02_stream_hive_udf; 2 | 3 | import org.apache.flink.api.common.serialization.DeserializationSchema; 4 | import org.apache.flink.streaming.api.functions.source.RichSourceFunction; 5 | import org.apache.flink.table.data.RowData; 6 | 7 | import com.google.common.collect.ImmutableMap; 8 | 9 | import 
flink.examples.JacksonUtils; 10 | 11 | public class UserDefinedSource extends RichSourceFunction<RowData> { 12 | 13 | private DeserializationSchema<RowData> dser; 14 | 15 | private volatile boolean isCancel; 16 | 17 | public UserDefinedSource(DeserializationSchema<RowData> dser) { 18 | this.dser = dser; 19 | } 20 | 21 | @Override 22 | public void run(SourceContext<RowData> ctx) throws Exception { 23 | 24 | int i = 0; 25 | 26 | while (!this.isCancel) { 27 | ctx.collect(this.dser.deserialize( 28 | JacksonUtils.bean2Json(ImmutableMap.of("user_id", 1111L, "params", "{\"log_id\":\"" + i + "\"}")).getBytes() 29 | )); 30 | Thread.sleep(1000); 31 | 32 | i++; 33 | } 34 | } 35 | 36 | @Override 37 | public void cancel() { 38 | this.isCancel = true; 39 | } 40 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_05_scalar_function/ExplodeUDTF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._05_scalar_function; 2 | 3 | import java.util.Set; 4 | 5 | import org.apache.flink.table.annotation.DataTypeHint; 6 | import org.apache.flink.table.functions.TableFunction; 7 | 8 | 9 | public class ExplodeUDTF extends TableFunction<String> { 10 | 11 | public void eval(@DataTypeHint("RAW") Object test) { 12 | 13 | Set<String> test1 = (Set<String>) test; 14 | 15 | for (String t : test1) { 16 | collect(t); 17 | } 18 | } 19 | 20 | } 21 | 22 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_05_scalar_function/ExplodeUDTFV2.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._05_scalar_function; 2 | 3 | import org.apache.flink.table.functions.TableFunction; 4 | 5 | 6 | public class ExplodeUDTFV2 extends TableFunction<String[]> { 7 | 8 | public void eval(String worlds) { 9 | 10 | collect(new String[]{ worlds, worlds + "111"}); 11 | collect(new String[]{ worlds, worlds + "222"}); 12 | } 13 | 14 | } 15 | 16 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_05_scalar_function/GetMapValue.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._05_scalar_function; 2 | 3 | import java.util.Map; 4 | 5 | import org.apache.flink.table.annotation.DataTypeHint; 6 | import org.apache.flink.table.functions.ScalarFunction; 7 | 8 | public class GetMapValue extends ScalarFunction { 9 | 10 | public String eval(@DataTypeHint("RAW") Object map, String key) { 11 | 12 | Map innerMap = (Map) map; 13 | try { 14 | Object obj = innerMap.get(key); 15 | if (obj != null) { 16 | return obj.toString(); 17 | } else { 18 | return null; 19 | } 20 | } catch (Exception e) { 21 | return null; 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_05_scalar_function/GetSetValue.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._05_scalar_function; 2 | 3 | import java.util.Set; 4 | 5 | import org.apache.flink.table.annotation.DataTypeHint; 6 | import org.apache.flink.table.functions.ScalarFunction; 7 | 8 | public class GetSetValue extends ScalarFunction { 9 | 10 | public String eval(@DataTypeHint("RAW") Object set) { 11 | 12 | Set<String> s = (Set<String>)
set; 13 | 14 | return s.iterator().next(); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_05_scalar_function/ScalarFunctionTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._05_scalar_function; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class ScalarFunctionTest { 10 | 11 | 12 | public static void main(String[] args) throws Exception { 13 | 14 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 15 | 16 | flinkEnv.streamTEnv().createFunction("set_string", SetStringUDF.class); 17 | flinkEnv.streamTEnv().createFunction("explode_udtf", ExplodeUDTF.class); 18 | flinkEnv.streamTEnv().createFunction("get_map_value", GetMapValue.class); 19 | 20 | String sql = "CREATE TABLE Orders (\n" 21 | + " order_id BIGINT NOT NULL,\n" 22 | + " name STRING,\n" 23 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 24 | + " WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" 25 | + ") WITH (\n" 26 | + " 'connector' = 'datagen',\n" 27 | + " 'rows-per-second' = '10',\n" 28 | + " 'fields.name.length' = '1',\n" 29 | + " 'fields.order_id.min' = '1',\n" 30 | + " 'fields.order_id.max' = '10'\n" 31 | + ");\n" 32 | + "\n" 33 | + "CREATE TABLE target_table (\n" 34 | + " order_id BIGINT NOT NULL,\n" 35 | + " name STRING,\n" 36 | + " row_time timestamp(3),\n" 37 | + " name_explode STRING,\n" 38 | + " i STRING\n" 39 | + ") WITH (\n" 40 | + " 'connector' = 'print'\n" 41 | + ");\n" 42 | + "\n" 43 | + "INSERT INTO target_table\n" 44 | + "SELECT *, cast(get_map_value(name_explode, cast('a' as string)) as string) as i\n" 45 | + "FROM Orders\n" 46 | + "LEFT JOIN lateral TABLE(\n" 47 | + " explode_udtf(\n" 48 | + " set_string(name)\n" 49 | + " )\n" 50 | + " ) AS t(name_explode) ON TRUE"; 51 | 52 | Arrays.stream(sql.split(";")) 53 | .forEach(flinkEnv.streamTEnv()::executeSql); 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_05_scalar_function/ScalarFunctionTest2.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._05_scalar_function; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class ScalarFunctionTest2 { 10 | 11 | 12 | public static void main(String[] args) throws Exception { 13 | 14 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 15 | 16 | flinkEnv.streamTEnv().createFunction("set_string", SetStringUDF.class); 17 | flinkEnv.streamTEnv().createFunction("explode_udtf", ExplodeUDTF.class); 18 | flinkEnv.streamTEnv().createFunction("get_map_value", GetMapValue.class); 19 | flinkEnv.streamTEnv().createFunction("get_set_value", GetSetValue.class); 20 | 21 | String sql = "CREATE TABLE Orders (\n" 22 | + " order_id BIGINT NOT NULL,\n" 23 | + " name STRING,\n" 24 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 25 | + " WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" 26 | + ") WITH (\n" 27 | + " 'connector' = 'datagen',\n" 28 | + " 'rows-per-second' = '10',\n" 29 | + " 'fields.name.length' = '1',\n" 30 | + " 'fields.order_id.min' = '1',\n" 31 | + " 'fields.order_id.max' = '10'\n" 32 | + 
");\n" 33 | + "\n" 34 | + "CREATE TABLE target_table (\n" 35 | + " order_id BIGINT NOT NULL,\n" 36 | + " name STRING,\n" 37 | + " row_time timestamp(3),\n" 38 | + " i STRING\n" 39 | + ") WITH (\n" 40 | + " 'connector' = 'print'\n" 41 | + ");\n" 42 | + "\n" 43 | + "INSERT INTO target_table\n" 44 | + "SELECT *, cast(get_set_value(set_string(name)) as string) as i\n" 45 | + "FROM Orders\n"; 46 | 47 | Arrays.stream(sql.split(";")) 48 | .forEach(flinkEnv.streamTEnv()::executeSql); 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_05_scalar_function/SetStringUDF.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._05_scalar_function; 2 | 3 | import java.util.Set; 4 | 5 | import org.apache.flink.api.common.typeinfo.TypeHint; 6 | import org.apache.flink.api.common.typeinfo.TypeInformation; 7 | import org.apache.flink.table.annotation.DataTypeHint; 8 | import org.apache.flink.table.functions.ScalarFunction; 9 | 10 | import com.google.common.collect.Sets; 11 | 12 | 13 | public class SetStringUDF extends ScalarFunction { 14 | 15 | @DataTypeHint("RAW") 16 | public Object eval(String input) { 17 | return Sets.newHashSet(input, input + "_1", input + "_2"); 18 | } 19 | 20 | @Override 21 | public TypeInformation getResultType(Class[] signature) { 22 | return TypeInformation.of(new TypeHint>() { 23 | }); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_09/udf/_05_scalar_function/TableFunctionTest2.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._09.udf._05_scalar_function; 2 | 3 | import java.util.Arrays; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class TableFunctionTest2 { 10 | 11 | 12 | public static void main(String[] args) throws Exception { 13 | 14 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 15 | 16 | flinkEnv.streamTEnv().createFunction("explode_udtf_v2", ExplodeUDTFV2.class); 17 | 18 | String sql = "CREATE TABLE Orders (\n" 19 | + " order_id BIGINT NOT NULL,\n" 20 | + " name STRING,\n" 21 | + " row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" 22 | + " WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" 23 | + ") WITH (\n" 24 | + " 'connector' = 'datagen',\n" 25 | + " 'rows-per-second' = '10',\n" 26 | + " 'fields.name.length' = '1',\n" 27 | + " 'fields.order_id.min' = '1',\n" 28 | + " 'fields.order_id.max' = '10'\n" 29 | + ");\n" 30 | + "\n" 31 | + "CREATE TABLE target_table (\n" 32 | + " order_id BIGINT NOT NULL,\n" 33 | + " name STRING,\n" 34 | + " row_time timestamp(3),\n" 35 | + " i STRING\n" 36 | + ") WITH (\n" 37 | + " 'connector' = 'print'\n" 38 | + ");\n" 39 | + "\n" 40 | + "INSERT INTO target_table\n" 41 | + "SELECT order_id, name, row_time, name_explode[2] as i\n" 42 | + "FROM Orders \n" 43 | + "LEFT JOIN lateral TABLE(explode_udtf_v2(name)) AS t(name_explode) ON TRUE\n"; 44 | 45 | Arrays.stream(sql.split(";")) 46 | .forEach(flinkEnv.streamTEnv()::executeSql); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_12_data_type/_02_user_defined/User.java: 
-------------------------------------------------------------------------------- 1 | package flink.examples.sql._12_data_type._02_user_defined; 2 | 3 | import java.math.BigDecimal; 4 | 5 | import org.apache.flink.table.annotation.DataTypeHint; 6 | 7 | public class User { 8 | 9 | public int age; 10 | public String name; 11 | 12 | public @DataTypeHint("DECIMAL(10, 2)") BigDecimal totalBalance; 13 | } 14 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_12_data_type/_02_user_defined/UserDefinedDataTypes_Test.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._12_data_type._02_user_defined; 2 | 3 | import org.apache.flink.table.api.TableResult; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class UserDefinedDataTypes_Test { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | flinkEnv.env().setParallelism(1); 16 | 17 | String sql = "CREATE FUNCTION user_scalar_func AS 'flink.examples.sql._12_data_type._02_user_defined.UserScalarFunction';" 18 | + "\n" 19 | + "CREATE TABLE source_table (\n" 20 | + " user_id BIGINT NOT NULL COMMENT '用户 id'\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'datagen',\n" 23 | + " 'rows-per-second' = '1',\n" 24 | + " 'fields.user_id.min' = '1',\n" 25 | + " 'fields.user_id.max' = '10'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE sink_table (\n" 29 | + " result_row ROW\n" 30 | + ") WITH (\n" 31 | + " 'connector' = 'print'\n" 32 | + ");" 33 | + "\n" 34 | + "INSERT INTO sink_table\n" 35 | + "select user_scalar_func(user_id) as result_row\n" 36 | + "from source_table"; 37 | ; 38 | 39 | for (String innerSql : sql.split(";")) { 40 | TableResult tableResult = flinkEnv.streamTEnv().executeSql(innerSql); 41 | 42 | tableResult.print(); 43 | } 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_12_data_type/_02_user_defined/UserDefinedDataTypes_Test2.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._12_data_type._02_user_defined; 2 | 3 | import org.apache.flink.table.api.TableResult; 4 | 5 | import flink.examples.FlinkEnvUtils; 6 | import flink.examples.FlinkEnvUtils.FlinkEnv; 7 | 8 | 9 | public class UserDefinedDataTypes_Test2 { 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 14 | 15 | flinkEnv.env().setParallelism(1); 16 | 17 | String sql = "CREATE FUNCTION user_scalar_func AS 'flink.examples.sql._12_data_type._02_user_defined.UserScalarFunction';" 18 | + "\n" 19 | + "CREATE TABLE source_table (\n" 20 | + " user_id BIGINT NOT NULL COMMENT '用户 id'\n" 21 | + ") WITH (\n" 22 | + " 'connector' = 'datagen',\n" 23 | + " 'rows-per-second' = '1',\n" 24 | + " 'fields.user_id.min' = '1',\n" 25 | + " 'fields.user_id.max' = '10'\n" 26 | + ");\n" 27 | + "\n" 28 | + "CREATE TABLE sink_table (\n" 29 | + " result_row_1 ROW,\n" 30 | + " result_row_2 STRING\n" 31 | + ") WITH (\n" 32 | + " 'connector' = 'print'\n" 33 | + ");" 34 | + "\n" 35 | + "INSERT INTO sink_table\n" 36 | + "select\n" 37 | + " user_scalar_func(user_id) as result_row_1,\n" 38 | + " user_scalar_func(user_scalar_func(user_id)) as result_row_2\n" 39 | + 
"from source_table"; 40 | ; 41 | 42 | for (String innerSql : sql.split(";")) { 43 | TableResult tableResult = flinkEnv.streamTEnv().executeSql(innerSql); 44 | 45 | tableResult.print(); 46 | } 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_12_data_type/_02_user_defined/UserScalarFunction.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._12_data_type._02_user_defined; 2 | 3 | import java.math.BigDecimal; 4 | 5 | import org.apache.flink.table.functions.ScalarFunction; 6 | 7 | public class UserScalarFunction extends ScalarFunction { 8 | 9 | // 1. 自定义数据类型作为输出参数 10 | public User eval(long i) { 11 | if (i > 0 && i <= 5) { 12 | User u = new User(); 13 | u.age = (int) i; 14 | u.name = "name1"; 15 | u.totalBalance = new BigDecimal(1.1d); 16 | return u; 17 | } else { 18 | User u = new User(); 19 | u.age = (int) i; 20 | u.name = "name2"; 21 | u.totalBalance = new BigDecimal(2.2d); 22 | return u; 23 | } 24 | } 25 | 26 | // 2. 自定义数据类型作为输入参数 27 | public String eval(User i) { 28 | if (i.age > 0 && i.age <= 5) { 29 | User u = new User(); 30 | u.age = 1; 31 | u.name = "name1"; 32 | u.totalBalance = new BigDecimal(1.1d); 33 | return u.name; 34 | } else { 35 | User u = new User(); 36 | u.age = 2; 37 | u.name = "name2"; 38 | u.totalBalance = new BigDecimal(2.2d); 39 | return u.name; 40 | } 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_12_data_type/_03_raw/RawScalarFunction.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._12_data_type._03_raw; 2 | 3 | import java.math.BigDecimal; 4 | 5 | import org.apache.flink.api.common.typeutils.base.StringSerializer; 6 | import org.apache.flink.table.annotation.DataTypeHint; 7 | import org.apache.flink.table.functions.ScalarFunction; 8 | 9 | import flink.examples.sql._12_data_type._02_user_defined.User; 10 | 11 | public class RawScalarFunction extends ScalarFunction { 12 | 13 | // 1. 自定义数据类型作为输出参数 14 | public User eval(long i) { 15 | if (i > 0 && i <= 5) { 16 | User u = new User(); 17 | u.age = (int) i; 18 | u.name = "name1"; 19 | u.totalBalance = new BigDecimal(1.1d); 20 | return u; 21 | } else { 22 | User u = new User(); 23 | u.age = (int) i; 24 | u.name = "name2"; 25 | u.totalBalance = new BigDecimal(2.2d); 26 | return u; 27 | } 28 | } 29 | 30 | // 2. 
自定义数据类型作为输入参数、自定义输出类型为 Raw 类型 31 | @DataTypeHint(value = "RAW", bridgedTo = String.class, rawSerializer = StringSerializer.class) 32 | public String eval(User i) { 33 | if (i.age > 0 && i.age <= 5) { 34 | User u = new User(); 35 | u.age = 1; 36 | u.name = "name1"; 37 | u.totalBalance = new BigDecimal(1.1d); 38 | return u.name; 39 | } else { 40 | User u = new User(); 41 | u.age = 2; 42 | u.name = "name2"; 43 | u.totalBalance = new BigDecimal(2.2d); 44 | return u.name; 45 | } 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/java/flink/examples/sql/_12_data_type/_03_raw/Raw_DataTypes_Test2.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._12_data_type._03_raw; 2 | 3 | import org.apache.flink.api.common.typeutils.base.StringSerializer; 4 | import org.apache.flink.table.api.TableResult; 5 | import org.apache.flink.table.types.logical.RawType; 6 | 7 | import flink.examples.FlinkEnvUtils; 8 | import flink.examples.FlinkEnvUtils.FlinkEnv; 9 | 10 | 11 | public class Raw_DataTypes_Test2 { 12 | 13 | public static void main(String[] args) throws Exception { 14 | 15 | FlinkEnv flinkEnv = FlinkEnvUtils.getStreamTableEnv(args); 16 | 17 | RawType rawType = new RawType(String.class, StringSerializer.INSTANCE); 18 | 19 | String base64String = rawType.getSerializerString(); 20 | 21 | flinkEnv.env().setParallelism(1); 22 | 23 | String sql = String.format("CREATE FUNCTION raw_scalar_func AS 'flink.examples.sql._12_data_type._03_raw.RawScalarFunction';" 24 | + "\n" 25 | + "CREATE TABLE source_table (\n" 26 | + " user_id BIGINT NOT NULL COMMENT '用户 id'\n" 27 | + ") WITH (\n" 28 | + " 'connector' = 'datagen',\n" 29 | + " 'rows-per-second' = '1',\n" 30 | + " 'fields.user_id.min' = '1',\n" 31 | + " 'fields.user_id.max' = '10'\n" 32 | + ");\n" 33 | + "\n" 34 | + "CREATE TABLE sink_table (\n" 35 | + " result_row_1 RAW('java.lang.String', '%s')\n" 36 | + ") WITH (\n" 37 | + " 'connector' = 'print'\n" 38 | + ");" 39 | + "\n" 40 | + "INSERT INTO sink_table\n" 41 | + "select\n" 42 | + " raw_scalar_func(raw_scalar_func(user_id)) as result_row_1\n" 43 | + "from source_table", base64String); 44 | ; 45 | 46 | for (String innerSql : sql.split(";")) { 47 | TableResult tableResult = flinkEnv.streamTEnv().executeSql(innerSql); 48 | 49 | tableResult.print(); 50 | } 51 | } 52 | 53 | } 54 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/proto/source.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package flink; 4 | 5 | option java_package = "flink.examples.datastream._04.keyed_co_process.protobuf"; 6 | option java_outer_classname = "SourceOuterClassname"; 7 | option java_multiple_files = true; 8 | 9 | message Source { 10 | string name = 1; 11 | repeated string names = 2; 12 | 13 | map si_map = 7; 14 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/proto/test.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package flink; 4 | 5 | option java_package = "flink.examples.sql._05.format.formats.protobuf"; 6 | option java_outer_classname = "TestOuterClassname"; 7 | option java_multiple_files = true; 8 | 9 | message Test { 10 | string name = 1; 11 | repeated string names = 2; 12 | 13 | map si_map = 7; 14 | } 
-------------------------------------------------------------------------------- /flink-examples-1.13/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | flink.examples.sql._05.format.formats.csv.ChangelogCsvFormatFactory 17 | flink.examples.sql._03.source_sink.table.socket.SocketDynamicTableFactory 18 | flink.examples.sql._03.source_sink.table.redis.v2.RedisDynamicTableFactory 19 | flink.examples.sql._03.source_sink.table.user_defined.UserDefinedDynamicTableFactory 20 | flink.examples.sql._03.source_sink.abilities.source.Abilities_TableSourceFactory 21 | flink.examples.sql._03.source_sink.abilities.source.before.Before_Abilities_TableSourceFactory 22 | flink.examples.sql._03.source_sink.abilities.sink.Abilities_TableSinkFactory 23 | flink.examples.sql._05.format.formats.protobuf.rowdata.ProtobufFormatFactory -------------------------------------------------------------------------------- /flink-examples-1.13/src/main/scala/flink/examples/sql/_04/type/TableFunc0.scala: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._04.`type` 2 | 3 | import org.apache.flink.table.functions.TableFunction 4 | 5 | 6 | case class SimpleUser(name: String, age: Int) 7 | 8 | class TableFunc0 extends TableFunction[SimpleUser] { 9 | 10 | // make sure input element's format is "" 11 | 12 | def eval(user: String): Unit = { 13 | 14 | if (user.contains("#")) { 15 | 16 | val splits = user.split("#") 17 | 18 | collect(SimpleUser(splits(0), splits(1).toInt)) 19 | 20 | } 21 | } 22 | 23 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/test/java/flink/examples/sql/_06/calcite/CalciteTest.java: -------------------------------------------------------------------------------- 1 | package flink.examples.sql._06.calcite; 2 | 3 | import java.util.List; 4 | 5 | import org.apache.calcite.plan.RelOptUtil; 6 | import org.apache.calcite.plan.RelTraitDef; 7 | import org.apache.calcite.rel.RelNode; 8 | import org.apache.calcite.schema.SchemaPlus; 9 | import org.apache.calcite.sql.parser.SqlParser; 10 | import org.apache.calcite.tools.FrameworkConfig; 11 | import org.apache.calcite.tools.Frameworks; 12 | import org.apache.calcite.tools.Programs; 13 | import org.apache.calcite.tools.RelBuilder; 14 | 15 | public class CalciteTest { 16 | 17 | public static void main(String[] args) { 18 | final FrameworkConfig config = config().build(); 19 | final RelBuilder builder = RelBuilder.create(config); 20 | final RelNode node = builder 21 | .scan("EMP") 22 | .build(); 23 | 
System.out.println(RelOptUtil.toString(node)); 24 | } 25 | 26 | public static Frameworks.ConfigBuilder config() { 27 | final SchemaPlus rootSchema = Frameworks.createRootSchema(true); 28 | return Frameworks.newConfigBuilder() 29 | .parserConfig(SqlParser.Config.DEFAULT) 30 | .traitDefs((List) null) 31 | .programs(Programs.heuristicJoinOrder(Programs.RULE_SET, true, 2)); 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /flink-examples-1.13/src/test/proto/person.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package flink; 4 | 5 | option java_package = "flink.examples.sql._05.format.formats.protobuf"; 6 | option java_outer_classname = "PersonOuterClassname"; 7 | option java_multiple_files = true; 8 | 9 | message Person { 10 | string name = 1; 11 | repeated string names = 2; 12 | 13 | int32 id = 3; 14 | repeated int32 ids = 4; 15 | 16 | int64 long = 5; 17 | repeated int64 longs = 6; 18 | 19 | map si_map = 7; 20 | map sl_map = 8; 21 | map sd_map = 9; 22 | 23 | Dog dog = 10; 24 | repeated Dog dogs = 11; 25 | 26 | enum ContactType { 27 | MOBILE = 0; 28 | MESSAGE = 1; 29 | WECHAT = 2; 30 | EMAIL = 3; 31 | } 32 | 33 | message Contact { 34 | string number = 1; 35 | ContactType contact_type = 2; 36 | } 37 | 38 | repeated Contact contacts = 12; 39 | } 40 | 41 | message Dog { 42 | string name = 1; 43 | int32 id = 2; 44 | } -------------------------------------------------------------------------------- /flink-examples-1.13/src/test/scala/ScalaEnv.scala: -------------------------------------------------------------------------------- 1 | import org.apache.flink.api.java.tuple.Tuple3 2 | import org.apache.flink.api.scala._ 3 | import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment 4 | import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment 5 | import org.apache.flink.table.api.{DataTypes, Schema} 6 | import org.apache.flink.types.Row 7 | 8 | // https://ci.apache.org/projects/flink/flink-docs-release-1.8/dev/table/udfs.html 9 | 10 | /** 11 | * https://blog.csdn.net/fct2001140269/article/details/84066274 12 | * 13 | * https://ci.apache.org/projects/flink/flink-docs-release-1.8/dev/table/ 14 | * 15 | * https://blog.csdn.net/qq_35338741/article/details/108645832 16 | */ 17 | 18 | object ScalaEnv { 19 | 20 | def main(args: Array[String]): Unit = { 21 | val env = StreamExecutionEnvironment.getExecutionEnvironment 22 | 23 | // create a TableEnvironment 24 | val tableEnv = StreamTableEnvironment.create(env) 25 | 26 | val source = env.fromCollection(scala.Iterator.apply(Tuple3.of(new String("2"), 1L, 1627218000000L), Tuple3.of(new String("2"), 101L, 1627218000000L + 6000L), Tuple3.of(new String("2"), 201L, 1627218000000L + 7000L), Tuple3.of(new String("2"), 301L, 1627218000000L + 7000L))) 27 | 28 | tableEnv.createTemporaryView("source_db.source_table" 29 | , source 30 | , Schema 31 | .newBuilder() 32 | .column("f0", DataTypes.STRING()) 33 | .column("f1", DataTypes.BIGINT()) 34 | .column("f2", DataTypes.BIGINT()) 35 | .build()) 36 | 37 | tableEnv.createFunction("hashCode" 38 | , classOf[TableFunc0]) 39 | 40 | val sql = "select * from source_db.source_table as a LEFT JOIN LATERAL TABLE(table1(a.f1)) AS DIM(status_new) ON TRUE" 41 | 42 | tableEnv.toDataStream(tableEnv.sqlQuery(sql), classOf[Row]).print() 43 | 44 | // execute 45 | env.execute() 46 | } 47 | 48 | } -------------------------------------------------------------------------------- 
/flink-examples-1.13/src/test/scala/TableFunc0.scala: -------------------------------------------------------------------------------- 1 | import org.apache.flink.table.functions.TableFunction 2 | 3 | 4 | case class SimpleUser(name: String, age: Int) 5 | 6 | class TableFunc0 extends TableFunction[SimpleUser] { 7 | 8 | // make sure input element's format is "" 9 | 10 | def eval(user: Long): Unit = { 11 | 12 | // if (user.contains("#")) { 13 | // 14 | // val splits = user.split("#") 15 | // 16 | // collect(SimpleUser(splits(0), splits(1).toInt)) 17 | // 18 | // } 19 | } 20 | 21 | } -------------------------------------------------------------------------------- /flink-examples-1.8/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/** 5 | #**/src/test/** 6 | .idea/ 7 | *.iml 8 | *.DS_Store 9 | 10 | ### IntelliJ IDEA ### 11 | .idea 12 | *.iws 13 | *.ipr 14 | 15 | --------------------------------------------------------------------------------