46 | List<String> tokensList = new ArrayList<>();
47 | boolean inQuotes = false;
48 | boolean inSingleQuotes = false;
49 | StringBuilder b = new StringBuilder();
50 | char[] chars = sqls.toCharArray();
51 | int idx = 0;
52 | for (char c : chars) {
53 | char flag = 0;
54 | if (idx > 0) {
55 | flag = chars[idx - 1];
56 | }
57 | if (c == delimiter) {
58 | if (inQuotes) {
59 | b.append(c);
60 | } else if (inSingleQuotes) {
61 | b.append(c);
62 | } else {
63 | if (StringUtils.isNotBlank(b)) {
64 | tokensList.add(b.toString());
65 | b = new StringBuilder();
66 | }
67 | }
68 | } else if (c == '\"' && '\\' != flag && !inSingleQuotes) {
69 | inQuotes = !inQuotes;
70 | b.append(c);
71 | } else if (c == '\'' && '\\' != flag && !inQuotes) {
72 | inSingleQuotes = !inSingleQuotes;
73 | b.append(c);
74 | } else {
75 | b.append(c);
76 | }
77 | idx++;
78 | }
79 |
80 | if (StringUtils.isNotBlank(b)) {
81 | tokensList.add(b.toString());
82 | }
83 |
84 | return tokensList;
85 | }
86 |
87 |
88 | /**
89 | * Compress a string with zip.
90 | * @param str the text to compress
91 | * @return the compressed text, Base64 encoded
92 | */
93 | public static final String zip(String str) {
94 | if (str == null) {
95 | return null;
96 | }
97 | byte[] compressed;
98 | ByteArrayOutputStream out = null;
99 | ZipOutputStream zout = null;
100 | String compressedStr = null;
101 | try {
102 | out = new ByteArrayOutputStream();
103 | zout = new ZipOutputStream(out);
104 | zout.putNextEntry(new ZipEntry("0"));
105 | zout.write(str.getBytes());
106 | zout.closeEntry();
107 | compressed = out.toByteArray();
108 | compressedStr = new sun.misc.BASE64Encoder().encodeBuffer(compressed);
109 | } catch (IOException e) {
110 | compressed = null;
111 | compressedStr = str;
112 | } finally {
113 | if (zout != null) {
114 | try {
115 | zout.close();
116 | } catch (IOException e) {
117 | }
118 | }
119 | if (out != null) {
120 | try {
121 | out.close();
122 | } catch (IOException e) {
123 | }
124 | }
125 | }
126 | return compressedStr;
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/flink-yarn-submiter-service/src/main/java/cn/todd/flink/classloader/ChildFirstClassLoader.java:
--------------------------------------------------------------------------------
1 | package cn.todd.flink.classloader;
2 |
3 | import java.io.IOException;
4 | import java.net.URL;
5 | import java.util.ArrayList;
6 | import java.util.Enumeration;
7 | import java.util.Iterator;
8 | import java.util.List;
9 | import java.util.function.Consumer;
10 |
11 | /**
12 | * A variant of the URLClassLoader that first loads from the URLs and only after that from the
13 | * parent.
14 | *
15 | *
16 | *
17 | * <p>{@link #getResourceAsStream(String)} uses {@link #getResource(String)} internally so we don't
18 | * override that.
19 | */
20 | public final class ChildFirstClassLoader extends AbsSubmitterClassLoader {
21 | static {
22 | ClassLoader.registerAsParallelCapable();
23 | }
24 |
25 | /** The classes that should always go through the parent ClassLoader. */
26 | private final String[] alwaysParentFirstPatterns;
27 |
28 | public ChildFirstClassLoader(URL[] urls, ClassLoader parent) {
29 | this(urls, parent, new String[] {}, NOOP_EXCEPTION_HANDLER);
30 | }
31 |
32 | public ChildFirstClassLoader(
33 | URL[] urls, ClassLoader parent, String[] alwaysParentFirstPatterns) {
34 | this(urls, parent, alwaysParentFirstPatterns, NOOP_EXCEPTION_HANDLER);
35 | }
36 |
37 | public ChildFirstClassLoader(
38 | URL[] urls,
39 | ClassLoader parent,
40 | String[] alwaysParentFirstPatterns,
41 | Consumer<Throwable> classLoadingExceptionHandler) {
42 | super(urls, parent, classLoadingExceptionHandler);
43 | this.alwaysParentFirstPatterns = alwaysParentFirstPatterns;
44 | }
45 |
46 | @Override
47 | protected Class<?> loadClassWithoutExceptionHandling(String name, boolean resolve)
48 | throws ClassNotFoundException {
49 | // First, check if the class has already been loaded
50 | Class<?> c = findLoadedClass(name);
51 |
52 | if (c == null) {
53 | // check whether the class should go parent-first
54 | for (String alwaysParentFirstPattern : alwaysParentFirstPatterns) {
55 | if (name.startsWith(alwaysParentFirstPattern)) {
56 | return super.loadClassWithoutExceptionHandling(name, resolve);
57 | }
58 | }
59 |
60 | try {
61 | // check the URLs
62 | c = findClass(name);
63 | } catch (ClassNotFoundException e) {
64 | // let URLClassLoader do it, which will eventually call the parent
65 | c = super.loadClassWithoutExceptionHandling(name, resolve);
66 | }
67 | } else if (resolve) {
68 | resolveClass(c);
69 | }
70 |
71 | return c;
72 | }
73 |
74 | @Override
75 | public URL getResource(String name) {
76 | // first, try and find it via the URLClassloader
77 | URL urlClassLoaderResource = findResource(name);
78 |
79 | if (urlClassLoaderResource != null) {
80 | return urlClassLoaderResource;
81 | }
82 |
83 | // delegate to super
84 | return super.getResource(name);
85 | }
86 |
87 | @Override
88 | public Enumeration<URL> getResources(String name) throws IOException {
89 | // first get resources from URLClassloader
90 | Enumeration<URL> urlClassLoaderResources = findResources(name);
91 |
92 | final List<URL> result = new ArrayList<>();
93 |
94 | while (urlClassLoaderResources.hasMoreElements()) {
95 | result.add(urlClassLoaderResources.nextElement());
96 | }
97 |
98 | // get parent urls
99 | Enumeration<URL> parentResources = getParent().getResources(name);
100 |
101 | while (parentResources.hasMoreElements()) {
102 | result.add(parentResources.nextElement());
103 | }
104 |
105 | return new Enumeration<URL>() {
106 | Iterator<URL> iter = result.iterator();
107 |
108 | @Override
109 | public boolean hasMoreElements() {
110 | return iter.hasNext();
111 | }
112 |
113 | @Override
114 | public URL nextElement() {
115 | return iter.next();
116 | }
117 | };
118 | }
119 | }
120 |
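A minimal usage sketch (not from the repository): the jar path and the loaded class name are placeholders, and the parent-first prefixes below are illustrative.

import java.io.File;
import java.net.URL;

import cn.todd.flink.classloader.ChildFirstClassLoader;

public class ChildFirstClassLoaderExample {
    public static void main(String[] args) throws Exception {
        // Jar to load child-first; the path is a placeholder.
        URL[] jars = {new File("/tmp/flink-client.jar").toURI().toURL()};
        // Prefixes that must still be resolved by the parent classloader.
        String[] parentFirstPatterns = {"java.", "javax.", "org.slf4j."};

        ChildFirstClassLoader classLoader =
                new ChildFirstClassLoader(
                        jars, ChildFirstClassLoaderExample.class.getClassLoader(), parentFirstPatterns);

        // Classes found in the jar win over the parent's copies; the class name is hypothetical.
        Class<?> clazz = classLoader.loadClass("cn.todd.flink.client.YarnClusterClientImpl");
        System.out.println(clazz.getClassLoader());
    }
}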
--------------------------------------------------------------------------------
/flink-yarn-submiter/src/main/java/cn/todd/flink/utils/JobGraphBuildUtil.java:
--------------------------------------------------------------------------------
1 | package cn.todd.flink.utils;
2 |
3 | import org.apache.flink.client.program.PackagedProgram;
4 | import org.apache.flink.client.program.PackagedProgramUtils;
5 | import org.apache.flink.configuration.Configuration;
6 | import org.apache.flink.configuration.GlobalConfiguration;
7 | import org.apache.flink.runtime.jobgraph.JobGraph;
8 | import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
9 | import org.apache.flink.util.function.FunctionUtils;
10 |
11 | import cn.todd.flink.entity.ParamsInfo;
12 | import org.apache.commons.lang.BooleanUtils;
13 | import org.apache.commons.lang.StringUtils;
14 | import org.slf4j.Logger;
15 | import org.slf4j.LoggerFactory;
16 |
17 | import java.io.File;
18 | import java.net.URL;
19 | import java.util.*;
20 | import java.util.stream.Collectors;
21 |
22 | /**
23 | * Date: 2021/3/17
24 | *
25 | * @author todd5167
26 | */
27 | public class JobGraphBuildUtil {
28 | private static final Logger LOG = LoggerFactory.getLogger(JobGraphBuildUtil.class);
29 |
30 | public static final String SAVE_POINT_PATH_KEY = "savePointPath";
31 | public static final String ALLOW_NON_RESTORED_STATE_KEY = "allowNonRestoredState";
32 | public static final String PARALLELISM = "parallelism";
33 |
34 | public static JobGraph buildJobGraph(ParamsInfo jobParamsInfo, Configuration flinkConfig)
35 | throws Exception {
36 | Properties confProperties = jobParamsInfo.getConfProperties();
37 | int parallelism =
38 | Objects.isNull(confProperties)
39 | ? 1
40 | : Integer.valueOf(confProperties.getProperty(PARALLELISM, "1"));
41 |
42 | // build program
43 | PackagedProgram.Builder builder = PackagedProgram.newBuilder();
44 | Optional.ofNullable(jobParamsInfo.getExecArgs()).ifPresent(builder::setArguments);
45 | Optional.ofNullable(jobParamsInfo.getEntryPointClassName())
46 | .ifPresent(builder::setEntryPointClassName);
47 | Optional.ofNullable(jobParamsInfo.getDependFiles())
48 | .map(JobGraphBuildUtil::getUserClassPath)
49 | .ifPresent(builder::setUserClassPaths);
50 | // deal user jar path
51 | builder.setJarFile(new File(jobParamsInfo.getRunJarPath()));
52 | // deal savepoint config
53 | Optional.ofNullable(confProperties)
54 | .ifPresent(
55 | (properties) -> {
56 | SavepointRestoreSettings savepointRestoreSettings =
57 | dealSavepointRestoreSettings(properties);
58 | builder.setSavepointRestoreSettings(savepointRestoreSettings);
59 | });
60 |
61 | PackagedProgram program = builder.build();
62 | try {
63 | JobGraph jobGraph =
64 | PackagedProgramUtils.createJobGraph(program, flinkConfig, parallelism, false);
65 | // fixme: auto upload udf
66 | Optional.ofNullable(program.getClasspaths()).ifPresent(jobGraph::addJars);
67 | return jobGraph;
68 | } finally {
69 | program.deleteExtractedLibraries();
70 | }
71 | }
72 |
73 | private static List<URL> getUserClassPath(String[] jars) {
74 | List<URL> collect =
75 | Arrays.stream(jars)
76 | .map(FunctionUtils.uncheckedFunction(URL::new))
77 | .collect(Collectors.toList());
78 |
79 | collect.stream().forEach(jar -> LOG.info("parsed user classpath from jars:{}", jar));
80 | return collect;
81 | }
82 |
83 | public static Configuration getFlinkConfiguration(String flinkConfDir) {
84 | return StringUtils.isEmpty(flinkConfDir)
85 | ? new Configuration()
86 | : GlobalConfiguration.loadConfiguration(flinkConfDir);
87 | }
88 |
89 | private static SavepointRestoreSettings dealSavepointRestoreSettings(
90 | Properties confProperties) {
91 | SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.none();
92 | String savePointPath = confProperties.getProperty(SAVE_POINT_PATH_KEY);
93 | if (StringUtils.isNotBlank(savePointPath)) {
94 | String allowNonRestoredState =
95 | confProperties.getOrDefault(ALLOW_NON_RESTORED_STATE_KEY, "false").toString();
96 | savepointRestoreSettings =
97 | SavepointRestoreSettings.forPath(
98 | savePointPath, BooleanUtils.toBoolean(allowNonRestoredState));
99 | }
100 | return savepointRestoreSettings;
101 | }
102 | }
103 |
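A hedged sketch of driving buildJobGraph; the ParamsInfo builder setters mirror the ones used by CliFrontendParser elsewhere in this repo, and all paths and class names are placeholders.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.JobGraph;

import cn.todd.flink.entity.ParamsInfo;
import cn.todd.flink.utils.JobGraphBuildUtil;

import java.util.Properties;

public class JobGraphBuildExample {
    public static void main(String[] args) throws Exception {
        Properties confProperties = new Properties();
        confProperties.setProperty(JobGraphBuildUtil.PARALLELISM, "2");
        // Placeholder savepoint path; drop this line to start without restore settings.
        confProperties.setProperty(JobGraphBuildUtil.SAVE_POINT_PATH_KEY, "hdfs:///flink/savepoints/sp-1");

        ParamsInfo paramsInfo =
                ParamsInfo.builder()
                        .setRunJarPath("/tmp/user-job.jar")               // placeholder
                        .setEntryPointClassName("com.example.StreamJob")  // placeholder
                        .setConfProperties(confProperties)
                        .build();

        Configuration flinkConfig = JobGraphBuildUtil.getFlinkConfiguration("/opt/flink/conf"); // placeholder
        JobGraph jobGraph = JobGraphBuildUtil.buildJobGraph(paramsInfo, flinkConfig);
        System.out.println(jobGraph.getJobID());
    }
}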
--------------------------------------------------------------------------------
/flink-yarn-submiter/src/main/java/cn/todd/flink/executor/YarnPerJobClusterExecutor.java:
--------------------------------------------------------------------------------
1 | package cn.todd.flink.executor;
2 |
3 | import org.apache.flink.client.deployment.ClusterSpecification;
4 | import org.apache.flink.client.program.ClusterClientProvider;
5 | import org.apache.flink.configuration.Configuration;
6 | import org.apache.flink.runtime.jobgraph.JobGraph;
7 | import org.apache.flink.yarn.YarnClusterDescriptor;
8 |
9 | import cn.todd.flink.entity.ParamsInfo;
10 | import cn.todd.flink.entity.ResultInfo;
11 | import cn.todd.flink.factory.YarnClusterDescriptorFactory;
12 | import cn.todd.flink.utils.JobGraphBuildUtil;
13 | import org.apache.commons.compress.utils.Lists;
14 | import org.apache.commons.lang3.StringUtils;
15 | import org.apache.commons.lang3.exception.ExceptionUtils;
16 | import org.apache.hadoop.fs.Path;
17 | import org.apache.hadoop.yarn.api.records.ApplicationId;
18 | import org.slf4j.Logger;
19 | import org.slf4j.LoggerFactory;
20 |
21 | import java.io.File;
22 | import java.net.MalformedURLException;
23 | import java.util.List;
24 |
25 | /**
26 | * Date: 2021/10/1
27 | *
28 | * @author todd5167
29 | */
30 | public class YarnPerJobClusterExecutor extends AbstractClusterExecutor {
31 | private static final Logger LOG = LoggerFactory.getLogger(YarnPerJobClusterExecutor.class);
32 |
33 | public YarnPerJobClusterExecutor(ParamsInfo jobParamsInfo) {
34 | super(jobParamsInfo);
35 | }
36 |
37 | @Override
38 | public ResultInfo submitJob() {
39 | try {
40 | // 1. parse default flink configuration from flink-conf.yaml and dynamic replacement
41 | // default config.
42 | Configuration flinkConfig = getFlinkConfigFromParamsInfo();
43 |
44 | // 2. build JobGraph from user program.
45 | JobGraph jobGraph = JobGraphBuildUtil.buildJobGraph(jobParamsInfo, flinkConfig);
46 | LOG.info("build job graph success!");
47 |
48 | // 3. build the submitted yarn environment.
49 | try (YarnClusterDescriptor clusterDescriptor =
50 | (YarnClusterDescriptor)
51 | YarnClusterDescriptorFactory.INSTANCE.createClusterDescriptor(
52 | jobParamsInfo.getHadoopConfDir(), flinkConfig)) {
53 |
54 | // 4. replace flinkJarPath and ship flink lib jars.
55 | replaceFlinkJarPathAndShipLibJars(
56 | jobParamsInfo.getFlinkJarPath(), clusterDescriptor);
57 |
58 | // 5. deploy JobGraph to yarn.
59 | ClusterSpecification clusterSpecification =
60 | YarnClusterDescriptorFactory.INSTANCE.getClusterSpecification(flinkConfig);
61 | ClusterClientProvider<ApplicationId> applicationIdClusterClientProvider =
62 | clusterDescriptor.deployJobCluster(clusterSpecification, jobGraph, true);
63 |
64 | String applicationId =
65 | applicationIdClusterClientProvider
66 | .getClusterClient()
67 | .getClusterId()
68 | .toString();
69 | String jobId = jobGraph.getJobID().toString();
70 | LOG.info("deploy per_job with appId: {}, jobId: {}", applicationId, jobId);
71 |
72 | return new ResultInfo(applicationId, jobId, "");
73 | }
74 | } catch (Exception e) {
75 | LOG.error("submit job to yarn error!", e);
76 | return new ResultInfo("", "", ExceptionUtils.getStackTrace(e));
77 | }
78 | }
79 |
80 | /**
81 | * 1. Use the flink-dist jar found under flinkJarPath as the local flink jar.
82 | *
83 | * <p>2. Ship the remaining jars under flinkJarPath to the cluster.
84 | *
85 | * @param flinkJarPath local directory containing flink-dist and the flink lib jars
86 | * @param clusterDescriptor the yarn cluster descriptor to configure
87 | * @return the list of jars that will be shipped
88 | * @throws MalformedURLException if a jar path cannot be converted to a URL
89 | */
90 | private List<File> replaceFlinkJarPathAndShipLibJars(
91 | String flinkJarPath, YarnClusterDescriptor clusterDescriptor)
92 | throws MalformedURLException {
93 | if (StringUtils.isEmpty(flinkJarPath) || !new File(flinkJarPath).exists()) {
94 | throw new RuntimeException("The directory referenced by '-flinkJarPath' does not exist");
95 | }
96 | File[] jars = new File(flinkJarPath).listFiles();
97 | List<File> shipFiles = Lists.newArrayList();
98 | for (File file : jars) {
99 | if (file.toURI().toURL().toString().contains("flink-dist")) {
100 | clusterDescriptor.setLocalJarPath(new Path(file.toURI().toURL().toString()));
101 | } else {
102 | shipFiles.add(file);
103 | }
104 | }
105 |
106 | clusterDescriptor.addShipFiles(shipFiles);
107 | return shipFiles;
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/flink-yarn-submiter-service/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 | <parent>
6 | <artifactId>flink-spark-submiter</artifactId>
7 | <groupId>cn.todd.submiter</groupId>
8 | <version>1.0-SNAPSHOT</version>
9 | </parent>
10 | <modelVersion>4.0.0</modelVersion>
11 |
12 | <artifactId>flink-yarn-submiter-service</artifactId>
13 | <version>1.0</version>
14 |
15 | <dependencies>
16 | <dependency>
17 | <groupId>com.google.guava</groupId>
18 | <artifactId>guava</artifactId>
19 | <version>19.0</version>
20 | </dependency>
21 |
22 | <dependency>
23 | <groupId>log4j</groupId>
24 | <artifactId>log4j</artifactId>
25 | <version>1.2.17</version>
26 | <scope>provided</scope>
27 | </dependency>
28 |
29 | <dependency>
30 | <groupId>org.slf4j</groupId>
31 | <artifactId>slf4j-log4j12</artifactId>
32 | <version>1.6.1</version>
33 | <scope>provided</scope>
34 | </dependency>
35 | </dependencies>
36 |
37 | <build>
38 | <plugins>
39 | <plugin>
40 | <groupId>org.apache.maven.plugins</groupId>
41 | <artifactId>maven-compiler-plugin</artifactId>
42 | <configuration>
43 | <source>8</source>
44 | <target>8</target>
45 | </configuration>
46 | </plugin>
47 |
48 | <plugin>
49 | <groupId>org.apache.maven.plugins</groupId>
50 | <artifactId>maven-shade-plugin</artifactId>
51 | <version>3.1.0</version>
52 | <executions>
53 | <execution>
54 | <phase>package</phase>
55 | <goals>
56 | <goal>shade</goal>
57 | </goals>
58 | <configuration>
59 | false
60 |
61 |
62 |
64 |
66 |
67 |
68 |
69 |
70 |
71 | <filters>
72 | <filter>
73 | <artifact>*:*</artifact>
74 | <excludes>
75 | <exclude>META-INF/*.SF</exclude>
76 | <exclude>META-INF/*.DSA</exclude>
77 | <exclude>META-INF/*.RSA</exclude>
78 | </excludes>
79 | </filter>
80 | </filters>
81 | </configuration>
82 | </execution>
83 | </executions>
84 | </plugin>
85 |
86 | <plugin>
87 | <groupId>com.diffplug.spotless</groupId>
88 | <artifactId>spotless-maven-plugin</artifactId>
89 | <version>2.4.2</version>
90 | <configuration>
91 | <java>
92 | <googleJavaFormat>
93 | <version>1.7</version>
94 | </googleJavaFormat>
95 |
96 |
97 | <importOrder>
98 | <order>org.apache.flink,org.apache.flink.shaded,,javax,java,scala,\#</order>
99 | </importOrder>
100 |
101 | </java>
102 | </configuration>
103 |
104 | <executions>
105 | <execution>
106 | <id>spotless-check</id>
107 | <phase>package</phase>
108 | <goals>
109 | <goal>apply</goal>
110 | </goals>
111 | </execution>
112 | </executions>
113 | </plugin>
114 | </plugins>
115 | </build>
116 |
117 | </project>
--------------------------------------------------------------------------------
/flink-yarn-submiter/src/main/java/cn/todd/flink/cli/CliFrontendParser.java:
--------------------------------------------------------------------------------
1 | package cn.todd.flink.cli;
2 |
3 | import org.apache.flink.util.StringUtils;
4 | import org.apache.flink.util.function.FunctionUtils;
5 |
6 | import cn.todd.flink.entity.ParamsInfo;
7 | import cn.todd.flink.utils.JsonUtils;
8 | import org.apache.commons.cli.*;
9 |
10 | import java.lang.reflect.Field;
11 | import java.util.Arrays;
12 | import java.util.List;
13 | import java.util.Optional;
14 | import java.util.Properties;
15 | import java.util.stream.Collectors;
16 |
17 | /**
18 | * Date: 2021/10/1
19 | *
20 | * @author todd5167
21 | */
22 | public class CliFrontendParser {
23 | private DefaultParser parser = new DefaultParser();
24 | private Options options = new Options();
25 | private ParamsInfo.Builder builder = ParamsInfo.builder();
26 |
27 | public ParamsInfo parseParamsInfo(String[] args) throws ParseException {
28 | Field[] fields = SubmitterOptions.class.getDeclaredFields();
29 |
30 | Arrays.stream(fields)
31 | .forEach(
32 | FunctionUtils.uncheckedConsumer(
33 | field -> {
34 | options.addOption((Option) field.get(SubmitterOptions.class));
35 | }));
36 | CommandLine commandLine = parser.parse(options, args);
37 |
38 | Optional.ofNullable(commandLine.getOptionValue(SubmitterOptions.NAME_OPTION.getOpt()))
39 | .ifPresent(builder::setName);
40 |
41 | Optional.ofNullable(
42 | commandLine.getOptionValue(SubmitterOptions.RUN_JAR_PATH_OPTION.getOpt()))
43 | .ifPresent(builder::setRunJarPath);
44 |
45 | Optional.ofNullable(
46 | commandLine.getOptionValue(SubmitterOptions.FLINK_CONF_DIR_OPTION.getOpt()))
47 | .ifPresent(builder::setFlinkConfDir);
48 |
49 | Optional.ofNullable(
50 | commandLine.getOptionValue(SubmitterOptions.FLINK_JAR_PATH_OPTION.getOpt()))
51 | .ifPresent(builder::setFlinkJarPath);
52 |
53 | Optional.ofNullable(
54 | commandLine.getOptionValue(
55 | SubmitterOptions.HADOOP_CONF_DIR_OPTION.getOpt()))
56 | .ifPresent(builder::setHadoopConfDir);
57 |
58 | Optional.ofNullable(commandLine.getOptionValue(SubmitterOptions.QUEUE_OPTION.getOpt()))
59 | .ifPresent(builder::setQueue);
60 |
61 | Optional.ofNullable(
62 | commandLine.getOptionValue(
63 | SubmitterOptions.ENTRY_POINT_CLASSNAME_OPTION.getOpt()))
64 | .ifPresent(builder::setEntryPointClassName);
65 |
66 | Optional.ofNullable(commandLine.getOptionValue(SubmitterOptions.EXEC_ARGS_OPTION.getOpt()))
67 | .map(execArgs -> JsonUtils.parseJsonList(execArgs, String.class))
68 | .ifPresent(list -> builder.setExecArgs(list.toArray(new String[0])));
69 |
70 | Optional.ofNullable(
71 | commandLine.getOptionValue(SubmitterOptions.DEPEND_FILES_OPTION.getOpt()))
72 | .map(ds -> JsonUtils.parseJsonList(ds, String.class))
73 | .map(this::addProtocolForFile)
74 | .ifPresent(files -> builder.setDependFiles(files.toArray(new String[0])));
75 |
76 | Optional.ofNullable(commandLine.getOptionValue(SubmitterOptions.FLINK_CONF_OPTION.getOpt()))
77 | .map(ds -> JsonUtils.parseJson(ds, Properties.class))
78 | .ifPresent(builder::setConfProperties);
79 |
80 | // --------security config -------//
81 | String openSecurity =
82 | commandLine.getOptionValue(SubmitterOptions.OPEN_SECURITY_OPTION.getOpt());
83 | Optional.ofNullable(openSecurity).map(Boolean::valueOf).ifPresent(builder::setOpenSecurity);
84 |
85 | String keytabPath =
86 | commandLine.getOptionValue(SubmitterOptions.KEYTAB_PATH_OPTION.getOpt());
87 | Optional.ofNullable(keytabPath).ifPresent(builder::setKeytabPath);
88 |
89 | if (Boolean.valueOf(openSecurity) && StringUtils.isNullOrWhitespaceOnly(keytabPath)) {
90 | throw new ParseException("when security is enabled, a keytab file is required!");
91 | }
92 |
93 | Optional.ofNullable(commandLine.getOptionValue(SubmitterOptions.KRB5_PATH_OPTION.getOpt()))
94 | .ifPresent(builder::setKrb5Path);
95 |
96 | Optional.ofNullable(commandLine.getOptionValue(SubmitterOptions.PRINCIPAL_OPTION.getOpt()))
97 | .ifPresent(builder::setPrincipal);
98 |
99 | Optional.ofNullable(
100 | commandLine.getOptionValue(SubmitterOptions.APPLICATION_ID_OPTION.getOpt()))
101 | .ifPresent(builder::setApplicationId);
102 |
103 | return builder.build();
104 | }
105 |
106 | private List<String> addProtocolForFile(List<String> paths) {
107 | return paths.stream().map(path -> "file://" + path).collect(Collectors.toList());
108 | }
109 | }
110 |
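List-valued options are JSON-encoded on the command line: execArgs and dependFiles go through JsonUtils.parseJsonList, and flinkConf through JsonUtils.parseJson into Properties. A sketch of how a caller might assemble such an args array; the flag names are placeholders because SubmitterOptions is not shown in this dump.

import cn.todd.flink.cli.CliFrontendParser;
import cn.todd.flink.entity.ParamsInfo;

public class CliParseExample {
    public static void main(String[] args) throws Exception {
        // The flag names below stand in for SubmitterOptions.*_OPTION.getOpt() (not shown here).
        String[] submitterArgs = {
                "-runJarPath", "/tmp/user-job.jar",
                "-execArgs", "[\"--input\", \"hdfs:///data/in\"]",   // JSON list -> String[] execArgs
                "-dependFiles", "[\"/tmp/udf.jar\"]",                // JSON list; each path gets a file:// prefix
                "-flinkConf", "{\"parallelism\": \"2\"}"             // JSON object -> Properties
        };
        ParamsInfo paramsInfo = new CliFrontendParser().parseParamsInfo(submitterArgs);
        System.out.println(paramsInfo);
    }
}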
--------------------------------------------------------------------------------
/spark-yarn-submiter/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 | <parent>
6 | <artifactId>flink-spark-submiter</artifactId>
7 | <groupId>cn.todd.submiter</groupId>
8 | <version>1.0-SNAPSHOT</version>
9 | </parent>
10 | <modelVersion>4.0.0</modelVersion>
11 |
12 | <artifactId>spark-yarn-submiter</artifactId>
13 |
14 | <packaging>jar</packaging>
15 | <properties>
16 | <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
17 | <spark.version>2.1.3</spark.version>
18 | <scala.version>2.11.7</scala.version>
19 | <hadoop.version>2.7.3</hadoop.version>
20 | </properties>
21 |
22 | <dependencies>
23 | <dependency>
24 | <groupId>cn.todd.submiter</groupId>
25 | <artifactId>common</artifactId>
26 | <version>1.0-SNAPSHOT</version>
27 | </dependency>
28 |
29 |
30 | <dependency>
31 | <groupId>org.apache.spark</groupId>
32 | <artifactId>spark-core_2.11</artifactId>
33 | <version>${spark.version}</version>
34 | <exclusions>
35 | <exclusion>
36 | <artifactId>hadoop-confHdfsPath</artifactId>
37 | <groupId>org.apache.hadoop</groupId>
38 | </exclusion>
39 | <exclusion>
40 | <artifactId>hadoop-common</artifactId>
41 | <groupId>org.apache.hadoop</groupId>
42 | </exclusion>
43 | <exclusion>
44 | <artifactId>hadoop-client</artifactId>
45 | <groupId>org.apache.hadoop</groupId>
46 | </exclusion>
47 | </exclusions>
48 | </dependency>
49 |
50 | <dependency>
51 | <groupId>org.apache.spark</groupId>
52 | <artifactId>spark-hive_2.11</artifactId>
53 | <version>${spark.version}</version>
54 | </dependency>
55 |
56 | <dependency>
57 | <groupId>org.apache.spark</groupId>
58 | <artifactId>spark-yarn_2.11</artifactId>
59 | <version>${spark.version}</version>
60 | <exclusions>
61 | <exclusion>
62 | <artifactId>hadoop-yarn-common</artifactId>
63 | <groupId>org.apache.hadoop</groupId>
64 | </exclusion>
65 | <exclusion>
66 | <artifactId>hadoop-yarn-api</artifactId>
67 | <groupId>org.apache.hadoop</groupId>
68 | </exclusion>
69 | <exclusion>
70 |
71 | <groupId>org.apache.hadoop</groupId>
72 | <artifactId>hadoop-yarn-server-web-proxy</artifactId>
73 | </exclusion>
74 | </exclusions>
75 | </dependency>
76 |
77 | <dependency>
78 | <groupId>org.apache.hive</groupId>
79 | <artifactId>hive-jdbc</artifactId>
80 | <version>2.3.6</version>
81 | </dependency>
82 |
83 |
84 |
85 | <dependency>
86 | <groupId>org.apache.hadoop</groupId>
87 | <artifactId>hadoop-hdfs</artifactId>
88 | <version>${hadoop.version}</version>
89 | </dependency>
90 |
91 | <dependency>
92 | <groupId>org.apache.hadoop</groupId>
93 | <artifactId>hadoop-common</artifactId>
94 | <version>${hadoop.version}</version>
95 | </dependency>
96 |
97 | <dependency>
98 | <groupId>org.apache.hadoop</groupId>
99 | <artifactId>hadoop-client</artifactId>
100 | <version>${hadoop.version}</version>
101 | </dependency>
102 |
103 | <dependency>
104 | <groupId>org.apache.hadoop</groupId>
105 | <artifactId>hadoop-yarn-common</artifactId>
106 | <version>${hadoop.version}</version>
107 | </dependency>
108 |
109 | <dependency>
110 | <groupId>org.apache.hadoop</groupId>
111 | <artifactId>hadoop-yarn-api</artifactId>
112 | <version>${hadoop.version}</version>
113 | </dependency>
114 |
115 | <dependency>
116 | <groupId>org.apache.hadoop</groupId>
117 | <artifactId>hadoop-yarn-client</artifactId>
118 | <version>${hadoop.version}</version>
119 | </dependency>
120 |
121 | </dependencies>
122 |
123 |
124 | <build>
125 | <plugins>
126 | <plugin>
127 | <groupId>org.apache.maven.plugins</groupId>
128 | <artifactId>maven-compiler-plugin</artifactId>
129 | <configuration>
130 | <source>1.8</source>
131 | <target>1.8</target>
132 | </configuration>
133 | </plugin>
134 | </plugins>
135 | </build>
136 | </project>
--------------------------------------------------------------------------------
/examples/flink-sql-executor/src/main/java/cn/todd/flink/executor/SqlExecutor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package cn.todd.flink.executor;
20 |
21 | import cn.todd.flink.config.StreamEnvConfigManager;
22 | import cn.todd.flink.parser.ParamsInfo;
23 | import cn.todd.flink.parser.SqlCommandParser;
24 | import cn.todd.flink.parser.SqlCommandParser.SqlCommandCall;
25 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
26 | import org.apache.flink.table.api.EnvironmentSettings;
27 | import org.apache.flink.table.api.SqlParserException;
28 | import org.apache.flink.table.api.TableConfig;
29 | import org.apache.flink.table.api.TableEnvironment;
30 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
31 | import org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl;
32 |
33 | import java.io.IOException;
34 | import java.lang.reflect.InvocationTargetException;
35 | import java.util.List;
36 | import java.util.Properties;
37 |
38 | /**
39 | * @author todd5167
40 | */
41 | public class SqlExecutor {
42 | // --------------------------------------------------------------------------------------------
43 | private final ParamsInfo paramsInfo;
44 | private TableEnvironment tableEnvironment;
45 |
46 | public SqlExecutor(ParamsInfo paramsInfo) {
47 | this.paramsInfo = paramsInfo;
48 | }
49 |
50 | public void run() throws Exception {
51 | tableEnvironment = getStreamTableEnv(paramsInfo.getConfProp());
52 | List<SqlCommandCall> calls = SqlCommandParser.parseSqlText(paramsInfo.getSqlText());
53 | calls.stream().forEach(this::callCommand);
54 | }
55 |
56 |
57 | public static StreamTableEnvironment getStreamTableEnv(Properties confProperties)
58 | throws NoSuchMethodException, IOException, IllegalAccessException, InvocationTargetException {
59 | // build stream exec env
60 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
61 | StreamEnvConfigManager.streamExecutionEnvironmentConfig(env, confProperties);
62 |
63 | // use blink and stream mode
64 | EnvironmentSettings settings = EnvironmentSettings.newInstance()
65 | .useBlinkPlanner()
66 | .inStreamingMode()
67 | .build();
68 | StreamTableEnvironment tableEnv = StreamTableEnvironmentImpl.create(env, settings, new TableConfig());
69 | StreamEnvConfigManager.streamTableEnvironmentStateTTLConfig(tableEnv, confProperties);
70 | StreamEnvConfigManager.streamTableEnvironmentEarlyTriggerConfig(tableEnv, confProperties);
71 | return tableEnv;
72 | }
73 |
74 |
75 | // --------------------------------------------------------------------------------------------
76 | // TODO SUPPORT FUNCTION
77 | private void callCommand(SqlCommandCall cmdCall) {
78 | switch (cmdCall.command) {
79 | case SET:
80 | callSet(cmdCall);
81 | break;
82 | case CREATE_TABLE:
83 | case CREATE_VIEW:
84 | callCreateTableOrView(cmdCall);
85 | break;
86 | case INSERT_INTO:
87 | callInsertInto(cmdCall);
88 | break;
89 | default:
90 | throw new RuntimeException("Unsupported command: " + cmdCall.command);
91 | }
92 | }
93 |
94 | private void callSet(SqlCommandCall cmdCall) {
95 | String key = cmdCall.operands[0];
96 | String value = cmdCall.operands[1];
97 | tableEnvironment.getConfig().getConfiguration().setString(key, value);
98 | }
99 |
100 | private void callCreateTableOrView(SqlCommandCall cmdCall) {
101 | String ddl = cmdCall.operands[0];
102 | try {
103 | tableEnvironment.executeSql(ddl);
104 | } catch (SqlParserException e) {
105 | throw new RuntimeException("SQL parse failed:\n" + ddl + "\n", e);
106 | }
107 | }
108 |
109 | private void callInsertInto(SqlCommandCall cmdCall) {
110 | String dml = cmdCall.operands[0];
111 | try {
112 | tableEnvironment.executeSql(dml);
113 | } catch (SqlParserException e) {
114 | throw new RuntimeException("SQL parse failed:\n" + dml + "\n", e);
115 | }
116 | }
117 |
118 | }
119 |
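A rough sketch of the kind of script this executor is meant to run, covering the statement kinds that callCommand() handles (SET, CREATE TABLE/VIEW, INSERT INTO); the connectors, fields, and the exact SET syntax accepted by SqlCommandParser are assumptions.

public class SqlTextExample {
    // Illustrative SQL text only; connector names and the TTL key/value are placeholders.
    static final String SQL_TEXT =
            "SET table.exec.state.ttl=60000;\n"
                    + "CREATE TABLE source_table (id INT, name STRING) WITH ('connector'='datagen');\n"
                    + "CREATE TABLE sink_table (id INT, name STRING) WITH ('connector'='print');\n"
                    + "INSERT INTO sink_table SELECT id, name FROM source_table;";
}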
--------------------------------------------------------------------------------
/examples/spark-sql-proxy/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 | <parent>
6 | <artifactId>examples</artifactId>
7 | <groupId>cn.todd.submiter</groupId>
8 | <version>1.0-SNAPSHOT</version>
9 | </parent>
10 | <modelVersion>4.0.0</modelVersion>
11 |
12 | <artifactId>examples.spark-sql-proxy</artifactId>
13 | <name>examples.spark-sql-proxy</name>
14 | <packaging>jar</packaging>
15 |
16 | <properties>
17 | <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
18 | <spark.version>2.1.3</spark.version>
19 | <jar.name>spark-sql-proxy</jar.name>
20 | </properties>
21 |
22 | <dependencies>
23 | <dependency>
24 | <groupId>org.apache.spark</groupId>
25 | <artifactId>spark-core_2.11</artifactId>
26 | <version>${spark.version}</version>
27 | </dependency>
28 |
29 | <dependency>
30 | <groupId>org.apache.spark</groupId>
31 | <artifactId>spark-sql_2.11</artifactId>
32 | <version>${spark.version}</version>
33 | </dependency>
34 |
35 |
36 | <dependency>
37 | <groupId>org.apache.spark</groupId>
38 | <artifactId>spark-hive_2.11</artifactId>
39 | <version>${spark.version}</version>
40 | </dependency>
41 |
42 | <dependency>
43 | <groupId>org.slf4j</groupId>
44 | <artifactId>slf4j-api</artifactId>
45 | <version>1.7.22</version>
46 | <scope>provided</scope>
47 | </dependency>
48 |
49 | <dependency>
50 | <groupId>org.slf4j</groupId>
51 | <artifactId>slf4j-log4j12</artifactId>
52 | <version>1.7.22</version>
53 | <scope>provided</scope>
54 | </dependency>
55 |
56 | </dependencies>
57 |
58 | <build>
59 | <plugins>
60 | <plugin>
61 | <groupId>org.apache.maven.plugins</groupId>
62 | <artifactId>maven-compiler-plugin</artifactId>
63 | <configuration>
64 | <source>1.8</source>
65 | <target>1.8</target>
66 | <encoding>UTF-8</encoding>
67 | </configuration>
68 | </plugin>
69 |
70 | <plugin>
71 | <groupId>org.apache.maven.plugins</groupId>
72 | <artifactId>maven-jar-plugin</artifactId>
73 | <configuration>
74 | <archive>
75 | <manifest>
76 | true
77 |
78 | <mainClass>cn.todd.spark.SparksqlProxy</mainClass>
79 | </manifest>
80 | </archive>
81 | </configuration>
82 | </plugin>
83 |
84 | <plugin>
85 | <artifactId>maven-antrun-plugin</artifactId>
86 | <version>1.2</version>
87 | <executions>
88 | <execution>
89 | <id>copy-resources</id>
90 |
91 | <phase>package</phase>
92 | <goals>
93 | <goal>run</goal>
94 | </goals>
95 | <configuration>
96 |
97 |
99 |
101 |
102 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 | <plugin>
113 | <groupId>org.apache.maven.plugins</groupId>
114 | <artifactId>maven-clean-plugin</artifactId>
115 | <version>3.1.0</version>
116 | <configuration>
117 | <filesets>
118 | <fileset>
119 | <directory>${basedir}/../../exampleJars/${jar.name}</directory>
120 |
121 | </fileset>
122 | </filesets>
123 | </configuration>
124 | </plugin>
125 | </plugins>
126 | </build>
127 |
128 | </project>
129 |
--------------------------------------------------------------------------------
/common/src/main/java/cn/todd/common/utils/PublicUtil.java:
--------------------------------------------------------------------------------
1 | package cn.todd.common.utils;
2 |
3 | import org.codehaus.jackson.JsonGenerationException;
4 | import org.codehaus.jackson.JsonParseException;
5 | import org.codehaus.jackson.map.DeserializationConfig;
6 | import org.codehaus.jackson.map.JsonMappingException;
7 | import org.codehaus.jackson.map.ObjectMapper;
8 | import org.codehaus.jackson.map.SerializationConfig;
9 |
10 | import java.io.ByteArrayInputStream;
11 | import java.io.IOException;
12 | import java.util.Map;
13 | import java.util.Properties;
14 |
15 | public class PublicUtil {
16 |
17 | private static ObjectMapper objectMapper = new ObjectMapper();
18 |
19 | static {
20 | // tolerate unknown fields during deserialization
21 | objectMapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
22 | objectMapper.disable(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS);
23 | }
24 |
25 | public static <T> T objectToObject(Object params, Class<T> clazz) throws JsonParseException, JsonMappingException, JsonGenerationException, IOException{
26 | if(params ==null) {return null;}
27 | return objectMapper.readValue(objectMapper.writeValueAsBytes(params),clazz);
28 | }
29 |
30 | public static <T> T mapToObject(Map<String, Object> params, Class<T> clazz) throws JsonParseException, JsonMappingException, JsonGenerationException, IOException{
31 | return objectMapper.readValue(objectMapper.writeValueAsBytes(params),clazz);
32 | }
33 |
34 | public static <T> T jsonStrToObject(String jsonStr, Class<T> clazz) throws JsonParseException, JsonMappingException, JsonGenerationException, IOException{
35 | return objectMapper.readValue(jsonStr, clazz);
36 | }
37 |
38 |
39 | @SuppressWarnings("unchecked")
40 | public static Map<String, Object> objectToMap(Object obj) throws JsonParseException, JsonMappingException, JsonGenerationException, IOException{
41 |
42 | return objectMapper.readValue(objectMapper.writeValueAsBytes(obj), Map.class);
43 | }
44 |
45 | public static String objToString(Object obj) throws IOException {
46 | return objectMapper.writeValueAsString(obj);
47 | }
48 |
49 |
50 | public static boolean count(int index,int multiples){
51 | return index%multiples==0;
52 | }
53 |
54 | public static Object classConvter(Class<?> clazz, Object obj){
55 | if(clazz.equals(Integer.class)||int.class.equals(clazz)){
56 | obj = Integer.parseInt(obj.toString());
57 | }else if(clazz.equals(Long.class)|| long.class.equals(clazz)){
58 | obj = Long.parseLong(obj.toString());
59 | }else if(clazz.equals(Double.class)|| double.class.equals(clazz)){
60 | obj = Double.parseDouble(obj.toString());
61 | }else if(clazz.equals(Float.class)|| float.class.equals(clazz)){
62 | obj = Float.parseFloat(obj.toString());
63 | }else if(clazz.equals(Byte.class)|| byte.class.equals(clazz)){
64 | obj = Byte.parseByte(obj.toString());
65 | }else if(clazz.equals(Short.class)|| short.class.equals(clazz)){
66 | obj = Short.parseShort(obj.toString());
67 | }else if(clazz.equals(Boolean.class)||boolean.class.equals(clazz)){
68 | obj = Boolean.parseBoolean(obj.toString());
69 | }else if(clazz.equals(String.class)){
70 | obj = obj.toString();
71 | }
72 | return obj;
73 | }
74 |
75 | public static Properties stringToProperties(String str) throws IOException{
76 | Properties properties = new Properties();
77 | properties.load(new ByteArrayInputStream(str.getBytes("UTF-8")));
78 | return properties;
79 | }
80 |
81 | public static boolean isJavaBaseType(Class<?> clazz){
82 | if(Integer.class.equals(clazz) || int.class.equals(clazz)){
83 | return true;
84 | }
85 | if(Long.class.equals(clazz) || long.class.equals(clazz)){
86 | return true;
87 | }
88 | if(Double.class.equals(clazz) || double.class.equals(clazz)){
89 | return true;
90 | }
91 | if(Float.class.equals(clazz) || float.class.equals(clazz)){
92 | return true;
93 | }
94 | if(Byte.class.equals(clazz) || byte.class.equals(clazz)){
95 | return true;
96 | }
97 | if(Short.class.equals(clazz) || short.class.equals(clazz)){
98 | return true;
99 | }
100 | if(clazz.equals(Boolean.class)||boolean.class.equals(clazz)){
101 | return true;
102 | }
103 | if(String.class.equals(clazz)){
104 | return true;
105 | }
106 | return false;
107 | }
108 |
109 |
110 | public static Object ClassConvter(Class<?> clazz, Object obj){
111 | if(obj ==null) {return null;}
112 | if(clazz.equals(Integer.class)||int.class.equals(clazz)){
113 | obj = Integer.parseInt(obj.toString());
114 | }else if(clazz.equals(Long.class)|| long.class.equals(clazz)){
115 | obj = Long.parseLong(obj.toString());
116 | }else if(clazz.equals(Double.class)|| double.class.equals(clazz)){
117 | obj = Double.parseDouble(obj.toString());
118 | }else if(clazz.equals(Float.class)|| float.class.equals(clazz)){
119 | obj = Float.parseFloat(obj.toString());
120 | }else if(clazz.equals(Byte.class)|| byte.class.equals(clazz)){
121 | obj = Byte.parseByte(obj.toString());
122 | }else if(clazz.equals(Short.class)|| short.class.equals(clazz)){
123 | obj = Short.parseShort(obj.toString());
124 | }else if(clazz.equals(Boolean.class)||boolean.class.equals(clazz)){
125 | obj = Boolean.parseBoolean(obj.toString());
126 | }else if(clazz.equals(String.class)){
127 | obj = obj.toString();
128 | }
129 | return obj;
130 | }
131 | }
132 |
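A quick usage sketch of the Jackson helpers above; the property keys are placeholders.

import java.util.Map;
import java.util.Properties;

import cn.todd.common.utils.PublicUtil;

public class PublicUtilExample {
    public static void main(String[] args) throws Exception {
        // Properties text -> java.util.Properties
        Properties props = PublicUtil.stringToProperties("parallelism=2\nqueue=default\n");

        // Any object -> JSON string, and back into a Map (unknown fields are tolerated).
        String json = PublicUtil.objToString(props);
        Map<String, Object> asMap = PublicUtil.objectToMap(props);

        System.out.println(json);
        System.out.println(asMap.get("queue"));                                               // default
        System.out.println(PublicUtil.classConvter(Integer.class, asMap.get("parallelism"))); // 2
    }
}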
--------------------------------------------------------------------------------
/spark-k8s-submiter/src/main/java/cn/todd/spark/entity/JobParamsInfo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package cn.todd.spark.entity;
20 |
21 | import java.util.Properties;
22 |
23 | /**
24 | *
25 | * Parameters that job execution depends on.
26 | * Date: 2020/6/14
27 | *
28 | * @author todd5167
29 | */
30 | public class JobParamsInfo {
31 | /** Application name **/
32 | private String appName;
33 |
34 | /** Main class **/
35 | private String mainClass;
36 |
37 | /** Local path of the executable jar **/
38 | private String runJarPath;
39 |
40 | /** Arguments passed to the executable jar **/
41 | private String execArgs;
42 |
43 | /** hadoopConf directory **/
44 | private String hadoopConfDir;
45 |
46 | /** Local path of the kubeConfig file **/
47 | private String kubeConfig;
48 |
49 | /** Image name to pull **/
50 | private String imageName;
51 |
52 | /** HADOOP_USER_NAME used when accessing HDFS **/
53 | private String hadoopUserName;
54 |
55 | /** Spark related configuration **/
56 | private Properties confProperties;
57 |
58 | private JobParamsInfo(String appName, String mainClass, String runJarPath, String execArgs, String hadoopConfDir,
59 | String kubeConfig, Properties confProperties, String imageName, String hadoopUserName) {
60 | this.appName = appName;
61 | this.mainClass = mainClass;
62 | this.runJarPath = runJarPath;
63 | this.kubeConfig = kubeConfig;
64 | this.execArgs = execArgs;
65 | this.hadoopConfDir = hadoopConfDir;
67 | this.confProperties = confProperties;
68 | this.imageName = imageName;
69 | this.hadoopUserName = hadoopUserName;
70 | }
71 |
72 | public String getAppName() {
73 | return appName;
74 | }
75 |
76 | public String getMainClass() {
77 | return mainClass;
78 | }
79 |
80 | public String getRunJarPath() {
81 | return runJarPath;
82 | }
83 |
84 | public String getExecArgs() {
85 | return execArgs;
86 | }
87 |
88 | public String getHadoopConfDir() {
89 | return hadoopConfDir;
90 | }
91 |
92 | public String getKubeConfig() {
93 | return kubeConfig;
94 | }
95 |
96 | public Properties getConfProperties() {
97 | return confProperties;
98 | }
99 |
100 | public String getImageName() {
101 | return imageName;
102 | }
103 |
104 | public String getHadoopUserName() {
105 | return hadoopUserName;
106 | }
107 |
108 | public static Builder builder() {
109 | return new Builder();
110 | }
111 |
112 |
113 | public static class Builder {
114 | private String appName;
115 | private String mainClass;
116 | private String runJarPath;
117 | private String execArgs;
118 | private String hadoopConfDir;
119 | private String kubeConfig;
120 | private String imageName;
121 | private String hadoopUserName;
122 | private Properties confProperties;
123 |
124 |
125 | public Builder setAppName(String appName) {
126 | this.appName = appName;
127 | return this;
128 | }
129 |
130 | public Builder setMainClass(String mainClass) {
131 | this.mainClass = mainClass;
132 | return this;
133 | }
134 |
135 | public Builder setRunJarPath(String runJarPath) {
136 | this.runJarPath = runJarPath;
137 | return this;
138 | }
139 |
140 | public Builder setExecArgs(String execArgs) {
141 | this.execArgs = execArgs;
142 | return this;
143 | }
144 |
145 | public Builder setHadoopConfDir(String hadoopConfDir) {
146 | this.hadoopConfDir = hadoopConfDir;
147 | return this;
148 | }
149 |
150 | public Builder setKubeConfig(String kubeConfig) {
151 | this.kubeConfig = kubeConfig;
152 | return this;
153 | }
154 |
155 | public Builder setConfProperties(Properties confProperties) {
156 | this.confProperties = confProperties;
157 | return this;
158 | }
159 |
160 | public Builder setImageName(String imageName) {
161 | this.imageName = imageName;
162 | return this;
163 | }
164 |
165 | public Builder setHadoopUserName(String hadoopUserName) {
166 | this.hadoopUserName = hadoopUserName;
167 | return this;
168 | }
169 |
170 | public JobParamsInfo build() {
171 | return new JobParamsInfo(appName, mainClass, runJarPath, execArgs, hadoopConfDir, kubeConfig,
172 | confProperties, imageName, hadoopUserName);
173 | }
174 | }
175 |
176 | }
177 |
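A builder usage sketch; every value is a placeholder.

import java.util.Properties;

import cn.todd.spark.entity.JobParamsInfo;

public class JobParamsInfoExample {
    public static void main(String[] args) {
        Properties sparkConf = new Properties();
        sparkConf.setProperty("spark.executor.instances", "2");   // illustrative spark conf

        JobParamsInfo jobParamsInfo = JobParamsInfo.builder()
                .setAppName("spark-k8s-wordcount")                          // placeholders throughout
                .setMainClass("com.example.WordCount")
                .setRunJarPath("local:///opt/spark/examples/wordcount.jar")
                .setHadoopConfDir("/etc/hadoop/conf")
                .setKubeConfig("/root/.kube/config")
                .setImageName("registry.example.com/spark:2.1.3")
                .setHadoopUserName("hdfs")
                .setConfProperties(sparkConf)
                .build();

        System.out.println(jobParamsInfo.getAppName());
    }
}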
--------------------------------------------------------------------------------
/examples/spark-sql-proxy/src/main/java/cn/todd/spark/StringUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package cn.todd.spark;
20 |
21 | import java.io.ByteArrayInputStream;
22 | import java.io.ByteArrayOutputStream;
23 | import java.io.IOException;
24 | import java.util.ArrayList;
25 | import java.util.List;
26 | import java.util.zip.ZipEntry;
27 | import java.util.zip.ZipInputStream;
28 | import java.util.zip.ZipOutputStream;
29 |
30 | /**
31 | *
32 | * Date: 2020/6/14
33 | * @author maqi
34 | */
35 | public class StringUtils {
36 | /**
37 | * Compress a string with zip.
38 | * @param str the text to compress
39 | * @return the compressed text, Base64 encoded
40 | */
41 | public static final String zip(String str) {
42 | if (str == null)
43 | return null;
44 | byte[] compressed;
45 | ByteArrayOutputStream out = null;
46 | ZipOutputStream zout = null;
47 | String compressedStr = null;
48 | try {
49 | out = new ByteArrayOutputStream();
50 | zout = new ZipOutputStream(out);
51 | zout.putNextEntry(new ZipEntry("0"));
52 | zout.write(str.getBytes());
53 | zout.closeEntry();
54 | compressed = out.toByteArray();
55 | compressedStr = new sun.misc.BASE64Encoder().encodeBuffer(compressed);
56 | } catch (IOException e) {
57 | compressed = null;
58 | compressedStr = str;
59 | } finally {
60 | if (zout != null) {
61 | try {
62 | zout.close();
63 | } catch (IOException e) {
64 | }
65 | }
66 | if (out != null) {
67 | try {
68 | out.close();
69 | } catch (IOException e) {
70 | }
71 | }
72 | }
73 | return compressedStr;
74 | }
75 |
76 | /**
77 | * Decompress a string that was compressed with zip.
78 | * @param compressedStr the compressed (Base64 encoded) text
79 | * @return the decompressed string
80 | */
81 | public static final String unzip(String compressedStr) {
82 | if (compressedStr == null) {
83 | return null;
84 | }
85 |
86 | ByteArrayOutputStream out = null;
87 | ByteArrayInputStream in = null;
88 | ZipInputStream zin = null;
89 | String decompressed = null;
90 | try {
91 | byte[] compressed = new sun.misc.BASE64Decoder().decodeBuffer(compressedStr);
92 | out = new ByteArrayOutputStream();
93 | in = new ByteArrayInputStream(compressed);
94 | zin = new ZipInputStream(in);
95 | zin.getNextEntry();
96 | byte[] buffer = new byte[1024];
97 | int offset = -1;
98 | while ((offset = zin.read(buffer)) != -1) {
99 | out.write(buffer, 0, offset);
100 | }
101 | decompressed = out.toString();
102 | } catch (IOException e) {
103 | decompressed = null;
104 | } finally {
105 | if (zin != null) {
106 | try {
107 | zin.close();
108 | } catch (IOException e) {
109 | }
110 | }
111 | if (in != null) {
112 | try {
113 | in.close();
114 | } catch (IOException e) {
115 | }
116 | }
117 | if (out != null) {
118 | try {
119 | out.close();
120 | } catch (IOException e) {
121 | }
122 | }
123 | }
124 | return decompressed;
125 | }
126 |
127 | /**
128 | * Split a string on the given delimiter, ignoring delimiters that appear inside quotes.
129 | * @param str the string to split
130 | * @param delimiter the delimiter character
131 | * @return the list of tokens
132 | */
133 | public static List<String> splitIgnoreQuota(String str, char delimiter) {
134 | List<String> tokensList = new ArrayList<>();
135 | boolean inQuotes = false;
136 | boolean inSingleQuotes = false;
137 | StringBuilder b = new StringBuilder();
138 | char[] chars = str.toCharArray();
139 | int idx = 0;
140 | for (char c : chars) {
141 | char flag = 0;
142 | if (idx > 0) {
143 | flag = chars[idx - 1];
144 | }
145 | if (c == delimiter) {
146 | if (inQuotes) {
147 | b.append(c);
148 | } else if (inSingleQuotes) {
149 | b.append(c);
150 | } else {
151 | tokensList.add(b.toString());
152 | b = new StringBuilder();
153 | }
154 | } else if (c == '\"' && '\\' != flag && !inSingleQuotes) {
155 | inQuotes = !inQuotes;
156 | b.append(c);
157 | } else if (c == '\'' && '\\' != flag && !inQuotes) {
158 | inSingleQuotes = !inSingleQuotes;
159 | b.append(c);
160 | } else {
161 | b.append(c);
162 | }
163 | idx++;
164 | }
165 |
166 | tokensList.add(b.toString());
167 |
168 | return tokensList;
169 | }
170 | }
171 |
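A usage sketch of the helpers in this class; the SQL strings are only illustrations.

import java.util.List;

import cn.todd.spark.StringUtils;

public class StringUtilsExample {
    public static void main(String[] args) {
        // zip/unzip round trip: the compressed form is a Base64 string.
        String sql = "select name, count(*) from t where name = 'a;b' group by name";
        String packed = StringUtils.zip(sql);
        String unpacked = StringUtils.unzip(packed);
        System.out.println(sql.equals(unpacked));   // expected: true

        // Split on ';' while ignoring the ';' inside the quoted literal.
        List<String> parts = StringUtils.splitIgnoreQuota("select 1; select 'a;b'; select 2", ';');
        System.out.println(parts.size());           // 3 statements
    }
}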
--------------------------------------------------------------------------------
/spark-k8s-submiter/src/main/scala/org/apache/spark/deploy/k8s/SparkKubernetesClientFactory.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | package org.apache.spark.deploy.k8s
18 |
19 | import java.io._
20 |
21 | import com.google.common.base.Charsets
22 | import com.google.common.io.Files
23 | import io.fabric8.kubernetes.client._
24 | import io.fabric8.kubernetes.client.utils.HttpClientUtils
25 | import okhttp3.Dispatcher
26 | import org.apache.spark.SparkConf
27 | import org.apache.spark.deploy.k8s.Config._
28 | import org.apache.spark.util.ThreadUtils
29 | import org.apache.spark.deploy.k8s.ExtendConfig._
30 |
31 | import scala.io.Source
32 |
33 | /**
34 | * Spark-opinionated builder for Kubernetes clients. It uses a prefix plus common suffixes to
35 | * parse configuration keys, similar to the manner in which Spark's SecurityManager parses SSL
36 | * options for different components.
37 | */
38 | private[spark] object SparkKubernetesClientFactory {
39 |
40 | def createKubernetesClient(
41 | master: String,
42 | namespace: Option[String],
43 | kubernetesAuthConfPrefix: String,
44 | sparkConf: SparkConf,
45 | defaultServiceAccountToken: Option[File],
46 | defaultServiceAccountCaCert: Option[File]): KubernetesClient = {
47 | val oauthTokenFileConf = s"$kubernetesAuthConfPrefix.$OAUTH_TOKEN_FILE_CONF_SUFFIX"
48 | val oauthTokenConf = s"$kubernetesAuthConfPrefix.$OAUTH_TOKEN_CONF_SUFFIX"
49 | val oauthTokenFile = sparkConf.getOption(oauthTokenFileConf)
50 | .map(new File(_))
51 | .orElse(defaultServiceAccountToken)
52 | val oauthTokenValue = sparkConf.getOption(oauthTokenConf)
53 | KubernetesUtils.requireNandDefined(
54 | oauthTokenFile,
55 | oauthTokenValue,
56 | s"Cannot specify OAuth token through both a file $oauthTokenFileConf and a " +
57 | s"value $oauthTokenConf.")
58 |
59 | val caCertFile = sparkConf
60 | .getOption(s"$kubernetesAuthConfPrefix.$CA_CERT_FILE_CONF_SUFFIX")
61 | .orElse(defaultServiceAccountCaCert.map(_.getAbsolutePath))
62 | val clientKeyFile = sparkConf
63 | .getOption(s"$kubernetesAuthConfPrefix.$CLIENT_KEY_FILE_CONF_SUFFIX")
64 | val clientCertFile = sparkConf
65 | .getOption(s"$kubernetesAuthConfPrefix.$CLIENT_CERT_FILE_CONF_SUFFIX")
66 | val dispatcher = new Dispatcher(
67 | ThreadUtils.newDaemonCachedThreadPool("kubernetes-dispatcher"))
68 | val config = new ConfigBuilder()
69 | .withApiVersion("v1")
70 | .withMasterUrl(master)
71 | .withWebsocketPingInterval(0)
72 | .withOption(oauthTokenValue) {
73 | (token, configBuilder) => configBuilder.withOauthToken(token)
74 | }.withOption(oauthTokenFile) {
75 | (file, configBuilder) =>
76 | configBuilder.withOauthToken(Files.toString(file, Charsets.UTF_8))
77 | }.withOption(caCertFile) {
78 | (file, configBuilder) => configBuilder.withCaCertFile(file)
79 | }.withOption(clientKeyFile) {
80 | (file, configBuilder) => configBuilder.withClientKeyFile(file)
81 | }.withOption(clientCertFile) {
82 | (file, configBuilder) => configBuilder.withClientCertFile(file)
83 | }.withOption(namespace) {
84 | (ns, configBuilder) => configBuilder.withNamespace(ns)
85 | }.build()
86 | val baseHttpClient = HttpClientUtils.createHttpClient(config)
87 | val httpClientWithCustomDispatcher = baseHttpClient.newBuilder()
88 | .dispatcher(dispatcher)
89 | .build()
90 | new DefaultKubernetesClient(httpClientWithCustomDispatcher, config)
91 | }
92 |
93 |
94 | def createKubernetesClient(sparkConf: SparkConf, kubernetesConf: KubernetesConf[KubernetesDriverSpecificConf]): KubernetesClient = {
95 | val kubeConfig = sparkConf.get(KUBERNETES_KUBE_CONFIG_KEY)
96 | val namespace = sparkConf.get(KUBERNETES_NAMESPACE)
97 | val config = io.fabric8.kubernetes.client.Config.fromKubeconfig(getContentFromFile(kubeConfig))
98 |
99 | config.setNamespace(namespace)
100 | // backfill spark.master from the kube config
101 | val masterUrl = config.getMasterUrl
102 | kubernetesConf.sparkConf.set("spark.master", s"k8s://${masterUrl}")
103 |
104 | val dispatcher = new Dispatcher(
105 | ThreadUtils.newDaemonCachedThreadPool("kubernetes-dispatcher"))
106 |
107 | val baseHttpClient = HttpClientUtils.createHttpClient(config)
108 | val httpClientWithCustomDispatcher = baseHttpClient.newBuilder()
109 | .dispatcher(dispatcher)
110 | .build()
111 |
112 | new DefaultKubernetesClient(httpClientWithCustomDispatcher, config)
113 | }
114 |
115 | def getContentFromFile(filePath: String): String = {
116 | Source.fromFile(filePath).mkString
117 | }
118 |
119 |
120 | private implicit class OptionConfigurableConfigBuilder(val configBuilder: ConfigBuilder)
121 | extends AnyVal {
122 |
123 | def withOption[T]
124 | (option: Option[T])
125 | (configurator: ((T, ConfigBuilder) => ConfigBuilder)): ConfigBuilder = {
126 | option.map { opt =>
127 | configurator(opt, configBuilder)
128 | }.getOrElse(configBuilder)
129 | }
130 | }
131 |
132 | }
133 |
--------------------------------------------------------------------------------
/flink-yarn-submiter-service/src/main/java/cn/todd/flink/client/ClientProxy.java:
--------------------------------------------------------------------------------
1 | package cn.todd.flink.client;
2 |
3 | import cn.todd.flink.classloader.TemporaryClassLoaderContext;
4 | import cn.todd.flink.entity.CheckpointInfo;
5 | import cn.todd.flink.entity.ParamsInfo;
6 | import cn.todd.flink.entity.ResultInfo;
7 | import cn.todd.flink.enums.ETaskStatus;
8 |
9 | import java.util.List;
10 |
11 | /** Loads and invokes the ClusterClient by switching the context classloader. */
12 | public class ClientProxy implements IClusterClient {
13 | IClusterClient proxyClient;
14 |
15 | public ClientProxy(IClusterClient proxyClient) {
16 | this.proxyClient = proxyClient;
17 | }
18 |
19 | @Override
20 | public ResultInfo submitFlinkJob(ParamsInfo jobParamsInfo) throws Exception {
21 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
22 | try (TemporaryClassLoaderContext ignored =
23 | TemporaryClassLoaderContext.of(clientClassLoader)) {
24 | return proxyClient.submitFlinkJob(jobParamsInfo);
25 | }
26 | }
27 |
28 | @Override
29 | public ResultInfo submitFlinkJobWithKerberos(ParamsInfo jobParamsInfo) throws Exception {
30 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
31 | try (TemporaryClassLoaderContext ignored =
32 | TemporaryClassLoaderContext.of(clientClassLoader)) {
33 | return proxyClient.submitFlinkJobWithKerberos(jobParamsInfo);
34 | }
35 | }
36 |
37 | @Override
38 | public ResultInfo killYarnJob(ParamsInfo jobParamsInfo) throws Exception {
39 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
40 | try (TemporaryClassLoaderContext ignored =
41 | TemporaryClassLoaderContext.of(clientClassLoader)) {
42 | return proxyClient.killYarnJob(jobParamsInfo);
43 | }
44 | }
45 |
46 | @Override
47 | public ResultInfo killYarnJobWithKerberos(ParamsInfo jobParamsInfo) throws Exception {
48 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
49 | try (TemporaryClassLoaderContext ignored =
50 | TemporaryClassLoaderContext.of(clientClassLoader)) {
51 | return proxyClient.killYarnJobWithKerberos(jobParamsInfo);
52 | }
53 | }
54 |
55 | @Override
56 | public ETaskStatus getYarnJobStatus(ParamsInfo jobParamsInfo) throws Exception {
57 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
58 | try (TemporaryClassLoaderContext ignored =
59 | TemporaryClassLoaderContext.of(clientClassLoader)) {
60 | return proxyClient.getYarnJobStatus(jobParamsInfo);
61 | }
62 | }
63 |
64 | @Override
65 | public ETaskStatus getYarnJobStatusWithKerberos(ParamsInfo jobParamsInfo) throws Exception {
66 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
67 | try (TemporaryClassLoaderContext ignored =
68 | TemporaryClassLoaderContext.of(clientClassLoader)) {
69 | return proxyClient.getYarnJobStatusWithKerberos(jobParamsInfo);
70 | }
71 | }
72 |
73 | @Override
74 | public List<CheckpointInfo> getCheckpointPaths(ParamsInfo jobParamsInfo) throws Exception {
75 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
76 | try (TemporaryClassLoaderContext ignored =
77 | TemporaryClassLoaderContext.of(clientClassLoader)) {
78 | return proxyClient.getCheckpointPaths(jobParamsInfo);
79 | }
80 | }
81 |
82 | @Override
83 | public List<CheckpointInfo> getCheckpointPathsWithKerberos(ParamsInfo jobParamsInfo)
84 | throws Exception {
85 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
86 | try (TemporaryClassLoaderContext ignored =
87 | TemporaryClassLoaderContext.of(clientClassLoader)) {
88 | return proxyClient.getCheckpointPathsWithKerberos(jobParamsInfo);
89 | }
90 | }
91 |
92 | @Override
93 | public String printFinishedLogToFile(ParamsInfo jobParamsInfo) throws Exception {
94 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
95 | try (TemporaryClassLoaderContext ignored =
96 | TemporaryClassLoaderContext.of(clientClassLoader)) {
97 | return proxyClient.printFinishedLogToFile(jobParamsInfo);
98 | }
99 | }
100 |
101 | @Override
102 | public String printFinishedLogToFileWithKerberos(ParamsInfo jobParamsInfo) throws Exception {
103 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
104 | try (TemporaryClassLoaderContext ignored =
105 | TemporaryClassLoaderContext.of(clientClassLoader)) {
106 | return proxyClient.printFinishedLogToFileWithKerberos(jobParamsInfo);
107 | }
108 | }
109 |
110 | @Override
111 | public ResultInfo cancelFlinkJob(ParamsInfo jobParamsInfo) throws Exception {
112 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
113 | try (TemporaryClassLoaderContext ignored =
114 | TemporaryClassLoaderContext.of(clientClassLoader)) {
115 | return proxyClient.cancelFlinkJob(jobParamsInfo);
116 | }
117 | }
118 |
119 | @Override
120 | public ResultInfo cancelFlinkJobDoSavepoint(ParamsInfo jobParamsInfo) throws Exception {
121 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
122 | try (TemporaryClassLoaderContext ignored =
123 | TemporaryClassLoaderContext.of(clientClassLoader)) {
124 | return proxyClient.cancelFlinkJobDoSavepoint(jobParamsInfo);
125 | }
126 | }
127 |
128 | @Override
129 | public ResultInfo cancelFlinkJobWithKerberos(ParamsInfo jobParamsInfo) throws Exception {
130 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
131 | try (TemporaryClassLoaderContext ignored =
132 | TemporaryClassLoaderContext.of(clientClassLoader)) {
133 | return proxyClient.cancelFlinkJobWithKerberos(jobParamsInfo);
134 | }
135 | }
136 |
137 | @Override
138 | public ResultInfo cancelFlinkJobDoSavepointWithKerberos(ParamsInfo jobParamsInfo)
139 | throws Exception {
140 | ClassLoader clientClassLoader = proxyClient.getClass().getClassLoader();
141 | try (TemporaryClassLoaderContext ignored =
142 | TemporaryClassLoaderContext.of(clientClassLoader)) {
143 | return proxyClient.cancelFlinkJobDoSavepointWithKerberos(jobParamsInfo);
144 | }
145 | }
146 | }
147 |
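A sketch of one plausible wiring: the implementation jar, its class name, and the parent-first packages below are assumptions, since the real bootstrap code is not shown in this dump.

import java.io.File;
import java.net.URL;

import cn.todd.flink.classloader.ChildFirstClassLoader;
import cn.todd.flink.client.ClientProxy;
import cn.todd.flink.client.IClusterClient;

public class ClientProxyExample {
    public static void main(String[] args) throws Exception {
        // Jar holding the concrete IClusterClient implementation; the path is a placeholder.
        URL[] jars = {new File("/opt/submitter/flink-yarn-submiter.jar").toURI().toURL()};
        // Keep the shared API packages parent-first so the cast below succeeds (assumed layout).
        String[] parentFirstPatterns = {
                "cn.todd.flink.client.IClusterClient", "cn.todd.flink.entity.", "cn.todd.flink.enums."
        };
        ChildFirstClassLoader classLoader =
                new ChildFirstClassLoader(
                        jars, ClientProxyExample.class.getClassLoader(), parentFirstPatterns);

        IClusterClient delegate =
                (IClusterClient)
                        classLoader
                                .loadClass("cn.todd.flink.client.YarnClusterClientImpl") // hypothetical FQN
                                .getDeclaredConstructor()
                                .newInstance();

        // Every call on the proxy now runs with the delegate's classloader as the context classloader.
        IClusterClient client = new ClientProxy(delegate);
        System.out.println(client);
    }
}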
--------------------------------------------------------------------------------
/spark-yarn-submiter/src/main/java/cn/todd/spark/entity/JobParamsInfo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package cn.todd.spark.entity;
20 |
21 | import java.util.Properties;
22 |
23 | /**
24 | *
25 | * Parameters that job execution depends on.
26 | * Date: 2020/6/14
27 | *
28 | * @author todd5167
29 | */
30 | public class JobParamsInfo {
31 | /** Application name **/
32 | private String appName;
33 |
34 | /** Main class **/
35 | private String mainClass;
36 |
37 | /** Local path of the executable jar **/
38 | private String runJarPath;
39 |
40 | /** HDFS directory the executable jar is uploaded to **/
41 | private String jarHdfsDir;
42 |
43 | /** Arguments passed to the executable jar **/
44 | private String execArgs;
45 |
46 | /** HDFS path of the archive **/
47 | private String archivePath;
48 |
49 | /** yarnConf directory **/
50 | private String yarnConfDir;
51 |
52 | /** yarn queue name **/
53 | private String queue;
54 |
55 | /** Spark related configuration **/
56 | private Properties confProperties;
57 |
58 | private String principal;
59 | private String keytab;
60 |
61 | private String openKerberos;
62 |
63 |
64 | private JobParamsInfo(String appName, String mainClass, String runJarPath, String jarHdfsDir, String execArgs, String openKerberos,
65 | String archivePath, String yarnConfDir, String queue, Properties confProperties, String principal, String keytab) {
66 | this.appName = appName;
67 | this.mainClass = mainClass;
68 | this.runJarPath = runJarPath;
69 | this.queue = queue;
70 | this.jarHdfsDir = jarHdfsDir;
71 | this.archivePath = archivePath;
72 |
73 | this.yarnConfDir = yarnConfDir;
74 | this.execArgs = execArgs;
75 | this.confProperties = confProperties;
76 |
77 | this.principal = principal;
78 | this.keytab = keytab;
79 | this.openKerberos = openKerberos;
80 | }
81 |
82 | public String getAppName() {
83 | return appName;
84 | }
85 |
86 | public String getMainClass() {
87 | return mainClass;
88 | }
89 |
90 | public String getRunJarPath() {
91 | return runJarPath;
92 | }
93 |
94 | public String getJarHdfsDir() {
95 | return jarHdfsDir;
96 | }
97 |
98 | public String getExecArgs() {
99 | return execArgs;
100 | }
101 |
102 | public String getArchivePath() {
103 | return archivePath;
104 | }
105 |
106 | public String getYarnConfDir() {
107 | return yarnConfDir;
108 | }
109 |
110 | public String getQueue() {
111 | return queue;
112 | }
113 |
114 | public Properties getConfProperties() {
115 | return confProperties;
116 | }
117 |
118 | public String getPrincipal() {
119 | return principal;
120 | }
121 |
122 | public String getKeytab() {
123 | return keytab;
124 | }
125 |
126 | public String getOpenKerberos() {
127 | return openKerberos;
128 | }
129 |
130 | public static JobParamsInfo.Builder builder() {
131 | return new JobParamsInfo.Builder();
132 | }
133 |
134 |
135 | public static class Builder {
136 | private String appName;
137 | private String mainClass;
138 | private String runJarPath;
139 | private String jarHdfsDir;
140 | private String execArgs;
141 | private String archivePath;
142 | private String yarnConfDir;
143 | private String queue;
144 | private String principal;
145 | private String keytab;
146 | private String openKerberos;
147 | private Properties confProperties;
148 |
149 | public JobParamsInfo.Builder setAppName(String appName) {
150 | this.appName = appName;
151 | return this;
152 | }
153 |
154 | public JobParamsInfo.Builder setMainClass(String mainClass) {
155 | this.mainClass = mainClass;
156 | return this;
157 | }
158 |
159 | public JobParamsInfo.Builder setJarHdfsDir(String jarHdfsDir) {
160 | this.jarHdfsDir = jarHdfsDir;
161 | return this;
162 | }
163 |
164 | public JobParamsInfo.Builder setArchivePath(String archivePath) {
165 | this.archivePath = archivePath;
166 | return this;
167 | }
168 |
169 | public JobParamsInfo.Builder setPrincipal(String principal) {
170 | this.principal = principal;
171 | return this;
172 | }
173 |
174 | public JobParamsInfo.Builder setKeytab(String keytab) {
175 | this.keytab = keytab;
176 | return this;
177 | }
178 |
179 | public JobParamsInfo.Builder setQueue(String queue) {
180 | this.queue = queue;
181 | return this;
182 | }
183 |
184 | public JobParamsInfo.Builder setYarnConfDir(String yarnConfDir) {
185 | this.yarnConfDir = yarnConfDir;
186 | return this;
187 | }
188 |
189 | public JobParamsInfo.Builder setExecArgs(String execArgs) {
190 | this.execArgs = execArgs;
191 | return this;
192 | }
193 |
194 | public JobParamsInfo.Builder setConfProperties(Properties confProperties) {
195 | this.confProperties = confProperties;
196 | return this;
197 | }
198 |
199 | public JobParamsInfo.Builder setRunJarPath(String runJarPath) {
200 | this.runJarPath = runJarPath;
201 | return this;
202 | }
203 |
204 | public JobParamsInfo.Builder setOpenKerberos(String openKerberos) {
205 | this.openKerberos = openKerberos;
206 | return this;
207 | }
208 |
209 | public JobParamsInfo build() {
210 | return new JobParamsInfo(appName, mainClass, runJarPath, jarHdfsDir, execArgs, openKerberos,
211 | archivePath, yarnConfDir, queue, confProperties, principal, keytab);
212 | }
213 | }
214 |
215 | }
216 |
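A typical use of the builder above; only the setter names come from the class, all concrete values below are illustrative:

    import java.util.Properties;
    import cn.todd.spark.entity.JobParamsInfo;

    public class JobParamsInfoDemo {
        public static void main(String[] args) {
            Properties sparkConf = new Properties();
            sparkConf.setProperty("spark.executor.instances", "2");  // example Spark setting

            JobParamsInfo paramsInfo = JobParamsInfo.builder()
                    .setAppName("spark-batch-demo")            // example application name
                    .setMainClass("cn.todd.spark.Main")         // example entry class inside the jar
                    .setRunJarPath("/tmp/spark-demo.jar")       // example local jar path
                    .setJarHdfsDir("hdfs:///tmp/sparkjars")     // example HDFS upload directory
                    .setExecArgs("--date 2020-06-14")
                    .setYarnConfDir("/opt/hadoop/etc/hadoop")
                    .setQueue("default")
                    .setOpenKerberos("false")
                    .setConfProperties(sparkConf)
                    .build();

            System.out.println(paramsInfo.getAppName());
        }
    }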
--------------------------------------------------------------------------------
/examples/flink-sql-executor/src/main/java/cn/todd/flink/parser/SqlCommandParser.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package cn.todd.flink.parser;
20 |
21 | import cn.todd.flink.utils.TalStringUtil;
22 | import org.apache.commons.lang3.StringUtils;
23 |
24 | import java.util.ArrayList;
25 | import java.util.Arrays;
26 | import java.util.List;
27 | import java.util.Objects;
28 | import java.util.Optional;
29 | import java.util.function.Function;
30 | import java.util.regex.Matcher;
31 | import java.util.regex.Pattern;
32 |
33 | /**
34 | * Simple parser for determining the type of command and its parameters.
35 | */
36 | public class SqlCommandParser {
37 |
38 | private static final char SQL_DELIMITER = ';';
39 |
40 | public static List<SqlCommandCall> parseSqlText(String sql) {
41 | sql = TalStringUtil.dealSqlComment(sql)
42 | .replaceAll("\r\n", " ")
43 | .replaceAll("\n", " ")
44 | .replace("\t", " ")
45 | .trim();
46 | List<String> lines = TalStringUtil.splitIgnoreQuota(sql, SQL_DELIMITER);
47 | List<SqlCommandCall> calls = new ArrayList<>();
48 |
49 | for (String line : lines) {
50 | if (StringUtils.isBlank(line)) {
51 | continue;
52 | }
53 | Optional<SqlCommandCall> optionalCall = parse(line);
54 | if (optionalCall.isPresent()) {
55 | calls.add(optionalCall.get());
56 | } else {
57 | throw new RuntimeException("Unsupported command '" + line.toString() + "'");
58 | }
59 | }
60 | return calls;
61 | }
62 |
63 |
64 | public static Optional<SqlCommandCall> parse(String stmt) {
65 | // normalize
66 | stmt = stmt.trim();
67 | // parse
68 | for (SqlCommand cmd : SqlCommand.values()) {
69 | final Matcher matcher = cmd.pattern.matcher(stmt);
70 | if (matcher.matches()) {
71 | final String[] groups = new String[matcher.groupCount()];
72 | for (int i = 0; i < groups.length; i++) {
73 | groups[i] = matcher.group(i + 1);
74 | }
75 |
76 | // options could use a Map
77 | return cmd.operandConverter.apply(groups)
78 | .map((operands) -> new SqlCommandCall(cmd, operands));
79 | }
80 | }
81 | return Optional.empty();
82 | }
83 |
84 | // --------------------------------------------------------------------------------------------
85 |
86 | private static final Function<String[], Optional<String[]>> NO_OPERANDS =
87 | (operands) -> Optional.of(new String[0]);
88 |
89 | private static final Function<String[], Optional<String[]>> SINGLE_OPERAND =
90 | (operands) -> Optional.of(new String[]{operands[0]});
91 |
92 | private static final int DEFAULT_PATTERN_FLAGS = Pattern.CASE_INSENSITIVE | Pattern.DOTALL;
93 |
94 | /**
95 | * Supported SQL commands.
96 | */
97 | public enum SqlCommand {
98 | INSERT_INTO(
99 | "(INSERT\\s+INTO.*)",
100 | SINGLE_OPERAND),
101 |
102 | CREATE_TABLE(
103 | "(CREATE\\s+TABLE.*)",
104 | SINGLE_OPERAND),
105 | CREATE_VIEW(
106 | "(CREATE\\s+VIEW\\s+(\\S+)\\s+AS.*)",
107 | SINGLE_OPERAND),
108 | SET(
109 | "SET(\\s+(\\S+)\\s*=(.*))?", // whitespace is only ignored on the left side of '='
110 | (operands) -> {
111 | if (operands.length < 3) {
112 | return Optional.empty();
113 | } else if (operands[0] == null) {
114 | return Optional.of(new String[0]);
115 | }
116 | return Optional.of(new String[]{operands[1], operands[2]});
117 | });
118 |
119 | public final Pattern pattern;
120 | public final Function<String[], Optional<String[]>> operandConverter;
121 |
122 | SqlCommand(String matchingRegex, Function<String[], Optional<String[]>> operandConverter) {
123 | this.pattern = Pattern.compile(matchingRegex, DEFAULT_PATTERN_FLAGS);
124 | this.operandConverter = operandConverter;
125 | }
126 |
127 | @Override
128 | public String toString() {
129 | return super.toString().replace('_', ' ');
130 | }
131 |
132 | public boolean hasOperands() {
133 | return operandConverter != NO_OPERANDS;
134 | }
135 | }
136 |
137 | /**
138 | * Call of SQL command with operands and command type.
139 | */
140 | public static class SqlCommandCall {
141 | public final SqlCommand command;
142 | public final String[] operands;
143 |
144 | public SqlCommandCall(SqlCommand command, String[] operands) {
145 | this.command = command;
146 | this.operands = operands;
147 | }
148 |
149 | public SqlCommandCall(SqlCommand command) {
150 | this(command, new String[0]);
151 | }
152 |
153 | @Override
154 | public boolean equals(Object o) {
155 | if (this == o) {
156 | return true;
157 | }
158 | if (o == null || getClass() != o.getClass()) {
159 | return false;
160 | }
161 | SqlCommandCall that = (SqlCommandCall) o;
162 | return command == that.command && Arrays.equals(operands, that.operands);
163 | }
164 |
165 | @Override
166 | public int hashCode() {
167 | int result = Objects.hash(command);
168 | result = 31 * result + Arrays.hashCode(operands);
169 | return result;
170 | }
171 |
172 | @Override
173 | public String toString() {
174 | return command + "(" + Arrays.toString(operands) + ")";
175 | }
176 | }
177 | }
178 |
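As a rough usage sketch (the statements are illustrative), parseSqlText strips "--" comments, normalizes whitespace, splits on ';' outside quotes, then matches each statement against the SqlCommand patterns:

    import java.util.List;
    import cn.todd.flink.parser.SqlCommandParser;

    public class SqlCommandParserDemo {
        public static void main(String[] args) {
            // Illustrative script; comments are stripped and statements are split on ';'.
            String script =
                    "SET table.exec.state.ttl = 60000;\n"
                    + "CREATE TABLE src (id INT) WITH ('connector' = 'randomdata');\n"
                    + "INSERT INTO sink SELECT id FROM src -- trailing comment";

            List<SqlCommandParser.SqlCommandCall> calls = SqlCommandParser.parseSqlText(script);
            // Roughly: SET([table.exec.state.ttl,  60000]), CREATE TABLE([...]), INSERT INTO([...])
            calls.forEach(System.out::println);
        }
    }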
--------------------------------------------------------------------------------
/examples/flink-sql-executor/src/main/java/cn/todd/flink/connectors/randomdata/factory/RandomDataTableSourceFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package cn.todd.flink.connectors.randomdata.factory;
20 |
21 | import cn.todd.flink.connectors.randomdata.source.RandomDataGenTableSource;
22 | import org.apache.flink.configuration.ConfigOption;
23 | import org.apache.flink.configuration.Configuration;
24 | import org.apache.flink.configuration.ReadableConfig;
25 | import org.apache.flink.streaming.api.functions.source.datagen.DataGenerator;
26 | import org.apache.flink.table.api.TableSchema;
27 | import org.apache.flink.table.api.ValidationException;
28 | import org.apache.flink.table.connector.source.DynamicTableSource;
29 | import org.apache.flink.table.factories.DynamicTableSourceFactory;
30 | import org.apache.flink.table.factories.FactoryUtil;
31 | import org.apache.flink.table.factories.datagen.DataGenTableSource;
32 | import org.apache.flink.table.factories.datagen.DataGeneratorContainer;
33 | import org.apache.flink.table.factories.datagen.RandomGeneratorVisitor;
34 | import org.apache.flink.table.factories.datagen.SequenceGeneratorVisitor;
35 | import org.apache.flink.table.types.DataType;
36 | import org.apache.flink.table.utils.TableSchemaUtils;
37 |
38 | import java.util.HashSet;
39 | import java.util.Set;
40 |
41 | import static org.apache.flink.configuration.ConfigOptions.key;
42 | import static org.apache.flink.table.factories.FactoryUtil.CONNECTOR;
43 |
44 | /**
45 | * reference {@link DataGenTableSource}
46 | */
47 | public class RandomDataTableSourceFactory implements DynamicTableSourceFactory {
48 | public static final String IDENTIFIER = "randomdata";
49 | public static final Long ROWS_PER_SECOND_DEFAULT_VALUE = 10000L;
50 |
51 | public static final ConfigOption<Long> ROWS_PER_SECOND =
52 | key("rows-per-second")
53 | .longType()
54 | .defaultValue(ROWS_PER_SECOND_DEFAULT_VALUE)
55 | .withDescription("Rows per second to control the emit rate.");
56 |
57 | public static final ConfigOption<Long> NUMBER_OF_ROWS =
58 | key("number-of-rows")
59 | .longType()
60 | .noDefaultValue()
61 | .withDescription(
62 | "Total number of rows to emit. By default, the source is unbounded.");
63 |
64 | public static final String FIELDS = "fields";
65 | public static final String KIND = "kind";
66 | public static final String START = "start";
67 | public static final String END = "end";
68 | public static final String MIN = "min";
69 | public static final String MAX = "max";
70 | public static final String LENGTH = "length";
71 |
72 | public static final String SEQUENCE = "sequence";
73 | public static final String RANDOM = "random";
74 |
75 | @Override
76 | public String factoryIdentifier() {
77 | return IDENTIFIER;
78 | }
79 |
80 | @Override
81 | public Set<ConfigOption<?>> requiredOptions() {
82 | return new HashSet<>();
83 | }
84 |
85 | @Override
86 | public Set<ConfigOption<?>> optionalOptions() {
87 | Set<ConfigOption<?>> options = new HashSet<>();
88 | options.add(ROWS_PER_SECOND);
89 | options.add(NUMBER_OF_ROWS);
90 | return options;
91 | }
92 |
93 | @Override
94 | public DynamicTableSource createDynamicTableSource(Context context) {
95 | Configuration options = new Configuration();
96 | context.getCatalogTable().getOptions().forEach(options::setString);
97 |
98 | TableSchema schema =
99 | TableSchemaUtils.getPhysicalSchema(context.getCatalogTable().getSchema());
100 |
101 | DataGenerator<?>[] fieldGenerators = new DataGenerator[schema.getFieldCount()];
102 | Set<ConfigOption<?>> optionalOptions = new HashSet<>();
103 |
104 | for (int i = 0; i < fieldGenerators.length; i++) {
105 | String name = schema.getFieldNames()[i];
106 | DataType type = schema.getFieldDataTypes()[i];
107 |
108 | ConfigOption<String> kind =
109 | key(FIELDS + "." + name + "." + KIND).stringType().defaultValue(RANDOM);
110 | DataGeneratorContainer container =
111 | createContainer(name, type, options.get(kind), options);
112 | fieldGenerators[i] = container.getGenerator();
113 |
114 | optionalOptions.add(kind);
115 | optionalOptions.addAll(container.getOptions());
116 | }
117 |
118 | FactoryUtil.validateFactoryOptions(requiredOptions(), optionalOptions, options);
119 |
120 | Set<String> consumedOptionKeys = new HashSet<>();
121 | consumedOptionKeys.add(CONNECTOR.key());
122 | consumedOptionKeys.add(ROWS_PER_SECOND.key());
123 | consumedOptionKeys.add(NUMBER_OF_ROWS.key());
124 | optionalOptions.stream().map(ConfigOption::key).forEach(consumedOptionKeys::add);
125 | FactoryUtil.validateUnconsumedKeys(
126 | factoryIdentifier(), options.keySet(), consumedOptionKeys);
127 |
128 | String name = context.getObjectIdentifier().toString();
129 | return new RandomDataGenTableSource(
130 | fieldGenerators,
131 | name,
132 | schema,
133 | options.get(ROWS_PER_SECOND),
134 | options.get(NUMBER_OF_ROWS));
135 | }
136 |
137 | private DataGeneratorContainer createContainer(
138 | String name, DataType type, String kind, ReadableConfig options) {
139 | switch (kind) {
140 | case RANDOM:
141 | return type.getLogicalType().accept(new RandomGeneratorVisitor(name, options));
142 | case SEQUENCE:
143 | return type.getLogicalType().accept(new SequenceGeneratorVisitor(name, options));
144 | default:
145 | throw new ValidationException("Unsupported generator kind: " + kind);
146 | }
147 | }
148 | }
149 |
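The factory mirrors Flink's datagen connector, so a table definition it could serve looks roughly like the DDL below; the table name, field names, and bounds are illustrative, while the option keys follow the fields.<name>.kind / start / end / min / max / length constants read above:

    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

    public class RandomDataSourceDemo {
        public static void main(String[] args) {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

            // Illustrative DDL for the 'randomdata' connector defined by this factory.
            tableEnv.executeSql(
                    "CREATE TABLE orders (\n"
                    + "  order_id BIGINT,\n"
                    + "  amount DOUBLE,\n"
                    + "  note STRING\n"
                    + ") WITH (\n"
                    + "  'connector' = 'randomdata',\n"
                    + "  'rows-per-second' = '100',\n"
                    + "  'number-of-rows' = '1000',\n"
                    + "  'fields.order_id.kind' = 'sequence',\n"
                    + "  'fields.order_id.start' = '1',\n"
                    + "  'fields.order_id.end' = '1000',\n"
                    + "  'fields.amount.min' = '1',\n"
                    + "  'fields.amount.max' = '100',\n"
                    + "  'fields.note.length' = '10'\n"
                    + ")");

            tableEnv.executeSql("SELECT * FROM orders").print();
        }
    }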
--------------------------------------------------------------------------------
/examples/flink-kafka-reader/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>examples</artifactId>
7 |         <groupId>cn.todd.submiter</groupId>
8 |         <version>1.0-SNAPSHOT</version>
9 |         <relativePath>../pom.xml</relativePath>
10 |     </parent>
11 |     <modelVersion>4.0.0</modelVersion>
12 |
13 |     <artifactId>examples.flink-kafka-reader</artifactId>
14 |     <name>examples.flink-kafka-reader</name>
15 |
16 |     <properties>
17 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
18 |         <flink.version>1.12.1</flink.version>
19 |         <jar.name>flink-kafka-reader</jar.name>
20 |     </properties>
21 |
22 |     <dependencies>
23 |         <dependency>
24 |             <groupId>org.apache.flink</groupId>
25 |             <artifactId>flink-table-common</artifactId>
26 |             <version>${flink.version}</version>
27 |             <scope>provided</scope>
28 |         </dependency>
29 |
30 |         <dependency>
31 |             <groupId>org.apache.flink</groupId>
32 |             <artifactId>flink-table-api-java-bridge_2.11</artifactId>
33 |             <version>${flink.version}</version>
34 |             <scope>provided</scope>
35 |         </dependency>
36 |
37 |         <dependency>
38 |             <groupId>org.apache.flink</groupId>
39 |             <artifactId>flink-table-planner-blink_2.11</artifactId>
40 |             <version>${flink.version}</version>
41 |             <scope>provided</scope>
42 |         </dependency>
43 |
44 |         <dependency>
45 |             <groupId>org.apache.flink</groupId>
46 |             <artifactId>flink-table-runtime-blink_2.11</artifactId>
47 |             <version>${flink.version}</version>
48 |             <scope>provided</scope>
49 |         </dependency>
50 |
51 |         <dependency>
52 |             <groupId>org.apache.flink</groupId>
53 |             <artifactId>flink-streaming-java_2.11</artifactId>
54 |             <version>${flink.version}</version>
55 |             <scope>provided</scope>
56 |         </dependency>
57 |
58 |         <dependency>
59 |             <groupId>org.apache.flink</groupId>
60 |             <artifactId>flink-json</artifactId>
61 |             <version>${flink.version}</version>
62 |         </dependency>
63 |
64 |         <dependency>
65 |             <groupId>org.apache.flink</groupId>
66 |             <artifactId>flink-connector-kafka_2.11</artifactId>
67 |             <version>${flink.version}</version>
68 |         </dependency>
69 |     </dependencies>
70 |
71 |     <build>
72 |         <plugins>
73 |             <plugin>
74 |                 <groupId>org.apache.maven.plugins</groupId>
75 |                 <artifactId>maven-compiler-plugin</artifactId>
76 |                 <configuration>
77 |                     <source>1.8</source>
78 |                     <target>1.8</target>
79 |                 </configuration>
80 |             </plugin>
81 |
82 |             <plugin>
83 |                 <groupId>org.apache.maven.plugins</groupId>
84 |                 <artifactId>maven-shade-plugin</artifactId>
85 |                 <version>3.1.0</version>
86 |                 <executions>
87 |                     <execution>
88 |                         <phase>package</phase>
89 |                         <goals>
90 |                             <goal>shade</goal>
91 |                         </goals>
92 |                         <configuration>
93 |                             <createDependencyReducedPom>false</createDependencyReducedPom>
94 |                             <transformers>
95 |                                 <transformer
96 |                                         implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
97 |                                     <mainClass>cn.todd.flink.KafkaReader</mainClass>
98 |                                 </transformer>
99 |                             </transformers>
100 |                             <filters>
101 |                                 <filter>
102 |                                     <artifact>*:*</artifact>
103 |                                     <excludes>
104 |                                         <exclude>META-INF/*.SF</exclude>
105 |                                         <exclude>META-INF/*.DSA</exclude>
106 |                                         <exclude>META-INF/*.RSA</exclude>
107 |                                     </excludes>
108 |                                 </filter>
109 |                             </filters>
110 |                         </configuration>
111 |                     </execution>
112 |                 </executions>
113 |             </plugin>
114 |
115 |             <plugin>
116 |                 <artifactId>maven-antrun-plugin</artifactId>
117 |                 <version>1.2</version>
118 |                 <executions>
119 |                     <execution>
120 |                         <id>copy-resources</id>
121 |                         <phase>package</phase>
122 |                         <goals>
123 |                             <goal>run</goal>
124 |                         </goals>
125 |                         <configuration>
126 |                             <tasks>
127 |                                 <!-- ant copy tasks (attribute content not preserved in this listing) -->
128 |                             </tasks>
129 |                         </configuration>
130 |                     </execution>
131 |                 </executions>
132 |             </plugin>
133 |
134 |             <plugin>
135 |                 <groupId>org.apache.maven.plugins</groupId>
136 |                 <artifactId>maven-clean-plugin</artifactId>
137 |                 <version>3.1.0</version>
138 |                 <configuration>
139 |                     <filesets>
140 |                         <fileset>
141 |                             <directory>${basedir}/../../exampleJars/${jar.name}</directory>
142 |                         </fileset>
143 |                     </filesets>
144 |                 </configuration>
145 |             </plugin>
146 |         </plugins>
147 |     </build>
148 | </project>
--------------------------------------------------------------------------------
/examples/flink-sql-executor/src/main/java/cn/todd/flink/utils/TalStringUtil.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 | package cn.todd.flink.utils;
19 |
20 | import java.util.ArrayList;
21 | import java.util.List;
22 |
23 | /**
24 | * @Description
25 | * @Date 2021/2/18 7:51 PM
26 | * @Author todd5167
27 | */
28 | public class TalStringUtil {
29 |
30 | /**
31 |  * Splits the specified string by the given delimiter, ignoring delimiters inside quotes or brackets.
32 |  *
33 |  * @param str the string to split
34 |  * @param delimiter the delimiter character
35 |  * @return the list of split tokens
36 | */
37 | public static List<String> splitIgnoreQuota(String str, char delimiter) {
38 | List<String> tokensList = new ArrayList<>();
39 | boolean inQuotes = false;
40 | boolean inSingleQuotes = false;
41 | int bracketLeftNum = 0;
42 | StringBuilder b = new StringBuilder();
43 | char[] chars = str.toCharArray();
44 | int idx = 0;
45 | for (char c : chars) {
46 | char flag = 0;
47 | if (idx > 0) {
48 | flag = chars[idx - 1];
49 | }
50 | if (c == delimiter) {
51 | if (inQuotes) {
52 | b.append(c);
53 | } else if (inSingleQuotes) {
54 | b.append(c);
55 | } else if (bracketLeftNum > 0) {
56 | b.append(c);
57 | } else {
58 | tokensList.add(b.toString());
59 | b = new StringBuilder();
60 | }
61 | } else if (c == '\"' && '\\' != flag && !inSingleQuotes) {
62 | inQuotes = !inQuotes;
63 | b.append(c);
64 | } else if (c == '\'' && '\\' != flag && !inQuotes) {
65 | inSingleQuotes = !inSingleQuotes;
66 | b.append(c);
67 | } else if (c == '(' && !inSingleQuotes && !inQuotes) {
68 | bracketLeftNum++;
69 | b.append(c);
70 | } else if (c == ')' && !inSingleQuotes && !inQuotes) {
71 | bracketLeftNum--;
72 | b.append(c);
73 | } else {
74 | b.append(c);
75 | }
76 | idx++;
77 | }
78 |
79 | tokensList.add(b.toString());
80 |
81 | return tokensList;
82 | }
83 |
84 | public static List<String> splitField(String str) {
85 | final char delimiter = ',';
86 | List<String> tokensList = new ArrayList<>();
87 | boolean inQuotes = false;
88 | boolean inSingleQuotes = false;
89 | int bracketLeftNum = 0;
90 | StringBuilder b = new StringBuilder();
91 | char[] chars = str.toCharArray();
92 | int idx = 0;
93 | for (char c : chars) {
94 | char flag = 0;
95 | if (idx > 0) {
96 | flag = chars[idx - 1];
97 | }
98 | if (c == delimiter) {
99 | if (inQuotes) {
100 | b.append(c);
101 | } else if (inSingleQuotes) {
102 | b.append(c);
103 | } else if (bracketLeftNum > 0) {
104 | b.append(c);
105 | } else {
106 | tokensList.add(b.toString());
107 | b = new StringBuilder();
108 | }
109 | } else if (c == '\"' && '\\' != flag && !inSingleQuotes) {
110 | inQuotes = !inQuotes;
111 | b.append(c);
112 | } else if (c == '\'' && '\\' != flag && !inQuotes) {
113 | inSingleQuotes = !inSingleQuotes;
114 | b.append(c);
115 | } else if (c == '(' && !inSingleQuotes && !inQuotes) {
116 | bracketLeftNum++;
117 | b.append(c);
118 | } else if (c == ')' && !inSingleQuotes && !inQuotes) {
119 | bracketLeftNum--;
120 | b.append(c);
121 | } else if (c == '<' && !inSingleQuotes && !inQuotes) {
122 | bracketLeftNum++;
123 | b.append(c);
124 | } else if (c == '>' && !inSingleQuotes && !inQuotes) {
125 | bracketLeftNum--;
126 | b.append(c);
127 | } else {
128 | b.append(c);
129 | }
130 | idx++;
131 | }
132 |
133 | tokensList.add(b.toString());
134 |
135 | return tokensList;
136 | }
137 |
138 |
139 | public static String replaceIgnoreQuota(String str, String oriStr, String replaceStr) {
140 | String splitPatternStr = oriStr + "(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)(?=(?:[^']*'[^']*')*[^']*$)";
141 | return str.replaceAll(splitPatternStr, replaceStr);
142 | }
143 |
144 | /**
145 |  * Strips "--" comments from the sql without removing content inside quoted strings.
146 |  *
147 |  * @param sql the sql text to process
148 |  * @return the sql with comments removed
149 | */
150 | public static String dealSqlComment(String sql) {
151 | boolean inQuotes = false;
152 | boolean inSingleQuotes = false;
153 | StringBuilder b = new StringBuilder(sql.length());
154 | char[] chars = sql.toCharArray();
155 | for (int index = 0; index < chars.length; index++) {
156 | StringBuilder tempSb = new StringBuilder(2);
157 | if (index >= 1) {
158 | tempSb.append(chars[index - 1]);
159 | tempSb.append(chars[index]);
160 | }
161 |
162 | if ("--".equals(tempSb.toString())) {
163 | if (inQuotes) {
164 | b.append(chars[index]);
165 | } else if (inSingleQuotes) {
166 | b.append(chars[index]);
167 | } else {
168 | b.deleteCharAt(b.length() - 1);
169 | while (chars[index] != '\n') {
170 | // check whether the comment reaches the end of the line or the last line of the sql
171 | if (index == chars.length - 1) {
172 | break;
173 | }
174 | index++;
175 | }
176 | }
177 | } else if (chars[index] == '\"' && (index == 0 || '\\' != chars[index - 1]) && !inSingleQuotes) {
178 | inQuotes = !inQuotes;
179 | b.append(chars[index]);
180 | } else if (chars[index] == '\'' && (index == 0 || '\\' != chars[index - 1]) && !inQuotes) {
181 | inSingleQuotes = !inSingleQuotes;
182 | b.append(chars[index]);
183 | } else {
184 | b.append(chars[index]);
185 | }
186 | }
187 | return b.toString();
188 | }
189 |
190 | }
191 |
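A couple of illustrative calls showing the intended behaviour of the helpers above (expected results are approximate; surrounding whitespace may differ):

    import java.util.List;
    import cn.todd.flink.utils.TalStringUtil;

    public class TalStringUtilDemo {
        public static void main(String[] args) {
            // Delimiters inside quotes or parentheses are not treated as split points.
            List<String> parts = TalStringUtil.splitIgnoreQuota(
                    "INSERT INTO t VALUES ('a;b'); SELECT f(1;2) FROM t", ';');
            // -> ["INSERT INTO t VALUES ('a;b')", " SELECT f(1;2) FROM t"]
            System.out.println(parts);

            // "--" comments are removed, but "--" inside a string literal is preserved.
            String cleaned = TalStringUtil.dealSqlComment(
                    "SELECT '--not a comment' FROM t -- real comment\nSELECT 1");
            // -> "SELECT '--not a comment' FROM t SELECT 1"
            System.out.println(cleaned);
        }
    }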
--------------------------------------------------------------------------------