├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── app.yaml ├── base ├── controller.json ├── crd.json ├── kustomization.json ├── namespace.json ├── service.json └── statefulset.json ├── controller-app ├── .gitignore ├── Dockerfile ├── configmap.json ├── cronjob.json ├── index.js ├── package-lock.json ├── package.json ├── pod-template-spec.json └── statefulset.json ├── dist └── flink-controller.yaml ├── environments ├── minikube │ ├── kustomization.json │ └── statefulset-patch.json └── publish │ ├── kustomization.json │ └── statefulset-patch.json ├── job-app ├── Dockerfile ├── check.sh ├── log4j.properties ├── start-batch.sh └── start-streaming.sh ├── k8s-flink-operator.code-workspace └── test ├── .classpath ├── .gitignore ├── .project ├── .settings └── org.eclipse.buildship.core.prefs ├── Dockerfile ├── Makefile ├── build.gradle ├── flink-batch-job.json ├── flink-streaming-job.json ├── flink.json ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle └── src ├── main ├── java │ └── flink │ │ └── test │ │ ├── AppBatch.java │ │ └── AppStreaming.java └── resources │ └── simplelogger.properties └── test └── java └── flink └── test └── AppTest.java /.gitignore: -------------------------------------------------------------------------------- 1 | /app.override.yaml 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Srfrnk 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | FORCE: 2 | 3 | install-metacontroller: FORCE 4 | -kubectl create namespace metacontroller 5 | kubectl apply -f https://raw.githubusercontent.com/GoogleCloudPlatform/metacontroller/master/manifests/metacontroller-rbac.yaml 6 | kubectl apply -f https://raw.githubusercontent.com/srfrnk/metacontroller/master/manifests/metacontroller.yaml 7 | 8 | deploy-minikube: TIMESTAMP=$(shell date +%y%m%d-%H%M -u) 9 | deploy-minikube: FORCE 10 | eval $$(minikube docker-env) && docker build controller-app -t srfrnk/flink-controller-app:${TIMESTAMP} 11 | eval $$(minikube docker-env) && docker build job-app -t srfrnk/flink-job-app:${TIMESTAMP} 12 | export IMAGE_VERSION=$(TIMESTAMP) && kustomize build environments/minikube | kubectl apply -f - 13 | 14 | test-minikube: FORCE 15 | make -C test deploy-minikube 16 | 17 | publish: TIMESTAMP=$(shell date +%y%m%d-%H%M -u) 18 | publish: FORCE 19 | docker build controller-app -t srfrnk/flink-controller-app:${TIMESTAMP} 20 | docker build job-app -t srfrnk/flink-job-app:${TIMESTAMP} 21 | docker push srfrnk/flink-controller-app:${TIMESTAMP} 22 | docker push srfrnk/flink-job-app:${TIMESTAMP} 23 | export IMAGE_VERSION=$(TIMESTAMP) && kustomize build environments/publish > dist/flink-controller.yaml 24 | 25 | proxy: 26 | kubectl port-forward svc/flink-jobmanager 8081:8081 -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # k8s-flink-operator 2 | 3 | ## Prerequisites 4 | 5 | - For `GKE` only, run: `kubectl create clusterrolebinding <user-name>-cluster-admin-binding --clusterrole=cluster-admin --user=<user-name>@<domain>` 6 | - Install [metacontroller](https://metacontroller.app/guide/install/) or run `make install-metacontroller` 7 | 8 | **Note:** To install `metacontroller` you can usually just run these: 9 | 10 | ```bash 11 | kubectl create namespace metacontroller 12 | kubectl apply -f https://raw.githubusercontent.com/GoogleCloudPlatform/metacontroller/master/manifests/metacontroller-rbac.yaml 13 | kubectl apply -f https://raw.githubusercontent.com/GoogleCloudPlatform/metacontroller/master/manifests/metacontroller.yaml 14 | ``` 15 | 16 | Optionally you can apply the following instead to reduce the log flood coming from `Metacontroller`: 17 | 18 | ```bash 19 | kubectl apply -f https://raw.githubusercontent.com/srfrnk/metacontroller/master/manifests/metacontroller.yaml 20 | ``` 21 | 22 | ## Installation 23 | 24 | Just run the following: 25 | 26 | ```bash 27 | kubectl apply -f https://raw.githubusercontent.com/srfrnk/k8s-flink-operator/master/dist/flink-controller.yaml 28 | ``` 29 | 30 | ## Usage 31 | 32 | To use `flink-controller` you need to have: 33 | 34 | - A `JAR` containing code that creates a valid `Flink` job 35 | - A `docker image` that contains the `JAR` 36 | - A `K8S` configuration file defining the job 37 | 38 | ### JAR 39 | 40 | Please see the [Example Apache BEAM Pipeline](https://github.com/srfrnk/k8s-flink-operator/tree/master/test) that can run on `Flink`. 41 | 42 | - The main files ([AppBatch.java](https://github.com/srfrnk/k8s-flink-operator/blob/master/test/src/main/java/flink/test/AppBatch.java) and [AppStreaming.java](https://github.com/srfrnk/k8s-flink-operator/blob/master/test/src/main/java/flink/test/AppStreaming.java)) create the jobs.
43 | - `gradle build` creates the `JAR` 44 | 45 | ### Docker Image 46 | 47 | - The [Dockerfile](https://github.com/srfrnk/k8s-flink-operator/blob/master/test/Dockerfile#L3) defines the image 48 | - To build the image inside `minikube`: `eval $(minikube docker-env) && docker build . -t flink-test:v1` 49 | - You can also build locally and push to any repository accessible to your `K8S` cluster 50 | 51 | ### Configuration Manifest 52 | 53 | - The example manifests [flink-streaming-job.json](https://github.com/srfrnk/k8s-flink-operator/blob/master/test/flink-streaming-job.json) and [flink-batch-job.json](https://github.com/srfrnk/k8s-flink-operator/blob/master/test/flink-batch-job.json) define the jobs (a condensed example also appears at the end of this README). 54 | - The manifest can be written in `YAML` or `JSON` 55 | 56 | The spec must include: 57 | 58 | - `jobManagerUrl`: Cluster URL of the `Flink Job Manager` ("host:port") 59 | - `jarImage`: Full image identifier ("repo/image:tag") 60 | - `jarPath`: Absolute path to the `JAR` inside the image 61 | - `mainClass`: Fully-qualified class name for the job (e.g. "org.example.MyClass") 62 | - Either `streaming` or `cron`: for a streaming job or a batch job, respectively 63 | 64 | `streaming` should include: 65 | 66 | - `replicas`: The number of jobs to submit simultaneously 67 | 68 | `cron` should include: 69 | 70 | - `schedule`: The schedule in Cron format. [See here](https://en.wikipedia.org/wiki/Cron) 71 | - `concurrencyPolicy`: Specifies how to treat concurrent executions of a Job. Valid values are: "Allow" (default) - allows CronJobs to run concurrently; "Forbid" - forbids concurrent runs, skipping the next run if the previous run hasn't finished yet; "Replace" - cancels the currently running job and replaces it with a new one. [See here](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.14/#cronjobspec-v1beta1-batch) 72 | 73 | The following are optional: 74 | 75 | - `version`: A string with the version label to be added to all k8s resources. If no `version` is specified, a `NoVersion` label will be added. 76 | - `props`: An array of `{key,value}` props to pass to the job (e.g. read via `ParameterTool parameters = ParameterTool.fromArgs(args);`) 77 | - `volumeMounts`: An array of `volume specs` (see below). 78 | - `env`: An array of [EnvVars](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.14/#envvar-v1-core). 79 | 80 | `volume specs` have the following parameters: 81 | 82 | - `volume`: a [Volume spec](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.14/#volume-v1-core). 83 | - `mount`: a [VolumeMount spec](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.14/#volumemount-v1-core). 84 | 85 | **Note:** neither `volume` nor `mount` needs to have a `name`; any given `name` will be overwritten. 86 | 87 | The `mainClass` must contain a `public static void main(String[] args)` method that runs a `Flink` job. 88 | 89 | ## Deployment 90 | 91 | - Make sure the `JAR` image is accessible to the `K8S` cluster 92 | - Apply the configuration manifest: `kubectl apply -f <manifest-file>` 93 | 94 | ## Cleanup 95 | 96 | - `kubectl delete -f <manifest-file>` 97 | - `kubectl delete -f https://raw.githubusercontent.com/srfrnk/k8s-flink-operator/master/dist/flink-controller.yaml` 98 | - `kubectl delete -f https://raw.githubusercontent.com/GoogleCloudPlatform/metacontroller/master/manifests/metacontroller-rbac.yaml` 99 | - `kubectl delete -f https://raw.githubusercontent.com/GoogleCloudPlatform/metacontroller/master/manifests/metacontroller.yaml` 100 | - `kubectl delete namespace metacontroller` 101 | 102 | ## Notes 103 | 104 | - I recently published an article describing how and why I built this. You can [find it here](https://medium.com/@srfrnk/i-flink-you-freaky-and-i-like-you-a-lot-68554f7629df).
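
## Example Manifest

The following is a minimal sketch of a streaming `FlinkJob` manifest in `YAML`, assembled from the spec fields documented above (and accepted by the schema in `base/crd.json`). All concrete values (the job name, image, main class, prop keys, secret name, and mount path) are illustrative placeholders rather than files in this repository; the authoritative, working examples are `test/flink-streaming-job.json` and `test/flink-batch-job.json` further down.

```yaml
apiVersion: operators.srfrnk.com/v1
kind: FlinkJob
metadata:
  name: my-streaming-job                    # illustrative name
spec:
  version: v1                               # optional; omitting it yields the "NoVersion" label
  jobManagerUrl: flink-jobmanager:8081
  jarImage: my-registry/my-flink-job:v1     # illustrative image ("repo/image:tag")
  jarPath: /jars/my-flink-job.jar           # absolute path to the JAR inside that image
  mainClass: org.example.MyClass            # class with a public static void main(String[] args)
  streaming:                                # use "cron: {schedule, concurrencyPolicy}" for a batch job instead
    replicas: 1
  props:                                    # passed to the job as "--key value" arguments
    - key: inputTopic
      value: events
    - key: apiToken
      valueFrom:
        secretKeyRef:
          name: my-secret                   # illustrative Secret
          key: token
  env:
    - name: LOG_LEVEL
      value: INFO
  volumeMounts:
    - volume:
        emptyDir: {}                        # any Volume spec; "name" may be omitted
      mount:
        mountPath: /data                    # any VolumeMount spec; "name" may be omitted
```

Apply it with `kubectl apply -f <manifest-file>` once the `jarImage` is pullable by the cluster (or built inside `minikube` as shown above).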
105 | -------------------------------------------------------------------------------- /app.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 0.3.0 2 | environments: 3 | minikube: 4 | destination: 5 | namespace: default 6 | server: https://192.168.39.101:8443 7 | k8sVersion: v1.14.1 8 | path: minikube 9 | publish: 10 | destination: 11 | namespace: default 12 | server: https://192.168.39.218:8443 13 | k8sVersion: v1.14.1 14 | path: publish 15 | kind: ksonnet.io/app 16 | name: k8s-flink-operator 17 | registries: 18 | incubator: 19 | protocol: github 20 | uri: github.com/ksonnet/parts/tree/master/incubator 21 | version: 0.0.1 22 | -------------------------------------------------------------------------------- /base/controller.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "metacontroller.k8s.io/v1alpha1", 3 | "kind": "CompositeController", 4 | "metadata": { 5 | "labels": { 6 | }, 7 | "name": "flink-controller", 8 | "namespace": "flink" 9 | }, 10 | "spec": { 11 | "childResources": [ 12 | { 13 | "apiVersion": "apps/v1", 14 | "resource": "statefulsets", 15 | "updateStrategy": { 16 | "method": "InPlace" 17 | } 18 | }, 19 | { 20 | "apiVersion": "batch/v1beta1", 21 | "resource": "cronjobs", 22 | "updateStrategy": { 23 | "method": "InPlace" 24 | } 25 | }, 26 | { 27 | "apiVersion": "v1", 28 | "resource": "configmaps", 29 | "updateStrategy": { 30 | "method": "OnDelete" 31 | } 32 | } 33 | ], 34 | "generateSelector": true, 35 | "hooks": { 36 | "sync": { 37 | "webhook": { 38 | "url": "http://flink-controller.flink/sync" 39 | } 40 | } 41 | }, 42 | "parentResource": { 43 | "apiVersion": "operators.srfrnk.com/v1", 44 | "resource": "flinkjobs" 45 | } 46 | } 47 | } -------------------------------------------------------------------------------- /base/crd.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "apiextensions.k8s.io/v1beta1", 3 | "kind": "CustomResourceDefinition", 4 | "metadata": { 5 | "labels": { 6 | }, 7 | "name": "flinkjobs.operators.srfrnk.com", 8 | "namespace": "flink" 9 | }, 10 | "spec": { 11 | "additionalPrinterColumns": [ 12 | { 13 | "JSONPath": ".metadata.creationTimestamp", 14 | "description": "Time since object was created", 15 | "name": "Age", 16 | "type": "date" 17 | } 18 | ], 19 | "group": "operators.srfrnk.com", 20 | "names": { 21 | "kind": "FlinkJob", 22 | "plural": "flinkjobs", 23 | "shortNames": [ 24 | "flink", 25 | "flinks" 26 | ], 27 | "singular": "flinkjob" 28 | }, 29 | "scope": "Namespaced", 30 | "subresources": { 31 | "scale": { 32 | "labelSelectorPath": ".status.labelSelector", 33 | "specReplicasPath": ".spec.replicas", 34 | "statusReplicasPath": ".status.replicas" 35 | }, 36 | "status": {} 37 | }, 38 | "validation": { 39 | "openAPIV3Schema": { 40 | "properties": { 41 | "spec": { 42 | "properties": { 43 | "cron": { 44 | "properties": { 45 | "concurrencyPolicy": { 46 | "type": "string" 47 | }, 48 | "schedule": { 49 | "type": "string" 50 | } 51 | }, 52 | "required": [ 53 | "schedule" 54 | ] 55 | }, 56 | "env": { 57 | "items": {}, 58 | "type": "array" 59 | }, 60 | "jarImage": { 61 | "type": "string" 62 | }, 63 | "jarPath": { 64 | "type": "string" 65 | }, 66 | "jobManagerUrl": { 67 | "type": "string" 68 | }, 69 | "mainClass": { 70 | "type": "string" 71 | }, 72 | "props": { 73 | "items": { 74 | "properties": { 75 | "key": { 76 | "type": "string" 77 | }, 78 | "value": { 79 | "type": "string" 80 | }, 81 | 
"valueFrom": { 82 | "properties": { 83 | "configMapKeyRef": { 84 | "properties": { 85 | "key": { 86 | "type": "string" 87 | }, 88 | "name": { 89 | "type": "string" 90 | } 91 | } 92 | }, 93 | "secretKeyRef": { 94 | "properties": { 95 | "key": { 96 | "type": "string" 97 | }, 98 | "name": { 99 | "type": "string" 100 | } 101 | } 102 | } 103 | } 104 | } 105 | }, 106 | "required": [ 107 | "key" 108 | ] 109 | }, 110 | "type": "array" 111 | }, 112 | "streaming": { 113 | "properties": { 114 | "replicas": { 115 | "minimum": 0, 116 | "type": "integer" 117 | } 118 | }, 119 | "required": [ 120 | "replicas" 121 | ] 122 | }, 123 | "volumeMounts": { 124 | "items": { 125 | "properties": { 126 | "mount": {}, 127 | "volume": {} 128 | }, 129 | "required": [ 130 | "volume", 131 | "mount" 132 | ] 133 | }, 134 | "type": "array" 135 | } 136 | }, 137 | "required": [ 138 | "jobManagerUrl", 139 | "jarImage", 140 | "jarPath", 141 | "mainClass" 142 | ] 143 | } 144 | }, 145 | "required": [ 146 | "spec" 147 | ] 148 | } 149 | }, 150 | "version": "v1" 151 | } 152 | } -------------------------------------------------------------------------------- /base/kustomization.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources": [ 3 | "controller.json", 4 | "crd.json", 5 | "namespace.json", 6 | "service.json", 7 | "statefulset.json" 8 | ], 9 | "vars": [ 10 | { 11 | "name": "IMAGE_VERSION", 12 | "envref": { 13 | "name": "IMAGE_VERSION" 14 | } 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /base/namespace.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "v1", 3 | "kind": "Namespace", 4 | "metadata": { 5 | "labels": {}, 6 | "name": "flink" 7 | } 8 | } -------------------------------------------------------------------------------- /base/service.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "v1", 3 | "kind": "Service", 4 | "metadata": { 5 | "labels": { 6 | }, 7 | "name": "flink-controller", 8 | "namespace": "flink" 9 | }, 10 | "spec": { 11 | "ports": [ 12 | { 13 | "port": 80 14 | } 15 | ], 16 | "selector": { 17 | "app": "flink-controller" 18 | } 19 | } 20 | } -------------------------------------------------------------------------------- /base/statefulset.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "apps/v1", 3 | "kind": "StatefulSet", 4 | "metadata": { 5 | "labels": { 6 | "version": "$(IMAGE_VERSION)" 7 | }, 8 | "name": "flink-controller", 9 | "namespace": "flink" 10 | }, 11 | "spec": { 12 | "replicas": 1, 13 | "selector": { 14 | "matchLabels": { 15 | "app": "flink-controller" 16 | } 17 | }, 18 | "serviceName": "flink-controller", 19 | "template": { 20 | "metadata": { 21 | "labels": { 22 | "app": "flink-controller", 23 | "version": "$(IMAGE_VERSION)" 24 | } 25 | }, 26 | "spec": { 27 | "containers": [ 28 | { 29 | "args": [ 30 | "npm start" 31 | ], 32 | "command": [ 33 | "bash", 34 | "-c" 35 | ], 36 | "env": [ 37 | { 38 | "name": "IMAGE_VERSION", 39 | "value": "$(IMAGE_VERSION)" 40 | }, 41 | { 42 | "name": "DEBUG_LOG", 43 | "value": "true" 44 | } 45 | ], 46 | "image": "srfrnk/flink-controller-app:$(IMAGE_VERSION)", 47 | "name": "controller", 48 | "ports": [ 49 | { 50 | "containerPort": 80 51 | } 52 | ], 53 | "resources": { 54 | "limits": { 55 | "cpu": "100m", 56 | "memory": "100Mi" 57 | }, 58 | "requests": { 59 | "cpu": "10m", 60 | 
"memory": "10Mi" 61 | } 62 | } 63 | } 64 | ] 65 | } 66 | } 67 | } 68 | } -------------------------------------------------------------------------------- /controller-app/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /controller-app/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node 2 | 3 | ADD index.js /app/index.js 4 | ADD package.json /app/package.json 5 | ADD statefulset.json /app/statefulset.json 6 | ADD cronjob.json /app/cronjob.json 7 | ADD pod-template-spec.json /app/pod-template-spec.json 8 | ADD configmap.json /app/configmap.json 9 | 10 | WORKDIR /app 11 | RUN npm i 12 | -------------------------------------------------------------------------------- /controller-app/configmap.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "v1", 3 | "kind": "ConfigMap", 4 | "metadata": { 5 | "name": "flink-job-jar", 6 | "labels": { 7 | "version": "", 8 | "k8s-flink-operator-version": "" 9 | } 10 | }, 11 | "data": { 12 | "start-streaming.sh": "#!/usr/bin/env sh\ntrap 'break;' TERM INT \n echo 'Copying jar...' \n cp ${jarDir}/${jarName} /jar/temp.jar \n mv /jar/temp.jar /jar/${jarName} \n echo 'Started' \n while :; do sleep 1; done \n echo 'Exiting'", 13 | "start-batch.sh": "#!/usr/bin/env sh\ntrap 'break;' TERM INT \n echo 'Copying jar...' \n cp ${jarDir}/${jarName} /jar/temp.jar \n mv /jar/temp.jar /jar/${jarName} \n echo 'Exiting'" 14 | } 15 | } -------------------------------------------------------------------------------- /controller-app/cronjob.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "batch/v1beta1", 3 | "kind": "CronJob", 4 | "metadata": { 5 | "name": "", 6 | "labels": { 7 | "version": "", 8 | "k8s-flink-operator-version": "" 9 | } 10 | }, 11 | "spec": { 12 | "concurrencyPolicy": "", 13 | "schedule": "", 14 | "successfulJobsHistoryLimit": 0, 15 | "failedJobsHistoryLimit": 0, 16 | "jobTemplate": { 17 | "metadata": { 18 | "name": "", 19 | "labels": { 20 | "version": "", 21 | "k8s-flink-operator-version": "" 22 | } 23 | }, 24 | "spec": { 25 | "_ttlSecondsAfterFinished": 30, 26 | "completions": 1, 27 | "parallelism": 1, 28 | "template": {} 29 | } 30 | } 31 | } 32 | } -------------------------------------------------------------------------------- /controller-app/index.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const bodyParser = require('body-parser'); 3 | const path = require('path'); 4 | const fs = require('fs'); 5 | 6 | const DEBUG_LOG = process.env.DEBUG_LOG === 'true'; 7 | const IMAGE_VERSION = process.env.IMAGE_VERSION || "latest"; 8 | const statefulsetJson = fs.readFileSync('statefulset.json', { encoding: 'utf8' }); 9 | const cronjobJson = fs.readFileSync('cronjob.json', { encoding: 'utf8' }); 10 | const configMapJson = fs.readFileSync('configmap.json', { encoding: 'utf8' }); 11 | const podTemplateSpecJson = fs.readFileSync('pod-template-spec.json', { encoding: 'utf8' }); 12 | 13 | var app = express(); 14 | app.use(bodyParser.json()); 15 | 16 | app.post('/sync', function (req, res) { 17 | const parent = req.body.parent; 18 | // const children = req.body.children; 19 | const response = { 20 | "status": {}, 21 | "children": getChildren(parent.metadata.name, parent.spec) 22 
| }; 23 | if (DEBUG_LOG) { 24 | console.log(JSON.stringify({ type: "SYNC", req: req.body, res: response }), ","); 25 | } 26 | res.json(response); 27 | }); 28 | 29 | app.all("**", (req, res) => { 30 | if (DEBUG_LOG) { 31 | console.log(JSON.stringify({ type: "CATCHALL", req: req.body, res: {} })); 32 | } 33 | res.json({}); 34 | }); 35 | 36 | app.listen(80, () => { 37 | console.log("Flink controller running!"); 38 | }); 39 | 40 | function getChildren(jobName, spec) { 41 | const configMapName = `flink-job-jar-${jobName}`; 42 | const version = spec.version || 'NoVersion'; 43 | const controller = getController(jobName, version, configMapName, spec); 44 | const configMap = getConfigMap(version, configMapName); 45 | return [configMap, controller]; 46 | } 47 | 48 | function getController(jobName, version, configMapName, spec) { 49 | if (!!spec.streaming) { 50 | return getStatefulset(jobName, version, configMapName, spec); 51 | } 52 | 53 | if (!!spec.cron) { 54 | return getCronJob(jobName, version, configMapName, spec); 55 | } 56 | 57 | console.log(`Job '${jobName}': Must specify either 'streaming' or 'cron' properties in spec. No controller is created for job.`); 58 | } 59 | 60 | function getStatefulset(jobName, version, configMapName, spec) { 61 | const statefulset = JSON.parse(statefulsetJson); 62 | 63 | statefulset.metadata.name = `flink-job-${jobName}`; 64 | statefulset.metadata.labels.version = version; 65 | statefulset.metadata.labels['k8s-flink-operator-version'] = IMAGE_VERSION; 66 | statefulset.spec.replicas = spec.streaming.replicas; 67 | statefulset.spec.selector.matchLabels["flink-job"] = jobName; 68 | statefulset.spec.template = getPodTemplateSpec(jobName, version, configMapName, spec, true); 69 | 70 | return statefulset; 71 | } 72 | 73 | 74 | function getCronJob(jobName, version, configMapName, spec) { 75 | const cronjob = JSON.parse(cronjobJson); 76 | 77 | const name = `flink-job-${jobName}`; 78 | cronjob.metadata.name = name; 79 | cronjob.metadata.labels.version = version; 80 | cronjob.metadata.labels['k8s-flink-operator-version'] = IMAGE_VERSION; 81 | cronjob.spec.concurrencyPolicy = spec.cron.concurrencyPolicy || 'Allow'; 82 | cronjob.spec.schedule = spec.cron.schedule; 83 | cronjob.spec.jobTemplate.metadata.name = name; 84 | cronjob.spec.jobTemplate.metadata.labels.version = version; 85 | cronjob.spec.jobTemplate.metadata.labels['k8s-flink-operator-version'] = IMAGE_VERSION; 86 | cronjob.spec.jobTemplate.spec.template = getPodTemplateSpec(jobName, version, configMapName, spec, false); 87 | delete cronjob.spec.jobTemplate.spec.template.spec.containers[0].livenessProbe; 88 | 89 | return cronjob; 90 | } 91 | 92 | function getPodTemplateSpec(jobName, version, configMapName, spec, streaming) { 93 | const podTemplateSpec = JSON.parse(podTemplateSpecJson); 94 | 95 | podTemplateSpec.metadata.labels["flink-job"] = jobName; 96 | podTemplateSpec.metadata.labels.version = version; 97 | podTemplateSpec.metadata.labels['k8s-flink-operator-version'] = IMAGE_VERSION; 98 | 99 | const jarDir = path.dirname(spec.jarPath); 100 | const jarName = path.basename(spec.jarPath); 101 | const props = getProps(spec.props); 102 | const jobProps = props.props; 103 | const podSpec = podTemplateSpec.spec; 104 | 105 | const jobNameEnv = { "name": "jobName" }; 106 | 107 | if (streaming) { 108 | jobNameEnv.valueFrom = { fieldRef: { fieldPath: "metadata.name" } }; 109 | } 110 | else { 111 | jobNameEnv.value = jobName; 112 | } 113 | 114 | podSpec.containers[0].env = [ 115 | jobNameEnv, 116 | { 117 | "name": 
"DEBUG_LOG", 118 | "value": `${DEBUG_LOG}` 119 | }, 120 | { 121 | "name": "version", 122 | "value": `${version}` 123 | }, 124 | { 125 | "name": "jobManagerUrl", 126 | "value": spec.jobManagerUrl 127 | }, 128 | { 129 | "name": "jarPath", 130 | "value": `/jar/${jarName}` 131 | }, 132 | { 133 | "name": "mainClass", 134 | "value": spec.mainClass 135 | }, 136 | { 137 | "name": "jobProps", 138 | "value": jobProps 139 | }, 140 | ...(spec.env || []), 141 | ...props.env 142 | ]; 143 | podSpec.containers[1].env = [ 144 | { 145 | "name": "jarDir", 146 | "value": jarDir 147 | }, 148 | { 149 | "name": "jarName", 150 | "value": jarName 151 | } 152 | ]; 153 | podSpec.containers[0].image = `srfrnk/flink-job-app:${IMAGE_VERSION}`; 154 | podSpec.containers[1].image = spec.jarImage; 155 | podSpec.volumes[0].configMap.name = configMapName; 156 | 157 | if (streaming) { 158 | podSpec.containers[0].command = ["/app/start-streaming.sh"]; 159 | podSpec.containers[1].command = ["/app/start-streaming.sh"]; 160 | podSpec.restartPolicy = "Always"; 161 | } 162 | else { 163 | podSpec.containers[0].command = ["/app/start-batch.sh"]; 164 | podSpec.containers[1].command = ["/app/start-batch.sh"]; 165 | podSpec.restartPolicy = "Never"; 166 | } 167 | 168 | for (const volumeMount of (spec.volumeMounts || [])) { 169 | const volumeName = `volume-${Math.floor(Math.random() * 10e10)}`; 170 | volumeMount.volume.name = volumeName; 171 | volumeMount.mount.name = volumeName; 172 | 173 | podSpec.volumes.push(volumeMount.volume); 174 | podSpec.containers[0].volumeMounts.push(volumeMount.mount); 175 | } 176 | 177 | return podTemplateSpec; 178 | } 179 | 180 | function getConfigMap(version, configMapName) { 181 | const configMap = JSON.parse(configMapJson); 182 | configMap.metadata.name = configMapName; 183 | configMap.metadata.labels.version = version; 184 | configMap.metadata.labels['k8s-flink-operator-version'] = IMAGE_VERSION; 185 | return configMap; 186 | } 187 | 188 | function getProps(specProps) { 189 | const props = []; 190 | const env = []; 191 | 192 | for (const prop of specProps) { 193 | let key = prop.key; 194 | let value = prop.value; 195 | if (!!prop.valueFrom) { 196 | const valueFrom = prop.valueFrom; 197 | if (!!valueFrom.configMapKeyRef) { 198 | value = getRefValue(env, key, "configMapKeyRef", valueFrom.configMapKeyRef); 199 | } 200 | else if (!!valueFrom.secretKeyRef) { 201 | value = getRefValue(env, key, "secretKeyRef", valueFrom.secretKeyRef); 202 | } 203 | } 204 | props.push({ 205 | key: key, 206 | value: value 207 | }); 208 | } 209 | 210 | const jobProps = props.map(prop => `--${prop.key} ${prop.value}`).join(' '); 211 | return { props: jobProps, env: env }; 212 | } 213 | 214 | function getRefValue(env, key, type, ref) { 215 | const envKey = `jobProps_${key}_${type}_${ref.name}_${ref.key}`.replace(/[\-\$]/gi, '_'); 216 | env.push({ 217 | "name": envKey, 218 | "valueFrom": { 219 | [type]: { 220 | "name": ref.name, 221 | "key": ref.key, 222 | } 223 | } 224 | }); 225 | return `$\{${envKey}\}`; 226 | } 227 | -------------------------------------------------------------------------------- /controller-app/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "contoller-app", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "accepts": { 8 | "version": "1.3.7", 9 | "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", 10 | "integrity": 
"sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", 11 | "requires": { 12 | "mime-types": "~2.1.24", 13 | "negotiator": "0.6.2" 14 | } 15 | }, 16 | "array-flatten": { 17 | "version": "1.1.1", 18 | "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", 19 | "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" 20 | }, 21 | "body-parser": { 22 | "version": "1.19.0", 23 | "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", 24 | "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", 25 | "requires": { 26 | "bytes": "3.1.0", 27 | "content-type": "~1.0.4", 28 | "debug": "2.6.9", 29 | "depd": "~1.1.2", 30 | "http-errors": "1.7.2", 31 | "iconv-lite": "0.4.24", 32 | "on-finished": "~2.3.0", 33 | "qs": "6.7.0", 34 | "raw-body": "2.4.0", 35 | "type-is": "~1.6.17" 36 | } 37 | }, 38 | "bytes": { 39 | "version": "3.1.0", 40 | "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", 41 | "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" 42 | }, 43 | "content-disposition": { 44 | "version": "0.5.2", 45 | "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", 46 | "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" 47 | }, 48 | "content-type": { 49 | "version": "1.0.4", 50 | "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", 51 | "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" 52 | }, 53 | "cookie": { 54 | "version": "0.3.1", 55 | "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", 56 | "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" 57 | }, 58 | "cookie-signature": { 59 | "version": "1.0.6", 60 | "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", 61 | "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" 62 | }, 63 | "debug": { 64 | "version": "2.6.9", 65 | "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", 66 | "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", 67 | "requires": { 68 | "ms": "2.0.0" 69 | } 70 | }, 71 | "depd": { 72 | "version": "1.1.2", 73 | "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", 74 | "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" 75 | }, 76 | "destroy": { 77 | "version": "1.0.4", 78 | "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", 79 | "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" 80 | }, 81 | "ee-first": { 82 | "version": "1.1.1", 83 | "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", 84 | "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" 85 | }, 86 | "encodeurl": { 87 | "version": "1.0.2", 88 | "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", 89 | "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" 90 | }, 91 | "escape-html": { 92 | "version": "1.0.3", 93 | "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", 94 | "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" 95 | }, 96 | "etag": { 97 | "version": "1.8.1", 98 | "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", 99 | "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" 100 | }, 101 | "express": { 102 | "version": "4.16.4", 103 | "resolved": "https://registry.npmjs.org/express/-/express-4.16.4.tgz", 104 | "integrity": 
"sha512-j12Uuyb4FMrd/qQAm6uCHAkPtO8FDTRJZBDd5D2KOL2eLaz1yUNdUB/NOIyq0iU4q4cFarsUCrnFDPBcnksuOg==", 105 | "requires": { 106 | "accepts": "~1.3.5", 107 | "array-flatten": "1.1.1", 108 | "body-parser": "1.18.3", 109 | "content-disposition": "0.5.2", 110 | "content-type": "~1.0.4", 111 | "cookie": "0.3.1", 112 | "cookie-signature": "1.0.6", 113 | "debug": "2.6.9", 114 | "depd": "~1.1.2", 115 | "encodeurl": "~1.0.2", 116 | "escape-html": "~1.0.3", 117 | "etag": "~1.8.1", 118 | "finalhandler": "1.1.1", 119 | "fresh": "0.5.2", 120 | "merge-descriptors": "1.0.1", 121 | "methods": "~1.1.2", 122 | "on-finished": "~2.3.0", 123 | "parseurl": "~1.3.2", 124 | "path-to-regexp": "0.1.7", 125 | "proxy-addr": "~2.0.4", 126 | "qs": "6.5.2", 127 | "range-parser": "~1.2.0", 128 | "safe-buffer": "5.1.2", 129 | "send": "0.16.2", 130 | "serve-static": "1.13.2", 131 | "setprototypeof": "1.1.0", 132 | "statuses": "~1.4.0", 133 | "type-is": "~1.6.16", 134 | "utils-merge": "1.0.1", 135 | "vary": "~1.1.2" 136 | }, 137 | "dependencies": { 138 | "body-parser": { 139 | "version": "1.18.3", 140 | "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", 141 | "integrity": "sha1-WykhmP/dVTs6DyDe0FkrlWlVyLQ=", 142 | "requires": { 143 | "bytes": "3.0.0", 144 | "content-type": "~1.0.4", 145 | "debug": "2.6.9", 146 | "depd": "~1.1.2", 147 | "http-errors": "~1.6.3", 148 | "iconv-lite": "0.4.23", 149 | "on-finished": "~2.3.0", 150 | "qs": "6.5.2", 151 | "raw-body": "2.3.3", 152 | "type-is": "~1.6.16" 153 | } 154 | }, 155 | "bytes": { 156 | "version": "3.0.0", 157 | "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", 158 | "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" 159 | }, 160 | "http-errors": { 161 | "version": "1.6.3", 162 | "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", 163 | "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", 164 | "requires": { 165 | "depd": "~1.1.2", 166 | "inherits": "2.0.3", 167 | "setprototypeof": "1.1.0", 168 | "statuses": ">= 1.4.0 < 2" 169 | } 170 | }, 171 | "iconv-lite": { 172 | "version": "0.4.23", 173 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", 174 | "integrity": "sha512-neyTUVFtahjf0mB3dZT77u+8O0QB89jFdnBkd5P1JgYPbPaia3gXXOVL2fq8VyU2gMMD7SaN7QukTB/pmXYvDA==", 175 | "requires": { 176 | "safer-buffer": ">= 2.1.2 < 3" 177 | } 178 | }, 179 | "qs": { 180 | "version": "6.5.2", 181 | "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", 182 | "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" 183 | }, 184 | "raw-body": { 185 | "version": "2.3.3", 186 | "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", 187 | "integrity": "sha512-9esiElv1BrZoI3rCDuOuKCBRbuApGGaDPQfjSflGxdy4oyzqghxu6klEkkVIvBje+FF0BX9coEv8KqW6X/7njw==", 188 | "requires": { 189 | "bytes": "3.0.0", 190 | "http-errors": "1.6.3", 191 | "iconv-lite": "0.4.23", 192 | "unpipe": "1.0.0" 193 | } 194 | }, 195 | "setprototypeof": { 196 | "version": "1.1.0", 197 | "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", 198 | "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" 199 | }, 200 | "statuses": { 201 | "version": "1.4.0", 202 | "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", 203 | "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" 204 | } 205 | } 206 | }, 207 | "finalhandler": { 208 | 
"version": "1.1.1", 209 | "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", 210 | "integrity": "sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg==", 211 | "requires": { 212 | "debug": "2.6.9", 213 | "encodeurl": "~1.0.2", 214 | "escape-html": "~1.0.3", 215 | "on-finished": "~2.3.0", 216 | "parseurl": "~1.3.2", 217 | "statuses": "~1.4.0", 218 | "unpipe": "~1.0.0" 219 | }, 220 | "dependencies": { 221 | "statuses": { 222 | "version": "1.4.0", 223 | "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", 224 | "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" 225 | } 226 | } 227 | }, 228 | "forwarded": { 229 | "version": "0.1.2", 230 | "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", 231 | "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" 232 | }, 233 | "fresh": { 234 | "version": "0.5.2", 235 | "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", 236 | "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" 237 | }, 238 | "http-errors": { 239 | "version": "1.7.2", 240 | "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", 241 | "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", 242 | "requires": { 243 | "depd": "~1.1.2", 244 | "inherits": "2.0.3", 245 | "setprototypeof": "1.1.1", 246 | "statuses": ">= 1.5.0 < 2", 247 | "toidentifier": "1.0.0" 248 | } 249 | }, 250 | "iconv-lite": { 251 | "version": "0.4.24", 252 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", 253 | "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", 254 | "requires": { 255 | "safer-buffer": ">= 2.1.2 < 3" 256 | } 257 | }, 258 | "inherits": { 259 | "version": "2.0.3", 260 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", 261 | "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" 262 | }, 263 | "ipaddr.js": { 264 | "version": "1.9.0", 265 | "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.0.tgz", 266 | "integrity": "sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA==" 267 | }, 268 | "media-typer": { 269 | "version": "0.3.0", 270 | "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", 271 | "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" 272 | }, 273 | "merge-descriptors": { 274 | "version": "1.0.1", 275 | "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", 276 | "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" 277 | }, 278 | "methods": { 279 | "version": "1.1.2", 280 | "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", 281 | "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" 282 | }, 283 | "mime": { 284 | "version": "1.4.1", 285 | "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", 286 | "integrity": "sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ==" 287 | }, 288 | "mime-db": { 289 | "version": "1.40.0", 290 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", 291 | "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==" 292 | }, 293 | "mime-types": { 294 | "version": "2.1.24", 295 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", 296 | "integrity": 
"sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", 297 | "requires": { 298 | "mime-db": "1.40.0" 299 | } 300 | }, 301 | "ms": { 302 | "version": "2.0.0", 303 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", 304 | "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" 305 | }, 306 | "negotiator": { 307 | "version": "0.6.2", 308 | "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", 309 | "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" 310 | }, 311 | "on-finished": { 312 | "version": "2.3.0", 313 | "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", 314 | "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", 315 | "requires": { 316 | "ee-first": "1.1.1" 317 | } 318 | }, 319 | "parseurl": { 320 | "version": "1.3.3", 321 | "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", 322 | "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" 323 | }, 324 | "path-to-regexp": { 325 | "version": "0.1.7", 326 | "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", 327 | "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" 328 | }, 329 | "proxy-addr": { 330 | "version": "2.0.5", 331 | "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.5.tgz", 332 | "integrity": "sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ==", 333 | "requires": { 334 | "forwarded": "~0.1.2", 335 | "ipaddr.js": "1.9.0" 336 | } 337 | }, 338 | "qs": { 339 | "version": "6.7.0", 340 | "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", 341 | "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" 342 | }, 343 | "range-parser": { 344 | "version": "1.2.1", 345 | "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", 346 | "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" 347 | }, 348 | "raw-body": { 349 | "version": "2.4.0", 350 | "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", 351 | "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", 352 | "requires": { 353 | "bytes": "3.1.0", 354 | "http-errors": "1.7.2", 355 | "iconv-lite": "0.4.24", 356 | "unpipe": "1.0.0" 357 | } 358 | }, 359 | "safe-buffer": { 360 | "version": "5.1.2", 361 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", 362 | "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" 363 | }, 364 | "safer-buffer": { 365 | "version": "2.1.2", 366 | "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", 367 | "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" 368 | }, 369 | "send": { 370 | "version": "0.16.2", 371 | "resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", 372 | "integrity": "sha512-E64YFPUssFHEFBvpbbjr44NCLtI1AohxQ8ZSiJjQLskAdKuriYEP6VyGEsRDH8ScozGpkaX1BGvhanqCwkcEZw==", 373 | "requires": { 374 | "debug": "2.6.9", 375 | "depd": "~1.1.2", 376 | "destroy": "~1.0.4", 377 | "encodeurl": "~1.0.2", 378 | "escape-html": "~1.0.3", 379 | "etag": "~1.8.1", 380 | "fresh": "0.5.2", 381 | "http-errors": "~1.6.2", 382 | "mime": "1.4.1", 383 | "ms": "2.0.0", 384 | 
"on-finished": "~2.3.0", 385 | "range-parser": "~1.2.0", 386 | "statuses": "~1.4.0" 387 | }, 388 | "dependencies": { 389 | "http-errors": { 390 | "version": "1.6.3", 391 | "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", 392 | "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", 393 | "requires": { 394 | "depd": "~1.1.2", 395 | "inherits": "2.0.3", 396 | "setprototypeof": "1.1.0", 397 | "statuses": ">= 1.4.0 < 2" 398 | } 399 | }, 400 | "setprototypeof": { 401 | "version": "1.1.0", 402 | "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", 403 | "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" 404 | }, 405 | "statuses": { 406 | "version": "1.4.0", 407 | "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", 408 | "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" 409 | } 410 | } 411 | }, 412 | "serve-static": { 413 | "version": "1.13.2", 414 | "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", 415 | "integrity": "sha512-p/tdJrO4U387R9oMjb1oj7qSMaMfmOyd4j9hOFoxZe2baQszgHcSWjuya/CiT5kgZZKRudHNOA0pYXOl8rQ5nw==", 416 | "requires": { 417 | "encodeurl": "~1.0.2", 418 | "escape-html": "~1.0.3", 419 | "parseurl": "~1.3.2", 420 | "send": "0.16.2" 421 | } 422 | }, 423 | "setprototypeof": { 424 | "version": "1.1.1", 425 | "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", 426 | "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" 427 | }, 428 | "statuses": { 429 | "version": "1.5.0", 430 | "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", 431 | "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" 432 | }, 433 | "toidentifier": { 434 | "version": "1.0.0", 435 | "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", 436 | "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" 437 | }, 438 | "type-is": { 439 | "version": "1.6.18", 440 | "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", 441 | "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", 442 | "requires": { 443 | "media-typer": "0.3.0", 444 | "mime-types": "~2.1.24" 445 | } 446 | }, 447 | "unpipe": { 448 | "version": "1.0.0", 449 | "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", 450 | "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" 451 | }, 452 | "utils-merge": { 453 | "version": "1.0.1", 454 | "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", 455 | "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" 456 | }, 457 | "vary": { 458 | "version": "1.1.2", 459 | "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", 460 | "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" 461 | } 462 | } 463 | } 464 | -------------------------------------------------------------------------------- /controller-app/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "contoller-app", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "node index.js", 8 | "test": "echo \"Error: no test specified\" && exit 1" 9 | }, 10 | "author": "", 11 | "license": "ISC", 12 | "dependencies": { 13 | "body-parser": "^1.19.0", 14 | "express": 
"^4.16.4" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /controller-app/pod-template-spec.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "labels": { 4 | "flink-job": "", 5 | "version": "", 6 | "k8s-flink-operator-version": "" 7 | } 8 | }, 9 | "spec": { 10 | "containers": [ 11 | { 12 | "command": [], 13 | "env": [], 14 | "image": "", 15 | "livenessProbe": { 16 | "exec": { 17 | "command": [ 18 | "/app/check.sh" 19 | ] 20 | }, 21 | "initialDelaySeconds": 30, 22 | "periodSeconds": 10 23 | }, 24 | "name": "job", 25 | "resources": { 26 | "limits": { 27 | "cpu": "1000m", 28 | "memory": "500Mi" 29 | }, 30 | "requests": { 31 | "cpu": "100m", 32 | "memory": "50Mi" 33 | } 34 | }, 35 | "volumeMounts": [ 36 | { 37 | "name": "jar", 38 | "mountPath": "/jar", 39 | "readOnly": true 40 | } 41 | ] 42 | }, 43 | { 44 | "image": "", 45 | "command": [], 46 | "env": [], 47 | "name": "jar", 48 | "resources": { 49 | "limits": { 50 | "cpu": "1m", 51 | "memory": "100Mi" 52 | }, 53 | "requests": { 54 | "cpu": "1m", 55 | "memory": "10Mi" 56 | } 57 | }, 58 | "volumeMounts": [ 59 | { 60 | "name": "jar-start", 61 | "mountPath": "/app", 62 | "readOnly": true 63 | }, 64 | { 65 | "name": "jar", 66 | "mountPath": "/jar" 67 | } 68 | ] 69 | } 70 | ], 71 | "restartPolicy": "", 72 | "terminationGracePeriodSeconds": 60, 73 | "volumes": [ 74 | { 75 | "name": "jar-start", 76 | "configMap": { 77 | "name": "", 78 | "defaultMode": 484 79 | } 80 | }, 81 | { 82 | "name": "jar", 83 | "emptyDir": {} 84 | } 85 | ] 86 | } 87 | } -------------------------------------------------------------------------------- /controller-app/statefulset.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "apps/v1", 3 | "kind": "StatefulSet", 4 | "metadata": { 5 | "name": "", 6 | "labels": { 7 | "version": "", 8 | "k8s-flink-operator-version": "" 9 | } 10 | }, 11 | "spec": { 12 | "replicas": 0, 13 | "selector": { 14 | "matchLabels": { 15 | "flink-job": "" 16 | } 17 | }, 18 | "serviceName": "", 19 | "template": {} 20 | } 21 | } -------------------------------------------------------------------------------- /dist/flink-controller.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | labels: {} 5 | name: flink 6 | --- 7 | apiVersion: apiextensions.k8s.io/v1beta1 8 | kind: CustomResourceDefinition 9 | metadata: 10 | labels: {} 11 | name: flinkjobs.operators.srfrnk.com 12 | namespace: flink 13 | spec: 14 | additionalPrinterColumns: 15 | - JSONPath: .metadata.creationTimestamp 16 | description: Time since object was created 17 | name: Age 18 | type: date 19 | group: operators.srfrnk.com 20 | names: 21 | kind: FlinkJob 22 | plural: flinkjobs 23 | shortNames: 24 | - flink 25 | - flinks 26 | singular: flinkjob 27 | scope: Namespaced 28 | subresources: 29 | scale: 30 | labelSelectorPath: .status.labelSelector 31 | specReplicasPath: .spec.replicas 32 | statusReplicasPath: .status.replicas 33 | status: {} 34 | validation: 35 | openAPIV3Schema: 36 | properties: 37 | spec: 38 | properties: 39 | cron: 40 | properties: 41 | concurrencyPolicy: 42 | type: string 43 | schedule: 44 | type: string 45 | required: 46 | - schedule 47 | env: 48 | items: {} 49 | type: array 50 | jarImage: 51 | type: string 52 | jarPath: 53 | type: string 54 | jobManagerUrl: 55 | type: string 56 | mainClass: 57 | type: string 58 | 
props: 59 | items: 60 | properties: 61 | key: 62 | type: string 63 | value: 64 | type: string 65 | valueFrom: 66 | properties: 67 | configMapKeyRef: 68 | properties: 69 | key: 70 | type: string 71 | name: 72 | type: string 73 | secretKeyRef: 74 | properties: 75 | key: 76 | type: string 77 | name: 78 | type: string 79 | required: 80 | - key 81 | type: array 82 | streaming: 83 | properties: 84 | replicas: 85 | minimum: 0 86 | type: integer 87 | required: 88 | - replicas 89 | volumeMounts: 90 | items: 91 | properties: 92 | mount: {} 93 | volume: {} 94 | required: 95 | - volume 96 | - mount 97 | type: array 98 | required: 99 | - jobManagerUrl 100 | - jarImage 101 | - jarPath 102 | - mainClass 103 | required: 104 | - spec 105 | version: v1 106 | --- 107 | apiVersion: v1 108 | kind: Service 109 | metadata: 110 | labels: {} 111 | name: flink-controller 112 | namespace: flink 113 | spec: 114 | ports: 115 | - port: 80 116 | selector: 117 | app: flink-controller 118 | --- 119 | apiVersion: apps/v1 120 | kind: StatefulSet 121 | metadata: 122 | labels: 123 | version: 190805-1259 124 | name: flink-controller 125 | namespace: flink 126 | spec: 127 | replicas: 1 128 | selector: 129 | matchLabels: 130 | app: flink-controller 131 | serviceName: flink-controller 132 | template: 133 | metadata: 134 | labels: 135 | app: flink-controller 136 | version: 190805-1259 137 | spec: 138 | containers: 139 | - args: 140 | - npm start 141 | command: 142 | - bash 143 | - -c 144 | env: 145 | - name: IMAGE_VERSION 146 | value: 190805-1259 147 | - name: DEBUG_LOG 148 | value: "false" 149 | image: srfrnk/flink-controller-app:190805-1259 150 | name: controller 151 | ports: 152 | - containerPort: 80 153 | resources: 154 | limits: 155 | cpu: 100m 156 | memory: 100Mi 157 | requests: 158 | cpu: 10m 159 | memory: 10Mi 160 | --- 161 | apiVersion: metacontroller.k8s.io/v1alpha1 162 | kind: CompositeController 163 | metadata: 164 | labels: {} 165 | name: flink-controller 166 | namespace: flink 167 | spec: 168 | childResources: 169 | - apiVersion: apps/v1 170 | resource: statefulsets 171 | updateStrategy: 172 | method: InPlace 173 | - apiVersion: batch/v1beta1 174 | resource: cronjobs 175 | updateStrategy: 176 | method: InPlace 177 | - apiVersion: v1 178 | resource: configmaps 179 | updateStrategy: 180 | method: OnDelete 181 | generateSelector: true 182 | hooks: 183 | sync: 184 | webhook: 185 | url: http://flink-controller.flink/sync 186 | parentResource: 187 | apiVersion: operators.srfrnk.com/v1 188 | resource: flinkjobs 189 | -------------------------------------------------------------------------------- /environments/minikube/kustomization.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources": [ 3 | "../../base" 4 | ], 5 | "patchesJson6902": [ 6 | { 7 | "target": { 8 | "version": "v1", 9 | "group": "apps", 10 | "kind": "StatefulSet", 11 | "namespace": "flink", 12 | "name": "flink-controller" 13 | }, 14 | "path": "statefulset-patch.json" 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /environments/minikube/statefulset-patch.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "op": "replace", 4 | "path": "/spec/template/spec/containers/0/env/1/value", 5 | "value": "true" 6 | } 7 | ] -------------------------------------------------------------------------------- /environments/publish/kustomization.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "resources": [ 3 | "../../base" 4 | ], 5 | "patchesJson6902": [ 6 | { 7 | "target": { 8 | "version": "v1", 9 | "group": "apps", 10 | "kind": "StatefulSet", 11 | "namespace": "flink", 12 | "name": "flink-controller" 13 | }, 14 | "path": "statefulset-patch.json" 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /environments/publish/statefulset-patch.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "op": "replace", 4 | "path": "/spec/template/spec/containers/0/env/1/value", 5 | "value": "false" 6 | } 7 | ] -------------------------------------------------------------------------------- /job-app/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM flink:1.7.2 2 | 3 | ADD start-streaming.sh /app/start-streaming.sh 4 | ADD start-batch.sh /app/start-batch.sh 5 | ADD check.sh /app/check.sh 6 | ADD log4j.properties conf/log4j.properties 7 | ADD log4j.properties conf/log4j-console.properties 8 | ADD log4j.properties conf/log4j-cli.properties 9 | -------------------------------------------------------------------------------- /job-app/check.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | echo "Checking health:" 3 | if [[ -f /app/jobId ]] 4 | then 5 | jobId=$(cat /app/jobId) 6 | echo "Checking job ${jobId}..." 7 | count=$(flink list -m ${jobManagerUrl} -r | grep -e ": ${jobId} :" | wc -l) 8 | if [[ ${count} -gt 0 ]] 9 | then echo "Job running.";exit 0 10 | else echo "Job missing.";exit 1 11 | fi 12 | else 13 | echo "Job not yet started!" 14 | exit 0 # Job has not yet started - report as healthy (Issue #1) 15 | fi 16 | -------------------------------------------------------------------------------- /job-app/log4j.properties: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | ################################################################################ 18 | 19 | # This affects logging for both user code and Flink 20 | log4j.rootLogger=INFO, file, stdout 21 | 22 | # Uncomment this if you want to _only_ change Flink's logging 23 | #log4j.logger.org.apache.flink=INFO 24 | 25 | # The following lines keep the log level of common libraries/connectors on 26 | # log level INFO. The root logger does not override this. You have to manually 27 | # change the log levels here. 
28 | log4j.logger.akka=INFO 29 | log4j.logger.org.apache.kafka=INFO 30 | log4j.logger.org.apache.hadoop=INFO 31 | log4j.logger.org.apache.zookeeper=INFO 32 | 33 | # Log all infos in the given file 34 | log4j.appender.file=org.apache.log4j.FileAppender 35 | log4j.appender.file.file=${log.file} 36 | log4j.appender.file.append=false 37 | log4j.appender.file.layout=org.apache.log4j.PatternLayout 38 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n 39 | 40 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler 41 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file 42 | 43 | log4j.appender.stdout = org.apache.log4j.ConsoleAppender 44 | log4j.appender.stdout.Threshold = DEBUG 45 | log4j.appender.stdout.Target = System.out 46 | log4j.appender.stdout.layout = org.apache.log4j.PatternLayout 47 | log4j.appender.stdout.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n 48 | -------------------------------------------------------------------------------- /job-app/start-batch.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | trap : TERM INT 3 | 4 | echo "Waiting for jar..." 5 | until [ -f ${jarPath} ] 6 | do 7 | sleep 1 8 | done 9 | 10 | jobProps=$(eval echo "${jobProps}") 11 | cmd="flink run -d -c ${mainClass} -m ${jobManagerUrl} ${jarPath} --jobName '${jobName} (${version})' ${jobProps}" 12 | 13 | echo "Starting job... ${cmd}" 14 | jobRun=$(eval ${cmd}) 15 | 16 | echo "" 17 | echo ${jobRun} 18 | echo "" 19 | 20 | jobId=$(echo ${jobRun} | grep -oP 'JobID \K[a-z0-9]*' | head -n1) 21 | echo "Started Job with ID: ${jobId}" 22 | echo ${jobId} > /app/jobId 23 | 24 | echo "Exiting" 25 | -------------------------------------------------------------------------------- /job-app/start-streaming.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | trap : TERM INT 3 | 4 | echo "Waiting for jar..." 5 | until [ -f ${jarPath} ] 6 | do 7 | sleep 1 8 | done 9 | 10 | jobProps=$(eval echo "${jobProps}") 11 | jobName=$(echo ${jobName/#"flink-job-"}) 12 | cmd="flink run -d -c ${mainClass} -m ${jobManagerUrl} ${jarPath} --jobName '${jobName} (${version})' ${jobProps}" 13 | 14 | echo "Starting job... ${cmd}" 15 | jobRun=$(eval ${cmd}) 16 | 17 | echo "" 18 | echo ${jobRun} 19 | echo "" 20 | 21 | jobId=$(echo ${jobRun} | grep -oP 'JobID \K[a-z0-9]*' | head -n1) 22 | echo "Started Job with ID: ${jobId}" 23 | echo ${jobId} > /app/jobId 24 | 25 | sleep infinity & wait 26 | 27 | echo "Stopping job ${jobId}..." 28 | flink stop -m ${jobManagerUrl} ${jobId} 29 | echo "Exiting" 30 | -------------------------------------------------------------------------------- /k8s-flink-operator.code-workspace: -------------------------------------------------------------------------------- 1 | { 2 | "folders": [ 3 | { 4 | "path": "." 
5 | } 6 | ], 7 | "settings": {} 8 | } -------------------------------------------------------------------------------- /test/.classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /test/.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore Gradle project-specific cache directory 2 | .gradle 3 | 4 | # Ignore Gradle build output directory 5 | build 6 | bin 7 | -------------------------------------------------------------------------------- /test/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | flink-test 4 | Project test created by Buildship. 5 | 6 | 7 | 8 | 9 | org.eclipse.jdt.core.javabuilder 10 | 11 | 12 | 13 | 14 | org.eclipse.buildship.core.gradleprojectbuilder 15 | 16 | 17 | 18 | 19 | 20 | org.eclipse.jdt.core.javanature 21 | org.eclipse.buildship.core.gradleprojectnature 22 | 23 | 24 | -------------------------------------------------------------------------------- /test/.settings/org.eclipse.buildship.core.prefs: -------------------------------------------------------------------------------- 1 | connection.project.dir= 2 | eclipse.preferences.version=1 3 | -------------------------------------------------------------------------------- /test/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine 2 | ADD build/libs /jars 3 | -------------------------------------------------------------------------------- /test/Makefile: -------------------------------------------------------------------------------- 1 | FORCE: 2 | 3 | deploy-minikube: TIMESTAMP=$(shell date +%y%m%d-%H%M -u) 4 | deploy-minikube: FORCE 5 | kubectl apply -f flink.json 6 | gradle build 7 | eval $$(minikube docker-env) && docker build . -t flink-test:${TIMESTAMP} 8 | cat flink-streaming-job.json | sed "s/IMAGE_VERSION/${TIMESTAMP}/" | kubectl apply -f - 9 | cat flink-batch-job.json | sed "s/IMAGE_VERSION/${TIMESTAMP}/" | kubectl apply -f - 10 | -------------------------------------------------------------------------------- /test/build.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was generated by the Gradle 'init' task. 3 | * 4 | * This generated file contains a sample Java project to get you started. 5 | * For more details take a look at the Java Quickstart chapter in the Gradle 6 | * User Manual available at https://docs.gradle.org/5.2.1/userguide/tutorial_java_projects.html 7 | */ 8 | 9 | plugins { 10 | // Apply the java plugin to add support for Java 11 | id 'java' 12 | 13 | // Apply the application plugin to add support for building an application 14 | id 'application' 15 | 16 | id 'maven' 17 | id 'com.github.johnrengelman.shadow' version '4.0.3' 18 | } 19 | 20 | repositories { 21 | // Use jcenter for resolving your dependencies. 22 | // You can declare any Maven/Ivy/file repository here. 
23 | jcenter() 24 | } 25 | 26 | dependencies { 27 | compile(group: 'org.apache.beam', name: 'beam-sdks-java-core', version:'2.11.0') 28 | compile group: 'org.apache.beam', name: 'beam-model-pipeline', version: '2.11.0' 29 | compile(group: 'org.apache.beam', name: 'beam-runners-flink_2.11', version:'2.11.0') 30 | 31 | // compile group: 'org.apache.beam', name: 'beam-vendor-guava-20_0', version: '0.1' 32 | // compile group: 'org.apache.beam', name: 'beam-vendor-grpc-1_13_1', version: '0.2' 33 | 34 | compile(group: 'org.apache.flink', name: 'flink-core', version: '1.7.1') 35 | compile(group: 'org.apache.flink', name: 'flink-streaming-java_2.11', version: '1.7.1') 36 | compile(group: 'org.apache.flink', name: 'flink-clients_2.11', version: '1.7.1') 37 | 38 | compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25' 39 | compile group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.25' 40 | // compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.8' 41 | // compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8' 42 | // compile group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: '2.9.8' 43 | // compile group: 'joda-time', name: 'joda-time', version: '2.10.1' 44 | 45 | // This dependency is found on compile classpath of this component and consumers. 46 | implementation 'com.google.guava:guava:27.0.1-jre' 47 | 48 | // Use JUnit test framework 49 | testImplementation 'junit:junit:4.12' 50 | } 51 | 52 | shadowJar { 53 | zip64 true 54 | mergeServiceFiles() 55 | } 56 | 57 | // Define the main class for the application 58 | mainClassName = 'flink.test.App' 59 | -------------------------------------------------------------------------------- /test/flink-batch-job.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "operators.srfrnk.com/v1", 3 | "kind": "FlinkJob", 4 | "metadata": { 5 | "name": "flink-batch-job" 6 | }, 7 | "spec": { 8 | "version": "IMAGE_VERSION", 9 | "replicas": 1, 10 | "jobManagerUrl": "flink-jobmanager:8081", 11 | "jarImage": "flink-test:IMAGE_VERSION", 12 | "jarPath": "/jars/flink-test.jar", 13 | "mainClass": "flink.test.AppBatch", 14 | "cron": { 15 | "schedule": "*/5 * * * *", 16 | "concurrencyPolicy": "Allow" 17 | }, 18 | "props": [ 19 | { 20 | "key": "envKey1", 21 | "value": "value1" 22 | }, 23 | { 24 | "key": "envKey2", 25 | "value": "value2" 26 | } 27 | ] 28 | } 29 | } -------------------------------------------------------------------------------- /test/flink-streaming-job.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "operators.srfrnk.com/v1", 3 | "kind": "FlinkJob", 4 | "metadata": { 5 | "name": "flink-streaming-job" 6 | }, 7 | "spec": { 8 | "version": "IMAGE_VERSION", 9 | "jobManagerUrl": "flink-jobmanager:8081", 10 | "jarImage": "flink-test:IMAGE_VERSION", 11 | "jarPath": "/jars/flink-test.jar", 12 | "mainClass": "flink.test.AppStreaming", 13 | "streaming": { 14 | "replicas": 1 15 | }, 16 | "env": [ 17 | { 18 | "name": "env1key", 19 | "value": "env1value" 20 | } 21 | ], 22 | "props": [ 23 | { 24 | "key": "envKey1", 25 | "value": "value1" 26 | }, 27 | { 28 | "key": "envKey2", 29 | "value": "value2" 30 | } 31 | ] 32 | } 33 | } -------------------------------------------------------------------------------- /test/flink.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "v1", 3 | "kind": "List", 4 | 
"items": [ 5 | { 6 | "apiVersion": "v1", 7 | "kind": "Service", 8 | "metadata": { 9 | "name": "flink-jobmanager" 10 | }, 11 | "spec": { 12 | "ports": [ 13 | { 14 | "name": "rpc", 15 | "port": 6123 16 | }, 17 | { 18 | "name": "blob", 19 | "port": 6124 20 | }, 21 | { 22 | "name": "query", 23 | "port": 6125 24 | }, 25 | { 26 | "name": "ui", 27 | "port": 8081 28 | } 29 | ], 30 | "selector": { 31 | "app": "flink", 32 | "component": "jobmanager" 33 | } 34 | } 35 | }, 36 | { 37 | "apiVersion": "apps/v1", 38 | "kind": "Deployment", 39 | "metadata": { 40 | "name": "flink-taskmanager" 41 | }, 42 | "spec": { 43 | "replicas": 1, 44 | "selector": { 45 | "matchLabels": { 46 | "app": "flink", 47 | "component": "taskmanager" 48 | } 49 | }, 50 | "template": { 51 | "metadata": { 52 | "labels": { 53 | "app": "flink", 54 | "component": "taskmanager" 55 | } 56 | }, 57 | "spec": { 58 | "containers": [ 59 | { 60 | "args": [ 61 | "taskmanager" 62 | ], 63 | "env": [ 64 | { 65 | "name": "JOB_MANAGER_RPC_ADDRESS", 66 | "value": "flink-jobmanager" 67 | } 68 | ], 69 | "image": "flink:1.7.2", 70 | "name": "taskmanager", 71 | "ports": [ 72 | { 73 | "containerPort": 6121, 74 | "name": "data" 75 | }, 76 | { 77 | "containerPort": 6122, 78 | "name": "rpc" 79 | }, 80 | { 81 | "containerPort": 6125, 82 | "name": "query" 83 | } 84 | ], 85 | "resources": { 86 | "limits": { 87 | "cpu": "1000m", 88 | "memory": "1Gi" 89 | }, 90 | "requests": { 91 | "cpu": "100m", 92 | "memory": "0.1Gi" 93 | } 94 | } 95 | } 96 | ] 97 | } 98 | } 99 | } 100 | }, 101 | { 102 | "apiVersion": "apps/v1", 103 | "kind": "StatefulSet", 104 | "metadata": { 105 | "name": "flink-jobmanager" 106 | }, 107 | "spec": { 108 | "replicas": 1, 109 | "selector": { 110 | "matchLabels": { 111 | "app": "flink", 112 | "component": "jobmanager" 113 | } 114 | }, 115 | "serviceName": "flink-jobmanager", 116 | "template": { 117 | "metadata": { 118 | "labels": { 119 | "app": "flink", 120 | "component": "jobmanager" 121 | } 122 | }, 123 | "spec": { 124 | "containers": [ 125 | { 126 | "args": [ 127 | "jobmanager" 128 | ], 129 | "env": [ 130 | { 131 | "name": "JOB_MANAGER_RPC_ADDRESS", 132 | "value": "flink-jobmanager" 133 | } 134 | ], 135 | "image": "flink:1.7.2", 136 | "name": "jobmanager", 137 | "ports": [ 138 | { 139 | "containerPort": 6123, 140 | "name": "rpc" 141 | }, 142 | { 143 | "containerPort": 6124, 144 | "name": "blob" 145 | }, 146 | { 147 | "containerPort": 6125, 148 | "name": "query" 149 | }, 150 | { 151 | "containerPort": 8081, 152 | "name": "ui" 153 | } 154 | ], 155 | "resources": { 156 | "limits": { 157 | "cpu": "1000m", 158 | "memory": "1Gi" 159 | }, 160 | "requests": { 161 | "cpu": "100m", 162 | "memory": "0.1Gi" 163 | } 164 | } 165 | } 166 | ] 167 | } 168 | } 169 | } 170 | } 171 | ] 172 | } -------------------------------------------------------------------------------- /test/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/srfrnk/k8s-flink-operator/9afabeeeb40d84024615a3a15e7dccdd5f7793a4/test/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /test/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.2.1-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | 
-------------------------------------------------------------------------------- /test/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Attempt to set APP_HOME 10 | # Resolve links: $0 may be a link 11 | PRG="$0" 12 | # Need this for relative symlinks. 13 | while [ -h "$PRG" ] ; do 14 | ls=`ls -ld "$PRG"` 15 | link=`expr "$ls" : '.*-> \(.*\)$'` 16 | if expr "$link" : '/.*' > /dev/null; then 17 | PRG="$link" 18 | else 19 | PRG=`dirname "$PRG"`"/$link" 20 | fi 21 | done 22 | SAVED="`pwd`" 23 | cd "`dirname \"$PRG\"`/" >/dev/null 24 | APP_HOME="`pwd -P`" 25 | cd "$SAVED" >/dev/null 26 | 27 | APP_NAME="Gradle" 28 | APP_BASE_NAME=`basename "$0"` 29 | 30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 31 | DEFAULT_JVM_OPTS='"-Xmx64m"' 32 | 33 | # Use the maximum available, or set MAX_FD != -1 to use that value. 34 | MAX_FD="maximum" 35 | 36 | warn () { 37 | echo "$*" 38 | } 39 | 40 | die () { 41 | echo 42 | echo "$*" 43 | echo 44 | exit 1 45 | } 46 | 47 | # OS specific support (must be 'true' or 'false'). 48 | cygwin=false 49 | msys=false 50 | darwin=false 51 | nonstop=false 52 | case "`uname`" in 53 | CYGWIN* ) 54 | cygwin=true 55 | ;; 56 | Darwin* ) 57 | darwin=true 58 | ;; 59 | MINGW* ) 60 | msys=true 61 | ;; 62 | NONSTOP* ) 63 | nonstop=true 64 | ;; 65 | esac 66 | 67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 68 | 69 | # Determine the Java command to use to start the JVM. 70 | if [ -n "$JAVA_HOME" ] ; then 71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 72 | # IBM's JDK on AIX uses strange locations for the executables 73 | JAVACMD="$JAVA_HOME/jre/sh/java" 74 | else 75 | JAVACMD="$JAVA_HOME/bin/java" 76 | fi 77 | if [ ! -x "$JAVACMD" ] ; then 78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 79 | 80 | Please set the JAVA_HOME variable in your environment to match the 81 | location of your Java installation." 82 | fi 83 | else 84 | JAVACMD="java" 85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 86 | 87 | Please set the JAVA_HOME variable in your environment to match the 88 | location of your Java installation." 89 | fi 90 | 91 | # Increase the maximum file descriptors if we can. 92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 93 | MAX_FD_LIMIT=`ulimit -H -n` 94 | if [ $? -eq 0 ] ; then 95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 96 | MAX_FD="$MAX_FD_LIMIT" 97 | fi 98 | ulimit -n $MAX_FD 99 | if [ $? 
-ne 0 ] ; then 100 | warn "Could not set maximum file descriptor limit: $MAX_FD" 101 | fi 102 | else 103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 104 | fi 105 | fi 106 | 107 | # For Darwin, add options to specify how the application appears in the dock 108 | if $darwin; then 109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 110 | fi 111 | 112 | # For Cygwin, switch paths to Windows format before running java 113 | if $cygwin ; then 114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 116 | JAVACMD=`cygpath --unix "$JAVACMD"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Escape application args 158 | save () { 159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 160 | echo " " 161 | } 162 | APP_ARGS=$(save "$@") 163 | 164 | # Collect all arguments for the java command, following the shell quoting and substitution rules 165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 166 | 167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 169 | cd "$(dirname "$0")" 170 | fi 171 | 172 | exec "$JAVACMD" "$@" 173 | -------------------------------------------------------------------------------- /test/gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | set DIRNAME=%~dp0 
12 | if "%DIRNAME%" == "" set DIRNAME=. 13 | set APP_BASE_NAME=%~n0 14 | set APP_HOME=%DIRNAME% 15 | 16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 17 | set DEFAULT_JVM_OPTS="-Xmx64m" 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | 53 | :win9xME_args 54 | @rem Slurp the command line arguments. 55 | set CMD_LINE_ARGS= 56 | set _SKIP=2 57 | 58 | :win9xME_args_slurp 59 | if "x%~1" == "x" goto execute 60 | 61 | set CMD_LINE_ARGS=%* 62 | 63 | :execute 64 | @rem Setup the command line 65 | 66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 67 | 68 | @rem Execute Gradle 69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 70 | 71 | :end 72 | @rem End local scope for the variables with windows NT shell 73 | if "%ERRORLEVEL%"=="0" goto mainEnd 74 | 75 | :fail 76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 77 | rem the _cmd.exe /c_ return code! 78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 79 | exit /b 1 80 | 81 | :mainEnd 82 | if "%OS%"=="Windows_NT" endlocal 83 | 84 | :omega 85 | -------------------------------------------------------------------------------- /test/settings.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was generated by the Gradle 'init' task. 3 | * 4 | * The settings file is used to specify which projects to include in your build. 5 | * 6 | * Detailed information about configuring a multi-project build in Gradle can be found 7 | * in the user manual at https://docs.gradle.org/5.2.1/userguide/multi_project_builds.html 8 | */ 9 | 10 | rootProject.name = 'flink-test' 11 | -------------------------------------------------------------------------------- /test/src/main/java/flink/test/AppBatch.java: -------------------------------------------------------------------------------- 1 | /* 2 | * This Java source file was generated by the Gradle 'init' task. 
3 | */
4 | package flink.test;
5 | 
6 | import java.io.IOException;
7 | import java.util.Arrays;
8 | import java.util.List;
9 | import java.util.NoSuchElementException;
10 | import org.apache.beam.runners.flink.FlinkPipelineOptions;
11 | import org.apache.beam.runners.flink.FlinkRunner;
12 | import org.apache.beam.sdk.Pipeline;
13 | import org.apache.beam.sdk.coders.AvroCoder;
14 | import org.apache.beam.sdk.coders.Coder;
15 | import org.apache.beam.sdk.io.Read;
16 | import org.apache.beam.sdk.io.UnboundedSource;
17 | import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
18 | import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
19 | import org.apache.beam.sdk.options.PipelineOptions;
20 | import org.apache.beam.sdk.options.PipelineOptionsFactory;
21 | import org.apache.beam.sdk.transforms.Create;
22 | import org.apache.beam.sdk.transforms.DoFn;
23 | import org.apache.beam.sdk.transforms.MapElements;
24 | import org.apache.beam.sdk.transforms.ParDo;
25 | import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
26 | import org.apache.beam.sdk.values.TypeDescriptors;
27 | import org.apache.flink.api.java.utils.ParameterTool;
28 | import org.joda.time.Instant;
29 | import org.slf4j.LoggerFactory;
30 | 
31 | public class AppBatch {
32 | private static org.slf4j.Logger LOG = LoggerFactory.getLogger(AppBatch.class);
33 | 
34 | public static void main(String[] args) {
35 | ParameterTool parameters = ParameterTool.fromArgs(args);
36 | String jobName = parameters.get("jobName", "Undefined-Name");
37 | String prop1 = parameters.get("envKey1", "-------");
38 | String prop2 = parameters.get("envKey2", "-------");
39 | LOG.info("prop1: {}", prop1);
40 | LOG.info("prop2: {}", prop2);
41 | FlinkPipelineOptions pipelineOptions =
42 | PipelineOptionsFactory.create().as(FlinkPipelineOptions.class);
43 | pipelineOptions.setJobName(jobName);
44 | pipelineOptions.setRunner(FlinkRunner.class);
45 | pipelineOptions.setParallelism(1);
46 | pipelineOptions.setStreaming(false);
47 | Pipeline p = Pipeline.create(pipelineOptions);
48 | 
49 | p.apply(Create.of("To be, or not to be: that is the question:",
50 | "Whether 'tis nobler in the mind to suffer", "The slings and arrows of fortune,",
51 | "Or to take arms against a sea of troubles,"))
52 | 
53 | .apply(ParDo.of(new DoFn<String, String>() {
54 | private static final long serialVersionUID = 1;
55 | 
56 | @ProcessElement
57 | public void processElement(ProcessContext c) {
58 | String line = c.element();
59 | for (int i = 0; i < 1000; i++) {
60 | c.output(line);
61 | }
62 | }
63 | }))
64 | 
65 | .apply(ParDo.of(new DoFn<String, String>() {
66 | private static final long serialVersionUID = 1;
67 | 
68 | @ProcessElement
69 | public void processElement(ProcessContext c) {
70 | String[] words = c.element().split("\\s");
71 | for (String word : words) {
72 | c.output(word);
73 | }
74 | }
75 | }))
76 | 
77 | .apply(MapElements.into(TypeDescriptors.strings()).via(word -> {
78 | LOG.info("{}", word);
79 | return word;
80 | }));
81 | 
82 | p.run();
83 | LOG.info("Job should start running now...");
84 | }
85 | }
86 | 
-------------------------------------------------------------------------------- /test/src/main/java/flink/test/AppStreaming.java: --------------------------------------------------------------------------------
1 | /*
2 | * This Java source file was generated by the Gradle 'init' task.
3 | */
4 | package flink.test;
5 | 
6 | import java.io.IOException;
7 | import java.util.Arrays;
8 | import java.util.List;
9 | import java.util.NoSuchElementException;
10 | import org.apache.beam.runners.flink.FlinkPipelineOptions;
11 | import org.apache.beam.runners.flink.FlinkRunner;
12 | import org.apache.beam.sdk.Pipeline;
13 | import org.apache.beam.sdk.coders.AvroCoder;
14 | import org.apache.beam.sdk.coders.Coder;
15 | import org.apache.beam.sdk.io.Read;
16 | import org.apache.beam.sdk.io.UnboundedSource;
17 | import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
18 | import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
19 | import org.apache.beam.sdk.options.PipelineOptions;
20 | import org.apache.beam.sdk.options.PipelineOptionsFactory;
21 | import org.apache.beam.sdk.transforms.DoFn;
22 | import org.apache.beam.sdk.transforms.MapElements;
23 | import org.apache.beam.sdk.transforms.ParDo;
24 | import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
25 | import org.apache.beam.sdk.values.TypeDescriptors;
26 | import org.apache.flink.api.java.utils.ParameterTool;
27 | import org.joda.time.Instant;
28 | import org.slf4j.LoggerFactory;
29 | 
30 | public class AppStreaming {
31 | static class StreamingStringMark implements UnboundedSource.CheckpointMark {
32 | @Override
33 | public void finalizeCheckpoint() throws IOException {
34 | 
35 | }
36 | }
37 | 
38 | static class StreamingStringSource extends UnboundedSource<String, StreamingStringMark> {
39 | private static final long serialVersionUID = 1L;
40 | private String[] strings;
41 | 
42 | public StreamingStringSource(String[] strings) {
43 | this.strings = strings;
44 | }
45 | 
46 | @Override
47 | public List<? extends UnboundedSource<String, StreamingStringMark>> split(
48 | int desiredNumSplits, PipelineOptions options) throws Exception {
49 | return Arrays.asList(this);
50 | }
51 | 
52 | @Override
53 | public UnboundedReader<String> createReader(PipelineOptions options,
54 | StreamingStringMark checkpointMark) throws IOException {
55 | return new StreamingStringReader(this);
56 | }
57 | 
58 | @Override
59 | public Coder<StreamingStringMark> getCheckpointMarkCoder() {
60 | return AvroCoder.of(StreamingStringMark.class);
61 | }
62 | 
63 | @Override
64 | public Coder<String> getOutputCoder() {
65 | return AvroCoder.of(String.class);
66 | }
67 | }
68 | 
69 | static class StreamingStringReader extends UnboundedReader<String> {
70 | 
71 | private StreamingStringSource source;
72 | private int idx = 0;
73 | 
74 | public StreamingStringReader(StreamingStringSource source) {
75 | this.source = source;
76 | }
77 | 
78 | @Override
79 | public boolean start() throws IOException {
80 | this.idx = 0;
81 | return true;
82 | }
83 | 
84 | @Override
85 | public boolean advance() throws IOException {
86 | this.idx++;
87 | return idx % 100 == 0;
88 | }
89 | 
90 | @Override
91 | public Instant getWatermark() {
92 | return Instant.now().plus(idx * 100);
93 | }
94 | 
95 | @Override
96 | public CheckpointMark getCheckpointMark() {
97 | return new StreamingStringMark();
98 | }
99 | 
100 | @Override
101 | public UnboundedSource<String, ?> getCurrentSource() {
102 | return source;
103 | }
104 | 
105 | @Override
106 | public String getCurrent() throws NoSuchElementException {
107 | return this.source.strings[this.idx % this.source.strings.length];
108 | }
109 | 
110 | @Override
111 | public Instant getCurrentTimestamp() throws NoSuchElementException {
112 | return BoundedWindow.TIMESTAMP_MIN_VALUE;
113 | }
114 | 
115 | @Override
116 | public void close() throws IOException {
117 | }
118 | }
119 | 
120 | private static org.slf4j.Logger LOG = LoggerFactory.getLogger(AppStreaming.class);
121 | 
122 | public static void main(String[] args) {
123 | ParameterTool parameters = ParameterTool.fromArgs(args);
124 | String jobName = parameters.get("jobName", "Undefined-Name");
125 | String prop1 = parameters.get("envKey1", "-------");
126 | String prop2 = parameters.get("envKey2", "-------");
127 | LOG.info("prop1: {}", prop1);
128 | LOG.info("prop2: {}", prop2);
129 | FlinkPipelineOptions pipelineOptions =
130 | PipelineOptionsFactory.create().as(FlinkPipelineOptions.class);
131 | pipelineOptions.setJobName(jobName);
132 | pipelineOptions.setRunner(FlinkRunner.class);
133 | pipelineOptions.setParallelism(1);
134 | pipelineOptions.setStreaming(true);
135 | Pipeline p = Pipeline.create(pipelineOptions);
136 | 
137 | String[] strings = new String[] {"To be, or not to be: that is the question:",
138 | "Whether 'tis nobler in the mind to suffer", "The slings and arrows of fortune,",
139 | "Or to take arms against a sea of troubles,"};
140 | 
141 | p.apply(Read.from(new StreamingStringSource(strings)))
142 | 
143 | .apply(ParDo.of(new DoFn<String, String>() {
144 | private static final long serialVersionUID = 1;
145 | 
146 | @ProcessElement
147 | public void processElement(ProcessContext c) {
148 | String line = c.element();
149 | for (int i = 0; i < 1000; i++) {
150 | c.output(line);
151 | }
152 | }
153 | }))
154 | 
155 | .apply(ParDo.of(new DoFn<String, String>() {
156 | private static final long serialVersionUID = 1;
157 | 
158 | @ProcessElement
159 | public void processElement(ProcessContext c) {
160 | String[] words = c.element().split("\\s");
161 | for (String word : words) {
162 | c.output(word);
163 | }
164 | }
165 | }))
166 | 
167 | .apply(MapElements.into(TypeDescriptors.strings()).via(word -> {
168 | LOG.info("{}", word);
169 | return word;
170 | }));
171 | 
172 | p.run();
173 | LOG.info("Job should start running now...");
174 | }
175 | }
176 | 
-------------------------------------------------------------------------------- /test/src/main/resources/simplelogger.properties: --------------------------------------------------------------------------------
1 | org.slf4j.simpleLogger.defaultLogLevel=info
2 | org.slf4j.simpleLogger.showDateTime=true
3 | org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss:SSS Z
4 | org.slf4j.simpleLogger.showLogName=true
5 | org.slf4j.simpleLogger.logFile=System.out
6 | 
-------------------------------------------------------------------------------- /test/src/test/java/flink/test/AppTest.java: --------------------------------------------------------------------------------
1 | /*
2 | * This Java source file was generated by the Gradle 'init' task.
3 | */
4 | package flink.test;
5 | 
6 | import org.junit.Test;
7 | import static org.junit.Assert.*;
8 | 
9 | public class AppTest {
10 | }
11 | 
--------------------------------------------------------------------------------
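Taken together, the `test` module gives an end-to-end check of the operator. A rough minikube walkthrough, assuming metacontroller and the flink controller are already installed, might look like the sketch below; the `deploy-minikube` target is the one defined in `test/Makefile`, while the port-forward and REST call are simply one assumed way to verify the result (the `flink-jobmanager` service in `test/flink.json` exposes the UI/REST port 8081).

```bash
# Build the test jar image and submit both sample FlinkJob resources to minikube.
make -C test deploy-minikube

# The session cluster defined in test/flink.json should come up first.
kubectl get pods -l app=flink

# Check the submitted jobs through the JobManager's REST API.
kubectl port-forward svc/flink-jobmanager 8081:8081 &
curl -s http://localhost:8081/jobs
```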