├── .circleci └── config.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── Makefile ├── Makefile.docker ├── Makefile.java ├── README.md ├── example-consumer.yaml ├── example-producer.yaml ├── kubernetes.yaml ├── openshift.yaml ├── pom.xml ├── src └── main │ ├── java │ └── io │ │ └── strimzi │ │ └── topicwebhook │ │ ├── Main.java │ │ ├── TopicWebhook.java │ │ └── TopicWebhookConfig.java │ └── resources │ ├── log4j.properties │ ├── webhook-key.pem │ └── webhook.pem └── tls ├── ca.json └── webhook.json /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build: 4 | environment: 5 | - USER: strimzi 6 | docker: 7 | - image: scholzj/circleci-centos-java:latest 8 | steps: 9 | - setup_remote_docker 10 | - checkout 11 | - run: 12 | name: Build code 13 | command: make build 14 | - run: 15 | name: Build Docker images 16 | command: make docker_build 17 | - deploy: 18 | name: Login to Docker Hub 19 | command: docker login -u="$DOCKER_USERNAME" -p="$DOCKER_PASSWORD" 20 | - deploy: 21 | name: Push to Docker hub 22 | command: make docker_push 23 | workflows: 24 | version: 2 25 | build-workflow: 26 | jobs: 27 | - build: 28 | context: org-global -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # IntelliJ IDEA specific 2 | .idea/ 3 | *.iml 4 | target 5 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM strimzi/java-base:8-3 2 | 3 | ARG version=latest 4 | ENV VERSION ${version} 5 | ADD target/kafka-topic-webhook.jar / 6 | 7 | CMD ["/bin/launch_java.sh", "/kafka-topic-webhook.jar"] -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PROJECT_NAME=kafka-topic-webhook 2 | 3 | all: java_build docker_build docker_push 4 | build: java_build 5 | build: java_build 6 | clean: java_clean 7 | 8 | gen_certs: 9 | echo "Generating certificates ..." 
10 | cfssl genkey -initca tls/ca.json | cfssljson -bare tls/ca 11 | cfssl gencert -ca tls/ca.pem -ca-key tls/ca-key.pem tls/webhook.json | cfssljson -bare tls/webhook 12 | mv tls/webhook.pem src/main/resources/webhook.pem 13 | mv tls/webhook-key.pem src/main/resources/webhook-key.pem 14 | sed -i "s/.*caBundle.*/ caBundle: $$(cat tls/ca.pem | base64 | tr -d '\n')/" openshift.yaml 15 | sed -i "s/.*caBundle.*/ caBundle: $$(cat tls/ca.pem | base64 | tr -d '\n')/" kubernetes.yaml 16 | 17 | include ./Makefile.docker 18 | 19 | include ./Makefile.java 20 | 21 | .PHONY: build clean 22 | -------------------------------------------------------------------------------- /Makefile.docker: -------------------------------------------------------------------------------- 1 | # Makefile.docker contains the shared tasks for building, tagging and pushing Docker images. 2 | # This file is included into the Makefile files which contain the Dockerfile files (E.g. 3 | # kafka-base, kafka-statefulsets etc.). 4 | # 5 | # The DOCKER_ORG (default is name of the current user) and DOCKER_TAG (based on Git Tag, 6 | # default latest) variables are used to name the Docker image. DOCKER_REGISTRY identifies 7 | # the registry where the image will be pushed (default is Docker Hub). DOCKER_VERSION_ARG 8 | # is passed to the image build (based on Git commit, default latest) 9 | 10 | DOCKERFILE_DIR ?= ./ 11 | DOCKER_REGISTRY ?= docker.io 12 | DOCKER_ORG ?= $(USER) 13 | DOCKER_TAG ?= latest 14 | DOCKER_VERSION_ARG ?= latest 15 | 16 | all: docker_build docker_push 17 | 18 | docker_build: 19 | echo "Building Docker image ..." 20 | docker build --build-arg version=$(DOCKER_VERSION_ARG) -t strimzi/$(PROJECT_NAME):$(DOCKER_TAG) $(DOCKERFILE_DIR) 21 | 22 | docker_tag: 23 | echo "Tagging strimzi/$(PROJECT_NAME):$(DOCKER_TAG) to $(DOCKER_REGISTRY)/$(DOCKER_ORG)/$(PROJECT_NAME):$(DOCKER_TAG) ..." 
24 | docker tag strimzi/$(PROJECT_NAME):$(DOCKER_TAG) $(DOCKER_REGISTRY)/$(DOCKER_ORG)/$(PROJECT_NAME):$(DOCKER_TAG) 25 | 26 | docker_push: docker_tag 27 | echo "Pushing $(DOCKER_REGISTRY)/$(DOCKER_ORG)/$(PROJECT_NAME):$(DOCKER_TAG) ..." 28 | docker push $(DOCKER_REGISTRY)/$(DOCKER_ORG)/$(PROJECT_NAME):$(DOCKER_TAG) -------------------------------------------------------------------------------- /Makefile.java: -------------------------------------------------------------------------------- 1 | # Makefile.java contains the shared tasks for building Java applications. This file is 2 | # included into the Makefile files which contain some Java sources which should be build 3 | # (E.g. cluster-controller etc.). 4 | # 5 | 6 | java_build: 7 | echo "Building JAR file ..." 8 | mvn package 9 | 10 | java_clean: 11 | echo "Cleaning Maven build ..." 12 | mvn clean 13 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![CircleCI](https://circleci.com/gh/strimzi/kafka-topic-webhook.svg?style=shield)](https://circleci.com/gh/strimzi/kafka-topic-webhook) 2 | 3 | # Kafka Topic Admission Webhook for Kubernetes / OpenShift 4 | 5 | Kafka Topic Admission Webhook allows managing Kafka topics through annotations set on Pods which are using these topics. Pods 6 | can be annotated with annotation `topic-webhook.kafka.strimzi.io/topics` containing a description of topics used by 7 | given Pod. When creating the Pod, the webhook will create the required topics or fail the creation of the Pod. 8 | 9 | The webhook is using [*Dynamic Admission Control* feature](https://v1-7.docs.kubernetes.io/docs/admin/extensible-admission-controllers/#external-admission-webhooks) of Kubernetes / OpenShift. 10 | It will deploy a simple HTTP server which implements the Admission Review webhook. This webhook is called every time when 11 | a new Pod is being created. 
It checks the `topic-webhook.kafka.strimzi.io/topics` annotation and if it is present it 12 | will evaluate it and either allow or reject the creation of the Pod. All Pods without the annotation will be 13 | automatically allowed. 14 | 15 | 16 | 17 | - [Annotation format](#annotation-format) 18 | - [Installation](#installation) 19 | - [OpenShift](#openshift) 20 | - [Kubernetes](#kubernetes) 21 | - [Installing into different namespace / project](#installing-into-different-namespace--project) 22 | - [Supported Kubernetes and OpenShift versions](#supported-kubernetes-and-openshift-versions) 23 | - [Examples](#examples) 24 | - [Comparing with other approaches](#comparing-with-other-approaches) 25 | - [TODO](#todo) 26 | - [FAQ](#faq) 27 | 28 | 29 | 30 | ## Annotation format 31 | 32 | The `topic-webhook.kafka.strimzi.io/topics` annotation should always contain a JSON array with one or more Kafka 33 | topics. The topic specification has the following format: 34 | ``` 35 | {"name": , "create": , "assert": , "partitions": , "replicas": , "config": } 36 | ``` 37 | 38 | The fields have the following meaning and default values: 39 | 40 | | Field | Explanation | Required | Default | Example | 41 | |-------|-------------|----------|---------|---------| 42 | | `name` | Name of the Kafka topic | Yes | n/a | `myTopic` | 43 | | `create` | Create the topic if it doesn't exist | No | `true` | `true` | 44 | | `assert` | If the topic exists, assert that it matches the required configuration | No | `false` | `false` | 45 | | `partitions` | Number of partitions | No | `1` | `3` | 46 | | `replicas` | Number of replicas / replication factor | No | `1` | `3` | 47 | | `zookeeper` | Zookeeper address | No | Configured as part of Webhook deployment | `my-zookeeper:2181/my-kafka-cluster` | 48 | | `config` | Additional configuration options for the Kafka topic | No | n/a | `{ "cleanup.policy": "compact" }` | 49 | 50 | 51 | For example: 52 | ```json 53 | {"name": "topicX", "create": true, "assert": 
false, "partitions": 3, "replicas": 3, "config": { "cleanup.policy": "compact" } } 54 | ``` 55 | 56 | The following example shows a Kubernetes Deployment using the annotation: 57 | ```yaml 58 | apiVersion: extensions/v1beta1 59 | kind: Deployment 60 | metadata: 61 | name: my-kafka-consumer 62 | spec: 63 | replicas: 1 64 | template: 65 | metadata: 66 | annotations: 67 | topic-webhook.kafka.strimzi.io/topics: "[ {\"name\": \"topicX\", \"create\": true, \"assert\": false, \"partitions\": 3, \"replicas\": 3, \"config\": { \"cleanup.policy\": \"compact\" } }, {\"name\": \"topicY\", \"create\": true, \"assert\": false } ]" 68 | spec: 69 | ... 70 | ... 71 | ``` 72 | 73 | *The full example can be found in the [`example-consumer.yaml`](example-consumer.yaml) file.* 74 | 75 | ## Installation 76 | 77 | ### OpenShift 78 | 79 | On OpenShift the webhook is by default installed into the `myproject` project and requires *admin* privileges (the 80 | `ExternalAdmissionHookConfiguration` installation requires the admin user). To install it, run: 81 | ``` 82 | oc apply -f openshift.yaml 83 | ``` 84 | 85 | ### Kubernetes 86 | 87 | On Kubernetes the webhook is by default installed into the `default` namespace and requires *admin* privileges (the 88 | `ExternalAdmissionHookConfiguration` installation requires them). To install it, run: 89 | ``` 90 | kubectl apply -f kubernetes.yaml 91 | ``` 92 | 93 | ### Installing into different namespace / project 94 | 95 | If you want to install it into a different namespace / project, you have to change `tls/webhook.json` and `openshift.yaml` to change 96 | the namespace and the URL for the SSL certificates, regenerate the certificates using `make gen_certs` and rebuild the 97 | Docker image using `make all`. This applies to both Kubernetes as well as OpenShift. 98 | 99 | ### Supported Kubernetes and OpenShift versions 100 | 101 | Dynamic Admission Control is supported since Kubernetes 1.7 and OpenShift 3.7. 
Depending on your Kubernetes / OpenShift 102 | cluster installation, you might need to enable it manually. 103 | 104 | ## Examples 105 | 106 | To test the webhook, you can deploy the [`example-consumer.yaml`](example-consumer.yaml) and [`example-producer.yaml`](example-producer.yaml) 107 | which will trigger the topic creation and send / receive messages to this topic. The messages can be seen in their logs. 108 | 109 | ## Comparing with other approaches 110 | 111 | Another approach to managing topics *the Kubernetes way* could be using Custom Resource Definitions (CRD) or using ConfigMaps (CM) and 112 | the operator concept. A user deploys a CRD or CM describing the Apache Kafka topic. A controller will monitor the CRDs / CMs 113 | and create or update topics when they are created or modified. Both approaches allow defining the messaging infrastructure 114 | using Kubernetes resources. The webhook approach has some advantages as well as some disadvantages. 115 | 116 | **Advantages:** 117 | * The pod will be started only once the topics are created by the webhook. The process is synchronous. 118 | * The topics are defined directly in the Pod annotations. There are no conflicts when the producer and consumer are 119 | using the same CRD / CM and either overwrite each other's definitions or create two conflicting definitions. 120 | * The annotation is stored directly with the consumer / producer. No need to search for separate resources. 121 | * The annotation address gives a lot of flexibility in topic configuration. It can insist on specific topic 122 | configuration or ignore the configuration details and be happy when the topic with the given name simply exists. 123 | 124 | 125 | **Disadvantages:** 126 | * The webhook has to analyze all pods running in your cluster. 
When the webhook is unavailable - depending on the 127 | `failurePolicy` (see FAQ for more details) - it will either block all Pods from being started (including the webhook 128 | itself) or simply ignore the requested topics. Even when the webhook is available, it has to analyze all Pods and thus 129 | increases the latency when starting new Pods. 130 | * The annotations are checked only when some Pod is starting. They are not rechecked periodically, so changes done to 131 | the topics only after the Pod was started will not be detected. 132 | * Kafka topics are not deleted when the Pods are deleted. 133 | * TLS certificates need to be configured to establish trust between Kubernetes and the webhook 134 | 135 | ## TODO 136 | 137 | * Assertion of topic configuration for existing topics 138 | * Tests 139 | 140 | ## FAQ 141 | 142 | **Does the webhook constantly monitor the Kafka topics?** 143 | 144 | No. The webhook is triggered only when a Pod with the right annotation is created. When the topic is deleted while 145 | the Pod is running, the webhook will not know or care about it. 146 | 147 | **What happens when the webhook controller is not running?** 148 | 149 | The webhook is registered as an Admission Control webhook. The webhook configuration in Kubernetes / OpenShift has a 150 | configuration field `failurePolicy` which can have either the value `Fail` or `Ignore`. 151 | 152 | When set to `Fail`, Pod admission will fail when the webhook controller doesn't run. This will affect all pods which 153 | will be newly started even when they do not have the annotation. This will not affect Pods which are already running. 154 | 155 | When set to `Ignore`, all Pods will be started when the webhook controller is not reachable. This means that Pods will 156 | be created even when Kafka topics from their annotation do not exist. 157 | 158 | **Can the annotation be set on other resources such as Deployments, Jobs or StatefulSets?** 159 | 160 | No, currently only Pods are supported. 
161 | 162 | **Why do you use a webhook and not an Initializer?** 163 | 164 | The webhook is a lot easier to implement and it also seems to be the way which future Kubernetes versions will use. 165 | Since we don't modify the actual Pod, we do not need the Initializer and can do it with the Webhook only. Also the webhooks 166 | (and mutating webhooks) seem to be currently the preferred way forward in the Kubernetes world. 167 | 168 | **What happens with the Kafka topics once the Pods are deleted?** 169 | 170 | The webhook is triggered only when a Pod is created. It is not informed about the Pods being deleted. All Kafka topics 171 | created by the webhook will continue to exist and have to be removed manually. -------------------------------------------------------------------------------- /example-consumer.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | labels: 5 | app: kafka-consumer 6 | name: kafka-consumer 7 | spec: 8 | replicas: 1 9 | template: 10 | metadata: 11 | labels: 12 | app: kafka-consumer 13 | annotations: 14 | topic-webhook.kafka.strimzi.io/topics: "[ {\"name\": \"myTopic\", \"create\": true, \"assert\": false, \"partitions\": 3, \"replicas\": 3, \"config\": { \"cleanup.policy\": \"compact\" } } ]" 15 | spec: 16 | containers: 17 | - name: kafka-consumer 18 | image: scholzj/kafka-consumer:latest 19 | env: 20 | - name: BOOTSTRAP_SERVERS 21 | value: kafka:9092 22 | - name: TOPIC 23 | value: myTopic 24 | - name: GROUP_ID 25 | value: my-kafka-consumer -------------------------------------------------------------------------------- /example-producer.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | labels: 5 | app: kafka-consumer 6 | name: kafka-producer 7 | spec: 8 | replicas: 1 9 | template: 10 | metadata: 11 | labels: 12 | app: kafka-consumer 13 | 
annotations: 14 | topic-webhook.kafka.strimzi.io/topics: "[ {\"name\": \"myTopic\", \"create\": true, \"assert\": false, \"partitions\": 3, \"replicas\": 3, \"config\": { \"cleanup.policy\": \"compact\" } } ]" 15 | spec: 16 | containers: 17 | - name: kafka-producer 18 | image: scholzj/kafka-producer:latest 19 | env: 20 | - name: BOOTSTRAP_SERVERS 21 | value: kafka:9092 22 | - name: TOPIC 23 | value: myTopic 24 | - name: TIMER 25 | value: "10000" -------------------------------------------------------------------------------- /kubernetes.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | labels: 5 | name: strimzi-topic-webhook 6 | name: strimzi-topic-webhook 7 | namespace: default 8 | spec: 9 | ports: 10 | - name: https 11 | port: 443 12 | targetPort: 8443 13 | selector: 14 | name: strimzi-topic-webhook 15 | type: ClusterIP 16 | --- 17 | 18 | apiVersion: extensions/v1beta1 19 | kind: Deployment 20 | metadata: 21 | name: strimzi-topic-webhook 22 | labels: 23 | name: strimzi-topic-webhook 24 | namespace: default 25 | spec: 26 | replicas: 1 27 | template: 28 | metadata: 29 | name: strimzi-topic-webhook 30 | labels: 31 | name: strimzi-topic-webhook 32 | spec: 33 | containers: 34 | - image: strimzi/kafka-topic-webhook:latest 35 | name: strimzi-topic-webhook 36 | env: 37 | - name: ZOOKEEPER_URL 38 | value: my-cluster-zookeeper:2181 39 | ports: 40 | - containerPort: 8443 41 | name: https 42 | --- 43 | 44 | apiVersion: admissionregistration.k8s.io/v1alpha1 45 | kind: ExternalAdmissionHookConfiguration 46 | metadata: 47 | name: strimzi-topic-webhook 48 | externalAdmissionHooks: 49 | - name: topic-webhook.kafka.strimzi.io 50 | rules: 51 | - apiGroups: 52 | - "" 53 | apiVersions: 54 | - v1 55 | operations: 56 | - CREATE 57 | resources: 58 | - pods 59 | failurePolicy: Ignore 60 | clientConfig: 61 | caBundle: 
LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURPakNDQWlLZ0F3SUJBZ0lVSnNOeklveUNESEpYcmhDakJvM1M2elZTNnNvd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0l6RWhNQjhHQTFVRUNoTVlVM1J5YVcxNmFTQlViM0JwWXlCWFpXSm9iMjlySUVOQk1CNFhEVEU0TURFdwpOekl4TVRBd01Gb1hEVEl6TURFd05qSXhNVEF3TUZvd0l6RWhNQjhHQTFVRUNoTVlVM1J5YVcxNmFTQlViM0JwCll5QlhaV0pvYjI5cklFTkJNSUlCSWpBTkJna3Foa2lHOXcwQkFRRUZBQU9DQVE4QU1JSUJDZ0tDQVFFQXRqVXkKTjgxVDMyWnJiRHNMd0lJVkJJUDRkWERRTjlRUlh0NGc4djBCTi9aTEJRM0kzYWFLa0QzSlFXSmlkVHg1aW8wSQpuTnlUSXYrTURJMXlQZHhBT2FLMUtrYndFVFg2OGxQYXJPa1dHSHVxbDg5ZTByQWxtYkc0M3Jlc1ZFaHA2UTI0CjBnd3hoTHNoQnVaQzJpUGlaY2R3VXFZV0JNTEZWTWhESi81U1JYMUJnZEd5VGd3aWZvVlpHbllKdU9DdzlXTmEKNDBCTEc4amRhdkVLNXMzTFp4NXljMU92TktyWnFnQVAzYk5XbWFtNU94VVFyU3dvdFlZbkZ6N2JEVEpPSVdvSQpDSEg2LzBnQjZoNmhHbnNqc2tNV0QyRG9zL0xUWUJUZ3RTYUJ0Vk9Vb3ZnUnYyWlNBb0YyK3htMVZtMXZaMGhZCkVaRERXUm5KOUV5YmpKYTJ5d0lEQVFBQm8yWXdaREFPQmdOVkhROEJBZjhFQkFNQ0FRWXdFZ1lEVlIwVEFRSC8KQkFnd0JnRUIvd0lCQWpBZEJnTlZIUTRFRmdRVVhXR0ErNXgrZEM4aXU1VWpseDI1NWw4Sjh1QXdId1lEVlIwagpCQmd3Rm9BVVhXR0ErNXgrZEM4aXU1VWpseDI1NWw4Sjh1QXdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBS2hYCjdjK2dPKy8rRERBdXpGNUhHc3NFZEsvL2xmZWVFNzBBd3lEZXR1SzU5cmo1RUhyMWd6L1Z1WVE2Skc3TkxKN3AKQ0tiK01BRnhNdVZNNnB4NkYycU9taEdEM3YrVmhtcysxRjRLWjhWTnNDck5TNDJqc0x6aVBRKzc0WHhjalVRagpiUTFPVHN6WE1IWUFhbmVKYmM4NXdPSTVDU3Q1cVhPOSt2c0wyUWJHekNKT05DdEpYTUhLZ2NsSVZmSUszV2JUCmRXOXgvRnNFNE9FWGp3V1hOZDFPWTF1UzQ2MXpDTHZWY2U5WUhJaXNOUldqMEYrODZOR3JubS9yTXhmM1VVc2kKZyttQ0t2bDNCNVdmSXVyck1vM2IrV2ZxcXZTNWk2UWhUeXhvWGFQN2w3cnNtNGxMWm0vWURhV0JQMkk3N0svQQp6OEtnZ0RJU1VCb0x5N1pyNUlvPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg== 62 | service: 63 | name: strimzi-topic-webhook 64 | namespace: default 65 | -------------------------------------------------------------------------------- /openshift.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | labels: 5 | name: strimzi-topic-webhook 6 | name: strimzi-topic-webhook 7 | namespace: myproject 8 | spec: 9 | 
ports: 10 | - name: https 11 | port: 443 12 | targetPort: 8443 13 | selector: 14 | name: strimzi-topic-webhook 15 | type: ClusterIP 16 | --- 17 | 18 | apiVersion: extensions/v1beta1 19 | kind: Deployment 20 | metadata: 21 | name: strimzi-topic-webhook 22 | labels: 23 | name: strimzi-topic-webhook 24 | namespace: myproject 25 | spec: 26 | replicas: 1 27 | template: 28 | metadata: 29 | name: strimzi-topic-webhook 30 | labels: 31 | name: strimzi-topic-webhook 32 | spec: 33 | containers: 34 | - image: strimzi/kafka-topic-webhook:latest 35 | name: strimzi-topic-webhook 36 | env: 37 | - name: ZOOKEEPER_URL 38 | value: my-cluster--zookeeper:2181 39 | ports: 40 | - containerPort: 8443 41 | name: https 42 | --- 43 | 44 | apiVersion: admissionregistration.k8s.io/v1alpha1 45 | kind: ExternalAdmissionHookConfiguration 46 | metadata: 47 | name: strimzi-topic-webhook 48 | externalAdmissionHooks: 49 | - name: topic-webhook.kafka.strimzi.io 50 | rules: 51 | - apiGroups: 52 | - "" 53 | apiVersions: 54 | - v1 55 | operations: 56 | - CREATE 57 | resources: 58 | - pods 59 | failurePolicy: Ignore 60 | clientConfig: 61 | caBundle: 
LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURPakNDQWlLZ0F3SUJBZ0lVSnNOeklveUNESEpYcmhDakJvM1M2elZTNnNvd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0l6RWhNQjhHQTFVRUNoTVlVM1J5YVcxNmFTQlViM0JwWXlCWFpXSm9iMjlySUVOQk1CNFhEVEU0TURFdwpOekl4TVRBd01Gb1hEVEl6TURFd05qSXhNVEF3TUZvd0l6RWhNQjhHQTFVRUNoTVlVM1J5YVcxNmFTQlViM0JwCll5QlhaV0pvYjI5cklFTkJNSUlCSWpBTkJna3Foa2lHOXcwQkFRRUZBQU9DQVE4QU1JSUJDZ0tDQVFFQXRqVXkKTjgxVDMyWnJiRHNMd0lJVkJJUDRkWERRTjlRUlh0NGc4djBCTi9aTEJRM0kzYWFLa0QzSlFXSmlkVHg1aW8wSQpuTnlUSXYrTURJMXlQZHhBT2FLMUtrYndFVFg2OGxQYXJPa1dHSHVxbDg5ZTByQWxtYkc0M3Jlc1ZFaHA2UTI0CjBnd3hoTHNoQnVaQzJpUGlaY2R3VXFZV0JNTEZWTWhESi81U1JYMUJnZEd5VGd3aWZvVlpHbllKdU9DdzlXTmEKNDBCTEc4amRhdkVLNXMzTFp4NXljMU92TktyWnFnQVAzYk5XbWFtNU94VVFyU3dvdFlZbkZ6N2JEVEpPSVdvSQpDSEg2LzBnQjZoNmhHbnNqc2tNV0QyRG9zL0xUWUJUZ3RTYUJ0Vk9Vb3ZnUnYyWlNBb0YyK3htMVZtMXZaMGhZCkVaRERXUm5KOUV5YmpKYTJ5d0lEQVFBQm8yWXdaREFPQmdOVkhROEJBZjhFQkFNQ0FRWXdFZ1lEVlIwVEFRSC8KQkFnd0JnRUIvd0lCQWpBZEJnTlZIUTRFRmdRVVhXR0ErNXgrZEM4aXU1VWpseDI1NWw4Sjh1QXdId1lEVlIwagpCQmd3Rm9BVVhXR0ErNXgrZEM4aXU1VWpseDI1NWw4Sjh1QXdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBS2hYCjdjK2dPKy8rRERBdXpGNUhHc3NFZEsvL2xmZWVFNzBBd3lEZXR1SzU5cmo1RUhyMWd6L1Z1WVE2Skc3TkxKN3AKQ0tiK01BRnhNdVZNNnB4NkYycU9taEdEM3YrVmhtcysxRjRLWjhWTnNDck5TNDJqc0x6aVBRKzc0WHhjalVRagpiUTFPVHN6WE1IWUFhbmVKYmM4NXdPSTVDU3Q1cVhPOSt2c0wyUWJHekNKT05DdEpYTUhLZ2NsSVZmSUszV2JUCmRXOXgvRnNFNE9FWGp3V1hOZDFPWTF1UzQ2MXpDTHZWY2U5WUhJaXNOUldqMEYrODZOR3JubS9yTXhmM1VVc2kKZyttQ0t2bDNCNVdmSXVyck1vM2IrV2ZxcXZTNWk2UWhUeXhvWGFQN2w3cnNtNGxMWm0vWURhV0JQMkk3N0svQQp6OEtnZ0RJU1VCb0x5N1pyNUlvPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg== 62 | service: 63 | name: strimzi-topic-webhook 64 | namespace: myproject 65 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | io.strimzi 7 | kafka-topic-webhook 8 | 1.0.0-SNAPSHOT 9 | 10 | 11 | 1.8 12 | 1.8 13 | 3.1.0 14 | 3.5.4 15 | 1.7.21 16 | 4.12 17 | 3.1.0 18 | 19 | 
20 | 21 | 22 | io.vertx 23 | vertx-core 24 | ${vertx.version} 25 | compile 26 | 27 | 28 | io.vertx 29 | vertx-web 30 | ${vertx.version} 31 | compile 32 | 33 | 34 | io.vertx 35 | vertx-kafka-client 36 | ${vertx.version} 37 | compile 38 | 39 | 40 | org.slf4j 41 | slf4j-api 42 | ${slf4j.version} 43 | compile 44 | 45 | 46 | org.slf4j 47 | slf4j-log4j12 48 | ${slf4j.version} 49 | compile 50 | 51 | 52 | junit 53 | junit 54 | ${junit.version} 55 | test 56 | 57 | 58 | io.vertx 59 | vertx-unit 60 | ${vertx.version} 61 | test 62 | 63 | 64 | 65 | 66 | 67 | 68 | org.apache.maven.plugins 69 | maven-compiler-plugin 70 | 71 | ${maven.compiler.source} 72 | ${maven.compiler.target} 73 | 74 | 75 | 76 | org.apache.maven.plugins 77 | maven-shade-plugin 78 | ${maven.shade.version} 79 | 80 | 81 | package 82 | 83 | shade 84 | 85 | 86 | kafka-topic-webhook 87 | 88 | 89 | 90 | io.strimzi.topicwebhook.Main 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | -------------------------------------------------------------------------------- /src/main/java/io/strimzi/topicwebhook/Main.java: -------------------------------------------------------------------------------- 1 | package io.strimzi.topicwebhook; 2 | 3 | import io.vertx.core.*; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | 7 | public class Main { 8 | private static final Logger log = LoggerFactory.getLogger(Main.class.getName()); 9 | 10 | public static void main(String args[]) { 11 | try { 12 | Vertx vertx = Vertx.vertx(); 13 | vertx.deployVerticle(new TopicWebhook(TopicWebhookConfig.fromEnv()), res -> { 14 | if (res.failed()) { 15 | log.error("Failed to start the verticle", res.cause()); 16 | System.exit(1); 17 | } 18 | }); 19 | } catch (IllegalArgumentException e) { 20 | log.error("Unable to parse arguments", e); 21 | System.exit(1); 22 | } catch (Exception e) { 23 | log.error("Error starting Topic Webhook controller:", e); 24 | System.exit(1); 25 | } 26 | } 27 | } 28 | 
-------------------------------------------------------------------------------- /src/main/java/io/strimzi/topicwebhook/TopicWebhook.java: -------------------------------------------------------------------------------- 1 | package io.strimzi.topicwebhook; 2 | 3 | import io.netty.handler.codec.http.HttpResponseStatus; 4 | import io.vertx.core.AbstractVerticle; 5 | import io.vertx.core.AsyncResult; 6 | import io.vertx.core.CompositeFuture; 7 | import io.vertx.core.Future; 8 | import io.vertx.core.Handler; 9 | import io.vertx.core.buffer.Buffer; 10 | import io.vertx.core.http.HttpServerOptions; 11 | import io.vertx.core.json.JsonArray; 12 | import io.vertx.core.json.JsonObject; 13 | import io.vertx.core.net.PemKeyCertOptions; 14 | import io.vertx.ext.web.Router; 15 | import io.vertx.ext.web.RoutingContext; 16 | import io.vertx.ext.web.handler.BodyHandler; 17 | import io.vertx.kafka.admin.AdminUtils; 18 | 19 | import java.io.BufferedReader; 20 | import java.io.InputStreamReader; 21 | import java.util.ArrayList; 22 | import java.util.HashMap; 23 | import java.util.List; 24 | import java.util.Map; 25 | import java.util.stream.Collectors; 26 | 27 | import org.slf4j.Logger; 28 | import org.slf4j.LoggerFactory; 29 | 30 | public class TopicWebhook extends AbstractVerticle { 31 | private static final Logger log = LoggerFactory.getLogger(TopicWebhook.class.getName()); 32 | 33 | private static final String ANNOTATION_KEY = "topic-webhook.kafka.strimzi.io/topics"; 34 | 35 | private static final int DEFAULT_PARTITIONS = 1; 36 | private static final int DEFAULT_REPLICAS = 1; 37 | private static final boolean DEFAULT_ASSERT = false; 38 | private static final boolean DEFAULT_CREATE = true; 39 | 40 | private static final int port = 8443; 41 | private static String zookeeper; 42 | 43 | public TopicWebhook(TopicWebhookConfig config) throws Exception { 44 | log.info("Creating Kafka Topic Webhook (KTW) controller"); 45 | 46 | zookeeper = config.getZookeeper(); 47 | log.info("Using 
Zookeeper {}", zookeeper); 48 | } 49 | 50 | /* 51 | Start the verticle 52 | */ 53 | @Override 54 | public void start(Future start) { 55 | log.info("Starting KTW controller"); 56 | startHttpServer(res -> { 57 | if (res.succeeded()) { 58 | log.info("KTW controller created"); 59 | start.complete(); 60 | } 61 | else { 62 | log.info("KTW controller failed to start", res.cause()); 63 | start.fail(res.cause()); 64 | } 65 | }); 66 | } 67 | 68 | /* 69 | Create and start HTTP server 70 | */ 71 | private void startHttpServer(Handler> resultHandler) { 72 | Router router = configureRouter(); 73 | 74 | HttpServerOptions httpOptions = new HttpServerOptions(); 75 | setSsl(httpOptions); 76 | 77 | log.info("Starting web server on port {}", port); 78 | vertx.createHttpServer(httpOptions) 79 | .requestHandler(router::accept) 80 | .listen(port, res -> { 81 | if (res.succeeded()) { 82 | log.info("Web server started"); 83 | resultHandler.handle(Future.succeededFuture()); 84 | } 85 | else { 86 | log.error("Web server failed to start", res.cause()); 87 | resultHandler.handle(Future.failedFuture(res.cause())); 88 | } 89 | }); 90 | } 91 | 92 | /* 93 | Configure SSL for HTTP server with key from resources 94 | TODO: Pass the key as ConfigMap / Env. 
variable 95 | */ 96 | private void setSsl(HttpServerOptions httpServerOptions) { 97 | httpServerOptions.setSsl(true); 98 | 99 | PemKeyCertOptions pemKeyCertOptions = new PemKeyCertOptions() 100 | .setKeyValue(Buffer.buffer(new BufferedReader(new InputStreamReader(getClass().getResourceAsStream("/webhook-key.pem"))).lines().collect(Collectors.joining("\n")))) 101 | .setCertValue(Buffer.buffer(new BufferedReader(new InputStreamReader(getClass().getResourceAsStream("/webhook.pem"))).lines().collect(Collectors.joining("\n")))); 102 | httpServerOptions.setPemKeyCertOptions(pemKeyCertOptions); 103 | } 104 | 105 | /* 106 | Setup Vert.x router (just a single route) 107 | */ 108 | private Router configureRouter() { 109 | Router router = Router.router(vertx); 110 | router.route().handler(BodyHandler.create()); 111 | router.route("/*").handler(this::handleRequest); 112 | 113 | return router; 114 | } 115 | 116 | /* 117 | Triggered by incomming requests 118 | */ 119 | private void handleRequest(RoutingContext routingContext) { 120 | log.info("Received {} request on {} with body {}", routingContext.request().method().name(), routingContext.request().absoluteURI(), routingContext.getBodyAsString()); 121 | 122 | JsonObject reviewReq = routingContext.getBodyAsJson(); 123 | if ("AdmissionReview".equals(reviewReq.getString("kind"))) { 124 | JsonObject pod = reviewReq.getJsonObject("spec").getJsonObject("object"); 125 | admit(pod, res -> { 126 | JsonObject result = res.result(); 127 | log.info("Responding with body {}", result.toString()); 128 | routingContext.response().setStatusCode(HttpResponseStatus.OK.code()).putHeader("content-type", "application/json; charset=utf-8").end(result.encodePrettily()); 129 | }); 130 | } 131 | else { 132 | log.error("Kind is not AdmissionReview but {}", reviewReq.getString("kind")); 133 | routingContext.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).setStatusMessage("Received unexpected request!").end(); 134 | 135 | } 136 | } 137 | 
138 | /* 139 | Decide whether the Pod should be admitted or not 140 | */ 141 | private void admit(JsonObject pod, Handler> handler) { 142 | log.info("Admitting pod {} ({})", pod.getString("generateName"), pod); 143 | 144 | JsonObject annotations = pod.getJsonObject("annotations", new JsonObject()); 145 | 146 | if (annotations.containsKey(ANNOTATION_KEY)) { 147 | List topicFutures = new ArrayList<>(); 148 | 149 | String topicAnnotation = annotations.getString(ANNOTATION_KEY); 150 | JsonArray topics = new JsonArray(topicAnnotation); 151 | 152 | for (int i = 0; i < topics.size(); i++) { 153 | JsonObject topicSpec = topics.getJsonObject(i); 154 | Future completion = Future.future(); 155 | topicFutures.add(completion); 156 | log.info("Pod {} requires topic {}", pod.getString("generateName"), topicSpec.getString("name")); 157 | handleTopic(topicSpec, completion.completer()); 158 | } 159 | 160 | CompositeFuture.all(topicFutures).setHandler(res -> { 161 | if (res.succeeded()) { 162 | log.info("All topic subfutures completed successfully"); 163 | handler.handle(Future.succeededFuture(createAdmissionReviewResult(true, null))); 164 | } 165 | else { 166 | String statusMessage = "Rejected by Kafka Topic Initializer. See logs for more details."; 167 | log.error("Some topic subfutures failed. Rejecting admission with error message '{}'.", statusMessage); 168 | handler.handle(Future.succeededFuture(createAdmissionReviewResult(false, statusMessage))); 169 | } 170 | }); 171 | 172 | } 173 | else { 174 | log.info("Pod {} doesn't contain any relevant annotation and will be allowed", pod.getString("generateName")); 175 | handler.handle(Future.succeededFuture(createAdmissionReviewResult(true))); 176 | } 177 | } 178 | 179 | /* 180 | Handles the individual topic. Decodes the topics specification and "executes" it. 
181 | */ 182 | private void handleTopic(JsonObject topicSpec, Handler> handler) { 183 | String topicName = topicSpec.getString("name"); 184 | String zookeeper = topicSpec.getString("zookeeper", this.zookeeper); 185 | int partitions = topicSpec.getInteger("partitions", DEFAULT_PARTITIONS); 186 | int replicas = topicSpec.getInteger("replicas", DEFAULT_REPLICAS); 187 | Map config = convertMap(topicSpec.getJsonObject("config", new JsonObject()).getMap()); 188 | boolean assertConfig = topicSpec.getBoolean("assert", DEFAULT_ASSERT); 189 | boolean create = topicSpec.getBoolean("create", DEFAULT_CREATE); 190 | 191 | // TODO Implement better handling of AdminUtils instance for different Zookeeper instances 192 | AdminUtils admin = AdminUtils.create(vertx, zookeeper); 193 | 194 | admin.topicExists(topicName, res -> { 195 | if (res.succeeded()) { 196 | if (res.result() == true) { 197 | log.info("Topic {} already exists", topicName); 198 | 199 | if (assertConfig) { 200 | // TODO: Implement topic assertion 201 | log.warn("Topic {} configuration will not be asserted. Asserting configuration is currently not implemented.", topicName); 202 | handler.handle(Future.succeededFuture()); 203 | } 204 | else { 205 | handler.handle(Future.succeededFuture()); 206 | } 207 | } 208 | else { 209 | log.info("Topic {} doesn't exists", topicName); 210 | 211 | if (create) { 212 | admin.createTopic(topicName, partitions, replicas, config, res2 -> { 213 | if (res2.succeeded()) { 214 | log.info("Topic {} created", topicName); 215 | handler.handle(Future.succeededFuture()); 216 | } else { 217 | log.error("Failed to create topic " + topicName, res2.cause()); 218 | handler.handle(Future.failedFuture("Failed to create topic " + topicName + ". ")); 219 | } 220 | }); 221 | } 222 | else { 223 | log.error("Topic " + topicName + " doesn't exist and topic creation is disabled.", res.cause()); 224 | handler.handle(Future.failedFuture("Topic " + topicName + " doesn't exist and topic creation is disabled. 
")); 225 | } 226 | } 227 | } 228 | else { 229 | log.error("Failed to query topic " + topicName, res.cause()); 230 | handler.handle(Future.failedFuture("Failed to query topic " + topicName + ". ")); 231 | } 232 | }); 233 | } 234 | 235 | /* 236 | Generate review status (with message) 237 | */ 238 | private JsonObject createReviewStatus(Boolean allowed, String statusMessage) { 239 | if (statusMessage != null) { 240 | JsonObject status = new JsonObject() 241 | .put("status", "Failure") 242 | .put("message", statusMessage) 243 | .put("reason", statusMessage); 244 | return new JsonObject().put("allowed", allowed).put("status", status); 245 | } 246 | else { 247 | return new JsonObject().put("allowed", allowed); 248 | } 249 | } 250 | 251 | /* 252 | Generate ReviewResult based on status passed as parameter (without message) 253 | */ 254 | private JsonObject createAdmissionReviewResult(Boolean allowed) { 255 | return createAdmissionReviewResult(allowed, null); 256 | } 257 | 258 | /* 259 | Generate ReviewResult based on status passed as parameter 260 | */ 261 | private JsonObject createAdmissionReviewResult(Boolean allowed, String status) { 262 | JsonObject result = new JsonObject(); 263 | result.put("kind", "AdmissionReview"); 264 | result.put("apiVersion", "admission.k8s.io/v1alpha1"); 265 | result.put("status", createReviewStatus(allowed, status)); 266 | 267 | return result; 268 | } 269 | 270 | /* 271 | JsonObjetc by default converts to Map. 
This method converts it to Map which is 272 | needed for Kafka 273 | */ 274 | private Map convertMap(Map source) { 275 | Map target = new HashMap<>(); 276 | 277 | for (Map.Entry entry : source.entrySet()) { 278 | if (entry.getValue() instanceof String){ 279 | target.put(entry.getKey(), (String)entry.getValue()); 280 | } 281 | else if (entry.getValue() instanceof Integer) { 282 | target.put(entry.getKey(), Integer.toString((Integer)entry.getValue())); 283 | } 284 | else if (entry.getValue() instanceof Boolean) { 285 | target.put(entry.getKey(), Boolean.toString((Boolean)entry.getValue())); 286 | } 287 | else { 288 | target.put(entry.getKey(), entry.getValue().toString()); 289 | } 290 | } 291 | 292 | return target; 293 | } 294 | } 295 | -------------------------------------------------------------------------------- /src/main/java/io/strimzi/topicwebhook/TopicWebhookConfig.java: -------------------------------------------------------------------------------- 1 | package io.strimzi.topicwebhook; 2 | 3 | public class TopicWebhookConfig { 4 | private final String zookeeper; 5 | 6 | public TopicWebhookConfig(String zookeeper) { 7 | this.zookeeper = zookeeper; 8 | } 9 | 10 | public static TopicWebhookConfig fromEnv() { 11 | String zookeeper = System.getenv("ZOOKEEPER_URL"); 12 | 13 | return new TopicWebhookConfig(zookeeper); 14 | } 15 | 16 | public String getZookeeper() { 17 | return zookeeper; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=INFO,stdout 2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 3 | log4j.appender.stdout.Target=System.out 4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 5 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 6 | 
-------------------------------------------------------------------------------- /src/main/resources/webhook-key.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEowIBAAKCAQEA5KT2QxlxjvHHoWIGN2foT5HBY8zO3Igw+bkUeF5DA2/ZKzx1 3 | 0dt5MpYguuvBSuyE4Hiiq4A1n5VLzzghK4k4KEfQsX9RdOMDBY01S5KDKxHpiyJi 4 | fm3h+r5Xo9l+Z05jRYwEOg0j3liM83c2m2DqruEx9SIVRmPtlyIzVAEHHvoFtskK 5 | A5oUvZ/egOwJST3ELA9E77Fd4KUH0KYsZJQpMGR+SDmj/aOcx19eyFHfYtIl84B0 6 | L/eg+rmyKHBd9R35v7iTT8hy7HnhLxxe1/WZzSjXKNuqFCU2jsPXr22r3RrtoCua 7 | FM0cf13m+xCMFqsm2e8KhJeggFGazhSxKDTWUwIDAQABAoIBAElJDp3nM9zLZl81 8 | 2iQ8K9wFLeVdKhjJllkUl5kVLYmWehc/o8wF+szaDdeb5TKsqNKCGDG8QHPMGKZs 9 | qUUZEGToz910n5lFghxvbFuytFRDw4WTGdrIQZxrEhA0kC1wlKUgAeLRf2OvIk6P 10 | MXOQFujCyY52xDbxpJ/CmI0eXzfv9HRcsQS036beiTvVHwAr2b8SEsiOmwyAIBQh 11 | 4vOD8a+mhmOps6oIKa0wcCqlRwq80yM6MXXi72Qh13Jus9MO3lYiMl7Bb64r+Pay 12 | XiPxy4W+/fOqrYL2ToH8SxU5HVBeVf24MJNO/YyzEr3wsFZ14LSiU85QNkFqO94o 13 | w9FCjQECgYEA9aWkbiKTNUyenQmpouiSY4+p/Ets04pmzUAY7QUK9aetU+5fN7pb 14 | 9alvnBff4ht8e08tzMfZv+ow5BVfca1cecctcg+Fmph9xfxlGlKfoatCPThR3JD/ 15 | ygM78MR5GhIPKPSe5RXYFIqnGVNektLhznbvXyRlWkw/uFdE9Q+8ZBMCgYEA7kff 16 | fegGA+6zA5NoMZ3WeLrdhb1U5m7F/B2IBLAa/eqM08vyXcm0XUWK+16zoeBCN7fI 17 | DhflqDxeQMysFauVUTWYBICimukPcgKV3Xa8kN2MwzJdYQcuqw+OFHqKRJtmKr9p 18 | iCEovgJOor6W3c5ApEf0kuu8vGW88zRJgeohjMECgYBMyyPEanqqxvEvFr45d4uM 19 | JHV3sfyrhTDuKYOhVVo/wdOl0Xdi7JejDP0fRzducDlSM0aWJkQkgwoVw7i6yP+H 20 | 8oR6i++wjMuFAYp52CqfV/K1QM4Wrh3JAYnTkxho1gx3OUITS+jpQw9KE44VSGff 21 | K79Zdv9YY03oSx/7nJfAkQKBgEdt6ENYEKWRIrzJrYxiIm7ipZF35kGFp3giaint 22 | mMnRTBIZqzgORTFIrZcZFoRKM898Gk6PP1slbaPDNzrvyr0jccS3wwbpIqYsvDw9 23 | Yd2TbKT/LhruCkFhUlg1O8nBQ+lApCQYvJvpGSWUOKmZMKuOnnSEknlvjirh279E 24 | LnaBAoGBAIwECq9golbiTCM/DIHbgkHMXQZqPW9ZBSNNozXdyeDO3/9RUsHO3Hdb 25 | XF27XJLrAUB542xNfGDYmJZ1+KDaNa2BsP6XWvv5V81YyRtMB5ljy5ToYisVFgcg 26 | D8I/ag+NdGuN1SDcVWrk9sDnh/nYF+jFJprpFJaY8rvavNhw7iCP 27 | -----END RSA PRIVATE KEY----- 
28 | -------------------------------------------------------------------------------- /src/main/resources/webhook.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEITCCAwmgAwIBAgIUFM1tHnZ23lflWvmZPfnpBfkuHcwwDQYJKoZIhvcNAQEL 3 | BQAwIzEhMB8GA1UEChMYU3RyaW16aSBUb3BpYyBXZWJob29rIENBMB4XDTE4MDEw 4 | NzIxMTAwMFoXDTE5MDEwNzIxMTAwMFowQDEeMBwGA1UEChMVU3RyaW16aSBUb3Bp 5 | YyBXZWJob29rMR4wHAYDVQQDExVzdHJpbXppLXRvcGljLXdlYmhvb2swggEiMA0G 6 | CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkpPZDGXGO8cehYgY3Z+hPkcFjzM7c 7 | iDD5uRR4XkMDb9krPHXR23kyliC668FK7ITgeKKrgDWflUvPOCEriTgoR9Cxf1F0 8 | 4wMFjTVLkoMrEemLImJ+beH6vlej2X5nTmNFjAQ6DSPeWIzzdzabYOqu4TH1IhVG 9 | Y+2XIjNUAQce+gW2yQoDmhS9n96A7AlJPcQsD0TvsV3gpQfQpixklCkwZH5IOaP9 10 | o5zHX17IUd9i0iXzgHQv96D6ubIocF31Hfm/uJNPyHLseeEvHF7X9ZnNKNco26oU 11 | JTaOw9evbavdGu2gK5oUzRx/Xeb7EIwWqybZ7wqEl6CAUZrOFLEoNNZTAgMBAAGj 12 | ggEuMIIBKjAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsG 13 | AQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFAx7WVMEqW6ugOLweO9dzTAa 14 | IEssMB8GA1UdIwQYMBaAFF1hgPucfnQvIruVI5cdueZfCfLgMIGqBgNVHREEgaIw 15 | gZ+CFXN0cmltemktdG9waWMtd2ViaG9va4Ifc3RyaW16aS10b3BpYy13ZWJob29r 16 | Lm15cHJvamVjdIIjc3RyaW16aS10b3BpYy13ZWJob29rLm15cHJvamVjdC5zdmOC 17 | HXN0cmltemktdG9waWMtd2ViaG9vay5kZWZhdWx0giFzdHJpbXppLXRvcGljLXdl 18 | Ymhvb2suZGVmYXVsdC5zdmMwDQYJKoZIhvcNAQELBQADggEBAFm+Wync9dTRO3ea 19 | vKsMv0nlU9aL0qW+wKq3OyJZWelNg20zWzfrzZV//PQovXiif7R4LY1jUx1Vl+/v 20 | qRjRpaBuXSRKg6z2y30cHNY958KCr8+uZAgMeU3ViI7RK2oGWCMRgiCu6oJt4oiA 21 | +z/1CAinsPm+wGUerXeuNDwmR7uKBin6Nb6hkBLX/ljVBO5XqUHjwt7Pst6LXYwI 22 | JnVBhpCMX87ALHK11uZd2PkU9TsxNyo5GCVoDG66bcB9erPormEYWe2hR1c/9O+h 23 | bOaIuOEObBAluDX1s5VcYcmSnuMsiCjS8gCtKWjm3XQsPA+1E/Ub4/CX9G48e7Bs 24 | 8bBJyYY= 25 | -----END CERTIFICATE----- 26 | -------------------------------------------------------------------------------- /tls/ca.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"key": { 3 | "algo": "rsa", 4 | "size": 2048 5 | }, 6 | "names": [ 7 | { 8 | "O": "Strimzi Topic Webhook CA" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /tls/webhook.json: -------------------------------------------------------------------------------- 1 | { 2 | "CN": "strimzi-topic-webhook", 3 | "hosts": [ 4 | "strimzi-topic-webhook", 5 | "strimzi-topic-webhook.myproject", 6 | "strimzi-topic-webhook.myproject.svc", 7 | "strimzi-topic-webhook.default", 8 | "strimzi-topic-webhook.default.svc" 9 | ], 10 | "key": { 11 | "algo": "rsa", 12 | "size": 2048 13 | }, 14 | "names": [ 15 | { 16 | "O": "Strimzi Topic Webhook" 17 | } 18 | ] 19 | } 20 | --------------------------------------------------------------------------------