├── Dockerfile ├── LICENSE.txt ├── README.md ├── build-contracts ├── docker-compose.files-aggregation.yml ├── docker-compose.monitoring.yml └── docker-compose.yml ├── ca-certificates-ubuntu └── Dockerfile ├── cmak └── Dockerfile ├── connect-files ├── Dockerfile ├── connect-files.sh └── worker.properties ├── connect-jdbc ├── Dockerfile └── pom.xml ├── connect-jmx ├── Dockerfile ├── connect-jmx.properties └── worker.properties ├── cp ├── Dockerfile └── docker-help.sh ├── cruise-control ├── Dockerfile ├── README.md ├── hooks │ ├── build │ └── env └── opt │ └── cruise-control │ ├── config │ ├── log4j.properties │ └── log4j2.xml │ └── start.sh ├── hooks └── build ├── initutils-nonroot └── Dockerfile ├── initutils └── Dockerfile ├── jdk-adoptopenjdk-graalvm └── Dockerfile ├── jdk-adoptopenjdk └── Dockerfile ├── jdk-openjdk └── Dockerfile ├── jdk-oracle-open └── Dockerfile ├── jdk-zulu └── Dockerfile ├── kafka-entrypoints ├── .gitignore └── Dockerfile ├── kafka-nonroot └── Dockerfile ├── kafka ├── Dockerfile └── docker-help.sh ├── kubectl-cfssl └── Dockerfile ├── kubectl-kafkacat └── Dockerfile ├── monitor └── Dockerfile ├── native ├── .gitignore ├── Dockerfile ├── admincmd.Dockerfile ├── cli-scripts │ ├── cli-list.sh │ └── kafka-topics_ifnotexists.sh ├── cli.Dockerfile ├── configs-manual-additions │ ├── kafka-configs │ │ └── reflect-config.json │ ├── kafka-consumer-groups │ │ └── reflect-config.json │ ├── kafka-topics │ │ └── reflect-config.json │ └── zookeeper-server-start │ │ └── reflect-config.json ├── configs │ ├── kafka-configs │ │ ├── jni-config.json │ │ ├── proxy-config.json │ │ ├── reflect-config.json │ │ └── resource-config.json │ ├── kafka-consumer-groups │ │ ├── jni-config.json │ │ ├── proxy-config.json │ │ ├── reflect-config.json │ │ └── resource-config.json │ ├── kafka-server-start │ │ ├── jni-config.json │ │ ├── proxy-config.json │ │ ├── reflect-config.json │ │ └── resource-config.json │ ├── kafka-topics │ │ ├── jni-config.json │ │ ├── proxy-config.json │ │ ├── reflect-config.json │ │ └── resource-config.json │ └── zookeeper-server-start │ │ ├── jni-config.json │ │ ├── proxy-config.json │ │ ├── reflect-config.json │ │ └── resource-config.json ├── docker-compose.yml ├── native-usecases.sh ├── native-usecases.sh.configs │ ├── log4j.properties │ ├── myid.zoo-0 │ ├── myid.zoo-1 │ ├── myid.zoo-2 │ ├── zookeeper.properties.zoo-0 │ ├── zookeeper.properties.zoo-1 │ └── zookeeper.properties.zoo-2 └── zookeeper-server-start.Dockerfile ├── prometheus-jmx-exporter ├── Dockerfile └── collect-all-slow.yml └── tags ├── 0.10.0.1 └── Dockerfile ├── 0.10.2.0-alpine └── Dockerfile ├── 0.11.0.0-alpine └── Dockerfile └── 0.11.0.0 └── Dockerfile /Dockerfile: -------------------------------------------------------------------------------- 1 | # This is a dummy, see ./hooks/build 2 | FROM adoptopenjdk:11.0.7_10-jre-hotspot-bionic@sha256:a119e89693cfca250cecc3756c5efb5fdf523d93d813003b3c2a1d29d8884211 3 | 4 | RUN java -XX:+PrintFlagsFinal -version | grep -E "UseContainerSupport|MaxRAMPercentage|MinRAMPercentage|InitialRAMPercentage" 5 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 
176 | 177 | END OF TERMS AND CONDITIONS 178 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kafka docker builds 2 | 3 | The no-surprises Kafka docker image. Public build at [solsson/kafka](https://hub.docker.com/r/solsson/kafka/), with 100k+ pulls to date. 4 | 5 | Design goals: 6 | * Transparent build: self-contained in Dockerfile && directly from source. 7 | * Recommend use of image SHAs for security and stability ([#11](https://github.com/solsson/dockerfiles/pull/11)). 8 | * Same basic platform choices as the more [thoroughly validated](https://www.confluent.io/blog/exactly-once-semantics-are-possible-heres-how-apache-kafka-does-it/) [Confluent Platform distribution](https://hub.docker.com/r/confluentinc/cp-kafka/) ([#5](https://github.com/solsson/dockerfiles/pull/5), [#9](https://github.com/solsson/dockerfiles/pull/9)). 9 | * Supports the other tools bundled with Kafka distributions - Zookeeper, topic admin, Connect & Streams ([#7](https://github.com/solsson/dockerfiles/pull/7)). 10 | * Encourage conventions to clearly show all non-default config, to help Kafka beginners. 11 | * Avoid recommending [--net=host](http://docs.confluent.io/current/cp-docker-images/docs/quickstart.html) as such practices are impractical in multi-node environments. 12 | * Support [Kubernetes](http://kubernetes.io); transparent and tweakable cluster setups like [Yolean/kubernetes-kafka](https://github.com/Yolean/kubernetes-kafka). 13 | 14 | ## How to use 15 | 16 | The default entrypoint `docker run solsson/kafka` will list "bin" scripts and sample config files. Make a guess like `docker run --entrypoint ./bin/kafka-server-start.sh solsson/kafka` or `docker run --entrypoint ./bin/kafka-topics.sh solsson/kafka` to see tool-specific help. 17 | 18 | You most likely need to mount your own config files, or for `./bin/kafka-server-start.sh` use overrides like: 19 | ``` 20 | --override zookeeper.connect=zookeeper:2181 21 | --override log.dirs=/var/lib/kafka/data/topics 22 | --override log.retention.hours=-1 23 | --override broker.id=0 24 | --override advertised.listener=PLAINTEXT://kafka-0:9092 25 | ``` 26 | 27 | Beware of `log4j.properties`' location if you mount config. Kafka's bin scripts will guess path unless you set a `KAFKA_LOG4J_OPTS` env. 28 | 29 | We avoid environment variable rules for config override, used in [wurstmeister](https://hub.docker.com/r/wurstmeister/kafka/) and [cp-kafka](https://hub.docker.com/r/confluentinc/cp-kafka/), because bin scripts are quite heavy on env use anyway so you'd get a toxic mix. Also for cluster setups to share config across instances we tend to need bash tricks. See for example [this gotcha](https://github.com/Yolean/kubernetes-kafka/pull/45/commits/db264b09cc7903346238b4464183f3a9571f65e6) and the overrides needed for [external access](https://github.com/Yolean/kubernetes-kafka/issues/13). 30 | 31 | ## Upgrade from pre 0.11 images 32 | 33 | Earlier images used `./bin/kafka-server-start.sh` as entrypoint 34 | and had the `zookeeper.connect=zookeeper:2181` (instead of localhost:2181) built in. At upgrade use the command recommended above to restore that functionality. 35 | 36 | ## Build and test locally 37 | 38 | To build your own kafka image simply run `docker build ./kafka`. 
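If you just want a quick local smoke test of that build, something along these lines should work (a sketch only; the `kafka-local` tag, the `kafka-net` network name and the `smoke-test` topic are placeholders, not conventions used elsewhere in this repo):

```bash
# Build the broker image from this repo under a throwaway tag
docker build -t kafka-local ./kafka

# User-defined network so containers can reach each other by name
docker network create kafka-net

# Zookeeper, using the sample config bundled with the Kafka distribution
docker run -d --name zookeeper --network kafka-net \
  --entrypoint ./bin/zookeeper-server-start.sh kafka-local config/zookeeper.properties

# A single broker, overriding only non-default settings as recommended above
docker run -d --name kafka-0 --network kafka-net \
  --entrypoint ./bin/kafka-server-start.sh kafka-local config/server.properties \
  --override zookeeper.connect=zookeeper:2181 \
  --override broker.id=0

# Verify with the bundled topic tool
docker run --rm --network kafka-net \
  --entrypoint ./bin/kafka-topics.sh kafka-local \
  --zookeeper zookeeper:2181 --create --topic smoke-test --partitions 1 --replication-factor 1
```

This mirrors what the build-contract compose files below do: pick a bin script as entrypoint and pass config plus `--override` flags as the command.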
39 | 40 | When we develop locally --- stream processing images, monitoring, 41 | compliance with kubernetes-kafka etc --- 42 | we use a [build-contract](https://github.com/Yolean/build-contract/). 43 | 44 | Build and test using: `docker run -v /var/run/docker.sock:/var/run/docker.sock -v $(pwd)/:/source solsson/build-contract test`. However... while timing issues remain you need some manual intervention: 45 | 46 | ```bash 47 | compose='docker-compose -f build-contracts/docker-compose.yml' 48 | $compose up -d zookeeper kafka-0 49 | $compose logs zookeeper kafka-0 50 | # can we create topics using the image's provided script? 51 | $compose up test-topic-create 52 | # can a producer send messages using snappy (had issues before with a class missing in the image) 53 | $compose up test-snappy-compression 54 | $compose up test-consume-all 55 | # demo the log/file aggregation image 56 | docker-compose -f build-contracts/docker-compose.files-aggregation.yml up 57 | # demo the JMX->kafka image 58 | docker-compose -f build-contracts/docker-compose.monitoring.yml up 59 | ``` 60 | 61 | ## Why is the repo named `dockerfiles`? 62 | 63 | This repo used to contain misc dockerfiles, but they've moved to separate repositories for dockerization projects. 64 | We've kept the repository name to avoid breaking the automated build of solsson/kafka in Docker Hub. 65 | 66 | For legacy Dockerfiles from this repo (if you navigated to here from a Docker Hub [solsson](https://hub.docker.com/u/solsson/) image), 67 | see https://github.com/solsson/dockerfiles/tree/misc-dockerfiles. 68 | 69 | ## Native builds 70 | 71 | Very experimental. 72 | 73 | ``` 74 | NOPUSH=true IMAGE_NAME=solsson/kafka:nativeagent ./hooks/build 75 | docker-compose -f native/docker-compose.yml down 76 | docker run --rm --entrypoint chown -v $(pwd)/native/configs:/configs busybox -R $(id -u) /configs 77 | NOPUSH=true IMAGE_NAME=solsson/kafka:native ./hooks/build 78 | ``` 79 | 80 | To test the native images reuse the usecases script: 81 | 82 | ``` 83 | for build in kafka-topics kafka-configs kafka-consumer-groups zookeeper-server-start; do 84 | docker tag solsson/kafka:native-$build solsson/kafka:nativeagent-$build 85 | done 86 | ./native/native-usecases.sh 87 | docker-compose -f native/docker-compose.yml down 88 | git restore --source=HEAD --staged --worktree -- native/configs/ 89 | # The cli image should simply combine the supported commands 90 | docker run --rm --entrypoint sh solsson/kafka:native-cli -c 'ls ./bin/' 91 | ``` 92 | -------------------------------------------------------------------------------- /build-contracts/docker-compose.files-aggregation.yml: -------------------------------------------------------------------------------- 1 | version: '2.0' 2 | services: 3 | 4 | zookeeper: 5 | build: ../kafka 6 | entrypoint: ./bin/zookeeper-server-start.sh 7 | command: 8 | - config/zookeeper.properties 9 | 10 | kafka-0: 11 | build: ../kafka 12 | links: 13 | - zookeeper 14 | entrypoint: ./bin/kafka-server-start.sh 15 | command: 16 | - config/server.properties 17 | - --override 18 | - zookeeper.connect=zookeeper:2181 19 | - --override 20 | - broker.id=0 21 | - --override 22 | - advertised.listener=PLAINTEXT://kafka-0:9092 23 | 24 | connect-files: 25 | build: ../connect-files 26 | labels: 27 | com.yolean.build-target: "" 28 | links: 29 | - kafka-0 30 | 31 | test-connect-files-real-logs: 32 | build: ../connect-files 33 | links: 34 | - kafka-0 35 | volumes: 36 | - /var/log:/logs 37 | 38 | test-consume-files: 39 | image:
solsson/kafkacat@sha256:1266d140c52cb39bf314b6f22b6d7a01c4c9084781bc779fdfade51214a713a8 40 | labels: 41 | com.yolean.build-contract: "" 42 | command: 43 | - -b 44 | - kafka-0:9092 45 | - -t 46 | - files-000 47 | - -C 48 | - -o 49 | - beginning 50 | -------------------------------------------------------------------------------- /build-contracts/docker-compose.monitoring.yml: -------------------------------------------------------------------------------- 1 | version: '2.0' 2 | services: 3 | 4 | zookeeper: 5 | build: ../kafka 6 | entrypoint: ./bin/zookeeper-server-start.sh 7 | command: 8 | - config/zookeeper.properties 9 | 10 | kafka-0: 11 | build: ../kafka 12 | links: 13 | - zookeeper 14 | environment: 15 | - JMX_PORT=5555 16 | expose: 17 | - '5555' 18 | entrypoint: ./bin/kafka-server-start.sh 19 | command: 20 | - config/server.properties 21 | - --override 22 | - zookeeper.connect=zookeeper:2181 23 | - --override 24 | - broker.id=0 25 | - --override 26 | - advertised.listener=PLAINTEXT://kafka-0:9092 27 | 28 | prometheus-jmx-exporter: 29 | build: ../prometheus-jmx-exporter 30 | labels: 31 | com.yolean.build-target: "" 32 | links: 33 | - kafka-0 34 | # patch a config before start, as the image is designed for use with local JMX (same k8s pod) 35 | entrypoint: /bin/bash 36 | command: 37 | - -c 38 | - > 39 | sed -i 's|127.0.0.1|kafka-0|' example_configs/kafka-prometheus-monitoring.yml; 40 | cat example_configs/kafka-prometheus-monitoring.yml; 41 | java -jar jmx_prometheus_httpserver.jar 42 | 5556 example_configs/kafka-prometheus-monitoring.yml 43 | 44 | test-metrics-export: 45 | image: solsson/curl@sha256:8b0927b81d10043e70f3e05e33e36fb9b3b0cbfcbccdb9f04fd53f67a270b874 46 | labels: 47 | com.yolean.build-contract: "" 48 | command: 49 | - --fail-early 50 | - --retry 51 | - '10' 52 | - --retry-delay 53 | - '3' 54 | - --retry-connrefused 55 | - http://prometheus-jmx-exporter:5556/metrics 56 | 57 | connect-jmx: 58 | build: ../connect-jmx 59 | labels: 60 | com.yolean.build-target: "" 61 | links: 62 | - kafka-0 63 | 64 | # TODO starts too fast, gets % KC_ERROR: Failed to query metadata for topic jmx-test: Local: Broker transport failure 65 | # needs to retry until kafka+topic exists 66 | test-jmx: 67 | image: solsson/kafkacat@sha256:1266d140c52cb39bf314b6f22b6d7a01c4c9084781bc779fdfade51214a713a8 68 | labels: 69 | com.yolean.build-contract: "" 70 | command: 71 | - -b 72 | - kafka-0:9092 73 | - -t 74 | - jmx-test 75 | - -C 76 | - -o 77 | - beginning 78 | -------------------------------------------------------------------------------- /build-contracts/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2.0' 2 | services: 3 | 4 | zookeeper: 5 | build: ../kafka 6 | entrypoint: ./bin/zookeeper-server-start.sh 7 | command: 8 | - config/zookeeper.properties 9 | 10 | kafka-0: 11 | build: ../kafka 12 | image: solsson/kafka 13 | labels: 14 | com.yolean.build-target: "" 15 | links: 16 | - zookeeper 17 | entrypoint: ./bin/kafka-server-start.sh 18 | command: 19 | - config/server.properties 20 | - --override 21 | - zookeeper.connect=zookeeper:2181 22 | - --override 23 | - broker.id=0 24 | # unlike Kubernetes StatefulSet, compose gives containers a random hostname (leading to redirects to a hex name) 25 | - --override 26 | - advertised.listener=PLAINTEXT://kafka-0:9092 27 | 28 | test-topic-create: 29 | build: ../kafka 30 | labels: 31 | com.yolean.build-contract: "" 32 | links: 33 | - kafka-0 34 | entrypoint: ./bin/kafka-topics.sh 35 | command: 36
| - --zookeeper 37 | - zookeeper:2181 38 | - --create 39 | - --topic 40 | - test-topic-create 41 | - --partitions 42 | - '1' 43 | - --replication-factor 44 | - '1' 45 | 46 | test-snappy-compression: 47 | image: solsson/kafkacat@sha256:1266d140c52cb39bf314b6f22b6d7a01c4c9084781bc779fdfade51214a713a8 48 | labels: 49 | com.yolean.build-contract: "" 50 | entrypoint: /bin/sh 51 | command: 52 | - -exc 53 | - sleep 5; echo "Message from $${HOSTNAME} at $$(date)" | kafkacat -z snappy -b kafka-0:9092 -t test1 -P 54 | 55 | # TODO starts too fast, gets % KC_ERROR: Failed to query metadata for topic test1: Local: Broker transport failure 56 | # needs to retry until kafka+topic exists 57 | test-consume-all: 58 | image: solsson/kafkacat@sha256:1266d140c52cb39bf314b6f22b6d7a01c4c9084781bc779fdfade51214a713a8 59 | labels: 60 | com.yolean.build-contract: "" 61 | command: 62 | - -b 63 | - kafka-0:9092 64 | - -t 65 | - test1 66 | - -C 67 | - -o 68 | - beginning 69 | - -e 70 | -------------------------------------------------------------------------------- /ca-certificates-ubuntu/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:18.04@sha256:b88f8848e9a1a4e4558ba7cfc4acc5879e1d0e7ac06401409062ad2627e6fb58 \ 2 | as install 3 | 4 | RUN set -e; \ 5 | export DEBIAN_FRONTEND=noninteractive; \ 6 | apt-get update; \ 7 | apt-get install -y --no-install-recommends ca-certificates 8 | 9 | FROM ubuntu:18.04@sha256:b88f8848e9a1a4e4558ba7cfc4acc5879e1d0e7ac06401409062ad2627e6fb58 10 | 11 | COPY --from=install /usr/sbin/update-ca-certificates /usr/sbin/update-ca-certificates 12 | -------------------------------------------------------------------------------- /cmak/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM adoptopenjdk:11.0.7_10-jdk-hotspot-bionic@sha256:05df284aea654234eb1de8d8346a0079c33ab03adda1262f92971c39388e99e8 \ 2 | as build 3 | 4 | ENV CMAK_VERSION=3.0.0.5 5 | ENV CMAK_ARCHIVE=https://github.com/yahoo/CMAK/archive/${CMAK_VERSION}.tar.gz 6 | 7 | RUN set -ex; \ 8 | export DEBIAN_FRONTEND=noninteractive; \ 9 | runDeps=''; \ 10 | buildDeps='curl ca-certificates unzip apt-transport-https gnupg2 lsb-release'; \ 11 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 12 | \ 13 | curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add -; \ 14 | echo "deb https://deb.nodesource.com/node_8.x stretch main" > /etc/apt/sources.list.d/nodesource.list; \ 15 | apt-get update && apt install -y --no-install-recommends nodejs; \ 16 | \ 17 | mkdir -p /opt/cmak-src; \ 18 | curl -SLs "${CMAK_ARCHIVE}" | tar -xzf - --strip-components=1 -C /opt/cmak-src; \ 19 | \ 20 | cd /opt/cmak-src; \ 21 | ./sbt clean dist; \ 22 | \ 23 | cd /opt; \ 24 | unzip cmak-src/target/universal/cmak-$CMAK_VERSION.zip; \ 25 | mv cmak-$CMAK_VERSION cmak; \ 26 | \ 27 | rm -rf /root/.sbt /root/.ivy2 /opt/cmak-src; \ 28 | \ 29 | apt-get purge -y --auto-remove $buildDeps nodejs; \ 30 | rm -rf /var/lib/apt/lists/*; \ 31 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt 32 | 33 | FROM adoptopenjdk:11.0.7_10-jre-hotspot-bionic@sha256:a119e89693cfca250cecc3756c5efb5fdf523d93d813003b3c2a1d29d8884211 34 | 35 | COPY --from=build /opt/cmak /opt/cmak 36 | 37 | WORKDIR /opt/cmak 38 | 39 | ENTRYPOINT ["./bin/cmak"] 40 | 41 | # Should be identical to kafka-nonroot's user 42 | RUN echo 'nonroot:x:65532:65534:nonroot:/home/nonroot:/usr/sbin/nologin' >> /etc/passwd && \ 43 | mkdir -p /home/nonroot 
&& touch /home/nonroot/.bash_history && chown -R 65532:65534 /home/nonroot 44 | RUN chmod g+w /opt/cmak && chgrp nogroup /opt/cmak 45 | USER nonroot:nogroup 46 | -------------------------------------------------------------------------------- /connect-files/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka:0.11.0.0 2 | 3 | COPY worker.properties ./config/ 4 | COPY connect-files.sh ./bin/ 5 | 6 | ENV FILES_LIST_CMD="find /logs/ -name *.log" 7 | 8 | # Set up some sample logs 9 | RUN mkdir /logs/; \ 10 | echo "Mount /logs and/or change FILES_LIST_CMD (currently '$FILES_LIST_CMD') to read real content instead" > /logs/samplefile1.log; 11 | 12 | ENTRYPOINT ["./bin/connect-files.sh"] 13 | -------------------------------------------------------------------------------- /connect-files/connect-files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | FILES=$($FILES_LIST_CMD) 5 | 6 | id=0 7 | connectors="" 8 | for FILE in $FILES; do 9 | ((++id)) 10 | echo "$id: $FILE" 11 | cat <<HERE > ./config/connect-file-source-$id.properties 12 | name=local-file-source-${id} 13 | connector.class=FileStreamSource 14 | tasks.max=1 15 | file=${FILE} 16 | topic=files-000 17 | HERE 18 | 19 | connectors="$connectors ./config/connect-file-source-$id.properties" 20 | done 21 | 22 | ./bin/connect-standalone.sh ./config/worker.properties $connectors 23 | -------------------------------------------------------------------------------- /connect-files/worker.properties: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # These are defaults. This file just demonstrates how to override some settings. 17 | bootstrap.servers=kafka-0:9092 18 | 19 | # The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will 20 | # need to configure these based on the format they want their data in when loaded from or stored into Kafka 21 | key.converter=org.apache.kafka.connect.json.JsonConverter 22 | value.converter=org.apache.kafka.connect.json.JsonConverter 23 | # Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply 24 | # it to 25 | key.converter.schemas.enable=true 26 | value.converter.schemas.enable=true 27 | 28 | # The internal converter used for offsets and config data is configurable and must be specified, but most users will 29 | # always want to use the built-in default. Offset and config data is never visible outside of Kafka Connect in this format.
30 | internal.key.converter=org.apache.kafka.connect.json.JsonConverter 31 | internal.value.converter=org.apache.kafka.connect.json.JsonConverter 32 | internal.key.converter.schemas.enable=false 33 | internal.value.converter.schemas.enable=false 34 | 35 | offset.storage.file.filename=/tmp/connect.offsets 36 | # Flush much faster than normal, which is useful for testing/debugging 37 | offset.flush.interval.ms=10000 38 | 39 | # Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins 40 | # (connectors, converters, transformations). The list should consist of top level directories that include 41 | # any combination of: 42 | # a) directories immediately containing jars with plugins and their dependencies 43 | # b) uber-jars with plugins and their dependencies 44 | # c) directories immediately containing the package directory structure of classes of plugins and their dependencies 45 | # Note: symlinks will be followed to discover dependencies or plugins. 46 | # Examples: 47 | # plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors, 48 | #plugin.path= 49 | -------------------------------------------------------------------------------- /connect-jdbc/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka:0.11.0.0 2 | 3 | # referenced from dependency versions in the maven pom 4 | ENV CP_VERSION=3.3.0 5 | 6 | ADD pom.xml /tmp/pom.xml 7 | 8 | # avoid patching kafka-run-class.sh, by hijacking one of the unused paths where it globs for jars 9 | ENV JARS_DIR=/opt/kafka/core/build/dependant-libs-${SCALA_VERSION} 10 | 11 | RUN set -ex; \ 12 | export DEBIAN_FRONTEND=noninteractive; \ 13 | runDeps=''; \ 14 | buildDeps='curl ca-certificates'; \ 15 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 16 | \ 17 | MAVEN_VERSION=3.5.0 PATH=$PATH:/opt/maven/bin; \ 18 | mkdir -p /opt/maven; \ 19 | curl -SLs https://archive.apache.org/dist/maven/maven-3/$MAVEN_VERSION/binaries/apache-maven-$MAVEN_VERSION-bin.tar.gz | tar -xzf - --strip-components=1 -C /opt/maven; \ 20 | mvn --version; \ 21 | \ 22 | mvn -f /tmp/pom.xml dependency:copy-dependencies -DincludeScope=runtime -DoutputDirectory=$JARS_DIR; \ 23 | \ 24 | rm -Rf /opt/src /opt/maven /root/.m2; \ 25 | \ 26 | apt-get purge -y --auto-remove $buildDeps; \ 27 | rm -rf /var/lib/apt/lists/*; \ 28 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt 29 | 30 | ENTRYPOINT ["./bin/connect-distributed.sh"] 31 | -------------------------------------------------------------------------------- /connect-jdbc/pom.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | 4.0.0 6 | 7 | se.repos 8 | kafka-connect-jdbc-build 9 | 1.0.0 10 | 11 | 12 | 13 | Apache License 2.0 14 | http://www.apache.org/licenses/LICENSE-2.0.html 15 | repo 16 | 17 | 18 | 19 | 20 | http://packages.confluent.io/maven/ 21 | 22 | 23 | 24 | 25 | confluent 26 | Confluent 27 | ${confluent.maven.repo} 28 | 29 | 30 | 31 | 32 | 33 | io.confluent 34 | kafka-connect-jdbc 35 | ${env.CP_VERSION} 36 | 37 | 38 | io.confluent 39 | kafka-connect-avro-converter 40 | ${env.CP_VERSION} 41 | 42 | 43 | 44 | org.slf4j 45 | slf4j-log4j12 46 | 47 | 48 | 49 | org.apache.zookeeper 50 | zookeeper 51 | 52 | 53 | 54 | 55 | 56 | 57 | org.mariadb.jdbc 58 | mariadb-java-client 59 | 2.1.0 60 | 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /connect-jmx/Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM solsson/kafka:0.11.0.0 2 | 3 | ENV srijiths-kafka-connectors-version=dc0a7122650e697d3ae97c970a4785bbed949479 4 | 5 | RUN set -ex; \ 6 | buildDeps='curl ca-certificates'; \ 7 | apt-get update && apt-get install -y $buildDeps --no-install-recommends; \ 8 | \ 9 | MAVEN_VERSION=3.5.0 PATH=$PATH:$(pwd)/maven/bin; \ 10 | mkdir ./maven; \ 11 | curl -SLs https://archive.apache.org/dist/maven/maven-3/$MAVEN_VERSION/binaries/apache-maven-$MAVEN_VERSION-bin.tar.gz | tar -xzf - --strip-components=1 -C ./maven; \ 12 | mvn --version; \ 13 | \ 14 | mkdir ./kafka-connectors; \ 15 | cd ./kafka-connectors; \ 16 | curl -SLs https://github.com/srijiths/kafka-connectors/archive/$srijiths-kafka-connectors-version.tar.gz \ 17 | | tar -xzf - --strip-components=1 -C ./; \ 18 | mvn clean install; \ 19 | cd ..; \ 20 | mv ~/.m2/repository/com/sree/kafka/kafka-connect-jmx/0.0.1/kafka-connect-jmx-0.0.1-jar-with-dependencies.jar ./libs/; \ 21 | rm -rf ./kafka-connectors; \ 22 | rm -rf ./maven ~/.m2; \ 23 | \ 24 | apt-get purge -y --auto-remove $buildDeps; \ 25 | rm -rf /var/lib/apt/lists/*; \ 26 | rm /var/log/dpkg.log /var/log/apt/*.log 27 | 28 | COPY *.properties ./config/ 29 | 30 | ENTRYPOINT ["./bin/connect-standalone.sh"] 31 | CMD ["./config/worker.properties", "./config/connect-jmx.properties"] 32 | -------------------------------------------------------------------------------- /connect-jmx/connect-jmx.properties: -------------------------------------------------------------------------------- 1 | # Name of the connecor 2 | name=jmx-source 3 | # Connector class to invoke the connector 4 | connector.class=com.sree.kafka.connectors.jmx.JmxConnector 5 | # Maximum number of tasks 6 | tasks.max=1 7 | 8 | # Kafka topic to push the messages 9 | kafka.topic=jmx-test 10 | # JMX is running for which application. 11 | # If you want JMX metrics for Kafka , then jmx.servicename=kafka 12 | # If you want JMX metrics for Flink , then jmx.servicename=flink etc.. 13 | jmx.servicename=kafka 14 | # If jmx.servicename is kafka , then you have to provide zookeeper.host 15 | # Else zookeeper.host parameter is not required. 16 | zookeeper.host=zookeeper:2181 17 | # If jmx.servicename is not kafka , then below property is mandatory 18 | # Provide the full JMX URL separated by comma 19 | #jmx.url=54.238.221.37:8080,54.238.237.66:8080 20 | -------------------------------------------------------------------------------- /connect-jmx/worker.properties: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # These are defaults. This file just demonstrates how to override some settings. 
17 | bootstrap.servers=kafka-0:9092 18 | 19 | # The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will 20 | # need to configure these based on the format they want their data in when loaded from or stored into Kafka 21 | key.converter=org.apache.kafka.connect.json.JsonConverter 22 | value.converter=org.apache.kafka.connect.json.JsonConverter 23 | # Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply 24 | # it to 25 | key.converter.schemas.enable=true 26 | value.converter.schemas.enable=true 27 | 28 | # The internal converter used for offsets and config data is configurable and must be specified, but most users will 29 | # always want to use the built-in default. Offset and config data is never visible outside of Kafka Connect in this format. 30 | internal.key.converter=org.apache.kafka.connect.json.JsonConverter 31 | internal.value.converter=org.apache.kafka.connect.json.JsonConverter 32 | internal.key.converter.schemas.enable=false 33 | internal.value.converter.schemas.enable=false 34 | 35 | offset.storage.file.filename=/tmp/connect.offsets 36 | # Flush much faster than normal, which is useful for testing/debugging 37 | offset.flush.interval.ms=10000 38 | 39 | # Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins 40 | # (connectors, converters, transformations). The list should consist of top level directories that include 41 | # any combination of: 42 | # a) directories immediately containing jars with plugins and their dependencies 43 | # b) uber-jars with plugins and their dependencies 44 | # c) directories immediately containing the package directory structure of classes of plugins and their dependencies 45 | # Note: symlinks will be followed to discover dependencies or plugins. 
46 | # Examples: 47 | # plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors, 48 | #plugin.path= 49 | -------------------------------------------------------------------------------- /cp/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka-jre:8@sha256:1ebc3c27c30f5925d240aaa0858e111c2fa6d358048b0f488860ea9cd9c84822 2 | 3 | ENV CP_ARCHIVE_VERSION=v4.0.0 \ 4 | CP_PACKAGE_VERSION=4.0.0 \ 5 | MAVEN_VERSION=3.5.2 \ 6 | MAVEN_FLAGS="-Dmaven.test.skip=true" 7 | # Default versions are found in the corresponding branch of: https://github.com/confluentinc/common/blob/master/pom.xml 8 | 9 | WORKDIR /usr/local 10 | 11 | RUN set -ex; \ 12 | WORKDIR=$PWD; \ 13 | mkdir -p $WORKDIR/share/java; \ 14 | \ 15 | export DEBIAN_FRONTEND=noninteractive; \ 16 | runDeps=''; \ 17 | buildDeps='curl ca-certificates'; \ 18 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 19 | \ 20 | PATH=$PATH:/opt/maven/bin; \ 21 | mkdir -p /opt/maven; \ 22 | curl -SLs https://archive.apache.org/dist/maven/maven-3/$MAVEN_VERSION/binaries/apache-maven-$MAVEN_VERSION-bin.tar.gz | tar -xzf - --strip-components=1 -C /opt/maven; \ 23 | mvn --version; \ 24 | \ 25 | mkdir -p /opt/src/common; cd /opt/src/common; \ 26 | curl -SLs https://github.com/confluentinc/common/archive/$CP_ARCHIVE_VERSION.tar.gz | tar -xzf - --strip-components=1 -C ./; \ 27 | mvn $MAVEN_FLAGS install; \ 28 | \ 29 | mkdir -p /opt/src/rest-utils; cd /opt/src/rest-utils; \ 30 | curl -SLs https://github.com/confluentinc/rest-utils/archive/$CP_ARCHIVE_VERSION.tar.gz | tar -xzf - --strip-components=1 -C ./; \ 31 | mvn $MAVEN_FLAGS install; \ 32 | \ 33 | mkdir -p /opt/src/schema-registry; cd /opt/src/schema-registry; \ 34 | curl -SLs https://github.com/confluentinc/schema-registry/archive/$CP_ARCHIVE_VERSION.tar.gz | tar -xzf - --strip-components=1 -C ./; \ 35 | mvn $MAVEN_FLAGS install; \ 36 | \ 37 | mkdir -p /opt/src/kafka-rest; cd /opt/src/kafka-rest; \ 38 | curl -SLs https://github.com/confluentinc/kafka-rest/archive/$CP_ARCHIVE_VERSION.tar.gz | tar -xzf - --strip-components=1 -C ./; \ 39 | mvn $MAVEN_FLAGS install; \ 40 | \ 41 | cd $WORKDIR; \ 42 | \ 43 | mv /opt/src/common/package/target/common-package-$CP_PACKAGE_VERSION-package/share/java/confluent-common ./share/java/; \ 44 | mv /opt/src/rest-utils/package/target/rest-utils-package-$CP_PACKAGE_VERSION-package/share/java/rest-utils ./share/java/; \ 45 | \ 46 | mv /opt/src/schema-registry/package-schema-registry/target/kafka-schema-registry-package-$CP_PACKAGE_VERSION-package/bin/* ./bin/; \ 47 | mv /opt/src/schema-registry/package-schema-registry/target/kafka-schema-registry-package-$CP_PACKAGE_VERSION-package/share/java/* ./share/java/; \ 48 | mv /opt/src/schema-registry/package-schema-registry/target/kafka-schema-registry-package-$CP_PACKAGE_VERSION-package/etc/* /etc/; \ 49 | \ 50 | mv /opt/src/kafka-rest/target/kafka-rest-$CP_PACKAGE_VERSION-package/bin/* ./bin/; \ 51 | mv /opt/src/kafka-rest/target/kafka-rest-$CP_PACKAGE_VERSION-package/share/java/* ./share/java/; \ 52 | mv /opt/src/kafka-rest/target/kafka-rest-$CP_PACKAGE_VERSION-package/etc/* /etc; \ 53 | \ 54 | rm -Rf /opt/src /opt/maven /root/.m2; \ 55 | \ 56 | apt-get purge -y --auto-remove $buildDeps; \ 57 | rm -rf /var/lib/apt/lists/*; \ 58 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt 59 | 60 | COPY docker-help.sh /usr/local/bin/docker-help 61 | ENTRYPOINT ["docker-help"] 62 | 
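The cp image above keeps `docker-help` as its entrypoint, so as with the kafka image you pick a script at run time. A hedged usage sketch (assuming the Confluent launcher scripts and the `/etc/schema-registry/schema-registry.properties` sample config land in the paths the Dockerfile moves them to, and that `solsson/kafka-cp` stands in for whatever tag you built the image under):

```bash
# List the available Confluent scripts and sample configs
docker run --rm solsson/kafka-cp

# Start the schema registry with its sample config; mount your own properties file to override it
docker run --rm --entrypoint ./bin/schema-registry-start solsson/kafka-cp /etc/schema-registry/schema-registry.properties
```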
-------------------------------------------------------------------------------- /cp/docker-help.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Hi," 3 | echo "" 4 | echo "Select as entrypoint one of these scripts:" 5 | find ./bin/* -printf "%f\n" 6 | echo "" 7 | echo "You might find one of the sample config files useful:" 8 | find /etc/ -name *.properties 9 | echo "" 10 | -------------------------------------------------------------------------------- /cruise-control/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM adoptopenjdk/openjdk11:jdk-11.0.6_10-slim 2 | ARG VERSION=2.0.95 3 | USER root 4 | RUN apt-get update && apt-get install -y --no-install-recommends git ca-certificates 5 | RUN git clone -b ${VERSION} https://github.com/linkedin/cruise-control.git 6 | RUN cd cruise-control && ./gradlew jar copyDependantLibs 7 | 8 | RUN mv -v /cruise-control/cruise-control/build/libs/cruise-control-*.jar \ 9 | /cruise-control/cruise-control/build/libs/cruise-control.jar 10 | RUN mv -v /cruise-control/cruise-control/build/dependant-libs/cruise-control-metrics-reporter-*.jar \ 11 | /cruise-control/cruise-control/build/dependant-libs/cruise-control-metrics-reporter.jar 12 | 13 | FROM node:10.16 14 | RUN mkdir /src && cd /src && git clone https://github.com/linkedin/cruise-control-ui.git 15 | WORKDIR /src/cruise-control-ui 16 | RUN git fetch origin 17 | RUN git checkout master 18 | RUN git pull 19 | RUN git rev-parse HEAD 20 | RUN npm install 21 | RUN npm run build 22 | 23 | # The container is made to work with github.com/Yolean/kubernetes-kafka, so we try to use a common base 24 | FROM adoptopenjdk/openjdk11:jdk-11.0.6_10-slim@sha256:b8fb00e5d5a2b263e4ea2fc75333c8da4e74bcfabd7d329eecf5f6547f8efb7f 25 | ARG SOURCE_REF 26 | ARG SOURCE_TYPE 27 | ARG DOCKERFILE_PATH 28 | ARG VERSION 29 | 30 | RUN mkdir -p /opt/cruise-control /opt/cruise-control/cruise-control-ui 31 | COPY --from=0 /cruise-control/cruise-control/build/libs/cruise-control.jar /opt/cruise-control/cruise-control/build/libs/cruise-control.jar 32 | COPY --from=0 /cruise-control/config /opt/cruise-control/config 33 | COPY --from=0 /cruise-control/kafka-cruise-control-start.sh /opt/cruise-control/ 34 | COPY --from=0 /cruise-control/cruise-control/build/dependant-libs /opt/cruise-control/cruise-control/build/dependant-libs 35 | COPY opt/cruise-control /opt/cruise-control/ 36 | COPY --from=1 /src/cruise-control-ui/dist /opt/cruise-control/cruise-control-ui/dist 37 | RUN echo "local,localhost,/kafkacruisecontrol" > /opt/cruise-control/cruise-control-ui/dist/static/config.csv 38 | 39 | EXPOSE 8090 40 | CMD [ "/opt/cruise-control/start.sh" ] 41 | -------------------------------------------------------------------------------- /cruise-control/README.md: -------------------------------------------------------------------------------- 1 | kafka-cruise-control 2 | ==================== 3 | 4 | LinkedIn Cruise Control manager for Kafka. This image includes the Cruise Control application. 5 | It includes the metrics reporter jar at `/opt/cruise-control/cruise-control/build/dependant-libs/cruise-control-metrics-reporter.jar`. 6 | -------------------------------------------------------------------------------- /cruise-control/hooks/build: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xe 2 | 3 | . 
./hooks/env 4 | 5 | docker build \ 6 | --build-arg "SOURCE_REF=$GIT_SHA1" \ 7 | --build-arg "DOCKERFILE_PATH=$DOCKERFILE_PATH" \ 8 | --build-arg "SOURCE_TYPE=$SOURCE_TYPE" \ 9 | ${VERSION:+--build-arg "VERSION=$VERSION"} \ 10 | -t $IMAGE_NAME $DOCKERFILE_PATH 11 | -------------------------------------------------------------------------------- /cruise-control/hooks/env: -------------------------------------------------------------------------------- 1 | # These values are passed by the hub, but if they aren't we can get them from git. 2 | [ -n "$SOURCE_BRANCH" ] || SOURCE_BRANCH=${CIRCLE_TAG} 3 | [ -n "$SOURCE_BRANCH" ] || SOURCE_BRANCH=$(git symbolic-ref -q --short HEAD | tr '/' '-') 4 | [ -n "$SOURCE_BRANCH" ] || SOURCE_BRANCH=latest 5 | [ -n "$GIT_SHA1" ] || GIT_SHA1=$(git rev-parse -q HEAD) 6 | 7 | [[ "${SOURCE_BRANCH:0:1}" =~ [0-9] ]] && VERSION=${SOURCE_BRANCH/-*/} 8 | 9 | [ "$SOURCE_BRANCH" = "master" ] && SOURCE_BRANCH="latest" 10 | 11 | # Set defaults for build arguments 12 | [ -n "$SOURCE_TYPE" ] || SOURCE_TYPE=git 13 | [ -n "$DOCKERFILE_PATH" ] || DOCKERFILE_PATH=. 14 | [ -n "$IMAGE_NAME" ] || IMAGE_NAME=pdouble16/kafka-cruise-control:$SOURCE_BRANCH 15 | -------------------------------------------------------------------------------- /cruise-control/opt/cruise-control/config/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger = INFO, FILE 2 | 3 | log4j.appender.FILE=org.apache.log4j.FileAppender 4 | log4j.appender.FILE.File=/dev/stdout 5 | 6 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 7 | log4j.appender.FILE.layout.conversionPattern=%-6r [%15.15t] %-5p %30.30c %x - %m%n 8 | -------------------------------------------------------------------------------- /cruise-control/opt/cruise-control/config/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /cruise-control/opt/cruise-control/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | cd /opt/cruise-control 4 | 5 | # Set heap memory settings for container environments 6 | if [ -z "$KAFKA_HEAP_OPTS" ]; then 7 | export KAFKA_HEAP_OPTS="-XX:InitialRAMPercentage=30 -XX:MaxRAMPercentage=70 -XX:MinRAMPercentage=80" 8 | fi 9 | /bin/bash ${DEBUG:+-x} /opt/cruise-control/kafka-cruise-control-start.sh /opt/cruise-control/config/cruisecontrol.properties 8090 10 | -------------------------------------------------------------------------------- /hooks/build: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | [ -z "$DEBUG" ] || set -x 3 | set -e 4 | 5 | PUSH="" 6 | 7 | [ -z "$IMAGE_NAME" ] && echo "IMAGE_NAME is required" && exit 1; 8 | 9 | # Nativeagent is a prerequisite of native, but meant to run locally with result json and Dockerfiles commited 10 | # NOPUSH=true IMAGE_NAME=solsson/kafka:nativeagentagent ./hooks/build 11 | [ -z "$NATIVEAGENT" ] && echo $IMAGE_NAME | grep 'nativeagent$' && NATIVEAGENT=true 12 | # Docker Hub builds with tags ending with "native" 13 | [ -z "$NATIVE" ] && echo $IMAGE_NAME | grep 'native$' && NATIVE=true 14 | 15 | function imagename { 16 | buildname=$1 17 | case $IMAGE_NAME in 18 | *:latest) echo -n $IMAGE_NAME | sed "s|:latest|:$buildname|" ;; 19 | *:*) echo -n $IMAGE_NAME | sed "s|:\(.*\)|:\1-$buildname|" ;; 20 | *) echo 
$IMAGE_NAME:$buildname ;; 21 | esac 22 | } 23 | 24 | jre=$(imagename jre) 25 | docker build -t solsson/kafka:jre -t "$jre" ./ 26 | PUSH="$PUSH $jre" 27 | 28 | docker build -t solsson/kafka -t "$IMAGE_NAME" ./kafka 29 | PUSH="$PUSH $IMAGE_NAME" 30 | 31 | nonroot=$(imagename nonroot) 32 | docker build -t solsson/kafka:nonroot -t "$nonroot" ./kafka-nonroot 33 | PUSH="$PUSH $nonroot" 34 | 35 | graalvm=$(imagename graalvm) 36 | docker build -t solsson/kafka:graalvm -t "$graalvm" ./jdk-adoptopenjdk-graalvm 37 | PUSH="$PUSH $graalvm" 38 | 39 | nativeagent=$(imagename nativebase) 40 | docker build -t solsson/kafka:nativebase -t "$nativeagent" ./native 41 | 42 | initutils=$(imagename initutils) 43 | docker build -t solsson/kafka:initutils -t "$initutils" ./initutils 44 | PUSH="$PUSH $initutils" 45 | 46 | nonrootinitutils=$(imagename initutils-nonroot) 47 | docker build -t solsson/kafka:initutils-nonroot -t "$nonrootinitutils" ./initutils-nonroot 48 | PUSH="$PUSH $nonrootinitutils" 49 | 50 | cmak=$(imagename cmak) 51 | docker build -t solsson/kafka:cmak -t "$cmak" ./cmak 52 | PUSH="$PUSH $cmak" 53 | 54 | # Generatoe entrypoints 55 | entrypoints=$(imagename entrypoints) 56 | docker build -t $entrypoints ./kafka-entrypoints 57 | 58 | [ "$NATIVEAGENT" = "true" ] && { 59 | mkdir -p ./native/kafka-entrypoints 60 | } 61 | 62 | function entrypoint_gen { 63 | echo "# $@" 64 | script=$1; shift 65 | name=$(basename $script | sed 's|\.sh$||') 66 | dfile=./kafka-entrypoints/$name.Dockerfile 67 | image=$(imagename $name) 68 | echo "FROM solsson/kafka:nonroot" > $dfile 69 | # TODO ideally we'd have only [executable, -cp, long classpath string] in entrypoint and the rest as CMD 70 | echo -n 'ENTRYPOINT ["' >> $dfile 71 | docker run --rm --entrypoint $script $entrypoints $@ \ 72 | | sed 's| |" \\\ 73 | , "|g' \ 74 | | sed 's|^ , "/etc/kafka| ]" \ 75 | CMD ["/etc/kafka|g' \ 76 | | sed 's|, "-Xlog:gc|#, "-Xlog:gc|g' \ 77 | | sed 's|, "-Dcom.sun.management.jmxremote|#, "-Dcom.sun.management.jmxremote|g' \ 78 | | sed 's|, "/opt/kafka/libs/extensions/..*:/opt/kafka/bin/../libs/.*|, "/opt/kafka/libs/extensions/*:/opt/kafka/libs/*" \\|g' \ 79 | >> $dfile 80 | echo '"]' >> $dfile 81 | docker build -t $image -f $dfile ./kafka-entrypoints 82 | PUSH="$PUSH $image" 83 | 84 | [ "$NATIVEAGENT" != "true" ] || { 85 | cp $dfile native/$dfile 86 | sed -i "s|FROM .*|FROM $nativeagent|" native/$dfile 87 | sed -i 's| , "-cp" \\| , "-agentpath:/opt/graalvm/lib/libnative-image-agent.so=config-merge-dir=/home/nonroot/native-config" \\\ 88 | , "-cp" \\|' native/$dfile 89 | docker build -t $image -f native/$dfile ./native 90 | } 91 | 92 | } 93 | 94 | entrypoint_gen ./bin/kafka-server-start.sh /etc/kafka/server.properties 95 | entrypoint_gen ./bin/zookeeper-server-start.sh /etc/kafka/zookeeper.properties 96 | entrypoint_gen ./bin/kafka-configs.sh 97 | entrypoint_gen ./bin/kafka-topics.sh 98 | entrypoint_gen ./bin/kafka-consumer-groups.sh 99 | entrypoint_gen ./bin/kafka-reassign-partitions.sh 100 | entrypoint_gen ./bin/kafka-leader-election.sh 101 | 102 | # Entrypoints done, run native build stuff 103 | 104 | [ "$NATIVEAGENT" != "true" ] || { 105 | ./native/native-usecases.sh 106 | } 107 | 108 | [ "$NATIVE" != "true" ] || { 109 | # Don't push anything else with native builds because those images would also get a "native" tag 110 | PUSH="" 111 | for cli in \ 112 | kafka-topics:kafka.admin.TopicCommand \ 113 | kafka-configs:kafka.admin.ConfigCommand \ 114 | kafka-consumer-groups:kafka.admin.ConsumerGroupCommand; do 115 | command=$(echo $cli | 
cut -d: -f1) 116 | mainclass=$(echo $cli | cut -d: -f2) 117 | nativename=$(imagename $command) 118 | echo "Buildig admin CLI $command $mainclass ..." 119 | cat native/admincmd.Dockerfile | sed "s|{{command}}|$command|g" | sed "s|{{mainclass}}|$mainclass|g" | \ 120 | docker build -t solsson/kafka:native-$command -t "$nativename" -f - native/ 121 | PUSH="$PUSH $nativename" 122 | done 123 | for nativebuild in \ 124 | zookeeper-server-start \ 125 | cli; do 126 | nativename=$(imagename $nativebuild) 127 | docker build -t solsson/kafka:native-$nativebuild -t "$nativename" -f native/$nativebuild.Dockerfile native/ 128 | PUSH="$PUSH $nativename" 129 | done 130 | } 131 | 132 | # Push results 133 | 134 | echo "PUSH list contains: $PUSH" 135 | [ -z "$NOPUSH" ] || exit 0 136 | for P in $PUSH; do docker push $P; done 137 | -------------------------------------------------------------------------------- /initutils-nonroot/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka:initutils 2 | 3 | # Note that there's also a nouser 65534 user which has no writable home 4 | RUN echo 'nonroot:x:65532:65534:nonroot:/home/nonroot:/usr/sbin/nologin' >> /etc/passwd && \ 5 | mkdir -p /home/nonroot && touch /home/nonroot/.bash_history && chown -R 65532:65534 /home/nonroot 6 | USER nonroot:nogroup 7 | 8 | WORKDIR /home/nonroot 9 | -------------------------------------------------------------------------------- /initutils/Dockerfile: -------------------------------------------------------------------------------- 1 | # same FROM as kafka-jre, to keep pull times down and to provide the same shell distro+version 2 | FROM ubuntu:bionic@sha256:bec5a2727be7fff3d308193cfde3491f8fba1a2ba392b7546b43a051853a341d 3 | 4 | ENV KUBERNETES_VERSION=1.17.5 KUBERNETES_CLIENT_SHA512=4cf67f972aad3425bccc48af83f8cb59ddcc96de49d3bb21cdbbcbbeee31718ef681e551d13343538a6e70c2a4ea0435e4540bc1f8cf1a91a2f73265f52b9429 5 | 6 | RUN set -ex; \ 7 | export DEBIAN_FRONTEND=noninteractive; \ 8 | runDeps='curl ca-certificates procps netcat'; \ 9 | buildDeps=''; \ 10 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 11 | rm -rf /var/lib/apt/lists/*; \ 12 | \ 13 | curl -sLS -o k.tar.gz -k https://dl.k8s.io/v${KUBERNETES_VERSION}/kubernetes-client-linux-amd64.tar.gz; \ 14 | echo "$KUBERNETES_CLIENT_SHA512 k.tar.gz" | sha512sum -c; \ 15 | tar -xvzf k.tar.gz -C /usr/local/bin/ --strip-components=3 kubernetes/client/bin/kubectl; \ 16 | rm k.tar.gz; \ 17 | \ 18 | apt-get purge -y --auto-remove $buildDeps; \ 19 | rm /var/log/dpkg.log /var/log/apt/*.log 20 | -------------------------------------------------------------------------------- /jdk-adoptopenjdk-graalvm/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM adoptopenjdk:11.0.7_10-jdk-hotspot-bionic@sha256:c2ce12d7530d957f2d44dd33339eeeafa3b889c27af0824b186c4afef1f843ef 2 | 3 | COPY --from=maven:3.6.3-jdk-11-slim@sha256:18f059e73cffdf1688093e2e82f1001a7cd2baa9de92e7b8d05bf34a8318ee92 \ 4 | /usr/share/maven /usr/share/maven 5 | RUN ln -s /usr/share/maven/bin/mvn /usr/bin/mvn 6 | ENV MAVEN_HOME=/usr/share/maven 7 | ENV MAVEN_CONFIG=/root/.m2 8 | 9 | RUN set -ex; \ 10 | export DEBIAN_FRONTEND=noninteractive; \ 11 | runDeps='ca-certificates netcat-openbsd libsnappy1v5 liblz4-1 libzstd1'; \ 12 | buildDeps='curl gnupg dirmngr gcc libc6-dev zlib1g-dev libsnappy-dev liblz4-dev libzstd1-dev'; \ 13 | apt-get update && apt-get install -y $runDeps $buildDeps 
--no-install-recommends; \ 14 | \ 15 | # Keeping build deps for native compile 16 | #apt-get purge -y --auto-remove $buildDeps; \ 17 | rm -rf /var/lib/apt/lists; \ 18 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt /root/.gnupg 19 | 20 | ARG graalvm_version=20.1.0 21 | ARG graalvm_variant=java11 22 | ARG graalvm_releases=graalvm/graalvm-ce-builds 23 | ARG graalvm_build= 24 | 25 | RUN set -ex; \ 26 | cd /opt; \ 27 | curl -sLS -o graalvm.tar.gz https://github.com/${graalvm_releases}/releases/download/vm-${graalvm_version}${graalvm_build}/graalvm-ce-${graalvm_variant}-linux-amd64-${graalvm_version}.tar.gz; \ 28 | tar xvzf graalvm.tar.gz; \ 29 | mv graalvm-ce-${graalvm_variant}-${graalvm_version} graalvm; \ 30 | rm graalvm.tar.gz 31 | 32 | ENV PATH=/opt/java/openjdk/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/graalvm/bin 33 | 34 | RUN gu install native-image 35 | -------------------------------------------------------------------------------- /jdk-adoptopenjdk/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM adoptopenjdk:11.0.7_10-jdk-hotspot-bionic@sha256:c2ce12d7530d957f2d44dd33339eeeafa3b889c27af0824b186c4afef1f843ef 2 | -------------------------------------------------------------------------------- /jdk-openjdk/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM openjdk:11.0.3-slim@sha256:0954adb89fce4e70d87f0b7e2c587a3d67ccef1b1cd30e46191dd1327254d197 2 | -------------------------------------------------------------------------------- /jdk-oracle-open/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:stretch-slim@sha256:6c31161e090aa3f62b9ee1414b58f0a352b42b2b7827166e57724a8662fe4b38 2 | 3 | # See https://jdk.java.net/11/ 4 | ENV JDK11_VERSION=11.0.2 \ 5 | JDK11_BUILD=7 \ 6 | JDK11_TGZ_SHA256=62ee5758af12bbad04f376bf2b61f114076f6d8ffe4ba8db13bb5a63b5ef0d29 7 | 8 | RUN set -ex; \ 9 | export DEBIAN_FRONTEND=noninteractive; \ 10 | runDeps=''; \ 11 | buildDeps='curl ca-certificates'; \ 12 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 13 | \ 14 | cd /usr/lib; \ 15 | mkdir jvm; \ 16 | cd jvm; \ 17 | curl -SLs -o jdk.tar.gz https://download.java.net/java/GA/jdk11/${JDK11_BUILD}/GPL/openjdk-${JDK11_VERSION}_linux-x64_bin.tar.gz; \ 18 | echo "${JDK11_TGZ_SHA256} jdk.tar.gz" | sha256sum -c -; \ 19 | tar xvzf jdk.tar.gz; \ 20 | rm jdk.tar.gz; \ 21 | mv jdk-11.0.2 jdk-11; \ 22 | \ 23 | rm -v jdk-11/lib/src.zip; \ 24 | \ 25 | apt-get purge -y --auto-remove $buildDeps; \ 26 | rm -rf /var/lib/apt/lists/*; \ 27 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt 28 | 29 | # Instead of: find /usr/lib/jvm/jdk-11/bin/ -executable -exec ln -s '{}' /usr/local/bin/ 30 | ENV PATH="$PATH:/usr/lib/jvm/jdk-11/bin" 31 | -------------------------------------------------------------------------------- /jdk-zulu/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:stretch-slim@sha256:bade11bf1835c9f09b011b5b1cf9f7428328416410b238d2f937966ea820be74 2 | 3 | ENV ZULU_OPENJDK_VERSION="11=11.31+11" 4 | 5 | RUN set -ex; \ 6 | runDeps='locales procps'; \ 7 | buildDeps='gnupg dirmngr'; \ 8 | export DEBIAN_FRONTEND=noninteractive; \ 9 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 10 | \ 11 | apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 0x219BD9C9; \ 12 | 
echo 'deb http://repos.azulsystems.com/debian stable main' > /etc/apt/sources.list.d/zulu.list; \ 13 | mkdir /usr/share/man/man1; \ 14 | apt-get update; \ 15 | apt-get -s install zulu-8 | grep zulu-; \ 16 | apt-get -s install zulu-11 | grep zulu-; \ 17 | apt-get -s install zulu-12 | grep zulu-; \ 18 | apt-get install -y zulu-${ZULU_OPENJDK_VERSION} --no-install-recommends; \ 19 | rm -rf /usr/share/man/man1; \ 20 | \ 21 | cd /usr/lib/jvm/zulu-11-amd64/; \ 22 | rm -rf demo man sample src.zip; \ 23 | \ 24 | apt-get purge -y --auto-remove $buildDeps; \ 25 | rm -rf /var/lib/apt/lists/*; \ 26 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt 27 | 28 | ENV JAVA_HOME=/usr/lib/jvm/zulu-11-amd64 29 | 30 | # If a downstream image changes these values it must also re-run locale-gen as below 31 | ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 32 | 33 | RUN set -ex; \ 34 | sed -i -e "s/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/" /etc/locale.gen; \ 35 | sed -i -e "s/# $LANG/$LANG/" /etc/locale.gen; \ 36 | echo "LANG=\"$LANG\"" > /etc/default/locale; \ 37 | \ 38 | cat /etc/locale.gen | grep -v "^#"; \ 39 | cat /etc/default/locale; \ 40 | ls -la /usr/share/locale/locale.alias | grep /etc/locale.alias; \ 41 | LC_ALL=C dpkg-reconfigure --frontend=noninteractive locales; 42 | -------------------------------------------------------------------------------- /kafka-entrypoints/.gitignore: -------------------------------------------------------------------------------- 1 | *.Dockerfile 2 | -------------------------------------------------------------------------------- /kafka-entrypoints/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka:nonroot 2 | 3 | USER root 4 | 5 | RUN sed -i 's| exec | echo -n |' ./bin/kafka-run-class.sh 6 | 7 | USER nonroot:nogroup 8 | -------------------------------------------------------------------------------- /kafka-nonroot/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka 2 | 3 | RUN mkdir logs && chgrp nogroup logs && chmod g+w logs 4 | RUN ln -s $(pwd)/config /etc/kafka 5 | 6 | # These envs affect the generating of entrypoints in ../kafka-entrypoinst 7 | # Values are based on Yolean/kubernetes-kafka 8 | ENV CLASSPATH="/opt/kafka/libs/extensions/*" 9 | ENV KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:/etc/kafka/log4j.properties" 10 | 11 | # Note that there's also a nouser 65534 user which has no writable home 12 | RUN echo 'nonroot:x:65532:65534:nonroot:/home/nonroot:/usr/sbin/nologin' >> /etc/passwd && \ 13 | mkdir -p /home/nonroot && touch /home/nonroot/.bash_history && chown -R 65532:65534 /home/nonroot 14 | USER nonroot:nogroup 15 | -------------------------------------------------------------------------------- /kafka/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM curlimages/curl@sha256:aa45e9d93122a3cfdf8d7de272e2798ea63733eeee6d06bd2ee4f2f8c4027d7c \ 2 | as extralibs 3 | 4 | USER root 5 | RUN curl -sLS -o /zookeeper-3.5.8.jar https://repo1.maven.org/maven2/org/apache/zookeeper/zookeeper/3.5.8/zookeeper-3.5.8.jar 6 | RUN curl -sLS -o /zookeeper-jute-3.5.8.jar https://repo1.maven.org/maven2/org/apache/zookeeper/zookeeper-jute/3.5.8/zookeeper-jute-3.5.8.jar 7 | 8 | FROM adoptopenjdk:11.0.7_10-jre-hotspot-bionic@sha256:a119e89693cfca250cecc3756c5efb5fdf523d93d813003b3c2a1d29d8884211 9 | 10 | ENV KAFKA_VERSION=2.5.0 SCALA_VERSION=2.12 11 | 12 | RUN set -ex; \ 13 | export 
DEBIAN_FRONTEND=noninteractive; \ 14 | runDeps='ca-certificates netcat-openbsd'; \ 15 | buildDeps='curl gnupg dirmngr'; \ 16 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 17 | \ 18 | curl -sLS -o KEYS https://www.apache.org/dist/kafka/KEYS; \ 19 | gpg --import KEYS && rm KEYS; \ 20 | \ 21 | SCALA_BINARY_VERSION=$(echo $SCALA_VERSION | cut -f 1-2 -d '.'); \ 22 | mkdir -p /opt/kafka; \ 23 | curl -sLS -o kafka_$SCALA_BINARY_VERSION-$KAFKA_VERSION.tgz.asc https://www.apache.org/dist/kafka/$KAFKA_VERSION/kafka_$SCALA_BINARY_VERSION-$KAFKA_VERSION.tgz.asc; \ 24 | curl -sLS -o kafka_$SCALA_BINARY_VERSION-$KAFKA_VERSION.tgz "https://www-eu.apache.org/dist/kafka/$KAFKA_VERSION/kafka_$SCALA_BINARY_VERSION-$KAFKA_VERSION.tgz"; \ 25 | gpg --verify kafka_$SCALA_BINARY_VERSION-$KAFKA_VERSION.tgz.asc kafka_$SCALA_BINARY_VERSION-$KAFKA_VERSION.tgz; \ 26 | tar xzf kafka_$SCALA_BINARY_VERSION-$KAFKA_VERSION.tgz --strip-components=1 -C /opt/kafka; \ 27 | rm kafka_$SCALA_BINARY_VERSION-$KAFKA_VERSION.tgz; \ 28 | \ 29 | rm -rf /opt/kafka/site-docs; \ 30 | \ 31 | apt-get purge -y --auto-remove $buildDeps; \ 32 | rm -rf /var/lib/apt/lists; \ 33 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt /root/.gnupg 34 | 35 | COPY --from=extralibs /zookeeper-3.5.8.jar /opt/kafka/libs/zookeeper-3.5.7.jar 36 | COPY --from=extralibs /zookeeper-jute-3.5.8.jar /opt/kafka/libs/zookeeper-jute-3.5.7.jar 37 | 38 | WORKDIR /opt/kafka 39 | 40 | COPY docker-help.sh /usr/local/bin/docker-help 41 | ENTRYPOINT ["docker-help"] 42 | -------------------------------------------------------------------------------- /kafka/docker-help.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Hi," 3 | echo "" 4 | echo "This image is basically just the official Kafka distribution," 5 | echo "containing both servers and utils, each with its own help output." 6 | echo "" 7 | echo "Select as entrypoint one of these scripts:" 8 | find ./bin/ -name *.sh 9 | echo "" 10 | echo "You might find one of the sample config files useful:" 11 | find ./config/ -name *.properties 12 | echo "" 13 | echo "Add more using volumes, or downstream images." 14 | echo "Enjoy Kafka!" 
15 | echo "" 16 | -------------------------------------------------------------------------------- /kubectl-cfssl/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM cfssl/cfssl@sha256:525005bc4e39d61a2302490329e414eab13ee7ec3edf261df5da68ff65cb506b \ 2 | as cfssl 3 | 4 | RUN whereis cfssl 5 | RUN whereis cfssljson 6 | 7 | FROM debian:stretch-slim@sha256:9490c476443a3869e39c2897fa66c91daf5dcbbfca53c976dac7bbdc45775b28 8 | 9 | ENV KUBERNETES_VERSION=1.14.3 KUBERNETES_CLIENTS_SHA256=47ada9a1743d3f45c2443a6bf37a5f8c16a77601c91e3d27e83f7c9c5cca0737 10 | 11 | RUN set -ex; \ 12 | export DEBIAN_FRONTEND=noninteractive; \ 13 | runDeps='procps'; \ 14 | buildDeps='curl ca-certificates'; \ 15 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 16 | rm -rf /var/lib/apt/lists/*; \ 17 | \ 18 | curl -sLS -o k.tar.gz -k https://dl.k8s.io/v${KUBERNETES_VERSION}/kubernetes-client-linux-amd64.tar.gz; \ 19 | echo "$KUBERNETES_CLIENTS_SHA256 k.tar.gz" | sha256sum -c; \ 20 | tar -xvzf k.tar.gz -C /usr/local/bin/ --strip-components=3 kubernetes/client/bin/kubectl; \ 21 | rm k.tar.gz; \ 22 | \ 23 | apt-get purge -y --auto-remove $buildDeps; \ 24 | rm /var/log/dpkg.log /var/log/apt/*.log 25 | 26 | RUN kubectl version --client 27 | 28 | COPY --from=cfssl /go/bin/cfssl* /usr/local/bin/ 29 | RUN cfssl version 30 | -------------------------------------------------------------------------------- /kubectl-kafkacat/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafkacat@sha256:5bf858fde6fffbaaf0278fd27458626f96bc908a440ba434536841bb79c70e25 2 | 3 | ENV KUBERNETES_VERSION=1.13.1 KUBERNETES_CLIENT_SHA512=ca00442f50b5d5627357dce97c90c17cb0126d746b887afdab2d4db9e0826532469fd1ee62f40eb6923761618f46752d10993578ca19c8b92c3a2aeb5102a318 4 | 5 | RUN set -ex; \ 6 | export DEBIAN_FRONTEND=noninteractive; \ 7 | runDeps='procps'; \ 8 | buildDeps='curl ca-certificates'; \ 9 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 10 | rm -rf /var/lib/apt/lists/*; \ 11 | \ 12 | curl -sLS -o k.tar.gz -k https://dl.k8s.io/v${KUBERNETES_VERSION}/kubernetes-client-linux-amd64.tar.gz; \ 13 | echo "$KUBERNETES_CLIENT_SHA512 k.tar.gz" | sha512sum -c; \ 14 | tar -xvzf k.tar.gz -C /usr/local/bin/ --strip-components=3 kubernetes/client/bin/kubectl; \ 15 | rm k.tar.gz; \ 16 | \ 17 | apt-get purge -y --auto-remove $buildDeps; \ 18 | rm /var/log/dpkg.log /var/log/apt/*.log 19 | 20 | ENTRYPOINT ["kubectl"] 21 | -------------------------------------------------------------------------------- /monitor/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM adoptopenjdk/openjdk11:jdk-11.0.4_11-slim@sha256:79f43f49f505df27528a3dce52e30339116ed6716b1f658206ba76caca26c85b 2 | 3 | ENV KAFKA_MONITOR_REPO=https://github.com/linkedin/kafka-monitor \ 4 | KAFKA_MONITOR_VERSION=2.0.4 \ 5 | KAFKA_MONITOR_SHA256=3713a76a970bd99e72fe8ce1c8be77d4b036057367352265a70ad23b55acef35 6 | 7 | RUN set -ex; \ 8 | export DEBIAN_FRONTEND=noninteractive; \ 9 | runDeps=''; \ 10 | buildDeps='curl ca-certificates unzip'; \ 11 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 12 | \ 13 | cd /opt; \ 14 | GRADLE_VERSION=4.10.2 PATH=$PATH:$(pwd)/gradle-$GRADLE_VERSION/bin; \ 15 | curl -SLs -o gradle-$GRADLE_VERSION-bin.zip https://services.gradle.org/distributions/gradle-$GRADLE_VERSION-bin.zip; \ 16 | echo 
"b49c6da1b2cb67a0caf6c7480630b51c70a11ca2016ff2f555eaeda863143a29 gradle-$GRADLE_VERSION-bin.zip" | sha256sum -c -; \ 17 | unzip gradle-$GRADLE_VERSION-bin.zip; \ 18 | rm gradle-$GRADLE_VERSION-bin.zip; \ 19 | gradle -v; \ 20 | \ 21 | mkdir -p /opt/kafka-monitor; \ 22 | curl -o monitor.tar.gz -SLs "$KAFKA_MONITOR_REPO/archive/$KAFKA_MONITOR_VERSION.tar.gz"; \ 23 | echo "$KAFKA_MONITOR_SHA256 monitor.tar.gz" | sha256sum -c; \ 24 | tar -xzf monitor.tar.gz --strip-components=1 -C /opt/kafka-monitor; \ 25 | rm monitor.tar.gz; \ 26 | \ 27 | cd /opt/kafka-monitor; \ 28 | rm gradlew; \ 29 | gradle --no-daemon jar; \ 30 | \ 31 | sed -i 's/localhost:2181/zookeeper:2181/' config/kafka-monitor.properties; \ 32 | sed -i 's/localhost:9092/bootstrap:9092/' config/kafka-monitor.properties; \ 33 | \ 34 | cat config/kafka-monitor.properties; \ 35 | cat config/log4j.properties; \ 36 | \ 37 | rm -rf /opt/gradle* /root/.gradle; \ 38 | \ 39 | apt-get purge -y --auto-remove $buildDeps nodejs; \ 40 | rm -rf /var/lib/apt/lists/*; \ 41 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt 42 | 43 | WORKDIR /opt/kafka-monitor 44 | 45 | ENTRYPOINT ["./bin/kafka-monitor-start.sh"] 46 | CMD ["/opt/kafka-monitor/config/kafka-monitor.properties"] 47 | -------------------------------------------------------------------------------- /native/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # These are only used for ./native/native-usecases.sh, and the result is ./native/config resources 3 | kafka-entrypoints 4 | -------------------------------------------------------------------------------- /native/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka:graalvm 2 | 3 | # Should be identical to kafka-nonroot's user 4 | RUN echo 'nonroot:x:65532:65534:nonroot:/home/nonroot:/usr/sbin/nologin' >> /etc/passwd && \ 5 | mkdir -p /home/nonroot && touch /home/nonroot/.bash_history && chown -R 65532:65534 /home/nonroot 6 | USER nonroot:nogroup 7 | 8 | COPY --from=solsson/kafka:nonroot /opt/kafka /opt/kafka 9 | COPY --from=solsson/kafka:nonroot /etc/kafka /etc/kafka 10 | -------------------------------------------------------------------------------- /native/admincmd.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM adoptopenjdk:11.0.7_10-jdk-hotspot-bionic@sha256:c2ce12d7530d957f2d44dd33339eeeafa3b889c27af0824b186c4afef1f843ef \ 2 | as nonlibs 3 | RUN echo "class Empty {public static void main(String[] a){}}" > Empty.java && javac Empty.java && jar --create --file /empty.jar Empty.class 4 | 5 | FROM curlimages/curl@sha256:aa45e9d93122a3cfdf8d7de272e2798ea63733eeee6d06bd2ee4f2f8c4027d7c \ 6 | as extralibs 7 | 8 | USER root 9 | RUN curl -sLS -o /slf4j-nop-1.7.30.jar https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.30/slf4j-nop-1.7.30.jar 10 | RUN curl -sLS -o /quarkus-kafka-client-1.6.0.Final.jar https://repo1.maven.org/maven2/io/quarkus/quarkus-kafka-client/1.6.0.Final/quarkus-kafka-client-1.6.0.Final.jar 11 | 12 | FROM solsson/kafka:nativebase as native 13 | 14 | ARG classpath=/opt/kafka/libs/extensions/*:/opt/kafka/libs/* 15 | 16 | COPY --from=extralibs /*.jar /opt/kafka/libs/extensions/ 17 | 18 | # docker run --rm --entrypoint ls solsson/kafka -l /opt/kafka/libs/ | grep log 19 | COPY --from=nonlibs /empty.jar /opt/kafka/libs/slf4j-log4j12-1.7.30.jar 20 | 21 | COPY configs/{{command}} /home/nonroot/native-config 22 | 23 | RUN native-image \ 24 | 
--no-server \ 25 | --install-exit-handlers \ 26 | -H:+ReportExceptionStackTraces \ 27 | --no-fallback \ 28 | -H:IncludeResourceBundles=joptsimple.HelpFormatterMessages \ 29 | -H:IncludeResourceBundles=joptsimple.ExceptionMessages \ 30 | -H:ConfigurationFileDirectories=/home/nonroot/native-config \ 31 | # When testing the build for a new version we should remove this one, but then it tends to come back 32 | --allow-incomplete-classpath \ 33 | --report-unsupported-elements-at-runtime \ 34 | # -D options from entrypoint 35 | -Djava.awt.headless=true \ 36 | -Dkafka.logs.dir=/opt/kafka/bin/../logs \ 37 | -cp ${classpath} \ 38 | -H:Name={{command}} \ 39 | {{mainclass}} \ 40 | /home/nonroot/{{command}} 41 | 42 | FROM gcr.io/distroless/base-debian10:nonroot@sha256:78f2372169e8d9c028da3856bce864749f2bb4bbe39c69c8960a6e40498f8a88 43 | 44 | COPY --from=native \ 45 | /lib/x86_64-linux-gnu/libz.so.* \ 46 | /lib/x86_64-linux-gnu/ 47 | 48 | COPY --from=native \ 49 | /usr/lib/x86_64-linux-gnu/libzstd.so.* \ 50 | /usr/lib/x86_64-linux-gnu/libsnappy.so.* \ 51 | /usr/lib/x86_64-linux-gnu/liblz4.so.* \ 52 | /usr/lib/x86_64-linux-gnu/ 53 | 54 | WORKDIR /usr/local 55 | 56 | ARG command= 57 | COPY --from=native /home/nonroot/{{command}} ./bin/{{command}}.sh 58 | 59 | ENTRYPOINT [ "/usr/local/bin/{{command}}.sh" ] 60 | -------------------------------------------------------------------------------- /native/cli-scripts/cli-list.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | (cd /usr/local/bin && ls -l) 4 | -------------------------------------------------------------------------------- /native/cli-scripts/kafka-topics_ifnotexists.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Utility for --create to replace --if-not-exists until that flag ceases to require zookeeper 3 | log=$(mktemp) 4 | kafka-topics "$@" | tee $log 5 | result=${PIPESTATUS[0]} 6 | grep "already exists" $log && exit 0 7 | rm $log 8 | exit $result 9 | -------------------------------------------------------------------------------- /native/cli.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:20.04@sha256:c844b5fee673cd976732dda6860e09b8f2ae5b324777b6f9d25fd70a0904c2e0 2 | 3 | WORKDIR /usr/local 4 | COPY --from=solsson/kafka:native-kafka-topics /usr/local/bin/* /usr/local/bin/ 5 | COPY --from=solsson/kafka:native-kafka-configs /usr/local/bin/* /usr/local/bin/ 6 | COPY --from=solsson/kafka:native-kafka-consumer-groups /usr/local/bin/* /usr/local/bin/ 7 | 8 | RUN set -ex; \ 9 | export DEBIAN_FRONTEND=noninteractive; \ 10 | runDeps='ca-certificates netcat-openbsd libsnappy1v5 liblz4-1 libzstd1 kafkacat jq'; \ 11 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 12 | \ 13 | rm -rf /var/lib/apt/lists; \ 14 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt /root/.gnupg 15 | 16 | COPY cli-scripts/* /usr/local/bin/ 17 | 18 | RUN for sh in $(find /usr/local/bin/ -name *.sh); do \ 19 | ln -s $sh $(echo -n $sh | sed 's/\.sh$//'); \ 20 | done 21 | 22 | # Should be identical to kafka-nonroot's user 23 | RUN echo 'nonroot:x:65532:65534:nonroot:/home/nonroot:/usr/sbin/nologin' >> /etc/passwd && \ 24 | mkdir -p /home/nonroot && touch /home/nonroot/.bash_history && chown -R 65532:65534 /home/nonroot 25 | USER nonroot:nogroup 26 | 27 | ENTRYPOINT [ "cli-list" ] 28 | 
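A usage sketch for the image this Dockerfile produces (hooks/build tags it solsson/kafka:native-cli; my-broker:9092 and topic1 are placeholders, not values from this repository):

# Default entrypoint lists the bundled tools
docker run --rm solsson/kafka:native-cli
# The native admin CLIs and the cli-scripts are symlinked without their .sh suffix
docker run --rm --entrypoint kafka-topics solsson/kafka:native-cli \
  --bootstrap-server my-broker:9092 --list
# kafka-topics_ifnotexists wraps --create so that "topic already exists" is not treated as an error
docker run --rm --entrypoint kafka-topics_ifnotexists solsson/kafka:native-cli \
  --bootstrap-server my-broker:9092 --create --topic topic1 --partitions 1 --replication-factor 1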
-------------------------------------------------------------------------------- /native/configs-manual-additions/kafka-configs/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "org.apache.log4j.helpers.Loader" 4 | }, 5 | { 6 | "name": "sun.security.provider.ConfigFile", 7 | "methods": [ 8 | { "name": "", "parameterTypes": [] } 9 | ] 10 | } 11 | ] 12 | -------------------------------------------------------------------------------- /native/configs-manual-additions/kafka-consumer-groups/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "org.apache.log4j.helpers.Loader" 4 | }, 5 | { 6 | "name": "sun.security.provider.ConfigFile", 7 | "methods": [ 8 | { "name": "", "parameterTypes": [] } 9 | ] 10 | } 11 | ] 12 | -------------------------------------------------------------------------------- /native/configs-manual-additions/kafka-topics/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "org.apache.log4j.helpers.Loader" 4 | }, 5 | { 6 | "name": "io.netty.util.internal.logging.Log4J2Logger" 7 | }, 8 | { 9 | "name": "sun.security.provider.ConfigFile", 10 | "methods": [ 11 | { "name": "", "parameterTypes": [] } 12 | ] 13 | } 14 | ] 15 | -------------------------------------------------------------------------------- /native/configs-manual-additions/zookeeper-server-start/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/kafka-configs/jni-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/kafka-configs/proxy-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/kafka-configs/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name":"com.fasterxml.jackson.databind.ext.Java7SupportImpl", 4 | "methods":[{"name":"","parameterTypes":[] }] 5 | }, 6 | { 7 | "name":"java.io.Serializable", 8 | "allDeclaredMethods":true 9 | }, 10 | { 11 | "name":"java.lang.Comparable", 12 | "allDeclaredMethods":true 13 | }, 14 | { 15 | "name":"java.lang.Integer", 16 | "allDeclaredFields":true, 17 | "allDeclaredMethods":true, 18 | "allDeclaredConstructors":true 19 | }, 20 | { 21 | "name":"java.lang.Number", 22 | "allDeclaredFields":true, 23 | "allDeclaredMethods":true 24 | }, 25 | { 26 | "name":"java.lang.Runtime", 27 | "methods":[{"name":"version","parameterTypes":[] }] 28 | }, 29 | { 30 | "name":"java.lang.Runtime$Version", 31 | "methods":[{"name":"major","parameterTypes":[] }] 32 | }, 33 | { 34 | "name":"java.lang.String", 35 | "methods":[{"name":"","parameterTypes":["java.lang.String"] }] 36 | }, 37 | { 38 | "name":"java.lang.String[]" 39 | }, 40 | { 41 | "name":"java.lang.String[][]" 42 | }, 43 | { 44 | "name":"java.lang.Thread", 45 | "methods":[{"name":"getContextClassLoader","parameterTypes":[] }] 46 | }, 47 | { 48 | "name":"java.lang.ThreadLocal", 49 | "methods":[{"name":"remove","parameterTypes":[] }] 50 | }, 51 | { 52 | "name":"java.util.AbstractMap", 53 | 
"allDeclaredFields":true, 54 | "allDeclaredMethods":true 55 | }, 56 | { 57 | "name":"java.util.Map", 58 | "allDeclaredMethods":true 59 | }, 60 | { 61 | "name":"kafka.utils.Log4jController", 62 | "methods":[{"name":"","parameterTypes":[] }] 63 | }, 64 | { 65 | "name":"org.apache.kafka.common.resource.PatternType[]" 66 | }, 67 | { 68 | "name":"org.apache.log4j.Appender" 69 | }, 70 | { 71 | "name":"org.apache.log4j.Category" 72 | }, 73 | { 74 | "name":"org.apache.log4j.CategoryKey" 75 | }, 76 | { 77 | "name":"org.apache.log4j.ConsoleAppender", 78 | "methods":[{"name":"","parameterTypes":[] }] 79 | }, 80 | { 81 | "name":"org.apache.log4j.DailyRollingFileAppender", 82 | "methods":[{"name":"","parameterTypes":[] }] 83 | }, 84 | { 85 | "name":"org.apache.log4j.Layout" 86 | }, 87 | { 88 | "name":"org.apache.log4j.Logger" 89 | }, 90 | { 91 | "name":"org.apache.log4j.PatternLayout", 92 | "methods":[{"name":"","parameterTypes":[] }] 93 | }, 94 | { 95 | "name":"org.apache.log4j.helpers.Loader" 96 | }, 97 | { 98 | "name":"org.apache.log4j.spi.OptionHandler" 99 | }, 100 | { 101 | "name":"org.apache.zookeeper.ClientCnxnSocketNIO", 102 | "methods":[{"name":"","parameterTypes":["org.apache.zookeeper.client.ZKClientConfig"] }] 103 | }, 104 | { 105 | "name":"scala.Serializable", 106 | "allDeclaredMethods":true 107 | }, 108 | { 109 | "name":"scala.Tuple2[]" 110 | }, 111 | { 112 | "name":"scala.collection.convert.Wrappers$MapWrapper", 113 | "allDeclaredFields":true, 114 | "allDeclaredMethods":true 115 | }, 116 | { 117 | "name":"sun.security.provider.ConfigFile", 118 | "methods":[{"name":"","parameterTypes":[] }] 119 | } 120 | ] 121 | -------------------------------------------------------------------------------- /native/configs/kafka-configs/resource-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources":[ 3 | {"pattern":"\\Qkafka/kafka-version.properties\\E"}, 4 | {"pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E"} 5 | ], 6 | "bundles":[] 7 | } 8 | -------------------------------------------------------------------------------- /native/configs/kafka-consumer-groups/jni-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/kafka-consumer-groups/proxy-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/kafka-consumer-groups/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name":"java.lang.Long", 4 | "methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }] 5 | }, 6 | { 7 | "name":"java.lang.String", 8 | "methods":[{"name":"","parameterTypes":["java.lang.String"] }] 9 | }, 10 | { 11 | "name":"java.lang.Thread", 12 | "methods":[{"name":"getContextClassLoader","parameterTypes":[] }] 13 | }, 14 | { 15 | "name":"kafka.admin.ConsumerGroupCommand$PartitionAssignmentState[]" 16 | }, 17 | { 18 | "name":"kafka.utils.Log4jController", 19 | "methods":[{"name":"","parameterTypes":[] }] 20 | }, 21 | { 22 | "name":"org.apache.log4j.Appender" 23 | }, 24 | { 25 | "name":"org.apache.log4j.Category" 26 | }, 27 | { 28 | "name":"org.apache.log4j.CategoryKey" 29 | }, 30 | { 31 | "name":"org.apache.log4j.ConsoleAppender", 32 | "methods":[{"name":"","parameterTypes":[] }] 33 | }, 34 
| { 35 | "name":"org.apache.log4j.DailyRollingFileAppender", 36 | "methods":[{"name":"","parameterTypes":[] }] 37 | }, 38 | { 39 | "name":"org.apache.log4j.Layout" 40 | }, 41 | { 42 | "name":"org.apache.log4j.Logger" 43 | }, 44 | { 45 | "name":"org.apache.log4j.PatternLayout", 46 | "methods":[{"name":"","parameterTypes":[] }] 47 | }, 48 | { 49 | "name":"org.apache.log4j.helpers.Loader" 50 | }, 51 | { 52 | "name":"org.apache.log4j.spi.OptionHandler" 53 | }, 54 | { 55 | "name":"sun.security.provider.ConfigFile", 56 | "methods":[{"name":"","parameterTypes":[] }] 57 | } 58 | ] 59 | -------------------------------------------------------------------------------- /native/configs/kafka-consumer-groups/resource-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources":[ 3 | {"pattern":"\\Qkafka/kafka-version.properties\\E"}, 4 | {"pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E"} 5 | ], 6 | "bundles":[] 7 | } 8 | -------------------------------------------------------------------------------- /native/configs/kafka-server-start/jni-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/kafka-server-start/proxy-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ["sun.misc.SignalHandler"] 3 | ] 4 | -------------------------------------------------------------------------------- /native/configs/kafka-server-start/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name":"com.fasterxml.jackson.databind.ext.Java7SupportImpl", 4 | "methods":[{"name":"","parameterTypes":[] }] 5 | }, 6 | { 7 | "name":"java.io.File[]" 8 | }, 9 | { 10 | "name":"java.io.Serializable", 11 | "allDeclaredMethods":true 12 | }, 13 | { 14 | "name":"java.lang.Comparable", 15 | "allDeclaredMethods":true 16 | }, 17 | { 18 | "name":"java.lang.Integer", 19 | "allDeclaredFields":true, 20 | "allDeclaredMethods":true, 21 | "allDeclaredConstructors":true 22 | }, 23 | { 24 | "name":"java.lang.Iterable", 25 | "allDeclaredMethods":true 26 | }, 27 | { 28 | "name":"java.lang.Long", 29 | "allDeclaredFields":true, 30 | "allDeclaredMethods":true, 31 | "allDeclaredConstructors":true 32 | }, 33 | { 34 | "name":"java.lang.Number", 35 | "allDeclaredFields":true, 36 | "allDeclaredMethods":true 37 | }, 38 | { 39 | "name":"java.lang.Runnable[]" 40 | }, 41 | { 42 | "name":"java.lang.Runtime", 43 | "methods":[{"name":"version","parameterTypes":[] }] 44 | }, 45 | { 46 | "name":"java.lang.Runtime$Version", 47 | "methods":[{"name":"major","parameterTypes":[] }] 48 | }, 49 | { 50 | "name":"java.lang.String", 51 | "methods":[{"name":"","parameterTypes":["java.lang.String"] }] 52 | }, 53 | { 54 | "name":"java.lang.String[]" 55 | }, 56 | { 57 | "name":"java.lang.Thread", 58 | "methods":[{"name":"getContextClassLoader","parameterTypes":[] }] 59 | }, 60 | { 61 | "name":"java.lang.ThreadLocal", 62 | "methods":[{"name":"remove","parameterTypes":[] }] 63 | }, 64 | { 65 | "name":"java.util.AbstractCollection", 66 | "allDeclaredFields":true, 67 | "allDeclaredMethods":true 68 | }, 69 | { 70 | "name":"java.util.AbstractList", 71 | "allDeclaredFields":true, 72 | "allDeclaredMethods":true 73 | }, 74 | { 75 | "name":"java.util.AbstractMap", 76 | "allDeclaredFields":true, 77 | "allDeclaredMethods":true 78 | }, 79 | { 80 | 
"name":"java.util.Collection", 81 | "allDeclaredMethods":true 82 | }, 83 | { 84 | "name":"java.util.List", 85 | "allDeclaredMethods":true 86 | }, 87 | { 88 | "name":"java.util.Map", 89 | "allDeclaredMethods":true 90 | }, 91 | { 92 | "name":"java.util.zip.CRC32C" 93 | }, 94 | { 95 | "name":"kafka.server.DelayedOperationPurgatory$WatcherList[]" 96 | }, 97 | { 98 | "name":"kafka.utils.Log4jController", 99 | "methods":[{"name":"","parameterTypes":[] }] 100 | }, 101 | { 102 | "name":"kafka.utils.timer.TimerTaskList[]" 103 | }, 104 | { 105 | "name":"org.apache.kafka.common.protocol.types.Struct[]" 106 | }, 107 | { 108 | "name":"org.apache.kafka.common.resource.PatternType[]" 109 | }, 110 | { 111 | "name":"org.apache.log4j.Appender" 112 | }, 113 | { 114 | "name":"org.apache.log4j.Category" 115 | }, 116 | { 117 | "name":"org.apache.log4j.CategoryKey" 118 | }, 119 | { 120 | "name":"org.apache.log4j.ConsoleAppender", 121 | "methods":[{"name":"","parameterTypes":[] }] 122 | }, 123 | { 124 | "name":"org.apache.log4j.DailyRollingFileAppender", 125 | "methods":[{"name":"","parameterTypes":[] }] 126 | }, 127 | { 128 | "name":"org.apache.log4j.Layout" 129 | }, 130 | { 131 | "name":"org.apache.log4j.Logger" 132 | }, 133 | { 134 | "name":"org.apache.log4j.PatternLayout", 135 | "methods":[{"name":"","parameterTypes":[] }] 136 | }, 137 | { 138 | "name":"org.apache.log4j.spi.OptionHandler" 139 | }, 140 | { 141 | "name":"org.apache.zookeeper.ClientCnxnSocketNIO", 142 | "methods":[{"name":"","parameterTypes":["org.apache.zookeeper.client.ZKClientConfig"] }] 143 | }, 144 | { 145 | "name":"scala.Equals", 146 | "allDeclaredMethods":true 147 | }, 148 | { 149 | "name":"scala.Product", 150 | "allDeclaredMethods":true 151 | }, 152 | { 153 | "name":"scala.Serializable", 154 | "allDeclaredMethods":true 155 | }, 156 | { 157 | "name":"scala.Tuple2[]" 158 | }, 159 | { 160 | "name":"scala.collection.convert.Wrappers$IterableWrapperTrait", 161 | "allDeclaredMethods":true 162 | }, 163 | { 164 | "name":"scala.collection.convert.Wrappers$MapWrapper", 165 | "allDeclaredFields":true, 166 | "allDeclaredMethods":true 167 | }, 168 | { 169 | "name":"scala.collection.convert.Wrappers$MutableBufferWrapper", 170 | "allDeclaredFields":true, 171 | "allDeclaredMethods":true 172 | }, 173 | { 174 | "name":"scala.collection.convert.Wrappers$MutableMapWrapper", 175 | "allDeclaredFields":true, 176 | "allDeclaredMethods":true 177 | }, 178 | { 179 | "name":"scala.collection.convert.Wrappers$SeqWrapper", 180 | "allDeclaredFields":true, 181 | "allDeclaredMethods":true 182 | }, 183 | { 184 | "name":"sun.misc.Signal", 185 | "methods":[ 186 | {"name":"","parameterTypes":["java.lang.String"] }, 187 | {"name":"getName","parameterTypes":[] }, 188 | {"name":"handle","parameterTypes":["sun.misc.Signal","sun.misc.SignalHandler"] } 189 | ] 190 | }, 191 | { 192 | "name":"sun.misc.SignalHandler", 193 | "methods":[{"name":"handle","parameterTypes":["sun.misc.Signal"] }] 194 | }, 195 | { 196 | "name":"sun.misc.Unsafe", 197 | "fields":[{"name":"theUnsafe"}] 198 | }, 199 | { 200 | "name":"sun.security.provider.ConfigFile", 201 | "methods":[{"name":"","parameterTypes":[] }] 202 | } 203 | ] 204 | -------------------------------------------------------------------------------- /native/configs/kafka-server-start/resource-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources":[ 3 | {"pattern":"\\Qkafka/kafka-version.properties\\E"}, 4 | {"pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E"} 5 | ], 6 | 
"bundles":[] 7 | } 8 | -------------------------------------------------------------------------------- /native/configs/kafka-topics/jni-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/kafka-topics/proxy-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/kafka-topics/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name":"com.fasterxml.jackson.databind.ext.Java7SupportImpl", 4 | "methods":[{"name":"","parameterTypes":[] }] 5 | }, 6 | { 7 | "name":"java.lang.Integer", 8 | "methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }] 9 | }, 10 | { 11 | "name":"java.lang.Runtime", 12 | "methods":[{"name":"version","parameterTypes":[] }] 13 | }, 14 | { 15 | "name":"java.lang.Runtime$Version", 16 | "methods":[{"name":"major","parameterTypes":[] }] 17 | }, 18 | { 19 | "name":"java.lang.String", 20 | "methods":[{"name":"","parameterTypes":["java.lang.String"] }] 21 | }, 22 | { 23 | "name":"java.lang.String[]" 24 | }, 25 | { 26 | "name":"java.lang.Thread", 27 | "methods":[{"name":"getContextClassLoader","parameterTypes":[] }] 28 | }, 29 | { 30 | "name":"java.lang.ThreadLocal", 31 | "methods":[{"name":"remove","parameterTypes":[] }] 32 | }, 33 | { 34 | "name":"kafka.utils.Log4jController", 35 | "methods":[{"name":"","parameterTypes":[] }] 36 | }, 37 | { 38 | "name":"org.apache.log4j.Appender" 39 | }, 40 | { 41 | "name":"org.apache.log4j.Category" 42 | }, 43 | { 44 | "name":"org.apache.log4j.CategoryKey" 45 | }, 46 | { 47 | "name":"org.apache.log4j.ConsoleAppender", 48 | "methods":[{"name":"","parameterTypes":[] }] 49 | }, 50 | { 51 | "name":"org.apache.log4j.DailyRollingFileAppender", 52 | "methods":[{"name":"","parameterTypes":[] }] 53 | }, 54 | { 55 | "name":"org.apache.log4j.Layout" 56 | }, 57 | { 58 | "name":"org.apache.log4j.Logger" 59 | }, 60 | { 61 | "name":"org.apache.log4j.PatternLayout", 62 | "methods":[{"name":"","parameterTypes":[] }] 63 | }, 64 | { 65 | "name":"org.apache.log4j.spi.OptionHandler" 66 | }, 67 | { 68 | "name":"org.apache.zookeeper.ClientCnxnSocketNIO", 69 | "methods":[{"name":"","parameterTypes":["org.apache.zookeeper.client.ZKClientConfig"] }] 70 | }, 71 | { 72 | "name":"scala.Tuple2[]" 73 | }, 74 | { 75 | "name":"sun.security.provider.ConfigFile", 76 | "methods":[{"name":"","parameterTypes":[] }] 77 | } 78 | ] 79 | -------------------------------------------------------------------------------- /native/configs/kafka-topics/resource-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources":[ 3 | {"pattern":"\\Qkafka/kafka-version.properties\\E"}, 4 | {"pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E"} 5 | ], 6 | "bundles":[] 7 | } 8 | -------------------------------------------------------------------------------- /native/configs/zookeeper-server-start/jni-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | -------------------------------------------------------------------------------- /native/configs/zookeeper-server-start/proxy-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | ] 3 | 
-------------------------------------------------------------------------------- /native/configs/zookeeper-server-start/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name":"java.lang.Runtime", 4 | "methods":[{"name":"version","parameterTypes":[] }] 5 | }, 6 | { 7 | "name":"java.lang.Runtime$Version", 8 | "methods":[{"name":"major","parameterTypes":[] }] 9 | }, 10 | { 11 | "name":"java.lang.String" 12 | }, 13 | { 14 | "name":"java.lang.Thread", 15 | "methods":[{"name":"getContextClassLoader","parameterTypes":[] }] 16 | }, 17 | { 18 | "name":"java.lang.ThreadLocal", 19 | "methods":[{"name":"remove","parameterTypes":[] }] 20 | }, 21 | { 22 | "name":"org.apache.log4j.Appender" 23 | }, 24 | { 25 | "name":"org.apache.log4j.ConsoleAppender", 26 | "methods":[{"name":"","parameterTypes":[] }] 27 | }, 28 | { 29 | "name":"org.apache.log4j.Layout" 30 | }, 31 | { 32 | "name":"org.apache.log4j.Level" 33 | }, 34 | { 35 | "name":"org.apache.log4j.LogManager" 36 | }, 37 | { 38 | "name":"org.apache.log4j.Logger", 39 | "methods":[{"name":"getRootLogger","parameterTypes":[] }] 40 | }, 41 | { 42 | "name":"org.apache.log4j.PatternLayout", 43 | "methods":[{"name":"","parameterTypes":[] }] 44 | }, 45 | { 46 | "name":"org.apache.log4j.Priority" 47 | }, 48 | { 49 | "name":"org.apache.log4j.spi.LoggerRepository" 50 | }, 51 | { 52 | "name":"org.apache.log4j.spi.OptionHandler" 53 | }, 54 | { 55 | "name":"org.apache.zookeeper.server.NIOServerCnxnFactory", 56 | "methods":[{"name":"","parameterTypes":[] }] 57 | }, 58 | { 59 | "name":"sun.security.provider.ConfigFile", 60 | "methods":[{"name":"","parameterTypes":[] }] 61 | } 62 | ] 63 | -------------------------------------------------------------------------------- /native/configs/zookeeper-server-start/resource-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources":[{"pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E"}], 3 | "bundles":[] 4 | } 5 | -------------------------------------------------------------------------------- /native/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2.1' 2 | services: 3 | 4 | chgrp: 5 | image: busybox 6 | entrypoint: 7 | - chgrp 8 | command: 9 | - -R 10 | - "65534" 11 | - /configs 12 | volumes: 13 | - ./configs:/configs 14 | 15 | zoo-0: 16 | image: solsson/kafka:nativeagent-zookeeper-server-start 17 | depends_on: 18 | - chgrp 19 | command: 20 | - /etc/kafka/zookeeper.properties.zoo-0 21 | volumes: 22 | - ./native-usecases.sh.configs:/etc/kafka 23 | - ./native-usecases.sh.configs/myid.zoo-0:/home/nonroot/myid 24 | 25 | zoo-1: 26 | image: solsson/kafka:nativeagent-zookeeper-server-start 27 | depends_on: 28 | - zoo-0 29 | command: 30 | - /etc/kafka/zookeeper.properties.zoo-1 31 | volumes: 32 | - ./configs/zookeeper-server-start:/home/nonroot/native-config 33 | - ./native-usecases.sh.configs:/etc/kafka 34 | - ./native-usecases.sh.configs/myid.zoo-1:/home/nonroot/myid 35 | 36 | zoo-2: 37 | image: solsson/kafka:nativeagent-zookeeper-server-start 38 | depends_on: 39 | - zoo-1 40 | command: 41 | - /etc/kafka/zookeeper.properties.zoo-2 42 | volumes: 43 | - ./native-usecases.sh.configs:/etc/kafka 44 | - ./native-usecases.sh.configs/myid.zoo-2:/home/nonroot/myid 45 | 46 | kafka-0: 47 | image: solsson/kafka:nativeagent-kafka-server-start 48 | depends_on: 49 | - zoo-2 50 | volumes: 51 | - ./configs/kafka-server-start:/home/nonroot/native-config 52 
| command: 53 | - /etc/kafka/server.properties 54 | - --override 55 | - zookeeper.connect=zoo-1:2181 56 | - --override 57 | - advertised.listeners=PLAINTEXT://kafka-0:9092 58 | - --override 59 | - auto.create.topics.enable=false 60 | 61 | kafka-1: 62 | image: solsson/kafka:nativeagent-kafka-server-start 63 | depends_on: 64 | - kafka-0 65 | command: 66 | - /etc/kafka/server.properties 67 | - --override 68 | - broker.id=1 69 | - --override 70 | - zookeeper.connect=zoo-2:2181 71 | - --override 72 | - advertised.listeners=PLAINTEXT://kafka-1:9092 73 | - --override 74 | - auto.create.topics.enable=false 75 | 76 | step1: 77 | image: solsson/kafka:nativeagent-kafka-topics 78 | volumes: [ ./configs/kafka-topics:/home/nonroot/native-config ] 79 | command: 80 | - --bootstrap-server=kafka-0:9092 81 | - --create 82 | - --topic=topic1 83 | - --config 84 | - compression.type=uncompressed 85 | 86 | step2: 87 | image: solsson/kafka:nativeagent-kafka-topics 88 | volumes: [ ./configs/kafka-topics:/home/nonroot/native-config ] 89 | command: 90 | - --zookeeper=zoo-0:2181 91 | - --create 92 | - --topic=topic1 93 | - --partitions=1 94 | - --replication-factor=1 95 | - --config 96 | - compression.type=uncompressed 97 | - --if-not-exists 98 | 99 | step3: 100 | image: solsson/kafka:nativeagent-kafka-topics 101 | volumes: [ ./configs/kafka-topics:/home/nonroot/native-config ] 102 | command: 103 | - --bootstrap-server=kafka-0:9092 104 | - --list 105 | 106 | step4: 107 | image: solsson/kafka:nativeagent-kafka-configs 108 | volumes: [ ./configs/kafka-configs:/home/nonroot/native-config ] 109 | command: 110 | - --zookeeper=zoo-0:2181 111 | - --entity-type=topics 112 | - --entity-name=topic1 113 | - --alter 114 | - --add-config 115 | - retention.ms=-1 116 | 117 | step5: 118 | image: solsson/kafka:nativeagent-kafka-configs 119 | volumes: [ ./configs/kafka-configs:/home/nonroot/native-config ] 120 | command: 121 | - --bootstrap-server=kafka-0:9092 122 | - --entity-type=brokers 123 | - --entity-name=0 124 | - --alter 125 | - --add-config 126 | - min.insync.replicas=1 127 | 128 | step6: 129 | image: solsson/kafka:nativeagent-kafka-consumer-groups 130 | volumes: [ ./configs/kafka-consumer-groups:/home/nonroot/native-config ] 131 | command: 132 | - --bootstrap-server=kafka-0:9092 133 | - --group=testgroup 134 | - --topic=topic1 135 | - --reset-offsets 136 | - --execute 137 | - --to-latest 138 | 139 | step7: 140 | image: solsson/kafka:nativeagent-kafka-consumer-groups 141 | volumes: [ ./configs/kafka-consumer-groups:/home/nonroot/native-config ] 142 | command: 143 | - --bootstrap-server=kafka-0:9092 144 | - --group=testgroup 145 | - --describe 146 | 147 | kafkacat: 148 | image: edenhill/kafkacat:1.5.0 149 | depends_on: 150 | - kafka-0 151 | environment: 152 | - b=kafka-0:9092 153 | - t=topic1 154 | entrypoint: 155 | - /bin/sh 156 | - -c 157 | command: 158 | - | 159 | while true; do sleep 1 && kafkacat -b $$b -L -t $$t | grep 'partition '; done 160 | -------------------------------------------------------------------------------- /native/native-usecases.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | [ -z "$DEBUG" ] || set -x 3 | set -e 4 | 5 | basedir="$(dirname $0)" 6 | compose="docker-compose -f $basedir/docker-compose.yml" 7 | 8 | # Clean up existing native-image config to get fresh results 9 | [ ! 
-z "$CLEANUP" ] || CLEANUP=" 10 | zookeeper-server-start 11 | kafka-server-start 12 | kafka-topics 13 | kafka-configs 14 | kafka-consumer-groups 15 | " 16 | 17 | for entrypoint in $CLEANUP; do 18 | dir=$basedir/configs/$entrypoint 19 | echo '[]' > $dir/jni-config.json 20 | echo '[]' > $dir/reflect-config.json 21 | echo '[]' > $dir/proxy-config.json 22 | echo '{}' > $dir/resource-config.json 23 | [ ! -d $basedir/configs-manual-additions/$entrypoint ] || \ 24 | cp $basedir/configs-manual-additions/$entrypoint/* $dir/ 25 | chmod -R ug+w $dir 26 | done 27 | 28 | $compose up -d zoo-0 29 | $compose up -d zoo-1 30 | $compose up -d zoo-2 31 | sleep 5 32 | $compose up -d kafka-0 33 | $compose up -d kafkacat 34 | $compose ps 35 | 36 | for step in $(seq 1 7); do 37 | $compose up step$step 38 | done 39 | -------------------------------------------------------------------------------- /native/native-usecases.sh.configs/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=INFO, stdout 2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 3 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 4 | log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n 5 | 6 | # Suppress connection log messages, three lines per livenessProbe execution 7 | log4j.logger.org.apache.zookeeper.server.NIOServerCnxnFactory=WARN 8 | log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN 9 | -------------------------------------------------------------------------------- /native/native-usecases.sh.configs/myid.zoo-0: -------------------------------------------------------------------------------- 1 | 1 2 | -------------------------------------------------------------------------------- /native/native-usecases.sh.configs/myid.zoo-1: -------------------------------------------------------------------------------- 1 | 2 2 | -------------------------------------------------------------------------------- /native/native-usecases.sh.configs/myid.zoo-2: -------------------------------------------------------------------------------- 1 | 3 2 | -------------------------------------------------------------------------------- /native/native-usecases.sh.configs/zookeeper.properties.zoo-0: -------------------------------------------------------------------------------- 1 | 4lw.commands.whitelist=ruok 2 | dataDir=/home/nonroot 3 | dataLogDir=/home/nonroot/log 4 | # https://zookeeper.apache.org/doc/r3.5.5/zookeeperAdmin.html#sc_zkMulitServerSetup 5 | tickTime=2000 6 | clientPort=2181 7 | initLimit=5 8 | syncLimit=2 9 | # disable the per-ip limit on the number of connections since this is a non-production config 10 | maxClientCnxns=0 11 | # Disable the adminserver by default to avoid port conflicts. 
12 | # Set the port to something non-conflicting if choosing to enable this 13 | admin.enableServer=false 14 | # The following must be partially edited by the init-config script 15 | server.1=0.0.0.0:2888:3888:participant 16 | server.2=zoo-1:2888:3888:participant 17 | server.3=zoo-2:2888:3888:participant 18 | -------------------------------------------------------------------------------- /native/native-usecases.sh.configs/zookeeper.properties.zoo-1: -------------------------------------------------------------------------------- 1 | 4lw.commands.whitelist=ruok 2 | dataDir=/home/nonroot 3 | dataLogDir=/home/nonroot/log 4 | # https://zookeeper.apache.org/doc/r3.5.5/zookeeperAdmin.html#sc_zkMulitServerSetup 5 | tickTime=2000 6 | clientPort=2181 7 | initLimit=5 8 | syncLimit=2 9 | # disable the per-ip limit on the number of connections since this is a non-production config 10 | maxClientCnxns=0 11 | # Disable the adminserver by default to avoid port conflicts. 12 | # Set the port to something non-conflicting if choosing to enable this 13 | admin.enableServer=false 14 | # The following must be partially edited by the init-config script 15 | server.1=zoo-0:2888:3888:participant 16 | server.2=0.0.0.0:2888:3888:participant 17 | server.3=zoo-2:2888:3888:participant 18 | -------------------------------------------------------------------------------- /native/native-usecases.sh.configs/zookeeper.properties.zoo-2: -------------------------------------------------------------------------------- 1 | 4lw.commands.whitelist=ruok 2 | dataDir=/home/nonroot 3 | dataLogDir=/home/nonroot/log 4 | # https://zookeeper.apache.org/doc/r3.5.5/zookeeperAdmin.html#sc_zkMulitServerSetup 5 | tickTime=2000 6 | clientPort=2181 7 | initLimit=5 8 | syncLimit=2 9 | # disable the per-ip limit on the number of connections since this is a non-production config 10 | maxClientCnxns=0 11 | # Disable the adminserver by default to avoid port conflicts. 
12 | # Set the port to something non-conflicting if choosing to enable this 13 | admin.enableServer=false 14 | # The following must be partially edited by the init-config script 15 | server.1=zoo-0:2888:3888:participant 16 | server.2=zoo-1:2888:3888:participant 17 | server.3=0.0.0.0:2888:3888:participant 18 | -------------------------------------------------------------------------------- /native/zookeeper-server-start.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM curlimages/curl@sha256:aa45e9d93122a3cfdf8d7de272e2798ea63733eeee6d06bd2ee4f2f8c4027d7c \ 2 | as extralibs 3 | 4 | USER root 5 | RUN curl -sLS -o /slf4j-simple-1.7.30.jar https://repo1.maven.org/maven2/org/slf4j/slf4j-simple/1.7.30/slf4j-simple-1.7.30.jar 6 | RUN curl -sLS -o /log4j-over-slf4j-1.7.30.jar https://repo1.maven.org/maven2/org/slf4j/log4j-over-slf4j/1.7.30/log4j-over-slf4j-1.7.30.jar 7 | 8 | FROM solsson/kafka:nativebase as native 9 | 10 | #ARG classpath=/opt/kafka/libs/slf4j-log4j12-1.7.30.jar:/opt/kafka/libs/log4j-1.2.17.jar:/opt/kafka/libs/slf4j-api-1.7.30.jar:/opt/kafka/libs/zookeeper-3.5.7.jar:/opt/kafka/libs/zookeeper-jute-3.5.7.jar 11 | COPY --from=extralibs /*.jar /opt/kafka/libs/extensions/ 12 | ARG classpath=/opt/kafka/libs/slf4j-api-1.7.30.jar:/opt/kafka/libs/extensions/slf4j-simple-1.7.30.jar:/opt/kafka/libs/extensions/log4j-over-slf4j-1.7.30.jar:/opt/kafka/libs/zookeeper-3.5.7.jar:/opt/kafka/libs/zookeeper-jute-3.5.7.jar 13 | 14 | COPY configs/zookeeper-server-start /home/nonroot/native-config 15 | 16 | # Remaining issues: 17 | # - java.lang.NoClassDefFoundError: Could not initialize class org.apache.zookeeper.server.admin.JettyAdminServer 18 | # which is fine because https://github.com/apache/zookeeper/blob/release-3.5.7/zookeeper-server/src/main/java/org/apache/zookeeper/server/admin/AdminServerFactory.java 19 | # documents that admin server is optional and it's only at startup 20 | # - WARN org.apache.zookeeper.server.ZooKeeperServer - Failed to register with JMX 21 | # java.lang.NullPointerException at org.apache.zookeeper.jmx.MBeanRegistry.register(MBeanRegistry.java:108) 22 | # is very annoying because it happens a lot so it fills logs 23 | 24 | RUN native-image \ 25 | --no-server \ 26 | --install-exit-handlers \ 27 | -H:+ReportExceptionStackTraces \ 28 | --no-fallback \ 29 | -H:ConfigurationFileDirectories=/home/nonroot/native-config \ 30 | # Added because of org.apache.zookeeper.common.X509Util, org.apache.zookeeper.common.ZKConfig, javax.net.ssl.SSLContext ... 
31 | --allow-incomplete-classpath \ 32 | # Added because of "ClassNotFoundException: org.apache.zookeeper.server.NIOServerCnxnFactory" 33 | --report-unsupported-elements-at-runtime \ 34 | # -D options from entrypoint 35 | -Djava.awt.headless=true \ 36 | -Dkafka.logs.dir=/opt/kafka/bin/../logs \ 37 | # -Dlog4j.configuration=file:/etc/kafka/log4j.properties \ 38 | -cp ${classpath} \ 39 | -H:Name=zookeeper-server-start \ 40 | org.apache.zookeeper.server.quorum.QuorumPeerMain \ 41 | /home/nonroot/zookeeper-server-start 42 | 43 | FROM gcr.io/distroless/base-debian10:nonroot@sha256:56da492c4800196c29f3e9fac3c0e66af146bfd31694f29f0958d6d568139dd9 44 | 45 | COPY --from=native \ 46 | /lib/x86_64-linux-gnu/libz.so.* \ 47 | /lib/x86_64-linux-gnu/ 48 | 49 | COPY --from=native \ 50 | /usr/lib/x86_64-linux-gnu/libzstd.so.* \ 51 | /usr/lib/x86_64-linux-gnu/libsnappy.so.* \ 52 | /usr/lib/x86_64-linux-gnu/liblz4.so.* \ 53 | /usr/lib/x86_64-linux-gnu/ 54 | 55 | WORKDIR /usr/local 56 | COPY --from=native /home/nonroot/zookeeper-server-start ./bin/zookeeper-server-start.sh 57 | COPY --from=native /etc/kafka /etc/kafka 58 | 59 | ENTRYPOINT [ "/usr/local/bin/zookeeper-server-start.sh" ] 60 | CMD ["/etc/kafka/zookeeper.properties"] 61 | -------------------------------------------------------------------------------- /prometheus-jmx-exporter/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM adoptopenjdk:11.0.7_10-jre-hotspot-bionic@sha256:a119e89693cfca250cecc3756c5efb5fdf523d93d813003b3c2a1d29d8884211 2 | 3 | ENV EXPORTER_VERSION=76cf1bf03c4cd4a050eb089f37af6a8af15abf0a 4 | ENV EXPORTER_REPO=github.com/prometheus/jmx_exporter 5 | 6 | WORKDIR /usr/local/ 7 | 8 | RUN set -ex; \ 9 | DEBIAN_FRONTEND=noninteractive; \ 10 | runDeps=''; \ 11 | buildDeps='curl ca-certificates'; \ 12 | apt-get update && apt-get install -y $runDeps $buildDeps --no-install-recommends; \ 13 | \ 14 | MAVEN_VERSION=3.5.2 PATH=$PATH:$(pwd)/maven/bin; \ 15 | mkdir ./maven; \ 16 | curl -SLs https://archive.apache.org/dist/maven/maven-3/$MAVEN_VERSION/binaries/apache-maven-$MAVEN_VERSION-bin.tar.gz | tar -xzf - --strip-components=1 -C ./maven; \ 17 | mvn --version; \ 18 | \ 19 | mkdir ./jmx_exporter; \ 20 | curl -SLs https://$EXPORTER_REPO/archive/$EXPORTER_VERSION.tar.gz | tar -xzf - --strip-components=1 -C ./jmx_exporter; \ 21 | cd ./jmx_exporter; \ 22 | mvn package; \ 23 | find jmx_prometheus_httpserver/ -name *-jar-with-dependencies.jar -exec mv -v '{}' ../jmx_prometheus_httpserver.jar \;; \ 24 | mv example_configs ../; \ 25 | cd ..; \ 26 | \ 27 | rm -Rf ./jmx_exporter ./maven /root/.m2; \ 28 | \ 29 | apt-get purge -y --auto-remove $buildDeps; \ 30 | rm -rf /var/lib/apt/lists/*; \ 31 | rm -rf /var/log/dpkg.log /var/log/alternatives.log /var/log/apt 32 | 33 | COPY collect-all-slow.yml example_configs/ 34 | 35 | ENTRYPOINT ["java", "-jar", "jmx_prometheus_httpserver.jar"] 36 | CMD ["5556", "example_configs/collect-all-slow.yml"] 37 | -------------------------------------------------------------------------------- /prometheus-jmx-exporter/collect-all-slow.yml: -------------------------------------------------------------------------------- 1 | # https://github.com/prometheus/jmx_exporter 2 | startDelaySeconds: 0 3 | jmxUrl: service:jmx:rmi:///jndi/rmi://127.0.0.1:5555/jmxrmi 4 | ssl: false 5 | lowercaseOutputName: false 6 | lowercaseOutputLabelNames: false 7 | -------------------------------------------------------------------------------- /tags/0.10.0.1/Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM solsson/kafka:0.10.0.1@sha256:301b1ca59d62e5cb9c030513ac30ebe6c4c020546611e265dd282380b42be6ee 2 | -------------------------------------------------------------------------------- /tags/0.10.2.0-alpine/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka:0.11.0.0@sha256:4c194db2ec15698aca6f1aa8a2fd5e5c566caed82b4bf43446c388f315397756 2 | -------------------------------------------------------------------------------- /tags/0.11.0.0-alpine/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka:0.11.0.0@sha256:4c194db2ec15698aca6f1aa8a2fd5e5c566caed82b4bf43446c388f315397756 2 | -------------------------------------------------------------------------------- /tags/0.11.0.0/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM solsson/kafka@sha256:b27560de08d30ebf96d12e74f80afcaca503ad4ca3103e63b1fd43a2e4c976ce 2 | --------------------------------------------------------------------------------
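hooks/build at the repository root appears to be written for Docker Hub automated builds, where IMAGE_NAME is injected by the build environment; a local-run sketch, with yourrepo/kafka as a hypothetical stand-in for that value and NOPUSH set so the final docker push loop is skipped:

# yourrepo/kafka is a placeholder for the IMAGE_NAME Docker Hub would normally provide.
# Regular image set: jre, kafka, nonroot, graalvm, nativebase, initutils, cmak and the per-script entrypoint images
NOPUSH=y IMAGE_NAME=yourrepo/kafka ./hooks/build

# Rebuild the entrypoint images on top of nativebase with the GraalVM tracing agent attached
# and replay native/native-usecases.sh, which refreshes the JSON files under native/configs/
NOPUSH=y NATIVEAGENT=true IMAGE_NAME=yourrepo/kafka ./hooks/build

# Compile the admin CLIs, the cli bundle and zookeeper-server-start with native-image;
# the script empties the push list first so only the native-* tags would be pushed
NOPUSH=y NATIVE=true IMAGE_NAME=yourrepo/kafka ./hooks/build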