├── .github └── workflows │ ├── docker-image.yml │ └── kafka-connect.yml ├── .gitignore ├── LICENSE ├── README.md ├── docs └── librdkafka.md ├── java └── message-hub-liberty-sample │ └── lib-message-hub │ ├── README.md │ ├── messagehub.login-1.0.0.jar │ └── messagehub.login-1.0.0.jar.asc ├── kafka-connect ├── Dockerfile ├── IKS │ ├── README.md │ ├── connect-distributed.properties │ ├── connect-log4j.properties │ └── kafka-connect.yaml ├── README.md └── build.sh ├── kafka-java-console-sample ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── build.gradle ├── docs │ ├── CF_Standard_Enterprise_Plan.md │ ├── Cloud_Foundry.md │ ├── Docker_Local.md │ ├── Kubernetes_Service.md │ └── Local.md ├── kafka-java-console-sample.yaml ├── manifest.yml ├── settings.gradle └── src │ └── main │ ├── java │ └── com │ │ └── eventstreams │ │ └── samples │ │ ├── ArgumentParser.java │ │ ├── ConsumerRunnable.java │ │ ├── EventStreamsConsoleSample.java │ │ ├── ProducerRunnable.java │ │ └── env │ │ ├── Environment.java │ │ └── EventStreamsCredentials.java │ └── resources │ └── log4j2.properties ├── kafka-java-console-schema-sample ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── build.gradle ├── docs │ ├── CF_Enterprise_Plan.md │ ├── Cloud_Foundry.md │ ├── Docker_Local.md │ ├── Kubernetes_Service.md │ └── Local.md ├── kafka-java-console-schema-sample.yaml ├── manifest.yml ├── settings.gradle └── src │ └── main │ ├── avro │ └── com │ │ └── eventstreams │ │ └── samples │ │ └── Message.avsc │ ├── java │ └── com │ │ └── eventstreams │ │ └── samples │ │ ├── ArgumentParser.java │ │ ├── ConsumerRunnable.java │ │ ├── EventStreamsConsoleSample.java │ │ ├── ProducerRunnable.java │ │ └── env │ │ ├── Environment.java │ │ └── EventStreamsCredentials.java │ └── resources │ ├── .gitignore │ └── log4j2.properties ├── kafka-java-liberty-sample ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── build.gradle ├── docs │ ├── CF_Standard_Enterprise_Plan.md │ ├── Cloud_Foundry.md │ ├── 
Docker_Local.md │ └── Kubernetes_Service.md ├── kafka-java-liberty-sample.yaml ├── log4j │ └── log4j2.properties ├── manifest.yml ├── resources │ ├── consumer.properties │ └── producer.properties ├── settings.gradle └── src │ └── main │ ├── java │ └── com │ │ └── eventstreams │ │ └── samples │ │ ├── env │ │ ├── Environment.java │ │ └── EventStreamsCredentials.java │ │ └── servlet │ │ ├── KafkaServlet.java │ │ ├── MessageList.java │ │ └── RESTRequest.java │ ├── webapp │ ├── images │ │ └── mh_featured.png │ ├── index.html │ ├── index.js │ └── style.css │ └── wlp │ └── server.xml ├── kafka-mirrormaker ├── Dockerfile ├── IKS │ ├── README.md │ ├── destination.properties │ ├── kafka-mirrormaker.yaml │ ├── source.properties │ └── tools-log4j.properties ├── README.md └── entrypoint.sh ├── kafka-nodejs-console-sample ├── .cfignore ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── app.js ├── consumerLoop.js ├── docs │ ├── CF_Standard_Enterprise_Plan.md │ ├── Cloud_Foundry.md │ ├── Docker_Local.md │ ├── Kubernetes_Service.md │ └── Local.md ├── kafka-nodejs-console-sample.yaml ├── manifest.yml ├── package-lock.json ├── package.json └── producerLoop.js ├── kafka-python-console-sample ├── Dockerfile ├── LICENSE ├── Procfile ├── README.md ├── app.py ├── consumertask.py ├── docs │ ├── CF_Standard_Enterprise_Plan.md │ ├── Cloud_Foundry.md │ ├── Docker_Local.md │ ├── Kubernetes_Service.md │ └── Local.md ├── kafka-python-console-sample.yaml ├── manifest.yml ├── producertask.py ├── requirements.txt ├── rest.py └── runtime.txt └── kafka-topic-stats ├── .gitignore ├── README.md ├── build.gradle ├── settings.gradle └── src └── main └── java └── com └── eventstreams └── samples └── TopicStats.java /.github/workflows/docker-image.yml: -------------------------------------------------------------------------------- 1 | name: Build & publish event-streams-samples images 2 | on: 3 | push: 4 | branches: ['master'] 5 | paths: 6 | - 'kafka-java-console-sample/**' 7 | - 
'kafka-mirrormaker/**' 8 | - 'kafka-java-console-schema-sample/**' 9 | - 'kafka-nodejs-console-sample/**' 10 | - 'kafka-python-console-sample/**' 11 | - 'kafka-java-liberty-sample/**' 12 | 13 | env: 14 | REGISTRY: ghcr.io 15 | 16 | jobs: 17 | build-and-push-image: 18 | runs-on: ubuntu-latest 19 | strategy: 20 | fail-fast: false 21 | matrix: 22 | include: 23 | - directory: kafka-java-console-sample 24 | image: kafka-java-console-sample:latest 25 | - directory: kafka-java-console-schema-sample 26 | image: kafka-java-console-schema-sample:latest 27 | - directory: kafka-mirrormaker 28 | image: kafka-mirrormaker:latest 29 | - directory: kafka-nodejs-console-sample 30 | image: kafka-nodejs-console-sample:latest 31 | - directory: kafka-python-console-sample 32 | image: kafka-python-console-sample:latest 33 | - directory: kafka-java-liberty-sample 34 | image: kafka-java-liberty-sample:latest 35 | 36 | permissions: 37 | contents: read 38 | packages: write 39 | 40 | steps: 41 | - name: Checkout repository 42 | uses: actions/checkout@v4 43 | 44 | - name: Log in to registry 45 | run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin 46 | - name: Build image 47 | run: | 48 | cd ${{ matrix.directory }} 49 | docker build . --file Dockerfile --tag ${{ matrix.image }} --label "latest" 50 | - name: Push image 51 | run: | 52 | IMAGE_ID=ghcr.io/${{ github.repository }}/${{ matrix.image }} 53 | # This changes all uppercase characters to lowercase. 
54 | IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]') 55 | echo IMAGE_ID=$IMAGE_ID 56 | docker tag ${{ matrix.image }} $IMAGE_ID 57 | docker push $IMAGE_ID 58 | -------------------------------------------------------------------------------- /.github/workflows/kafka-connect.yml: -------------------------------------------------------------------------------- 1 | name: Build & publish event-streams-samples images 2 | on: 3 | push: 4 | branches: ['master'] 5 | paths: 6 | - 'kafka-connect/**' 7 | 8 | env: 9 | REGISTRY: ghcr.io 10 | 11 | jobs: 12 | build-and-push-image: 13 | runs-on: ubuntu-latest 14 | permissions: 15 | contents: read 16 | packages: write 17 | 18 | steps: 19 | - name: Checkout repository 20 | uses: actions/checkout@v4 21 | 22 | - name: Log in to registry 23 | run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin 24 | - name: Run bash script 25 | run: bash kafka-connect/build.sh 26 | - name: Build image 27 | run: | 28 | docker build . --file kafka-connect/Dockerfile --tag eventstreams-kafka-connect:latest --label "latest" 29 | - name: Push image 30 | run: | 31 | IMAGE_ID=ghcr.io/${{ github.repository }}/eventstreams-kafka-connect:latest 32 | # This changes all uppercase characters to lowercase. 
33 | IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]') 34 | echo IMAGE_ID=$IMAGE_ID 35 | docker tag eventstreams-kafka-connect:latest $IMAGE_ID 36 | docker push $IMAGE_ID 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | rest-nodejs-express-sample/node_modules 2 | *.~lock.* 3 | *.classpath 4 | .project 5 | **.DS_Store 6 | 7 | *.iml 8 | .idea 9 | gradle* 10 | .gradle -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # event-streams-samples 2 | IBM Event Streams for IBM Cloud is a scalable, distributed, high throughput message bus to unite your on-premise and off-premise cloud technologies. You can wire micro-services together using open protocols, connect stream data to analytics to realize powerful insight and feed event data to multiple applications to react in real time. 3 | 4 | This repository is for samples which interact with the Event Streams for IBM Cloud service. 5 | Currently, there are samples for the Kafka and MQ Light APIs. 6 | Information and instructions regarding the use of these samples can be found in their respective directories. 7 | 8 | ## Aim of the Samples 9 | The aim of the samples is to help you get started with Event Streams for IBM Cloud within minutes. They are not production-ready applications but should give you useful pointers at how to build, package and deploy applications as well as how to make basic API calls against us with error handling. We did all the heavy lifting so you can focus on developing exciting code with value! 10 | 11 | ## Provisioning your Event Streams for IBM Cloud Cluster 12 | In order to provision an Event Streams for IBM Cloud cluster, please visit the [IBM Cloud® catalog](https://cloud.ibm.com/catalog/). 
Please also familiarise yourself with Event Streams for IBM Cloud and Apache Kafka basics and terminology. [Our documentation](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started) is a good starting point. 13 | 14 | ### Pricing plans 15 | IBM Event Streams can be provisioned on IBM Cloud® in various pricing plans. Please refer to our [documentation](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-plan_choose#plan_choose) to help choose a plan that works for you. 16 | 17 | __Important Note__: Provisioning an Event Streams service in IBM Cloud® incurs a fee. Please review pricing before provisioning. The samples in this repository will create topic(s) on your behalf - creating a topic might also incur a fee. For more information, please consult the IBM Cloud® documentation if necessary. 18 | 19 | ## Connecting to your Event Streams for IBM Cloud Cluster 20 | In each sample, we demonstrate a single connection path for our Standard/Enterprise plans respectively. The aim was to get you started quickly. However, your client's needs might be different. Therefore we wrote a [guide](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-connecting#connecting) that discusses credential generation in detail and shows you all possible ways of doing this. 
21 | 22 | ## Our APIs and Sample Applications 23 | 24 | ### Kafka API (recommended): 25 | * [kafka-java-console-sample](/kafka-java-console-sample/README.md) : Sample Java console application using the Event Streams for IBM Cloud Kafka API 26 | * [kafka-java-liberty-sample](/kafka-java-liberty-sample/README.md) : Sample IBM Websphere Liberty profile application using the Event Streams for IBM Cloud Kafka API 27 | * [kafka-nodejs-console-sample](kafka-nodejs-console-sample/README.md) : Sample Node.js console application using the Event Streams for IBM Cloud Kafka API 28 | * [kafka-python-console-sample](/kafka-python-console-sample/README.md) : Sample Python console application using the Event Streams for IBM Cloud Kafka API 29 | * [kafka-connect](/kafka-connect/README.md) : Sample Docker image with Kafka Connect 30 | * [kafka-mirrormaker](/kafka-mirrormaker/README.md) : Sample Docker image with Kafka Mirror Maker 31 | 32 | ### Spring Kafka: 33 | * [spring kafka tutorial](https://developer.ibm.com/tutorials/use-spring-kafka-to-access-an-event-streams-service/) : Tutorial to quickly get you up and running using IBM Event Streams. 34 | * [spring-kafka](https://github.com/wkorando/event-stream-kafka) : Sample app to connect to Event Streams using Spring Kafka 35 | 36 | ### Utilities: 37 | * [kafka-topic-stats](/kafka-topic-stats/README.md): Utility for displaying Kafka topic usage and configuration. 38 | 39 | ## Get Further Assistance 40 | 41 | If you have any issues, just ask us a question (tagged with `ibm-eventstreams`) on [StackOverflow.com](http://stackoverflow.com/questions/tagged/ibm-eventstreams). 42 | 43 | 44 | For more information regarding IBM Event Streams for IBM Cloud, [view the documentation on IBM Cloud](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started). 
45 | -------------------------------------------------------------------------------- /docs/librdkafka.md: -------------------------------------------------------------------------------- 1 | # How to build librdkafka for use with IBM Event Streams for IBM Cloud 2 | 3 | [Librdkafka](https://github.com/edenhill/librdkafka) is a very good Kafka client written in C/C++. In fact, the clients we used in our Node.js and Python samples, node-rdkafka and confluent-kafka-python respectively, are actually languages bindings on top of librdkafka. 4 | 5 | Librdkafka is very flexible but in order to be able to work with Event Streams for IBM Cloud it needs to be built with the correct dependencies. 6 | 7 | ## Linux dependencies 8 | * openssl-dev 9 | * libsasl2-dev 10 | * libsasl2-modules 11 | * C++ toolchain 12 | 13 | ## macOS dependencies 14 | * [Brew](http://brew.sh/) 15 | * [Apple Xcode command line tools](https://developer.apple.com/xcode/) 16 | * `openssl` via Brew 17 | * Export `CPPFLAGS=-I/usr/local/opt/openssl/include` and `LDFLAGS=-L/usr/local/opt/openssl/lib` 18 | * Open Keychain Access, export all certificates in System Roots to a single .pem file 19 | -------------------------------------------------------------------------------- /java/message-hub-liberty-sample/lib-message-hub/README.md: -------------------------------------------------------------------------------- 1 | # event-streams-login-library 2 | 3 | This library is required to connect to IBM Event Streams for IBM Cloud __only__ when using the Kafka __0.9.x__ Java client. 4 | 5 | It is not needed any more to run the Java samples included in this repository. 
6 | -------------------------------------------------------------------------------- /java/message-hub-liberty-sample/lib-message-hub/messagehub.login-1.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ibm-messaging/event-streams-samples/cc6c4c4fc28ab4517bbe14fc356854c5057c650e/java/message-hub-liberty-sample/lib-message-hub/messagehub.login-1.0.0.jar -------------------------------------------------------------------------------- /java/message-hub-liberty-sample/lib-message-hub/messagehub.login-1.0.0.jar.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | Version: GnuPG v1 3 | 4 | iQEcBAABAgAGBQJWXHVJAAoJEEv2uTXLGdqSTEgIAIz/vgFHbiuPH8y7FkdjPov6 5 | agiRXP98cxUTq2NZiO8Wm4EasC96eYTdr6SvfVbF59RBwGepUSMe9PgsLL1QYFew 6 | kU+Jv2dNV0te2gRWfyrnJEqwjiyMdIzxjtbWSxMx2MsMoyZegXUsBahKtI9D0DnJ 7 | goaaYp/i3owAHFAQSWFrVHzP6yynEd1Uy2VcmkbjUFkK7YcVcNvJBTLPlaKs+3CK 8 | pNrC5TF0iRaP26WcU+SZ6YcngAPeigQsaKaTeFBgBzAPPxg2918qiYk0w/9H813Q 9 | 8E7+/ljnR1AntXhwjJWu0cQSF9/hzxc3WqisOj+MNdoYs/6i45zAYm6xso3VSJo= 10 | =HOc2 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /kafka-connect/Dockerfile: -------------------------------------------------------------------------------- 1 | # This dockerfile expects Connector jars to have been built under a `connectors` directory 2 | # 3 | FROM alpine as builder 4 | 5 | RUN apk update 6 | RUN apk --no-cache add curl 7 | 8 | RUN curl -L "https://downloads.apache.org/kafka/3.6.2/kafka_2.12-3.6.2.tgz" -o kafka.tgz 9 | RUN mkdir /opt/kafka \ 10 | && tar -xf kafka.tgz -C /opt/kafka --strip-components=1 11 | 12 | FROM openjdk:11 13 | 14 | RUN addgroup --gid 5000 --system esgroup && \ 15 | adduser --uid 5000 --ingroup esgroup --system esuser 16 | 17 | COPY --chown=esuser:esgroup --from=builder /opt/kafka/bin/ /opt/kafka/bin/ 18 | COPY 
--chown=esuser:esgroup --from=builder /opt/kafka/libs/ /opt/kafka/libs/ 19 | COPY --chown=esuser:esgroup --from=builder /opt/kafka/config/ /opt/kafka/config/ 20 | RUN mkdir /opt/kafka/logs && chown esuser:esgroup /opt/kafka/logs 21 | 22 | COPY --chown=esuser:esgroup connectors /opt/connectors 23 | 24 | WORKDIR /opt/kafka 25 | 26 | EXPOSE 8083 27 | 28 | USER esuser 29 | 30 | ENTRYPOINT ["./bin/connect-distributed.sh", "config/connect-distributed.properties"] 31 | -------------------------------------------------------------------------------- /kafka-connect/IKS/README.md: -------------------------------------------------------------------------------- 1 | 2 | ## Deploying `eventstreams-kafka-connect` to Kubernetes connecting to IBM Event Streams 3 | 4 | ### Prerequisites 5 | 6 | - `kubectl` access to a Kubernetes cluster. 7 | - Credentials for an IBM Event Streams instance that has the following permissions: 8 | - to create topics required by the Kafka Connect configuration (see `connect-distributed.properties`) 9 | - to read/write to the topics accessed by the Connectors 10 | 11 | ### Configure Kafka Connect 12 | 13 | Edit `connect-distributed.properties` replacing the `` and `` placeholders with your Event Streams credentials. 14 | 15 | Create the following Kubernetes resources: 16 | 17 | ```shell 18 | kubectl create secret generic connect-distributed-config --from-file=connect-distributed.properties 19 | kubectl create configmap connect-log4j-config --from-file=connect-log4j.properties 20 | ``` 21 | 22 | ### Run Kafka Connect in distributed mode in your Kubernetes cluster 23 | 24 | Deploy the `event-streams-samples/eventstreams-kafka-connect` Docker image: 25 | 26 | ```shell 27 | kubectl apply -f ./kafka-connect.yaml 28 | ``` 29 | Note that the sample yaml file specifies a single replica. Edit the `replicas` field if you want to run multiple Connect workers. 30 | Also, note that affinity rules might be needed to spread the workers across nodes. 
31 | 32 | ### Manage Connectors 33 | 34 | To manage connectors, port forward to the `kafkaconnect-service` Service on port 8083: 35 | 36 | ```shell 37 | kubectl port-forward service/kafkaconnect-service 8083 38 | ``` 39 | 40 | The Connect REST API is then available via `http://localhost:8083`. 41 | The Connect REST API is documented at https://kafka.apache.org/documentation/#connect_rest 42 | 43 | ### Run Connectors 44 | 45 | When the Kafka Connect runtime is running, see the instructions for running the connectors: 46 | - [Run the COS Sink connector](https://github.com/ibm-messaging/kafka-connect-ibmcos-sink#running-the-connector) 47 | - [Run the MQ Source connector](https://github.com/ibm-messaging/kafka-connect-mq-source#running-the-connector) 48 | -------------------------------------------------------------------------------- /kafka-connect/IKS/connect-distributed.properties: -------------------------------------------------------------------------------- 1 | ## 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | ## 17 | 18 | # This file contains some of the configurations for the Kafka Connect distributed worker. 
This file is intended 19 | # to be used with the examples, and some settings may differ from those used in a production system, especially 20 | # the `bootstrap.servers` and those specifying replication factors. 21 | 22 | # A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. 23 | bootstrap.servers= 24 | security.protocol=SASL_SSL 25 | sasl.mechanism=PLAIN 26 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="token" password=""; 27 | 28 | consumer.security.protocol=SASL_SSL 29 | consumer.sasl.mechanism=PLAIN 30 | consumer.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="token" password=""; 31 | 32 | producer.security.protocol=SASL_SSL 33 | producer.sasl.mechanism=PLAIN 34 | producer.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="token" password=""; 35 | 36 | plugin.path=/opt/connectors 37 | 38 | # unique name for the cluster, used in forming the Connect cluster group. Note that this must not conflict with consumer group IDs 39 | group.id=connect-cluster 40 | 41 | # The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will 42 | # need to configure these based on the format they want their data in when loaded from or stored into Kafka 43 | key.converter=org.apache.kafka.connect.json.JsonConverter 44 | value.converter=org.apache.kafka.connect.json.JsonConverter 45 | # Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply 46 | # it to 47 | key.converter.schemas.enable=true 48 | value.converter.schemas.enable=true 49 | 50 | # Topic to use for storing offsets. This topic should have many partitions and be replicated and compacted. 
51 | # Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create 52 | # the topic before starting Kafka Connect if a specific topic configuration is needed. 53 | # Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value. 54 | # Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able 55 | # to run this example on a single-broker cluster and so here we instead set the replication factor to 1. 56 | offset.storage.topic=connect-offsets 57 | offset.storage.replication.factor=3 58 | #offset.storage.partitions=25 59 | 60 | # Topic to use for storing connector and task configurations; note that this should be a single partition, highly replicated, 61 | # and compacted topic. Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create 62 | # the topic before starting Kafka Connect if a specific topic configuration is needed. 63 | # Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value. 64 | # Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able 65 | # to run this example on a single-broker cluster and so here we instead set the replication factor to 1. 66 | config.storage.topic=connect-configs 67 | config.storage.replication.factor=3 68 | 69 | # Topic to use for storing statuses. This topic can have multiple partitions and should be replicated and compacted. 70 | # Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create 71 | # the topic before starting Kafka Connect if a specific topic configuration is needed. 72 | # Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value. 
73 | # Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able 74 | # to run this example on a single-broker cluster and so here we instead set the replication factor to 1. 75 | status.storage.topic=connect-status 76 | status.storage.replication.factor=3 77 | #status.storage.partitions=5 78 | 79 | # Flush much faster than normal, which is useful for testing/debugging 80 | offset.flush.interval.ms=10000 81 | -------------------------------------------------------------------------------- /kafka-connect/IKS/connect-log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=INFO, stdout 2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 3 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 4 | log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n 5 | log4j.logger.org.apache.zookeeper=ERROR 6 | log4j.logger.org.I0Itec.zkclient=ERROR 7 | log4j.logger.org.reflections=ERROR 8 | # switch to TRACE for debugging the IBM connectors 9 | log4j.logger.com.ibm.eventstreams=INFO 10 | -------------------------------------------------------------------------------- /kafka-connect/IKS/kafka-connect.yaml: -------------------------------------------------------------------------------- 1 | # Deployment 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: kafkaconnect-deploy 6 | labels: 7 | app: kafkaconnect 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | app: kafkaconnect 13 | template: 14 | metadata: 15 | namespace: default 16 | labels: 17 | app: kafkaconnect 18 | spec: 19 | securityContext: 20 | runAsNonRoot: true 21 | runAsUser: 5000 22 | containers: 23 | - name: kafkaconnect-container 24 | image: event-streams-samples/eventstreams-kafka-connect:latest 25 | readinessProbe: 26 | httpGet: 27 | path: / 28 | port: 8083 29 | livenessProbe: 30 | httpGet: 31 | path: / 32 | port: 8083 33 | ports: 34 | - 
containerPort: 8083 35 | volumeMounts: 36 | - name: connect-config 37 | mountPath: /opt/kafka/config/connect-distributed.properties 38 | subPath: connect-distributed.properties 39 | - name: connect-log4j 40 | mountPath: /opt/kafka/config/connect-log4j.properties 41 | subPath: connect-log4j.properties 42 | volumes: 43 | - name: connect-config 44 | secret: 45 | secretName: connect-distributed-config 46 | - name: connect-log4j 47 | configMap: 48 | name: connect-log4j-config 49 | --- 50 | # Service 51 | apiVersion: v1 52 | kind: Service 53 | metadata: 54 | name: kafkaconnect-service 55 | labels: 56 | app: kafkaconnect 57 | spec: 58 | ports: 59 | - name: kafkaconnect 60 | protocol: TCP 61 | port: 8083 62 | selector: 63 | app: kafkaconnect 64 | -------------------------------------------------------------------------------- /kafka-connect/README.md: -------------------------------------------------------------------------------- 1 | # kafka-connect 2 | 3 | This repository contains the artifacts required to build the `event-streams-samples/eventstreams-kafka-connect` Docker image. 4 | 5 | This image contains the Kafka Connect runtime and the [IBM Cloud Object Storage sink connector](https://github.com/ibm-messaging/kafka-connect-ibmcos-sink) and the [IBM MQ source connector](https://github.com/ibm-messaging/kafka-connect-mq-source). 6 | 7 | A prebuilt image is provided on Github Packages, you can use the following command to pull the image: 8 | 9 | ```docker pull ghcr.io/ibm-messaging/event-streams-samples/eventstreams-kafka-connect:latest 10 | ``` 11 | 12 | ## Running the image in Kubernetes 13 | 14 | Instructions for running the `event-streams-samples/eventstreams-kafka-connect` image in Kubernetes can be found [here](IKS/README.md). 15 | 16 | ## Building the image 17 | 18 | In case you don't want to use the image we provide, you can build an image by completing these steps: 19 | 20 | 1. 
Run the `build.sh` script to download and compile the connectors: 21 | ```shell 22 | ./build.sh 23 | ``` 24 | 25 | 2. Build the docker image: 26 | ```shell 27 | docker build . 28 | ``` 29 | If you want to use the sample [YAML file](IKS/kafka-connect.yaml), be sure to update the image name with your own image name. 30 | -------------------------------------------------------------------------------- /kafka-connect/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Builds the Connectors for the Dockerfile requires Maven, git and gradle 3 | set -e 4 | 5 | # Cleanup 6 | rm -rf ./connectors/* 7 | rm -rf ./kafka-connect-ibmcos-sink 8 | rm -rf ./kafka-connect-mq-source 9 | mkdir -p ./connectors 10 | 11 | # Build COS Sink 12 | ( 13 | git clone https://github.com/ibm-messaging/kafka-connect-ibmcos-sink 14 | cd kafka-connect-ibmcos-sink 15 | gradle clean shadowJar 16 | cp ./build/libs/kafka-connect-ibmcos-sink-*-all.jar ../connectors 17 | ) 18 | 19 | # Build MQ Source 20 | ( 21 | git clone https://github.com/ibm-messaging/kafka-connect-mq-source 22 | cd kafka-connect-mq-source 23 | mvn clean package 24 | cp ./target/kafka-connect-mq-source-*-jar-with-dependencies.jar ../connectors 25 | ) 26 | -------------------------------------------------------------------------------- /kafka-java-console-sample/.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | /bin 3 | /lib 4 | .gradle 5 | /.settings/ 6 | 7 | -------------------------------------------------------------------------------- /kafka-java-console-sample/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM gradle:jdk11-alpine as jdk 2 | 3 | COPY --chown=1000 . 
/usr/src/app 4 | WORKDIR /usr/src/app 5 | 6 | COPY build.gradle settings.gradle /usr/src/app/ 7 | COPY src /usr/src/app/src 8 | 9 | RUN gradle -s --no-daemon assemble 10 | 11 | FROM openjdk:11 12 | 13 | COPY --from=jdk /usr/src/app/build/libs/kafka-java-console-sample-2.0.jar /usr/src/app/ 14 | 15 | USER 1000 16 | 17 | ENTRYPOINT ["java", "-jar", "/usr/src/app/kafka-java-console-sample-2.0.jar"] 18 | -------------------------------------------------------------------------------- /kafka-java-console-sample/README.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Kafka Java console sample application 2 | This Java console application demonstrates how to connect to [IBM Event Streams for IBM Cloud](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started), send and receive messages using the [Kafka](https://kafka.apache.org) Java API. It also shows how to create topics using the Kafka Admin API. 3 | 4 | It can be run locally on your machine or deployed into [IBM Cloud](https://cloud.ibm.com/). 5 | 6 | For help with additional deployment modes, please refer to our [connection guide](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-connecting#connecting). 7 | 8 | __Important Note__: This sample creates a topic with one partition on your behalf. On the Standard plan, this will incur a fee if the topic does not already exist. 
9 | 10 | ## Running the application 11 | 12 | The application can be run in the following environments: 13 | 14 | * [IBM Cloud Kubernetes Service](./docs/Kubernetes_Service.md) 15 | * [IBM Cloud Foundry](./docs/Cloud_Foundry.md) 16 | * [Docker Local](./docs/Docker_Local.md) 17 | * [Local Development](./docs/Local.md) 18 | 19 | -------------------------------------------------------------------------------- /kafka-java-console-sample/build.gradle: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | // 17 | // Licensed Materials - Property of IBM 18 | // © Copyright IBM Corp. 
2015-2018 19 | // 20 | plugins { 21 | id 'application' 22 | id 'java' 23 | id 'eclipse' 24 | } 25 | 26 | group = 'com.ibm.eventstreams.samples' 27 | version = '2.0' 28 | mainClassName = 'com.eventstreams.samples.EventStreamsConsoleSample' 29 | 30 | sourceCompatibility = 1.8 31 | targetCompatibility = 1.8 32 | 33 | repositories { 34 | mavenCentral() 35 | } 36 | 37 | dependencies { 38 | implementation 'org.apache.kafka:kafka-clients:2.7.+' 39 | implementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.17.+' 40 | implementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.17.+' 41 | implementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.17.+' 42 | implementation 'com.fasterxml.jackson.core:jackson-databind:2.9.+' 43 | } 44 | 45 | jar { 46 | duplicatesStrategy = DuplicatesStrategy.INCLUDE 47 | manifest { 48 | attributes('Main-Class': mainClassName) 49 | } 50 | from { 51 | configurations.compileClasspath.filter{ it.exists() }.collect { it.isDirectory() ? it : zipTree(it) } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /kafka-java-console-sample/docs/CF_Standard_Enterprise_Plan.md: -------------------------------------------------------------------------------- 1 | # IBM Cloud Foundry deployment to an Standard/Enterprise Plan Event Streams for IBM Cloud 2 | 3 | ## Overview 4 | 5 | To deploy and run the sample: 6 | * Create a Cloud Foundry Service Alias for your Standard/Enterprise Service 7 | * Setup your `manifest.yml` with your service details 8 | * Use `ibmcloud cf push --no-start` to deploy the app to IBM Cloud Foundry 9 | * Re-configure binding with Manager role 10 | * Start the app 11 | * Inspect the application's logs 12 | 13 | ## Set up a Cloud Foundry Service Alias 14 | Before continuing, connect to IBM Cloud with the [IBM Cloud command line interface](https://cloud.ibm.com/docs/cli?topic=cloud-cli-ibmcloud-cli). 
15 | 16 | The Standard/Enterprise plan is IAM enabled. Therefore the following extra step is required to create a Cloud Foundry alias for your Service: 17 | 18 | Create a Cloud Foundry alias for your service's associated CRN: 19 | ```shell 20 | ibmcloud resource service-alias-create --instance-name 21 | ``` 22 | 23 | Creating this alias associates your Service with a Cloud Foundry Organization and Space, thereby enabling your Cloud Foundry application to reference it and connect to it. 24 | 25 | ## Setup the manifest.yml 26 | 27 | 1. Select the Event Streams for IBM Cloud service you would like to bind your application to. Do this by replacing `` with your service instance alias name in `manifest.yml`: 28 | ```yaml 29 | services: 30 | - "" 31 | ``` 32 | 2. Consider your domain: You might need to change this in the `manifest.yml` as the domain varies by IBM Cloud region. If unsure, just delete the domain line and IBM Cloud will pick the domain for you. 33 | 34 | ## Build the Sample 35 | Build the project using gradle: 36 | ```shell 37 | gradle clean build 38 | ``` 39 | 40 | The command above creates a zip file under `build/distributions`. 41 | 42 | ## Deploy the Application 43 | 44 | Push the app without starting it immediately by running the following command in the same directory as the `manifest.yml` file: 45 | ```shell 46 | ibmcloud app push --no-start 47 | ``` 48 | 49 | ## Re-configure the binding 50 | A binding between your app and service-alias is created for you automatically, but by default does not have permissions to create topics. 
This means that we need to delete the existing binding and create a new one with the correct role: 51 | 52 | ``` 53 | ibmcloud resource service-binding-delete kafka-java-console-sample 54 | ibmcloud resource service-binding-create kafka-java-console-sample Manager 55 | ``` 56 | 57 | ## Start the app 58 | Now it should be safe to start the application: 59 | ```shell 60 | ibmcloud app start kafka-java-console-sample 61 | ``` 62 | 63 | ## Produce and Consume Messages 64 | The sample application should have created the default sample topic and started producing and consuming messages in an infinite loop. View the logs to verify this: 65 | ```shell 66 | ibmcloud app logs kafka-java-console-sample 67 | ``` 68 | -------------------------------------------------------------------------------- /kafka-java-console-sample/docs/Cloud_Foundry.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in IBM Cloud Foundry 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have the done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cloud-cli-install-ibmcloud-cli) 9 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 10 | * Install [Gradle 4+](https://gradle.org/) 11 | * Install Java 7+ 12 | 13 | ## Standard/Enterprise Plan? 14 | 15 | **It's important to know which Event Streams for IBM Cloud plan you're using as the sample deployment steps are subtly different on each plan respectively.** 16 | 17 | By this point, you should have an Event Streams for IBM Cloud instance provisioned. If you haven't done this step yet, please refer to the main [readme](../README.md). 
18 | 19 | If you are not sure what type of Event Streams for IBM Cloud instance you have then you can find this information out by visiting IBM Cloud's web console [dashboard](https://cloud.ibm.com/resources). 20 | 21 | *Please make sure you are in the appropriate Region, Account, Organization and Space where you provisioned your Event Streams instance!* 22 | 23 | * Event Streams for IBM Cloud Standard plan services are "Services" with the plan column showing "Standard". 24 | * Event Streams for IBM Cloud Enterprise plan services are "Services" with the plan column showing "Enterprise". 25 | 26 | 27 | ## Deploy the Application 28 | 29 | The deployment for the Standard/Enterprise Plan can be found in the links listed below 30 | 31 | ### [Classic Plan Deployment Guide](CF_Classic_Plan.md) 32 | 33 | ### [Standard/Enterprise Plan Deployment Guide](CF_Standard_Enterprise_Plan.md) 34 | 35 | 36 | ## Further references 37 | 38 | If you want to find out more about Cloud Foundry applications then check the following documents: 39 | 40 | [Cloud Foundry manifest documentation](http://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html) 41 | 42 | 43 | 44 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /kafka-java-console-sample/docs/Docker_Local.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in Docker Locally 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 9 | * Install [Docker](https://docs.docker.com/install/) 10 | 11 | ## Run the Application 12 | 13 | 1. Build the container image from the `Dockerfile`: 14 | ```shell 15 | docker build -t java-console-sample . 16 | ``` 17 | 18 | 2. 
Export the Event Streams for IBM Cloud instance credentials: 19 | 20 | From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content and export it as below: 21 | ```shell 22 | export VCAP_SERVICES='{ 23 | "instance_id": "...", 24 | "api_key": "...", 25 | "kafka_admin_url": "....", 26 | "kafka_rest_url": "...", 27 | "kafka_brokers_sasl": [ 28 | ... 29 | ], 30 | "user": "...", 31 | "password": "..." 32 | }' 33 | ``` 34 | 35 | 3. Run the container image 36 | ```shell 37 | docker run -e VCAP_SERVICES="$VCAP_SERVICES" java-console-sample 38 | ``` 39 | 40 | ## Further references 41 | 42 | If you want to find out more about Docker then check the following document: 43 | 44 | [Docker documentation](https://docs.docker.com/install/overview/) 45 | -------------------------------------------------------------------------------- /kafka-java-console-sample/docs/Kubernetes_Service.md: -------------------------------------------------------------------------------- 1 | # Running in IBM Cloud Kubernetes Service 2 | 3 | ## Prerequisites 4 | To build and run the sample, you must have done the following: 5 | 6 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 7 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli/reference/bluemix_cli?topic=cloud-cli-install-ibmcloud-cli) 8 | * Install the [Kubernetes CLI](https://kubernetes.io/docs/tasks/tools/install-kubectl/) 9 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 10 | * Provision a [Kubernetes Service instance](https://cloud.ibm.com/kubernetes/catalog/cluster) in [IBM Cloud®](https://cloud.ibm.com/) 11 | 12 | 13 | ## Deploy the Application 14 | 15 | 1. From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. 
Copy its content, create a file `credentials.json` and paste the content. 16 | 2. To deploy the application you first need to bind the Event Streams for IBM Cloud service instance to the cluster. Create a secret using the content from the file `credentials.json` 17 | ```shell 18 | kubectl create secret generic eventstreams-binding --from-file=binding=credentials.json 19 | ``` 20 | The command above creates a secret in your cluster named `eventstreams-binding`. 21 | 3. [Configure the CLI to run kubectl](https://cloud.ibm.com/docs/containers?topic=containers-cs_cli_install#cs_cli_configure) 22 | 23 | 4. Deploy the application in the cluster: 24 | ```shell 25 | kubectl apply -f kafka-java-console-sample.yaml 26 | ``` 27 | 5. Access the application logs: 28 | ```shell 29 | kubectl wait pod kafka-java-console-sample --for=condition=Ready 30 | kubectl logs kafka-java-console-sample --follow 31 | ``` 32 | 33 | ## Further references 34 | 35 | If you want find out more about IBM Cloud Kubernetes Service or Kubernetes then check the following documents: 36 | 37 | [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/container-service) 38 | 39 | [Kubernetes Documentation](https://kubernetes.io/docs/home/) 40 | 41 | 42 | -------------------------------------------------------------------------------- /kafka-java-console-sample/docs/Local.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Kafka Java console sample application: Local Development guide 2 | As pushing the application into IBM Cloud® does not require you to build the application locally, this guide is here to guide you through the process, should you wish to build the application locally. 3 | 4 | We will not discuss establishing a connection from your laptop to Event Streams for IBM Cloud. This is described in the [connection guide](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-connecting#connecting). 
5 | 6 | ## Prerequisites 7 | 8 | 1. **If you don't already have one, create an Event Streams service instance.** 9 | 10 | 1. Log in to the IBM Cloud console. 11 | 12 | 2. Click **Catalog**. 13 | 14 | 3. From the navigation pane, click **Integration**, click the **Event Streams** tile, and then select the **Lite plan**. The Event Streams service instance page opens. 15 | 16 | 4. Enter a name for your service. You can use the default value. 17 | 18 | 5. Click **Create**. The Event Streams **Getting started** page opens. 19 | 20 | 2. **If you don't already have them, install the following prerequisites:** 21 | 22 | * [git](https://git-scm.com/) 23 | * [Gradle 8 or higher](https://gradle.org/) 24 | * Java 8 or higher 25 | 26 | 27 | 28 | ## Steps to Build the Sample 29 | 30 | ### 1. **Create a topic** 31 | 32 | The topic is the core of Event Streams flows. Data passes through a topic from producing applications to consuming applications. 33 | 34 | We'll be using the IBM Cloud console (UI) to create the topic, and will reference it when starting the application. 35 | 36 | 1. Go to the **Topics** tab. 37 | 38 | 2. Click **New topic**. 39 | 40 | 3. Name your topic. 41 | 42 | > The sample application is configured to connect to topic `kafka-java-console-sample-topic`. If the topic does not exist, it is created when the application is started. 43 | 44 | 4. Keep the defaults set in the rest of the topic creation, click **Next** and then **Create topic**. 45 | 46 | 5. The topic appears in the table. Congratulations, you have created a topic! 47 | 48 | --- 49 | 50 | ### 2. **Create credentials** 51 | 52 | To allow the sample application to access your topic, we need to create some credentials for it. 53 | 54 | 1. Go to **Service credentials** in the navigation pane. 55 | 56 | 2. Click **New credential**. 57 | 58 | 3. Give the credential a name so you can identify its purpose later. You can accept the default value. 59 | 60 | 4. 
Give the credential the **Manager** role so that it can access the topics, and create them if necessary. 61 | 62 | 5. Click **Add**. The new credential is listed in the table in **Service credentials**. 63 | 64 | 6. Click **View credentials** to see the `api_key` and `kafka_brokers_sasl` values. 65 | 66 | --- 67 | 68 | ### 3. **Clone the Github repository for the sample application** 69 | 70 | The sample application is stored here. Clone the `event-streams-samples` repository by running the clone command from the command line. 71 | 72 | ``` 73 | git clone https://github.com/ibm-messaging/event-streams-samples.git 74 | ``` 75 | 76 |
77 | When the repository is cloned, from the command line change into the kafka-java-console-sample directory. 78 | 79 | ``` 80 | cd event-streams-samples/kafka-java-console-sample 81 | ``` 82 | 83 |
84 | Build the contents of the kafka-java-console-sample directory. 85 | 86 | ``` 87 | gradle clean && gradle build 88 | ``` 89 | --- 90 | 91 | ### 4. **Run the consuming application** 92 | 93 | Start the sample consuming application from the command line, replacing the `kafka_brokers_sasl` and `api_key` values. 94 | 95 | The `java -jar ./build/libs/kafka-java-console-sample-2.0.jar` part of the command identifies the locations of the .JAR file to run within the cloned repository. You do not need to change this. 96 | 97 | Use the `kafka_brokers_sasl` from the **Service credentials** created in Step 2. We recommend using all the `kafka_brokers_sasl` listed in the **Service credentials** that you created. 98 | 99 | >The `kafka_brokers_sasl` must be formatted as `"host:port,host2:port2"`.
Format the contents of `kafka_brokers_sasl` in a text editor before entering it in the command line. 100 | 101 | Then, use the `api_key` from the **Service credentials** created in Step 2. `-consumer` specifies that the consumer should start. 102 | 103 | ``` 104 | java -jar ./build/libs/kafka-java-console-sample-2.0.jar 105 | -consumer 106 | ``` 107 | 108 | An `INFO No messages consumed` is displayed when the consuming application is running, but there is no data being consumed. 109 | 110 | --- 111 | 112 | ### 5. **Run the producing application** 113 | 114 | Open a new command line window and change into the kafka-java-console-sample directory. 115 | 116 | ``` 117 | cd event-streams-samples/kafka-java-console-sample 118 | ``` 119 | 120 | Then, start the sample producing application from the command line, replacing the `kafka_brokers_sasl` and `api_key` values with the same ones used to run the consumer. 121 | 122 | Use the `api_key` from the **Service credentials** created in Step 2. `-producer` specifies that the producer should start. 123 | 124 | ``` 125 | java -jar ./build/libs/kafka-java-console-sample-2.0.jar 126 | -producer 127 | ``` 128 | 129 | --- 130 | 131 | ### 6. **Success!** 132 | 133 | When the producer starts, messages are produced to the topic. Messages are then consumed from the topic by the consuming application. 134 | You can verify the successful flow of messages when you see `INFO Message consumed` from the consumer. 135 | 136 | The sample runs indefinitely until you stop it. To stop the process, run an exit command `Ctrl+C`. 
137 | -------------------------------------------------------------------------------- /kafka-java-console-sample/kafka-java-console-sample.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: kafka-java-console-sample 5 | namespace: default 6 | spec: 7 | containers: 8 | - image: event-streams-samples/kafka-java-console-sample:latest 9 | imagePullPolicy: Always 10 | name: java-sample 11 | env: 12 | - name: VCAP_SERVICES 13 | valueFrom: 14 | secretKeyRef: 15 | name: eventstreams-binding 16 | key: binding 17 | -------------------------------------------------------------------------------- /kafka-java-console-sample/manifest.yml: -------------------------------------------------------------------------------- 1 | applications: 2 | - name: kafka-java-console-sample 3 | buildpack: "https://github.com/cloudfoundry/java-buildpack.git" 4 | memory: 1G 5 | disk_quota: 512M 6 | path: build/distributions/kafka-java-console-sample-2.0.zip 7 | no-route: true 8 | health-check-type: none 9 | services: 10 | - "" 11 | -------------------------------------------------------------------------------- /kafka-java-console-sample/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = "kafka-java-console-sample" 2 | -------------------------------------------------------------------------------- /kafka-java-console-sample/src/main/java/com/eventstreams/samples/ArgumentParser.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 2015-2018 19 | */ 20 | package com.eventstreams.samples; 21 | 22 | import java.util.Arrays; 23 | import java.util.HashMap; 24 | import java.util.HashSet; 25 | import java.util.LinkedList; 26 | import java.util.Map; 27 | import java.util.Queue; 28 | import java.util.Set; 29 | 30 | class ArgumentParser { 31 | 32 | static class ArgumentParserBuilder { 33 | private final Set flags = new HashSet<>(); 34 | private final Set options = new HashSet<>(); 35 | 36 | private ArgumentParserBuilder() {} 37 | 38 | ArgumentParserBuilder flag(final String name) { 39 | flags.add(name); 40 | return this; 41 | } 42 | 43 | ArgumentParserBuilder option(final String name) { 44 | options.add(name); 45 | return this; 46 | } 47 | 48 | ArgumentParser build() { 49 | return new ArgumentParser(this); 50 | } 51 | 52 | private Set flags() { 53 | return flags; 54 | } 55 | 56 | private Set options() { 57 | return options; 58 | } 59 | } 60 | 61 | private final Set flags; 62 | private final Set options; 63 | 64 | private ArgumentParser(final ArgumentParserBuilder builder) { 65 | this.flags = builder.flags(); 66 | this.options = builder.options(); 67 | } 68 | 69 | static ArgumentParserBuilder builder() { 70 | return new ArgumentParserBuilder(); 71 | } 72 | 73 | Map parseArguments(final String... 
argArray) throws IllegalArgumentException { 74 | final Queue args = new LinkedList<>(Arrays.asList(argArray)); 75 | final Map result = new HashMap<>(); 76 | while (!args.isEmpty()) { 77 | final String arg = args.poll(); 78 | if (flags.contains(arg)) { 79 | result.put(arg, ""); 80 | } else if (options.contains(arg)) { 81 | final String value = args.poll(); 82 | if (value == null) { 83 | throw new IllegalArgumentException("Command line argument '" + arg 84 | + "' must be followed by another argument"); 85 | } 86 | result.put(arg, value); 87 | } else { 88 | throw new IllegalArgumentException("Unexpected command line argument: " + arg); 89 | } 90 | } 91 | return result; 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /kafka-java-console-sample/src/main/java/com/eventstreams/samples/ConsumerRunnable.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 
2015-2016 19 | */ 20 | package com.eventstreams.samples; 21 | 22 | import java.time.Duration; 23 | import java.util.Arrays; 24 | import java.util.List; 25 | import java.util.Map; 26 | 27 | import org.apache.kafka.clients.consumer.ConsumerRecord; 28 | import org.apache.kafka.clients.consumer.ConsumerRecords; 29 | import org.apache.kafka.clients.consumer.KafkaConsumer; 30 | import org.apache.kafka.common.KafkaException; 31 | import org.apache.kafka.common.PartitionInfo; 32 | import org.apache.kafka.common.errors.WakeupException; 33 | import org.slf4j.Logger; 34 | import org.slf4j.LoggerFactory; 35 | 36 | public class ConsumerRunnable implements Runnable { 37 | private static final Logger logger = LoggerFactory.getLogger(ConsumerRunnable.class); 38 | 39 | private final KafkaConsumer kafkaConsumer; 40 | private volatile boolean closing = false; 41 | 42 | public ConsumerRunnable(Map consumerConfigs, String topic) { 43 | // Create a Kafka consumer with the provided client configuration 44 | kafkaConsumer = new KafkaConsumer<>(consumerConfigs); 45 | 46 | // Checking for topic existence before subscribing 47 | List partitions = kafkaConsumer.partitionsFor(topic); 48 | if (partitions == null || partitions.isEmpty()) { 49 | logger.error("Topic '{}' does not exists - application will terminate", topic); 50 | kafkaConsumer.close(Duration.ofSeconds(5L)); 51 | throw new IllegalStateException("Topic '" + topic + "' does not exists - application will terminate"); 52 | } else { 53 | logger.info(partitions.toString()); 54 | } 55 | 56 | kafkaConsumer.subscribe(Arrays.asList(topic)); 57 | } 58 | 59 | @Override 60 | public void run() { 61 | logger.info("{} is starting.", ConsumerRunnable.class); 62 | 63 | try { 64 | while (!closing) { 65 | try { 66 | // Poll on the Kafka consumer, waiting up to 3 secs if there's nothing to consume. 
67 | ConsumerRecords records = kafkaConsumer.poll(Duration.ofMillis(3000L)); 68 | 69 | if (records.isEmpty()) { 70 | logger.info("No messages consumed"); 71 | } else { 72 | // Iterate through all the messages received and print their content 73 | for (ConsumerRecord record : records) { 74 | logger.info("Message consumed: {}", record); 75 | } 76 | } 77 | 78 | } catch (final WakeupException e) { 79 | logger.warn("Consumer closing - caught exception: {}", e, e); 80 | } catch (final KafkaException e) { 81 | logger.error("Sleeping for 5s - Consumer has caught: {}", e, e); 82 | try { 83 | Thread.sleep(5000); // Longer sleep before retrying 84 | } catch (InterruptedException e1) { 85 | logger.warn("Consumer closing - caught exception: {}", e, e); 86 | } 87 | } 88 | } 89 | } finally { 90 | kafkaConsumer.close(Duration.ofSeconds(5L)); 91 | logger.info("{} has shut down.", ConsumerRunnable.class); 92 | } 93 | } 94 | 95 | public void shutdown() { 96 | closing = true; 97 | kafkaConsumer.wakeup(); 98 | logger.info("{} is shutting down.", ConsumerRunnable.class); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /kafka-java-console-sample/src/main/java/com/eventstreams/samples/ProducerRunnable.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 2015-2016 19 | */ 20 | package com.eventstreams.samples; 21 | 22 | import java.time.Duration; 23 | import java.util.List; 24 | import java.util.Map; 25 | import java.util.concurrent.Future; 26 | import java.util.concurrent.TimeUnit; 27 | 28 | import org.apache.kafka.clients.producer.KafkaProducer; 29 | import org.apache.kafka.clients.producer.ProducerRecord; 30 | import org.apache.kafka.clients.producer.RecordMetadata; 31 | import org.apache.kafka.common.PartitionInfo; 32 | import org.apache.kafka.common.errors.TimeoutException; 33 | import org.slf4j.Logger; 34 | import org.slf4j.LoggerFactory; 35 | 36 | 37 | public class ProducerRunnable implements Runnable { 38 | private static final Logger logger = LoggerFactory.getLogger(ProducerRunnable.class); 39 | 40 | private final KafkaProducer kafkaProducer; 41 | private final String topic; 42 | private volatile boolean closing = false; 43 | 44 | public ProducerRunnable(Map producerConfigs, String topic) { 45 | this.topic = topic; 46 | 47 | // Create a Kafka producer with the provided client configuration 48 | kafkaProducer = new KafkaProducer<>(producerConfigs); 49 | 50 | try { 51 | // Checking for topic existence. 
52 | // If the topic does not exist, the kafkaProducer will retry for about 60 secs 53 | // before throwing a TimeoutException 54 | // see configuration parameter 'metadata.fetch.timeout.ms' 55 | List partitions = kafkaProducer.partitionsFor(topic); 56 | logger.info(partitions.toString()); 57 | } catch (TimeoutException kte) { 58 | logger.error("Topic '{}' may not exist - application will terminate", topic); 59 | kafkaProducer.close(Duration.ofSeconds(5L)); 60 | throw new IllegalStateException("Topic '" + topic + "' may not exist - application will terminate", kte); 61 | } 62 | } 63 | 64 | @Override 65 | public void run() { 66 | // Simple counter for messages sent 67 | int producedMessages = 0; 68 | logger.info("{} is starting.", ProducerRunnable.class); 69 | 70 | try { 71 | while (!closing) { 72 | String key = "key"; 73 | String message = "{\"message\":\"This is a test message #\",\"message_number\":" + producedMessages + "}"; 74 | 75 | try { 76 | // If a partition is not specified, the client will use the default partitioner to choose one. 77 | ProducerRecord record = new ProducerRecord<>(topic,key,message); 78 | 79 | // Send record asynchronously 80 | Future future = kafkaProducer.send(record); 81 | 82 | // Synchronously wait for a response from Event Streams / Kafka on every message produced. 83 | // For high throughput the future should be handled asynchronously. 
84 | RecordMetadata recordMetadata = future.get(5000, TimeUnit.MILLISECONDS); 85 | producedMessages++; 86 | 87 | logger.info("Message produced, offset: {}", recordMetadata.offset()); 88 | 89 | // Short sleep for flow control in this sample app 90 | // to make the output easily understandable 91 | Thread.sleep(2000L); 92 | 93 | } catch (final InterruptedException e) { 94 | logger.warn("Producer closing - caught exception: {}", e, e); 95 | } catch (final Exception e) { 96 | logger.error("Sleeping for 5s - Producer has caught : {}", e, e); 97 | try { 98 | Thread.sleep(5000L); // Longer sleep before retrying 99 | } catch (InterruptedException e1) { 100 | logger.warn("Producer closing - caught exception: {}", e, e); 101 | } 102 | } 103 | } 104 | } finally { 105 | kafkaProducer.close(Duration.ofSeconds(5L)); 106 | logger.info("{} has shut down.", ProducerRunnable.class); 107 | } 108 | } 109 | 110 | public void shutdown() { 111 | closing = true; 112 | logger.info("{} is shutting down.", ProducerRunnable.class); 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /kafka-java-console-sample/src/main/java/com/eventstreams/samples/env/Environment.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 2018 19 | */ 20 | package com.eventstreams.samples.env; 21 | import java.io.IOException; 22 | import java.util.Iterator; 23 | 24 | import org.apache.kafka.common.errors.IllegalSaslStateException; 25 | import org.slf4j.Logger; 26 | import org.slf4j.LoggerFactory; 27 | 28 | import com.fasterxml.jackson.databind.JsonNode; 29 | import com.fasterxml.jackson.databind.ObjectMapper; 30 | 31 | public class Environment { 32 | 33 | private static final Logger logger = LoggerFactory.getLogger(Environment.class); 34 | private static final String SERVICE_NAME = "messagehub"; 35 | 36 | public static EventStreamsCredentials getEventStreamsCredentials() { 37 | String vcapServices = System.getenv("VCAP_SERVICES"); 38 | logger.info("VCAP_SERVICES: \n{}", vcapServices); 39 | try { 40 | if (vcapServices != null) { 41 | JsonNode mhub = parseVcapServices(vcapServices); 42 | ObjectMapper mapper = new ObjectMapper(); 43 | return mapper.readValue(mhub.toString(), EventStreamsCredentials.class); 44 | } else { 45 | logger.error("VCAP_SERVICES environment variable is null."); 46 | throw new IllegalStateException("VCAP_SERVICES environment variable is null."); 47 | } 48 | } catch (IOException ioe) { 49 | logger.error("VCAP_SERVICES environment variable parsing failed."); 50 | throw new IllegalStateException("VCAP_SERVICES environment variable parsing failed.", ioe); 51 | } 52 | } 53 | 54 | private static JsonNode parseVcapServices(String vcapServices) throws IOException { 55 | ObjectMapper mapper = new ObjectMapper(); 56 | JsonNode vcapServicesJson = mapper.readValue(vcapServices, JsonNode.class); 57 | 58 | // when running in CloudFoundry VCAP_SERVICES is wrapped into a bigger JSON object 59 | // so it needs to be extracted. 
We attempt to read the "instance_id" field to identify 60 | // if it has been wrapped 61 | if (vcapServicesJson.get("instance_id") != null) { 62 | return vcapServicesJson; 63 | } else { 64 | String vcapKey = null; 65 | Iterator it = vcapServicesJson.fieldNames(); 66 | // Find the Event Streams service bound to this application. 67 | while (it.hasNext() && vcapKey == null) { 68 | String potentialKey = it.next(); 69 | if (potentialKey.startsWith(SERVICE_NAME)) { 70 | logger.warn("Using the '{}' key from VCAP_SERVICES.", potentialKey); 71 | vcapKey = potentialKey; 72 | } 73 | } 74 | 75 | if (vcapKey == null) { 76 | throw new IllegalSaslStateException("No Event Streams service bound"); 77 | } else { 78 | return vcapServicesJson.get(vcapKey).get(0).get("credentials"); 79 | } 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /kafka-java-console-sample/src/main/java/com/eventstreams/samples/env/EventStreamsCredentials.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 
2015-2016 19 | */ 20 | package com.eventstreams.samples.env; 21 | 22 | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; 23 | import com.fasterxml.jackson.annotation.JsonProperty; 24 | 25 | @JsonIgnoreProperties(ignoreUnknown=true) 26 | public class EventStreamsCredentials { 27 | 28 | private String apiKey, user, password; 29 | private String[] kafkaBrokersSasl; 30 | 31 | @JsonProperty("api_key") 32 | public String getApiKey() { 33 | return apiKey; 34 | } 35 | 36 | @JsonProperty("api_key") 37 | public void setLabel(String apiKey) { 38 | this.apiKey = apiKey; 39 | } 40 | 41 | @JsonProperty 42 | public String getUser() { 43 | return user; 44 | } 45 | 46 | @JsonProperty 47 | public void setUser(String user) { 48 | this.user = user; 49 | } 50 | 51 | @JsonProperty 52 | public String getPassword() { 53 | return password; 54 | } 55 | 56 | @JsonProperty 57 | public void setPassword(String password) { 58 | this.password = password; 59 | } 60 | 61 | @JsonProperty("kafka_brokers_sasl") 62 | public String[] getKafkaBrokersSasl() { 63 | return kafkaBrokersSasl; 64 | } 65 | 66 | @JsonProperty("kafka_brokers_sasl") 67 | public void setKafkaBrokersSasl(String[] kafkaBrokersSasl) { 68 | this.kafkaBrokersSasl = kafkaBrokersSasl; 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /kafka-java-console-sample/src/main/resources/log4j2.properties: -------------------------------------------------------------------------------- 1 | rootLogger.level=info 2 | rootLogger.appenderRef.console.ref=STDOUT 3 | 4 | appender.console.type=Console 5 | appender.console.name=STDOUT 6 | appender.console.layout.type=PatternLayout 7 | appender.console.layout.pattern=[%d] %p %m (%c)%n 8 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | /bin 3 | /lib 4 | .gradle 5 | 
/.settings/ 6 | 7 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM gradle:jdk11-alpine as jdk 2 | 3 | COPY --chown=1000 . /usr/src/app 4 | WORKDIR /usr/src/app 5 | 6 | COPY build.gradle settings.gradle /usr/src/app/ 7 | COPY src /usr/src/app/src 8 | 9 | RUN gradle -s --no-daemon assemble 10 | 11 | FROM openjdk:11 12 | 13 | COPY --from=jdk /usr/src/app/build/libs/kafka-java-console-schema-sample-2.0.jar /usr/src/app/ 14 | 15 | USER 1000 16 | 17 | ENTRYPOINT ["java", "-jar", "/usr/src/app/kafka-java-console-schema-sample-2.0.jar"] 18 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/README.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Kafka Java console sample application 2 | This Java console application demonstrates how to connect to [IBM Event Streams for IBM Cloud](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started), send and receive messages using the [Kafka](https://kafka.apache.org) Java API, and using a third party Avro Serializer and Deserializer for the schema registry. It also shows how to create topics using the Kafka Admin API. 3 | 4 | It can be run locally on your machine or deployed into [IBM Cloud](https://cloud.ibm.com/). 5 | 6 | For help with additional deployment modes, please refer to our [connection guide](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-connecting#connecting). 
7 | 8 | ## Running the application 9 | 10 | The application can be run in the following environments: 11 | 12 | * [IBM Cloud Kubernetes Service](./docs/Kubernetes_Service.md) 13 | * [IBM Cloud Foundry](./docs/Cloud_Foundry.md) 14 | * [Docker Local](./docs/Docker_Local.md) 15 | * [Local Development](./docs/Local.md) 16 | 17 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/build.gradle: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | // 17 | // Licensed Materials - Property of IBM 18 | // © Copyright IBM Corp. 
2015-2018 19 | // 20 | plugins { 21 | id 'application' 22 | id 'java' 23 | id 'com.github.johnrengelman.shadow' version '7.1.0' 24 | id 'eclipse' 25 | id "com.github.davidmc24.gradle.plugin.avro" version "1.3.0" 26 | } 27 | 28 | group = 'com.ibm.eventstreams.samples' 29 | version = '2.0' 30 | mainClassName = 'com.eventstreams.samples.EventStreamsConsoleSample' 31 | 32 | sourceCompatibility = 1.8 33 | targetCompatibility = 1.8 34 | 35 | repositories { 36 | mavenCentral() 37 | maven { 38 | url = 'https://packages.confluent.io/maven/' 39 | } 40 | } 41 | 42 | dependencies { 43 | implementation 'org.apache.kafka:kafka-clients:2.7.+' 44 | implementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.17.+' 45 | implementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.17.+' 46 | implementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.17.+' 47 | implementation 'com.fasterxml.jackson.core:jackson-databind:2.9.+' 48 | implementation 'io.confluent:kafka-avro-serializer:6.2.+' 49 | implementation 'org.apache.avro:avro:1.11.+' 50 | } 51 | 52 | avro { 53 | createSetters = false 54 | fieldVisibility = "PRIVATE" 55 | } 56 | 57 | jar { 58 | archiveClassifier.set('no-deps') 59 | } 60 | shadowJar { 61 | from "resources/log4j.properties" 62 | archiveClassifier.set('') 63 | manifest { 64 | attributes('Main-Class': mainClassName) 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/docs/CF_Enterprise_Plan.md: -------------------------------------------------------------------------------- 1 | # IBM Cloud Foundry deployment to an Enterprise Plan Event Streams for IBM Cloud 2 | 3 | ## Overview 4 | 5 | To deploy and run the sample: 6 | * Create a Cloud Foundry Service Alias for your Standard/Enterprise Service 7 | * Setup your `manifest.yml` with your service details 8 | * Use `ibmcloud cf push --no-start` to deploy the app to IBM Cloud Foundry 9 | * Re-configure 
binding with Manager role 10 | * Start the app 11 | * Inspect the application's logs 12 | 13 | ## Set up a Cloud Foundry Service Alias 14 | Before continuing, connect to IBM Cloud with the [IBM Cloud command line interface](https://cloud.ibm.com/docs/cli?topic=cloud-cli-ibmcloud-cli). 15 | 16 | The Enterprise plan is IAM enabled. Therefore the following extra step is required to create a Cloud Foundry alias for your Service: 17 | 18 | Create a Cloud Foundry alias for your service's associated CRN: 19 | ```shell 20 | ibmcloud resource service-alias-create --instance-name 21 | ``` 22 | 23 | Creating this alias associates your Service with a Cloud Foundry Organization and Space, thereby enabling your Cloud Foundry application to reference it and connect to it. 24 | 25 | ## Setup the manifest.yml 26 | 27 | 1. Select the Event Streams for IBM Cloud service you would like to bind your application to. Do this by replacing `` with your service instance alias name in `manifest.yml`: 28 | ```yaml 29 | services: 30 | - "" 31 | ``` 32 | 2. Consider your domain: You might need to change this in the `manifest.yml` as the domain varies by IBM Cloud region. If unsure, just delete the domain line and IBM Cloud will pick the domain for you. 33 | 34 | ## Build the Sample 35 | Build the project using gradle: 36 | ```shell 37 | gradle clean build 38 | ``` 39 | 40 | The command above creates a zip file under `build/distributions`. 41 | 42 | ## Deploy the Application 43 | 44 | Push the app without starting it immediately by running the following command in the same directory as the `manifest.yml` file: 45 | ```shell 46 | ibmcloud app push --no-start 47 | ``` 48 | 49 | ## Re-configure the binding 50 | A binding between your app and service-alias is created for you automatically, but by default does not have permissions to create topics. 
This means that we need to delete the existing binding and create a new one with the correct role: 51 | 52 | ``` 53 | ibmcloud resource service-binding-delete kafka-java-console-schema-sample 54 | ibmcloud resource service-binding-create kafka-java-console-schema-sample Manager 55 | ``` 56 | 57 | ## Start the app 58 | Now it should be safe to start the application: 59 | ```shell 60 | ibmcloud app start kafka-java-console-schema-sample 61 | ``` 62 | 63 | ## Produce and Consume Messages 64 | The sample application should have created the default sample topic and started producing and consuming messages in an infinite loop. View the logs to verify this: 65 | ```shell 66 | ibmcloud app logs kafka-java-console-schema-sample 67 | ``` 68 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/docs/Cloud_Foundry.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in IBM Cloud Foundry 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cloud-cli-install-ibmcloud-cli) 9 | * Provision an [Event Streams Service Enterprise Plan Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/), this is the only plan to support the schema registry. 10 | * Install [Gradle 4+](https://gradle.org/) 11 | * Install Java 7+ 12 | 13 | 14 | By this point, you should have an Event Streams for IBM Cloud instance, enterprise plan, provisioned. If you haven't done this step yet, please refer to the main [readme](../README.md). 
15 | 16 | 17 | ## Deploy the Application 18 | 19 | ### [Enterprise Plan Deployment Guide](CF_Enterprise_Plan.md) 20 | 21 | 22 | ## Further references 23 | 24 | If you want to find out more about Cloud Foundry applications then check the following documents: 25 | 26 | [Cloud Foundry manifest documentation](http://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html) 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/docs/Docker_Local.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in Docker Locally 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Provision an [Event Streams Service Enterprise Plan Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/), this is the only plan to support the schema registry. 9 | * Install [Docker](https://docs.docker.com/install/) 10 | 11 | ## Run the Application 12 | 13 | 1. Build the container image from the `Dockerfile`: 14 | ```shell 15 | docker build -t java-console-schema-sample . 16 | ``` 17 | 18 | 2. Export the Event Streams for IBM Cloud instance credentials: 19 | 20 | From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content and export it as below: 21 | ```shell 22 | export VCAP_SERVICES='{ 23 | "instance_id": "...", 24 | "api_key": "...", 25 | "kafka_admin_url": "....", 26 | "kafka_rest_url": "...", 27 | "kafka_brokers_sasl": [ 28 | ... 29 | ], 30 | "user": "...", 31 | "password": "..." 32 | }' 33 | ``` 34 | 35 | 3. 
Run the container image 36 | ```shell 37 | docker run -e VCAP_SERVICES="$VCAP_SERVICES" java-console-schema-sample 38 | ``` 39 | 40 | ## Further references 41 | 42 | If you want to find out more about Docker then check the following document: 43 | 44 | [Docker documentation](https://docs.docker.com/install/overview/) 45 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/docs/Kubernetes_Service.md: -------------------------------------------------------------------------------- 1 | # Running in IBM Cloud Kubernetes Service 2 | 3 | ## Prerequisites 4 | To build and run the sample, you must have done the following: 5 | 6 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 7 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli/reference/bluemix_cli?topic=cloud-cli-install-ibmcloud-cli) 8 | * Install the [Kubernetes CLI](https://kubernetes.io/docs/tasks/tools/install-kubectl/) 9 | * Provision an [Event Streams Service Enterprise Plan Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/), this is the only plan to support the schema registry. 10 | * Provision a [Kubernetes Service instance](https://cloud.ibm.com/kubernetes/catalog/cluster) in [IBM Cloud®](https://cloud.ibm.com/) 11 | 12 | 13 | ## Deploy the Application 14 | 15 | 1. From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content, create a file `credentials.json` and paste the content. 16 | 2. To deploy the application you first need to bind the Event Streams for IBM Cloud service instance to the cluster. Create a secret using the content from the file `credentials.json` 17 | ```shell 18 | kubectl create secret generic eventstreams-binding --from-file=binding=credentials.json 19 | ``` 20 | The command above creates a secret in your cluster named `eventstreams-binding`. 
21 | 3. [Configure the CLI to run kubectl](https://cloud.ibm.com/docs/containers?topic=containers-cs_cli_install#cs_cli_configure) 22 | 23 | 4. Deploy the application in the cluster: 24 | ```shell 25 | kubectl apply -f kafka-java-console-schema-sample.yaml 26 | ``` 27 | 5. Access the application logs: 28 | ```shell 29 | kubectl wait pod kafka-java-console-schema-sample --for=condition=Ready 30 | kubectl logs kafka-java-console-schema-sample --follow 31 | ``` 32 | 33 | ## Further references 34 | 35 | If you want to find out more about IBM Cloud Kubernetes Service or Kubernetes then check the following documents: 36 | 37 | [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/container-service) 38 | 39 | [Kubernetes Documentation](https://kubernetes.io/docs/home/) 40 | 41 | 42 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/docs/Local.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Kafka Java console sample application: Local Development guide 2 | As pushing the application into IBM Cloud® does not require you to build the application locally, this guide is here to guide you through the process, should you wish to build the application locally. 3 | 4 | We will not discuss establishing a connection from your laptop to Event Streams for IBM Cloud. This is described in the [connection guide](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-connecting#connecting). 5 | 6 | ## Prerequisites 7 | 8 | 1. **If you don't already have one, create an Event Streams Enterprise Plan service instance.** 9 | 10 | 1. Log in to the IBM Cloud console. 11 | 12 | 2. Click **Catalog**. 13 | 14 | 3. From the navigation pane, click **Integration**, click the **Event Streams** tile, and then select the **Enterprise plan**. The Event Streams service instance page opens. 15 | 16 | 4. Enter a name for your service. 
You can use the default value. 17 | 18 | 5. Click **Create**. The Event Streams **Getting started** page opens. 19 | 20 | 2. **If you don't already have them, install the following prerequisites:** 21 | 22 | * [git](https://git-scm.com/) 23 | * [Gradle](https://gradle.org/) 24 | * Java 8 or higher 25 | 26 | 27 | 28 | ## Steps to Build the Sample 29 | 30 | ### 1. **Create a topic** 31 | 32 | The topic is the core of Event Streams flows. Data passes through a topic from producing applications to consuming applications. 33 | 34 | We'll be using the IBM Cloud console (UI) to create the topic, and will reference it when starting the application. 35 | 36 | 1. Go to the **Topics** tab. 37 | 38 | 2. Click **New topic**. 39 | 40 | 3. Name your topic. 41 | 42 | > The sample application is configured to connect to topic `kafka-java-console-sample-topic`. If the topic does not exist, it is created when the application is started. 43 | 44 | 4. Keep the defaults set in the rest of the topic creation, click **Next** and then **Create topic**. 45 | 46 | 5. The topic appears in the table. Congratulations, you have created a topic! 47 | 48 | --- 49 | 50 | ### 2. **Create credentials** 51 | 52 | To allow the sample application to access your topic, we need to create some credentials for it. 53 | 54 | 1. Go to **Service credentials** in the navigation pane. 55 | 56 | 2. Click **New credential**. 57 | 58 | 3. Give the credential a name so you can identify its purpose later. You can accept the default value. 59 | 60 | 4. Give the credential the **Manager** role so that it can access the topics, and create them if necessary. 61 | 62 | 5. Click **Add**. The new credential is listed in the table in **Service credentials**. 63 | 64 | 6. Click **View credentials** to see the `api_key`, `kafka_brokers_sasl`. `kafka_http_url` values. 65 | 66 | --- 67 | 68 | ### 3. **Clone the Github repository for the sample application** 69 | 70 | The sample application is stored here. 
Clone the `event-streams-samples` repository by running the clone command from the command line. 71 | 72 | ``` 73 | git clone https://github.com/ibm-messaging/event-streams-samples.git 74 | ``` 75 | 76 |
77 | When the repository is cloned, from the command line change into the kafka-java-console-schema-sample directory. 78 | 79 | ``` 80 | cd event-streams-samples/kafka-java-console-schema-sample 81 | ``` 82 | 83 |
84 | Build the contents of the kafka-java-console-schema-sample directory. 85 | 86 | ``` 87 | gradle clean && gradle build 88 | ``` 89 | --- 90 | 91 | ### 4. **Run the consuming application** 92 | 93 | Start the sample consuming application from the command line, replacing the `kafka_brokers_sasl` `kafka_http_url` and `api_key` values. 94 | 95 | The `java -jar ./build/libs/kafka-java-console-schema-sample-2.0.jar` part of the command identifies the locations of the .JAR file to run within the cloned repository. You do not need to change this. 96 | 97 | Use the `kafka_brokers_sasl` from the **Service credentials** created in Step 2. We recommend using all the `kafka_brokers_sasl` listed in the **Service credentials** that you created. 98 | 99 | >The `kafka_brokers_sasl` must be formatted as `"host:port,host2:port2"`.
Format the contents of `kafka_brokers_sasl` in a text editor before entering it in the command line. 100 | 101 | Then, use the `kafka_http_url` as supplied from the **Service credentials** 102 | 103 | Then, use the `api_key` from the **Service credentials** created in Step 2. `-consumer` specifies that the consumer should start. 104 | 105 | ``` 106 | java -jar ./build/libs/kafka-java-console-schema-sample-2.0.jar 107 | -consumer 108 | ``` 109 | 110 | An `INFO No messages consumed` is displayed when the consuming application is running, but there is no data being consumed. 111 | 112 | --- 113 | 114 | ### 5. **Run the producing application** 115 | 116 | Open a new command line window and change into the kafka-java-console-schema-sample directory. 117 | 118 | ``` 119 | cd event-streams-samples/kafka-java-console-schema-sample 120 | ``` 121 | 122 | Then, start the sample producing application from the command line, replacing the `kafka_brokers_sasl`, `kafka_http_url` and `api_key` values with the same ones used to run the consumer. 123 | 124 | Use the `api_key` from the **Service credentials** created in Step 2. `-producer` specifies that the producer should start. 125 | 126 | ``` 127 | java -jar ./build/libs/kafka-java-console-schema-sample-2.0.jar 128 | -producer 129 | ``` 130 | 131 | --- 132 | 133 | ### 6. **Success!** 134 | 135 | When the producer starts, messages are produced to the topic. Messages are then consumed from the topic by the consuming application. 136 | You can verify the successful flow of messages when you see `INFO Message consumed` from the consumer. 137 | 138 | The sample runs indefinitely until you stop it. To stop the process, run an exit command `Ctrl+C`. 
-------------------------------------------------------------------------------- /kafka-java-console-schema-sample/kafka-java-console-schema-sample.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: kafka-java-console-schema-sample 5 | namespace: default 6 | spec: 7 | containers: 8 | - image: event-streams-samples/kafka-java-console-schema-sample:latest 9 | imagePullPolicy: Always 10 | name: java-sample 11 | env: 12 | - name: VCAP_SERVICES 13 | valueFrom: 14 | secretKeyRef: 15 | name: eventstreams-binding 16 | key: binding 17 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/manifest.yml: -------------------------------------------------------------------------------- 1 | applications: 2 | - name: kafka-java-console-schema-sample 3 | buildpack: "https://github.com/cloudfoundry/java-buildpack.git" 4 | memory: 1G 5 | disk_quota: 512M 6 | path: build/distributions/kafka-java-console-schema-sample-2.0.zip 7 | no-route: true 8 | health-check-type: none 9 | services: 10 | - "YOUR_SERVICE_ALIAS_INSTANCE_NAME" 11 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/settings.gradle: -------------------------------------------------------------------------------- 1 | pluginManagement { 2 | repositories { 3 | gradlePluginPortal() 4 | mavenCentral() 5 | maven { 6 | name "JCenter Gradle Plugins" 7 | url "https://dl.bintray.com/gradle/gradle-plugins" 8 | } 9 | } 10 | } 11 | rootProject.name = "kafka-java-console-schema-sample" -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/src/main/avro/com/eventstreams/samples/Message.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "namespace": "com.eventstreams.samples", 3 | "type": "record", 4 | "name": 
"Message", 5 | "fields": [ 6 | {"name": "message", "type": "string"}, 7 | {"name": "message_number", "type": "int"} 8 | ] 9 | } -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/src/main/java/com/eventstreams/samples/ArgumentParser.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 
2015-2018 19 | */ 20 | package com.eventstreams.samples; 21 | 22 | import java.util.Arrays; 23 | import java.util.HashMap; 24 | import java.util.HashSet; 25 | import java.util.LinkedList; 26 | import java.util.Map; 27 | import java.util.Queue; 28 | import java.util.Set; 29 | 30 | class ArgumentParser { 31 | 32 | static class ArgumentParserBuilder { 33 | private final Set flags = new HashSet<>(); 34 | private final Set options = new HashSet<>(); 35 | 36 | private ArgumentParserBuilder() {} 37 | 38 | ArgumentParserBuilder flag(final String name) { 39 | flags.add(name); 40 | return this; 41 | } 42 | 43 | ArgumentParserBuilder option(final String name) { 44 | options.add(name); 45 | return this; 46 | } 47 | 48 | ArgumentParser build() { 49 | return new ArgumentParser(this); 50 | } 51 | 52 | private Set flags() { 53 | return flags; 54 | } 55 | 56 | private Set options() { 57 | return options; 58 | } 59 | } 60 | 61 | private final Set flags; 62 | private final Set options; 63 | 64 | private ArgumentParser(final ArgumentParserBuilder builder) { 65 | this.flags = builder.flags(); 66 | this.options = builder.options(); 67 | } 68 | 69 | static ArgumentParserBuilder builder() { 70 | return new ArgumentParserBuilder(); 71 | } 72 | 73 | Map parseArguments(final String... 
argArray) throws IllegalArgumentException { 74 | final Queue args = new LinkedList<>(Arrays.asList(argArray)); 75 | final Map result = new HashMap<>(); 76 | while (!args.isEmpty()) { 77 | final String arg = args.poll(); 78 | if (flags.contains(arg)) { 79 | result.put(arg, ""); 80 | } else if (options.contains(arg)) { 81 | final String value = args.poll(); 82 | if (value == null) { 83 | throw new IllegalArgumentException("Command line argument '" + arg 84 | + "' must be followed by another argument"); 85 | } 86 | result.put(arg, value); 87 | } else { 88 | throw new IllegalArgumentException("Unexpected command line argument: " + arg); 89 | } 90 | } 91 | return result; 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/src/main/java/com/eventstreams/samples/ConsumerRunnable.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 
2015-2016 19 | */ 20 | package com.eventstreams.samples; 21 | 22 | import java.time.Duration; 23 | import java.util.Arrays; 24 | import java.util.List; 25 | import java.util.Map; 26 | 27 | import org.apache.kafka.clients.consumer.ConsumerRecord; 28 | import org.apache.kafka.clients.consumer.ConsumerRecords; 29 | import org.apache.kafka.clients.consumer.KafkaConsumer; 30 | import org.apache.kafka.common.KafkaException; 31 | import org.apache.kafka.common.PartitionInfo; 32 | import org.apache.kafka.common.errors.WakeupException; 33 | import org.slf4j.Logger; 34 | import org.slf4j.LoggerFactory; 35 | 36 | public class ConsumerRunnable implements Runnable { 37 | private static final Logger logger = LoggerFactory.getLogger(ConsumerRunnable.class); 38 | 39 | private final KafkaConsumer kafkaConsumer; 40 | private volatile boolean closing = false; 41 | 42 | public ConsumerRunnable(Map consumerConfigs, String topic) { 43 | // Create a Kafka consumer with the provided client configuration 44 | kafkaConsumer = new KafkaConsumer<>(consumerConfigs); 45 | 46 | // Checking for topic existence before subscribing 47 | List partitions = kafkaConsumer.partitionsFor(topic); 48 | if (partitions == null || partitions.isEmpty()) { 49 | logger.error("Topic '{}' does not exists - application will terminate", topic); 50 | kafkaConsumer.close(Duration.ofSeconds(5L)); 51 | throw new IllegalStateException("Topic '" + topic + "' does not exists - application will terminate"); 52 | } else { 53 | logger.info(partitions.toString()); 54 | } 55 | 56 | kafkaConsumer.subscribe(Arrays.asList(topic)); 57 | } 58 | 59 | @Override 60 | public void run() { 61 | logger.info("{} is starting.", ConsumerRunnable.class); 62 | 63 | try { 64 | while (!closing) { 65 | try { 66 | // Poll on the Kafka consumer, waiting up to 3 secs if there's nothing to consume. 
67 | ConsumerRecords records = kafkaConsumer.poll(Duration.ofMillis(3000L)); 68 | 69 | if (records.isEmpty()) { 70 | logger.info("No messages consumed"); 71 | } else { 72 | // Iterate through all the messages received and print their content 73 | for (ConsumerRecord record : records) { 74 | logger.info("Message consumed: {}", record); 75 | } 76 | } 77 | 78 | } catch (final WakeupException e) { 79 | logger.warn("Consumer closing - caught exception: {}", e, e); 80 | } catch (final KafkaException e) { 81 | logger.error("Sleeping for 5s - Consumer has caught: {}", e, e); 82 | try { 83 | Thread.sleep(5000); // Longer sleep before retrying 84 | } catch (InterruptedException e1) { 85 | logger.warn("Consumer closing - caught exception: {}", e, e); 86 | } 87 | } 88 | } 89 | } finally { 90 | kafkaConsumer.close(Duration.ofSeconds(5L)); 91 | logger.info("{} has shut down.", ConsumerRunnable.class); 92 | } 93 | } 94 | 95 | public void shutdown() { 96 | closing = true; 97 | kafkaConsumer.wakeup(); 98 | logger.info("{} is shutting down.", ConsumerRunnable.class); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/src/main/java/com/eventstreams/samples/ProducerRunnable.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 2015-2016 19 | */ 20 | package com.eventstreams.samples; 21 | 22 | import java.time.Duration; 23 | import java.util.List; 24 | import java.util.Map; 25 | import java.util.concurrent.Future; 26 | import java.util.concurrent.TimeUnit; 27 | 28 | import org.apache.kafka.clients.producer.KafkaProducer; 29 | import org.apache.kafka.clients.producer.ProducerRecord; 30 | import org.apache.kafka.clients.producer.RecordMetadata; 31 | import org.apache.kafka.common.PartitionInfo; 32 | import org.apache.kafka.common.errors.TimeoutException; 33 | import org.slf4j.Logger; 34 | import org.slf4j.LoggerFactory; 35 | 36 | import com.eventstreams.samples.Message; 37 | 38 | public class ProducerRunnable implements Runnable { 39 | private static final Logger logger = LoggerFactory.getLogger(ProducerRunnable.class); 40 | 41 | private final KafkaProducer kafkaProducer; 42 | private final String topic; 43 | private volatile boolean closing = false; 44 | 45 | public ProducerRunnable(Map producerConfigs, String topic) { 46 | this.topic = topic; 47 | 48 | // Create a Kafka producer with the provided client configuration 49 | kafkaProducer = new KafkaProducer<>(producerConfigs); 50 | 51 | try { 52 | // Checking for topic existence. 
53 | // If the topic does not exist, the kafkaProducer will retry for about 60 secs 54 | // before throwing a TimeoutException 55 | // see configuration parameter 'metadata.fetch.timeout.ms' 56 | List partitions = kafkaProducer.partitionsFor(topic); 57 | logger.info(partitions.toString()); 58 | } catch (TimeoutException kte) { 59 | logger.error("Topic '{}' may not exist - application will terminate", topic); 60 | kafkaProducer.close(Duration.ofSeconds(5L)); 61 | throw new IllegalStateException("Topic '" + topic + "' may not exist - application will terminate", kte); 62 | } 63 | } 64 | 65 | @Override 66 | public void run() { 67 | // Simple counter for messages sent 68 | int producedMessages = 0; 69 | logger.info("{} is starting.", ProducerRunnable.class); 70 | 71 | try { 72 | while (!closing) { 73 | String key = "key"; 74 | String messageString = "This is a test message"; 75 | 76 | try { 77 | // If a partition is not specified, the client will use the default partitioner to choose one. 78 | //ProducerRecord record = new ProducerRecord(topic,key,message); 79 | final Message message = new Message(messageString, producedMessages); 80 | final ProducerRecord record = new ProducerRecord(topic, key, message); 81 | 82 | // Send record asynchronously 83 | Future future = kafkaProducer.send(record); 84 | 85 | // Synchronously wait for a response from Event Streams / Kafka on every message produced. 86 | // For high throughput the future should be handled asynchronously. 
87 | RecordMetadata recordMetadata = future.get(5, TimeUnit.SECONDS); 88 | producedMessages++; 89 | 90 | logger.info("Message produced, offset: {}", recordMetadata.offset()); 91 | 92 | // Short sleep for flow control in this sample app 93 | // to make the output easily understandable 94 | Thread.sleep(2000L); 95 | 96 | } catch (final InterruptedException e) { 97 | logger.warn("Producer closing - caught exception: {}", e, e); 98 | } catch (final Exception e) { 99 | logger.error("Sleeping for 5s - Producer has caught : {}", e, e); 100 | try { 101 | Thread.sleep(5000L); // Longer sleep before retrying 102 | } catch (InterruptedException e1) { 103 | logger.warn("Producer closing - caught exception: {}", e, e); 104 | } 105 | } 106 | } 107 | } finally { 108 | kafkaProducer.close(Duration.ofSeconds(5L)); 109 | logger.info("{} has shut down.", ProducerRunnable.class); 110 | } 111 | } 112 | 113 | public void shutdown() { 114 | closing = true; 115 | logger.info("{} is shutting down.", ProducerRunnable.class); 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/src/main/java/com/eventstreams/samples/env/Environment.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 2018 19 | */ 20 | package com.eventstreams.samples.env; 21 | import java.io.IOException; 22 | import java.util.Iterator; 23 | 24 | import org.apache.kafka.common.errors.IllegalSaslStateException; 25 | import org.slf4j.Logger; 26 | import org.slf4j.LoggerFactory; 27 | 28 | import com.fasterxml.jackson.databind.JsonNode; 29 | import com.fasterxml.jackson.databind.ObjectMapper; 30 | 31 | public class Environment { 32 | 33 | private static final Logger logger = LoggerFactory.getLogger(Environment.class); 34 | private static final String SERVICE_NAME = "messagehub"; 35 | 36 | public static EventStreamsCredentials getEventStreamsCredentials() { 37 | String vcapServices = System.getenv("VCAP_SERVICES"); 38 | logger.info("VCAP_SERVICES: \n{}", vcapServices); 39 | try { 40 | if (vcapServices != null) { 41 | JsonNode mhub = parseVcapServices(vcapServices); 42 | ObjectMapper mapper = new ObjectMapper(); 43 | return mapper.readValue(mhub.toString(), EventStreamsCredentials.class); 44 | } else { 45 | logger.error("VCAP_SERVICES environment variable is null."); 46 | throw new IllegalStateException("VCAP_SERVICES environment variable is null."); 47 | } 48 | } catch (IOException ioe) { 49 | logger.error("VCAP_SERVICES environment variable parsing failed."); 50 | throw new IllegalStateException("VCAP_SERVICES environment variable parsing failed.", ioe); 51 | } 52 | } 53 | 54 | private static JsonNode parseVcapServices(String vcapServices) throws IOException { 55 | ObjectMapper mapper = new ObjectMapper(); 56 | JsonNode vcapServicesJson = mapper.readValue(vcapServices, JsonNode.class); 57 | 58 | // when running in CloudFoundry VCAP_SERVICES is wrapped into a bigger JSON object 59 | // so it needs to be extracted. 
We attempt to read the "instance_id" field to identify 60 | // if it has been wrapped 61 | if (vcapServicesJson.get("instance_id") != null) { 62 | return vcapServicesJson; 63 | } else { 64 | String vcapKey = null; 65 | Iterator it = vcapServicesJson.fieldNames(); 66 | // Find the Event Streams service bound to this application. 67 | while (it.hasNext() && vcapKey == null) { 68 | String potentialKey = it.next(); 69 | if (potentialKey.startsWith(SERVICE_NAME)) { 70 | logger.warn("Using the '{}' key from VCAP_SERVICES.", potentialKey); 71 | vcapKey = potentialKey; 72 | } 73 | } 74 | 75 | if (vcapKey == null) { 76 | throw new IllegalStateException("No Event Streams service bound"); 77 | } else { 78 | return vcapServicesJson.get(vcapKey).get(0).get("credentials"); 79 | } 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/src/main/java/com/eventstreams/samples/env/EventStreamsCredentials.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp.
2015-2016 19 | */ 20 | package com.eventstreams.samples.env; 21 | 22 | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; 23 | import com.fasterxml.jackson.annotation.JsonProperty; 24 | 25 | @JsonIgnoreProperties(ignoreUnknown=true) 26 | public class EventStreamsCredentials { 27 | 28 | private String apiKey, user, password, kafkaHttpUrl; 29 | private String[] kafkaBrokersSasl; 30 | 31 | @JsonProperty("api_key") 32 | public String getApiKey() { 33 | return apiKey; 34 | } 35 | 36 | @JsonProperty("api_key") 37 | public void setLabel(String apiKey) { 38 | this.apiKey = apiKey; 39 | } 40 | 41 | @JsonProperty 42 | public String getUser() { 43 | return user; 44 | } 45 | 46 | @JsonProperty 47 | public void setUser(String user) { 48 | this.user = user; 49 | } 50 | 51 | @JsonProperty 52 | public String getPassword() { 53 | return password; 54 | } 55 | 56 | @JsonProperty 57 | public void setPassword(String password) { 58 | this.password = password; 59 | } 60 | 61 | @JsonProperty("kafka_brokers_sasl") 62 | public String[] getKafkaBrokersSasl() { 63 | return kafkaBrokersSasl; 64 | } 65 | 66 | @JsonProperty("kafka_brokers_sasl") 67 | public void setKafkaBrokersSasl(String[] kafkaBrokersSasl) { 68 | this.kafkaBrokersSasl = kafkaBrokersSasl; 69 | } 70 | 71 | @JsonProperty("kafka_http_url") 72 | public String getKafkaHttpUrl() { 73 | return kafkaHttpUrl; 74 | } 75 | 76 | @JsonProperty("kafka_http_url") 77 | public void setKafkaHttpUrl(String kafkaHttpUrl) { 78 | this.kafkaHttpUrl = kafkaHttpUrl; 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/src/main/resources/.gitignore: -------------------------------------------------------------------------------- 1 | /jaas.conf 2 | -------------------------------------------------------------------------------- /kafka-java-console-schema-sample/src/main/resources/log4j2.properties: 
-------------------------------------------------------------------------------- 1 | rootLogger.level=info 2 | rootLogger.appenderRef.console.ref=STDOUT 3 | 4 | appender.console.type=Console 5 | appender.console.name=STDOUT 6 | appender.console.layout.type=PatternLayout 7 | appender.console.layout.pattern=[%d] %p %m (%c)%n 8 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | /bin/ 3 | /build/ 4 | /.gradle/ 5 | /.settings/ 6 | /.project 7 | 8 | #### intellij settings 9 | *.iml 10 | .idea 11 | gradle* 12 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM gradle:jdk11-alpine as jdk 2 | 3 | COPY --chown=1000 . /usr/src/app 4 | WORKDIR /usr/src/app 5 | 6 | USER root 7 | RUN apk --no-cache add busybox-static \ 8 | && gradle -s --no-daemon assemble 9 | 10 | FROM websphere-liberty:javaee8 11 | 12 | COPY --from=jdk /bin/busybox.static /bin/unzip 13 | COPY --from=jdk --chown=1001:0 /usr/src/app/target/defaultServer/apps/EventStreamsLibertyApp.war /tmp 14 | COPY --from=jdk --chown=1001:0 /usr/src/app/target/defaultServer/server.xml /config/server.xml 15 | 16 | RUN mkdir -p /opt/ibm/wlp/usr/servers/defaultServer/apps/EventStreamsLibertyApp.war \ 17 | && unzip -q /tmp/EventStreamsLibertyApp.war \ 18 | -d /opt/ibm/wlp/usr/servers/defaultServer/apps/EventStreamsLibertyApp.war \ 19 | && chmod -R a+rwX /opt/ibm/wlp/usr/servers/defaultServer/apps/EventStreamsLibertyApp.war 20 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/README.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Liberty sample application 2 | 3 | This 
repository holds a sample application that was built using Liberty for Java™. The application will interact with an Event Streams for IBM Cloud service to produce and consume messages. 4 | 5 | **Liberty for Java™ applications on IBM Cloud®** are powered by the IBM WebSphere® Liberty Buildpack. The Liberty profile is a highly composable, fast-to-start, dynamic application server runtime environment. It is part of IBM WebSphere Application Server v8.5.5. 6 | 7 | For more information regarding IBM Event Streams for IBM Cloud, [see the documentation on IBM Cloud®](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started). 8 | 9 | __Important Note__: This sample creates a topic with one partition on your behalf. On the Standard plan, this will incur a fee if the topic does not already exist. 10 | 11 | ## Running the application 12 | 13 | The application can be run in the following environments: 14 | 15 | * [IBM Cloud Kubernetes Service](./docs/Kubernetes_Service.md) 16 | * [IBM Cloud Foundry](./docs/Cloud_Foundry.md) 17 | * [Docker Local](./docs/Docker_Local.md) 18 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/build.gradle: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016, 2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | // 17 | // Licensed Materials - Property of IBM 18 | // © Copyright IBM Corp. 2016, 2018 19 | // 20 | plugins { 21 | id 'java' 22 | id 'war' 23 | id 'eclipse' 24 | } 25 | 26 | group = 'com.ibm.eventstreams.samples' 27 | version = '1.0' 28 | description = """Event Streams Liberty Sample""" 29 | 30 | sourceCompatibility = 1.8 31 | targetCompatibility = 1.8 32 | 33 | repositories { 34 | mavenCentral() 35 | } 36 | 37 | dependencies { 38 | implementation 'org.apache.geronimo.specs:geronimo-servlet_3.0_spec:1.0' 39 | implementation 'org.apache.kafka:kafka-clients:2.7.+' 40 | implementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.17.+' 41 | implementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.17.+' 42 | implementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.17.+' 43 | implementation 'com.fasterxml.jackson.core:jackson-annotations:2.9.+' 44 | implementation 'com.fasterxml.jackson.core:jackson-core:2.9.+' 45 | implementation 'com.fasterxml.jackson.core:jackson-databind:2.9.+' 46 | } 47 | 48 | // move generated war file into the DefaultServer/apps diretory 49 | tasks.withType(Jar) { 50 | destinationDirectory = file("$projectDir/target/defaultServer/apps") 51 | } 52 | 53 | war { 54 | archiveFileName = "EventStreamsLibertyApp.war" 55 | from('resources'){ 56 | include '*' 57 | into "resources" 58 | } 59 | from('src/main/resources'){ 60 | include '*' 61 | into "WEB-INF/lib" 62 | } 63 | from('log4j'){ 64 | include '*' 65 | into "WEB-INF/classes" 66 | } 67 | } 68 | 69 | task copyServerXml { 70 | copy { 71 | from "$projectDir/src/main/wlp/server.xml" 72 | into "$projectDir/target/defaultServer" 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/docs/CF_Standard_Enterprise_Plan.md: -------------------------------------------------------------------------------- 1 | # IBM Cloud Foundry deployment to an Standard/Enterprise Plan Event 
Streams for IBM Cloud 2 | 3 | ## Overview 4 | 5 | To deploy and run the sample: 6 | * Create a Cloud Foundry Service Alias for your Standard/Enterprise Service 7 | * Setup your `manifest.yml` with your service details 8 | * Use `ibmcloud cf push --no-start` to deploy the app to IBM Cloud Foundry 9 | * Re-configure binding with Manager role 10 | * Start the app 11 | * Inspect the application's logs 12 | 13 | ## Set up a Cloud Foundry Service Alias 14 | Before continuing, connect to IBM Cloud with the [IBM Cloud command line interface](https://cloud.ibm.com/docs/cli?topic=cloud-cli-ibmcloud-cli). 15 | 16 | The Standard/Enterprise plan is IAM enabled. Therefore the following extra step is required to create a Cloud Foundry alias for your Service: 17 | 18 | Create a Cloud Foundry alias for your service's associated CRN: 19 | ```shell 20 | ibmcloud resource service-alias-create --instance-name 21 | ``` 22 | 23 | Having created this alias associated your Service with a Cloud Foundry Organization and Space, thereby enabling your Cloud Foundry application to referrence it and connect to it. 24 | 25 | ## Setup the manifest.yml 26 | To deploy applications using the IBM WebSphere Application Server Liberty Buildpack, you are required to accept the IBM Liberty license and IBM JRE license by following the instructions below: 27 | 28 | 1. Read the current IBM [Liberty-License](http://public.dhe.ibm.com/ibmdl/export/pub/software/websphere/wasdev/downloads/wlp/8.5.5.7/lafiles/runtime/en.html) and the current IBM [JVM-License](http://www14.software.ibm.com/cgi-bin/weblap/lap.pl?la_formnum=&li_formnum=L-JWOD-9SYNCP&title=IBM%C2%AE+SDK%2C+Java+Technology+Edition%2C+Version+8.0&l=en). 29 | 2. Select the Event Streams for IBM Cloud service you would like to bind your application to. Do this by replacing `` with your service instance alias name in `manifest.yml`: 30 | ```yaml 31 | services: 32 | - "" 33 | ``` 34 | 3. 
Consider your domain: You might need to change this in the `manifest.yml` as the domain varies by IBM Cloud region. If unsure, just delete the domain line and IBM Cloud will pick the domain for you. 35 | 4. Extract the `D/N: ` from the Liberty-License and JVM-License. 36 | 5. Add the following environment variables and extracted license codes to the `manifest.yml` file in the directory from which you push your application. For further information on the format of 37 | the `manifest.yml` file refer to the [manifest documentation]. 38 | 39 | ```yaml 40 | env: 41 | IBM_JVM_LICENSE: 42 | IBM_LIBERTY_LICENSE: 43 | ``` 44 | 45 | __Note:__ You may need to use a unique hostname e.g. *host: JohnsSampleLibertyApp* 46 | 47 | ## Build the Sample 48 | Build the project using gradle: 49 | ```shell 50 | gradle build war 51 | ``` 52 | 53 | You should see a directory called `target` created in your project home directory. A WAR file is created under `target/defaultServer`, as well as a copy of the server.xml file. 54 | 55 | ## Deploy the Application 56 | 57 | Push the app without starting it immediately by running the following command in the same directory as the `manifest.yml` file: 58 | ```shell 59 | ibmcloud app push --no-start 60 | ``` 61 | 62 | ## Re-configure the binding 63 | A binding between your app and service-alias is created for you automatically, but by default does not have permissions to create topics. 
This means that we need to delete the existing binding and create a new one with the correct role: 64 | 65 | ``` 66 | ibmcloud resource service-binding-delete 67 | ibmcloud resource service-binding-create Manager 68 | ``` 69 | 70 | ## Start the app 71 | Now it should be safe to start the application, **make sure you capture this output as it displays your application's URL binding**: 72 | ```shell 73 | ibmcloud app start 74 | ``` 75 | You can optionally inspect the app's logs (The app only logs when the UI button gets hit): 76 | ```shell 77 | ibmcloud app logs 78 | ``` 79 | 80 | ## Produce and Consume Messages 81 | Once the sample has been successfully deployed, navigate to the URL **stated in the start logs of your `ibmcloud app start` command above**. This URL is made up from the app name and domain that were specified in the manifest.yml. Once here, you can produce a message by clicking on the `Post Message` button. 82 | 83 | If the message was successfully produced and then consumed, you will then see the prompted message: 84 | 85 | ##### Already consumed messages: 86 | ```shell 87 | Message: [{"value":"This is a test message, msgId=0"}]. Offset: 1 88 | ``` 89 | 90 | ##### We have produced a message: ```This is a test message, msgId=0``` 91 | ```shell 92 | Consumed messages: [{"value":"This is a test message, msgId=0"}].
Offset: 1 93 | ``` 94 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/docs/Cloud_Foundry.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in IBM Cloud Foundry 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have the done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cloud-cli-install-ibmcloud-cli) 9 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 10 | * Install [Gradle 4+](https://gradle.org/) 11 | * Install Java 7+ 12 | 13 | ## Standard/Enterprise Plan? 14 | 15 | **It's important to know which Event Streams for IBM Cloud plan you're using as the sample deployment steps are subtly different on each plan respectively.** 16 | 17 | By this point, you should have an Event Streams for IBM Cloud instance provisioned. If you haven't done this step yet, please refer to the main [readme](../README.md). 18 | 19 | If you are not sure what type of Event Streams for IBM Cloud instance you have then you can find this information out by visiting IBM Cloud's web console [dashboard](https://cloud.ibm.com/resources). 20 | 21 | *Please make sure you are in the appropriate Region, Account, Organization and Space where you provisioned your Event Streams instance!* 22 | 23 | * Event Streams for IBM Cloud Standard plan services are "Services" with the plan column showing "Standard". 24 | * Event Streams for IBM Cloud Enterprise plan services are "Services" with the plan column showing "Enterprise". 
25 | 26 | 27 | ## Deploy the Application 28 | 29 | The deployment for the Standard/Enterprise plan can be found in the link listed below 30 | 31 | ### [Classic Plan Deployment Guide](CF_Classic_Plan.md) 32 | 33 | ### [Standard/Enterprise Plan Deployment Guide](CF_Standard_Enterprise_Plan.md) 34 | 35 | ## Further references 36 | 37 | If you want find out more about Cloud Foundry applications or Liberty then check the following documents: 38 | 39 | [Cloud Foundry manifest documentation](http://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html) 40 | 41 | [Liberty documentation](https://developer.ibm.com/wasdev/websphere-liberty/) 42 | 43 | 44 | ## Licenses 45 | 46 | [Liberty-License](http://public.dhe.ibm.com/ibmdl/export/pub/software/websphere/wasdev/downloads/wlp/8.5.5.7/lafiles/runtime/en.html) 47 | 48 | [JVM-License](http://www14.software.ibm.com/cgi-bin/weblap/lap.pl?la_formnum=&li_formnum=L-JWOD-9SYNCP&title=IBM%C2%AE+SDK%2C+Java+Technology+Edition%2C+Version+8.0&l=en) 49 | 50 | 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/docs/Docker_Local.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in Docker Locally 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have the done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 9 | * Install [Docker](https://docs.docker.com/install/) 10 | 11 | ## Run the Application 12 | 13 | 1. Build the container image from the `Dockerfile`: 14 | ```shell 15 | docker build -t liberty-sample . 16 | ``` 17 | 18 | 2. 
Export the Event Streams for IBM Cloud instance credentials: 19 | 20 | From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content and export it as below: 21 | ```shell 22 | export VCAP_SERVICES='{ 23 | "instance_id": "...", 24 | "api_key": "...", 25 | "kafka_admin_url": "....", 26 | "kafka_rest_url": "...", 27 | "kafka_brokers_sasl": [ 28 | ... 29 | ], 30 | "user": "...", 31 | "password": "..." 32 | }' 33 | ``` 34 | 35 | 3. Run the container image 36 | ```shell 37 | docker run -e VCAP_SERVICES="$VCAP_SERVICES" -p 9080:9080 liberty-sample 38 | ``` 39 | 40 | 4. Access the application: 41 | 42 | Point your browser to [http://localhost:9080](http://localhost:9080) 43 | 44 | 45 | ## Further references 46 | 47 | If you want find out more about Docker then check the following document: 48 | 49 | [Docker documentation](https://docs.docker.com/install/overview/) 50 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/docs/Kubernetes_Service.md: -------------------------------------------------------------------------------- 1 | # Running in IBM Cloud Kubernetes Service 2 | 3 | ## Prerequisites 4 | To build and run the sample, you must have the done the following: 5 | 6 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 7 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cloud-cli-install-ibmcloud-cli) 8 | * Install the [Kubernetes CLI](https://kubernetes.io/docs/tasks/tools/install-kubectl/) 9 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 10 | * Provision a [Kubernetes Service instance](https://cloud.ibm.com/kubernetes/catalog/cluster) in [IBM Cloud®](https://cloud.ibm.com/) 11 | 12 | 13 | ## Deploy the Application 14 | 15 | 1. 
From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content, create a file `credentials.json` and paste the content. 16 | 2. To deploy the application you first need to bind the Event Streams for IBM Cloud service instance to the cluster. Create a secret using the content from the file `credentials.json` 17 | ```shell 18 | kubectl create secret generic eventstreams-binding --from-file=binding=credentials.json 19 | ``` 20 | The command above creates a secret in your cluster named `eventstreams-binding`. 21 | 3. [Configure the CLI to run kubectl](https://cloud.ibm.com/docs/containers?topic=containers-cs_cli_install#cs_cli_configure) 22 | 23 | 4. Deploy the application in the cluster: 24 | ```shell 25 | kubectl apply -f kafka-java-liberty-sample.yaml 26 | ``` 27 | 5. Access the application: 28 | ```shell 29 | kubectl wait pod kafka-java-liberty-sample --for=condition=Ready 30 | kubectl port-forward kafka-java-liberty-sample 9080:9080 31 | ``` 32 | Point your browser to [http://localhost:9080](http://localhost:9080) 33 | 34 | ## Further references 35 | 36 | If you want find out more about IBM Cloud Kubernetes Service or Kubernetes then check the following documents: 37 | 38 | [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/container-service) 39 | 40 | [Kubernetes Documentation](https://kubernetes.io/docs/home/) 41 | 42 | 43 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/kafka-java-liberty-sample.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: kafka-java-liberty-sample 5 | namespace: default 6 | spec: 7 | containers: 8 | - image: event-streams-samples/kafka-java-liberty-sample:latest 9 | imagePullPolicy: IfNotPresent 10 | name: liberty-sample 11 | env: 12 | - name: VCAP_SERVICES 13 | valueFrom: 14 | secretKeyRef: 15 | name: 
eventstreams-binding 16 | key: binding 17 | ports: 18 | - containerPort: 9080 19 | name: sample 20 | protocol: TCP -------------------------------------------------------------------------------- /kafka-java-liberty-sample/log4j/log4j2.properties: -------------------------------------------------------------------------------- 1 | rootLogger.level=info 2 | rootLogger.appenderRef.console.ref=STDOUT 3 | 4 | appender.console.type=Console 5 | appender.console.name=STDOUT 6 | appender.console.layout.type=PatternLayout 7 | appender.console.layout.pattern=[%d] %p %m (%c)%n 8 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/manifest.yml: -------------------------------------------------------------------------------- 1 | applications: 2 | - path: target/defaultServer 3 | memory: 512M 4 | instances: 1 5 | buildpack: liberty-for-java 6 | name: EventStreamsLibertyApp 7 | disk_quota: 1024M 8 | random-route: true 9 | services: 10 | - "" 11 | env: 12 | IBM_JVM_LICENSE: 13 | IBM_LIBERTY_LICENSE: 14 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/resources/consumer.properties: -------------------------------------------------------------------------------- 1 | key.deserializer=org.apache.kafka.common.serialization.StringDeserializer 2 | value.deserializer=org.apache.kafka.common.serialization.StringDeserializer 3 | client.id=event-streams-sample 4 | group.id=event-streams-sample 5 | enable.auto.commit=false 6 | auto.offset.reset=latest 7 | security.protocol=SASL_SSL 8 | sasl.mechanism=PLAIN 9 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="token" password="APIKEY"; 10 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/resources/producer.properties: -------------------------------------------------------------------------------- 1 | 
key.serializer=org.apache.kafka.common.serialization.StringSerializer 2 | value.serializer=org.apache.kafka.common.serialization.StringSerializer 3 | bootstrap.servers= 4 | client.id=event-streams-sample 5 | acks=all 6 | security.protocol=SASL_SSL 7 | sasl.mechanism=PLAIN 8 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="token" password="APIKEY"; 9 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = "kafka-java-liberty-sample" 2 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/java/com/eventstreams/samples/env/Environment.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 
2018 19 | */ 20 | package com.eventstreams.samples.env; 21 | 22 | import com.fasterxml.jackson.databind.JsonNode; 23 | import com.fasterxml.jackson.databind.ObjectMapper; 24 | import org.slf4j.Logger; 25 | import org.slf4j.LoggerFactory; 26 | 27 | import java.io.IOException; 28 | import java.util.Iterator; 29 | 30 | public class Environment { 31 | 32 | private static final Logger logger = LoggerFactory.getLogger(Environment.class); 33 | 34 | public static EventStreamsCredentials getEventStreamsCredentials() { 35 | String vcapServices = System.getenv("VCAP_SERVICES"); 36 | logger.info("VCAP_SERVICES: \n" + vcapServices); 37 | if (vcapServices == null) { 38 | logger.error("VCAP_SERVICES environment variable is null."); 39 | throw new IllegalStateException("VCAP_SERVICES environment variable is null."); 40 | } 41 | return transformVcapServices(vcapServices); 42 | } 43 | 44 | private static EventStreamsCredentials transformVcapServices(String vcapServices) { 45 | try { 46 | ObjectMapper mapper = new ObjectMapper(); 47 | JsonNode instanceCredentials = mapper.readValue(vcapServices, JsonNode.class); 48 | // when running in CloudFoundry VCAP_SERVICES is wrapped into a bigger JSON object 49 | // so it needs to be extracted. We attempt to read the "instance_id" field to identify 50 | // if it has been wrapped 51 | if (instanceCredentials.get("instance_id") == null) { 52 | Iterator it = instanceCredentials.fieldNames(); 53 | // Find the Event Streams service bound to this application. 
54 | while (it.hasNext()) { 55 | String potentialKey = it.next(); 56 | String messageHubJsonKey = "messagehub"; 57 | if (potentialKey.startsWith(messageHubJsonKey)) { 58 | logger.warn("Using the '" + potentialKey + "' key from VCAP_SERVICES."); 59 | instanceCredentials = instanceCredentials.get(potentialKey) 60 | .get(0) 61 | .get("credentials"); 62 | break; 63 | } 64 | } 65 | } 66 | return mapper.readValue(instanceCredentials.toString(), EventStreamsCredentials.class); 67 | } catch (IOException e) { 68 | logger.error("VCAP_SERVICES environment variable parses failed."); 69 | throw new IllegalStateException("VCAP_SERVICES environment variable parses failed.", e); 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/java/com/eventstreams/samples/env/EventStreamsCredentials.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 
2016 19 | */ 20 | package com.eventstreams.samples.env; 21 | 22 | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; 23 | import com.fasterxml.jackson.annotation.JsonProperty; 24 | 25 | @JsonIgnoreProperties(ignoreUnknown=true) 26 | public class EventStreamsCredentials { 27 | 28 | private String apiKey, kafkaAdminUrl, user, password; 29 | private String[] kafkaBrokersSasl; 30 | 31 | @JsonProperty("api_key") 32 | public String getApiKey() { 33 | return apiKey; 34 | } 35 | 36 | @JsonProperty("api_key") 37 | public void setLabel(String apiKey) { 38 | this.apiKey = apiKey; 39 | } 40 | 41 | @JsonProperty("kafka_admin_url") 42 | public String getKafkaAdminUrl() { 43 | return kafkaAdminUrl; 44 | } 45 | 46 | @JsonProperty("kafka_admin_url") 47 | public void setKafkaAdminUrl(String kafkaAdminUrl) { 48 | this.kafkaAdminUrl = kafkaAdminUrl; 49 | } 50 | 51 | @JsonProperty 52 | public String getUser() { 53 | return user; 54 | } 55 | 56 | @JsonProperty 57 | public void setUser(String user) { 58 | this.user = user; 59 | } 60 | 61 | @JsonProperty 62 | public String getPassword() { 63 | return password; 64 | } 65 | 66 | @JsonProperty 67 | public void setPassword(String password) { 68 | this.password = password; 69 | } 70 | 71 | @JsonProperty("kafka_brokers_sasl") 72 | public String[] getKafkaBrokersSasl() { 73 | return kafkaBrokersSasl; 74 | } 75 | 76 | @JsonProperty("kafka_brokers_sasl") 77 | public void setKafkaBrokersSasl(String[] kafkaBrokersSasl) { 78 | this.kafkaBrokersSasl = kafkaBrokersSasl; 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/java/com/eventstreams/samples/servlet/MessageList.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 2016 19 | */ 20 | package com.eventstreams.samples.servlet; 21 | 22 | import com.fasterxml.jackson.core.JsonFactory; 23 | import com.fasterxml.jackson.core.JsonGenerator; 24 | 25 | import java.io.ByteArrayOutputStream; 26 | import java.io.IOException; 27 | import java.util.ArrayList; 28 | import java.util.List; 29 | 30 | public class MessageList { 31 | private final List messages = new ArrayList<>(); 32 | 33 | public void push(T message) { 34 | this.messages.add(message); 35 | } 36 | 37 | /** 38 | * Build message list dependent on the format Event Streams requires. The 39 | * message list is in the form: [{ "value": base_64_string }, ...] 40 | * 41 | * @return {String} String representation of a JSON object. 
42 | * @throws IOException 43 | */ 44 | public String build() throws IOException { 45 | final JsonFactory jsonFactory = new JsonFactory(); 46 | final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); 47 | final JsonGenerator jsonGenerator = jsonFactory.createGenerator(outputStream); 48 | 49 | jsonGenerator.writeStartArray(); 50 | for (T message : messages) { 51 | jsonGenerator.writeStartObject(); 52 | jsonGenerator.writeFieldName("value"); 53 | jsonGenerator.writeObject(message); 54 | jsonGenerator.writeEndObject(); 55 | } 56 | jsonGenerator.writeEndArray(); 57 | 58 | jsonGenerator.close(); 59 | outputStream.close(); 60 | 61 | return new String(outputStream.toByteArray()); 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/java/com/eventstreams/samples/servlet/RESTRequest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 
2016 19 | */ 20 | package com.eventstreams.samples.servlet; 21 | 22 | import java.io.BufferedReader; 23 | import java.io.DataOutputStream; 24 | import java.io.InputStream; 25 | import java.io.InputStreamReader; 26 | import java.net.URL; 27 | 28 | import javax.net.ssl.HttpsURLConnection; 29 | import javax.net.ssl.SSLContext; 30 | 31 | public class RESTRequest { 32 | private String apiKey, baseUrl; 33 | 34 | public RESTRequest(String baseUrl, String apiKey) { 35 | this.apiKey = apiKey; 36 | this.baseUrl = baseUrl; 37 | } 38 | 39 | /** 40 | * Execute a GET request against the specified REST target. 41 | * 42 | * @param target 43 | * {String} The REST API target to run against (for example, 44 | * '/admin/topics') 45 | * @param acceptHeader 46 | * {Boolean} A flag to notify the caller whether or not to 47 | * include the 'Accept' header in its request. 48 | * @return {String} The response received from the server. 49 | */ 50 | public String get(String target, boolean acceptHeader) { 51 | HttpsURLConnection connection = null; 52 | 53 | if (!target.startsWith("/")) { 54 | target = "/" + target; 55 | } 56 | 57 | try { 58 | // Create secure connection to the REST URL. 59 | SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); 60 | sslContext.init(null, null, null); 61 | 62 | URL url = new URL(baseUrl + target); 63 | connection = (HttpsURLConnection) url.openConnection(); 64 | connection.setSSLSocketFactory(sslContext.getSocketFactory()); 65 | connection.setRequestMethod("GET"); 66 | // Apply API key header and kafka content type Accept header if 67 | // the 'acceptHeader' flag is set to true. 68 | connection.setRequestProperty("X-Auth-Token", this.apiKey); 69 | 70 | if (acceptHeader) { 71 | connection.setRequestProperty("Accept", "application/vnd.kafka.binary.v1+json"); 72 | } 73 | 74 | // Read the response data from the request and return 75 | // it to the function caller. 
76 | InputStream is = connection.getInputStream(); 77 | BufferedReader rd = new BufferedReader(new InputStreamReader(is)); 78 | String inputLine = ""; 79 | StringBuffer response = new StringBuffer(); 80 | 81 | while ((inputLine = rd.readLine()) != null) { 82 | response.append(inputLine); 83 | } 84 | 85 | rd.close(); 86 | 87 | return response.toString(); 88 | } catch (Exception e) { 89 | e.printStackTrace(); 90 | } finally { 91 | if (connection != null) { 92 | connection.disconnect(); 93 | } 94 | } 95 | 96 | return ""; 97 | } 98 | 99 | /** 100 | * Execute a GET request against the specified REST target. 101 | * 102 | * @param target 103 | * {String} The REST API target to run against (for example, 104 | * '/admin/topics') 105 | * @param body 106 | * {String} The data to be provided in the body section of the 107 | * POST request. 108 | * @param ignoredErrorCodes 109 | * {int[]} An list of error codes which will be ignored as a 110 | * side-effect of the request. Can be provided as null. 111 | * @return {String} The response received from the server. 112 | */ 113 | public String post(String target, String body, int[] ignoredErrorCodes) { 114 | HttpsURLConnection connection = null; 115 | int responseCode = 0; 116 | 117 | if (!target.startsWith("/")) { 118 | target = "/" + target; 119 | } 120 | 121 | try { 122 | 123 | // Create secure connection to the REST URL. 124 | SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); 125 | sslContext.init(null, null, null); 126 | 127 | URL url = new URL(baseUrl + target); 128 | connection = (HttpsURLConnection) url.openConnection(); 129 | connection.setSSLSocketFactory(sslContext.getSocketFactory()); 130 | connection.setDoOutput(true); 131 | connection.setRequestMethod("POST"); 132 | 133 | // Apply headers, in this case, the API key and Kafka content type. 
134 | connection.setRequestProperty("X-Auth-Token", this.apiKey); 135 | connection.setRequestProperty("Content-Type", "application/json"); 136 | 137 | // Send the request, writing the body data 138 | // to the output stream. 139 | DataOutputStream wr = new DataOutputStream(connection.getOutputStream()); 140 | wr.writeBytes(body); 141 | wr.close(); 142 | 143 | responseCode = connection.getResponseCode(); 144 | 145 | // Retrieve the response, transform it, then 146 | // return it to the caller. 147 | InputStream is = connection.getInputStream(); 148 | BufferedReader rd = new BufferedReader(new InputStreamReader(is)); 149 | StringBuilder response = new StringBuilder(); 150 | String line; 151 | 152 | while ((line = rd.readLine()) != null) { 153 | response.append(line); 154 | response.append('\r'); 155 | } 156 | 157 | rd.close(); 158 | 159 | return response.toString(); 160 | } catch (Exception e) { 161 | boolean isIgnored = false; 162 | 163 | // Filter out error codes which are ignored. If the 164 | // response code is in the ignore list, the error 165 | // is not printed. 
166 | if (ignoredErrorCodes != null) { 167 | for (int i = 0; i < ignoredErrorCodes.length; i++) { 168 | if (ignoredErrorCodes[i] == responseCode) { 169 | isIgnored = true; 170 | } 171 | } 172 | } 173 | 174 | if (!isIgnored) { 175 | e.printStackTrace(); 176 | } 177 | } finally { 178 | if (connection != null) { 179 | connection.disconnect(); 180 | } 181 | } 182 | 183 | return ""; 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/webapp/images/mh_featured.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ibm-messaging/event-streams-samples/cc6c4c4fc28ab4517bbe14fc356854c5057c650e/kafka-java-liberty-sample/src/main/webapp/images/mh_featured.png -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/webapp/index.html: -------------------------------------------------------------------------------- 1 | 20 | 21 | 22 | 23 | Sample Liberty app over Event Streams 24 | 25 | 26 | 27 | 28 | 31 | 32 | 42 |

43 |
44 |
45 |

Produced Message

46 |

No Messages

47 |

Consumed Message

48 |

No Messages

49 |
50 | 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/webapp/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 2016 19 | */ 20 | 21 | //index.js 22 | // request message on server 23 | //Calls KafkaServlet 24 | xhrGet("KafkaServlet", function(responseText){ 25 | // add to document 26 | var mytitle = document.getElementById('message'); 27 | mytitle.innerHTML = responseText; 28 | 29 | }, function(err){ 30 | console.log(err); 31 | }); 32 | 33 | function postMessage(){ 34 | var responseDiv = document.getElementById('consumeResponse'); 35 | responseDiv.innerHTML = "

waiting for consumer...

"; 36 | var responseDiv = document.getElementById('produceResponse'); 37 | responseDiv.innerHTML = "

waiting for producer...

"; 38 | var postMessageBtn = document.getElementById('btnPost'); 39 | postMessageBtn.value='request sent ...'; 40 | xhrPost("KafkaServlet", function(responseText){ 41 | // fill in produced and consume message sections 42 | // responseText is an array of an produced then consumed message 43 | var response = responseText.split(",,"); 44 | 45 | var produceDiv = document.getElementById('produceResponse'); 46 | produceDiv.innerHTML = response[0]; 47 | var consumeDiv = document.getElementById('consumeResponse'); 48 | consumeDiv.innerHTML = response[1]; 49 | // reset button text 50 | postMessageBtn.value='Produce a Message'; 51 | 52 | xhrGet("KafkaServlet", function(responseText){ 53 | // add message to 'already consumed messages' section 54 | var mytitle = document.getElementById('message'); 55 | mytitle.innerHTML = responseText; 56 | postMessageBtn.value='Produce a Message'; 57 | 58 | }, function(err){ 59 | console.log(err); 60 | }); 61 | }, function(err){ 62 | console.log(err); 63 | }); 64 | } 65 | 66 | //utilities 67 | function createXHR(){ 68 | if(typeof XMLHttpRequest != 'undefined'){ 69 | return new XMLHttpRequest(); 70 | }else{ 71 | try{ 72 | return new ActiveXObject('Msxml2.XMLHTTP'); 73 | }catch(e){ 74 | try{ 75 | return new ActiveXObject('Microsoft.XMLHTTP'); 76 | }catch(e){} 77 | } 78 | } 79 | return null; 80 | } 81 | function xhrGet(url, callback, errback){ 82 | var xhr = new createXHR(); 83 | xhr.open("GET", url, true); 84 | xhr.onreadystatechange = function(){ 85 | if(xhr.readyState == 4){ 86 | if(xhr.status == 200){ 87 | callback(xhr.responseText); 88 | }else{ 89 | errback('service not available'); 90 | } 91 | } 92 | }; 93 | xhr.timeout = 3000; 94 | xhr.ontimeout = errback; 95 | xhr.send(); 96 | } 97 | 98 | function xhrPost(url, callback, errback){ 99 | var xhr = new createXHR(); 100 | xhr.open("POST", url, true); 101 | xhr.onreadystatechange = function(){ 102 | if(xhr.readyState == 4){ 103 | if(xhr.status == 200){ 104 | callback(xhr.responseText); 105 | 
}else{ 106 | errback('XMLHttpRequest ready state: ' + xhr.readyState + '. Service not available'); 107 | } 108 | } 109 | }; 110 | xhr.timeout = 10000; 111 | xhr.ontimeout = errback; 112 | xhr.send(); 113 | } 114 | 115 | function parseJson(str){ 116 | return window.JSON ? JSON.parse(str) : eval('(' + str + ')'); 117 | } 118 | function prettyJson(str){ 119 | // If browser does not have JSON utilities, just print the raw string value. 120 | return window.JSON ? JSON.stringify(JSON.parse(str), null, ' ') : str; 121 | } 122 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/webapp/style.css: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * (c) Copyright IBM Corp. 2016 19 | */ 20 | 21 | /* style.css 22 | * This file provides css styles. 
23 | */ 24 | 25 | body,html { 26 | background-color: #3b4b54; width : 100%; 27 | height: 100%; 28 | margin: 0 auto; 29 | font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Helvetica, Arial, "Lucida Grande", sans-serif; 30 | color: #ffffff; 31 | } 32 | 33 | .message { 34 | background-color: white; 35 | padding: 5px 5px 5px 10px; 36 | border: 3px solid #659ce2; 37 | margin: 15px; 38 | border-radius: 5px; 39 | } 40 | 41 | .brief { 42 | padding: 5px; 43 | margin: 15px; 44 | color: white; 45 | } 46 | 47 | .briefText { 48 | color: inherit; 49 | } 50 | 51 | .nav { 52 | background-color: #4d6168; 53 | } 54 | 55 | .wrapper { 56 | background-color: #4d6168; 57 | padding: 5px; 58 | margin: 15px; 59 | box-shadow: 5px 5px 5px #343434; 60 | } 61 | 62 | p { 63 | margin: 15px; 64 | } 65 | 66 | a { 67 | text-decoration: none; 68 | color: black; 69 | font-size: 0.8em; 70 | font-style: italic; 71 | } 72 | 73 | h3 { 74 | margin-left: 15px; 75 | 76 | } 77 | 78 | .newappIcon { 79 | padding-top: 10px; 80 | display: block; 81 | margin: 0 auto; 82 | padding-bottom: 10px; 83 | max-width:200px; 84 | } 85 | 86 | h1 { 87 | font-weight: bold; 88 | font-size: 2em; 89 | } 90 | 91 | small.code { 92 | font-family: "Times New Roman", monospace; 93 | /*color: #00aed1; */ 94 | color: black; 95 | } 96 | 97 | #btnPost { 98 | border: medium solid #4187be; 99 | color: white; 100 | font-family: "Helvetica"; 101 | background: #4187be none repeat scroll 0 0; 102 | padding: 4px; 103 | } 104 | 105 | #btnPost:hover { 106 | background: #73aaef; 107 | border: medium solid #73aaef; 108 | } 109 | 110 | .leftHalf { 111 | float: left; 112 | background-color: #26343f; 113 | width: 45%; 114 | height: 100%; 115 | } 116 | 117 | .rightHalf { 118 | float: right; 119 | width: 55%; 120 | background-color: #313f4a; 121 | height: 100%; 122 | overflow:auto; 123 | } 124 | 125 | .description { 126 | padding-left: 50px; 127 | padding-right: 50px; 128 | text-align: center; 129 | font-size: 1.2em; 130 | } 
131 | 132 | .blue { 133 | color: #00aed1; 134 | } 135 | 136 | 137 | table { 138 | table-layout: fixed; 139 | width: 800px; 140 | margin: 0 auto; 141 | word-wrap: break-word; 142 | padding-top:1%; 143 | } 144 | 145 | th { 146 | border-bottom: 1px solid #000; 147 | } 148 | 149 | th, td { 150 | text-align: left; 151 | padding: 2px 20px; 152 | } 153 | 154 | .env-var { 155 | text-align: right; 156 | border-right: 1px solid #000; 157 | width: 30%; 158 | } 159 | 160 | pre { 161 | padding: 0; 162 | margin: 0; 163 | } 164 | -------------------------------------------------------------------------------- /kafka-java-liberty-sample/src/main/wlp/server.xml: -------------------------------------------------------------------------------- 1 | 21 | 22 | 24 | 25 | 26 | 27 | 28 | servlet-3.1 29 | appSecurity-2.0 30 | 31 | 32 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /kafka-mirrormaker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine as builder 2 | 3 | RUN apk update 4 | RUN apk --no-cache add curl 5 | 6 | RUN curl -L "https://downloads.apache.org/kafka/3.4.1/kafka_2.12-3.4.1.tgz" -o kafka.tgz 7 | RUN mkdir /opt/kafka \ 8 | && tar -xf kafka.tgz -C /opt/kafka --strip-components=1 9 | 10 | FROM openjdk:11 11 | 12 | RUN addgroup --gid 5000 --system esgroup && \ 13 | adduser --uid 5000 --ingroup esgroup --system esuser 14 | 15 | COPY --chown=esuser:esgroup --from=builder /opt/kafka/bin/ /opt/kafka/bin/ 16 | COPY --chown=esuser:esgroup --from=builder /opt/kafka/libs/ /opt/kafka/libs/ 17 | COPY --chown=esuser:esgroup --from=builder /opt/kafka/config/ /opt/kafka/config/ 18 | RUN mkdir /opt/kafka/logs && chown esuser:esgroup /opt/kafka/logs 19 | 20 | COPY --chown=esuser:esgroup entrypoint.sh /opt/kafka 21 | 22 | WORKDIR /opt/kafka 23 | 24 | ENV TOPIC_REGEX=.* 25 | 26 | USER esuser 27 | 28 | ENTRYPOINT ["kafka-mirrormaker/entrypoint.sh"] 29 | 
-------------------------------------------------------------------------------- /kafka-mirrormaker/IKS/README.md: -------------------------------------------------------------------------------- 1 | 2 | ## Deploying `event-streams-samples/kafka-mirrormaker` to Kubernetes to replicate data between 2 Event Streams clusters 3 | 4 | These steps detail how to replicate data from a Kafka cluster (source) to another Kafka cluster (destination) using the `event-streams-samples/kafka-mirrormaker` image. 5 | 6 | ### Prerequisites 7 | 8 | - `kubectl` access to a Kubernetes cluster. 9 | - Credentials for an IBM Event Streams instance that has the following permissions: 10 | - to read/write to the topics 11 | 12 | Mirror Maker does not automatically create topics in the destination cluster. You must create these topics before starting Mirror Maker. 13 | 14 | ### Configure Mirror Maker 15 | 16 | Edit `source.properties` replacing the `` and `` placeholders with your Event Streams credentials for the source cluster. 17 | 18 | Edit `destination.properties` replacing the `` and `` placeholders with your Event Streams credentials for the destination cluster. 
19 | 20 | Create the following Kubernetes resources: 21 | 22 | ```shell 23 | kubectl create secret generic source-config --from-file=source.properties 24 | kubectl create secret generic destination-config --from-file=destination.properties 25 | kubectl create configmap tools-log4j-config --from-file=tools-log4j.properties 26 | ``` 27 | 28 | ### Run Mirror Maker in your Kubernetes cluster 29 | 30 | Deploy the `event-streams-samples/kafka-mirrormaker` Docker image: 31 | 32 | ```shell 33 | kubectl apply -f ./kafka-mirrormaker.yaml 34 | ``` 35 | -------------------------------------------------------------------------------- /kafka-mirrormaker/IKS/destination.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers= 2 | security.protocol=SASL_SSL 3 | sasl.mechanism=PLAIN 4 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="token" password=""; 5 | ssl.protocol=TLSv1.2 6 | ssl.enabled.protocols=TLSv1.2 7 | ssl.endpoint.identification.algorithm=HTTPS 8 | 9 | acks=all 10 | client.id=mirror_maker_producer 11 | -------------------------------------------------------------------------------- /kafka-mirrormaker/IKS/kafka-mirrormaker.yaml: -------------------------------------------------------------------------------- 1 | # Deployment 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: kafkamirrormaker-deploy 6 | labels: 7 | app: kafkamirrormaker 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | app: kafkamirrormaker 13 | template: 14 | metadata: 15 | namespace: default 16 | labels: 17 | app: kafkamirrormaker 18 | spec: 19 | securityContext: 20 | runAsNonRoot: true 21 | runAsUser: 5000 22 | containers: 23 | - name: kafkamirrormaker-container 24 | image: event-streams-samples/kafka-mirrormaker:latest 25 | env: 26 | - name: TOPIC_REGEX 27 | value: "mytopic" 28 | volumeMounts: 29 | - name: source-config 30 | mountPath: 
/opt/kafka/config/source.properties 31 | subPath: source.properties 32 | - name: destination-config 33 | mountPath: /opt/kafka/config/destination.properties 34 | subPath: destination.properties 35 | - name: tools-log4j 36 | mountPath: /opt/kafka/config/tools-log4j.properties 37 | subPath: tools-log4j.properties 38 | volumes: 39 | - name: source-config 40 | secret: 41 | secretName: source-config 42 | - name: destination-config 43 | secret: 44 | secretName: destination-config 45 | - name: tools-log4j 46 | configMap: 47 | name: tools-log4j-config 48 | -------------------------------------------------------------------------------- /kafka-mirrormaker/IKS/source.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers= 2 | security.protocol=SASL_SSL 3 | sasl.mechanism=PLAIN 4 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="token" password=""; 5 | ssl.protocol=TLSv1.2 6 | ssl.enabled.protocols=TLSv1.2 7 | ssl.endpoint.identification.algorithm=HTTPS 8 | 9 | exclude.internal.topics=true 10 | client.id=mirror_maker_consumer 11 | group.id=mirror_maker_consumer 12 | partition.assignment.strategy=org.apache.kafka.clients.consumer.RoundRobinAssignor 13 | -------------------------------------------------------------------------------- /kafka-mirrormaker/IKS/tools-log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=INFO, stderr 2 | 3 | log4j.appender.stderr=org.apache.log4j.ConsoleAppender 4 | log4j.appender.stderr.layout=org.apache.log4j.PatternLayout 5 | log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n 6 | log4j.appender.stderr.Target=System.err 7 | -------------------------------------------------------------------------------- /kafka-mirrormaker/README.md: -------------------------------------------------------------------------------- 1 | # kafka-mirrormaker 2 | 3 | This repository contains 
the artifacts required to build the `event-streams-samples/kafka-mirrormaker` Docker image. 4 | 5 | This image contains [Kafka Mirror Maker](http://kafka.apache.org/documentation/#basic_ops_mirror_maker) and can be used to replicate data between clusters. 6 | 7 | A prebuilt image is provided on Github Packages, you can use the following command to pull the image: 8 | 9 | ```docker pull ghcr.io/ibm-messaging/event-streams-samples/kafka-mirrormaker:latest 10 | ``` 11 | 12 | ## Running the image in Kubernetes 13 | 14 | Instructions for running the `event-streams-samples/kafka-mirrormaker` image in Kubernetes can be found [here](IKS/README.md). 15 | 16 | ## Building the image 17 | 18 | To build the image yourself, complete these steps: 19 | 20 | 1. Build the docker image: 21 | ```shell 22 | docker build . 23 | ``` 24 | If you want to use the sample [YAML file](IKS/kafka-mirrormaker.yaml), ensure that you update the image name with your own image name. 25 | -------------------------------------------------------------------------------- /kafka-mirrormaker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ./bin/kafka-mirror-maker.sh \ 4 | --consumer.config config/source.properties \ 5 | --producer.config config/destination.properties \ 6 | --whitelist=${TOPIC_REGEX} 7 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/.cfignore: -------------------------------------------------------------------------------- 1 | node_modules/* 2 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/.gitignore: -------------------------------------------------------------------------------- 1 | /.project 2 | *.DS_Store 3 | /node_modules/* 4 | !node_modules/node-rdkafka-prebuilt/ 5 | 6 | /.settings/ 7 | /manifest-* -------------------------------------------------------------------------------- 
/kafka-nodejs-console-sample/Dockerfile: -------------------------------------------------------------------------------- 1 | # Dockerfile to run the sample under current Node LTS 2 | # 3 | # docker build . -t node-rdkafka 4 | # docker run --rm -it -e VCAP_SERVICES=${VCAP_SERVICES} node-rdkafka 5 | # OR 6 | # docker run --rm -it node-rdkafka /etc/ssl/certs 7 | # 8 | FROM ubuntu:18.04 9 | 10 | RUN apt-get update -qqy \ 11 | && apt-get install -y --no-install-recommends \ 12 | build-essential \ 13 | node-gyp \ 14 | nodejs-dev \ 15 | libssl1.0-dev \ 16 | liblz4-dev \ 17 | libpthread-stubs0-dev \ 18 | libsasl2-dev \ 19 | libsasl2-modules \ 20 | make \ 21 | python \ 22 | nodejs npm ca-certificates \ 23 | && rm -rf /var/cache/apt/* /var/lib/apt/lists/* 24 | 25 | WORKDIR /usr/src/app 26 | 27 | COPY *.js *.json *.md /usr/src/app/ 28 | 29 | RUN npm install -d 30 | ENV LD_LIBRARY_PATH=/usr/src/app/node_modules/node-rdkafka/build/deps 31 | ENTRYPOINT [ "node", "app.js" ] 32 | CMD [ "" ] 33 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/README.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Kafka Node.js console sample application 2 | This Node.js console application demonstrates how to connect to [IBM Event Streams for IBM Cloud](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started), send and receive messages using the [node-rdkafka](https://github.com/Blizzard/node-rdkafka) module. It also shows how to create topics using the Kafka Admin API as exposed by `node-rdkafka`. 3 | 4 | __Important Note__: This sample creates a topic with one partition on your behalf. On the Standard plan, this will incur a fee if the topic does not already exist. 
5 | 6 | ## Running the application 7 | 8 | The application can be run in the following environments: 9 | 10 | * [IBM Cloud Kubernetes Service](./docs/Kubernetes_Service.md) 11 | * [IBM Cloud Foundry](./docs/Cloud_Foundry.md) 12 | * [Docker Local](./docs/Docker_Local.md) 13 | * [Local Development](./docs/Local.md) 14 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/consumerLoop.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2018 IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * © Copyright IBM Corp. 
2015-2018 19 | */ 20 | 21 | var consumer; 22 | var consumerLoop; 23 | 24 | var exports = module.exports = {}; 25 | exports.consumerLoop = consumerLoop; 26 | 27 | /** 28 | * Constructs a KafkaConsumer and registers listeners on the most common events 29 | * 30 | * @param {object} Kafka - an instance of the node-rdkafka module 31 | * @param {object} consumer_opts - consumer configuration 32 | * @param {string} topicName - name of the topic to consumer from 33 | * @param {function} shutdown - shutdown function 34 | * @return {KafkaConsumer} - the KafkaConsumer instance 35 | */ 36 | exports.buildConsumer = function(Kafka, consumer_opts, topicName, shutdown) { 37 | var topicOpts = { 38 | 'auto.offset.reset': 'latest' 39 | }; 40 | 41 | consumer = new Kafka.KafkaConsumer(consumer_opts, topicOpts); 42 | 43 | // Register listener for debug information; only invoked if debug option set in driver_options 44 | consumer.on('event.log', function(log) { 45 | console.log(log); 46 | }); 47 | 48 | // Register error listener 49 | consumer.on('event.error', function(err) { 50 | console.error('Error from consumer:' + JSON.stringify(err)); 51 | }); 52 | 53 | var consumedMessages = [] 54 | // Register callback to be invoked when consumer has connected 55 | consumer.on('ready', function() { 56 | console.log('The consumer has connected.'); 57 | 58 | // request metadata for one topic 59 | consumer.getMetadata({ 60 | topic: topicName, 61 | timeout: 10000 62 | }, 63 | function(err, metadata) { 64 | if (err) { 65 | console.error('Error getting metadata: ' + JSON.stringify(err)); 66 | shutdown(-1); 67 | } else { 68 | console.log('Consumer obtained metadata: ' + JSON.stringify(metadata)); 69 | if (metadata.topics[0].partitions.length === 0) { 70 | console.error('ERROR - Topic ' + topicName + ' does not exist. 
Exiting'); 71 | shutdown(-1); 72 | } 73 | } 74 | }); 75 | 76 | consumer.subscribe([topicName]); 77 | 78 | consumerLoop = setInterval(function () { 79 | if (consumer.isConnected()) { 80 | // The consume(num, cb) method can take a callback to process messages. 81 | // In this sample code we use the ".on('data')" event listener instead, 82 | // for illustrative purposes. 83 | consumer.consume(10); 84 | } 85 | 86 | if (consumedMessages.length === 0) { 87 | console.log('No messages consumed'); 88 | } else { 89 | for (var i = 0; i < consumedMessages.length; i++) { 90 | var m = consumedMessages[i]; 91 | console.log('Message consumed: topic=' + m.topic + ', partition=' + m.partition + ', offset=' + m.offset + ', key=' + m.key + ', value=' + m.value.toString()); 92 | } 93 | consumedMessages = []; 94 | } 95 | }, 2000); 96 | }); 97 | 98 | // Register a listener to process received messages 99 | consumer.on('data', function(m) { 100 | consumedMessages.push(m); 101 | }); 102 | return consumer; 103 | } 104 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/docs/CF_Standard_Enterprise_Plan.md: -------------------------------------------------------------------------------- 1 | # IBM Cloud Foundry deployment to an Standard/Enterprise Plan Event Streams for IBM Cloud 2 | 3 | ## Overview 4 | 5 | To deploy and run the sample: 6 | * Create a Cloud Foundry Service Alias for your Standard/Enterprise Service 7 | * Setup your `manifest.yml` with your service details 8 | * Use `ibmcloud cf push --no-start` to deploy the app to IBM Cloud Foundry 9 | * Re-configure binding with Manager role 10 | * Start the app 11 | * Inspect the application's logs 12 | 13 | ## Set up a Cloud Foundry Service Alias 14 | Before continuing, connect to IBM Cloud with the [IBM Cloud command line interface](https://cloud.ibm.com/docs/cli?topic=cloud-cli-ibmcloud-cli). 15 | 16 | The Standard/Enterprise plan is IAM enabled. 
Therefore the following extra step is required to create a Cloud Foundry alias for your Service: 17 | 18 | Create a Cloud Foundry alias for your service's associated CRN: 19 | ```shell 20 | ibmcloud resource service-alias-create --instance-name 21 | ``` 22 | 23 | Having created this alias associated your Service with a Cloud Foundry Organization and Space, thereby enabling your Cloud Foundry application to referrence it and connect to it. 24 | 25 | ## Setup the manifest.yml 26 | 27 | 1. Select the Event Streams for IBM Cloud service you would like to bind your application to. Do this by replacing `` with your service instance alias name in `manifest.yml`: 28 | ```yaml 29 | services: 30 | - "" 31 | ``` 32 | 2. Consider your domain: You might need to change this in the `manifest.yml` as the domain varies by IBM Cloud region. If unsure, just delete the domain line and IBM Cloud will pick the domain for you. 33 | 34 | ## Deploy the Application 35 | 36 | Push the app without starting it immediately by running the following command in the same directory as the `manifest.yml` file: 37 | ```shell 38 | ibmcloud app push --no-start 39 | ``` 40 | 41 | ## Re-configure the binding 42 | A binding between your app and service-alias is created for you automatically, but by default does not have permissions to create topics. This means that we need to delete the existing binding and create a new one with the correct role: 43 | 44 | ``` 45 | ibmcloud resource service-binding-delete kafka-nodejs-console-sample 46 | ibmcloud resource service-binding-create kafka-nodejs-console-sample Manager 47 | ``` 48 | 49 | ## Start the app 50 | Now it should be safe to start the application: 51 | ```shell 52 | ibmcloud app start kafka-nodejs-console-sample 53 | ``` 54 | 55 | ## Produce and Consume Messages 56 | The sample application should have created the default sample topic and started producing and consuming messages in an infinite loop. 
View the logs to verify this: 57 | ```shell 58 | ibmcloud app logs kafka-nodejs-console-sample 59 | ``` 60 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/docs/Cloud_Foundry.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in IBM Cloud Foundry 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have the done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cloud-cli-install-ibmcloud-cli) 9 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 10 | 11 | ## Standard/Enterprise Plan? 12 | 13 | **It's important to know which Event Streams for IBM Cloud plan you're using as the sample deployment steps are subtly different on each plan respectively.** 14 | 15 | By this point, you should have an Event Streams for IBM Cloud instance provisioned. If you haven't done this step yet, please refer to the main [readme](../README.md). 16 | 17 | If you are not sure what type of Event Streams for IBM Cloud instance you have then you can find this information out by visiting IBM Cloud's web console [dashboard](https://cloud.ibm.com/resources). 18 | 19 | *Please make sure you are in the appropriate Region, Account, Organization and Space where you provisioned your Event Streams instance!* 20 | 21 | * Event Streams for IBM Cloud Standard plan services are "Services" with the plan column showing "Standard". 22 | * Event Streams for IBM Cloud Enterprise plan services are "Services" with the plan column showing "Enterprise". 
23 | 24 | 25 | ## Deploy the Application 26 | 27 | The deployment for the Standard/Enterprise plan can be found in the link listed below 28 | 29 | ### [Classic Plan Deployment Guide](CF_Classic_Plan.md) 30 | 31 | ### [Standard/Enterprise Plan Deployment Guide](CF_Standard_Enterprise_Plan.md) 32 | 33 | 34 | ## Further references 35 | 36 | If you want find out more about Cloud Foundry applications then check the following documents: 37 | 38 | [Cloud Foundry manifest documentation](http://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html) 39 | 40 | 41 | 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/docs/Docker_Local.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in Docker Locally 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have the done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 9 | * Install [Docker](https://docs.docker.com/install/) 10 | 11 | ## Run the Application 12 | 13 | 1. Build the container image from the `Dockerfile`: 14 | ```shell 15 | docker build -t nodejs-console-sample . 16 | ``` 17 | 18 | 2. Export the Event Streams for IBM Cloud instance credentials: 19 | 20 | From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content and export it as below: 21 | ```shell 22 | export VCAP_SERVICES='{ 23 | "instance_id": "...", 24 | "api_key": "...", 25 | "kafka_admin_url": "....", 26 | "kafka_rest_url": "...", 27 | "kafka_brokers_sasl": [ 28 | ... 29 | ], 30 | "user": "...", 31 | "password": "..." 32 | }' 33 | ``` 34 | 35 | 3. 
Run the container image 36 | ```shell 37 | docker run -e VCAP_SERVICES="$VCAP_SERVICES" nodejs-console-sample 38 | ``` 39 | 40 | ## Further references 41 | 42 | If you want find out more about Docker then check the following document: 43 | 44 | [Docker documentation](https://docs.docker.com/install/overview/) 45 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/docs/Kubernetes_Service.md: -------------------------------------------------------------------------------- 1 | # Running in IBM Cloud Kubernetes Service 2 | 3 | ## Prerequisites 4 | To build and run the sample, you must have the done the following: 5 | 6 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 7 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cloud-cli-install-ibmcloud-cli) 8 | * Install the [Kubernetes CLI](https://kubernetes.io/docs/tasks/tools/install-kubectl/) 9 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 10 | * Provision a [Kubernetes Service instance](https://cloud.ibm.com/kubernetes/catalog/cluster) in [IBM Cloud®](https://cloud.ibm.com/) 11 | 12 | 13 | ## Deploy the Application 14 | 15 | 1. From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content, create a file `credentials.json` and paste the content. 16 | 2. To deploy the application you first need to bind the Event Streams for IBM Cloud service instance to the cluster. Create a secret using the content from the file `credentials.json` 17 | ```shell 18 | kubectl create secret generic eventstreams-binding --from-file=binding=credentials.json 19 | ``` 20 | The command above creates a secret in your cluster named `eventstreams-binding`. 21 | 3. 
[Configure the CLI to run kubectl](https://cloud.ibm.com/docs/containers?topic=containers-cs_cli_install#cs_cli_configure) 22 | 23 | 4. Deploy the application in the cluster: 24 | ```shell 25 | kubectl apply -f kafka-nodejs-console-sample.yaml 26 | ``` 27 | 5. Access the application logs: 28 | ```shell 29 | kubectl wait pod kafka-nodejs-console-sample --for=condition=Ready 30 | kubectl logs kafka-nodejs-console-sample --follow 31 | ``` 32 | 33 | ## Further references 34 | 35 | If you want find out more about IBM Cloud Kubernetes Service or Kubernetes then check the following documents: 36 | 37 | [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/container-service) 38 | 39 | [Kubernetes Documentation](https://kubernetes.io/docs/home/) 40 | 41 | 42 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/docs/Local.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Kafka Node.js console sample application: Local Development guide 2 | As pushing the application into IBM Cloud® does not require you to build the application locally, this guide is here to guide you through the process, should you wish to build the application locally. 3 | 4 | We will not discuss establishing a connection from your laptop to Event Streams for IBM Cloud. This is described in the [ connection guide](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-connecting#connecting). 5 | 6 | ## Prerequisites 7 | * [Node.js](https://nodejs.org/en/) 8.X LTS 8 | * [node-gyp] (https://www.npmjs.com/package/node-gyp) 9 | 10 | Node-rdkafka will build librdkafka automatically. You must ensure you have the dependencies listed below installed. For more details, see [librdakfka's instructions](../../docs/librdkafka.md). 
11 | 12 | ##### Linux 13 | * openssl-dev 14 | * libsasl2-dev 15 | * libsasl2-modules 16 | * C++ toolchain 17 | 18 | ##### macOS 19 | * [Brew](http://brew.sh/) 20 | * [Apple Xcode command line tools](https://developer.apple.com/xcode/) 21 | * `openssl` via Brew 22 | * Export `CPPFLAGS=-I/usr/local/opt/openssl/include` and `LDFLAGS=-L/usr/local/opt/openssl/lib` 23 | 24 | ## Installing dependencies 25 | Run the following commands on your local machine, after the prerequisites for your environment have been completed: 26 | ```shell 27 | npm install 28 | ``` 29 | 30 | ## Running the Sample 31 | Once built, to run the sample, execute the following command: 32 | ```shell 33 | node app.js 34 | ``` 35 | 36 | To find the values for `` and ``, access your Event Streams instance in IBM Cloud®, go to the `Service Credentials` tab and select the `Credentials` you want to use. If your user value is `token`, specify that with the password separated by a `:`. 37 | 38 | `` is the path where the trusted SSL certificates are stored on your machine and is therefore system dependent. 39 | For example: 40 | * Ubuntu: /etc/ssl/certs 41 | * RedHat: /etc/pki/tls/cert.pem 42 | * macOS: /usr/local/etc/openssl/cert.pem or /usr/local/etc/openssl@1.1/cert.pem from openssl installed by brew 43 | 44 | __Note__: `` must be a single string enclosed in quotes. For example: `"host1:port1,host2:port2"`. We recommend using all the Kafka hosts listed in the `Credentials` you selected. 45 | 46 | Alternatively, you can run only the producer or only the consumer by respectively appending the switches `-producer` or `-consumer` to the command above. 47 | 48 | The sample will run indefinitely until interrupted. To stop the process, use `Ctrl+C`. 
49 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/kafka-nodejs-console-sample.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: kafka-nodejs-console-sample 5 | namespace: default 6 | spec: 7 | containers: 8 | - image: event-streams-samples/kafka-nodejs-console-sample:latest 9 | imagePullPolicy: IfNotPresent 10 | name: nodejs-sample 11 | env: 12 | - name: VCAP_SERVICES 13 | valueFrom: 14 | secretKeyRef: 15 | name: eventstreams-binding 16 | key: binding -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/manifest.yml: -------------------------------------------------------------------------------- 1 | applications: 2 | - name: kafka-nodejs-console-sample 3 | buildpack: sdk-for-nodejs 4 | stack: cflinuxfs3 5 | instances: 1 6 | memory: 256M 7 | disk_quota: 512M 8 | path: . 9 | no-route: true 10 | health-check-type: none 11 | services: 12 | - "" 13 | env: 14 | LD_LIBRARY_PATH: /home/vcap/app/node_modules/node-rdkafka/build/deps/ 15 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "event-streams-nodejs-console-sample", 3 | "version": "3.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "bindings": { 8 | "version": "1.5.0", 9 | "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", 10 | "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", 11 | "requires": { 12 | "file-uri-to-path": "1.0.0" 13 | } 14 | }, 15 | "file-uri-to-path": { 16 | "version": "1.0.0", 17 | "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", 18 | "integrity": 
"sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" 19 | }, 20 | "nan": { 21 | "version": "2.14.0", 22 | "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", 23 | "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" 24 | }, 25 | "node-rdkafka": { 26 | "version": "2.7.0", 27 | "resolved": "https://registry.npmjs.org/node-rdkafka/-/node-rdkafka-2.7.0.tgz", 28 | "integrity": "sha512-pSO60jT0AC0eEzXlRxUoNpEp7nCdtglHMBJ2r5lP0y+TqXDAUrV1it1ZtXfi4yeKShksLdRfMI/jeLXWXMMpLA==", 29 | "requires": { 30 | "bindings": "^1.3.1", 31 | "nan": "^2.11.1" 32 | } 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "event-streams-nodejs-console-sample", 3 | "version": "3.0.0", 4 | "description": "Nodejs sample that uses node-rdkafka to interact with Event Streams", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.com/ibm-messaging/event-streams-samples" 8 | }, 9 | "scripts": { 10 | "start": "node ./app.js" 11 | }, 12 | "author": "IBM", 13 | "license": "Apache-2.0", 14 | "maintainers": [ 15 | { 16 | "name": "mimaison", 17 | "email": "mickael.maison@uk.ibm.com" 18 | }, 19 | { 20 | "name": "edoardocomar", 21 | "email": "ecomar@uk.ibm.com" 22 | } 23 | ], 24 | "keywords": [ 25 | "ibm", 26 | "message", 27 | "hub", 28 | "eventstreams", 29 | "kafka", 30 | "apache", 31 | "bluemix", 32 | "cloud", 33 | "node-rdkafka" 34 | ], 35 | "dependencies": { 36 | "node-rdkafka": "2.7.4" 37 | }, 38 | "engines": { 39 | "node": "10" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /kafka-nodejs-console-sample/producerLoop.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2015-2018 
IBM 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /** 17 | * Licensed Materials - Property of IBM 18 | * © Copyright IBM Corp. 2015-2018 19 | */ 20 | 21 | var producer; 22 | var exports = module.exports = {}; 23 | 24 | /** 25 | * Constructs a Kafka Producer and registers listeners on the most common events 26 | * 27 | * @param {object} Kafka - an instance of the node-rdkafka module 28 | * @param {object} producer_opts - producer configuration 29 | * @param {string} topicName - name of the topic to produce to 30 | * @param {function} shutdown - shutdown function 31 | * @return {Producer} - the Kafka Producer instance 32 | */ 33 | exports.buildProducer = function(Kafka, producer_opts, topicName, shutdown) { 34 | // Create Kafka producer 35 | var topicOpts = { 36 | 'request.required.acks': -1, 37 | 'produce.offset.report': true 38 | }; 39 | producer = new Kafka.Producer(producer_opts, topicOpts); 40 | producer.setPollInterval(100); 41 | 42 | // Register listener for debug information; only invoked if debug option set in driver_options 43 | producer.on('event.log', function(log) { 44 | console.log(log); 45 | }); 46 | 47 | // Register error listener 48 | producer.on('event.error', function(err) { 49 | console.error('Error from producer:' + JSON.stringify(err)); 50 | }); 51 | 52 | // Register delivery report listener 53 | producer.on('delivery-report', function(err, dr) { 54 | if (err) { 55 | console.error('Delivery 
report: Failed sending message ' + dr.value); 56 | console.error(err); 57 | // We could retry sending the message 58 | } else { 59 | console.log('Message produced, partition: ' + dr.partition + ' offset: ' + dr.offset); 60 | } 61 | }); 62 | 63 | function sendMessages(counter, topic, partition) { 64 | var message = new Buffer('This is a test message #' + counter); 65 | var key = 'Key' + counter; 66 | // Short sleep for flow control in this sample app 67 | // to make the output easily understandable 68 | var timeout = 2000; 69 | try { 70 | producer.produce(topic, partition, message, key); 71 | counter++; 72 | } catch (err) { 73 | console.error('Failed sending message ' + message); 74 | console.error(err); 75 | timeout = 5000; // Longer wait before retrying 76 | } 77 | setTimeout(function () { 78 | if (producer.isConnected()) { 79 | sendMessages(counter, topic, partition); 80 | } 81 | }, timeout); 82 | } 83 | 84 | // Register callback invoked when producer has connected 85 | producer.on('ready', function() { 86 | console.log('The producer has connected.'); 87 | 88 | // request metadata for all topics 89 | producer.getMetadata({ 90 | timeout: 10000 91 | }, 92 | function(err, metadata) { 93 | if (err) { 94 | console.error('Error getting metadata: ' + JSON.stringify(err)); 95 | shutdown(-1); 96 | } else { 97 | console.log('Producer obtained metadata: ' + JSON.stringify(metadata)); 98 | var topicsByName = metadata.topics.filter(function(t) { 99 | return t.name === topicName; 100 | }); 101 | if (topicsByName.length === 0) { 102 | console.error('ERROR - Topic ' + topicName + ' does not exist. 
Exiting'); 103 | shutdown(-1); 104 | } 105 | } 106 | }); 107 | var counter = 0; 108 | 109 | // Start sending messages 110 | sendMessages(counter, topicName, null); 111 | }); 112 | return producer; 113 | } 114 | -------------------------------------------------------------------------------- /kafka-python-console-sample/Dockerfile: -------------------------------------------------------------------------------- 1 | # Dockerfile to run the sample under current Python 3 2 | # 3 | # docker build -t python-rdkafka 4 | # docker run --rm -it -e VCAP_SERVICES=${VCAP_SERVICES} python-rdkafka 5 | # OR 6 | # docker run --rm -it python-rdkafka 7 | # 8 | FROM python:3.6-stretch 9 | 10 | WORKDIR /usr/src/app 11 | 12 | COPY . /usr/src/app 13 | 14 | RUN pip install -r requirements.txt 15 | 16 | ENTRYPOINT [ "python3", "-u", "app.py" ] 17 | CMD [ "" ] 18 | -------------------------------------------------------------------------------- /kafka-python-console-sample/Procfile: -------------------------------------------------------------------------------- 1 | web: python app.py 2 | -------------------------------------------------------------------------------- /kafka-python-console-sample/README.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Kafka Python console sample application 2 | This Python console application demonstrates how to connect to [IBM Event Streams for IBM Cloud](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started), send and receive messages using the [confluent-kafka-python](https://github.com/confluentinc/confluent-kafka-python) library. It also shows how to create and list topics using the Event Streams for IBM Cloud Admin REST API. 3 | 4 | __Important Note__: This sample creates a topic with one partition on your behalf. On the Standard plan, this will incur a fee if the topic does not already exist. 
5 | 6 | ## Running the application 7 | 8 | The application can be run in the following environments: 9 | 10 | * [IBM Cloud Kubernetes Service](./docs/Kubernetes_Service.md) 11 | * [IBM Cloud Foundry](./docs/Cloud_Foundry.md) 12 | * [Docker Local](./docs/Docker_Local.md) 13 | * [Local Development](./docs/Local.md) -------------------------------------------------------------------------------- /kafka-python-console-sample/app.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2015-2018 IBM 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | 16 | Licensed Materials - Property of IBM 17 | © Copyright IBM Corp. 
2015-2018
"""
import asyncio
import json
import os
import signal
import sys

import consumertask
import producertask
import rest

class EventStreamsSample(object):
    """Console sample that produces to and consumes from an Event Streams
    (Kafka) topic, creating the topic first via the Admin REST API.

    Credentials are taken from the VCAP_SERVICES environment variable when
    present (IBM Cloud / Docker), otherwise from command line arguments.
    """

    def __init__(self, args):
        # args: typically sys.argv; only consulted when VCAP_SERVICES is absent.
        self.topic_name = 'kafka-python-console-sample-topic'
        self.opts = {}
        self.run_consumer = True
        self.run_producer = True
        self.consumer = None
        self.producer = None

        if os.environ.get('VCAP_SERVICES'):
            print('Using VCAP_SERVICES to find credentials.')
            vcap_services = json.loads(os.environ.get('VCAP_SERVICES'))

            # A top-level 'instance_id' means the JSON is a raw service key;
            # otherwise it is a Cloud Foundry binding, so search the bound
            # services for a 'messagehub'-prefixed entry.
            if 'instance_id' in vcap_services:
                self.opts['brokers'] = ','.join(vcap_services['kafka_brokers_sasl'])
                self.opts['api_key'] = vcap_services['api_key']
                self.opts['rest_endpoint'] = vcap_services['kafka_admin_url']
            else:
                for vcap_service in vcap_services:
                    if vcap_service.startswith('messagehub'):
                        eventstreams_service = vcap_services[vcap_service][0]
                        self.opts['brokers'] = ','.join(eventstreams_service['credentials']['kafka_brokers_sasl'])
                        self.opts['api_key'] = eventstreams_service['credentials']['api_key']
                        self.opts['rest_endpoint'] = eventstreams_service['credentials']['kafka_admin_url']
            # Default CA certificate location used in IBM Cloud / Ubuntu images.
            self.opts['ca_location'] = '/etc/ssl/certs'
        else:
            # Running locally on development machine
            print('Using command line arguments to find credentials.')

            if len(args) < 5:
                print('ERROR: It appears the application is running without VCAP_SERVICES but the arguments are incorrect for local mode.')
                print('\nUsage:\npython {0} {{ | : }} [ -consumer | -producer ]\n'.format(args[0]))
                sys.exit(-1)

            self.opts['brokers'] = args[1]
            self.opts['rest_endpoint'] = args[2]
            # Credentials may be given as 'user:password'; only the password
            # (API key) part is kept in that case.
            if ":" in args[3]:
                credentials_list = args[3].split(":")
                self.opts['api_key'] = credentials_list[1]
            else:
                self.opts['api_key'] = args[3]

            # IBM Cloud/Ubuntu: '/etc/ssl/certs'
            # Red Hat: '/etc/pki/tls/cert.pem',
            # Mac OS X: select System root certificates from Keychain Access and export as .pem on the filesystem
            self.opts['ca_location'] = args[4]
            if not os.path.exists(self.opts['ca_location']):
                print('Error - Failed to access : {0}'.format(self.opts['ca_location']))
                sys.exit(-1)

            # In local mode the app can run only the producer or only the consumer
            if len(args) == 6:
                if args[5] == '-consumer':
                    self.run_producer = False
                if args[5] == '-producer':
                    self.run_consumer = False

        print('Kafka Endpoints: {0}'.format(self.opts['brokers']))
        print('Admin REST Endpoint: {0}'.format(self.opts['rest_endpoint']))

        # Fail fast if any required option is still missing after both paths.
        if any(k not in self.opts for k in ('brokers', 'ca_location', 'rest_endpoint', 'api_key')):
            print('Error - Failed to retrieve options. Check that app is bound to an Event Streams service or that command line options are correct.')
            sys.exit(-1)

        # Use Event Streams' REST admin API to create the topic
        # with 1 partition and a retention period of 24 hours.
        rest_client = rest.EventStreamsRest(self.opts['rest_endpoint'], self.opts['api_key'])
        print('Creating the topic {0} with Admin REST API'.format(self.topic_name))
        response = rest_client.create_topic(self.topic_name, 1, 24)
        print(response.text)

        # Use Event Streams' REST admin API to list the existing topics
        print('Admin REST Listing Topics:')
        response = rest_client.list_topics()
        print(response.text)

    def shutdown(self, signal, frame):
        """Signal handler (SIGINT/SIGTERM): ask the running tasks to stop.

        NOTE(review): assumes the tasks were created before a signal arrives;
        a very early signal would find self.consumer/self.producer still None.
        """
        print('Shutdown received.')
        if self.run_consumer:
            self.consumer.stop()
        if self.run_producer:
            self.producer.stop()

    @asyncio.coroutine
    def run_tasks(self):
        """Build client configurations, start the producer and/or consumer
        tasks, and wait for them; exits the process with status 0 when done.
        """
        # Options shared by both the consumer and the producer clients.
        driver_options = {
            'bootstrap.servers': self.opts['brokers'],
            'security.protocol': 'SASL_SSL',
            'ssl.ca.location': self.opts['ca_location'],
            'sasl.mechanisms': 'PLAIN',
            'sasl.username': 'token',
            'sasl.password': self.opts['api_key'],
            'api.version.request': True,
            'broker.version.fallback': '0.10.2.1',
            'log.connection.close' : False
        }
        consumer_opts = {
            'client.id': 'kafka-python-console-sample-consumer',
            'group.id': 'kafka-python-console-sample-group'
        }
        producer_opts = {
            'client.id': 'kafka-python-console-sample-producer',
        }

        # Add the common options to consumer and producer
        for key in driver_options:
            consumer_opts[key] = driver_options[key]
            producer_opts[key] = driver_options[key]

        tasks = []
        # Start the clients
        if self.run_producer:
            self.producer = producertask.ProducerTask(producer_opts, self.topic_name)
            tasks.append(asyncio.ensure_future(self.producer.run()))

        if self.run_consumer:
            self.consumer = consumertask.ConsumerTask(consumer_opts, self.topic_name)
            tasks.append(asyncio.ensure_future(self.consumer.run()))

        done, pending = yield from asyncio.wait(tasks)
        # Surface any exception raised inside a task.
        for future in done | pending:
            future.result()
        sys.exit(0)

if __name__ == "__main__":
    app = EventStreamsSample(sys.argv)
    # Stop the tasks cleanly on Ctrl+C or a platform-issued termination.
    signal.signal(signal.SIGINT, app.shutdown)
    signal.signal(signal.SIGTERM, app.shutdown)
    print('This sample app will run until interrupted.')
    sys.exit(asyncio.get_event_loop().run_until_complete(app.run_tasks()))
class ConsumerTask(object):
    """Asynchronous task that polls Kafka for messages and prints each one.

    Runs as a generator-based asyncio coroutine and keeps polling until
    stop() is called, after which it unsubscribes and closes the consumer.
    """

    def __init__(self, conf, topic_name):
        # Consumer is provided by confluent_kafka (module-level import).
        self.consumer = Consumer(conf)
        self.topic_name = topic_name
        # Flag checked by run(); cleared by stop() to end the polling loop.
        self.running = True

    def stop(self):
        """Request the polling loop in run() to terminate."""
        self.running = False

    @asyncio.coroutine
    def run(self):
        """Subscribe to the topic and poll for messages until stopped."""
        print('The consumer has started')
        self.consumer.subscribe([self.topic_name])
        while self.running:
            record = self.consumer.poll(1)
            got_message = record is not None and record.error() is None
            if got_message:
                print('Message consumed: topic={0}, partition={1}, offset={2}, key={3}, value={4}'.format(
                    record.topic(),
                    record.partition(),
                    record.offset(),
                    record.key().decode('utf-8'),
                    record.value().decode('utf-8')))
            else:
                print('No messages consumed')
            # Yield control to other tasks between polls.
            yield from asyncio.sleep(2)
        # Loop finished: release the subscription and close the client.
        self.consumer.unsubscribe()
        self.consumer.close()
Therefore the following extra step is required to create a Cloud Foundry alias for your Service: 17 | 18 | Create a Cloud Foundry alias for your service's associated CRN: 19 | ```shell 20 | ibmcloud resource service-alias-create --instance-name 21 | ``` 22 | 23 | Having created this alias associated your Service with a Cloud Foundry Organization and Space, thereby enabling your Cloud Foundry application to referrence it and connect to it. 24 | 25 | ## Setup the manifest.yml 26 | 27 | 1. Select the Event Streams for IBM Cloud service you would like to bind your application to. Do this by replacing `` with your service instance alias name in `manifest.yml`: 28 | ```yaml 29 | services: 30 | - "" 31 | ``` 32 | 2. Consider your domain: You might need to change this in the `manifest.yml` as the domain varies by IBM Cloud region. If unsure, just delete the domain line and IBM Cloud will pick the domain for you. 33 | 34 | ## Deploy the Application 35 | 36 | Push the app without starting it immediately by running the following command in the same directory as the `manifest.yml` file: 37 | ```shell 38 | ibmcloud app push --no-start 39 | ``` 40 | 41 | ## Re-configure the binding 42 | A binding between your app and service-alias is created for you automatically, but by default does not have permissions to create topics. This means that we need to delete the existing binding and create a new one with the correct role: 43 | 44 | ``` 45 | ibmcloud resource service-binding-delete kafka-python-console-sample 46 | ibmcloud resource service-binding-create kafka-python-console-sample Manager 47 | ``` 48 | 49 | ## Start the app 50 | Now it should be safe to start the application: 51 | ```shell 52 | ibmcloud app start kafka-python-console-sample 53 | ``` 54 | 55 | ## Produce and Consume Messages 56 | The sample application should have created the default sample topic and started producing and consuming messages in an infinite loop. 
View the logs to verify this: 57 | ```shell 58 | ibmcloud app logs kafka-python-console-sample 59 | ``` 60 | -------------------------------------------------------------------------------- /kafka-python-console-sample/docs/Cloud_Foundry.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in IBM Cloud Foundry 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have the done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cloud-cli-install-ibmcloud-cli) 9 | * Provision a [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 10 | 11 | ## Standard/Enterprise Plan? 12 | 13 | **It's important to know which Event Streams for IBM Cloud plan you're using as the sample deployment steps are subtly different on each plan respectively.** 14 | 15 | By this point, you should have an Event Streams for IBM Cloud instance provisioned. If you haven't done this step yet, please refer to the main [readme](../README.md). 16 | 17 | If you are not sure what type of Event Streams for IBM Cloud instance you have then you can find this information out by visiting IBM Cloud's web console [dashboard](https://cloud.ibm.com/resources). 18 | 19 | *Please make sure you are in the appropriate Region, Account, Organization and Space where you provisioned your Event Streams instance!* 20 | 21 | * Event Streams for IBM Cloud Standard plan services are "Services" with the plan column showing "Standard". 22 | * Event Streams for IBM Cloud Enterprise plan services are "Services" with the plan column showing "Enterprise". 
23 | 24 | 25 | ## Deploy the Application 26 | 27 | The deployment for the Standard/Enterprise plan can be found in the link listed below 28 | 29 | ### [Classic Plan Deployment Guide](CF_Classic_Plan.md) 30 | 31 | ### [Standard/Enterprise Plan Deployment Guide](CF_Standard_Enterprise_Plan.md) 32 | 33 | 34 | ## Further references 35 | 36 | If you want find out more about Cloud Foundry applications then check the following documents: 37 | 38 | [Cloud Foundry manifest documentation](http://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html) 39 | 40 | 41 | 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /kafka-python-console-sample/docs/Docker_Local.md: -------------------------------------------------------------------------------- 1 | 2 | # Running in Docker Locally 3 | 4 | ## Prerequisites 5 | To build and run the sample, you must have the done the following: 6 | 7 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 8 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 9 | * Install [Docker](https://docs.docker.com/install/) 10 | 11 | ## Run the Application 12 | 13 | 1. Build the container image from the `Dockerfile`: 14 | ```shell 15 | docker build -t python-console-sample . 16 | ``` 17 | 18 | 2. Export the Event Streams for IBM Cloud instance credentials: 19 | 20 | From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content and export it as below: 21 | ```shell 22 | export VCAP_SERVICES='{ 23 | "instance_id": "...", 24 | "api_key": "...", 25 | "kafka_admin_url": "....", 26 | "kafka_rest_url": "...", 27 | "kafka_brokers_sasl": [ 28 | ... 29 | ], 30 | "user": "...", 31 | "password": "..." 32 | }' 33 | ``` 34 | 35 | 3. 
Run the container image 36 | ```shell 37 | docker run -e VCAP_SERVICES="$VCAP_SERVICES" python-console-sample 38 | ``` 39 | 40 | ## Further references 41 | 42 | If you want to find out more about Docker then check the following document: 43 | 44 | [Docker documentation](https://docs.docker.com/install/overview/) 45 | -------------------------------------------------------------------------------- /kafka-python-console-sample/docs/Kubernetes_Service.md: -------------------------------------------------------------------------------- 1 | # Running in IBM Cloud Kubernetes Service 2 | 3 | ## Prerequisites 4 | To build and run the sample, you must have done the following: 5 | 6 | * Obtain this repository's contents, either use `git` or just download the samples as a ZIP 7 | * Install the [IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cloud-cli-install-ibmcloud-cli) 8 | * Install the [Kubernetes CLI](https://kubernetes.io/docs/tasks/tools/install-kubectl/) 9 | * Provision an [Event Streams Service Instance](https://cloud.ibm.com/catalog/services/event-streams) in [IBM Cloud®](https://cloud.ibm.com/) 10 | * Provision a [Kubernetes Service instance](https://cloud.ibm.com/kubernetes/catalog/cluster) in [IBM Cloud®](https://cloud.ibm.com/) 11 | 12 | 13 | ## Deploy the Application 14 | 15 | 1. From the Event Streams for IBM Cloud instance dashboard, click `Service Credentials` and select or create a new one. Copy its content, create a file `credentials.json` and paste the content. 16 | 2. To deploy the application you first need to bind the Event Streams for IBM Cloud service instance to the cluster. Create a secret using the content from the file `credentials.json` 17 | ```shell 18 | kubectl create secret generic eventstreams-binding --from-file=binding=credentials.json 19 | ``` 20 | The command above creates a secret in your cluster named `eventstreams-binding`. 21 | 3.
[Configure the CLI to run kubectl](https://cloud.ibm.com/docs/containers?topic=containers-cs_cli_install#cs_cli_configure) 22 | 23 | 4. Deploy the application in the cluster: 24 | ```shell 25 | kubectl apply -f kafka-python-console-sample.yaml 26 | ``` 27 | 5. Access the application logs: 28 | ```shell 29 | kubectl wait pod kafka-python-console-sample --for=condition=Ready 30 | kubectl logs kafka-python-console-sample --follow 31 | ``` 32 | 33 | ## Further references 34 | 35 | If you want find out more about IBM Cloud Kubernetes Service or Kubernetes then check the following documents: 36 | 37 | [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/container-service) 38 | 39 | [Kubernetes Documentation](https://kubernetes.io/docs/home/) 40 | 41 | 42 | -------------------------------------------------------------------------------- /kafka-python-console-sample/docs/Local.md: -------------------------------------------------------------------------------- 1 | # IBM Event Streams for IBM Cloud Kafka Python console sample application: Local Development guide 2 | As pushing the application into IBM Cloud® does not require you to build the application locally, this guide is here to guide you through the process, should you wish to build the application locally. 3 | 4 | We will not discuss establishing a connection from your laptop to Event Streams for IBM Cloud. This is described in the [ connection guide](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-connecting#connecting). 
5 | 6 | ## Prerequisites 7 | * [Python](https://www.python.org/downloads/) 3.6 or later 8 | 9 | ##### macOS 10 | * Open Keychain Access, export all certificates in System Roots to a single .pem file 11 | 12 | ## Installing dependencies 13 | Run the following commands on your local machine, after the prerequisites for your environment have been completed: 14 | ```shell 15 | pip install -r requirements.txt 16 | ``` 17 | 18 | ## Running the Sample 19 | Once built, to run the sample, execute the following command: 20 | ```shell 21 | python3 app.py <kafka_brokers_sasl> <user>:<password> <ca_location> 22 | ``` 23 | 24 | To find the values for `<kafka_brokers_sasl>`, `<user>` and `<password>`, access your Event Streams instance in IBM Cloud®, go to the `Service Credentials` tab and select the `Credentials` you want to use. If your user value is `token`, specify that with the password separated by a `:`. 25 | 26 | `<ca_location>` is the path where the trusted SSL certificates are stored on your machine and is therefore system dependent. 27 | For example: 28 | * Ubuntu: /etc/ssl/certs 29 | * RedHat: /etc/pki/tls/cert.pem 30 | * macOS: The .pem file you created in the prerequisite section 31 | 32 | __Note__: `<kafka_brokers_sasl>` must be a single string enclosed in quotes. For example: `"host1:port1,host2:port2"`. We recommend using all the Kafka hosts listed in the `Credentials` you selected. 33 | 34 | Alternatively, you can run only the producer or only the consumer by respectively appending the switches `-producer` or `-consumer` to the command above. 35 | 36 | The sample will run indefinitely until interrupted. To stop the process, use `Ctrl+C`.
-------------------------------------------------------------------------------- /kafka-python-console-sample/kafka-python-console-sample.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: kafka-python-console-sample 5 | namespace: default 6 | spec: 7 | containers: 8 | - image: event-streams-samples/kafka-python-console-sample:latest 9 | imagePullPolicy: IfNotPresent 10 | name: python-sample 11 | env: 12 | - name: VCAP_SERVICES 13 | valueFrom: 14 | secretKeyRef: 15 | name: eventstreams-binding 16 | key: binding -------------------------------------------------------------------------------- /kafka-python-console-sample/manifest.yml: -------------------------------------------------------------------------------- 1 | applications: 2 | - name: kafka-python-console-sample 3 | buildpack: "https://github.com/cloudfoundry/python-buildpack.git" 4 | instances: 1 5 | memory: 512M 6 | disk_quota: 256M 7 | path: . 8 | no-route: true 9 | health-check-type: none 10 | services: 11 | - "" 12 | -------------------------------------------------------------------------------- /kafka-python-console-sample/producertask.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2015-2018 IBM 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | 16 | Licensed Materials - Property of IBM 17 | © Copyright IBM Corp. 
class ProducerTask(object):
    """Produces numbered test messages to a Kafka topic via confluent_kafka.

    Runs as an asyncio coroutine (see run()) until stop() is called.
    """

    def __init__(self, conf, topic_name):
        # conf: confluent_kafka Producer configuration dict.
        # topic_name: the topic this task publishes to.
        self.topic_name = topic_name
        self.producer = Producer(conf)
        self.counter = 0  # sequence number embedded in each test message
        self.running = True

    def stop(self):
        """Request the run() loop to exit after its current iteration."""
        self.running = False

    def on_delivery(self, err, msg):
        """Delivery callback invoked by librdkafka for every produced message."""
        if err:
            print('Delivery report: Failed sending message {0}'.format(msg.value()))
            print(err)
            # We could retry sending the message
        else:
            print('Message produced, offset: {0}'.format(msg.offset()))

    async def run(self):
        """Produce a test message every couple of seconds until stop() is called.

        Native coroutine: @asyncio.coroutine was deprecated in Python 3.8 and
        removed in 3.11, so async/await replaces the generator-based form.
        """
        print('The producer has started')
        while self.running:
            message = 'This is a test message #{0}'.format(self.counter)
            key = 'key'
            sleep = 2  # Short sleep for flow control
            try:
                # -1 selects the partition automatically (RD_KAFKA_PARTITION_UA).
                self.producer.produce(self.topic_name, message, key, -1, self.on_delivery)
                self.producer.poll(0)  # serve delivery callbacks without blocking
                self.counter += 1
            except Exception as err:
                print('Failed sending message {0}'.format(message))
                print(err)
                sleep = 5  # Longer sleep before retrying
            await asyncio.sleep(sleep)
        # Wait for any in-flight messages to be delivered before exiting.
        self.producer.flush()
class EventStreamsRest(object):
    """Minimal client for the Event Streams topic-administration REST API."""

    def __init__(self, rest_endpoint, api_key):
        # All operations target the admin topic collection; the API key is
        # passed on every request via the X-Auth-Token header.
        self.path = '{0}/admin/topics'.format(rest_endpoint)
        self.headers = {
            'X-Auth-Token': api_key,
            'Content-Type': 'application/json'
        }

    def create_topic(self, topic_name, partitions=1, retention_hours=24):
        """
        POST /admin/topics
        """
        # The service expects retention in milliseconds.
        retention_ms = retention_hours * 60 * 60 * 1000
        request_body = {
            'name': topic_name,
            'partitions': partitions,
            'configs': {
                'retentionMs': retention_ms
            }
        }
        return requests.post(self.path, headers=self.headers, json=request_body)

    def list_topics(self):
        """
        GET /admin/topics
        """
        return requests.get(self.path, headers=self.headers)
configuration for [Event Streams](https://cloud.ibm.com/catalog/services/event-streams) topic partitions. 3 | 4 | ## Building 5 | This project requires Java 11. 6 | 7 | ``` 8 | ./gradle jar 9 | ``` 10 | 11 | ## Running 12 | ``` 13 | export API_KEY=your api key here 14 | export BOOTSTRAP_ENDPOINTS=kafka-0.example.org:9093,kafka-1.example.org:9093 15 | java -jar ./build/libs/kafka-topic-stats.jar 16 | ``` 17 | 18 | ## Example output 19 | ``` 20 | Topic Name, Partition ID, Used Bytes, retention.bytes, segment.bytes, cleanup.policy, retention.ms 21 | mytopic, 0, 0, 1073741824, 536870912, delete, 86400000 22 | mytopic, 1, 0, 1073741824, 536870912, delete, 86400000 23 | mytopic, 2, 0, 1073741824, 536870912, delete, 86400000 24 | ``` 25 | -------------------------------------------------------------------------------- /kafka-topic-stats/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'application' 3 | } 4 | 5 | repositories { 6 | mavenCentral() 7 | } 8 | 9 | dependencies { 10 | implementation 'org.apache.kafka:kafka-clients:3.0.1' 11 | implementation 'org.slf4j:slf4j-nop:1.7.36' 12 | } 13 | 14 | jar { 15 | manifest { 16 | attributes "Main-Class": "com.eventstreams.samples.TopicStats" 17 | } 18 | 19 | from { 20 | configurations.runtimeClasspath.collect { it.isDirectory() ? 
it : zipTree(it) } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /kafka-topic-stats/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'kafka-topic-stats' 2 | include('app') 3 | -------------------------------------------------------------------------------- /kafka-topic-stats/src/main/java/com/eventstreams/samples/TopicStats.java: -------------------------------------------------------------------------------- 1 | package com.eventstreams.samples; 2 | 3 | import java.util.Collection; 4 | import java.util.HashMap; 5 | import java.util.TreeMap; 6 | import java.util.concurrent.ExecutionException; 7 | import java.util.stream.Collectors; 8 | 9 | import org.apache.kafka.clients.CommonClientConfigs; 10 | import org.apache.kafka.clients.admin.AdminClient; 11 | import org.apache.kafka.clients.admin.Config; 12 | import org.apache.kafka.clients.admin.ReplicaInfo; 13 | import org.apache.kafka.common.TopicPartition; 14 | import org.apache.kafka.common.config.ConfigResource; 15 | import org.apache.kafka.common.config.ConfigResource.Type; 16 | import org.apache.kafka.common.config.SaslConfigs; 17 | import org.apache.kafka.common.config.TopicConfig; 18 | 19 | public class TopicStats { 20 | public static void main(String[] args) { 21 | final var apiKey = System.getenv("API_KEY"); 22 | final var bootstrap = System.getenv("BOOTSTRAP_ENDPOINTS"); 23 | 24 | var shouldExit = false; 25 | if (apiKey == null || apiKey.isEmpty()) { 26 | System.err.println("API_KEY environment variable is not set"); 27 | shouldExit = true; 28 | } 29 | if (bootstrap == null || bootstrap.isEmpty()) { 30 | System.err.println("BOOTSTRAP_ENDPOINTS environment variable is not set"); 31 | shouldExit = true; 32 | } 33 | if (shouldExit) { 34 | System.exit(1); 35 | } 36 | 37 | final var configs = new HashMap(); 38 | configs.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, bootstrap); 39 | 
configs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL"); 40 | configs.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 41 | configs.put(SaslConfigs.SASL_JAAS_CONFIG, 42 | "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"token\" password=\"" 43 | + apiKey + "\";"); 44 | 45 | try (final AdminClient admin = AdminClient.create(configs)) { 46 | // Discover brokers 47 | final var nodes = admin.describeCluster().nodes().get(); 48 | final var brokers = nodes.stream().map(v -> v.id()).collect(Collectors.toSet()); 49 | 50 | // Get replica information for each broker and reduce into a map of TopicPartition -> ReplicaInfo. 51 | final var brokerToLogDir = admin.describeLogDirs(brokers).allDescriptions().get(); 52 | final var logDirs = brokerToLogDir.values().stream().map(v -> v.values()).flatMap(Collection::stream).collect(Collectors.toSet()); 53 | final var replicaInfo = logDirs.stream().map(v -> v.replicaInfos()).reduce((m1, m2) -> { 54 | var r = new HashMap(); 55 | r.putAll(m1); 56 | // All of a partition's replicas should be about the same size. Be pessimistic for 57 | // the case that the partition is not fully in-sync, and pick the largest sized replica. 
58 | m2.forEach((k, v) -> { 59 | final var m1v = m1.get(k); 60 | if (m1v == null || m1v.size() < v.size()) { 61 | r.put(k, v); 62 | } 63 | }); 64 | return r; 65 | }).get(); 66 | 67 | // Get all the topics, their descriptions, and their configuration 68 | final var topicNames = admin.listTopics().names().get(); 69 | final var topicsDesc = admin.describeTopics(topicNames).all().get(); 70 | final var topicsConfigResources = topicNames.stream().map(v -> new ConfigResource(Type.TOPIC, v)).collect(Collectors.toSet()); 71 | final var topicsConfigs = admin.describeConfigs(topicsConfigResources).all().get(); 72 | 73 | class PartitionInfo { 74 | final Config config; 75 | final long usedBytes; 76 | 77 | PartitionInfo(Config config, long usedBytes) { 78 | this.config = config; 79 | this.usedBytes = usedBytes; 80 | } 81 | } 82 | 83 | // Wraps TopicPartition in Comparable. 84 | class ComparableTopicPartition implements Comparable { 85 | final String topic; 86 | final int partition; 87 | 88 | ComparableTopicPartition(TopicPartition tp) { 89 | this.topic = tp.topic(); 90 | this.partition = tp.partition(); 91 | } 92 | 93 | @Override 94 | public int compareTo(ComparableTopicPartition o) { 95 | var result = this.topic.compareTo(o.topic); 96 | if (result == 0) { // topic names are the same, compare based on partition number 97 | result = this.partition - o.partition; 98 | } 99 | return result; 100 | } 101 | } 102 | 103 | final var usageInfo = new TreeMap(); // TreeMap as it implements SortedMap. 104 | topicsConfigs.forEach((resource, config) -> { 105 | final var topicName = resource.name(); 106 | final var topicDescription = topicsDesc.get(topicName); 107 | if (topicDescription == null) { 108 | // skip if there isn't a description. The topic information is gathered 109 | // at slightly different times, so it's possible there will be inconsistencies. 
110 | return; 111 | } 112 | for (final var partition : topicDescription.partitions()) { 113 | final var tp = new TopicPartition(topicName, partition.partition()); 114 | final var info = replicaInfo.get(tp); 115 | if (info == null) { 116 | // skip if there isn't replica information. As per above, the data gathered 117 | // might not be completely consistent. 118 | continue; 119 | } 120 | final var usedBytes = info.size(); 121 | usageInfo.put(new ComparableTopicPartition(tp), new PartitionInfo(config, usedBytes)); 122 | } 123 | }); 124 | 125 | // Output in CSV format 126 | System.out.println("Topic Name, Partition ID, Used Bytes, retention.bytes, segment.bytes, cleanup.policy, retention.ms"); // column titles 127 | usageInfo.forEach((k, v) -> { 128 | System.out.printf("%s, %s, %d, %s, %s, %s, %s\n", 129 | k.topic, 130 | k.partition, 131 | v.usedBytes, 132 | v.config.get(TopicConfig.RETENTION_BYTES_CONFIG).value(), 133 | v.config.get(TopicConfig.SEGMENT_BYTES_CONFIG).value(), 134 | v.config.get(TopicConfig.CLEANUP_POLICY_CONFIG).value(), 135 | v.config.get(TopicConfig.RETENTION_MS_CONFIG).value()); 136 | }); 137 | 138 | } catch(final ExecutionException e) { 139 | // ExecutionException typically wraps the exception we *actually* care about... 140 | if (e.getCause() != null) { 141 | e.getCause().printStackTrace(); 142 | } else { 143 | e.printStackTrace(); 144 | } 145 | } catch(final Exception e) { 146 | e.printStackTrace(); 147 | } 148 | } 149 | } 150 | --------------------------------------------------------------------------------