├── .gitignore
├── LICENSE
├── README.md
├── demo_setup.sh
├── docker-demo.yaml
├── docker-kafka.yaml
├── docker-registry.yaml
├── docker
└── Dockerfile
├── infinispan.sh
├── kubernetes
├── resources-infinispan.yaml
└── resources.yaml
├── pom.xml
├── schemas
├── schema1.avsc
└── schema2.avsc
├── secured_run.sh
├── src
├── main
│ ├── avro
│ │ └── log.avsc
│ ├── java
│ │ └── io
│ │ │ └── apicurio
│ │ │ └── registry
│ │ │ └── demo
│ │ │ ├── ApplicationConfiguration.java
│ │ │ ├── ApplicationImpl.java
│ │ │ ├── Lifecycle.java
│ │ │ ├── Main.java
│ │ │ ├── MergeTransformer.java
│ │ │ ├── domain
│ │ │ └── LogInput.java
│ │ │ ├── simple
│ │ │ ├── avro
│ │ │ │ ├── SimpleAvroAppConstants.java
│ │ │ │ ├── SimpleAvroBootstrapper.java
│ │ │ │ ├── SimpleAvroConsumerApp.java
│ │ │ │ └── SimpleAvroProducerApp.java
│ │ │ └── json
│ │ │ │ ├── Message.java
│ │ │ │ ├── SimpleJsonSchemaAppConstants.java
│ │ │ │ ├── SimpleJsonSchemaBootstrapper.java
│ │ │ │ ├── SimpleJsonSchemaConsumerApp.java
│ │ │ │ └── SimpleJsonSchemaProducerApp.java
│ │ │ └── utils
│ │ │ ├── PropertiesUtil.java
│ │ │ └── ProtoSerde.java
│ ├── proto
│ │ └── log.proto
│ └── resources
│ │ └── application.properties
└── test
│ └── java
│ └── io
│ └── apicurio
│ └── registry
│ └── test
│ └── TestMain.java
├── stores
├── keystore.p12
├── password.txt
└── truststore.p12
└── strimzi.sh
/.gitignore:
--------------------------------------------------------------------------------
1 | /target/
2 | .classpath
3 | .project
4 | .settings
5 | docker/*.jar
6 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Apicurio Service Registry Demo
2 |
3 | ## How to run the demo
4 |
5 | * Clone and build Apicurio registry project: https://github.com/Apicurio/apicurio-registry
6 |
7 | `git clone git@github.com:Apicurio/apicurio-registry.git`
8 |
9 | `mvn clean install -DskipTests -Pstreams`
10 |
11 | * Download and run Kafka (also set KAFKA_HOME environment variable)
12 |
13 | * Run demo_setup.sh script
14 |
15 | `https://github.com/alesj/registry-demo/blob/master/demo_setup.sh`
16 |
17 | * Run two instances of registry
18 |
19 | `java -jar -Dquarkus.profile=dev /Users/alesj/projects/redhat/apicurio-registry/storage/streams/target/apicurio-registry-storage-streams-1.2.3-SNAPSHOT-runner.jar`
20 |
21 | `java -jar -Dquarkus.profile=dev -Dquarkus.http.port=8081 -D%dev.registry.streams.topology.application.server=localhost:9001 /Users/alesj/projects/redhat/apicurio-registry/storage/streams/target/apicurio-registry-storage-streams-1.2.3-SNAPSHOT-runner.jar`
22 |
23 | * Run demo's Main (from IDE)
24 |
25 | `https://github.com/alesj/registry-demo/blob/master/src/main/java/io/apicurio/registry/demo/Main.java`
26 |
27 | * Run demo's TestMain (from IDE)
28 |
29 | `https://github.com/alesj/registry-demo/blob/master/src/test/java/io/apicurio/registry/test/TestMain.java`
30 |
31 | * Simple cURL commands (create and update artifact)
32 |
33 | `curl -d '{"foo":"bar"}' -H "Content-Type: application/json" -H "X-Registry-ArtifactType: JSON" -H "X-Registry-ArtifactId: qwerty" http://localhost:8080/api/artifacts`
34 |
35 | `curl -X PUT -d '{"title":"baz"}' -H "Content-Type: application/json" -H "X-Registry-ArtifactType: JSON" http://localhost:8080/api/artifacts/qwerty`
36 |
37 | ## What does the demo do / show-case
38 |
39 | Demo runs two clustered instances of registry, with its Streams storage.
40 | The demo's main application creates a simple Kafka stream, which uses registry's Avro deserializer for input topic.
41 | The test main uses registry's Avro serializer to send the messages to main's input topic.
42 | Avro serializer and deserializer use registry to get a hold of Avro schema in order to be able to properly serialize and deserialize Kafka messages.
43 |
44 | In the test main we can see how we register the schema at one node,
45 | whereas the serializer points to second node for lookup.
46 |
47 | ### What about Registry Maven Plugin?
48 |
49 | Checkout our [pom.xml](https://github.com/Apicurio/apicurio-registry-demo/blob/master/pom.xml), its profiles, for the plugin usage.
50 |
51 | e.g. how to register a schema
52 |
53 | ```xml
54 |
55 | io.apicurio
56 | apicurio-registry-maven-plugin
57 | ${registry.version}
58 |
59 |
60 | generate-sources
61 |
62 | register
63 |
64 |
65 | http://localhost:8080/api
66 | AVRO
67 |
68 | ${project.basedir}/schemas/schema1.avsc
69 |
70 |
71 |
72 |
73 |
74 | ```
75 |
76 | ## What's there to see?
77 |
78 | In the main's console you should see `INFO: Merged logs: fst: 1572428933954 snd: 1572428934726`
79 |
80 | And if you run `$KAFKA_HOME/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic logx-topic --from-beginning`
81 | you should see calculated diff between two matching logs: e.g. `Log diff: 192`
82 |
83 | This means that Streams have processed input topic with registry's Avro deserializer and forwarded + transformed the record value to logx-topic, for Kafka console consumer to handle.
84 |
85 | ## Running things in Docker
86 |
87 | Start ZooKeeper: `docker run -p 2181:2181 --name zookeeper wurstmeister/zookeeper`
88 |
89 | Start Kafka: `docker run --link zookeeper --name kafka -p 9092:9092 -e KAFKA_ADVERTISED_HOST_NAME=kafka -e KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 wurstmeister/kafka`
90 |
91 | Add `127.0.0.1 kafka` to /etc/hosts (so that brokers map "kafka" to localhost).
92 |
93 | Run script to setup topics: `./demo_setup.sh docker`
94 |
95 | Start single Registry:
96 |
97 | `docker run -p 8080:8080 -e QUARKUS_PROFILE=prod -e KAFKA_BOOTSTRAP_SERVERS=kafka:9092 -e APPLICATION_ID=demo_app_1 --link kafka apicurio/apicurio-registry-streams`
98 |
99 | ## Running things with Docker-compose
100 |
101 | To run a 2 node cluster (needed for this demo).
102 |
103 | Build Demo project: `mvn clean install`
104 |
105 | Build Demo's latest Docker image: `docker build docker -t apicurio/apicurio-registry-demo`
106 |
107 | Run these 3 docker-compose yamls:
108 |
109 | * `docker-compose -f docker-kafka.yaml up`
110 |
111 | * `docker-compose -f docker-registry.yaml up`
112 |
113 | * `docker-compose -f docker-demo.yaml up`
114 |
115 | Cleanup: `docker rm -f zookeeper kafka registry1 registry2 demo`
116 |
117 | ## Running things with Kubernetes (minikube)
118 |
119 | You need to have `minikube` and `kubectl` installed locally.
120 |
121 | * remove any previous `minikube` instance
122 |
123 | * simply run our `strimzi.sh` script.
124 |
125 | * grab minikube IP: `minikube ip`
126 |
127 | * run TestMain with `-Dbootstrap.servers=:32100 -Dregistry.url.1=:30080 -Dregistry.url.2=:30080`
128 | * e.g. `mvn exec:java -Dexec.mainClass="io.apicurio.registry.demo.Main" -Dbootstrap.servers=192.168.39.204:32100 -Dregistry.url.1=192.168.39.204:30080 -Dregistry.url.2=192.168.39.204:30080`
129 |
--------------------------------------------------------------------------------
/demo_setup.sh:
--------------------------------------------------------------------------------
1 | ## Functions
2 |
3 | setupStreams() {
4 |   if [ -z "$KAFKA_HOME" ]; then # quoted: '[ -z ]' misbehaves on unset/empty vars otherwise
5 |     echo 'Missing KAFKA_HOME env var'
6 |     exit 1;
7 |   fi
8 |   trap "exit 1" SIGINT SIGTERM
9 |   # ZooKeeper & Kafka
10 |   cleanup
11 |   echo "$KAFKA_HOME"
12 |   cd "$KAFKA_HOME/bin" || exit 1 # abort rather than run the scripts from the wrong dir
13 |   ./zookeeper-server-start.sh "$KAFKA_HOME/config/zookeeper.properties" &
14 |   sleep 1
15 |   ./kafka-server-start.sh "$KAFKA_HOME/config/server.properties" &
16 |   sleep 2
17 |   topics
18 |   sleep 1
19 |   echo 'Demo Kafka ready ...'
20 | }
21 |
22 | setupDocker() {
23 |   if [ -z "$KAFKA_HOME" ]; then # quoted: '[ -z ]' misbehaves on unset/empty vars otherwise
24 |     echo 'Missing KAFKA_HOME env var'
25 |     exit 1;
26 |   fi
27 |   echo "$KAFKA_HOME"
28 |   cd "$KAFKA_HOME/bin" || exit 1 # abort rather than run kafka-topics.sh from the wrong dir
29 |   topics
30 |   echo 'Demo Kafka Docker ready ...'
31 | }
32 |
33 | topics() {
34 | ./kafka-topics.sh --zookeeper localhost --create --topic storage-topic --partitions 1 --replication-factor 1 --config cleanup.policy=compact
35 | ./kafka-topics.sh --zookeeper localhost --create --topic global-id-topic --partitions 1 --replication-factor 1 --config cleanup.policy=compact
36 | ./kafka-topics.sh --zookeeper localhost --create --topic input-topic --partitions 1 --replication-factor 1 --config cleanup.policy=compact
37 | ./kafka-topics.sh --zookeeper localhost --create --topic logx-topic --partitions 1 --replication-factor 1 --config cleanup.policy=compact
38 | ./kafka-topics.sh --zookeeper localhost --create --topic dbx-topic --partitions 1 --replication-factor 1 --config cleanup.policy=compact
39 | }
40 |
41 | cleanup() {
42 |   ps -ax | grep '[k]afka' | awk '{print $1}' | xargs kill -9 # '[k]afka' stops grep from matching its own process (and this script)
43 |   ps -ax | grep '[z]ookeeper' | awk '{print $1}' | xargs kill -9
44 |   rm -rf /tmp/zookeeper/
45 |   rm -rf /tmp/kafka-*
46 | }
47 |
48 | ## Main
49 |
50 | if [ -z "$1" ]; then # quoted: an unquoted empty $1 makes '[ -z ]' see no operand
51 |   echo 'Setting-up Demo Kafka ...'
52 |   setupStreams
53 | elif [ "$1" == "cleanup" ]
54 | then
55 |   echo 'Demo Kafka cleanup ...'
56 |   cleanup
57 | elif [ "$1" == "docker" ]
58 | then
59 |   echo 'Demo Kafka Docker ...'
60 |   setupDocker
61 | fi
62 |
--------------------------------------------------------------------------------
/docker-demo.yaml:
--------------------------------------------------------------------------------
1 | version: '2'
2 | services:
3 | demo:
4 | container_name: demo
5 | image: apicurio/apicurio-registry-demo
6 | environment:
7 | BOOTSTRAP_SERVERS: kafka:9092
8 | REGISTRY_URL: http://registry1:8080
9 | APPLICATION_ID: demo_id
10 |
11 |
--------------------------------------------------------------------------------
/docker-kafka.yaml:
--------------------------------------------------------------------------------
1 | version: '2'
2 | services:
3 | zookeeper:
4 | container_name: zookeeper
5 | image: wurstmeister/zookeeper
6 | ports:
7 | - "2181:2181"
8 | kafka:
9 | container_name: kafka
10 | depends_on:
11 | - zookeeper
12 | image: wurstmeister/kafka
13 | ports:
14 | - "9092:9092"
15 | environment:
16 | KAFKA_ADVERTISED_HOST_NAME: kafka
17 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
18 | KAFKA_CREATE_TOPICS: "storage-topic:1:1:compact,global-id-topic:1:1:compact,input-topic:1:1:compact,logx-topic:1:1:compact,dbx-topic:1:1:compact"
19 |
--------------------------------------------------------------------------------
/docker-registry.yaml:
--------------------------------------------------------------------------------
1 | version: '2'
2 | services:
3 | registry1:
4 | container_name: registry1
5 | image: apicurio/apicurio-registry-streams
6 | ports:
7 | - "8080:8080"
8 | - "9000"
9 | environment:
10 | QUARKUS_PROFILE: prod
11 | KAFKA_BOOTSTRAP_SERVERS: kafka:9092
12 | APPLICATION_ID: registry_id
13 | APPLICATION_SERVER: registry1:9000
14 | registry2:
15 | container_name: registry2
16 | image: apicurio/apicurio-registry-streams
17 | ports:
18 | - "8081:8080"
19 | - "9000"
20 | environment:
21 | QUARKUS_PROFILE: prod
22 | KAFKA_BOOTSTRAP_SERVERS: kafka:9092
23 | APPLICATION_ID: registry_id
24 | APPLICATION_SERVER: registry2:9000
25 |
--------------------------------------------------------------------------------
/docker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fabric8/java-alpine-openjdk8-jre
2 |
3 | ADD apicurio-registry-demo-1-SNAPSHOT-runner.jar /deployments
4 |
5 | ENTRYPOINT [ "/deployments/run-java.sh" ]
6 |
--------------------------------------------------------------------------------
/infinispan.sh:
--------------------------------------------------------------------------------
1 | minikube start --memory="4000m" # start a local k8s cluster with 4 GB of memory
2 | kubectl apply -f kubernetes/resources-infinispan.yaml # deploy the Infinispan-backed registry (Deployment + Services)
3 |
4 |
--------------------------------------------------------------------------------
/kubernetes/resources-infinispan.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: apps/v1
2 | kind: Deployment
3 | metadata:
4 | labels:
5 | app: registry
6 | name: registry
7 | spec:
8 | replicas: 2
9 | selector:
10 | matchLabels:
11 | app: registry
12 | template:
13 | metadata:
14 | labels:
15 | app: registry
16 | spec:
17 | containers:
18 | - image: apicurio/apicurio-registry-infinispan:latest-release
19 | name: registry
20 | env:
21 | - name: QUARKUS_PROFILE
22 | value: prod
23 | - name: JAVA_OPTIONS
24 | value: "-D%prod.registry.infinispan.transport.configurationFile=default-configs/default-jgroups-kubernetes.xml -Djgroups.dns.query=jgrp.default.svc.cluster.local"
25 | - name: QUARKUS_LOG_LEVEL
26 | value: "INFO"
27 | resources:
28 | limits:
29 | memory: "1000Mi"
30 | ports:
31 | - containerPort: 7800
32 | name: ping
33 | protocol: TCP
34 | livenessProbe:
35 | httpGet:
36 | path: /health/live
37 | port: 8080
38 | scheme: HTTP
39 | initialDelaySeconds: 5
40 | timeoutSeconds: 5
41 | periodSeconds: 10
42 | successThreshold: 1
43 | failureThreshold: 3
44 | readinessProbe:
45 | httpGet:
46 | path: /health/ready
47 | port: 8080
48 | scheme: HTTP
49 | initialDelaySeconds: 5
50 | timeoutSeconds: 5
51 | periodSeconds: 10
52 | successThreshold: 1
53 | failureThreshold: 3
54 |
55 | ---
56 | apiVersion: v1
57 | kind: Service
58 | metadata:
59 | labels:
60 | app: registry
61 | name: registry
62 | spec:
63 | ports:
64 | - name: http
65 | port: 80
66 | protocol: TCP
67 | targetPort: 8080
68 | nodePort: 30080
69 | selector:
70 | app: registry
71 | type: NodePort
72 | ---
73 | apiVersion: v1
74 | kind: Service
75 | metadata:
76 | annotations:
77 | service.alpha.kubernetes.io/tolerate-unready-endpoints: "true"
78 | name: jgrp
79 | labels:
80 | run: jgrp
81 | spec:
82 | publishNotReadyAddresses: true
83 | clusterIP: None
84 | ports:
85 | - name: ping
86 | port: 7800
87 | protocol: TCP
88 | targetPort: 7800
89 | selector:
90 | app: registry
91 | ---
--------------------------------------------------------------------------------
/kubernetes/resources.yaml:
--------------------------------------------------------------------------------
1 | # Start Strimzi (Kafka), Apicurio Registry and our Demo app
2 | # https://strimzi.io/quickstarts/minikube/
3 | # https://github.com/Apicurio/apicurio-registry
4 | apiVersion: kafka.strimzi.io/v1beta1
5 | kind: Kafka
6 | metadata:
7 | namespace: kafka
8 | name: my-cluster
9 | spec:
10 | kafka:
11 | version: 2.3.1
12 | replicas: 1
13 | # Expose Kafka and its brokers with NodePort listener
14 | # https://strimzi.io/docs/latest/#proc-accessing-kafka-using-nodeports-deployment-configuration-kafka
15 | listeners:
16 | plain: {}
17 | external:
18 | type: nodeport
19 | tls: false
20 | overrides:
21 | bootstrap:
22 | nodePort: 32100
23 | config:
24 | offsets.topic.replication.factor: 1
25 | transaction.state.log.replication.factor: 1
26 | transaction.state.log.min.isr: 1
27 | log.message.format.version: "2.3.1"
28 | storage:
29 | type: jbod
30 | volumes:
31 | - id: 0
32 | type: persistent-claim
33 | size: 100Gi
34 | deleteClaim: false
35 | zookeeper:
36 | replicas: 1
37 | storage:
38 | type: persistent-claim
39 | size: 100Gi
40 | deleteClaim: false
41 | entityOperator:
42 | topicOperator: {}
43 | userOperator: {}
44 | ---
45 | apiVersion: kafka.strimzi.io/v1beta1
46 | kind: KafkaTopic
47 | metadata:
48 | namespace: kafka
49 | name: storage-topic
50 | labels:
51 | strimzi.io/cluster: my-cluster
52 | spec:
53 | partitions: 1
54 | replicas: 1
55 | config:
56 | cleanup.policy: compact
57 | ---
58 | apiVersion: kafka.strimzi.io/v1beta1
59 | kind: KafkaTopic
60 | metadata:
61 | namespace: kafka
62 | name: global-id-topic
63 | labels:
64 | strimzi.io/cluster: my-cluster
65 | spec:
66 | partitions: 1
67 | replicas: 1
68 | config:
69 | cleanup.policy: compact
70 | ---
71 | apiVersion: kafka.strimzi.io/v1beta1
72 | kind: KafkaTopic
73 | metadata:
74 | namespace: kafka
75 | name: input-topic
76 | labels:
77 | strimzi.io/cluster: my-cluster
78 | spec:
79 | partitions: 1
80 | replicas: 1
81 | config:
82 | cleanup.policy: compact
83 | ---
84 | apiVersion: kafka.strimzi.io/v1beta1
85 | kind: KafkaTopic
86 | metadata:
87 | namespace: kafka
88 | name: logx-topic
89 | labels:
90 | strimzi.io/cluster: my-cluster
91 | spec:
92 | partitions: 1
93 | replicas: 1
94 | config:
95 | cleanup.policy: compact
96 | ---
97 | apiVersion: kafka.strimzi.io/v1beta1
98 | kind: KafkaTopic
99 | metadata:
100 | namespace: kafka
101 | name: dbx-topic
102 | labels:
103 | strimzi.io/cluster: my-cluster
104 | spec:
105 | partitions: 1
106 | replicas: 1
107 | config:
108 | cleanup.policy: compact
109 | ---
110 | apiVersion: apps/v1
111 | kind: Deployment
112 | metadata:
113 | labels:
114 | app: registry
115 | name: registry
116 | spec:
117 | replicas: 2
118 | selector:
119 | matchLabels:
120 | app: registry
121 | template:
122 | metadata:
123 | labels:
124 | app: registry
125 | spec:
126 | containers:
127 | - image: apicurio/apicurio-registry-streams:latest-snapshot
128 | name: registry
129 | env:
130 | - name: QUARKUS_PROFILE
131 | value: prod
132 | - name: KAFKA_BOOTSTRAP_SERVERS
133 | value: my-cluster-kafka-bootstrap.kafka:9092
134 | - name: APPLICATION_ID
135 | value: registry_id
136 | - name: APPLICATION_SERVER_HOST
137 | valueFrom:
138 | fieldRef:
139 | fieldPath: status.podIP
140 | - name: APPLICATION_SERVER_PORT
141 | value: "9000"
142 | - name: QUARKUS_LOG_LEVEL
143 | value: "INFO"
144 | resources:
145 | limits:
146 | memory: "1000Mi"
147 | livenessProbe:
148 | httpGet:
149 | path: /health/live
150 | port: 8080
151 | scheme: HTTP
152 | initialDelaySeconds: 5
153 | timeoutSeconds: 5
154 | periodSeconds: 10
155 | successThreshold: 1
156 | failureThreshold: 3
157 | readinessProbe:
158 | httpGet:
159 | path: /health/ready
160 | port: 8080
161 | scheme: HTTP
162 | initialDelaySeconds: 5
163 | timeoutSeconds: 5
164 | periodSeconds: 10
165 | successThreshold: 1
166 | failureThreshold: 3
167 |
168 | ---
169 | apiVersion: v1
170 | kind: Service
171 | metadata:
172 | labels:
173 | app: registry
174 | name: registry
175 | spec:
176 | ports:
177 | - name: http
178 | port: 80
179 | protocol: TCP
180 | targetPort: 8080
181 | nodePort: 30080
182 | selector:
183 | app: registry
184 | type: NodePort
185 |
186 | ---
187 |
188 | apiVersion: apps/v1
189 | kind: Deployment
190 | metadata:
191 | labels:
192 | app: demo
193 | name: demo
194 | spec:
195 | replicas: 1
196 | selector:
197 | matchLabels:
198 | app: demo
199 | template:
200 | metadata:
201 | labels:
202 | app: demo
203 | spec:
204 | containers:
205 | - image: alesj/apicurio-registry-demo
206 | name: demo
207 | env:
208 | - name: BOOTSTRAP_SERVERS
209 | value: my-cluster-kafka-bootstrap.kafka:9092
210 | - name: APPLICATION_ID
211 | value: demo_id
212 | - name: REGISTRY_URL
213 | value: http://registry
214 | resources:
215 | limits:
216 | memory: "1000Mi"
217 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 |
5 | io.apicurio
6 | apicurio-registry-demo
7 | 1.3.x-SNAPSHOT
8 |
9 | Apicurio Service Registry Demo
10 | Apicurio Service Registry Demo
11 |
12 |
13 | 1.3.0.Final
14 | 1.9.2
15 | 3.11.1
16 | 1.7.25
17 |
18 | 1.5.1.Final
19 |
20 | 0.6.1
21 | 1.6.1
22 |
23 | 3.1.0
24 |
25 | 1.8
26 | ${java.version}
27 | ${java.version}
28 |
29 |
30 |
31 |
32 |
33 | io.quarkus
34 | quarkus-bom
35 | ${quarkus.version}
36 | pom
37 | import
38 |
39 |
40 |
41 |
42 |
43 |
44 | org.slf4j
45 | slf4j-jdk14
46 | ${slf4j.version}
47 |
48 |
49 |
50 | io.apicurio
51 | apicurio-registry-utils-serde
52 | ${registry.version}
53 |
54 |
55 |
56 | com.google.protobuf
57 | protobuf-java
58 | ${protobuf.version}
59 |
60 |
61 |
62 | io.quarkus
63 | quarkus-arc
64 |
65 |
66 |
67 | org.apache.kafka
68 | kafka-streams
69 |
70 |
71 |
72 |
73 |
74 |
75 | kr.motd.maven
76 | os-maven-plugin
77 | ${os-maven-plugin.version}
78 |
79 |
80 |
81 |
82 |
83 | org.xolstice.maven.plugins
84 | protobuf-maven-plugin
85 | ${proto-plugin.version}
86 | true
87 |
88 |
89 |
90 | compile
91 |
92 |
93 |
94 |
95 | com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
96 |
97 |
98 |
99 |
100 |
101 |
102 | io.quarkus
103 | quarkus-maven-plugin
104 | ${quarkus.version}
105 |
106 |
107 |
108 | build
109 |
110 |
111 |
112 |
113 |
114 | maven-resources-plugin
115 | ${maven-resource-plugin.version}
116 |
117 |
118 | copy-resources
119 | verify
120 |
121 | copy-resources
122 |
123 |
124 | ${project.basedir}/docker
125 |
126 |
127 | ${project.build.directory}
128 |
129 | **/*-runner.jar
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 | avro
143 |
144 |
145 |
146 | org.apache.avro
147 | avro-maven-plugin
148 | ${avro.version}
149 |
150 |
151 | generate-sources
152 |
153 | schema
154 |
155 |
156 | ${project.basedir}/src/main/avro/
157 | ${project.basedir}/src/main/java/
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 | upload
167 |
168 |
169 |
170 | io.apicurio
171 | apicurio-registry-maven-plugin
172 | ${registry.version}
173 |
174 |
175 | generate-sources
176 |
177 | register
178 |
179 |
180 | http://localhost:8080/api
181 | AVRO
182 |
183 | ${project.basedir}/schemas/schema1.avsc
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 | test
194 |
195 |
196 |
197 | io.apicurio
198 | apicurio-registry-maven-plugin
199 | ${registry.version}
200 |
201 |
202 | generate-sources
203 |
204 | test-update
205 |
206 |
207 | http://localhost:8080/api
208 | AVRO
209 |
210 | ${project.basedir}/schemas/schema2.avsc
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 | download
221 |
222 |
223 |
224 | io.apicurio
225 | apicurio-registry-maven-plugin
226 | ${registry.version}
227 |
228 |
229 | generate-sources
230 |
231 | download
232 |
233 |
234 | http://localhost:8080/api
235 |
236 | schema1
237 |
238 | ${project.build.directory}
239 |
240 |
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
--------------------------------------------------------------------------------
/schemas/schema1.avsc:
--------------------------------------------------------------------------------
1 | {"namespace": "example.avro", "type": "record", "name": "user", "fields": [{"name": "name", "type": "string"}, {"name": "favorite_number", "type": "int"}]}
2 |
--------------------------------------------------------------------------------
/schemas/schema2.avsc:
--------------------------------------------------------------------------------
1 | {"namespace": "example.avro", "type": "record", "name": "user", "fields": [{"name": "name", "type": "string"}, {"name": "favorite_number", "type": "int"}, {"name": "favorite_color", "type": "string", "default": "green"}]}
2 |
--------------------------------------------------------------------------------
/secured_run.sh:
--------------------------------------------------------------------------------
# Runs the Apicurio Registry (Kafka Streams storage variant) against a TLS-secured Kafka cluster.
# NOTE(review): the volume mount below uses an absolute developer-specific host path, and the
# truststore password is hard-coded in plain text (it also appears in stores/password.txt) —
# adjust the path and move the password out of version control before reusing this script.
docker run -it --network host -v /Users/alesj/projects/redhat/apicurio-registry-demo/stores:/config -e JAVA_OPTIONS="-D%dev.registry.streams.topology.bootstrap.servers=192.168.0.3:9092 -D%dev.registry.streams.storage-producer.bootstrap.servers=192.168.0.3:9092 -D%dev.registry.streams.topology.security.protocol=SSL -D%dev.registry.streams.topology.ssl.truststore.location=/config/truststore.p12 -D%dev.registry.streams.topology.ssl.truststore.password=Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L -D%dev.registry.streams.topology.ssl.truststore.type=PKCS12 -D%dev.registry.streams.topology.ssl.endpoint.identification.algorithm= -D%dev.registry.streams.storage-producer.security.protocol=SSL -D%dev.registry.streams.storage-producer.ssl.truststore.location=/config/truststore.p12 -D%dev.registry.streams.storage-producer.ssl.truststore.password=Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L -D%dev.registry.streams.storage-producer.ssl.truststore.type=PKCS12 -D%dev.registry.streams.storage-producer.ssl.endpoint.identification.algorithm=" apicurio/apicurio-registry-streams:latest
2 |
--------------------------------------------------------------------------------
/src/main/avro/log.avsc:
--------------------------------------------------------------------------------
1 | {"namespace": "io.apicurio.registry.demo.domain",
2 | "type": "record",
3 | "name": "LogInput",
4 | "fields": [
5 | {"name": "line", "type": "string"},
6 | {"name": "timestamp", "type": ["long", "null"]}
7 | ]
8 | }
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/ApplicationConfiguration.java:
--------------------------------------------------------------------------------
1 | package io.apicurio.registry.demo;
2 |
3 | import io.quarkus.runtime.ShutdownEvent;
4 | import io.quarkus.runtime.StartupEvent;
5 |
6 | import javax.enterprise.context.ApplicationScoped;
7 | import javax.enterprise.event.Observes;
8 | import javax.enterprise.inject.Produces;
9 |
10 | /**
11 | * @author Ales Justin
12 | */
13 | @ApplicationScoped
14 | public class ApplicationConfiguration {
15 |
16 | @Produces
17 | public Lifecycle application() {
18 | return new ApplicationImpl(new String[]{});
19 | }
20 |
21 | public void init(@Observes StartupEvent event, Lifecycle lifecycle) {
22 | lifecycle.start();
23 | }
24 |
25 | public void destroy(@Observes ShutdownEvent event, Lifecycle lifecycle) {
26 | lifecycle.stop();
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/ApplicationImpl.java:
--------------------------------------------------------------------------------
1 | package io.apicurio.registry.demo;
2 |
3 | import io.apicurio.registry.client.CompatibleClient;
4 | import io.apicurio.registry.client.RegistryService;
5 | import io.apicurio.registry.demo.domain.Log;
6 | import io.apicurio.registry.demo.domain.LogInput;
7 | import io.apicurio.registry.demo.utils.PropertiesUtil;
8 | import io.apicurio.registry.demo.utils.ProtoSerde;
9 | import io.apicurio.registry.utils.serde.AvroKafkaDeserializer;
10 | import io.apicurio.registry.utils.serde.AvroKafkaSerializer;
11 | import io.apicurio.registry.utils.serde.avro.DefaultAvroDatumProvider;
12 | import org.apache.kafka.clients.CommonClientConfigs;
13 | import org.apache.kafka.common.config.TopicConfig;
14 | import org.apache.kafka.common.serialization.Deserializer;
15 | import org.apache.kafka.common.serialization.Serde;
16 | import org.apache.kafka.common.serialization.Serdes;
17 | import org.apache.kafka.streams.KafkaStreams;
18 | import org.apache.kafka.streams.StreamsBuilder;
19 | import org.apache.kafka.streams.StreamsConfig;
20 | import org.apache.kafka.streams.Topology;
21 | import org.apache.kafka.streams.kstream.Consumed;
22 | import org.apache.kafka.streams.kstream.KStream;
23 | import org.apache.kafka.streams.kstream.Produced;
24 | import org.apache.kafka.streams.state.KeyValueStore;
25 | import org.apache.kafka.streams.state.StoreBuilder;
26 | import org.apache.kafka.streams.state.Stores;
27 | import org.slf4j.Logger;
28 | import org.slf4j.LoggerFactory;
29 |
30 | import javax.enterprise.inject.Vetoed;
31 | import java.util.HashMap;
32 | import java.util.Map;
33 | import java.util.Properties;
34 |
35 | import static io.apicurio.registry.demo.utils.PropertiesUtil.property;
36 |
37 | /**
38 | * @author Ales Justin
39 | */
40 | @Vetoed
41 | @SuppressWarnings("deprecation")
42 | public class ApplicationImpl implements Lifecycle {
43 | private static final Logger log = LoggerFactory.getLogger(ApplicationImpl.class);
44 |
45 | public static final String INPUT_TOPIC = "input-topic";
46 | public static final String LOG_STORE = "log-store";
47 |
48 | private KafkaStreams streams;
49 |
50 | public ApplicationImpl(String[] args) {
51 | Properties properties = PropertiesUtil.properties(args);
52 | properties.put(
53 | CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG,
54 | property(properties, CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
55 | );
56 | properties.put(
57 | StreamsConfig.APPLICATION_ID_CONFIG,
58 | property(properties, StreamsConfig.APPLICATION_ID_CONFIG, "registry-demo")
59 | );
60 |
61 | String registryUrl = property(properties, "registry.url", "http://localhost:8080/api");
62 | RegistryService service = CompatibleClient.createCompatible(registryUrl);
63 |
64 | StreamsBuilder builder = new StreamsBuilder();
65 |
66 | // Demo topology
67 |
68 | Map configuration = new HashMap<>();
69 | configuration.put(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
70 | configuration.put(TopicConfig.MIN_COMPACTION_LAG_MS_CONFIG, "0");
71 | configuration.put(TopicConfig.SEGMENT_BYTES_CONFIG, String.valueOf(64 * 1024 * 1024));
72 |
73 | Deserializer deserializer = new AvroKafkaDeserializer<>(
74 | service,
75 | new DefaultAvroDatumProvider().setUseSpecificAvroReader(true)
76 | );
77 | Serde logSerde = Serdes.serdeFrom(
78 | new AvroKafkaSerializer<>(service),
79 | deserializer
80 | );
81 | KStream input = builder.stream(
82 | INPUT_TOPIC,
83 | Consumed.with(Serdes.String(), logSerde)
84 | );
85 |
86 | Serde logMergeSerde = new ProtoSerde<>(Log.LogMerge.parser());
87 |
88 | StoreBuilder> storageStoreBuilder =
89 | Stores
90 | .keyValueStoreBuilder(
91 | Stores.inMemoryKeyValueStore(LOG_STORE),
92 | Serdes.String(), logMergeSerde
93 | )
94 | .withCachingEnabled()
95 | .withLoggingEnabled(configuration);
96 | builder.addStateStore(storageStoreBuilder);
97 |
98 | KStream output = input.transform(
99 | MergeTransformer::new,
100 | LOG_STORE
101 | );
102 |
103 | // for Kafka console consumer show-case, pure String
104 | output.mapValues(value -> String.format("Log diff: %s", Math.abs(value.getSnd() - value.getFst())))
105 | .to("logx-topic", Produced.with(Serdes.String(), Serdes.String()));
106 |
107 | output.to("dbx-topic", Produced.with(Serdes.String(), logMergeSerde));
108 |
109 | Topology topology = builder.build(properties);
110 | streams = new KafkaStreams(topology, properties);
111 | }
112 |
113 | public void start() {
114 | log.info("Demo application starting ...");
115 | streams.start();
116 | log.info("Demo application started ...");
117 | }
118 |
119 | public void stop() {
120 | log.info("Demo application stopping ...");
121 | if (streams != null) {
122 | streams.close();
123 | }
124 | log.info("Demo application stopped ...");
125 | }
126 | }
127 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/Lifecycle.java:
--------------------------------------------------------------------------------
1 | package io.apicurio.registry.demo;
2 |
3 | /**
4 | * @author Ales Justin
5 | */
/** Minimal start/stop contract for the demo application. */
public interface Lifecycle {
    /** Starts the component; called once after construction. */
    void start();

    /** Stops the component; called during application shutdown. */
    void stop();
}
10 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/Main.java:
--------------------------------------------------------------------------------
1 | package io.apicurio.registry.demo;
2 |
3 | /**
4 | * @author Ales Justin
5 | */
6 | public class Main {
7 | public static void main(String[] args) {
8 | Lifecycle application = new ApplicationImpl(args);
9 | Runtime.getRuntime().addShutdownHook(new Thread(application::stop, "Registry-Demo-Shutdown-Thread"));
10 | application.start();
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/MergeTransformer.java:
--------------------------------------------------------------------------------
1 | package io.apicurio.registry.demo;
2 |
3 | import io.apicurio.registry.demo.domain.Log;
4 | import io.apicurio.registry.demo.domain.LogInput;
5 | import org.apache.kafka.streams.KeyValue;
6 | import org.apache.kafka.streams.kstream.Transformer;
7 | import org.apache.kafka.streams.processor.ProcessorContext;
8 | import org.apache.kafka.streams.state.KeyValueStore;
9 | import org.slf4j.Logger;
10 | import org.slf4j.LoggerFactory;
11 |
12 | /**
13 | * @author Ales Justin
14 | */
15 | class MergeTransformer implements Transformer> {
16 | private static final Logger logger = LoggerFactory.getLogger(MergeTransformer.class);
17 |
18 | private KeyValueStore store;
19 |
20 | @SuppressWarnings("unchecked")
21 | @Override
22 | public void init(ProcessorContext context) {
23 | //noinspection unchecked
24 | store = (KeyValueStore) context.getStateStore(ApplicationImpl.LOG_STORE);
25 | }
26 |
27 | @Override
28 | public KeyValue transform(String key, LogInput log) {
29 | Log.LogMerge merge = store.delete(key); // get + remove
30 | if (merge != null) {
31 | merge = Log.LogMerge.newBuilder(merge)
32 | .setSnd(log.getTimestamp())
33 | .build();
34 |
35 | logger.info("Merged logs: {}", merge);
36 |
37 | return new KeyValue<>(key, merge);
38 | } else {
39 | merge = Log.LogMerge.newBuilder()
40 | .setFst(log.getTimestamp())
41 | .build();
42 | store.put(key, merge);
43 | return null; // skip
44 | }
45 | }
46 |
47 | @Override
48 | public void close() {
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/domain/LogInput.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Autogenerated by Avro
3 | *
4 | * DO NOT EDIT DIRECTLY
5 | */
6 | package io.apicurio.registry.demo.domain;
7 |
8 | import org.apache.avro.message.BinaryMessageDecoder;
9 | import org.apache.avro.message.BinaryMessageEncoder;
10 | import org.apache.avro.message.SchemaStore;
11 | import org.apache.avro.specific.SpecificData;
12 | import org.apache.avro.util.Utf8;
13 |
14 | @org.apache.avro.specific.AvroGenerated
15 | public class LogInput extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
16 | private static final long serialVersionUID = 593939009577832190L;
17 | public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"LogInput\",\"namespace\":\"io.apicurio.registry.demo.domain\",\"fields\":[{\"name\":\"line\",\"type\":\"string\"},{\"name\":\"timestamp\",\"type\":[\"long\",\"null\"]}]}");
18 | public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
19 |
20 | private static SpecificData MODEL$ = new SpecificData();
21 |
22 | private static final BinaryMessageEncoder ENCODER =
23 | new BinaryMessageEncoder(MODEL$, SCHEMA$);
24 |
25 | private static final BinaryMessageDecoder DECODER =
26 | new BinaryMessageDecoder(MODEL$, SCHEMA$);
27 |
28 | /**
29 | * Return the BinaryMessageEncoder instance used by this class.
30 | * @return the message encoder used by this class
31 | */
32 | public static BinaryMessageEncoder getEncoder() {
33 | return ENCODER;
34 | }
35 |
36 | /**
37 | * Return the BinaryMessageDecoder instance used by this class.
38 | * @return the message decoder used by this class
39 | */
40 | public static BinaryMessageDecoder getDecoder() {
41 | return DECODER;
42 | }
43 |
44 | /**
45 | * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
46 | * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
47 | * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
48 | */
49 | public static BinaryMessageDecoder createDecoder(SchemaStore resolver) {
50 | return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver);
51 | }
52 |
53 | /**
54 | * Serializes this LogInput to a ByteBuffer.
55 | * @return a buffer holding the serialized data for this instance
56 | * @throws java.io.IOException if this instance could not be serialized
57 | */
58 | public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
59 | return ENCODER.encode(this);
60 | }
61 |
62 | /**
63 | * Deserializes a LogInput from a ByteBuffer.
64 | * @param b a byte buffer holding serialized data for an instance of this class
65 | * @return a LogInput instance decoded from the given buffer
66 | * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
67 | */
68 | public static LogInput fromByteBuffer(
69 | java.nio.ByteBuffer b) throws java.io.IOException {
70 | return DECODER.decode(b);
71 | }
72 |
73 | @Deprecated public java.lang.CharSequence line;
74 | @Deprecated public java.lang.Long timestamp;
75 |
76 | /**
77 | * Default constructor. Note that this does not initialize fields
78 | * to their default values from the schema. If that is desired then
79 | * one should use newBuilder()
.
80 | */
81 | public LogInput() {}
82 |
83 | /**
84 | * All-args constructor.
85 | * @param line The new value for line
86 | * @param timestamp The new value for timestamp
87 | */
88 | public LogInput(java.lang.CharSequence line, java.lang.Long timestamp) {
89 | this.line = line;
90 | this.timestamp = timestamp;
91 | }
92 |
93 | public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
94 | public org.apache.avro.Schema getSchema() { return SCHEMA$; }
95 | // Used by DatumWriter. Applications should not call.
96 | public java.lang.Object get(int field$) {
97 | switch (field$) {
98 | case 0: return line;
99 | case 1: return timestamp;
100 | default: throw new org.apache.avro.AvroRuntimeException("Bad index");
101 | }
102 | }
103 |
104 | // Used by DatumReader. Applications should not call.
105 | public void put(int field$, java.lang.Object value$) {
106 | switch (field$) {
107 | case 0: line = (java.lang.CharSequence)value$; break;
108 | case 1: timestamp = (java.lang.Long)value$; break;
109 | default: throw new org.apache.avro.AvroRuntimeException("Bad index");
110 | }
111 | }
112 |
113 | /**
114 | * Gets the value of the 'line' field.
115 | * @return The value of the 'line' field.
116 | */
117 | public java.lang.CharSequence getLine() {
118 | return line;
119 | }
120 |
121 |
122 | /**
123 | * Sets the value of the 'line' field.
124 | * @param value the value to set.
125 | */
126 | public void setLine(java.lang.CharSequence value) {
127 | this.line = value;
128 | }
129 |
130 | /**
131 | * Gets the value of the 'timestamp' field.
132 | * @return The value of the 'timestamp' field.
133 | */
134 | public java.lang.Long getTimestamp() {
135 | return timestamp;
136 | }
137 |
138 |
139 | /**
140 | * Sets the value of the 'timestamp' field.
141 | * @param value the value to set.
142 | */
143 | public void setTimestamp(java.lang.Long value) {
144 | this.timestamp = value;
145 | }
146 |
147 | /**
148 | * Creates a new LogInput RecordBuilder.
149 | * @return A new LogInput RecordBuilder
150 | */
151 | public static io.apicurio.registry.demo.domain.LogInput.Builder newBuilder() {
152 | return new io.apicurio.registry.demo.domain.LogInput.Builder();
153 | }
154 |
155 | /**
156 | * Creates a new LogInput RecordBuilder by copying an existing Builder.
157 | * @param other The existing builder to copy.
158 | * @return A new LogInput RecordBuilder
159 | */
160 | public static io.apicurio.registry.demo.domain.LogInput.Builder newBuilder(io.apicurio.registry.demo.domain.LogInput.Builder other) {
161 | if (other == null) {
162 | return new io.apicurio.registry.demo.domain.LogInput.Builder();
163 | } else {
164 | return new io.apicurio.registry.demo.domain.LogInput.Builder(other);
165 | }
166 | }
167 |
168 | /**
169 | * Creates a new LogInput RecordBuilder by copying an existing LogInput instance.
170 | * @param other The existing instance to copy.
171 | * @return A new LogInput RecordBuilder
172 | */
173 | public static io.apicurio.registry.demo.domain.LogInput.Builder newBuilder(io.apicurio.registry.demo.domain.LogInput other) {
174 | if (other == null) {
175 | return new io.apicurio.registry.demo.domain.LogInput.Builder();
176 | } else {
177 | return new io.apicurio.registry.demo.domain.LogInput.Builder(other);
178 | }
179 | }
180 |
181 | /**
182 | * RecordBuilder for LogInput instances.
183 | */
184 | public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase
185 | implements org.apache.avro.data.RecordBuilder {
186 |
187 | private java.lang.CharSequence line;
188 | private java.lang.Long timestamp;
189 |
190 | /** Creates a new Builder */
191 | private Builder() {
192 | super(SCHEMA$);
193 | }
194 |
195 | /**
196 | * Creates a Builder by copying an existing Builder.
197 | * @param other The existing Builder to copy.
198 | */
199 | private Builder(io.apicurio.registry.demo.domain.LogInput.Builder other) {
200 | super(other);
201 | if (isValidValue(fields()[0], other.line)) {
202 | this.line = data().deepCopy(fields()[0].schema(), other.line);
203 | fieldSetFlags()[0] = other.fieldSetFlags()[0];
204 | }
205 | if (isValidValue(fields()[1], other.timestamp)) {
206 | this.timestamp = data().deepCopy(fields()[1].schema(), other.timestamp);
207 | fieldSetFlags()[1] = other.fieldSetFlags()[1];
208 | }
209 | }
210 |
211 | /**
212 | * Creates a Builder by copying an existing LogInput instance
213 | * @param other The existing instance to copy.
214 | */
215 | private Builder(io.apicurio.registry.demo.domain.LogInput other) {
216 | super(SCHEMA$);
217 | if (isValidValue(fields()[0], other.line)) {
218 | this.line = data().deepCopy(fields()[0].schema(), other.line);
219 | fieldSetFlags()[0] = true;
220 | }
221 | if (isValidValue(fields()[1], other.timestamp)) {
222 | this.timestamp = data().deepCopy(fields()[1].schema(), other.timestamp);
223 | fieldSetFlags()[1] = true;
224 | }
225 | }
226 |
227 | /**
228 | * Gets the value of the 'line' field.
229 | * @return The value.
230 | */
231 | public java.lang.CharSequence getLine() {
232 | return line;
233 | }
234 |
235 |
236 | /**
237 | * Sets the value of the 'line' field.
238 | * @param value The value of 'line'.
239 | * @return This builder.
240 | */
241 | public io.apicurio.registry.demo.domain.LogInput.Builder setLine(java.lang.CharSequence value) {
242 | validate(fields()[0], value);
243 | this.line = value;
244 | fieldSetFlags()[0] = true;
245 | return this;
246 | }
247 |
248 | /**
249 | * Checks whether the 'line' field has been set.
250 | * @return True if the 'line' field has been set, false otherwise.
251 | */
252 | public boolean hasLine() {
253 | return fieldSetFlags()[0];
254 | }
255 |
256 |
257 | /**
258 | * Clears the value of the 'line' field.
259 | * @return This builder.
260 | */
261 | public io.apicurio.registry.demo.domain.LogInput.Builder clearLine() {
262 | line = null;
263 | fieldSetFlags()[0] = false;
264 | return this;
265 | }
266 |
267 | /**
268 | * Gets the value of the 'timestamp' field.
269 | * @return The value.
270 | */
271 | public java.lang.Long getTimestamp() {
272 | return timestamp;
273 | }
274 |
275 |
276 | /**
277 | * Sets the value of the 'timestamp' field.
278 | * @param value The value of 'timestamp'.
279 | * @return This builder.
280 | */
281 | public io.apicurio.registry.demo.domain.LogInput.Builder setTimestamp(java.lang.Long value) {
282 | validate(fields()[1], value);
283 | this.timestamp = value;
284 | fieldSetFlags()[1] = true;
285 | return this;
286 | }
287 |
288 | /**
289 | * Checks whether the 'timestamp' field has been set.
290 | * @return True if the 'timestamp' field has been set, false otherwise.
291 | */
292 | public boolean hasTimestamp() {
293 | return fieldSetFlags()[1];
294 | }
295 |
296 |
297 | /**
298 | * Clears the value of the 'timestamp' field.
299 | * @return This builder.
300 | */
301 | public io.apicurio.registry.demo.domain.LogInput.Builder clearTimestamp() {
302 | timestamp = null;
303 | fieldSetFlags()[1] = false;
304 | return this;
305 | }
306 |
307 | @Override
308 | public LogInput build() {
309 | try {
310 | LogInput record = new LogInput();
311 | record.line = fieldSetFlags()[0] ? this.line : (java.lang.CharSequence) defaultValue(fields()[0]);
312 | record.timestamp = fieldSetFlags()[1] ? this.timestamp : (java.lang.Long) defaultValue(fields()[1]);
313 | return record;
314 | } catch (org.apache.avro.AvroMissingFieldException e) {
315 | throw e;
316 | } catch (java.lang.Exception e) {
317 | throw new org.apache.avro.AvroRuntimeException(e);
318 | }
319 | }
320 | }
321 |
322 | @SuppressWarnings("unchecked")
323 | private static final org.apache.avro.io.DatumWriter
324 | WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$);
325 |
326 | @Override public void writeExternal(java.io.ObjectOutput out)
327 | throws java.io.IOException {
328 | WRITER$.write(this, SpecificData.getEncoder(out));
329 | }
330 |
331 | @SuppressWarnings("unchecked")
332 | private static final org.apache.avro.io.DatumReader
333 | READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$);
334 |
335 | @Override public void readExternal(java.io.ObjectInput in)
336 | throws java.io.IOException {
337 | READER$.read(this, SpecificData.getDecoder(in));
338 | }
339 |
340 | @Override protected boolean hasCustomCoders() { return true; }
341 |
342 | @Override public void customEncode(org.apache.avro.io.Encoder out)
343 | throws java.io.IOException
344 | {
345 | out.writeString(this.line);
346 |
347 | if (this.timestamp == null) {
348 | out.writeIndex(1);
349 | out.writeNull();
350 | } else {
351 | out.writeIndex(0);
352 | out.writeLong(this.timestamp);
353 | }
354 |
355 | }
356 |
357 | @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
358 | throws java.io.IOException
359 | {
360 | org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
361 | if (fieldOrder == null) {
362 | this.line = in.readString(this.line instanceof Utf8 ? (Utf8)this.line : null);
363 |
364 | if (in.readIndex() != 0) {
365 | in.readNull();
366 | this.timestamp = null;
367 | } else {
368 | this.timestamp = in.readLong();
369 | }
370 |
371 | } else {
372 | for (int i = 0; i < 2; i++) {
373 | switch (fieldOrder[i].pos()) {
374 | case 0:
375 | this.line = in.readString(this.line instanceof Utf8 ? (Utf8)this.line : null);
376 | break;
377 |
378 | case 1:
379 | if (in.readIndex() != 0) {
380 | in.readNull();
381 | this.timestamp = null;
382 | } else {
383 | this.timestamp = in.readLong();
384 | }
385 | break;
386 |
387 | default:
388 | throw new java.io.IOException("Corrupt ResolvingDecoder.");
389 | }
390 | }
391 | }
392 | }
393 | }
394 |
395 |
396 |
397 |
398 |
399 |
400 |
401 |
402 |
403 |
404 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/simple/avro/SimpleAvroAppConstants.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Red Hat
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.apicurio.registry.demo.simple.avro;
18 |
19 | /**
20 | * @author eric.wittmann@gmail.com
21 | */
/**
 * Shared constants for the simple Avro producer/consumer/bootstrapper demo apps.
 *
 * @author eric.wittmann@gmail.com
 */
public final class SimpleAvroAppConstants {

    /** Kafka topic the simple Avro apps write to and read from (also used as the artifact ID). */
    public static final String TOPIC_NAME = "SimpleAvroAppTopic";

    /** Name of the record type described by {@link #SCHEMA}. */
    public static final String SUBJECT_NAME = "Greeting";

    /** Avro schema (JSON) for the Greeting record: a string "Message" and a long "Time". */
    public static final String SCHEMA = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"}]}";

    /** Constants holder; not instantiable. */
    private SimpleAvroAppConstants() {
    }
}
30 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/simple/avro/SimpleAvroBootstrapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Red Hat
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.apicurio.registry.demo.simple.avro;
18 |
19 | import java.io.ByteArrayInputStream;
20 | import java.nio.charset.StandardCharsets;
21 |
22 | import javax.ws.rs.WebApplicationException;
23 |
24 | import org.slf4j.Logger;
25 | import org.slf4j.LoggerFactory;
26 |
27 | import io.apicurio.registry.client.RegistryRestClient;
28 | import io.apicurio.registry.client.RegistryRestClientFactory;
29 | import io.apicurio.registry.rest.beans.ArtifactMetaData;
30 | import io.apicurio.registry.rest.beans.IfExistsType;
31 | import io.apicurio.registry.types.ArtifactType;
32 |
33 | /**
34 | * This command line application is used to register the schema used by the producer and consumer in the
35 | * Apicurio registry. This must be run before the producer or consumer. It only needs to be run one
36 | * time as it simply stores a schema in the registry. If the registry is non-persistent (in-memory only)
37 | * then this will need to be executed once per registry startup.
38 | *
39 | * @author eric.wittmann@gmail.com
40 | */
41 | public class SimpleAvroBootstrapper {
42 |
43 | private static final Logger LOGGER = LoggerFactory.getLogger(SimpleAvroBootstrapper.class);
44 |
45 | private static RegistryRestClient client;
46 | static {
47 | // Create a Service Registry client
48 | String registryUrl = "http://localhost:8080/api";
49 | client = RegistryRestClientFactory.create(registryUrl);
50 | }
51 |
52 | public static final void main(String [] args) throws Exception {
53 | try {
54 | LOGGER.info("\n\n--------------\nBootstrapping the Avro Schema demo.\n--------------\n");
55 | String topicName = SimpleAvroAppConstants.TOPIC_NAME;
56 |
57 | // Register the Avro Schema schema in the Apicurio registry.
58 | String artifactId = topicName;
59 | try {
60 | createSchemaInServiceRegistry(artifactId, SimpleAvroAppConstants.SCHEMA);
61 | } catch (Exception e) {
62 | if (is409Error(e)) {
63 | LOGGER.warn("\n\n--------------\nWARNING: Schema already existed in registry!\n--------------\n");
64 | return;
65 | } else {
66 | throw e;
67 | }
68 | }
69 |
70 | LOGGER.info("\n\n--------------\nBootstrapping complete.\n--------------\n");
71 | } finally {
72 | }
73 | }
74 |
75 | /**
76 | * Create the artifact in the registry (or update it if it already exists).
77 | * @param artifactId
78 | * @param schema
79 | * @throws Exception
80 | */
81 | private static void createSchemaInServiceRegistry(String artifactId, String schema) throws Exception {
82 |
83 | LOGGER.info("---------------------------------------------------------");
84 | LOGGER.info("=====> Creating artifact in the registry for Avro Schema with ID: {}", artifactId);
85 | try {
86 | ByteArrayInputStream content = new ByteArrayInputStream(schema.getBytes(StandardCharsets.UTF_8));
87 | ArtifactMetaData metaData = client.createArtifact(ArtifactType.AVRO, artifactId, IfExistsType.RETURN, content);
88 | LOGGER.info("=====> Successfully created Avro Schema artifact in Service Registry: {}", metaData);
89 | LOGGER.info("---------------------------------------------------------");
90 | } catch (Exception t) {
91 | throw t;
92 | }
93 | }
94 |
95 | private static boolean is409Error(Exception e) {
96 | if (e.getCause() instanceof WebApplicationException) {
97 | WebApplicationException wae = (WebApplicationException) e.getCause();
98 | if (wae.getResponse().getStatus() == 409) {
99 | return true;
100 | }
101 | }
102 | return false;
103 | }
104 |
105 | }
106 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/simple/avro/SimpleAvroConsumerApp.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Red Hat
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.apicurio.registry.demo.simple.avro;
18 |
19 | import io.apicurio.registry.demo.utils.PropertiesUtil;
20 | import io.apicurio.registry.utils.serde.AbstractKafkaSerDe;
21 | import io.apicurio.registry.utils.serde.AvroKafkaDeserializer;
22 | import org.apache.avro.generic.GenericRecord;
23 | import org.apache.kafka.clients.consumer.ConsumerConfig;
24 | import org.apache.kafka.clients.consumer.ConsumerRecords;
25 | import org.apache.kafka.clients.consumer.KafkaConsumer;
26 | import org.apache.kafka.clients.producer.ProducerConfig;
27 | import org.apache.kafka.common.serialization.StringDeserializer;
28 | import org.slf4j.Logger;
29 | import org.slf4j.LoggerFactory;
30 |
31 | import java.time.Duration;
32 | import java.util.Collections;
33 | import java.util.Properties;
34 |
35 | /**
36 | * Kafka application that does the following:
37 | *
38 | * 1) Consumes messages from the topic!
39 | *
40 | * The application uses the Avro Kafka Deserializer to deserialize the message, which
41 | * will fetch the Schema from the Service Registry by its global identifier.
42 | *
43 | * @author eric.wittmann@gmail.com
44 | */
45 | public class SimpleAvroConsumerApp {
46 |
47 | private static final Logger LOGGER = LoggerFactory.getLogger(SimpleAvroConsumerApp.class);
48 |
49 | public static void main(String [] args) throws Exception {
50 | // Config properties!
51 | Properties props = PropertiesUtil.properties(args);
52 |
53 | // Configure Kafka
54 | props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
55 | props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + SimpleAvroAppConstants.TOPIC_NAME);
56 | props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
57 | props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
58 | props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
59 | props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
60 | props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName());
61 |
62 | // Configure Service Registry location
63 | props.putIfAbsent(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, "http://localhost:8080/api");
64 |
65 | // Create the Kafka Consumer
66 | KafkaConsumer consumer = new KafkaConsumer<>(props);
67 |
68 | // Subscribe to the topic
69 | LOGGER.info("=====> Subscribing to topic: {}", SimpleAvroAppConstants.TOPIC_NAME);
70 | consumer.subscribe(Collections.singletonList(SimpleAvroAppConstants.TOPIC_NAME));
71 |
72 | // Consume messages!!
73 | LOGGER.info("=====> Consuming messages...");
74 | try {
75 | while (Boolean.TRUE) {
76 | final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1));
77 | if (records.count() == 0) {
78 | // Do nothing - no messages waiting.
79 | } else records.forEach(record -> {
80 | LOGGER.info("=====> CONSUMED: {} {} {} {}", record.topic(),
81 | record.partition(), record.offset(), record.value());
82 | });
83 | }
84 | } finally {
85 | consumer.close();
86 | }
87 |
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/src/main/java/io/apicurio/registry/demo/simple/avro/SimpleAvroProducerApp.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Red Hat
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.apicurio.registry.demo.simple.avro;
18 |
19 |
20 | import io.apicurio.registry.demo.utils.PropertiesUtil;
21 | import io.apicurio.registry.utils.serde.AbstractKafkaSerDe;
22 | import io.apicurio.registry.utils.serde.AbstractKafkaSerializer;
23 | import io.apicurio.registry.utils.serde.AvroKafkaSerializer;
24 | import io.apicurio.registry.utils.serde.strategy.FindBySchemaIdStrategy;
25 | import io.apicurio.registry.utils.serde.strategy.SimpleTopicIdStrategy;
26 | import org.apache.avro.Schema;
27 | import org.apache.avro.generic.GenericData;
28 | import org.apache.avro.generic.GenericRecord;
29 | import org.apache.kafka.clients.producer.KafkaProducer;
30 | import org.apache.kafka.clients.producer.Producer;
31 | import org.apache.kafka.clients.producer.ProducerConfig;
32 | import org.apache.kafka.clients.producer.ProducerRecord;
33 | import org.apache.kafka.common.serialization.StringSerializer;
34 | import org.slf4j.Logger;
35 | import org.slf4j.LoggerFactory;
36 |
37 | import java.util.Date;
38 | import java.util.Properties;
39 |
40 | /**
41 | * Kafka application that does the following:
42 | *
43 | * 1) Produces a message every 5s on the topic
44 | *
45 | * @author eric.wittmann@gmail.com
46 | */
47 | public class SimpleAvroProducerApp {
48 |
49 | private static final Logger LOGGER = LoggerFactory.getLogger(SimpleAvroProducerApp.class);
50 |
51 | public static void main(String [] args) throws Exception {
52 | // Config properties!
53 | Properties props = PropertiesUtil.properties(args);
54 |
55 | // Configure kafka.
56 | props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
57 | props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + SimpleAvroAppConstants.TOPIC_NAME);
58 | props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all");
59 | props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
60 | props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName());
61 |
62 | // Configure Service Registry location and ID strategies
63 | props.putIfAbsent(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, "http://localhost:8080/api");
64 | props.putIfAbsent(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, SimpleTopicIdStrategy.class.getName());
65 | props.putIfAbsent(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, FindBySchemaIdStrategy.class.getName());
66 |
67 | // Create the Kafka producer
68 | Producer