tags = 3;
78 | }
79 |
80 | message KafkaMeasurable {
81 | KafkaMetricName metricName = 1;
82 | double value = 2;
83 | }
84 |
85 | enum MetricType {
86 | UNKNOWN = 0;
87 | BROKER = 1;
88 | PRODUCER = 2;
89 | CONSUMER = 3;
90 | }
91 |
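// Top-level envelope for a batch of metrics reported by a single Kafka broker or client.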
92 | message MetricsMessage {
93 | MetricType metricType = 1;
94 | int64 timestamp = 2;
95 | // an array of Yammer gauges
96 | repeated YammerGauge yammerGauge = 3;
97 | // an array of Yammer meters
98 | repeated YammerMeter yammerMeter = 4;
99 | // an array of Yammer histograms
100 | repeated YammerHistogram yammerHistogram = 5;
101 | // an array of Yammer timers
102 | repeated YammerTimer yammerTimer = 6;
103 | // an array of Kafka measurables
104 | repeated KafkaMeasurable kafkaMeasurable = 7;
105 | string clusterId = 8;
106 | int32 brokerId = 9;
107 | // Client Id for consumers and producers
108 | string clientId = 10;
109 | // Group Id for consumers
110 | string groupId = 11;
111 | // System-level metrics
112 | SystemMetrics systemMetrics = 12;
113 |
114 | string version = 13;
115 | string commitId = 14;
116 | // an array of the Kafka process roles defined by KafkaConfig.ProcessRolesProp
117 | // The values are sanitized and sorted alphabetically
118 | repeated string processRoles = 15;
119 | }
120 |
121 | message SystemMetrics {
122 | // Volume metrics
123 | repeated VolumeMetrics volumes = 1;
124 | }
125 |
126 | message VolumeMetrics {
127 | string name = 1;
128 | int64 usableBytes = 2;
129 | int64 totalBytes = 3;
130 | repeated LogDir logDirs = 4;
131 | }
132 |
133 | message LogDir {
134 | string path = 1;
135 | }
136 |
--------------------------------------------------------------------------------
/src/test/java/io/kcache/kwack/util/LocalClusterTestHarness.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed under the Apache License, Version 2.0 (the "License");
3 | * you may not use this file except in compliance with the License.
4 | * You may obtain a copy of the License at
5 | *
6 | * http://www.apache.org/licenses/LICENSE-2.0
7 | *
8 | * Unless required by applicable law or agreed to in writing, software
9 | * distributed under the License is distributed on an "AS IS" BASIS,
10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | * See the License for the specific language governing permissions and
12 | * limitations under the License.
13 | */
14 |
15 | package io.kcache.kwack.util;
16 |
17 | import io.confluent.kafka.schemaregistry.SchemaProvider;
18 | import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
19 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
20 | import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
21 | import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
22 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
23 | import io.kcache.kwack.KwackConfig;
24 | import io.kcache.kwack.KwackEngine;
25 | import java.util.Arrays;
26 | import java.util.Collections;
27 | import java.util.List;
28 | import java.util.Properties;
29 | import org.junit.jupiter.api.AfterEach;
30 | import org.junit.jupiter.api.BeforeEach;
31 | import org.slf4j.Logger;
32 | import org.slf4j.LoggerFactory;
33 |
34 | /**
35 | * Test harness to run against a real, local Kafka cluster. It essentially combines
36 | * Kafka's ZookeeperTestHarness and KafkaServerTestHarness traits.
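 * <p>Subclasses can override {@link #injectKwackProperties(Properties)} to adjust the engine
 * configuration and {@link #registerInitialSchemas(SchemaRegistryClient)} to pre-register
 * schemas with the mock schema registry before each test runs.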
37 | */
38 | public abstract class LocalClusterTestHarness extends ClusterTestHarness {
39 |
40 | private static final Logger LOG = LoggerFactory.getLogger(LocalClusterTestHarness.class);
41 |
42 | protected static final String MOCK_URL = "mock://kwack";
43 |
44 | protected Properties props;
45 |
46 | protected Integer serverPort;
47 | protected KwackEngine engine;
48 |
49 | public LocalClusterTestHarness() {
50 | super();
51 | }
52 |
53 | public LocalClusterTestHarness(int numBrokers) {
54 | super(numBrokers);
55 | }
56 |
57 | public KwackEngine getEngine() {
58 | return engine;
59 | }
60 |
61 | @Override
62 | protected void setUp() throws Exception {
63 | super.setUp();
64 |
65 | Thread.sleep(1000); // brief pause to let the embedded cluster settle before wiring up the engine
66 |
67 | setUpServer();
68 | List<SchemaProvider> providers = Arrays.asList(
69 | new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider()
70 | );
71 | SchemaRegistryClient schemaRegistry = KwackEngine.createSchemaRegistry(
72 | Collections.singletonList(MOCK_URL), providers, null);
73 | registerInitialSchemas(schemaRegistry);
74 | }
75 |
76 | private void setUpServer() {
77 | try {
78 | props = new Properties();
79 | injectKwackProperties(props);
80 |
81 | KwackConfig config = new KwackConfig(props);
82 |
83 | engine = KwackEngine.getInstance();
84 | engine.configure(config);
85 | } catch (Exception e) {
86 | LOG.error("Server died unexpectedly", e);
87 | System.exit(1);
88 | }
89 | }
90 |
91 | protected void registerInitialSchemas(SchemaRegistryClient schemaRegistry) throws Exception {
92 | }
93 |
94 | protected void injectKwackProperties(Properties props) {
95 | props.put(KwackConfig.KAFKACACHE_BOOTSTRAP_SERVERS_CONFIG, brokerList);
96 | props.put(KwackConfig.KAFKACACHE_TOPIC_REPLICATION_FACTOR_CONFIG, 1);
97 | props.put(KwackConfig.SCHEMA_REGISTRY_URL_CONFIG, MOCK_URL);
98 | props.put(KwackConfig.DB_CONFIG, ":memory:?cache=private");
99 | props.put(AbstractKafkaSchemaSerDeConfig.AUTO_REGISTER_SCHEMAS, "true");
100 | }
101 |
102 | @AfterEach
103 | public void tearDown() throws Exception {
104 | try {
105 | KwackEngine.closeInstance();
106 | } catch (Exception e) {
107 | LOG.warn("Exception during tearDown", e);
108 | }
109 | super.tearDown();
110 | }
111 | }
112 |
--------------------------------------------------------------------------------
/src/test/java/io/kcache/kwack/util/RestApp.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed under the Apache License, Version 2.0 (the "License");
3 | * you may not use this file except in compliance with the License.
4 | * You may obtain a copy of the License at
5 | *
6 | * http://www.apache.org/licenses/LICENSE-2.0
7 | *
8 | * Unless required by applicable law or agreed to in writing, software
9 | * distributed under the License is distributed on an "AS IS" BASIS,
10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | * See the License for the specific language governing permissions and
12 | * limitations under the License.
13 | */
14 |
15 | package io.kcache.kwack.util;
16 |
17 |
18 | import io.confluent.kafka.schemaregistry.CompatibilityLevel;
19 | import io.confluent.kafka.schemaregistry.client.rest.RestService;
20 | import io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryException;
21 | import io.confluent.kafka.schemaregistry.rest.SchemaRegistryConfig;
22 | import io.confluent.kafka.schemaregistry.rest.SchemaRegistryRestApplication;
23 | import io.confluent.kafka.schemaregistry.storage.SchemaRegistry;
24 | import io.confluent.kafka.schemaregistry.storage.SchemaRegistryIdentity;
25 | import java.util.Properties;
26 | import org.eclipse.jetty.server.Server;
27 |
28 | public class RestApp {
29 |
30 | public final Properties prop;
31 | public RestService restClient;
32 | public SchemaRegistryRestApplication restApp;
33 | public Server restServer;
34 | public String restConnect;
35 |
36 | public RestApp(int port, String zkConnect, String kafkaTopic) {
37 | this(port, zkConnect, kafkaTopic, CompatibilityLevel.NONE.name, null);
38 | }
39 |
40 | public RestApp(int port, String zkConnect, String kafkaTopic, String compatibilityType, Properties schemaRegistryProps) {
41 | this(port, zkConnect, null, kafkaTopic, compatibilityType, true, schemaRegistryProps);
42 | }
43 |
44 | public RestApp(int port,
45 | String zkConnect, String kafkaTopic,
46 | String compatibilityType, boolean leaderEligibility, Properties schemaRegistryProps) {
47 | this(port, zkConnect, null, kafkaTopic, compatibilityType,
48 | leaderEligibility, schemaRegistryProps);
49 | }
50 |
51 | public RestApp(int port,
52 | String zkConnect, String bootstrapBrokers,
53 | String kafkaTopic, String compatibilityType, boolean leaderEligibility,
54 | Properties schemaRegistryProps) {
55 | prop = new Properties();
56 | if (schemaRegistryProps != null) {
57 | prop.putAll(schemaRegistryProps);
58 | }
59 | prop.setProperty(SchemaRegistryConfig.PORT_CONFIG, ((Integer) port).toString());
60 | if (bootstrapBrokers != null) {
61 | prop.setProperty(SchemaRegistryConfig.KAFKASTORE_BOOTSTRAP_SERVERS_CONFIG, bootstrapBrokers);
62 | }
63 | prop.put(SchemaRegistryConfig.KAFKASTORE_TOPIC_CONFIG, kafkaTopic);
64 | prop.put(SchemaRegistryConfig.SCHEMA_COMPATIBILITY_CONFIG, compatibilityType);
65 | prop.put(SchemaRegistryConfig.LEADER_ELIGIBILITY, leaderEligibility);
66 | }
67 |
68 | public void start() throws Exception {
69 | restApp = new SchemaRegistryRestApplication(prop);
70 | restServer = restApp.createServer();
71 | restServer.start();
72 | restApp.postServerStart();
73 | restConnect = restServer.getURI().toString();
74 | if (restConnect.endsWith("/"))
75 | restConnect = restConnect.substring(0, restConnect.length()-1);
76 | // For testing with Apache Http Client
77 | // restClient = new RestService(restConnect, false, true);
78 | restClient = new RestService(restConnect);
79 | }
80 |
81 | public void stop() throws Exception {
82 | if (restClient != null) {
83 | restClient.close();
84 | restClient = null;
85 | }
86 | if (restServer != null) {
87 | restServer.stop();
88 | restServer.join();
89 | }
90 | }
91 |
92 | /**
93 | * This method must be called before calling {@code RestApp.start()}
94 | * for the additional properties to take effect.
95 | *
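 * <p>Illustrative usage (port, topic, and property values are examples only):
 * <pre>{@code
 * RestApp app = new RestApp(8081, null, "_schemas");
 * Properties extra = new Properties();
 * extra.put(SchemaRegistryConfig.SCHEMA_COMPATIBILITY_CONFIG, "BACKWARD");
 * app.addConfigs(extra);  // must happen before start()
 * app.start();
 * }</pre>
 *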
96 | * @param props the additional properties to set
97 | */
98 | public void addConfigs(Properties props) {
99 | prop.putAll(props);
100 | }
101 |
102 | public boolean isLeader() {
103 | return restApp.schemaRegistry().isLeader();
104 | }
105 |
106 | public void setLeader(SchemaRegistryIdentity schemaRegistryIdentity)
107 | throws SchemaRegistryException {
108 | restApp.schemaRegistry().setLeader(schemaRegistryIdentity);
109 | }
110 |
111 | public SchemaRegistryIdentity myIdentity() {
112 | return restApp.schemaRegistry().myIdentity();
113 | }
114 |
115 | public SchemaRegistryIdentity leaderIdentity() {
116 | return restApp.schemaRegistry().leaderIdentity();
117 | }
118 |
119 | public SchemaRegistry schemaRegistry() {
120 | return restApp.schemaRegistry();
121 | }
122 | }
123 |
--------------------------------------------------------------------------------
/src/test/java/io/kcache/kwack/ProtobufTest.java:
--------------------------------------------------------------------------------
1 | package io.kcache.kwack;
2 |
3 | import static org.junit.jupiter.api.Assertions.assertEquals;
4 |
5 | import com.google.common.collect.ImmutableList;
6 | import com.google.common.collect.Lists;
7 | import com.google.protobuf.ByteString;
8 | import com.google.protobuf.Timestamp;
9 | import com.google.type.Date;
10 | import com.google.type.TimeOfDay;
11 | import io.confluent.protobuf.type.utils.DecimalUtils;
12 | import io.kcache.kwack.proto.ComplexProto.Data;
13 | import io.kcache.kwack.proto.ComplexProto.Kind;
14 | import io.kcache.kwack.proto.ComplexProto.Complex;
15 | import io.kcache.kwack.proto.SimpleProto.Simple;
16 | import io.reactivex.rxjava3.core.Observable;
17 | import java.io.IOException;
18 | import java.math.BigDecimal;
19 | import java.math.BigInteger;
20 | import java.time.Instant;
21 | import java.time.LocalDate;
22 | import java.time.LocalTime;
23 | import java.util.ArrayList;
24 | import java.util.Base64;
25 | import java.util.HashMap;
26 | import java.util.List;
27 | import java.util.Map;
28 | import java.util.Properties;
29 | import org.apache.kafka.clients.producer.KafkaProducer;
30 | import org.junit.jupiter.api.Test;
31 |
32 | public class ProtobufTest extends AbstractSchemaTest {
33 |
34 | private Simple createSimpleObj() {
35 | return Simple.newBuilder().setId(123).setName("hi").build();
36 | }
37 |
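// Builds a Complex message that exercises scalar, enum, oneof, repeated, map,
// decimal, date, time-of-day, and timestamp fields.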
38 | private Complex createComplexObj() {
39 | return Complex.newBuilder()
40 | .setName("test")
41 | .setMystring("testUser")
42 | .setMybytes(ByteString.copyFrom(new byte[]{0, 1, 2}))
43 | .setMyint(1)
44 | .setMyuint(2)
45 | .setMylong(2L)
46 | .setMyulong(3L)
47 | .setMyfloat(3.0f)
48 | .setMydouble(4.0d)
49 | .setMyboolean(true)
50 | .setKind(Kind.ONE)
51 | .setMyoneofint(5)
52 | .addStrArray("hi")
53 | .addStrArray("there")
54 | .addDataArray(Data.newBuilder().setData("hi").build())
55 | .addDataArray(Data.newBuilder().setData("there").build())
56 | .putDataMap("bye", Data.newBuilder().setData("there").build())
57 | .setDecimal(DecimalUtils.fromBigDecimal(new BigDecimal("123.45")))
58 | .setDate(Date.newBuilder().setYear(2024).setMonth(1).setDay(1).build())
59 | .setTime(TimeOfDay.newBuilder().setHours(12).setMinutes(30).setSeconds(30).build())
60 | .setTimestamp(Timestamp.newBuilder().setSeconds(1234567890L).build())
61 | .build();
62 | }
63 |
64 | @Test
65 | public void testSimple() throws IOException {
66 | Simple obj = createSimpleObj();
67 | Properties producerProps = createProducerProps(MOCK_URL);
68 | KafkaProducer producer = createProducer(producerProps);
69 | produce(producer, getTopic(), new Object[] { obj });
70 | producer.close();
71 |
72 | engine.init();
73 | Observable