Skip to content

Commit fdfb815

Browse files
committed
DBZ-5720 [Cloudevents] Switch Quarkus app to Apicurio
1 parent f4dacaa commit fdfb815

7 files changed

Lines changed: 32 additions & 18 deletions

File tree

cloudevents/README.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ Examine its contents like so:
 docker run --rm --tty \
     --network cloudevents-network \
     quay.io/debezium/tooling:1.2 \
-    kafkacat -b kafka:9092 -C -o beginning -q -s value=avro -r http://schema-registry:8080 \
+    kafkacat -b kafka:9092 -C -o beginning -q \
     -t customers2 | jq .
 ```


@@ -65,7 +65,7 @@ curl -i -X PUT -H "Accept:application/json" -H "Content-Type:application/json"
 docker run --rm --tty \
     --network cloudevents-network \
     quay.io/debezium/tooling:1.2 \
-    kafkacat -b kafka:9092 -C -o beginning -q -s value=avro -r http://schema-registry:8080 \
+    kafkacat -b kafka:9092 -C -o beginning -q \
     -t dbserver3.inventory.customers | jq .
 ```


@@ -76,8 +76,8 @@ The same stream processing application writes out that data to the `customers3`
 docker run --rm --tty \
     --network cloudevents-network \
     quay.io/debezium/tooling:1.2 \
-    kafkacat -b kafka:9092 -C -o beginning -q -s value=avro -r http://schema-registry:8081 \
-    -t customers2 | jq .
+    kafkacat -b kafka:9092 -C -o beginning -q \
+    -t customers3
 ```

 ## CloudEvents Binary Mode

cloudevents/avro-data-extractor/pom.xml

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
         <version.surefire>3.0.0-M6</version.surefire>

         <apache.kafka.version>3.2.0</apache.kafka.version>
-        <version.quarkus>2.11.0.Final</version.quarkus>
+        <version.quarkus>2.13.1.Final</version.quarkus>
         <version.debezium>2.0.0.Final</version.debezium>
         <version.kafka.avro>7.2.0</version.kafka.avro>
     </properties>
@@ -98,7 +98,12 @@
         <dependency>
             <groupId>io.confluent</groupId>
             <artifactId>kafka-streams-avro-serde</artifactId>
-            <version>5.3.2</version>
+            <version>7.2.1</version>
+        </dependency>
+        <dependency>
+            <groupId>io.apicurio</groupId>
+            <artifactId>apicurio-registry-utils-serde</artifactId>
+            <version>1.3.2.Final</version>
         </dependency>
         <dependency>
             <groupId>io.quarkus</groupId>
@@ -125,7 +130,6 @@
             <artifactId>kafka-connect-avro-converter</artifactId>
             <version>${version.kafka.avro}</version>
         </dependency>
-
     </dependencies>
     <profiles>
         <profile>

cloudevents/avro-data-extractor/src/main/docker/Dockerfile.jvm

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ COPY --chown=185 target/quarkus-app/*.jar /deployments/
 COPY --chown=185 target/quarkus-app/app/ /deployments/app/
 COPY --chown=185 target/quarkus-app/quarkus/ /deployments/quarkus/

-EXPOSE 8080
+EXPOSE 8079
 USER 185
 ENV AB_JOLOKIA_OFF=""
 ENV JAVA_OPTS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"

cloudevents/avro-data-extractor/src/main/java/io/debezium/examples/cloudevents/dataextractor/StreamsPipelineManager.java

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,13 @@

 import java.nio.ByteBuffer;
 import java.util.Collections;
-import java.util.Map;

 import javax.enterprise.context.ApplicationScoped;
 import javax.enterprise.inject.Produces;
 import javax.inject.Inject;

 import org.apache.avro.generic.GenericRecord;
+import org.apache.kafka.common.serialization.Deserializer;
 import org.apache.kafka.common.serialization.Serde;
 import org.apache.kafka.common.serialization.Serdes;
 import org.apache.kafka.streams.StreamsBuilder;
@@ -24,10 +24,15 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
-import io.confluent.kafka.streams.serdes.avro.GenericAvroSerde;
+import io.apicurio.registry.client.CompatibleClient;
+import io.apicurio.registry.client.RegistryService;
+import io.apicurio.registry.utils.serde.AvroKafkaDeserializer;
+import io.apicurio.registry.utils.serde.AvroKafkaSerializer;
+
 import io.debezium.examples.cloudevents.dataextractor.model.CloudEvent;
 import io.debezium.serde.DebeziumSerdes;
+
+
 /**
  * Starts up the KStreams pipeline once the source topics have been created.
  *
@@ -73,9 +78,10 @@ Topology createStreamTopology() {
                 .mapValues(ce -> ce.data)
                 .to(jsonAvroExtractedTopic, Produced.with(longKeySerde, Serdes.ByteArray()));

-        Serde<GenericRecord> genericAvroSerde = new GenericAvroSerde();
-        Map<String, String> config = Collections.singletonMap(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
-        genericAvroSerde.configure(config, false);
+
+        RegistryService service = CompatibleClient.createCompatible(schemaRegistryUrl);
+        Deserializer<GenericRecord> deserializer = new AvroKafkaDeserializer(service);
+        Serde<GenericRecord> genericAvroSerde = Serdes.serdeFrom(new AvroKafkaSerializer<>(service), deserializer);

         builder.stream(avroAvroCustomersTopic, Consumed.with(longKeySerde, genericAvroSerde))
                 .mapValues(ce -> ((ByteBuffer) ce.get("data")).array())

cloudevents/avro-data-extractor/src/main/java/io/debezium/examples/cloudevents/dataextractor/model/CloudEvent.java

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,12 +14,16 @@ public class CloudEvent {
     public String iodebeziumconnector;
     public String iodebeziumname;
     public String iodebeziumtsms;
-    public boolean iodebeziumsnapshot;
+    public String iodebeziumsnapshot;
     public String iodebeziumdb;
+    public String iodebeziumsequence;
     public String iodebeziumschema;
     public String iodebeziumtable;
     public String iodebeziumtxId;
+    public String iodebeziumtxid;
     public String iodebeziumlsn;
     public String iodebeziumxmin;
+    public String iodebeziumtxtotalorder;
+    public String iodebeziumtxdatacollectionorder;
     public byte[] data;
 }

cloudevents/avro-data-extractor/src/main/resources/application.properties

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,9 @@ json.avro.extracted.topic=customers2
 avro.avro.customers.topic=dbserver3.inventory.customers
 avro.avro.extracted.topic=customers3

-schema.registry.url=http://schema-registry:8081
+schema.registry.url=http://schema-registry:8080/api/

-quarkus.kafka-streams.bootstrap-servers=localhost:9092
+quarkus.kafka-streams.bootstrap-servers=kafka:9092
 quarkus.kafka-streams.application-id=cloudevents-data-extractor
 quarkus.kafka-streams.topics=${json.avro.customers.topic},${avro.avro.customers.topic}

cloudevents/docker-compose.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ services:
     networks:
      - my-network
   schema-registry:
-    image: apicurio/apicurio-registry-mem:2.2.5.Final
+    image: apicurio/apicurio-registry-mem:2.3.1.Final
    ports:
     - 8080:8080
    networks:

0 commit comments

Comments (0)