Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import java.nio.charset.StandardCharsets;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;

Expand All @@ -20,18 +20,23 @@ public ProducerRecord<byte[], byte[]> create(String topic,
@Nullable String key,
@Nullable String value,
@Nullable Map<String, String> headers) {

Headers kafkaHeaders = createHeaders(headers);

return new ProducerRecord<>(
topic,
partition,
key == null ? null : keySerializer.serialize(key),
value == null ? null : valuesSerializer.serialize(value),
headers == null ? null : createHeaders(headers)
key == null ? null : keySerializer.serialize(key, kafkaHeaders),
value == null ? null : valuesSerializer.serialize(value, kafkaHeaders),
kafkaHeaders
);
}

private Iterable<Header> createHeaders(Map<String, String> clientHeaders) {
private Headers createHeaders(Map<String, String> clientHeaders) {
RecordHeaders headers = new RecordHeaders();
clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v == null ? null : v.getBytes())));
if (clientHeaders != null) {
clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v == null ? null : v.getBytes())));
}
return headers;
}

Expand Down
13 changes: 12 additions & 1 deletion api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.header.Headers;

@Slf4j
@RequiredArgsConstructor
Expand Down Expand Up @@ -80,7 +81,17 @@ public boolean canDeserialize(String topic, Serde.Target type) {
/**
 * Returns a {@link Serde.Serializer} for the given topic and target whose calls
 * all execute under this serde's own classloader.
 *
 * @param topic topic the serializer is created for
 * @param type  serialization target (key or value)
 * @return a serializer delegating both {@code serialize} overloads to the wrapped serde
 */
public Serde.Serializer serializer(String topic, Serde.Target type) {
  return wrapWithClassloader(() -> {
    var serializer = serde.serializer(topic, type);
    // Anonymous class instead of a lambda: both serialize overloads must be
    // delegated, each re-entering the serde classloader at call time.
    return new Serde.Serializer() {
      @Override
      public byte[] serialize(String input) {
        return wrapWithClassloader(() -> serializer.serialize(input));
      }

      @Override
      public byte[] serialize(String input, Headers headers) {
        return wrapWithClassloader(() -> serializer.serialize(input, headers));
      }
    };
  });
}

Expand Down
4 changes: 4 additions & 0 deletions serde-api/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@ tasks.register('javadocJar', Jar) {
from javadoc.destinationDir
}

dependencies {
// kafka-clients supplies org.apache.kafka.common.header.Headers, now referenced
// by the public Serde.Serializer API in this module.
implementation libs.kafka.clients
}

artifacts {
archives sourceJar, javadocJar
}
Expand Down
5 changes: 5 additions & 0 deletions serde-api/src/main/java/io/kafbat/ui/serde/api/Serde.java
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import java.io.Closeable;
import java.util.Optional;
import org.apache.kafka.common.header.Headers;

/**
* Main interface of serialization/deserialization logic.
Expand Down Expand Up @@ -121,6 +122,10 @@ interface Serializer {
* @return serialized bytes. Can be null if input is null or empty string.
*/
byte[] serialize(String input);

/**
 * Serializes {@code input}, additionally receiving the record {@code headers}
 * so implementations may read or write serialization metadata (e.g. schema ids).
 * The default implementation ignores {@code headers} and delegates to
 * {@link #serialize(String)}, preserving compatibility for existing serdes.
 */
default byte[] serialize(String input, Headers headers) {
return serialize(input);
}
}

/**
Expand Down
Loading