feat: Added input data conversion to JSON. Resolves #14
Julien Ruaux committed Jun 29, 2022
1 parent 547cbf9 commit c610d4e
Showing 2 changed files with 24 additions and 1 deletion.
6 changes: 6 additions & 0 deletions pom.xml
@@ -86,6 +86,12 @@
     <version>${kafka.version}</version>
     <scope>provided</scope>
   </dependency>
+  <dependency>
+    <groupId>org.apache.kafka</groupId>
+    <artifactId>connect-json</artifactId>
+    <version>${kafka.version}</version>
+    <scope>provided</scope>
+  </dependency>
   <dependency>
     <groupId>com.redis</groupId>
     <artifactId>lettucemod</artifactId>
19 changes: 18 additions & 1 deletion RedisEnterpriseSinkTask.java
@@ -18,6 +18,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -28,11 +29,14 @@
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.config.ConfigException;
 import org.apache.kafka.connect.data.Field;
+import org.apache.kafka.connect.data.Schema;
 import org.apache.kafka.connect.data.Struct;
 import org.apache.kafka.connect.errors.ConnectException;
 import org.apache.kafka.connect.errors.DataException;
+import org.apache.kafka.connect.json.JsonConverter;
 import org.apache.kafka.connect.sink.SinkRecord;
 import org.apache.kafka.connect.sink.SinkTask;
+import org.apache.kafka.connect.storage.Converter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.batch.item.ExecutionContext;
@@ -73,6 +77,7 @@ public class RedisEnterpriseSinkTask extends SinkTask {
     private RedisEnterpriseSinkConfig config;
     private RedisItemWriter<byte[], byte[], SinkRecord> writer;
     private StatefulRedisConnection<String, String> connection;
+    private Converter jsonConverter;
 
     @Override
     public String version() {
@@ -84,6 +89,8 @@ public void start(final Map<String, String> props) {
         config = new RedisEnterpriseSinkConfig(props);
         client = RedisModulesClient.create(config.getRedisURI());
         connection = client.connect();
+        jsonConverter = new JsonConverter();
+        jsonConverter.configure(Collections.singletonMap("schemas.enable", "false"), false);
         writer = writer(client).build();
         writer.open(new ExecutionContext());
         final java.util.Set<TopicPartition> assignment = this.context.assignment();
@@ -139,7 +146,7 @@ private RedisOperation<byte[], byte[], SinkRecord> operation() {
             return Hset.<byte[], byte[], SinkRecord>key(this::key).map(this::map).del(this::isDelete).build();
         case JSON:
             return JsonSet.<byte[], byte[], SinkRecord>key(this::key).path(".".getBytes(config.getCharset()))
-                    .value(this::value).del(this::isDelete).build();
+                    .value(this::jsonValue).del(this::isDelete).build();
         case STRING:
             return Set.<byte[], byte[], SinkRecord>key(this::key).value(this::value).del(this::isDelete).build();
         case STREAM:
@@ -166,6 +173,16 @@ private byte[] value(SinkRecord record) {
         return bytes("value", record.value());
     }
 
+    private byte[] jsonValue(SinkRecord record) {
+        if (record.value() == null) {
+            return null;
+        }
+        Schema schema = record.valueSchema();
+        Object value = record.value();
+
+        return jsonConverter.fromConnectData(record.topic(), schema, value);
+    }
+
     private Long longMember(SinkRecord record) {
         Object key = record.key();
         if (key == null) {
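
For context, here is a minimal standalone sketch (not part of the commit) of what the new jsonValue() path produces: JsonConverter, configured with schemas.enable=false, serializes a Connect Struct value into plain JSON bytes, which the JsonSet operation can then write under the "." path. The class name, topic, schema, and field names below are hypothetical, chosen only for illustration.

// Hypothetical example mirroring the jsonValue() method added above.
import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.json.JsonConverter;

public class JsonValueSketch {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        // schemas.enable=false drops the {"schema":...,"payload":...} envelope,
        // leaving only the payload as plain JSON.
        converter.configure(Collections.singletonMap("schemas.enable", "false"), false);

        // Hypothetical record value with an explicit schema.
        Schema schema = SchemaBuilder.struct()
                .field("id", Schema.STRING_SCHEMA)
                .field("price", Schema.FLOAT64_SCHEMA)
                .build();
        Struct value = new Struct(schema).put("id", "product:1").put("price", 9.99);

        // Same call the sink task makes: topic, value schema, value.
        byte[] json = converter.fromConnectData("my-topic", schema, value);
        System.out.println(new String(json)); // {"id":"product:1","price":9.99}
    }
}

Because schemas are disabled on the converter, schemaless values (for example a plain java.util.Map from a source without a schema) should serialize the same way, so the JSON sink type works whether or not the record carries a value schema.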
