Improvements/kafka rule node (#2505)

* added metadata key-values as kafka headers

* added default charset to configuration

* fix typo
This commit is contained in:
ShvaykaD 2020-03-11 18:07:42 +02:00 committed by GitHub
parent 72ef0ede74
commit 515dc983d3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 37 additions and 12 deletions

View File

@ -16,16 +16,21 @@
package org.thingsboard.rule.engine.kafka; package org.thingsboard.rule.engine.kafka;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.kafka.clients.producer.*; import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.thingsboard.rule.engine.api.util.TbNodeUtils; import org.thingsboard.rule.engine.api.util.TbNodeUtils;
import org.thingsboard.rule.engine.api.*; import org.thingsboard.rule.engine.api.*;
import org.thingsboard.server.common.data.plugin.ComponentType; import org.thingsboard.server.common.data.plugin.ComponentType;
import org.thingsboard.server.common.msg.TbMsg; import org.thingsboard.server.common.msg.TbMsg;
import org.thingsboard.server.common.msg.TbMsgMetaData; import org.thingsboard.server.common.msg.TbMsgMetaData;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Properties; import java.util.Properties;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
@Slf4j @Slf4j
@RuleNode( @RuleNode(
@ -46,8 +51,11 @@ public class TbKafkaNode implements TbNode {
private static final String PARTITION = "partition"; private static final String PARTITION = "partition";
private static final String TOPIC = "topic"; private static final String TOPIC = "topic";
private static final String ERROR = "error"; private static final String ERROR = "error";
public static final String TB_MSG_MD_PREFIX = "tb_msg_md_";
private TbKafkaNodeConfiguration config; private TbKafkaNodeConfiguration config;
private boolean addMetadataKeyValuesAsKafkaHeaders;
private Charset toBytesCharset;
private Producer<?, String> producer; private Producer<?, String> producer;
@ -66,8 +74,10 @@ public class TbKafkaNode implements TbNode {
properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, config.getBufferMemory()); properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, config.getBufferMemory());
if (config.getOtherProperties() != null) { if (config.getOtherProperties() != null) {
config.getOtherProperties() config.getOtherProperties()
.forEach((k,v) -> properties.put(k, v)); .forEach(properties::put);
} }
addMetadataKeyValuesAsKafkaHeaders = BooleanUtils.toBooleanDefaultIfNull(config.isAddMetadataKeyValuesAsKafkaHeaders(), false);
toBytesCharset = config.getKafkaHeadersCharset() != null ? Charset.forName(config.getKafkaHeadersCharset()) : StandardCharsets.UTF_8;
try { try {
this.producer = new KafkaProducer<>(properties); this.producer = new KafkaProducer<>(properties);
} catch (Exception e) { } catch (Exception e) {
@ -79,16 +89,16 @@ public class TbKafkaNode implements TbNode {
public void onMsg(TbContext ctx, TbMsg msg) throws ExecutionException, InterruptedException, TbNodeException { public void onMsg(TbContext ctx, TbMsg msg) throws ExecutionException, InterruptedException, TbNodeException {
String topic = TbNodeUtils.processPattern(config.getTopicPattern(), msg.getMetaData()); String topic = TbNodeUtils.processPattern(config.getTopicPattern(), msg.getMetaData());
try { try {
producer.send(new ProducerRecord<>(topic, msg.getData()), if (!addMetadataKeyValuesAsKafkaHeaders) {
(metadata, e) -> { producer.send(new ProducerRecord<>(topic, msg.getData()),
if (metadata != null) { (metadata, e) -> processRecord(ctx, msg, metadata, e));
TbMsg next = processResponse(ctx, msg, metadata); } else {
ctx.tellNext(next, TbRelationTypes.SUCCESS); Headers headers = new RecordHeaders();
} else { msg.getMetaData().values().forEach((key, value) -> headers.add(new RecordHeader(TB_MSG_MD_PREFIX + key, value.getBytes(toBytesCharset))));
TbMsg next = processException(ctx, msg, e); producer.send(new ProducerRecord<>(topic, null, null, null, msg.getData(), headers),
ctx.tellFailure(next, e); (metadata, e) -> processRecord(ctx, msg, metadata, e));
} }
});
} catch (Exception e) { } catch (Exception e) {
ctx.tellFailure(msg, e); ctx.tellFailure(msg, e);
} }
@ -105,6 +115,16 @@ public class TbKafkaNode implements TbNode {
} }
} }
// Shared completion handler for Kafka producer send callbacks: routes the original
// message to SUCCESS (enriched with record metadata) or FAILURE (enriched with the error).
// Per the Kafka Producer Callback contract, exactly one of {metadata, e} is non-null,
// so checking metadata != null is sufficient to distinguish the two outcomes.
private void processRecord(TbContext ctx, TbMsg msg, RecordMetadata metadata, Exception e) {
if (metadata != null) {
// Send succeeded: copy offset/partition/topic into msg metadata and forward on SUCCESS.
TbMsg next = processResponse(ctx, msg, metadata);
ctx.tellNext(next, TbRelationTypes.SUCCESS);
} else {
// Send failed: record the error in msg metadata and route to the failure chain.
TbMsg next = processException(ctx, msg, e);
ctx.tellFailure(next, e);
}
}
private TbMsg processResponse(TbContext ctx, TbMsg origMsg, RecordMetadata recordMetadata) { private TbMsg processResponse(TbContext ctx, TbMsg origMsg, RecordMetadata recordMetadata) {
TbMsgMetaData metaData = origMsg.getMetaData().copy(); TbMsgMetaData metaData = origMsg.getMetaData().copy();
metaData.putValue(OFFSET, String.valueOf(recordMetadata.offset())); metaData.putValue(OFFSET, String.valueOf(recordMetadata.offset()));

View File

@ -36,6 +36,9 @@ public class TbKafkaNodeConfiguration implements NodeConfiguration<TbKafkaNodeCo
private String valueSerializer; private String valueSerializer;
private Map<String, String> otherProperties; private Map<String, String> otherProperties;
private boolean addMetadataKeyValuesAsKafkaHeaders;
private String kafkaHeadersCharset;
@Override @Override
public TbKafkaNodeConfiguration defaultConfiguration() { public TbKafkaNodeConfiguration defaultConfiguration() {
TbKafkaNodeConfiguration configuration = new TbKafkaNodeConfiguration(); TbKafkaNodeConfiguration configuration = new TbKafkaNodeConfiguration();
@ -49,6 +52,8 @@ public class TbKafkaNodeConfiguration implements NodeConfiguration<TbKafkaNodeCo
configuration.setKeySerializer(StringSerializer.class.getName()); configuration.setKeySerializer(StringSerializer.class.getName());
configuration.setValueSerializer(StringSerializer.class.getName()); configuration.setValueSerializer(StringSerializer.class.getName());
configuration.setOtherProperties(Collections.emptyMap()); configuration.setOtherProperties(Collections.emptyMap());
configuration.setAddMetadataKeyValuesAsKafkaHeaders(false);
configuration.setKafkaHeadersCharset("UTF-8");
return configuration; return configuration;
} }
} }