Commit 515dc983d3deff4e320eebd6219b6d378d04d24c
Committed by GitHub · 1 parent: 72ef0ede
Improvements/kafka rule node (#2505)
* added metadata key-values as kafka headers
* added default charset to configuration
* fix typo
Showing 2 changed files with 37 additions and 12 deletions.
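In effect, when the new `addMetadataKeyValuesAsKafkaHeaders` flag is enabled, the node copies every metadata entry of the outgoing message onto the Kafka record as a header, prefixing each key with `tb_msg_md_` and encoding the value with the configured charset (UTF-8 by default). A minimal standalone sketch of that mapping, using the same kafka-clients header classes as the diff below (the metadata values here are made up for illustration):

```java
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;

import java.nio.charset.StandardCharsets;
import java.util.Map;

public class HeaderMappingSketch {
    public static void main(String[] args) {
        // Hypothetical TbMsg metadata; a real message would carry keys such as deviceName
        Map<String, String> metadata = Map.of("deviceName", "Sensor-A", "deviceType", "thermostat");

        // Same mapping the rule node performs: prefix each key, encode each value
        Headers headers = new RecordHeaders();
        metadata.forEach((key, value) ->
                headers.add(new RecordHeader("tb_msg_md_" + key, value.getBytes(StandardCharsets.UTF_8))));

        headers.forEach(h -> System.out.println(h.key()));
        // prints tb_msg_md_deviceName and tb_msg_md_deviceType (iteration order may vary)
    }
}
```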
TbKafkaNode.java:

```diff
@@ -16,16 +16,21 @@
 package org.thingsboard.rule.engine.kafka;
 
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.BooleanUtils;
 import org.apache.kafka.clients.producer.*;
+import org.apache.kafka.common.header.Headers;
+import org.apache.kafka.common.header.internals.RecordHeader;
+import org.apache.kafka.common.header.internals.RecordHeaders;
 import org.thingsboard.rule.engine.api.util.TbNodeUtils;
 import org.thingsboard.rule.engine.api.*;
 import org.thingsboard.server.common.data.plugin.ComponentType;
 import org.thingsboard.server.common.msg.TbMsg;
 import org.thingsboard.server.common.msg.TbMsgMetaData;
 
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.atomic.AtomicInteger;
 
 @Slf4j
 @RuleNode(
@@ -46,8 +51,11 @@ public class TbKafkaNode implements TbNode {
     private static final String PARTITION = "partition";
     private static final String TOPIC = "topic";
     private static final String ERROR = "error";
+    public static final String TB_MSG_MD_PREFIX = "tb_msg_md_";
 
     private TbKafkaNodeConfiguration config;
+    private boolean addMetadataKeyValuesAsKafkaHeaders;
+    private Charset toBytesCharset;
 
     private Producer<?, String> producer;
 
@@ -66,8 +74,10 @@ public class TbKafkaNode implements TbNode {
         properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, config.getBufferMemory());
         if (config.getOtherProperties() != null) {
             config.getOtherProperties()
-                    .forEach((k,v) -> properties.put(k, v));
+                    .forEach(properties::put);
         }
+        addMetadataKeyValuesAsKafkaHeaders = BooleanUtils.toBooleanDefaultIfNull(config.isAddMetadataKeyValuesAsKafkaHeaders(), false);
+        toBytesCharset = config.getKafkaHeadersCharset() != null ? Charset.forName(config.getKafkaHeadersCharset()) : StandardCharsets.UTF_8;
         try {
             this.producer = new KafkaProducer<>(properties);
         } catch (Exception e) {
@@ -79,16 +89,16 @@ public class TbKafkaNode implements TbNode {
     public void onMsg(TbContext ctx, TbMsg msg) throws ExecutionException, InterruptedException, TbNodeException {
         String topic = TbNodeUtils.processPattern(config.getTopicPattern(), msg.getMetaData());
         try {
-            producer.send(new ProducerRecord<>(topic, msg.getData()),
-                    (metadata, e) -> {
-                        if (metadata != null) {
-                            TbMsg next = processResponse(ctx, msg, metadata);
-                            ctx.tellNext(next, TbRelationTypes.SUCCESS);
-                        } else {
-                            TbMsg next = processException(ctx, msg, e);
-                            ctx.tellFailure(next, e);
-                        }
-                    });
+            if (!addMetadataKeyValuesAsKafkaHeaders) {
+                producer.send(new ProducerRecord<>(topic, msg.getData()),
+                        (metadata, e) -> processRecord(ctx, msg, metadata, e));
+            } else {
+                Headers headers = new RecordHeaders();
+                msg.getMetaData().values().forEach((key, value) -> headers.add(new RecordHeader(TB_MSG_MD_PREFIX + key, value.getBytes(toBytesCharset))));
+                producer.send(new ProducerRecord<>(topic, null, null, null, msg.getData(), headers),
+                        (metadata, e) -> processRecord(ctx, msg, metadata, e));
+            }
+
         } catch (Exception e) {
             ctx.tellFailure(msg, e);
         }
@@ -105,6 +115,16 @@ public class TbKafkaNode implements TbNode {
         }
     }
 
+    private void processRecord(TbContext ctx, TbMsg msg, RecordMetadata metadata, Exception e) {
+        if (metadata != null) {
+            TbMsg next = processResponse(ctx, msg, metadata);
+            ctx.tellNext(next, TbRelationTypes.SUCCESS);
+        } else {
+            TbMsg next = processException(ctx, msg, e);
+            ctx.tellFailure(next, e);
+        }
+    }
+
     private TbMsg processResponse(TbContext ctx, TbMsg origMsg, RecordMetadata recordMetadata) {
         TbMsgMetaData metaData = origMsg.getMetaData().copy();
         metaData.putValue(OFFSET, String.valueOf(recordMetadata.offset()));
```
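Because only the producer side is touched here, consumers that want the original metadata back have to strip the `tb_msg_md_` prefix themselves. A consumer-side sketch with the standard kafka-clients API; the bootstrap server, group id, and topic name are placeholders, not values from this commit:

```java
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.header.Header;

import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class MetadataHeaderConsumer {
    private static final String TB_MSG_MD_PREFIX = "tb_msg_md_";

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "tb-header-demo");          // placeholder
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("tb-telemetry"));   // placeholder topic
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
            for (ConsumerRecord<String, String> record : records) {
                // Rebuild the TbMsg metadata map by stripping the rule node's prefix
                Map<String, String> metadata = new HashMap<>();
                for (Header header : record.headers()) {
                    if (header.key().startsWith(TB_MSG_MD_PREFIX)) {
                        metadata.put(header.key().substring(TB_MSG_MD_PREFIX.length()),
                                new String(header.value(), StandardCharsets.UTF_8));
                    }
                }
                System.out.println("payload=" + record.value() + " metadata=" + metadata);
            }
        }
    }
}
```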
TbKafkaNodeConfiguration.java:

```diff
@@ -36,6 +36,9 @@ public class TbKafkaNodeConfiguration implements NodeConfiguration<TbKafkaNodeConfiguration> {
     private String valueSerializer;
     private Map<String, String> otherProperties;
 
+    private boolean addMetadataKeyValuesAsKafkaHeaders;
+    private String kafkaHeadersCharset;
+
     @Override
     public TbKafkaNodeConfiguration defaultConfiguration() {
         TbKafkaNodeConfiguration configuration = new TbKafkaNodeConfiguration();
@@ -49,6 +52,8 @@ public class TbKafkaNodeConfiguration implements NodeConfiguration<TbKafkaNodeConfiguration> {
         configuration.setKeySerializer(StringSerializer.class.getName());
         configuration.setValueSerializer(StringSerializer.class.getName());
         configuration.setOtherProperties(Collections.emptyMap());
+        configuration.setAddMetadataKeyValuesAsKafkaHeaders(false);
+        configuration.setKafkaHeadersCharset("UTF-8");
         return configuration;
     }
 }
```
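The defaults keep existing rule chains unchanged: header forwarding stays off until the flag is set, and the charset string falls back to UTF-8 when absent (for example, in node configs saved before this change). A small sketch of that resolution, assuming the Lombok-generated accessors implied by the diff and that `TbKafkaNodeConfiguration` is on the classpath:

```java
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

public class KafkaNodeConfigDemo {
    public static void main(String[] args) {
        // Defaults as produced by defaultConfiguration() above
        TbKafkaNodeConfiguration configuration = new TbKafkaNodeConfiguration().defaultConfiguration();

        // Header forwarding is opt-in
        boolean addHeaders = configuration.isAddMetadataKeyValuesAsKafkaHeaders(); // false

        // Same fallback the node applies when it builds toBytesCharset:
        // a missing charset string still resolves to UTF-8
        Charset toBytesCharset = configuration.getKafkaHeadersCharset() != null
                ? Charset.forName(configuration.getKafkaHeadersCharset())
                : StandardCharsets.UTF_8;

        System.out.println(addHeaders + " / " + toBytesCharset); // false / UTF-8
    }
}
```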