I need to add a few extra headers for my own purposes (logging and so on), and I'm a bit confused about how to add them. I believe they should be set at the point where I communicate directly via the send() method. How can I set custom headers on my Kafka message in a Spring application where I send like this:
final var streamMessage = StreamMessage
        .builder()
        .payload(Event.builder().eventId(eventId).result(result).build())
        .build();

ListenableFuture<SendResult<String, StreamMessage<?>>> future = kafkaTemplate.send(TOPIC, streamMessage);
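From what I understand, kafkaTemplate.send() also accepts a ProducerRecord, so I suppose I could wrap the payload and attach headers roughly like this (just a sketch; the header name "X-Correlation-Id" and the correlationId variable are placeholders I made up), but I'm not sure whether this is the intended place:

// Sketch: wrap the payload in a ProducerRecord so headers can be attached before sending.
// "X-Correlation-Id" and correlationId are placeholders, not values from my real code.
ProducerRecord<String, StreamMessage<?>> record = new ProducerRecord<>(TOPIC, streamMessage);
record.headers().add("X-Correlation-Id", correlationId.getBytes(StandardCharsets.UTF_8));

ListenableFuture<SendResult<String, StreamMessage<?>>> future = kafkaTemplate.send(record);

Is that the right approach, or should the headers rather be set somewhere in the configuration below?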
And my Kafka configuration looks like this:
@Configuration
public class KafkaConfiguration {

    private final Map<String, Object> producerProps;
    private final Map<String, Object> consumerProps;

    @Autowired
    public KafkaConfiguration(@Value("${kafka.bootstrap.servers}") String bootstrapServers) {
        this.producerProps = producerProps(bootstrapServers);
        this.consumerProps = consumerProps(bootstrapServers);
    }

    private Map<String, Object> producerProps(String bootstrapServers) {
        final Map<String, Object> props = new ConcurrentHashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        return props;
    }

    private Map<String, Object> consumerProps(String bootstrapServers) {
        final Map<String, Object> props = new ConcurrentHashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 50);
        return props;
    }

    @Bean
    public ConsumerFactory<String, StreamMessage<?>> consumerFactory() {
        JsonDeserializer<StreamMessage<?>> deserializer = new JsonDeserializer<>(StreamMessage.class);
        deserializer.setRemoveTypeHeaders(false);
        deserializer.addTrustedPackages("*");
        deserializer.setUseTypeMapperForKey(true);
        return new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(), deserializer);
    }

    @Bean
    public ProducerFactory<String, StreamMessage<?>> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerProps);
    }

    @Bean
    public KafkaTemplate<String, StreamMessage<?>> kafkaProducerTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, StreamMessage<?>> listenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, StreamMessage<?>> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}
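Alternatively, I wondered whether the headers could be added centrally by registering a ProducerInterceptor in producerProps, so every outgoing message gets them without touching each send() call. This is only a sketch of what I mean; the class name HeaderAddingInterceptor and the header value are placeholders I invented:

import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerInterceptor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class HeaderAddingInterceptor implements ProducerInterceptor<String, StreamMessage<?>> {

    @Override
    public ProducerRecord<String, StreamMessage<?>> onSend(ProducerRecord<String, StreamMessage<?>> record) {
        // Attach the same custom header to every outgoing record before it is serialized
        record.headers().add("X-Source-Service", "my-service".getBytes(StandardCharsets.UTF_8));
        return record;
    }

    @Override
    public void onAcknowledgement(RecordMetadata metadata, Exception exception) {
        // No-op: nothing to do on broker acknowledgement in this sketch
    }

    @Override
    public void close() {
        // No-op: no resources to release
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // No-op: no extra configuration needed for this sketch
    }
}

I would then register it in producerProps with something like props.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, HeaderAddingInterceptor.class.getName());. Would that work with the setup above, or is setting the headers directly at the send() call the better option?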