Background: when consuming a message fails and it is published to the DLQ, I would like to add a custom header; the header key is "exception" and the value is the exception.

This is my configuration:

spring:
  cloud:
    function:
      definition: numberConsumer
    stream:
      bindings:
        numberProducer-out-0:
          destination: first-topic
        numberConsumer-in-0:
          group: group
          destination: first-topic
      kafka:
        bindings:
          numberConsumer-in-0:
            consumer:
              standard-headers:
              enableDlq: true
              dlqName: dlq
              dlq-partitions: 1

This is my consumer:

  @Bean
  public Consumer<String> numberConsumer() {
    return message -> log.info("receive message : {}", message);
  }

I tried to do it this way, but it does not work. I'm not sure what's wrong with my code:

@Configuration
@Slf4j
public class KafkaConfiguration {

  @Bean
  public ListenerContainerCustomizer<AbstractMessageListenerContainer<String, String>> customizer(DefaultErrorHandler errorHandler) {
    return (container, dest, group) -> {
      container.setCommonErrorHandler(errorHandler);
    };
  }

  @Bean
  public DefaultErrorHandler errorHandler(DeadLetterPublishingRecoverer deadLetterPublishingRecoverer) {
    return new DefaultErrorHandler(deadLetterPublishingRecoverer);
  }

  @Bean
  public DeadLetterPublishingRecoverer publisher(KafkaOperations template) {
    DeadLetterPublishingRecoverer recover = new DeadLetterPublishingRecoverer(template);

    recover.setExceptionHeadersCreator((kafkaHeaders, exception, isKey, headerNames) -> {
      var exceptionType = getRootCauseExceptionType(exception);
      kafkaHeaders.add("exception-type", exceptionType.getBytes());
    });
    return recover;
  }
}

I am using spring-cloud-stream-kafka version 4.0.1.

  • I didn't use it before, but after looking into the class docs, I think for adding headers you should use `addHeadersFunction` https://docs.spring.io/spring-kafka/docs/current/api/org/springframework/kafka/listener/DeadLetterPublishingRecoverer.html#addHeadersFunction(java.util.function.BiFunction) – Karim Tawfik Mar 13 '23 at 08:17
  • Thanks for your reply. I used it before, but it does not work; I guess it's provided by kafka.retrytopic, so it doesn't work in spring cloud stream kafka – Yingjie Guan Mar 13 '23 at 08:58
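
For reference, the `addHeadersFunction` approach suggested in the first comment would look roughly like the snippet below. This is a minimal sketch, not code from the question: it assumes the recoverer is wired the same way as the publisher bean above, and the header name and charset are illustrative.

  // assumed imports: java.nio.charset.StandardCharsets,
  // org.apache.kafka.common.header.Headers, org.apache.kafka.common.header.internals.RecordHeaders
  @Bean
  public DeadLetterPublishingRecoverer publisher(KafkaOperations<?, ?> template) {
    DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(template);
    // addHeadersFunction appends the returned headers to the record that is published to the DLQ
    recoverer.addHeadersFunction((consumerRecord, exception) -> {
      Headers headers = new RecordHeaders();
      headers.add("exception", exception.toString().getBytes(StandardCharsets.UTF_8));
      return headers;
    });
    return recoverer;
  }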

1 Answer

I fixed this issue by adding this bean:

  @Bean
  ListenerContainerWithDlqAndRetryCustomizer cust(KafkaTemplate<?, ?> template) {
    return new ListenerContainerWithDlqAndRetryCustomizer() {

      @Override
      public void configure(AbstractMessageListenerContainer<?, ?> container, String destinationName,
                            String group,
                            @Nullable BiFunction<ConsumerRecord<?, ?>, Exception, TopicPartition> dlqDestinationResolver,
                            @Nullable BackOff backOff) {

        // Build a recoverer that publishes failed records to the DLQ resolved by the binder
        DeadLetterPublishingRecoverer dlpr = new DeadLetterPublishingRecoverer(template,
            dlqDestinationResolver);
        // Take over exception header creation, replacing the default exception headers
        // with a single custom "exception-type" header
        dlpr.setExceptionHeadersCreator((kafkaHeaders, exception, isKey, headerNames) -> {
          var exceptionType = getRootCauseExceptionType(exception);
          kafkaHeaders.add("exception-type", exceptionType.getBytes());
        });

        container.setCommonErrorHandler(new DefaultErrorHandler(dlpr, backOff));
      }

      // Returning false moves retry/DLQ handling out of the binding itself, so the
      // error handler configured above is the one that handles failures
      @Override
      public boolean retryAndDlqInBinding(String destinationName, String group) {
        return false;
      }

    };
  }
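
To verify that the header actually shows up on the dead-letter record, a separate consumer can be bound to the dlq topic. This is a minimal sketch, not part of the original answer: the dlqConsumer function name and its dlqConsumer-in-0 binding (destination: dlq, with dlqConsumer added to spring.cloud.function.definition) are assumptions.

  // assumed imports: java.nio.charset.StandardCharsets, java.util.function.Consumer,
  // org.springframework.messaging.Message
  @Bean
  public Consumer<Message<String>> dlqConsumer() {
    return message -> {
      // Depending on the header mapper configuration, the custom header usually arrives as a byte[]
      Object exceptionType = message.getHeaders().get("exception-type");
      log.info("dlq payload: {}, exception-type: {}", message.getPayload(),
          exceptionType instanceof byte[] bytes ? new String(bytes, StandardCharsets.UTF_8) : exceptionType);
    };
  }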