1

I am getting the error "Magic v1 does not support record headers" while producing a message. Below is my code:

KafkaProducerConfig:

@Configuration
public class KafkaProducerConfig {

    // NOTE(review): this reads "kafka.bootstrap-servers" while the consumer and
    // topic configs read "spring.kafka.bootstrap-servers" — confirm both keys
    // exist in the application properties, or unify them.
    @Value(value = "${kafka.bootstrap-servers}")
    private String bootstrapAddress;

    /**
     * Producer factory for String keys and JSON-serialized {@code Event} values.
     *
     * <p>{@code ADD_TYPE_INFO_HEADERS} is placed in the config map so that the
     * factory-created {@code JsonSerializer} picks it up in {@code configure()}
     * and does not add type-info record headers.
     */
    @Bean
    public ProducerFactory<String, Event> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);
        return new DefaultKafkaProducerFactory<>(props);
    }

    /** Template used by application code to publish {@code Event} messages. */
    @Bean
    public KafkaTemplate<String, Event> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}

KafkaConsumerConfig:

@EnableKafka
@Configuration
public class KafkaConsumerConfig {

    @Value(value = "${spring.kafka.bootstrap-servers}")
    private String bootstrapAddress;

    @Value(value = "${spring.kafka.consumer.group-id}")
    private String groupId;

    @Value(value = "${kafka.consumer.enable.auto.commit}")
    private String autoCommit;

    @Value(value = "${kafka.consumer.auto.commit.interval.ms}")
    private String autoCommitInterval;

    @Value(value = "${kafka.consumer.auto.offset.reset}")
    private String autoOffsetReset;

    @Value(value = "${kafka.consumer.session.timeout.ms}")
    private String sessionTimeout;

    @Value(value = "${kafka.consumer.concurrency}")
    private String concurrency;

    @Value(value = "${kafka.consumer.pollTimeout}")
    private String pollTimeout;

    /**
     * Consumer factory for String keys and JSON-decoded {@code Event} values.
     *
     * <p>The deserializers are supplied as explicit instances so the
     * {@code JsonDeserializer} is bound to {@code Event}. The
     * {@code KEY/VALUE_DESERIALIZER_CLASS_CONFIG} entries previously set in the
     * map were dead configuration — explicit instances take precedence over
     * class-name config — so they have been removed to avoid confusion.
     */
    @Bean
    public ConsumerFactory<String, Event> consumerFactory() {
        Map<String, Object> config = new HashMap<>();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit);
        config.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
        config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
        return new DefaultKafkaConsumerFactory<>(config, new StringDeserializer(),
                new JsonDeserializer<>(Event.class));
    }

    /**
     * Listener container factory backing {@code @KafkaListener} methods,
     * with concurrency and poll timeout taken from application properties.
     */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Event> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Event> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(Integer.parseInt(concurrency));
        factory.getContainerProperties().setPollTimeout(Integer.parseInt(pollTimeout));
        return factory;
    }
}

KafkaTopicConfig:

@Configuration
public class KafkaTopicConfig {

    @Value(value = "${spring.kafka.bootstrap-servers}")
    private String bootstrapAddress;

    @Value(value = "${kafka.topicName}")
    private String topicName;

    @Value(value = "${kafka.topic.partitions}")
    private String partitions;

    @Value(value = "${kafka.topic.replicationFactor}")
    private String replicationFactor;

    /** Admin client used by Spring to create the declared topics on startup. */
    @Bean
    public KafkaAdmin kafkaAdmin() {
        Map<String, Object> configs = new HashMap<>();
        configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        return new KafkaAdmin(configs);
    }

    /**
     * Topic declaration, created by {@link #kafkaAdmin()} if absent.
     *
     * <p>Method renamed to lowerCamelCase; the original bean name is kept via
     * {@code @Bean(name = ...)} so any by-name references remain valid. The
     * redundant {@code (short)} cast was removed — {@code Short.parseShort}
     * already returns {@code short}.
     */
    @Bean(name = "ClientTopic")
    public NewTopic clientTopic() {
        return new NewTopic(topicName, Integer.parseInt(partitions), Short.parseShort(replicationFactor));
    }
}

Producing a message (this is where the error occurs):

     kafkaTemplate.send(topicName, event);

Consuming message:

// Receives Event payloads decoded by the configured JsonDeserializer.
// NOTE(review): topic and group id are hard-coded here while the rest of the
// setup reads them from properties — confirm they match the produced topic.
@KafkaListener(topics = "someTopicName", groupId = "somegroupId")
    public void consume(Event event) {
       // Business logic for the received event goes here.
    }

Gradle dependencies I am using:

 implementation ('org.springframework.kafka:spring-kafka')
 implementation('com.fasterxml.jackson.core:jackson-databind:2.9.4')

Spring boot version which I am using:

springBootVersion = '2.0.3.RELEASE'

Please let me know what I am doing wrong

I have tried adding the following in the producer factory, but it didn't work:

config.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);
Chandrika Joshi
  • 1,211
  • 11
  • 24
Gopinath
  • 31
  • 1
  • 6
  • What is your Kafka broker version? – Gary Russell Apr 16 '19 at 14:57
  • @GaryRussell - sorry for late reply, below details, the version is kafka_2.12-0.10.2.1.jar where 2.12 is version of scala and 0.10.2.1 is kafka – Gopinath Apr 17 '19 at 02:04
  • The 0.10 broker does not support headers. It is very old. Can you not upgrade? Regardless, as long as you don't add headers, it should work ok. Use a debugger in the serializer to verify the headers are empty. – Gary Russell Apr 17 '19 at 02:41
  • @GaryRussell I was trying to not to add header using 'config.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false)' this piece of code in producer factory but still headers are getting added. Please let me know how can I avoid adding header – Gopinath Apr 17 '19 at 03:24
  • That should prevent it. As I said, use a debugger to see what's wrong. If you can't figure it out, post a minimal test project someplace. But you really need to get a newer broker. There have been many improvements. – Gary Russell Apr 17 '19 at 03:27
  • @GaryRussell - OK. Let me try to debug the code what's going wrong and if possible I will try to upgrade broker to newer version. – Gopinath Apr 17 '19 at 04:03
  • @GaryRussell I upgraded broker to newer version, issue is fixed. Thank you very much :) – Gopinath Apr 22 '19 at 12:42
  • For anyone else encountering this issue and unable to upgrade broker, it could be due to tracing headers being added. See: https://stackoverflow.com/a/76118062/6346531 – aksh1618 Apr 27 '23 at 08:23

0 Answers0