
I have a single Kafka consumer. It consumes a string; based on the string, we convert it to different Avro objects and publish them to different topics. We require exactly-once semantics (EOS). The issue we are getting is that the producer marked with @Primary works, but the one without @Primary fails with the error below. Is there any way to accommodate both?
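
The batch listener that actually throws the error is not shown; based on the method signature in the stack trace further down (KafkaConsumerMoz.consume), a hypothetical sketch of its shape looks roughly like this — the routing rule, topic names, and Avro mapping here are assumptions for illustration, not the real code:

@Component
public class KafkaConsumerMoz {

    @Autowired
    private KafkaTopicProducer kafkaTopicProducer;

    @KafkaListener(topics = "${kafka.consumer.topic}", containerFactory = "kafkaListenerContainerFactory")
    public boolean consume(List<ConsumerRecord<String, String>> records, Consumer<?, ?> consumer) {
        for (ConsumerRecord<String, String> record : records) {
            String value = record.value();
            // Route each record: some become Avro entities, the rest are republished as plain strings
            if (value.startsWith("{")) { // assumed routing rule
                kafkaTopicProducer.topicProducerAvro(toAvroEntity(value), "avro.topic", record.headers());
            } else {
                kafkaTopicProducer.topicProducerNonAvro(value, "nonavro.topic", record.headers());
            }
        }
        return true;
    }

    private TransactionAvroEntity toAvroEntity(String value) {
        // Placeholder mapping; the real conversion is application-specific
        return new TransactionAvroEntity();
    }
}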

KafkaConsumer

@Configuration
public class KafkaConsumerConfig {

    @Value("${kafka.server}")
    String server;

    @Value("${kafka.consumer.groupid}")
    String groupid;

    @Autowired
    Tracer tracer;

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> config = new HashMap<>();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, groupid);
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        config.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        config.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 120000);
        config.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 10000);
        //config.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 15000);       

        return new TracingConsumerFactory<>(new DefaultKafkaConsumerFactory<>(config), tracer);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
            KafkaAwareTransactionManager<Object, Object> transactionManager) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<String, String>();
        factory.setConsumerFactory(consumerFactory());
        factory.setAutoStartup(false);
        factory.setConcurrency(2);

        factory.setBatchListener(true);
        factory.getContainerProperties().setAckMode(AckMode.BATCH);
        factory.getContainerProperties().setEosMode(EOSMode.ALPHA);
        factory.getContainerProperties().setTransactionManager(transactionManager);

        return factory;
    }

}

KafkaProducer 1

@Configuration
public class KafkaProducerConfig {

    @Value("${kafka.server}")
    String server;

    @Autowired
    public Tracer tracer;

    String tranId = "eventsanavro";

    
    @Bean(name = "transactionalProducerFactoryAvro")
    public ProducerFactory<String, TransactionAvroEntity> producerFactoryavro() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroSerializer.class.getName());
        config.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        config.put(ProducerConfig.ACKS_CONFIG, "all");
        config.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, tranId);
        config.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        config.put(ProducerConfig.LINGER_MS_CONFIG, "200");
        config.put(ProducerConfig.BATCH_SIZE_CONFIG, Integer.toString(256 * 1024));
        config.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 120000);
        config.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 60000);
        config.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 5);
        config.put(ProducerConfig.BUFFER_MEMORY_CONFIG, Integer.toString(32768 * 1024));
        return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>(config), tracer);
    }

    
    @Qualifier("transactionalProducerFactoryAvro")
    @Bean(name = "transactionalKafkaTemplateAvro")
    public KafkaTemplate<String, TransactionAvroEntity> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactoryavro());
    }

    
    @Qualifier("transactionalProducerFactoryAvro")
    @Bean(name = "transactionalKafkaTransactionManagerAvro")
    public KafkaAwareTransactionManager<?, ?> kafkaTransactionManager(
            ProducerFactory<String, TransactionAvroEntity> producerFactory) {
        return new KafkaTransactionManager<>(producerFactory);
    }
}

KafkaProducer 2

@Configuration
public class KafkaProducerNonAvroConfig {

    @Value("${kafka.server}")
    String server;

    @Autowired
    public Tracer tracer;
    
    String tranId = "eventsannonavro";
    @Primary
    @Bean(name = "transactionalProducerFactoryNonAvro")
    public ProducerFactory<String, String> producerFactoryNonAvro() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        config.put(ProducerConfig.ACKS_CONFIG, "all");
        config.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, tranId);
        config.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        config.put(ProducerConfig.LINGER_MS_CONFIG, "200");
        config.put(ProducerConfig.BATCH_SIZE_CONFIG, Integer.toString(256 * 1024));     
        config.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG,120000);
        config.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG,60000);                             
        config.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 5);        
        config.put(ProducerConfig.BUFFER_MEMORY_CONFIG, Integer.toString(32768* 1024));
        return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>(config), tracer);
    }

    @Primary
    @Qualifier("transactionalProducerFactoryNonAvro")
    @Bean(name = "transactionalKafkaTemplateNonAvro")
    public KafkaTemplate<String, String> kafkatemplate() {
        return new KafkaTemplate<>(producerFactoryNonAvro());
    }
    
    @Primary
    @Qualifier("transactionalProducerFactoryNonAvro")
    @Bean(name = "transactionalKafkaTransactionManagerNonAvro")
    public KafkaAwareTransactionManager<?, ?> kafkaTransactionManager(ProducerFactory<String, String> producerFactory) {
        return new KafkaTransactionManager<>(producerFactory);
    }
    
}

ProducerWrapper

@Service
public class KafkaTopicProducer {

    @Autowired
    private KafkaTemplate<String, TransactionAvroEntity> kafkaTemplate;

    @Autowired
    private KafkaTemplate<String, String> kafkaProducerNonAvrokafkaTemplate;

    public void topicProducerAvro(TransactionAvroEntity payload, String topic, Headers headers) {

        ProducerRecord<String, TransactionAvroEntity> producerRecord = new ProducerRecord<String, TransactionAvroEntity>(
                topic, null, UUID.randomUUID().toString(), payload, headers);


        kafkaTemplate.send(producerRecord);

    }

    public void kafkaAvroFlush() {
        kafkaTemplate.flush();
    }

    public void topicProducerNonAvro(String payload, String topic, Headers headers) {

        ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>(topic, null,
                UUID.randomUUID().toString(), payload, headers);

        kafkaProducerNonAvrokafkaTemplate.send(producerRecord);

    }

    public void kafkaNonAvroFlush() {
        kafkaProducerNonAvrokafkaTemplate.flush();
    }
}

Error

Caused by: java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record

Full Stack Trace

2022-05-03 09:35:11,358  INFO  [nerMoz-0-C-1] o.a.kafka.clients.consumer.KafkaConsumer : [Consumer clientId=consumer-ifhEventSanitizer-1, groupId=ifhEventSanitizer] Seeking to offset 0 for partition za.local.file.singleLineGLTransactionEvent.1-0 
 2022-05-03 09:35:11,883  INFO  [nerMoz-0-C-1] o.a.kafka.clients.producer.KafkaProducer : [Producer clientId=producer-eventsanavroifhEventSanitizer.za.local.file.singleLineGLTransactionEvent.1.0, transactionalId=eventsanavroifhEventSanitizer.za.local.file.singleLineGLTransactionEvent.1.0] Aborting incomplete transaction 
 2022-05-03 09:35:11,884  ERROR [nerMoz-0-C-1] essageListenerContainer$ListenerConsumer : Transaction rolled back 
org.springframework.kafka.listener.ListenerExecutionFailedException: Listener method 'public boolean com.fnb.fin.ifhEventSanitizer.kafka.KafkaConsumerMoz.consume(java.util.List<org.apache.kafka.clients.consumer.ConsumerRecord<java.lang.String, java.lang.String>>,org.apache.kafka.clients.consumer.Consumer<?, ?>)' threw exception; nested exception is java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record; nested exception is java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.decorateException(KafkaMessageListenerContainer.java:2372)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:2008)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessageWithRecordsOrList(KafkaMessageListenerContainer.java:1978)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessage(KafkaMessageListenerContainer.java:1930)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchListener(KafkaMessageListenerContainer.java:1842)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.access$2100(KafkaMessageListenerContainer.java:518)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer$1.doInTransactionWithoutResult(KafkaMessageListenerContainer.java:1749)
    at org.springframework.transaction.support.TransactionCallbackWithoutResult.doInTransaction(TransactionCallbackWithoutResult.java:36)
    at org.springframework.transaction.support.TransactionTemplate.execute(TransactionTemplate.java:140)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchListenerInTx(KafkaMessageListenerContainer.java:1740)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchListener(KafkaMessageListenerContainer.java:1722)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:1704)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1274)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1266)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1161)
    at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
    at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    at java.base/java.lang.Thread.run(Thread.java:832)
    Suppressed: org.springframework.kafka.listener.ListenerExecutionFailedException: Restored Stack Trace
        at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:363)
        at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.invoke(BatchMessagingMessageListenerAdapter.java:180)
        at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:172)
        at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:61)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:1988)
Caused by: java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record
    at org.springframework.util.Assert.state(Assert.java:76)
    at org.springframework.kafka.core.KafkaTemplate.getTheProducer(KafkaTemplate.java:657)
    at org.springframework.kafka.core.KafkaTemplate.doSend(KafkaTemplate.java:569)
    at org.springframework.kafka.core.KafkaTemplate.send(KafkaTemplate.java:406)
    at com.fnb.fin.ifhEventSanitizer.kafka.KafkaTopicProducer.topicProducerNonAvro(KafkaTopicProducer.java:44)
    at com.fnb.fin.ifhEventSanitizer.kafka.KafkaConsumerMoz.consume(KafkaConsumerMoz.java:108)
    at jdk.internal.reflect.GeneratedMethodAccessor111.invoke(Unknown Source)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:564)
    at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:171)
    at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:120)
    at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:56)
    at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:347)
    at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.invoke(BatchMessagingMessageListenerAdapter.java:180)
    at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:172)
    at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:61)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:1988)
    ... 16 common frames omitted
Aleshan
  • You need to show the full stack trace; it appears you are trying to use a transactional template when there is no transaction in process; e.g. for a producer-only transaction; in which case, you need to use one of the described techniques to start the transaction. – Gary Russell May 02 '22 at 14:00
  • @GaryRussell I have added the full stack trace. We are trying to achieve exactly-once, so we create and wire a KafkaTransactionManager into the consumer and producer. However, since we have one producer for Avro data and another for non-Avro, only the one marked @Primary participates in the transaction; the other throws the stack trace above. Is there any way to link more than one producer to a consumer, or to create a generic producer that can serialize both Avro and strings? – Aleshan May 03 '22 at 10:20

1 Answer


The KafkaTransactionManager can only start a transaction in a producer from one factory. Even if it could start two, you would lose the EOS guarantee: the two producers would run in separate transactions, so sends to both could not be committed atomically together.

To solve this problem, you should use one producer factory with a DelegatingByTypeSerializer or DelegatingByTopicSerializer.

e.g.

public ProducerFactory<String, Object> producerFactory() {
    ...
    Map<Class<?>, Serializer<?>> delegates = new LinkedHashMap<>(); // retains the order when iterating
    delegates.put(String.class, new StringSerializer());
    delegates.put(Object.class, new JsonSerializer<>());
    DelegatingByTypeSerializer dbts = new DelegatingByTypeSerializer(delegates, true); // true = also match assignable types
    return new TracingProducerFactory<>(
        new DefaultKafkaProducerFactory<>(config, new StringSerializer(), dbts), tracer);
}
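
Building on this, a minimal sketch of how the wrapper service could then use a single template (assuming a KafkaTemplate<String, Object> bean is built from the factory above; if the existing AvroSerializer should keep handling TransactionAvroEntity, register it as the delegate for that type instead of JsonSerializer):

@Service
public class KafkaTopicProducer {

    // One transactional template backed by the delegating-serializer factory above,
    // so Avro and non-Avro sends share the transaction started by the listener container.
    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    public void topicProducerAvro(TransactionAvroEntity payload, String topic, Headers headers) {
        kafkaTemplate.send(new ProducerRecord<>(topic, null, UUID.randomUUID().toString(), payload, headers));
    }

    public void topicProducerNonAvro(String payload, String topic, Headers headers) {
        kafkaTemplate.send(new ProducerRecord<>(topic, null, UUID.randomUUID().toString(), payload, headers));
    }

    public void kafkaFlush() {
        kafkaTemplate.flush();
    }
}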
Gary Russell
  • Thank you so much, this worked perfectly! You sir are a legend! I was wondering if you could assist me with another issue I have open at the link below: https://stackoverflow.com/questions/72027226/kafka-eos-retry-flag – Aleshan May 03 '22 at 15:11