We are working on a requirement where the Spring-kafka consumer is committing to a topic and then should manually commit the offset.
One of the scenario that we are thinking is what happens when the consumer fails between the time that it submits the message to the topic and before the offset is manually committed. In such a scenario the application will reprocess the message and submit one more time resulting in duplicate messages in the topic.
Is there any way that both the activities can be part of a TransactionManager, so that it success/fails all ?
Configuration file
@Bean
public ProducerFactory producerFactory() {
Map<String, Object> config = new HashMap<>();
config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
DefaultKafkaProducerFactory<String, User> defaultKafkaProducerFactory = new DefaultKafkaProducerFactory<>(config);
defaultKafkaProducerFactory.setTransactionIdPrefix("trans");
//defaultKafkaProducerFactory.transactionCapable();
return defaultKafkaProducerFactory;
//return new DefaultKafkaProducerFactory<>(config);
}
@Bean
public KafkaTemplate<String, User> kafkaTemplate() {
return new KafkaTemplate<>(producerFactory());
}
@Bean
public KafkaTransactionManager<String, User> transactionManager() {
KafkaTransactionManager transactionManager = new KafkaTransactionManager(producerFactory());
transactionManager.setTransactionSynchronization(AbstractPlatformTransactionManager.SYNCHRONIZATION_ALWAYS);
transactionManager.setNestedTransactionAllowed(true);
return transactionManager;
}
/**
* New configuration for the consumerFactory added
*
* @return
*/
@Bean
public ConsumerFactory<String, User> consumerFactory() {
Map<String, Object> config = new HashMap<>();
config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
config.put(ConsumerConfig.GROUP_ID_CONFIG, "firstTopic-group");
config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
return new DefaultKafkaConsumerFactory<>(config, new StringDeserializer(), new JsonDeserializer<User>(User.class));
}
@Bean
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, User>> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, User> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
factory.getContainerProperties().setTransactionManager(transactionManager());
factory.setRetryTemplate(kafkaRetry());
factory.setStatefulRetry(true);
factory.setErrorHandler(getErrorHandler());
factory.setRecoveryCallback(retryContext -> {
//implement the logic to decide the action after all retries are over.
ConsumerRecord consumerRecord = (ConsumerRecord) retryContext.getAttribute("record");
System.out.println("Recovery is called for message " + consumerRecord.value());
return Optional.empty();
});
return factory;
}
public RetryTemplate kafkaRetry() {
RetryTemplate retryTemplate = new RetryTemplate();
ExponentialBackOffPolicy backOffPolicy = new ExponentialBackOffPolicy();
backOffPolicy.setInitialInterval(60 * 1000);
backOffPolicy.setMultiplier(3);
backOffPolicy.setMaxInterval(4 * 60 * 1000); // original 25 * 60 * 1000
retryTemplate.setBackOffPolicy(backOffPolicy);
SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy();
retryPolicy.setMaxAttempts(4);
retryTemplate.setRetryPolicy(retryPolicy);
return retryTemplate;
}
public SeekToCurrentErrorHandler getErrorHandler() {
SeekToCurrentErrorHandler errorHandler = new SeekToCurrentErrorHandler() {
@Override
public void handle(Exception thrownException,
List<ConsumerRecord<?, ?>> records,
Consumer<?, ?> consumer,
MessageListenerContainer container) {
//super.handle(thrownException, records, consumer, container);
if (!records.isEmpty()) {
ConsumerRecord<?, ?> record = records.get(0);
String topic = record.topic();
long offset = record.offset();
int partition = record.partition();
if (thrownException instanceof DeserializationException) {
System.out.println("------1111------deserialization exception ");
} else {
System.out.println("------xxxx------Record is empty ");
consumer.seek(new TopicPartition(topic, partition), offset);
}
} else {
System.out.println("------4444------Record is empty ");
}
}
};
return errorHandler;
}
Kafka listener
@Autowired
KafkaTemplate<String, User> kafkaTemplate;
@KafkaListener(topics = "firstTopic", groupId = "firstTopic-group")
@Transactional
public void onCustomerMessage(User user, Acknowledgment acknowledgment) throws Exception {
/*System.out.println("get the message " +user.getFirstName());
if (user.getFirstName().equalsIgnoreCase("Test")) {
throw new RuntimeException("Incompatible message " + user.getFirstName());
}
*/
//postToSecondTopic(acknowledgment, user);
System.out.println("NOT In transaction");
kafkaTemplate.executeInTransaction(t -> {
System.out.println("---------------------->");
int number = (int) (Math.random() * 10);
t.send("secondtopic", user);
if (number % 5 == 0)
throw new RuntimeException("fail");
acknowledgment.acknowledge();
return true;
});
System.out.println("*** exit ***");
}
Error in the logs
2020-05-28 15:52:53.597 ERROR 112469 --- [nio-8080-exec-1] o.a.c.c.C.[.[.[/].[dispatcherServlet] : Servlet.service() for servlet [dispatcherServlet] in context with path [] threw exception [Request processing failed; nested exception is java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record] with root cause
java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record at org.springframework.util.Assert.state(Assert.java:73) ~[spring-core-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.kafka.core.KafkaTemplate.doSend(KafkaTemplate.java:394) ~[spring-kafka-2.3.7.RELEASE.jar:2.3.7.RELEASE] at org.springframework.kafka.core.KafkaTemplate.send(KafkaTemplate.java:216) ~[spring-kafka-2.3.7.RELEASE.jar:2.3.7.RELEASE] at com.barade.sandesh.springKafka.UserResource.postComments(UserResource.java:26) ~[classes/:na] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_252] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_252] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_252] at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_252] at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) ~[spring-web-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) ~[spring-web-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) ~[spring-webmvc-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:879) ~[spring-webmvc-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:793) ~[spring-webmvc-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) ~[spring-webmvc-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) ~[spring-webmvc-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) ~[spring-webmvc-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) ~[spring-webmvc-5.2.5.RELEASE.jar:5.2.5.RELEASE] at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) ~[spring-webmvc-5.2.5.RELEASE.jar:5.2.5.RELEASE]
UserResource
@RestController
@RequestMapping("accounts")
public class UserResource {
@Autowired
KafkaTemplate <String, User> kafkaTemplate;
@PostMapping("/users")
public String postComments(@RequestParam ("firstName") final String firstName,
@RequestParam ("lastName") final String lastName,
@RequestParam ("userName") final String userName ) {
List<String> accountTypes = new ArrayList<String>();
kafkaTemplate.send("firstTopic", new User(firstName,lastName,userName));
return "Message sent to the Error queue";
}
}