
Basically I am consuming messages from Spring Cloud Stream Kafka and inserting them into MongoDB. My code works fine when my Mongo cluster is up, but I have two problems when my Mongo instance is down:

  1. Auto commit of Cloud Stream is disabled (autoCommitOffset set to false), but re-polling still does not happen even though the message has not been acknowledged (see the blocking listener sketch further down).
  2. Checking the Mongo connection takes some time, and if two messages with the same ID arrive in that window, then once I start the Mongo instance again the document is duplicated, even though in the normal case this works fine (see the idempotent-insert sketch after the service class).

Do we have any solution for these?

Here is my code:

interface ResourceInventorySink {

    companion object {
        const val INPUT = "resourceInventoryInput"
    }

    @Input(INPUT)
    fun input(): SubscribableChannel
}

@EnableBinding(ResourceInventorySink::class)
class InventoryEventListeners {

    val logger = LoggerFactory.getLogger(javaClass)

    @Autowired
    lateinit var resourceInventoryService: ResourceInventoryService

    @StreamListener(ResourceInventorySink.INPUT, condition = OperationConstants.INSERT)
    fun receiveInsert(event: Message<ResourceInventoryEvent>) {
        logger.info("received Insert message {}", event.payload.toString())
        val success = resourceInventoryService.insert(event.payload)
        success.subscribe({
            logger.info("Data Inserted {}", event.payload.toString())
            event.headers.get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment::class.java)?.acknowledge()
        }, {
            if (it !is DataAccessResourceFailureException) {
                logger.error("Exception occurred {} {}", it.message, it.cause.toString())
                event.headers.get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment::class.java)?.acknowledge()
            } else {
                logger.error("Error inserting into MongoDB {}", it.cause)
            }
        })
    }
}
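For problem 1, one direction that might help (only a sketch, untested; it assumes the binder's default retry behaviour and that blocking inside the listener is acceptable) is to make the listener synchronous and let the Mongo connection failure propagate, so the record is re-processed by the binder's retry template (and can go to a DLQ if enableDlq is set) instead of relying on the unacknowledged offset being polled again:

@StreamListener(ResourceInventorySink.INPUT, condition = OperationConstants.INSERT)
fun receiveInsertBlocking(event: Message<ResourceInventoryEvent>) {
    try {
        // Sketch: block so that a Mongo outage surfaces as an exception on the listener thread.
        resourceInventoryService.insert(event.payload).block()
        event.headers.get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment::class.java)?.acknowledge()
    } catch (ex: DataAccessResourceFailureException) {
        // Rethrow so the binder's retry template re-processes the record
        // (and routes it to a DLQ if enableDlq is configured).
        throw ex
    } catch (ex: Exception) {
        // Any other error is not recoverable by retrying, so acknowledge and move on,
        // mirroring the behaviour of the reactive version above.
        logger.error("Exception occurred {} {}", ex.message, ex.cause.toString())
        event.headers.get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment::class.java)?.acknowledge()
    }
}

(Same class and imports as the listener above, just a blocking variant of receiveInsert.)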

Here is my service class:

@Service
class ResourceInventoryService {

    val logger = LoggerFactory.getLogger(javaClass)

    @Autowired
    lateinit var resourceInventoryRepository: ResourceInventoryRepository

    fun insert(newResource: ResourceInventoryEvent) = resourceInventoryRepository
        .findByProductId(newResource.productId)
        .switchIfEmpty(newResource.convertTODocument().toMono())
        .flatMap { resourceInventoryRepository.save(it) }
        .onErrorResume { Mono.error(it) }
}
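For problem 2, one idea (again just a sketch, untested; it assumes a ResourceInventoryDocument entity and that productId is meant to be unique) is to let MongoDB enforce uniqueness instead of the find-then-insert check above, so two messages with the same ID racing through while the connection is recovering cannot both create a document:

// Sketch: a unique index turns the second of two racing inserts into a
// DuplicateKeyException (org.springframework.dao.DuplicateKeyException)
// instead of a duplicate document.
@Document
data class ResourceInventoryDocument(
    @Id val id: String? = null,
    @Indexed(unique = true) val productId: String
    // ... remaining fields copied from ResourceInventoryEvent
)

fun insertIdempotent(newResource: ResourceInventoryEvent) = resourceInventoryRepository
    .save(newResource.convertTODocument())
    .onErrorResume(DuplicateKeyException::class.java) {
        // The document already exists, so treat the message as processed.
        resourceInventoryRepository.findByProductId(newResource.productId)
    }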

This is my application.yml:

spring:
  cloud:
    stream:
      default:
        consumer:
          useNativeEncoding: true
      kafka:
        binder:
          brokers:
            - localhost:9092
          consumer-properties:
            key.deserializer : org.apache.kafka.common.serialization.StringDeserializer
            value.deserializer: io.confluent.kafka.serializers.KafkaAvroDeserializer
            schema.registry.url: http://localhost:8081
            enable.auto.commit: false
            specific.avro.reader: true
        bindings:
          resourceInventoryInput:
            consumer:
              autoOffsetCommit: false
      default-binder: kafka
      bindings:
        resourceInventoryInput:
          binder: kafka
          destination: ${application.messaging.topic}
          content-type: application/*+avro
          group: ${application.messaging.group}

EDIT 1: Acknowledgment is null.
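One thing I noticed while re-reading the config (just an observation, not verified): the Kafka binder's consumer property is autoCommitOffset, while the yml above has autoOffsetCommit. If the property name does not match, the binder keeps its default of committing offsets automatically, and in that mode the kafka_acknowledgment header is not added to the message, which would fit the null Acknowledgment. The binding would then read:

spring:
  cloud:
    stream:
      kafka:
        bindings:
          resourceInventoryInput:
            consumer:
              autoCommitOffset: false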

  • Using reactive (or any async) code from a non-reactive source (the Kafka binder) is difficult with Kafka because Kafka only maintains an offset within the log; individual records are not acknowledged. If records are processed in parallel there is no way to easily manage the offset. The record will never be re-fetched unless you re-seek to the offset. – Gary Russell Jun 17 '20 at 15:01
