0

While creating and deleting a librdkafka producer every 5 minutes, the memory usage on Windows keeps increasing, which looks like a memory leak. The librdkafka version used is 2.1.1. The code is pasted below:

bool SendData(const std::vector<std::string>& Data)
{
    RdKafka::Conf* Conf = RdKafka::Conf::create(RdKafka::Conf::CONF_GLOBAL);
    if (Conf == nullptr)
        throw std::runtime_error("Failed to create the Kafka Configuration.");

    if (Conf->set("bootstrap.servers", m_Broker, ErrorMessage) != RdKafka::Conf::CONF_OK)
        throw std::runtime_error("Failed to set bootstrap server, Error : " + ErrorMessage);
    if (m_Conf->set("dr_cb", &m_Report, ErrorMessage) != RdKafka::Conf::CONF_OK)
            throw std::runtime_error("Failed to set the call back function, Error : " + ErrorMessage);

    if (m_Conf->set("event_cb", &m_EventReport, ErrorMessage) != RdKafka::Conf::CONF_OK)
        throw std::runtime_error("Failed to set the call back function, Error : " + ErrorMessage);

    if (m_Conf->set("security.protocol", "ssl", ErrorMessage) != RdKafka::Conf::CONF_OK)
        throw std::runtime_error("Failed to security protocol as SSL, Error : " + ErrorMessage);

    if (m_Conf->set("enable.ssl.certificate.verification", "false", ErrorMessage) != RdKafka::Conf::CONF_OK)
        throw std::runtime_error("Failed to disable SSL Client verification, Error : " + ErrorMessage);

    int Timeout = (m_SchedulerTime * 60) * 1000; // m_SchedulerTime is 5 mins
    if (m_Conf->set("transaction.timeout.ms", std::to_string(Timeout), ErrorMessage) != RdKafka::Conf::CONF_OK)
        throw std::runtime_error("Failed to set transaction time out, Error : " + ErrorMessage);

    if (m_Conf->set("message.send.max.retries", "0", ErrorMessage) != RdKafka::Conf::CONF_OK)
        throw std::runtime_error("Failed to set retries in kafka, Error : " + ErrorMessage);

    if (m_Conf->set("compression.type", "gzip", ErrorMessage) != RdKafka::Conf::CONF_OK)
        throw std::runtime_error("Failed to set compression type as gzip, Error : " + ErrorMessage);
    
    RdKafka::Producer* Producer = RdKafka::Producer::create(Conf, ErrorMessage);
    if (Producer == nullptr)
        throw std::runtime_error("Failed to create kafka producer, Error : " + ErrorMessage);
    
    delete Conf;

    RdKafka::ErrorCode ErrorCode;
    for (const std::string& data : Data)
    {
        ErrorCode = Producer->produce(m_Topic, RdKafka::Topic::PARTITION_UA, RdKafka::Producer::RK_MSG_COPY, const_cast<char*>(data.c_str()), data.size(), NULL, 0, 0, NULL, NULL);

        if (ErrorCode != RdKafka::ERR_NO_ERROR)
        {
            if (ErrorCode == RdKafka::ERR__QUEUE_FULL)
            {
                delete Producer;
                return false;
            }

            delete producer;
            return false;
        }

        Producer->poll(0);
    }

    ErrorCode = Producer->flush((m_SchedulerTime * 60) * 1000); // m_SchedulerTime is 5 mins
    if (ErrorCode != RdKafka::ERR_NO_ERROR)
    {
        delete producer;
        return false;
    }

    if (Producer->outq_len() > 0)
    {
        delete producer;
        return false;
    }
    
    delete producer;
    RdKafka::wait_destroyed((m_SchedulerTime * 60) * 1000); // m_SchedulerTime is 5 mins
    return true;
}

// Driver loop: gather a batch, send it, then sleep for the scheduler interval.
// The posted version was not valid C++ — `if (!SendData(Data))` was followed
// only by comments, leaving the if without a statement; braces and a concrete
// sleep call make the skeleton compile as written.
int main()
{
    std::vector<std::string> Data;
    while (true)
    {
        // adding data to Data vector;

        if (!SendData(Data))
        {
            // Log failed send
        }

        // sleep for 5 mins before the next batch
        std::this_thread::sleep_for(std::chrono::minutes(5));
    }
}

I cannot create the connection once and keep it open for a long time, because more than 5000 clients will be connected to the same Kafka server; that is why I am creating and deleting the producer object every 5 minutes. Please help me with any other way to solve or fix this issue.

Vishnu
  • 11
  • 5

0 Answers0