0

I have configured MongoDB source connector and sending data to Apache kafka topic and data is coming fine in real time.

Now I have requirement to fetch specific time period data from MongoDB and push that data to kafka topic but query is not working in query parameter as well as pipeline parameter

Source connector configuration:

{
 "name": "ApplicationName",
 "config": {
    "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
    "connection.uri": "DBurl",
    "database": "DatabaseName",
    "collection": "CollectionName",
    "topic.namespace.map":"{'DBName.'CollectionName : 'topicname'}",
    "publish.full.document.only":true,
    "batch.size" : 1,
    "poll.await.time.ms" : 1,
    "timestamp.field.name": "dbLastUpdateDate",
    "timestamp.initial": "2023-02-10T00:00:00.000Z",
    "timestamp.incrementing.field.name": "dbLastUpdateDate",
    "output.format.key" : "json",
    "output.format.value" : "json",
    "key.converter.schemas.enable" : false,
    "value.converter.schemas.enable" : false,
    "key.converter":"org.apache.kafka.connect.storage.StringConverter",
    "value.converter" : "org.apache.kafka.connect.storage.StringConverter"
    }
  }

I have tried with query as well.

{
  "name": "ApplicationName",
  "config": {
    "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
    "connection.uri": "DBurl",
    "database": "DatabaseName",
    "collection": "CollectionName",
    "topic.namespace.map":"{'DBName.'CollectionName : 'topicname'}",
    "publish.full.document.only":true,
    "batch.size" : 1,
    "poll.await.time.ms" : 1,
    "query": "{\"$and\": [{\"dbLastUpdateDate\": {\"$gte\": { \"$ISODate\": \"2023-02-15T06:07:00.000Z\" }}},{\"dbLastUpdateDate\": {\"$lt\": { \"$ISODate\": \"2023-02-21T04:00:38.000Z\" }}}]}",
    "output.format.key" : "json",
    "output.format.value" : "json",
    "key.converter.schemas.enable" : false,
    "value.converter.schemas.enable" : false,
    "key.converter":"org.apache.kafka.connect.storage.StringConverter",
    "value.converter" : "org.apache.kafka.connect.storage.StringConverter"
  }
}

One record from DB

{ "_id" : "SR230223.1252.B28566", "dbLastUpdateDate" : ISODate("2023-02-23T11:52:35.254Z"),
  "header" :
    { "createdBy" : "PT220509.1220.055276",....................................................................................................................................................}

What I am doing wrong?

I have tried multiple solutions but nothing worked:

"query": "{\"$match\": {\"$and\": [{\"dbLastUpdateDate\": {\"$gte\": { 
\"$date\": \"2023-02-15T06:07:00.000Z\" }}},{\"dbLastUpdateDate\": 
{\"$lt\": { \"$date\": \"2023-02-21T04:00:38.000Z\" }}}]}}"

"query": "{\"$and\": [{\"dbLastUpdateDate\": {\"$gte\": { \"$new 
ISODate\": \"2023-02-15T06:07:00.000Z\" }}},{\"dbLastUpdateDate\": 
{\"$lt\": { \"$new ISODate\": \"2023-02-21T04:00:38.000Z\" }}}]}"

"query": "{\"$match\": {\"$and\": [{\"dbLastUpdateDate\": {\"$gte\": { 
\"$date\": \"2023-02-15T06:07:00.000Z\" }}},{\"dbLastUpdateDate\": 
{\"$lt\": { \"$date\": \"2023-02-21T04:00:38.000Z\" }}}]}}"
double-beep
  • 5,031
  • 17
  • 33
  • 41

0 Answers