I am trying to push a message to the JDBC sink connector (JdbcSinkConnector). The message is as below:
{
  "schema": {
    "type": "struct",
    "fields": [{
      "field": "ID",
      "type": {
        "type": "bytes",
        "scale": 0,
        "precision": 64,
        "connect.version": 1,
        "connect.parameters": {
          "scale": "0"
        },
        "connect.name": "org.apache.kafka.connect.data.Decimal",
        "logicalType": "decimal"
      }
    }, {
      "field": "STORE_DATE",
      "type": ["null", {
        "type": "long",
        "connect.version": 1,
        "connect.name": "org.apache.kafka.connect.data.Timestamp",
        "logicalType": "timestamp-millis"
      }],
      "default": null
    }, {
      "field": "DATA",
      "type": ["null", "string"],
      "default": null
    }],
    "name": "KAFKA_STREAM"
  },
  "payload": {
    "ID": 17,
    "STORE_DATE": null,
    "DATA": "THIS IS TEST DATA"
  }
}
but it keeps failing with the following error:

Caused by: org.apache.kafka.connect.errors.DataException: Schema must contain 'type' field
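From what I can tell, the JsonConverter does not accept Avro-style schemas: each field's "type" has to be a plain string such as "int64" or "bytes", nullability is an "optional" flag rather than a ["null", ...] union, and logical types are carried in "name", "version" and "parameters" keys. If I have understood that correctly, the same record would have to look something like this (an untested sketch; "EQ==" is my base64 encoding of the unscaled decimal value 17):

{
  "schema": {
    "type": "struct",
    "name": "KAFKA_STREAM",
    "fields": [{
      "field": "ID",
      "type": "bytes",
      "optional": false,
      "name": "org.apache.kafka.connect.data.Decimal",
      "version": 1,
      "parameters": {
        "scale": "0"
      }
    }, {
      "field": "STORE_DATE",
      "type": "int64",
      "optional": true,
      "name": "org.apache.kafka.connect.data.Timestamp",
      "version": 1
    }, {
      "field": "DATA",
      "type": "string",
      "optional": true
    }]
  },
  "payload": {
    "ID": "EQ==",
    "STORE_DATE": null,
    "DATA": "THIS IS TEST DATA"
  }
}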
This is the connector configuration I am currently using:
{
  "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector",
  "topics": "DEV_KAFKA_STREAM",
  "connection.url": "url",
  "connection.user": "user",
  "connection.password": "password",
  "insert.mode": "insert",
  "table.name.format": "KAFKA_STREAM",
  "pk.fields": "ID",
  "auto.create": "false",
  "errors.log.enable": "true",
  "errors.log.include.messages": "true",
  "value.converter": "org.apache.kafka.connect.json.JsonConverter",
  "value.converter.schemas.enable": "true"
}
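In case the producing side matters: I push the test record with the plain console producer, roughly like this (a local broker address is assumed, and message.json is a placeholder file holding the schema+payload envelope above on a single line):

kafka-console-producer --bootstrap-server localhost:9092 --topic DEV_KAFKA_STREAM < message.json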
I am not sure how to debug this or how to find the root cause, since as far as I can see every entry in the schema JSON does have a "type" field.
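The only debugging idea I have had so far is to run the converter standalone and feed it the raw bytes, so the exception can be reproduced outside the connector. A minimal sketch, assuming the connect-json artifact is on the classpath; the envelope string is a placeholder to paste the message into:

import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.json.JsonConverter;

import java.nio.charset.StandardCharsets;
import java.util.Map;

public class ConverterRepro {
    public static void main(String[] args) {
        // Same converter class and setting as in the connector config
        JsonConverter converter = new JsonConverter();
        converter.configure(Map.of("schemas.enable", "true"), false); // false = configure as a value converter

        // Paste the single-line schema+payload envelope from above here
        String envelope = "{ ... }";

        // For the message above this throws
        // DataException: Schema must contain 'type' field,
        // which makes the parse debuggable locally
        SchemaAndValue parsed = converter.toConnectData(
                "DEV_KAFKA_STREAM",
                envelope.getBytes(StandardCharsets.UTF_8));
        System.out.println(parsed.schema() + " -> " + parsed.value());
    }
}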