0

I am implementing a microservice using Spring Cloud Stream with Kafka and Avro. When I add the Sleuth dependency, the producer can no longer serialize the OUTPUT of the Function to Avro.

Dependencies in build.gradle.kts:

import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

// Legacy plugin application: the Avro code-generation plugin is added to the
// buildscript classpath here and applied below via apply(plugin = ...).
buildscript {
    dependencies {
        classpath("com.commercehub.gradle.plugin:gradle-avro-plugin:0.16.0")
    }
}

plugins {
    id("org.springframework.boot") version "2.4.2"
    id("io.spring.dependency-management") version "1.0.11.RELEASE"
    kotlin("jvm") version "1.4.21"
    kotlin("plugin.spring") version "1.4.21"
}

// Generates Java classes (e.g. Person) from the .avsc schema files.
apply(plugin = "com.commercehub.gradle.plugin.avro")

group = "com.example"
version = "0.0.1-SNAPSHOT"
java.sourceCompatibility = JavaVersion.VERSION_15

repositories {
    mavenCentral()
    // Confluent repository: hosts kafka-avro-serializer, which is not on Maven Central.
    maven { url = uri("https://packages.confluent.io/maven/") }
}

extra["springCloudVersion"] = "2020.0.1"

dependencies {
    implementation("com.fasterxml.jackson.module:jackson-module-kotlin")
    implementation("org.jetbrains.kotlin:kotlin-reflect")
    implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8")
    implementation("org.springframework.boot:spring-boot-starter-web")
    implementation("org.springframework.cloud:spring-cloud-stream")
    implementation("org.springframework.cloud:spring-cloud-starter-stream-kafka")
    implementation("org.apache.kafka:kafka-streams")
    // Reporter's note: if you remove the Sleuth dependency from the project,
    // the Avro payload is converted correctly.
    implementation("org.springframework.cloud:spring-cloud-starter-sleuth")
    implementation("io.confluent:kafka-avro-serializer:5.5.0")
    implementation("org.apache.avro:avro:1.10.0")
    implementation("com.github.javafaker:javafaker:1.0.2")
    testImplementation("org.springframework.boot:spring-boot-starter-test")
}

// Versions for the spring-cloud-* dependencies above come from this BOM.
dependencyManagement {
    imports {
        mavenBom("org.springframework.cloud:spring-cloud-dependencies:${property("springCloudVersion")}")
    }
}

tasks.withType<KotlinCompile> {
    kotlinOptions {
        freeCompilerArgs = listOf("-Xjsr305=strict")
        jvmTarget = "15"
    }
}

tasks.withType<Test> {
    useJUnitPlatform()
}

Below is the small Avro schema that I used:

{
  "namespace": "com.example.demo.events",
  "type": "record",
  "name": "Person",
  "fields": [
    {
      "name": "name",
      "type": {
        "type": "string",
        "avro.java.string": "String"
      }
    }
  ]
}

The configuration in application.yml:

server.port: 9091
spring:
  application.name: demo-processor
  cloud:
    stream:
      function:
        # Binds the transform() bean as the stream-processing function
        # (creates the transform-in-0 / transform-out-0 bindings).
        definition: transform
      bindings:
        transform-in-0:
          destination: topic-a
          contentType: application/*+avro
          group: 'process-uppercase'
        transform-out-0:
          destination: topic-b
          # NOTE(review): per the reporter's follow-up comment, removing this
          # contentType made serialization work with Sleuth on the classpath;
          # with useNativeEncoding the Kafka serializer below does the
          # conversion, so this converter hint may be redundant — confirm.
          contentType: application/*+avro
          producer:
            # Delegate serialization to the Kafka value.serializer configured
            # below, bypassing Spring's message converters.
            useNativeEncoding: true
      kafka:
        binder:
          brokers: localhost:9092
        bindings:
          transform-in-0:
            consumer:
              configuration:
                schema.registry.url: http://localhost:8081
                value.deserializer: io.confluent.kafka.serializers.KafkaAvroDeserializer
                # Deserialize into the generated Person class instead of GenericRecord.
                specific.avro.reader: true
          transform-out-0:
            producer:
              configuration:
                schema.registry.url: http://localhost:8081
                value.serializer: io.confluent.kafka.serializers.KafkaAvroSerializer

This is the code:

package com.example.demo

import com.example.demo.events.Person
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication
import org.springframework.context.annotation.Bean
import org.springframework.messaging.Message
import org.springframework.messaging.support.MessageBuilder
import java.util.function.Function

@SpringBootApplication
class DemoProcessorApplication {

    /**
     * Stream-processing function bound as `transform-in-0`/`transform-out-0`:
     * upper-cases the incoming [Person]'s name and republishes the record.
     *
     * Reporter's note: this function works correctly until the Sleuth
     * dependency is added to the classpath.
     */
    @Bean
    fun transform() = Function<Message<Person>, Message<Person>> { message ->
        val person = message.payload
        val name = person.getName()
        // Compute the upper-cased value once instead of twice.
        val upperName = name.toUpperCase()
        // Mutates the generated (mutable) Avro record in place.
        person.setName(upperName)
        println("Transform $name to $upperName")
        // NOTE(review): withPayload(...).build() drops the incoming headers —
        // including any tracing headers Sleuth attaches. Confirm whether they
        // should be propagated via copyHeaders(message.headers).
        MessageBuilder.withPayload(person).build()
    }
}

// Entry point: boots the Spring application context for DemoProcessorApplication.
fun main(args: Array<String>) {
    runApplication<DemoProcessorApplication>(*args)
}

I thank you for your help!

Igor
  • 3
  • 4
  • I guess adding Sleuth does not break your transform function but breaks the component that would eventually call it, right? Kafka instrumentation happens using headers, so the only difference between an instrumented and non-instrumented message is an additional Kafka header. I'm not very familiar with Avro, could you please check if this can break it? – Jonatan Ivanov Feb 08 '21 at 20:07
  • I think the way to fix this, an alternative solution lol. I removed the content type from Output and it worked. When I use sleuth for some reason the Message Converter doesn't know Avro converter – Igor Feb 12 '21 at 09:05

0 Answers0