1

Hi, I just updated Spring Boot to version 3. In my project we had Zipkin configured to send spans to Kafka with a specific topic, and it is not working now:

zipkin:
  sender.type: kafka
  kafka.topic: topic-example

Is there any way for Micrometer Tracing to configure Zipkin the same way in application.yaml, or is there any alternative configuration?

====NEW UPDATE========== I tried another approach :

pom.xml

<!--Observability dependencies-->
        <dependency>
            <groupId>io.micrometer</groupId>
            <artifactId>micrometer-tracing</artifactId>
        </dependency>
        <dependency>
            <groupId>io.micrometer</groupId>
            <artifactId>micrometer-tracing-bridge-otel</artifactId>
        </dependency>
        <dependency>
            <groupId>io.opentelemetry</groupId>
            <artifactId>opentelemetry-exporter-zipkin</artifactId>
        </dependency>
        <dependency>
            <groupId>io.zipkin.reporter2</groupId>
            <artifactId>zipkin-sender-kafka</artifactId>
        </dependency>
        <!--Observability dependencies-->

KafkaConfiguration.java

@Configuration
@EnableConfigurationProperties(KafkaProperties.class)
public class KafkaConfig {

    /**
     * Joins the list elements into a single comma-separated string.
     * Kafka's {@code bootstrap.servers} producer property expects a String,
     * while Spring's {@code KafkaProperties} exposes it as a List.
     */
    static String join(List<?> parts) {
        StringBuilder to = new StringBuilder();
        for (int i = 0, length = parts.size(); i < length; i++) {
            to.append(parts.get(i));
            if (i + 1 < length) {
                to.append(',');
            }
        }
        return to.toString();
    }

    /**
     * Builds the Zipkin {@code Sender} bean that reports spans to Kafka
     * instead of the default HTTP transport.
     *
     * @param config      Spring Boot Kafka properties used to derive producer settings
     * @param environment used to resolve the topic name (see note below)
     * @return a Kafka-backed Zipkin sender publishing to the configured topic
     */
    @Bean("zipkinSender")
    Sender kafkaSender(KafkaProperties config, Environment environment) {
        // Need to get property value from Environment
        // because when using @VaultPropertySource in reactive web app
        // this bean is initiated before @Value is resolved
        // See gh-1990
        String topic = environment.getProperty("spring.zipkin.kafka.topic", "zipkin");
        Map<String, Object> properties = config.buildProducerProperties();
        // Spans are encoded bytes, so override any String serializers that the
        // application's own producers may have configured.
        properties.put("key.serializer", ByteArraySerializer.class.getName());
        properties.put("value.serializer", ByteArraySerializer.class.getName());
        // Kafka expects the input to be a String, but KafkaProperties returns a list.
        // Pattern matching (Java 16+) replaces the raw-type (List) cast.
        if (properties.get("bootstrap.servers") instanceof List<?> servers) {
            properties.put("bootstrap.servers", join(servers));
        }
        return KafkaSender.newBuilder().topic(topic).overrides(properties).build();
    }
}

spring:
  kafka:
    consumer:
      bootstrap-servers: localhost:9092
      group-id: group-id
      auto-offset-reset: earliest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    producer:
      bootstrap-servers: localhost:9092
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer


  zipkin:
    kafka.topic: user

and I tried to check the logs by accessing running docker container :

docker exec -it kafka-container /bin/sh

bin/kafka-console-consumer.sh --topic topic-name --bootstrap-server localhost:9092 --property print.headers=true

Still it does not work — please let me know if I made a mistake.

  • 1
    spring-cloud-sleuth does not work with spring boot 3.0.0 https://github.com/micrometer-metrics/tracing/wiki/Spring-Cloud-Sleuth-3.1-Migration-Guide – spencergibb Dec 19 '22 at 17:31
  • I know but how sender.type and kafka.topic works in Mircometer Tracing ? @spencergibb – Dimas Adriyanto Setyawan Dec 19 '22 at 18:43
  • Check my original answer about the `batch.size` property... without that you might have an infinite loop in your sender, and that is silent so it might just not work and you don't know why. – mdrg Jan 17 '23 at 07:33
  • is the properties: for consumer or producer ? @mdrg – Dimas Adriyanto Setyawan Jan 17 '23 at 08:17
  • The properties are not under producer nor consumer, check my answer snippet for the properties. And look at the `kafkaSender` method, it is creating a producer (it is sending Kafka messages after all). – mdrg Jan 17 '23 at 12:31

3 Answers3

1

We currently don't support any other sending mechanism than http. You can create a Sender bean yourself that would use Kafka. Please file an issue in Spring Boot that you're interested in adding different sender mechanisms

Marcin Grzejszczak
  • 10,624
  • 1
  • 16
  • 32
0

While there is no official support, I found a way to make it work again (Spring Boot 3.0.1):

  1. Add the dependencies io.micrometer:micrometer-tracing-bridge-otel, io.opentelemetry:opentelemetry-exporter-zipkin, io.zipkin.reporter2:zipkin-sender-kafka and org.springframework.kafka:spring-kafka.

  2. Add a configuration class like the one below, code copied from Sleuth's ZipkinKafkaSenderConfiguration:

     @Configuration
     @EnableConfigurationProperties(KafkaProperties.class)
     public class KafkaConfig {

         /**
          * Joins the list elements into a single comma-separated string.
          * Kafka's {@code bootstrap.servers} producer property expects a String,
          * while Spring's {@code KafkaProperties} exposes it as a List.
          */
         static String join(List<?> parts) {
             StringBuilder to = new StringBuilder();
             for (int i = 0, length = parts.size(); i < length; i++) {
                 to.append(parts.get(i));
                 if (i + 1 < length) {
                     to.append(',');
                 }
             }
             return to.toString();
         }

         /**
          * Builds the Zipkin {@code Sender} bean that reports spans to Kafka
          * instead of the default HTTP transport.
          *
          * @param config      Spring Boot Kafka properties used to derive producer settings
          * @param environment used to resolve the topic name (see note below)
          * @return a Kafka-backed Zipkin sender publishing to the configured topic
          */
         @Bean("zipkinSender")
         Sender kafkaSender(KafkaProperties config, Environment environment) {
             // Need to get property value from Environment
             // because when using @VaultPropertySource in reactive web app
             // this bean is initiated before @Value is resolved
             // See gh-1990
             String topic = environment.getProperty("spring.zipkin.kafka.topic", "zipkin");
             Map<String, Object> properties = config.buildProducerProperties();
             // Spans are encoded bytes, so override any String serializers that the
             // application's own producers may have configured.
             properties.put("key.serializer", ByteArraySerializer.class.getName());
             properties.put("value.serializer", ByteArraySerializer.class.getName());
             // Kafka expects the input to be a String, but KafkaProperties returns a list.
             // Pattern matching (Java 16+) replaces the raw-type (List) cast.
             if (properties.get("bootstrap.servers") instanceof List<?> servers) {
                 properties.put("bootstrap.servers", join(servers));
             }
             return KafkaSender.newBuilder().topic(topic).overrides(properties).build();
         }
     }
    
  3. Configure Kafka in your application.yaml file:

     spring:
       kafka:
         bootstrap-servers: one-host:9092,another-host:9092
         properties:
           # Set a value for batch.size or an infinite loop will happen when trying to send data to Kafka
           batch.size: 16384
           # Configure your security, sasl or whatever else you need
     # Notice that sampling properties and others moved from 'spring.sleuth' to 'management.tracing' (double-check the property names used)
     management:
       tracing:
         sampling:
           probability: 1.0
         baggage:
           remote-fields: Some-Header
           correlation-fields: Some-Header
    

This should make it work like before with Spring Boot 2.x and Spring Cloud Sleuth.

mdrg
  • 3,242
  • 2
  • 22
  • 44
  • could you share your sample project on github @mdrg ? I tried it but somehow it doesn't work I may wrong with dependencies set up or docker kafka logs – Dimas Adriyanto Setyawan Jan 14 '23 at 19:36
  • @DimasAdriyantoSetyawan I coded this into a larger project which I can't share, and hardly will have the time to make a self-contained sample of this. But hopefully all the required code is here, and if not, post a specific question with the description of your problem and you might get some extra help. – mdrg Jan 16 '23 at 18:13
  • sure thanks for letting me know, regarding the tracing did the trace id came up in kafka header too ? or just a body? did you use Kcat to get kafka log? – Dimas Adriyanto Setyawan Jan 17 '23 at 08:15
  • I used `kafkactl consume kafka` against the appropriate instance. Also, notice my edited config section, I forgot about the properties moved to `management.tracing` (default sampling rate is `0.1`, that might confuse you if you expect higher sampling). – mdrg Jan 17 '23 at 10:15
0

I managed to make it work with Spring Boot 3.

According to Spring Cloud Sleuth 3.1 Migration Guide, the API code from Sleuth has been migrated to Micrometer Tracing.

The Brave and OpenTelemetry bridges have their respective modules in Micrometer Tracing.

Choose your Tracer instrumentation and add dependencies

  1. For OpenTelemetry
    org.springframework.kafka:spring-kafka
    io.micrometer:micrometer-tracing:VERSION
    io.zipkin.reporter2:zipkin-sender-kafka:VERSION
    io.micrometer:micrometer-tracing-bridge-otel:VERSION 
    io.opentelemetry:opentelemetry-api:VERSION
    org.apache.httpcomponents.client5:httpclient5:VERSION

Not sure that org.apache.httpcomponents.client5:httpclient5:VERSION is required, but I had to include it to make it work.

  1. For Brave
org.springframework.kafka:spring-kafka
io.micrometer:micrometer-tracing:VERSION
io.zipkin.reporter2:zipkin-sender-kafka:VERSION   
io.zipkin.brave:brave:VERSION
io.micrometer:micrometer-tracing-bridge-brave:VERSION
  1. Configure a bean that send event to kafka with Zipkin sender.
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(ByteArraySerializer.class)
@ConditionalOnProperty(value = "management.tracing.enabled", havingValue = "true")
public class TracingKafkaSenderConfiguration {

    private static final String SENDER_BEAN_NAME = "zipkinSender";

    @Configuration(proxyBeanMethods = false)
    @EnableConfigurationProperties(KafkaProperties.class)
    static class TracingKafkaSenderBeanConfiguration {

        /**
         * Joins the list elements into a single comma-separated string.
         * Kafka's {@code bootstrap.servers} producer property expects a String,
         * while Spring's {@code KafkaProperties} exposes it as a List.
         */
        static String join(List<?> parts) {
            StringBuilder to = new StringBuilder();
            for (int i = 0, length = parts.size(); i < length; i++) {
                to.append(parts.get(i));
                if (i + 1 < length) {
                    to.append(',');
                }
            }
            return to.toString();
        }

        /**
         * Builds the Zipkin {@code Sender} bean that publishes spans to Kafka.
         * Topic and client id are resolved from custom
         * {@code management.tracing.kafka.topic} and
         * {@code management.tracing.service.name} properties.
         *
         * @param config      Spring Boot Kafka properties used to derive producer settings
         * @param environment used to resolve the topic name and the client id
         * @return a Kafka-backed Zipkin sender publishing to the configured topic
         */
        @Bean(SENDER_BEAN_NAME)
        Sender kafkaSender(KafkaProperties config, Environment environment) {
            String topic = environment.getProperty("management.tracing.kafka.topic", "topic");
            String serviceName = environment.getProperty("management.tracing.service.name", "kafka-sender");
            Map<String, Object> properties = config.buildProducerProperties();
            // Spans are encoded bytes, so override any String serializers that the
            // application's own producers may have configured.
            properties.put("key.serializer", ByteArraySerializer.class.getName());
            properties.put("value.serializer", ByteArraySerializer.class.getName());
            properties.put(CommonClientConfigs.CLIENT_ID_CONFIG, serviceName);
            // Kafka expects the input to be a String, but KafkaProperties returns a list.
            // Pattern matching (Java 16+) replaces the raw-type (List) cast.
            if (properties.get("bootstrap.servers") instanceof List<?> servers) {
                properties.put("bootstrap.servers", join(servers));
            }
            return KafkaSender.newBuilder().topic(topic).overrides(properties).build();
        }
    }
}

  1. Change properties from 'spring.sleuth' to 'management.tracing'
management:
  tracing:
    enabled: true
    kafka:
      topic: topic
    service:
      name: kafka-sender
    sampling:
      probability: 0.1
    baggage:
      remote-fields:
        - field-one

Notice that management.tracing.kafka.topic and management.tracing.service.name are custom properties read by the configuration class above (for the Kafka topic and the producer client id); they are not standard Spring Boot properties.