I am trying to (unit) test a Spring Cloud Stream Kafka processor that uses Kafka DSL, but receives the following error "Connection to node -1 could not be established. Broker may not be available.
". Besides, the test does not shut down.
I tried both EmbeddedKafka and TestBinder and yet I have the same behavior.
I tried to start from the response given by the Spring Cloud team (which works) and I adapted the application to use the Kafka DSL, leaving the test class pretty much as is. Does EmbeddedKafka actually support the Kafka DSL?
I am using Elmhurst.RELEASE
/**
 * Spring Cloud Stream application that binds a Kafka Streams processor:
 * logs every incoming record, then forwards it with the value upper-cased.
 */
@SpringBootApplication
@EnableBinding(MyBinding.class)
public class So43330544Application {

    public static void main(String[] args) {
        SpringApplication.run(So43330544Application.class, args);
    }

    /**
     * KStream processor bound to {@link MyBinding#INPUT}; its result is sent
     * to {@link MyBinding#OUTPUT}.
     */
    @StreamListener
    @SendTo(MyBinding.OUTPUT)
    public KStream<String, String> process(@Input(MyBinding.INPUT) KStream<String, String> in) {
        KStream<String, String> logged =
                in.peek((key, value) -> System.out.println("Received value " + value));
        return logged.mapValues(String::toUpperCase);
    }
}
/**
 * Binding contract for the Kafka Streams processor: one String/String input
 * stream and one String/String output stream, wired by channel name.
 */
interface MyBinding {
// Channel names referenced by @StreamListener/@SendTo in the application class.
String INPUT = "input";
String OUTPUT = "output";
// Inbound KStream for channel "input".
@Input(INPUT)
KStream<String, String> messagesIn();
// Outbound KStream for channel "output".
@Output(OUTPUT)
KStream<String, String> messagesOut();
}
Update
As shown in the following example, the approach proposed in this answer worked for me when I am using the Spring Cloud Stream generic syntax for writing event processors, but did not work when I am using Kafka DSL (KStreams). To see the difference in behaviors, just switch to either ExampleAppWorking
or ExampleAppNotWorking
in the @SpringBootTest
annotation :
/**
 * Integration test against an embedded Kafka broker. Switch the
 * {@code @SpringBootTest} classes attribute between {@link ExampleAppWorking}
 * (plain Spring Cloud Stream processor) and {@link ExampleAppNotWorking}
 * (Kafka Streams / KStream processor) to compare the two programming models.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ExampleKafkaEmbeddedTest.ExampleAppNotWorking.class)
@DirtiesContext(classMode = ClassMode.AFTER_EACH_TEST_METHOD)
public class ExampleKafkaEmbeddedTest {

    @ClassRule
    public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, false, "so0544in", "so0544out");

    @Autowired
    private KafkaTemplate<Integer, byte[]> template;

    @Autowired
    private KafkaProperties properties;

    // Class-scoped: created once in setup() and therefore closed once in tearDown().
    private static Consumer<String, String> consumer;

    @BeforeClass
    public static void setup() throws Exception {
        // Point both Spring Boot's Kafka auto-configuration and the binder at the embedded broker.
        System.setProperty("spring.kafka.bootstrap-servers", embeddedKafka.getBrokersAsString());
        System.setProperty("spring.cloud.stream.kafka.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
        System.setProperty("server.port", "0");
        System.setProperty("spring.jmx.enabled", "false");

        // Test-side consumer that reads whatever the processor publishes to "so0544out".
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
        consumer = cf.createConsumer();
        embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "so0544out");
    }

    // FIX: was a per-test @After. Closing the static, class-scoped consumer after the
    // first test would break every subsequent test method; close it once per class.
    @AfterClass
    public static void tearDown() {
        if (consumer != null) {
            consumer.close();
        }
    }

    @Test
    public void testSendReceive() {
        template.send("so0544in", "foo".getBytes());
        // FIX: dropped a locally-built consumer-properties map that was never used.
        ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "so0544out");
        System.out.println("Contenu chaine resultat : " + cr.value());
        // FIX: JUnit's assertEquals is (expected, actual); the arguments were reversed.
        assertEquals("FOO", cr.value());
    }

    /** Reference app using the generic Spring Cloud Stream processor model. */
    @SpringBootApplication
    @EnableBinding(Processor.class)
    public static class ExampleAppWorking {
        public static void main(String[] args) {
            SpringApplication.run(ExampleAppWorking.class, args);
        }

        @StreamListener(Processor.INPUT)
        @SendTo(Processor.OUTPUT)
        public String receive(String in) {
            return in.toUpperCase();
        }
    }

    /** App under test, using the Kafka Streams (KStream) programming model. */
    @SpringBootApplication
    @EnableBinding(MyBinding.class)
    public static class ExampleAppNotWorking {
        public static void main(String[] args) {
            SpringApplication.run(ExampleAppNotWorking.class, args);
        }

        @StreamListener
        @SendTo(MyBinding.OUTPUT)
        public KStream<Integer, byte[]> toUpperCase(@Input(MyBinding.INPUT) KStream<Integer, byte[]> in) {
            // NOTE(review): new String(val)/getBytes() use the platform default charset —
            // consider StandardCharsets.UTF_8 if this ever runs on a non-UTF-8 JVM.
            return in.map((key, val) -> KeyValue.pair(key, new String(val).toUpperCase().getBytes()));
        }
    }

    public interface MyBinding {
        String INPUT = "input";
        String OUTPUT = "output";

        // FIX: generics now match the processor method (Integer/byte[], not Integer/String).
        @Input(INPUT)
        KStream<Integer, byte[]> messagesIn();

        // FIX: this was annotated @Input(OUTPUT). An outbound KStream binding must be
        // @Output; with two @Input bindings the binder never creates the producer side,
        // which matches the observed "Broker may not be available" / hanging behavior.
        @Output(OUTPUT)
        KStream<Integer, byte[]> messagesOut();
    }
}