My Quarkus app appears to drop the first batch of messages every time I re-create the Kafka Docker containers. I am currently running the confluentinc Kafka images (ZooKeeper plus broker). The drop affects the first batch of messages published from a JUnit test each time the Docker container is destroyed and created; subsequent re-runs of the same test do not reproduce the failure. The messages seem to get lost somewhere between the broker and the Quarkus-injected @ApplicationScoped "Stuff" listener. This matters because the test broker Quarkus Dev Services would normally provide (vectorized/redpanda) is supposed to be restarted between each test.
Interestingly, sometimes simply having Offset Explorer (2.3) connected to the cluster before running the test prevents the failure from occurring. (!?)
My testing file: https://github.com/Eli-Ronai/QuarkusKafkaBugDemo
package Test;
import io.quarkus.runtime.StartupEvent;
import io.quarkus.test.junit.QuarkusTest;
import org.eclipse.microprofile.reactive.messaging.Channel;
import org.eclipse.microprofile.reactive.messaging.Emitter;
import org.eclipse.microprofile.reactive.messaging.Incoming;
import org.junit.jupiter.api.*;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import java.util.concurrent.LinkedBlockingQueue;
import static org.testng.Assert.assertEquals;
@ApplicationScoped
class Stuff {
    private static final String TEST_CHANNEL_NAME = "TestChannel1";
    private static final String TEST_CHANNEL_NAME2 = "TestChannel2";

    final LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>();

    void startup(@Observes StartupEvent event) {
        System.out.println("Injected the listener");
    }

    @Inject
    @Channel(TEST_CHANNEL_NAME + "-out")
    Emitter<String> emitter;

    @Inject
    @Channel(TEST_CHANNEL_NAME2 + "-out")
    Emitter<String> emitterTwo;

    @Incoming(TEST_CHANNEL_NAME)
    public void listen(String message) throws InterruptedException {
        System.out.println(message);
        queue.put(message);
    }

    @Incoming(TEST_CHANNEL_NAME2)
    public void listen2(String message) throws InterruptedException {
        System.out.println(message);
        queue.put(message);
    }

    public void send(String message) {
        emitter.send(message);
    }

    public String take() throws InterruptedException {
        return queue.take();
    }

    public void sendToTwo(String message) {
        emitterTwo.send(message);
    }
}
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@QuarkusTest
public class BufferTests {

    final Stuff stuff;

    @Inject
    public BufferTests(Stuff stuff) {
        this.stuff = stuff;
    }
    @Order(2)
    @ParameterizedTest
    @ValueSource(strings = {"a"})
    @Timeout(10)
    public void TestAsParameterizedTest(String unusedTestId) {
        final int TEST_SIZE = 1000;
        final var mainThread = Thread.currentThread();
        new Thread(() -> {
            final var start = System.currentTimeMillis();
            for (int i = 0; i < TEST_SIZE; i++) {
                stuff.send(Integer.toString(i));
            }
            try {
                synchronized (this) {
                    wait(System.currentTimeMillis() - start + 9000);
                }
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            mainThread.interrupt();
        }).start();
        int i = 0;
        try {
            for (; i < TEST_SIZE; i++) {
                System.out.println(stuff.take());
            }
        } catch (InterruptedException ignored) {
        }
        assertEquals(i, TEST_SIZE);
    }
    @Order(4)
    @ParameterizedTest
    @ValueSource(strings = {"a", "b", "c", "d", "e"})
    @Timeout(10)
    public void ConfirmThatTestsRunIndependently(String testId) {
        final int TEST_SIZE = 1000;
        final var mainThread = Thread.currentThread();
        new Thread(() -> {
            final var start = System.currentTimeMillis();
            for (int i = 0; i < TEST_SIZE; i++) {
                stuff.send(i + testId);
            }
            try {
                synchronized (this) {
                    wait(System.currentTimeMillis() - start + 9000);
                }
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            mainThread.interrupt();
        }).start();
        int i = 0;
        try {
            for (; i < TEST_SIZE; i++) {
                String item = stuff.take();
                assertEquals(testId, item.substring(item.length() - 1));
            }
        } catch (InterruptedException ignored) {
        }
        assertEquals(i, TEST_SIZE);
    }
    @Order(5)
    @ParameterizedTest
    @ValueSource(strings = {"a", "b", "c", "d", "e"})
    @Timeout(10)
    public void ConfirmThatTestsRunIndependentlyParallel(String testId) {
        final int TEST_SIZE = 1000;
        final var mainThread = Thread.currentThread();
        new Thread(() -> {
            final var start = System.currentTimeMillis();
            for (int i = 0; i < TEST_SIZE; i++) {
                stuff.send(i + testId);
            }
            try {
                synchronized (this) {
                    wait(System.currentTimeMillis() - start + 9000);
                }
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            mainThread.interrupt();
        }).start();
        int i = 0;
        try {
            for (; i < TEST_SIZE; i++) {
                String item = stuff.take();
                assertEquals(testId, item.substring(item.length() - 1));
            }
        } catch (InterruptedException ignored) {
        }
        assertEquals(i, TEST_SIZE);
    }
    @Order(1)
    @ParameterizedTest
    @ValueSource(strings = {"a", "b", "c", "d", "e"})
    @Timeout(10)
    public void ProducesTheBug(String testId) {
        final int TEST_SIZE = 1000;
        final var mainThread = Thread.currentThread();
        // Send
        new Thread(() -> {
            final var start = System.currentTimeMillis();
            for (int i = 0; i < TEST_SIZE; i++) {
                stuff.send(i + testId);
            }
            try {
                synchronized (this) {
                    wait(System.currentTimeMillis() - start + 9000);
                }
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            mainThread.interrupt();
        }).start();
        int i = 0;
        // Receive
        try {
            for (; i < TEST_SIZE; i++) {
                String item = stuff.take();
                assertEquals(testId, item.substring(item.length() - 1));
            }
        } catch (InterruptedException ignored) {
        }
        assertEquals(i, TEST_SIZE);
    }
    @Order(3)
    @Test
    @Timeout(10)
    // This test is copied and pasted from ProducesTheBug()
    public void DoesNotProduceTheBug() {
        String testId = "a";
        final int TEST_SIZE = 1000;
        final var mainThread = Thread.currentThread();
        // Send
        new Thread(() -> {
            final var start = System.currentTimeMillis();
            for (int i = 0; i < TEST_SIZE; i++) {
                stuff.send(i + testId);
            }
            try {
                synchronized (this) {
                    wait(System.currentTimeMillis() - start + 9000);
                }
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            mainThread.interrupt();
        }).start();
        int i = 0;
        // Receive
        try {
            for (; i < TEST_SIZE; i++) {
                String item = stuff.take();
                assertEquals(testId, item.substring(item.length() - 1));
            }
        } catch (InterruptedException ignored) {
        }
        assertEquals(i, TEST_SIZE);
    }
    @Order(6)
    @ParameterizedTest
    @ValueSource(strings = {"a", "b", "c", "d", "e"})
    @Timeout(10)
    public void SendTestMessagesToSecondTopic(String testId) {
        final int TEST_SIZE = 1000;
        final var mainThread = Thread.currentThread();
        new Thread(() -> {
            final var start = System.currentTimeMillis();
            for (int i = 0; i < TEST_SIZE; i++) {
                stuff.sendToTwo(i + testId);
            }
            try {
                synchronized (this) {
                    wait(System.currentTimeMillis() - start + 9000);
                }
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            mainThread.interrupt();
        }).start();
        int i = 0;
        try {
            for (; i < TEST_SIZE; i++) {
                String item = stuff.take();
                assertEquals(testId, item.substring(item.length() - 1));
            }
        } catch (InterruptedException ignored) {
        }
        assertEquals(i, TEST_SIZE);
    }
}
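For reference, a hedged sketch of a warm-up guard that could be called at the start of each test to rule out a race between the very first send and the consumer's initial partition assignment on the freshly created broker. This is not in the repository, and the poll helper used here is hypothetical: it would simply delegate to queue.poll(timeout, unit) on the Stuff class, which currently only exposes take() (java.util.concurrent.TimeUnit would also need to be imported):

    // Hedged sketch only (not part of the repository): send sentinel messages until one
    // round-trips, proving the consumer is assigned and reading before the real batch is sent.
    private void warmUp(Stuff stuff) throws InterruptedException {
        for (int attempt = 0; attempt < 20; attempt++) {
            stuff.send("warm-up-" + attempt);                           // sentinel message
            String received = stuff.poll(500, TimeUnit.MILLISECONDS);   // hypothetical helper
            if (received != null && received.startsWith("warm-up")) {
                // drain any remaining sentinels so they do not pollute the test's assertions
                while (stuff.poll(500, TimeUnit.MILLISECONDS) != null) { /* discard */ }
                return;                                                 // consumer is live
            }
        }
        throw new IllegalStateException("consumer never received a warm-up message");
    }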
application.properties:
mp.messaging.incoming.TestChannel1.topic=TestChannel1
mp.messaging.incoming.TestChannel1.connector = smallrye-kafka
mp.messaging.outgoing.TestChannel1-out.topic=TestChannel1
mp.messaging.outgoing.TestChannel1-out.connector = smallrye-kafka
mp.messaging.incoming.TestChannel2.topic=TestChannel2
mp.messaging.incoming.TestChannel2.connector = smallrye-kafka
mp.messaging.outgoing.TestChannel2-out.topic=TestChannel2
mp.messaging.outgoing.TestChannel2-out.connector = smallrye-kafka
# The setting below disables Quarkus Dev Services so it does not create its own test broker.
quarkus.kafka.devservices.enabled=false
kafka.bootstrap.servers=9092
mp.messaging.incoming.TestChannel1.bootstrap.servers=localhost:9092
mp.messaging.outgoing.TestChannel1-out.bootstrap.servers=localhost:9092
mp.messaging.outgoing.TestChannel1-out.key.deserializer=org.apache.kafka.common.serialization.StringSerializer
mp.messaging.incoming.TestChannel2.bootstrap.servers=localhost:9092
mp.messaging.outgoing.TestChannel2-out.bootstrap.servers=localhost:9092
mp.messaging.incoming.TestChannel1.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer
mp.messaging.outgoing.TestChannel1-out.value.serializer=org.apache.kafka.common.serialization.StringSerializer
mp.messaging.incoming.TestChannel2.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer
mp.messaging.outgoing.TestChannel2-out.value.serializer=org.apache.kafka.common.serialization.StringSerializer
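For reference, one consumer setting that is not in the file above and that could be relevant (this is an assumption on my part, not something the repository sets): the SmallRye Kafka connector passes the standard auto.offset.reset consumer property through per incoming channel, and on a freshly created broker the consumer group has no committed offsets yet, so the default latest reset would silently skip anything produced before the consumer's first partition assignment completes:

# Hedged sketch, not part of the configuration above: start from the earliest offset
# when the consumer group has no committed offset on the (re-created) broker.
mp.messaging.incoming.TestChannel1.auto.offset.reset=earliest
mp.messaging.incoming.TestChannel2.auto.offset.reset=earliest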
docker-compose:
---
version: '1'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:latest
    hostname: zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000

  broker:
    image: confluentinc/cp-server:latest
    hostname: broker
    container_name: broker
    depends_on:
      - zookeeper
    ports:
      - "9092:9092"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:49170,PLAINTEXT_HOST://localhost:9092
      KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: http://schema-registry:8083/
      CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:49170
      CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1

#  schema-registry:
#    image: confluentinc/cp-schema-registry:latest
#    hostname: schema-registry
#    container_name: schema-registry
#    depends_on:
#      - broker
#    ports:
#      - "8083:8083"
#    environment:
#      SCHEMA_REGISTRY_HOST_NAME: schema-registry
#      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:49170'
#      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8083/
Things I have tried so far, and the results:
- Forcing eager loading of the @ApplicationScoped Stuff listener with an @Observes StartupEvent method: messages still not received.
- Verifying that the messages are published by consuming with a plain (non-Quarkus) Kafka consumer in a terminal window: the messages are received by the pure-Kafka consumer (a minimal standalone consumer sketch is included after this list).
- Adding a System.out.println statement to the @Incoming listen method: nothing is printed to the console for the missing batch.
- Publishing 1000 messages at once: the entire first batch of 1000 messages is not delivered, and the messages are also no longer in the Kafka queue (Kafka appears to treat them as acknowledged).
- Waiting up to 3 minutes after container creation before running the tests: messages still not received.
- Publishing messages to multiple topics: messages were received on the second topic; the missing messages are limited to the first batch only.
- Adding a Schema Registry service to the Docker Compose file: this prevents messages from going missing in the minimal bug-reproduction example, but not in the full app.
- Stopping and starting the container (without destroying it): messages are received; the Docker container must be destroyed and re-created to reproduce the bug.
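For reference, a minimal Java sketch of that kind of standalone check, with no Quarkus involved (it assumes the plain kafka-clients library on the classpath; the group id is arbitrary so it does not touch the application's consumer group offsets):

// Hedged sketch: a standalone consumer used to check whether the "missing" first
// batch ever reaches the broker; topic and bootstrap address match the compose file above.
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.List;
import java.util.Properties;

public class PlainConsumerCheck {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "plain-check");
        props.put("auto.offset.reset", "earliest"); // read the topic from its beginning
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(List.of("TestChannel1"));
            long total = 0;
            for (int polls = 0; polls < 20; polls++) { // poll for roughly ten seconds in total
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                total += records.count();
            }
            System.out.println("Records found on TestChannel1: " + total);
        }
    }
}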
I expect to receive every message sent through Kafka. If a message is not received by the @Incoming listener, I expect Kafka not to acknowledge it as received and to keep it in the queue.
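On the acknowledgement side, here is a hedged sketch of the standard MicroProfile Reactive Messaging style in which the listener receives the full Message and acknowledges it explicitly, which makes the acknowledgement point visible; this is not what the repository currently does, and it would need imports for org.eclipse.microprofile.reactive.messaging.Message and java.util.concurrent.CompletionStage:

    // Hedged sketch: an explicitly-acknowledging variant of the listener in Stuff.
    @Incoming(TEST_CHANNEL_NAME)
    public CompletionStage<Void> listenWithExplicitAck(Message<String> message) {
        System.out.println(message.getPayload());
        queue.add(message.getPayload());
        // Offsets are committed only for acknowledged messages; the exact behaviour
        // depends on the connector's commit strategy.
        return message.ack();
    }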