0

How can I pause the Kafka consumer so that my application will stop processing data?

/**
 * Entry point: wires up a local Spark Streaming job that consumes records
 * from the Kafka topic "test-topic" via the direct stream API.
 *
 * @param args unused command-line arguments
 * @throws InterruptedException if the streaming context is interrupted
 *                              while awaiting termination
 */
public static void main(String[] args) throws InterruptedException {
    SparkConf conf = new SparkConf()
            .setMaster("local[*]")
            .setAppName("NetworkWordCount2");

    // 2-second micro-batch interval.
    JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(2));
    jssc.sparkContext().setLogLevel("WARN");

    Collection<String> topics = Arrays.asList("test-topic");
    JavaInputDStream<ConsumerRecord<String, String>> stream = KafkaUtils.createDirectStream(
            jssc,
            LocationStrategies.PreferConsistent(),
            ConsumerStrategies.<String, String>Subscribe(topics, getKafkaParam()));

    // ... stream transformations go here ...

    jssc.start();
    jssc.awaitTermination();
}

/**
 * Builds the Kafka consumer configuration used by the direct stream.
 *
 * @return a mutable map of Kafka consumer properties
 */
private static Map<String, Object> getKafkaParam() {
    final Map<String, Object> params = new HashMap<>();
    params.put("bootstrap.servers", "localhost:9092");
    params.put("key.deserializer", StringDeserializer.class);
    params.put("value.deserializer", StringDeserializer.class);
    params.put("group.id", "custom_group");
    params.put("auto.offset.reset", "latest");
    // enable.auto.commit is intentionally left at the client default here.
    return params;
}

Is there any way I can get the Consumer object inside a worker node to perform the pause operation?

  • You can add Java code highlighting with ```java ```. This will make it easier for readers to quickly parse the question. – J Schmidt Sep 19 '22 at 09:09

0 Answers