I'm trying to read data from a Kafka topic using Spark Streaming. Below are the code and the libraries I am using. The code looks fine to me, but `ssc.start()` appears to hang without printing any ERROR or INFO messages. Any pointers on the issue would be a great help.
spark-shell --jars kafka_2.12-2.0.0.jar,spark-streaming-kafka-0-10_2.12-2.4.0.jar,kafka-clients-2.0.0.jar
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka010._
import org.apache.spark.SparkConf
// Streaming context with a 10-second micro-batch interval, reusing the
// spark-shell-provided SparkContext `sc`.
val ssc = new StreamingContext(sc, Seconds(10))

// Kafka topic(s) to subscribe to.
val topics = Array("Qradar_Demo")

// Consumer configuration for the direct stream.
// NOTE(review): port 6667 is the HDP/Ambari Kafka PLAINTEXT default — confirm it
// matches the broker's advertised listener; a wrong host/port makes the consumer
// block silently with no ERROR output, which matches the reported symptom.
val kafkaParams = Map[String, Object](
  "bootstrap.servers" -> "hostname:6667",
  "key.deserializer" -> classOf[StringDeserializer],
  "value.deserializer" -> classOf[StringDeserializer],
  "group.id" -> "use_a_separate_group_id_for_each_stream",
  "auto.offset.reset" -> "latest",
  // Offsets are managed by the stream, not auto-committed by the consumer.
  "enable.auto.commit" -> (false: java.lang.Boolean)
)

// Direct stream of ConsumerRecord[String, String] from the subscribed topics.
val messages = KafkaUtils.createDirectStream[String, String](
  ssc,
  PreferConsistent,
  Subscribe[String, String](topics, kafkaParams)
)

// FIX: ConsumerRecord is not serializable, so printing the records directly can
// fail (NotSerializableException) or show nothing useful. Project each record to
// its (key, value) pair before printing, as in the Spark+Kafka integration guide.
messages.map(record => (record.key, record.value)).print()

ssc.start()
// FIX: start() only launches the job and returns immediately; without
// awaitTermination() the driver never blocks on the streaming computation, so
// (outside an interactive shell) the program ends before any batch is printed.
ssc.awaitTermination()