
While submitting the Flink job on the Dataproc cluster we are getting a java.util.ServiceConfigurationError: io.grpc.NameResolverProvider: Provider io.grpc.netty.shaded.io.grpc.netty.UdsNameResolverProvider not a subtype exception. We are trying to read data from Pub/Sub. Please find the code we have written below.

import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.gcp.pubsub.PubSubSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class ReadFromPubsub
{

    public static void main(String args[]) throws Exception 
    {
        System.out.println("Flink Pubsub Code Read 1");
        final Logger LOG = LoggerFactory.getLogger(ReadFromPubsub.class);

        final StreamExecutionEnvironment streamExecEnv = StreamExecutionEnvironment.getExecutionEnvironment();   
        
       
        streamExecEnv.setStateBackend(new RocksDBStateBackend("file:///tmp/checkpoints"));
        streamExecEnv.enableCheckpointing();
        streamExecEnv.setRestartStrategy(RestartStrategies.fixedDelayRestart(10,0L));
        streamExecEnv.setParallelism(1);
        
        //streamExecEnv.setRestartStrategy(RestartStrategies.failureRateRestart(10,Time.of(5, TimeUnit.MINUTES),Time.of(10, TimeUnit.SECONDS)));
     
        
        
        try
        {
            System.out.println("Flink try block start");
            DeserializationSchema<String> deserializer = new SimpleStringSchema();
            SourceFunction<String> pubsubSource = PubSubSource.newBuilder()
                                                              .withDeserializationSchema(deserializer)
                                                              .withProjectName("vz-it-np-gudv-dev-vzntdo-0")
                                                              .withSubscriptionName("subscription1")
                                                              .build();
            streamExecEnv.addSource(pubsubSource).print();
            System.out.println("Flink try block End");
        }    
        catch(Exception e)
        {
             System.out.println("Flink Exception ----- :"+e);
        }
        
        
        //DataStreamSource<String> ds = streamExecEnv.addSource(pubsubSource);
        //ds.print();
        //streamExecEnv.addSource(pubsubSource).broadcast();
        
        Logger rootLogger = LoggerFactory.getLogger("org.apache.flink");
        //rootLogger.setLevel(Level.DEBUG);
        rootLogger.debug("ReadFromPubsub pipeline built, submitting job");

        System.out.println("Flink Pubsub Code Read End");
        streamExecEnv.execute();
    }
}

I am also using the following dependencies in the pom.xml:

<dependencies>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-api</artifactId>
        <version>1.7.5</version>
    </dependency>

    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>3.2.0</version>
    </dependency>

    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-java</artifactId>
        <version>1.9.3</version>
    </dependency>

    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-java_2.11</artifactId>
        <version>1.9.3</version>
    </dependency>

    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-gcp-pubsub_2.12</artifactId>
        <version>1.9.3</version>
    </dependency>

    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-core</artifactId>
        <version>1.9.3</version>
    </dependency>

    <dependency>
        <groupId>com.google.cloud</groupId>
        <artifactId>google-cloud-pubsub</artifactId>
        <version>1.66.0</version>
    </dependency>

    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-statebackend-rocksdb_2.12</artifactId>
        <version>1.9.3</version>
    </dependency>

    <dependency>
        <groupId>io.grpc</groupId>
        <artifactId>grpc-stub</artifactId>
        <version>1.53.0</version>
    </dependency>

    <dependency>
        <groupId>io.grpc</groupId>
        <artifactId>grpc-netty-shaded</artifactId>
        <version>1.53.0</version>
    </dependency>

    <dependency>
        <groupId>com.google.apis</groupId>
        <artifactId>google-api-services-pubsub</artifactId>
        <version>v1-rev356-1.22.0</version>
    </dependency>

    <dependency>
        <groupId>io.grpc</groupId>
        <artifactId>grpc-core</artifactId>
        <version>1.0.0</version>
    </dependency>
</dependencies>
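
The gRPC artifacts above end up at mixed versions (grpc-stub and grpc-netty-shaded at 1.53.0, grpc-core at 1.0.0). In case that mismatch matters for the error, the sketch below shows how all io.grpc artifacts could be pinned to a single release through the gRPC BOM; targeting 1.53.0 here is only an assumption on our side, and we have not verified that this resolves the exception.

<dependencyManagement>
    <dependencies>
        <!-- Assumption: align grpc-core, grpc-stub, grpc-netty-shaded and the gRPC
             artifacts pulled in transitively by google-cloud-pubsub to one version. -->
        <dependency>
            <groupId>io.grpc</groupId>
            <artifactId>grpc-bom</artifactId>
            <version>1.53.0</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>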

Note: We are using Flink version 1.9.3 and Dataproc image version 1.5.71-rocky8.
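
For completeness: java.util.ServiceConfigurationError comes from a ServiceLoader lookup driven by META-INF/services files, so the way the job jar is assembled may also play a role. The following is only an illustrative sketch (not our actual build section) of a maven-shade-plugin configuration that merges those service files from all dependencies when building a fat jar.

<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-shade-plugin</artifactId>
    <version>3.4.1</version>
    <executions>
        <execution>
            <phase>package</phase>
            <goals>
                <goal>shade</goal>
            </goals>
            <configuration>
                <transformers>
                    <!-- Merge META-INF/services entries (e.g. io.grpc.NameResolverProvider)
                         instead of letting one dependency's file overwrite another's. -->
                    <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                </transformers>
            </configuration>
        </execution>
    </executions>
</plugin>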
