
I am trying to build a simple Cloud Data Fusion pipeline that moves data from our Cloud SQL (MySQL) instance into BigQuery. The JDBC driver side is working fine: if I use the Trash plugin as a sink, I can see the preview data, and the schema propagates.
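
For reference, the source side can also be verified independently of Data Fusion with a plain JDBC check. This is only a minimal sketch: the host, database, and credentials are placeholders, and it assumes MySQL Connector/J is on the classpath.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class JdbcSmokeTest {
        public static void main(String[] args) throws Exception {
            // Placeholder Cloud SQL host, database, and credentials.
            String url = "jdbc:mysql://10.0.0.5:3306/mydb";
            try (Connection conn = DriverManager.getConnection(url, "user", "password");
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("SELECT 1")) {
                rs.next();
                System.out.println("JDBC connection OK: " + rs.getInt(1));
            }
        }
    }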

I created the BigQuery dataset and table, but whenever I run the pipeline I get the following error, which doesn't really tell me anything. What is going wrong here?

java.lang.NullPointerException: null
    at com.google.common.base.Preconditions.checkNotNull(Preconditions.java:877) ~[com.google.guava.guava-13.0.1.jar:na]
    at com.google.common.collect.Lists$TransformingSequentialList.<init>(Lists.java:542) ~[com.google.guava.guava-13.0.1.jar:na]
    at com.google.common.collect.Lists.transform(Lists.java:526) ~[com.google.guava.guava-13.0.1.jar:na]
    at com.google.cloud.bigquery.FieldList.fromPb(FieldList.java:116) ~[na:na]
    at com.google.cloud.bigquery.Schema.fromPb(Schema.java:107) ~[na:na]
    at com.google.cloud.bigquery.TableDefinition$Builder.table(TableDefinition.java:120) ~[na:na]
    at com.google.cloud.bigquery.StandardTableDefinition.fromPb(StandardTableDefinition.java:220) ~[na:na]
    at com.google.cloud.bigquery.TableDefinition.fromPb(TableDefinition.java:155) ~[na:na]
    at com.google.cloud.bigquery.TableInfo$BuilderImpl.<init>(TableInfo.java:183) ~[na:na]
    at com.google.cloud.bigquery.Table.fromPb(Table.java:603) ~[na:na]
    at com.google.cloud.bigquery.BigQueryImpl.getTable(BigQueryImpl.java:415) ~[na:na]
    at io.cdap.plugin.gcp.bigquery.sink.AbstractBigQuerySink.validateSchema(AbstractBigQuerySink.java:237) ~[na:na]
    at io.cdap.plugin.gcp.bigquery.sink.AbstractBigQuerySink.getBigQueryTableFields(AbstractBigQuerySink.java:316) ~[na:na]
    at io.cdap.plugin.gcp.bigquery.sink.AbstractBigQuerySink.initOutput(AbstractBigQuerySink.java:126) ~[na:na]
    at io.cdap.plugin.gcp.bigquery.sink.BigQuerySink.prepareRunInternal(BigQuerySink.java:83) ~[na:na]
    at io.cdap.plugin.gcp.bigquery.sink.AbstractBigQuerySink.prepareRun(AbstractBigQuerySink.java:94) ~[na:na]
    at io.cdap.plugin.gcp.bigquery.sink.AbstractBigQuerySink.prepareRun(AbstractBigQuerySink.java:59) ~[na:na]
    at io.cdap.cdap.etl.common.plugin.WrappedBatchSink.lambda$prepareRun$0(WrappedBatchSink.java:52) ~[na:na]
    at io.cdap.cdap.etl.common.plugin.Caller$1.call(Caller.java:30) ~[na:na]
    at io.cdap.cdap.etl.common.plugin.StageLoggingCaller.call(StageLoggingCaller.java:40) ~[na:na]
    at io.cdap.cdap.etl.common.plugin.WrappedBatchSink.prepareRun(WrappedBatchSink.java:51) ~[na:na]
    at io.cdap.cdap.etl.common.plugin.WrappedBatchSink.prepareRun(WrappedBatchSink.java:37) ~[na:na]
    at io.cdap.cdap.etl.common.submit.SubmitterPlugin.lambda$prepareRun$2(SubmitterPlugin.java:71) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.AbstractContext$2.run(AbstractContext.java:551) ~[na:na]
    at io.cdap.cdap.data2.transaction.Transactions$CacheBasedTransactional.finishExecute(Transactions.java:224) ~[na:na]
    at io.cdap.cdap.data2.transaction.Transactions$CacheBasedTransactional.execute(Transactions.java:211) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.AbstractContext.execute(AbstractContext.java:546) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.AbstractContext.execute(AbstractContext.java:534) ~[na:na]
    at io.cdap.cdap.etl.common.submit.SubmitterPlugin.prepareRun(SubmitterPlugin.java:69) ~[na:na]
    at io.cdap.cdap.etl.batch.PipelinePhasePreparer.prepare(PipelinePhasePreparer.java:111) ~[na:na]
    at io.cdap.cdap.etl.batch.mapreduce.MapReducePreparer.prepare(MapReducePreparer.java:97) ~[na:na]
    at io.cdap.cdap.etl.batch.mapreduce.ETLMapReduce.initialize(ETLMapReduce.java:192) ~[na:na]
    at io.cdap.cdap.api.mapreduce.AbstractMapReduce.initialize(AbstractMapReduce.java:109) ~[na:na]
    at io.cdap.cdap.api.mapreduce.AbstractMapReduce.initialize(AbstractMapReduce.java:32) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.batch.MapReduceRuntimeService$1.initialize(MapReduceRuntimeService.java:182) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.batch.MapReduceRuntimeService$1.initialize(MapReduceRuntimeService.java:177) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.AbstractContext.lambda$initializeProgram$1(AbstractContext.java:640) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.AbstractContext.execute(AbstractContext.java:600) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.AbstractContext.initializeProgram(AbstractContext.java:637) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.batch.MapReduceRuntimeService.beforeSubmit(MapReduceRuntimeService.java:547) ~[na:na]
    at io.cdap.cdap.internal.app.runtime.batch.MapReduceRuntimeService.startUp(MapReduceRuntimeService.java:226) ~[na:na]
    at com.google.common.util.concurrent.AbstractExecutionThreadService$1$1.run(AbstractExecutionThreadService.java:47) ~[com.google.guava.guava-13.0.1.jar:na]
    at io.cdap.cdap.internal.app.runtime.batch.MapReduceRuntimeService$2$1.run(MapReduceRuntimeService.java:450) [na:na]
    at java.lang.Thread.run(Thread.java:748) [na:1.8.0_222]
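
Reading the trace, the NullPointerException is thrown inside Schema.fromPb / FieldList.fromPb while the sink fetches the existing table to validate its schema (AbstractBigQuerySink.validateSchema), so the existing table's field list appears to come back null, e.g. if the table was created without any columns. A minimal sketch to check what schema the existing table actually reports, using the same google-cloud-bigquery client from the trace (placeholder dataset/table names, application-default credentials assumed):

    import com.google.cloud.bigquery.BigQuery;
    import com.google.cloud.bigquery.BigQueryOptions;
    import com.google.cloud.bigquery.Schema;
    import com.google.cloud.bigquery.Table;
    import com.google.cloud.bigquery.TableId;

    public class CheckTableSchema {
        public static void main(String[] args) {
            BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
            // Placeholder names; use the dataset/table configured in the BQ sink.
            Table table = bigquery.getTable(TableId.of("my_dataset", "my_table"));
            if (table == null) {
                System.out.println("Table does not exist");
                return;
            }
            // A table created without columns reports a null schema here,
            // which would match the null field list the sink trips over.
            Schema schema = table.getDefinition().getSchema();
            System.out.println("Schema: " + schema);
        }
    }

If this prints a null schema, recreating the table with explicit columns (or deleting it so the sink can create it) would be worth trying.
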
  • Did you know you can do this now with EXTERNAL_QUERY without doing a pipeline? https://cloud.google.com/bigquery/docs/cloud-sql-federated-queries – Pentium10 Oct 04 '19 at 11:55
  • Yes, eventually there is a lot more pre-processing / merging going to happen, which is the reason Data Fusion seems interesting. For a 1:1 copy this is of course nonsense, but I wanted to keep the failure example small. – Dino Oct 04 '19 at 12:26
  • Can you share the table's schema? It looks like the NPE is being encountered when processing the table's schema. – Ali Anwar Oct 08 '19 at 18:09
  • Could you provide the configuration for BQ? – Nitin Motgi Oct 17 '19 at 03:30
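
A minimal sketch of the EXTERNAL_QUERY approach Pentium10 mentions above, submitted through the same Java client; "my-project.us.my-connection" is a hypothetical Cloud SQL connection resource and source_table is a placeholder:

    import com.google.cloud.bigquery.BigQuery;
    import com.google.cloud.bigquery.BigQueryOptions;
    import com.google.cloud.bigquery.QueryJobConfiguration;
    import com.google.cloud.bigquery.TableResult;

    public class FederatedQueryExample {
        public static void main(String[] args) throws Exception {
            // Hypothetical Cloud SQL connection resource; the inner
            // statement runs in MySQL, the outer one in BigQuery.
            String sql = "SELECT * FROM EXTERNAL_QUERY("
                + "'my-project.us.my-connection', "
                + "'SELECT id, name FROM source_table;')";
            BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
            TableResult result = bigquery.query(QueryJobConfiguration.of(sql));
            result.iterateAll().forEach(System.out::println);
        }
    }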

0 Answers