In this reference, https://developers.google.com/apps-script/advanced/bigquery, they load CSV data into BigQuery like this:
var file = DriveApp.getFileById(csvFileId);
var data = file.getBlob().setContentType('application/octet-stream');

// Create the data upload job.
var job = {
  configuration: {
    load: {
      destinationTable: {
        projectId: projectId,
        datasetId: datasetId,
        tableId: tableId
      },
      skipLeadingRows: 1
    }
  }
};
job = BigQuery.Jobs.insert(job, projectId, data);
As I understand it, they send a blob to BigQuery with file.getBlob().setContentType('application/octet-stream'), which is not very friendly.

How can I send JSON to BigQuery from Apps Script?
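One workaround I can think of is to serialize the rows as newline-delimited JSON and reuse the same load-job pattern, but that still goes through a blob. A rough, untested sketch (the ids and field names below are just placeholders):

// Untested sketch: build newline-delimited JSON in memory and load it with
// the same Jobs.insert pattern as the CSV example above.
function loadJsonRows(projectId, datasetId, tableId) {
  var rows = [
    { timestamp: new Date().toISOString(), userId: 'some-user-id' }
  ];

  // Newline-delimited JSON: one serialized object per line.
  var ndjson = rows.map(function (row) {
    return JSON.stringify(row);
  }).join('\n');
  var data = Utilities.newBlob(ndjson, 'application/octet-stream');

  var job = {
    configuration: {
      load: {
        destinationTable: {
          projectId: projectId,
          datasetId: datasetId,
          tableId: tableId
        },
        sourceFormat: 'NEWLINE_DELIMITED_JSON'
      }
    }
  };
  job = BigQuery.Jobs.insert(job, projectId, data);
  Logger.log(job.id);
}

That still feels like the blob approach in disguise, though.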
With the @google-cloud/bigquery library (used in a project outside of Apps Script), I can do something like this (see https://cloud.google.com/bigquery/streaming-data-into-bigquery#streaminginsertexamples):
// Import the Google Cloud client library
const { BigQuery } = require('@google-cloud/bigquery')
const moment = require('moment')

exports.insertUsageLog = async (userId) => {
  const datasetId = 'usage'
  const tableId = 'logs'
  const rows = [
    // The JSON data is collected here
    {
      timestamp: moment.utc().toISOString(),
      userId,
      // Something else ...
    },
  ]

  // Create a client
  const bigqueryClient = new BigQuery()

  // Insert data into a table
  await bigqueryClient
    .dataset(datasetId)
    .table(tableId)
    .insert(rows)
  console.log(`Inserted ${rows.length} rows`)
}
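Ideally I'd like the Apps Script equivalent of .insert(rows) above. I'm guessing the advanced BigQuery service might expose the REST API's tabledata.insertAll for this, something along these lines (untested; the parameter order is only my assumption based on how Jobs.insert is called):

// Untested guess: streaming JSON rows via the advanced BigQuery service,
// assuming it mirrors the REST API's tabledata.insertAll method.
function insertJsonRows(projectId, datasetId, tableId) {
  var request = {
    rows: [
      {
        insertId: Utilities.getUuid(), // optional, for de-duplication
        json: {
          timestamp: new Date().toISOString(),
          userId: 'some-user-id'
        }
      }
    ]
  };
  var response = BigQuery.Tabledata.insertAll(request, projectId, datasetId, tableId);
  Logger.log(response);
}

Is that the right way, or is there a better pattern for sending JSON to BigQuery from Apps Script?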