Insert into BigQuery via Cloud Function not working


I am inserting the following JSON into a pre-created BigQuery table.

{
  "Member_ID": 881230,
  "First_Name": "Dave2",
  "Last_Name": "Manin2",
  "Gender": "M",
  "Age": 53,
  "Height": "5,2",
  "Weight": 145,
  "Hours_Sleep": 4,
  "Calories_Consumed": 2497,
  "Exercise_Calories_Burned": 876,
  "Date": "2018-10-17"
}

When I insert the above row into the table directly it works fine, but it errors out when inserted via the function (see below for the complete error message). What am I missing? Any help is appreciated.

This is my Node.js function (the error message follows the code):

exports.subscribe = function (event, callback) {
  const BigQuery = require('@google-cloud/bigquery');
  const projectId = "mydemo-221920"; //Enter your project ID here
  const datasetId = "mydemodata"; //Enter your BigQuery dataset name here
  const tableId = "memid"; //Enter your BigQuery table name here -- make sure it is setup correctly
  const PubSubMessage = event.data;
  // Incoming data is in JSON format
  // Decode the base64 Pub/Sub payload; fall back to a placeholder row if it is empty.
  // Note: the fallback must be valid JSON (double-quoted keys/values) or JSON.parse will throw.
  const incomingData = PubSubMessage.data ? Buffer.from(PubSubMessage.data, 'base64').toString() : '{"Member_ID":"na","First_Name":"na","Last_Name":"na","Gender":"na","Age":"na","Height":"na","Weight":"na","Hours_Sleep":"na","Calories_Consumed":"na","Exercise_Calories_Burned":"na","Date":"na"}';
  console.log(`My log  PubSub en String est: ${incomingData}`);
  const jsonData = JSON.parse(incomingData);
  const rows = [jsonData];

  console.log(`Uploading data: ${JSON.stringify(rows)}`);

  // Instantiates a client
  const bigquery = BigQuery({
    projectId: projectId
  });

  // Inserts data into the memid table
  bigquery
    .dataset(datasetId)
    .table(tableId)
    .insert(rows)
    .then((foundErrors) => {
      rows.forEach((row) => console.log('Inserted: ', row));

      if (foundErrors && foundErrors.insertErrors != undefined) {
        foundErrors.forEach((err) => {
            console.log('Error: ', err);
        })
      }
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END bigquery_insert_stream]


  callback();
}
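
If it helps, the handler can also be exercised outside of Cloud Functions by handing it a hand-built event. This is a rough local sketch only, assuming the function above is saved as index.js; the base64 wrapping mirrors what the Pub/Sub trigger delivers:

// test-local.js -- hypothetical local harness, not part of the deployed function
const { subscribe } = require('./index');

const row = {
  Member_ID: 881230,
  First_Name: 'Dave2',
  Last_Name: 'Manin2',
  Gender: 'M',
  Age: 53,
  Height: '5,2',
  Weight: 145,
  Hours_Sleep: 4,
  Calories_Consumed: 2497,
  Exercise_Calories_Burned: 876,
  Date: '2018-10-17'
};

// Background functions receive the Pub/Sub message as event.data,
// with the JSON payload base64-encoded in its `data` field.
const event = {
  data: {
    data: Buffer.from(JSON.stringify(row)).toString('base64')
  }
};

subscribe(event, () => console.log('callback invoked'));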

I get this error when the function tries to insert the row:

ERROR: { PartialFailureError: A failure occurred during this request. 
 at /user_code/node_modules/@google-cloud/bigquery/src/table.js:1213:13
 at Object.handleResp (/user_code/node_modules/@google-cloud/bigquery/node_modules/@google-cloud/common/src/util.js:135:3) 
 at /user_code/node_modules/@google-cloud/bigquery/node_modules/@google-cloud/common/src/util.js:465:12 
 at Request.onResponse [as _callback] (/user_code/node_modules/@google-cloud/bigquery/node_modules/retry-request/index.js:198:7)
 at Request.self.callback (/user_code/node_modules/@google-cloud/bigquery/node_modules/request/request.js:185:22)
 at emitTwo (events.js:106:13)
 at Request.emit (events.js:191:7)
 at Request.<anonymous> (/user_code/node_modules/@google-cloud/bigquery/node_modules/request/request.js:1161:10)
 at emitOne (events.js:96:13)
 at Request.emit (events.js:188:7) errors: [ { errors: [Object], row: [Object] } ], response: { kind: 'bigquery#tableDataInsertAllResponse', insertErrors: [ [Object] ] }, message: 'A failure occurred during this request.' }
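
The [Object] placeholders in that message hide the per-field reason for the rejection. To surface it, the catch block above could be expanded to dump the nested errors array that the PartialFailureError carries (a small variation on the code above, not what is currently deployed):

.catch((err) => {
  console.error('ERROR:', err.message);
  // A PartialFailureError keeps the rejected rows and the reasons for each
  // rejection in err.errors (visible as errors: [ { errors, row } ] above).
  if (err.errors) {
    err.errors.forEach((e) => {
      console.error('Rejected row:', JSON.stringify(e.row));
      console.error('Reasons:', JSON.stringify(e.errors, null, 2));
    });
  }
});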

The corresponding log entry shows the JSON the function received:

{
 insertId:  "000000-6dbc4c0f-fe15-4260-95c6-10afe7d0960b"  
 labels: {…}  
 logName:  "projects/mydemo-221920/logs/cloudfunctions.googleapis.com%2Fcloud-functions"  
 receiveTimestamp:  "2018-11-13T17:27:48.429240186Z"  
 resource: {…}  
 severity:  "INFO"  
 textPayload:  "My log  PubSub en String est: {"Member_ID":881230,"First_Name":"Dave2","Last_Name":"Manin2","Gender":"M","Age":53,"Height":"5,2","Weight":145,"Hours_Sleep":4,"Calories_Consumed":2497,"Exercise_Calories_Burned":876,"Date":"2018-10-17"}"  
 timestamp:  "2018-11-13T17:27:42.143Z"  
 trace:  "projects/mydemo-221920/traces/cef3531fe182bf1d6da6e47aae3bbff3"  
}
1 Answer

Answer from Alex Riquelme:

I cannot see any errors in your code or in your JSON. I tried the code that you provided and I was able to stream data into BigQuery without getting any errors:

[screenshot: function logs showing the insert succeeding]

[screenshot: the inserted row in the BigQuery table]

I then modified the schema of my BigQuery table to reproduce your issue, and I got the exact same error:

[screenshot: log showing the same PartialFailureError]
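
If you want to compare the incoming row against what your table actually expects, you can dump the table's metadata with the same client library. A sketch, using the project/dataset/table IDs from your question:

const BigQuery = require('@google-cloud/bigquery');
const bigquery = BigQuery({ projectId: 'mydemo-221920' });

bigquery
  .dataset('mydemodata')
  .table('memid')
  .getMetadata()
  .then(([metadata]) => {
    // metadata.schema.fields lists each column's name, type and mode
    console.log(JSON.stringify(metadata.schema.fields, null, 2));
  })
  .catch((err) => console.error(err));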

This schema should fix your issue:

[screenshot: BigQuery table schema]
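
For reference, a schema along these lines matches the row in your question (field types are inferred from the sample values, not taken from the screenshot; Height stays a STRING because of the "5,2" value, and Date could also be kept as STRING):

[
  { "name": "Member_ID",                "type": "INTEGER", "mode": "NULLABLE" },
  { "name": "First_Name",               "type": "STRING",  "mode": "NULLABLE" },
  { "name": "Last_Name",                "type": "STRING",  "mode": "NULLABLE" },
  { "name": "Gender",                   "type": "STRING",  "mode": "NULLABLE" },
  { "name": "Age",                      "type": "INTEGER", "mode": "NULLABLE" },
  { "name": "Height",                   "type": "STRING",  "mode": "NULLABLE" },
  { "name": "Weight",                   "type": "INTEGER", "mode": "NULLABLE" },
  { "name": "Hours_Sleep",              "type": "INTEGER", "mode": "NULLABLE" },
  { "name": "Calories_Consumed",        "type": "INTEGER", "mode": "NULLABLE" },
  { "name": "Exercise_Calories_Burned", "type": "INTEGER", "mode": "NULLABLE" },
  { "name": "Date",                     "type": "DATE",    "mode": "NULLABLE" }
]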