An issue with reading a gzipped file (.gz) in an IBM Cloud Functions action (Node.js 12)

I can read the data.json.gz file on my local machine with the code below (node --version: v14.15.0). But when I use the same logic in an IBM Cloud Functions action (Node.js 12) to read the same file from an Object Storage bucket, I get this error: ["stderr: ERROR: undefined - input_buf.on is not a function"].

I am very new to Node.js; can someone help identify the issue here? I appreciate your support.

Code that works on Local machine (Windows 10):

function decompressFile(filename) {
    var fs = require("fs");
    var zlib = require("zlib");
    var input = fs.createReadStream(filename);
    var data = [];

    input.on('data', function(chunk){
        data.push(chunk);
    }).on('end', function(){
        var buf = Buffer.concat(data);
        zlib.gunzip(buf, function(err, buffer) {
            if (!err) {
                var dataString = buffer.toString('utf8');
                console.log(dataString + '\n');
                var dataJSON = JSON.parse(dataString);
            } else {
                console.log(err);
            }
        });
    });
}

decompressFile("data.json.gz");
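
For reference, the same local decompression can be written in a streaming style by piping the read stream through zlib.createGunzip(), which avoids buffering the whole compressed file first. This is just a sketch using Node's built-in fs and zlib modules; decompressFileStreaming is an illustrative name, not part of the original code:

// Sketch of a streaming alternative: pipe the file through a gunzip stream.
// (decompressFileStreaming is a made-up name for illustration.)
var fs = require("fs");
var zlib = require("zlib");

function decompressFileStreaming(filename) {
    var chunks = [];
    fs.createReadStream(filename)
        .pipe(zlib.createGunzip())                  // decompress chunk by chunk
        .on('data', function(chunk) { chunks.push(chunk); })
        .on('end', function() {
            var dataJSON = JSON.parse(Buffer.concat(chunks).toString('utf8'));
            console.log(dataJSON);
        })
        .on('error', function(err) { console.log(err); });
}

decompressFileStreaming("data.json.gz");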

Code that does not work in an IBM Cloud Functions action reading from an Object Storage bucket:

// Get the file contents of a gzipped item
async function getGzippedItem(cosClient, bucketName, itemName) { // <<< async keyword added
    const fs = require('fs');
    const zlib = require('zlib');

    return await cosClient.getObject({ // <<< turned into assignment with await
        Bucket: bucketName,
        Key: itemName
    }).promise()
        .then((instream = fs.createReadStream(itemName)) => {
            if (instream != null) {
                var data = [];
                var input_buf = instream.Body;

                input_buf.on('data', function(chunk) {
                    data.push(chunk);
                }).on('end', function() {
                    var buf = Buffer.concat(data);

                    zlib.gunzip(buf, function(err, buffer) {
                        if (!err) {
                            var dataString = buffer.toString();
                            var dataJSON = JSON.parse(dataString.toString('utf8'));
                        } else {
                            console.log(err);
                        }
                    });
                });
                return buf;
            }
        })
        .catch((e) => {
            console.error(`ERROR: ${e.code} - ${e.message}\n`);
        });
}


async function main(params) {

    bucketName = 'bucket';
    itemName = 'data.json.gz';

    var util = require('util');
    var fs = require('fs');

    // Initializing configuration
    const myCOS = require('ibm-cos-sdk');

    var config = {
        endpoint: 'endpoint',
        apiKeyId: 'apiKeyId',
        ibmAuthEndpoint: 'ibmAuthEndpoint',
        serviceInstanceId: 'serviceInstanceId',
    };

    var cosClient = new myCOS.S3(config);

    gzippedItemContent = await getGzippedItem(cosClient, bucketName, itemName); // <<< await keyword added
    console.log(">>>>>>>>>>>>>>>: ", typeof gzippedItemContent, gzippedItemContent);
}
1 Answer

briadeus (accepted answer):

The message is telling you that your input_buf object is not of the type you expect it to be. The result of a createReadStream() call is just a stream:

[Stream] the readable stream object that can be piped or read from (by registering 'data' event listeners).

So you should be able to read from the stream directly (instead of declaring var input_buf = instream.Body):

var data = '';
var getObjectStream = cosClient.getObject({
    Bucket: 'BUCKET',
    Key: 'KEY'
}).createReadStream();

getObjectStream.on('data', function(c) {
    data += c.toString();
});

Have a look at the test section of the ibm-cos-sdk-js project; it describes how to use the API.
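
For completeness, here is a minimal sketch that combines this streaming approach with the gunzip step from the question: it pipes the object stream through zlib.createGunzip() and resolves with the parsed JSON. It reuses the cosClient, bucketName, and itemName names from the question, and error handling is deliberately minimal:

// A sketch, not the exact answer code: stream the object, gunzip it, parse JSON.
function getGzippedItem(cosClient, bucketName, itemName) {
    const zlib = require('zlib');

    return new Promise((resolve, reject) => {
        const chunks = [];
        cosClient.getObject({ Bucket: bucketName, Key: itemName })
            .createReadStream()            // readable stream of the object body
            .pipe(zlib.createGunzip())     // decompress on the fly
            .on('data', (chunk) => chunks.push(chunk))
            .on('end', () => resolve(JSON.parse(Buffer.concat(chunks).toString('utf8'))))
            .on('error', reject);
    });
}

Alternatively, note that the .promise() call in the question resolves with a response object whose Body is already a Buffer, not a stream; that appears to be why input_buf.on is not a function. If you keep the promise-based variant, you can pass instream.Body directly to zlib.gunzip() instead of registering stream listeners.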