Migrating from aws-sdk-js v2 to v3: Sharp error "Input file is missing"


Works with aws-sdk-js v2:

const Sharp = require('sharp');
const AWS = require('aws-sdk');

const S3 = new AWS.S3({ signatureVersion: 'v4' });

const originalPhoto = (await S3.getObject({ Bucket: "myBucket", Key: "myKey" }).promise()).Body;
const originalPhotoDimensions = await Sharp(originalPhoto).metadata();


Error with aws-sdk-js v3 (I understand this is because originalPhoto is no longer a Buffer):

const Sharp = require('sharp');
const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");

const S3 = new S3Client({ region: "us-east-1" });

const originalPhoto = (await S3.send(new GetObjectCommand({ Bucket: "myBucket", Key: "myKey" }))).Body;
const originalPhotoDimensions = await Sharp(originalPhoto).metadata(); // <--- Error


The error: Input file is missing

ERROR   Unhandled Promise Rejection     
{"errorType":"Runtime.UnhandledPromiseRejection","errorMessage":"Error: Input file is missing",

Sharp version: 0.28.1
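To see why the v3 call fails where the v2 call worked, it helps to check what Body actually is at runtime. A minimal sketch, reusing the question's placeholder bucket and key, and assuming a Node.js runtime (where v3 returns Body as a readable stream):

const { Readable } = require('stream');
const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");

const S3 = new S3Client({ region: "us-east-1" });

(async () => {
  const { Body } = await S3.send(new GetObjectCommand({ Bucket: "myBucket", Key: "myKey" }));
  console.log(Buffer.isBuffer(Body));     // false in v3 (in v2, Body was a Buffer)
  console.log(Body instanceof Readable);  // true in Node.js: Body is a stream
})();

Sharp's constructor accepts a Buffer but not a stream argument, hence "Input file is missing".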

There are 3 answers

Mike Slinn:

Try this:

// See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/migrating-to-v3.html
// npm init
// npm install sharp @aws-sdk/client-s3 @aws-sdk/credential-provider-ini
// node s3.js

const Sharp = require('sharp');

const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");
const { fromIni } = require("@aws-sdk/credential-provider-ini");

const s3 = new S3Client({
  credentials: fromIni({
    profile: 'default'
  })
});

const bucket = 'nameOfBucket';
const key = 'imageName';

const run = async () => {
  try {
    // In v3, Body is a readable stream rather than a Buffer,
    // so collect its chunks into a Buffer before handing it to Sharp
    const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
    const chunks = [];
    for await (const chunk of Body) chunks.push(chunk);
    const photo = Buffer.concat(chunks);
    const originalPhotoDimensions = await Sharp(photo).metadata();
    console.log(originalPhotoDimensions);
  } catch (err) {
    console.log("Error", err);
  }
};

run();
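The for await loop relies on Node.js readable streams being async iterable (stable since roughly Node 12); each chunk arrives as a Buffer, so Buffer.concat yields the complete object body.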
Constantin Predescu:

A solution inspired by this article: Buffer output from aws-sdk v3 s3 GetObjectCommand

const Sharp = require('sharp');
const {S3Client, GetObjectCommand} = require("@aws-sdk/client-s3");

const S3 = new S3Client({
  region: "eu-west-1",
});

async function getObject(Bucket, Key) {
  // Body is a readable stream in v3
  const { Body: stream } = await S3.send(new GetObjectCommand({ Bucket, Key }));

  // Collect the stream's chunks into a single Buffer
  return new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('data', chunk => chunks.push(chunk));
    stream.once('end', () => resolve(Buffer.concat(chunks)));
    stream.once('error', reject);
  });
}

// Usage, inside an async function (CommonJS modules have no top-level await):
const originalPhoto = await getObject('Bucket', 'Key');
const originalPhotoDimensions = await Sharp(originalPhoto).metadata();

Raph:

For those in search of a working solution, you need to return a buffer and pass it to sharp:

const Sharp = require('sharp');
const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");

const s3Client = new S3Client({ region: "eu-west-1" });

// Assumed here: the bucket name comes from configuration
const BUCKET = process.env.BUCKET;

async function getFileAsBuffer(fileKey) {
    const params = {
        Bucket: BUCKET,
        Key: fileKey,
    };

    try {
        // Create a helper function to convert a ReadableStream to a buffer.
        const streamToBuffer = (stream) =>
            new Promise((resolve, reject) => {
                const chunks = [];
                stream.on("data", (chunk) => chunks.push(chunk));
                stream.on("error", reject);
                stream.on("end", () => resolve(Buffer.concat(chunks)));
            });

      // Get the object from the Amazon S3 bucket. It is returned as a ReadableStream.
        const data = await s3Client.send(new GetObjectCommand(params));
        return await streamToBuffer(data.Body);
    } catch (err) {
        console.log("Error", err);
    }
}

async function getImageMetaData() {
    const fileBuffer = await getFileAsBuffer('fileKey');
    const metadata = await Sharp(fileBuffer).metadata();
    console.log("metadata:", metadata.format, metadata.height, metadata.width);
}

getImageMetaData();
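On recent versions of @aws-sdk/client-s3 (roughly v3.188 and later), the Body object also exposes transform helpers, which make the hand-rolled stream-to-buffer step unnecessary. A minimal sketch assuming such an SDK version; the bucket and key are placeholders:

const Sharp = require('sharp');
const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");

const S3 = new S3Client({ region: "eu-west-1" });

async function getImageMetadata() {
  const { Body } = await S3.send(new GetObjectCommand({ Bucket: "myBucket", Key: "myKey" }));
  // transformToByteArray() consumes the stream and resolves to a Uint8Array
  const bytes = await Body.transformToByteArray();
  // Sharp accepts a Buffer, so wrap the bytes before reading metadata
  return Sharp(Buffer.from(bytes)).metadata();
}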