aws sdk v3 lib-storage: uploading a 112 GB file crashes Chrome with an out-of-memory error

I have bundled "@aws-sdk/lib-storage" and "@aws-sdk/client-s3" with webpack so I can use them on the frontend.

package.json

{
  "name": "aws-s3-upload",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "build": "webpack"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "@aws-sdk/client-s3": "^3.499.0",
    "@aws-sdk/lib-storage": "^3.499.0"
  },
  "devDependencies": {
    "path-browserify": "^1.0.1",
    "webpack": "^5.90.0",
    "webpack-cli": "^5.1.4"
  }
}

browser.js

// Re-export the S3 client and the multipart Upload helper, and expose them
// on window.AWS so they can be used from a plain <script> tag.
import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";

window.AWS = { S3Client, Upload };
export { S3Client, Upload };

webpack.config.js

// Import path for resolving file paths
var path = require("path");
module.exports = {
  mode: 'production',
  performance: {
    hints: false
  },
  // Specify the entry point for our app.
  entry: [path.join(__dirname, "browser.js")],
  // Specify the output file containing our bundled code.
  output: {
    path: __dirname,
    filename: 'bundle.js'
  },
  // Enable WebPack to use the 'path' package.
  resolve: {
    fallback: { path: require.resolve("path-browserify") }
  }
  /**
  * In webpack versions before 2.0.0, you must tell
  * webpack how to use "json-loader" to load JSON files.
  * To do this, run 'npm install --save-dev json-loader' at the
  * command line to install the "json-loader" package, and include the
  * following entry in your webpack.config.js:
  * module: {
    rules: [{ test: /\.json$/, use: "json-loader" }]
  }
  **/
};

The build produces bundle.js:

npm run build

usage

index.html

<script src="bundle.js"></script>
<script type="module">
  // type="module" lets the script use top-level await
  const s3Client = new AWS.S3Client({
    region: ...,
    endpoint: ...,
    credentials: {
      accessKeyId: ...,
      secretAccessKey: ...,
      sessionToken: ...,
    }
  });

  const uploadParams = {
    Bucket: ...,
    Key: ...,
    Body: file // file from <input type="file">
  };

  const upload = new AWS.Upload({
    client: s3Client,
    params: uploadParams,
    queueSize: 4, // optional concurrency configuration
    partSize: 1024 * 1024 * 5, // optional size of each part
    leavePartsOnError: false, // optional manually handle dropped parts
  });

  upload.on("httpUploadProgress", (progress) => {
    console.log(progress);
  });

  await upload.done();
</script>
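
For reference, this is roughly how `file` is wired up from the file input. It is a minimal sketch: the element id `file-input`, the `"us-east-1"` region, the `"my-bucket"` bucket name and the credential strings are placeholders for illustration, not my real values.

<input type="file" id="file-input">
<script src="bundle.js"></script>
<script type="module">
  const s3Client = new AWS.S3Client({
    region: "us-east-1",            // placeholder
    credentials: {
      accessKeyId: "...",           // placeholder credentials
      secretAccessKey: "...",
      sessionToken: "...",
    }
  });

  // Start the multipart upload when a file is chosen.
  document.getElementById("file-input").addEventListener("change", async (event) => {
    const file = event.target.files[0]; // the File object passed as Body
    const upload = new AWS.Upload({
      client: s3Client,
      params: { Bucket: "my-bucket", Key: file.name, Body: file }, // placeholder bucket
      queueSize: 4,
      partSize: 1024 * 1024 * 5,
      leavePartsOnError: false,
    });
    upload.on("httpUploadProgress", (progress) => console.log(progress));
    await upload.done();
  });
</script>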

If I upload a 112 GB file, Chrome crashes with an out-of-memory error after about 25 GB have been uploaded.
