Connection resets after 60 seconds in node.js upload application


I've written an application in node.js consisting of a server and a client for storing/uploading files.

For reproduction purposes, here's a proof of concept using a null write stream in the server and a random read stream in the client.

Using node.js 12.19.0 on Ubuntu 18.04. The client depends on node-fetch v2.6.1.

The issue is that after 60 seconds the connection is reset, and I haven't found a way to work around it.

Any ideas are appreciated.

Thank you.

testServer.js

// -- DevNull Start --
var util         = require('util')
  , stream       = require('stream')
  , Writable     = stream.Writable
  , setImmediate = setImmediate || function (fn) { setTimeout(fn, 0) }
  ;
util.inherits(DevNull, Writable);
function DevNull (opts) {
  if (!(this instanceof DevNull)) return new DevNull(opts);
  opts = opts || {};
  Writable.call(this, opts);
}
DevNull.prototype._write = function (chunk, encoding, cb) {
  setImmediate(cb);
}
// -- DevNull End --
const http = require('http');
const server = http.createServer();
server.on('request', async (req, res) => {
  try {
    req.socket.on('end', function() { 
      console.log('SOCKET END: other end of the socket sends a FIN packet');
    });
    req.socket.on('timeout', function() { 
      console.log('SOCKET TIMEOUT');
    });
    req.socket.on('error', function(error) { 
      console.log('SOCKET ERROR: ' + JSON.stringify(error));
    });
    req.socket.on('close', function(had_error) { 
      console.log('SOCKET CLOSED. IT WAS ERROR: ' + had_error);
    });
    const writeStream = DevNull();
    const promise = new Promise((resolve, reject) => {
      req.on('end', resolve);
      req.on('error', reject);
    });
    req.pipe(writeStream);
    await promise;
    res.writeHead(200);
    res.end('OK');
  } catch (err) {
    res.writeHead(500);
    res.end(err.message);
  }
});
server.listen(8081)
  .on('listening', () => { console.log('Listening on port', server.address().port); });

testClient.js

// -- RandomStream Start --
var crypto = require('crypto');
var stream = require('stream');
var util = require('util');
var Readable = stream.Readable;
function RandomStream(length, options) {
  // allow calling with or without new
  if (!(this instanceof RandomStream)) {
    return new RandomStream(length, options);
  }
  // init Readable
  Readable.call(this, options);
  // save the length to generate
  this.lenToGenerate = length;
}
util.inherits(RandomStream, Readable);
RandomStream.prototype._read = function (size) {
  if (!size) size = 1024; // default size
  var ready = true;
  while (ready) { // only cont while push returns true
    if (size > this.lenToGenerate) { // only this left
      size = this.lenToGenerate;
    }
    if (size) {
      ready = this.push(crypto.randomBytes(size));
      this.lenToGenerate -= size;
    }
    // when done, push null and exit loop
    if (!this.lenToGenerate) {
      this.push(null);
      ready = false;
    }
  }
};
// -- RandomStream End --
const fetch = require('node-fetch');
const runSuccess = async () => { // Runs in ~35 seconds
  const t = Date.now();
  try {
    const resp = await fetch('http://localhost:8081/test', {
      method: 'PUT',
      body: new RandomStream(256e6) // new RandomStream(1024e6)
    });
    const data = await resp.text();
    console.log(Date.now() - t, data);
  } catch (err) {
    console.warn(Date.now() - t, err);
  }
};
const runFail = async () => { // Fails after 60 seconds
  const t = Date.now();
  try {
    const resp = await fetch('http://localhost:8081/test', {
      method: 'PUT',
      body: new RandomStream(1024e6)
    });
    const data = await resp.text();
    console.log(Date.now() - t, data);
  } catch (err) {
    console.warn(Date.now() - t, err);
  }
};
// runSuccess().then(() => process.exit(0));
runFail().then(() => process.exit(0));

There are 2 answers

Jorge M. (accepted answer)

This turned out to be a bug in node.js.

Discussion here: https://github.com/nodejs/node/issues/35661
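
Not part of the accepted answer, but a possible server-side mitigation to experiment with, assuming the 60-second reset is enforced by the server's default timeouts (headersTimeout defaults to 60000 ms): raise or disable those timeouts on the HTTP server. A minimal sketch:

const http = require('http');

const server = http.createServer();

// Hedged workaround: relax the server-side limits that default to roughly
// 60 seconds. Whether this avoids the reset depends on the exact behaviour
// described in nodejs/node#35661, so treat it as an experiment.
server.headersTimeout = 10 * 60 * 1000; // allow up to 10 minutes for headers
server.setTimeout(0);                   // 0 disables the per-socket inactivity timeout
server.keepAliveTimeout = 0;            // 0 disables the keep-alive timeout

server.on('request', (req, res) => {
  req.resume(); // drain the upload (stand-in for the DevNull sink above)
  req.on('end', () => {
    res.writeHead(200);
    res.end('OK');
  });
});

server.listen(8081);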

jeeves

I tried (unsuccessfully) to reproduce what you are seeing with your code example: on my machine the success call doesn't complete in ~35 seconds, nor is the error thrown after 60 seconds.

That said, I think what is happening here is that your client is terminating the request. You can increase the timeout by passing a custom http.Agent to the fetch PUT call and setting a timeout on it.

const http = require('http');

...

const runFail = async () => { // Fails after 60 seconds
  const t = Date.now();
  try {
    const resp = await fetch('http://localhost:8081/test', {
      method: 'PUT',
      body: new RandomStream(1024e6),
      agent: new http.Agent({ keepAlive: true, timeout: 300000 })
    });
    const data = await resp.text();
    console.log(Date.now() - t, data);
  } catch (err) {
    console.warn(Date.now() - t, err);
  }
};

See the node-fetch documentation for passing a custom http(s) agent, and the Node.js documentation for the options accepted when creating an http(s) agent.
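
For context (and not part of the answer above): the timeout option on http.Agent sets a socket timeout when each socket is created, so a value like 300000 gives the socket five minutes of inactivity before a timeout fires. node-fetch v2 also accepts a function for agent, called with the parsed request URL, which makes it easy to reuse one agent per protocol. A small sketch, where put is a hypothetical helper:

const http = require('http');
const https = require('https');
const fetch = require('node-fetch');

// One agent per protocol, reused across requests, each with a longer socket timeout.
const httpAgent = new http.Agent({ keepAlive: true, timeout: 300000 });
const httpsAgent = new https.Agent({ keepAlive: true, timeout: 300000 });

async function put(url, body) {
  const resp = await fetch(url, {
    method: 'PUT',
    body,
    // node-fetch v2 calls this function with the parsed URL so you can
    // pick an agent per protocol instead of constructing one per request.
    agent: (parsedURL) => parsedURL.protocol === 'http:' ? httpAgent : httpsAgent
  });
  return resp.text();
}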