ERROR
- I get the error: ERROR: TypeError: Cannot read property 'outstream_for_folders' of undefined
- I don't understand why I am getting this error. I added lots of error checking for 'undefined', but I still get the error, so I think it may have something to do with promises or the "batching-fs-writestream" library.
- If you look at the output you can follow all the uses of the variable 'outstream_for_folders'
- error is caught on lines; ~363~ and ~370~
Note: just search this page for "~363" or "~370" to quickly find the relevant code below;
CONFIGURATION:
- I am using node version 12. Unfortunately, I can not update the server to use a new version of node.
- I am using "batching-fs-writestream" version "1.0.3". https://www.npmjs.com/package/batching-fs-writestream
- Runnable Code is available up on https://codesandbox.io/s/error-using-fs-createwritestream-end-2o5rnw?file=/main.js
- I added console.log() trace points to all uses of the variable outstream_for_folders
Project Info
- This is a directory tree walker, that writes two output files;
- md5_folders.ndjson holding all the folders found
- md5_files.ndjson holding all the folders found
- I need to use a stream, because this will eventually be run on a server with lots of files.
- later the output ndjson files will be processed by another app a second time to find duplicate files using md5 hash
- then another web app will present these duplicate file list to a web GUI app for the user to pick which duplicate files the user wants to delete or keep.
Console output and ERRORS
sandbox@sse-sandbox-2o5rnw:/sandbox$ node main.js
~406 calling main()
~37 calling outstream_for_files= batching(tempStr)
~63~ calling outstream_for_files.on(error)...
~86 calling outstream_for_files.on(finish)...
~393 calling processRecursiveFolder()
~363~ ERROR: TypeError: Cannot read property 'outstream_for_folders' of undefined
~367 calling outstream_for_files.end():
~370~ ERROR: TypeError: Cannot read property 'outstream_for_files' of undefined
~408 Exitting
~283 calling dan_output_json_to_Stream(outstream_for_files...
~120 calling .write(...)
~283 calling dan_output_json_to_Stream(outstream_for_files...
~120 calling .write(...)
~283 calling dan_output_json_to_Stream(outstream_for_files...
~120 calling .write(...)
~283 calling dan_output_json_to_Stream(outstream_for_files...
~120 calling .write(...)
~283 calling dan_output_json_to_Stream(outstream_for_files...
~120 calling .write(...)
~283 calling dan_output_json_to_Stream(outstream_for_files...
~120 calling .write(...)
~120 calling .write(...)
~120 calling .write(...)
~120 calling .write(...)
~120 calling .write(...)
~120 calling .write(...)
~120 calling .write(...)
~120 calling .write(...)
~120 calling .write(...)
sandbox@sse-sandbox-2o5rnw:/sandbox$
main.js
//"use strict";
// see https://codesandbox.io/s/error-using-fs-createwritestream-end-2o5rnw?file=/main.js
const fs = require("fs");
const crypto = require("crypto");
//const stream = require('stream');
//const util = require('util');
const path = require("path");
const batching = require("batching-fs-writestream");
var dan_gs = {
outstream_for_folders: null,
outstream_for_files: null,
last_id: 1,
md5hashtable: ["n/a"],
startdirPath: `${path.join(__dirname, "testdir")}`,
pathToStreams: `${path.join(__dirname, "z_md5")}`
};
// =============== dan Utils =========================
//const dan_utils= require('./dan_utils');
function dan_init_output_Streams(gsArg) {
var tempStr = "";
try {
if (!fs.existsSync(gsArg.pathToStreams)) {
fs.mkdirSync(gsArg.pathToStreams, { recursive: true }, (err) => {
console.log(
"~28~ ERROR: cound not mkdirSync(" + gsArg.pathToStreams + ") :" + err
);
throw err;
});
}
//console.log( `~34~ pathToStreams is ` + gsArg.pathToStreams );
tempStr = `${path.join(gsArg.pathToStreams, "md5_folders.ndjson")}`;
console.log("~37 calling outstream_for_files= batching(tempStr) ");
gsArg.outstream_for_folders = batching(tempStr);
if (gsArg.outstream_for_folders === null) {
console.log("~43~ outstream_for_folders is null() : ");
}
} catch (err) {
console.log(`~46~ ERROR: ` + err);
}
try {
tempStr = `${path.join(gsArg.pathToStreams, "md5_files.ndjson")}`;
//console.log( `~24~ tempStr is ` + tempStr );
gsArg.outstream_for_files = batching(tempStr);
} catch (err) {
console.log(`~44~ ERROR: ` + err);
}
// ----- ----- ----- ----- ----- ----- -----
try {
if (
typeof gsArg.outstream_for_folders === "undefined" ||
gsArg.outstream_for_folders === undefined ||
gsArg.outstream_for_folders == null
) {
console.log("~61~ ERROR: outstream_for_folders is undefined() : ");
} else {
console.log("~63~ calling outstream_for_files.on(error)...");
gsArg.outstream_for_folders.on("error", (err) => {
console.log(`~65~ ERROR occured in outstream_for_folders !` + err);
});
}
gsArg.outstream_for_files.on("error", (err) => {
console.log(`~70~ ERROR: occured in outstream_for_files !` + err);
});
} catch (err) {
console.log(`~73~ ERROR: ` + err);
}
// ----- ----- ----- ----- ----- ----- -----
// Once ndjson has flushed all data to the output stream, let's indicate done.
try {
if (
typeof gsArg.outstream_for_folders === "undefined" ||
gsArg.outstream_for_folders === undefined ||
gsArg.outstream_for_folders == null
) {
console.log("~84~ ERROR: outstream_for_folders is undefined() : ");
} else {
console.log("~86 calling outstream_for_files.on(finish)...");
gsArg.outstream_for_folders.on("finish", function () {
console.log("~88 outstream_for_folders Finished!");
console.log("- - - - - - - - - - - - - - - - - - - - - - -");
});
}
gsArg.outstream_for_files.on("finish", function () {
console.log("~94 outstream_for_files Finished!");
console.log("- - - - - - - - - - - - - - - - - - - - - - -");
});
} catch (err) {
console.log(`~98~ ERROR: ` + err);
}
return gsArg;
}
async function dan_output_json_to_Stream(outstreamArg, jsonObjOrStr) {
let tempJsonStr = "{}";
try {
//console.log( '~108 isobject(): ' + isObject(jsonObjOrStr) + ' ' + typeof(jsonObjOrStr));
if (isObject(jsonObjOrStr)) {
tempJsonStr = JSON.stringify(jsonObjOrStr);
} else if (
typeof jsonObjOrStr === "string" ||
jsonObjOrStr instanceof String
) {
tempJsonStr = jsonObjOrStr;
} else {
console.log("~118~ ERROR: " + typeof jsonObjOrStr);
}
console.log("~120 calling .write(...) ");
outstreamArg.write(tempJsonStr);
} catch (e) {
console.log("~123~ ERROR: (outstreamArg.write(jsonObj)) " + e);
}
}
function dan_create_json_for_folder(
folderid,
parentid,
pathStr,
foldernameStr,
fs_stat_obj
) {
let jsonObj = {
folderid: folderid,
pid: parentid,
p: pathStr + "\\",
n: foldernameStr,
a: fs_stat_obj.atimeMs,
m: fs_stat_obj.mtimeMs,
b: fs_stat_obj.birthtimeMs,
z: "01_init",
x: ""
};
//let jsonStr = JSON.stringify(jsonObj);
//console.log ('\n~147~ jsonStr is ' + jsonStr );
return jsonObj;
}
function dan_create_json_for_file(
fileid,
parentid,
pathStr,
filenameStr,
md5_Str,
sh256_Str,
fs_stat_obj
) {
let jsonObj = {
fileid: fileid,
pid: parentid,
p: pathStr,
n: filenameStr,
size: fs_stat_obj.size,
md5: md5_Str,
sha: "",
a: fs_stat_obj.atimeMs,
m: fs_stat_obj.mtimeMs,
b: fs_stat_obj.birthtimeMs,
z: "01_init",
x: "",
dups: "",
flag2Bdeleted: "F"
};
//let jsonStrjsonStr = JSON.stringify(jsonObj);
//console.log ('\n~181~ jsonStr is ' + jsonStr );
return jsonObj;
}
function dan_calculateMD5(filePath) {
return new Promise((resolve, reject) => {
let hash = crypto.createHash("md5");
//console.log( `~190~ dan_calculateMD5( ` + filePath + ')' );
let stream = fs.createReadStream(filePath);
stream.on("error", function (err) {
reject(err);
});
stream.on("data", function (data) {
hash.update(data, "utf8");
});
stream.on("end", function () {
stream.close();
// Return ( hash value in hex)
resolve(hash.digest("hex"));
});
});
}
function dan_validateMD5(hash) {
return RegExp("^[a-f0-9]{32}$", "gm").test(hash);
}
//https://codeburst.io/node-js-fs-module-write-streams-657cdbcc3f47
function isObject(objValue) {
return (
objValue && typeof objValue === "object" && objValue.constructor === Object
);
}
// =============== end of dan Util =========================
// SEE https://stackoverflow.com/questions/66758627/calculating-md5-hashes-of-multiple-files
//
// also see https://stackoverflow.com/questions/5827612/node-js-fs-readdir-recursive-directory-search/5827895#5827895
//
// for fs.stat() - https://www.brainbell.com/javascript/fs-stats-structure.html
// npm install util
// npm install path
// npm install stream
// npm install batching-fs-writestream
//
// cd D:\Program Files\nodejs\dan_nodejs
// node dan_dir_to_json.js
function processRecursiveFolder(parent_id, routePath) {
//var fileName;
fs.readdir(routePath, (err, files) => {
// Get files in path
files.forEach((item) => {
let filePath = path.join(routePath, item); // Join root dir with path of folder
//console.log ('\n~35~ filePath is ' + filePath );
//console.log ('~36~ routePath is ' + routePath );
//console.log ('~37~ item is ' + item );
fs.stat(filePath, async (err, stat) => {
// Get stats of dir
//console.log ('\n~80~ fs.stat() is ' + JSON.stringify(stat) );
if (stat.isDirectory()) {
// If dir is folder, run recursively
try {
//console.log (`\n~83~ DIR: $(routePath) is a directory, recurse`);
dan_gs.last_id = dan_gs.last_id + 1;
let tempJsonObj1 = dan_create_json_for_folder(
dan_gs.last_id,
parent_id,
routePath,
item,
stat
);
let tempJsonStr1 = JSON.stringify(tempJsonObj1) + "\n";
//console.log( "~62~" + tempJsonStr );
if (
dan_gs.outstream_for_folders === undefined ||
typeof dan_gs.outstream_for_folders === "undefined" ||
dan_gs.outstream_for_folders == null
) {
console.log(
"~279~ ERROR: outstream_for_folders is undefined() : "
);
} else {
console.log(
"~283 calling dan_output_json_to_Stream(outstream_for_files..."
);
dan_output_json_to_Stream(
dan_gs.outstream_for_folders,
tempJsonStr1
);
}
processRecursiveFolder(dan_gs.last_id, filePath);
} catch (err) {
console.log(`~72~ ERROR: ` + err);
}
} else {
// Continue
let fileName = path
.basename(filePath)
.replace(path.extname(filePath), ""); // Get fileName without extension
//console.log ('\n~89~ ' + item + ' is a file, calc MD5 ' + fileName );
await dan_calculateMD5(filePath)
.then(function (fileHash) {
if (dan_validateMD5(fileHash)) {
try {
dan_gs.last_id = dan_gs.last_id + 1;
//console.log('\n~88 File: ' + filePath );
//console.log('\n~88 Hash: ~IS~ ' + fileHash );
let tempJsonObj2 = dan_create_json_for_file(
dan_gs.last_id,
parent_id,
routePath,
item,
fileHash,
"",
stat
);
let tempJsonStr2 = JSON.stringify(tempJsonObj2) + "\n";
//console.log( "~94~" + tempJsonStr );
dan_output_json_to_Stream(
dan_gs.outstream_for_files,
tempJsonStr2
);
dan_gs.md5hashtable.push(fileHash);
} catch (err) {
console.log(`~100~ ERROR: ` + err);
}
} else {
throw new Error(
"~103~ ERROR: Unable to calculate hash for file: " +
fileName +
"\nError: " +
fileHash +
"\n"
);
}
})
.catch(function (err) {
throw new Error("~107~ ERROR: " + err + "\n");
});
}
});
});
});
}
function dan_close_output_Streams(gsArg) {
// ----- ----- ----- ----- ----- ----- -----
// Once we've written each record in the record-set, we have to end the stream so that
// the stream knows to flush and close the file output stream
try {
if (
typeof gsArg.outstream_for_folders === "undefined" ||
gsArg.outstream_for_folders === undefined ||
gsArg.outstream_for_folders == null
) {
console.log("~357~ ERROR: outstream_for_folders is undefined() : ");
} else {
console.log("~359~ calling outstream_for_files.end() : ");
gsArg.outstream_for_folders.end();
}
} catch (err) {
console.log(`~363~ ERROR: ` + err);
}
try {
console.log("~367 calling outstream_for_files.end() : ");
gsArg.outstream_for_files.end();
} catch (err) {
console.log(`~370~ ERROR: ` + err);
}
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// - - - - - - - - main() - - - - - - - - - - - - - -
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
async function main() {
try {
dan_gs = dan_init_output_Streams(dan_gs);
if (
typeof dan_gs.outstream_for_folders === "undefined" ||
dan_gs.outstream_for_folders === undefined ||
dan_gs.outstream_for_folders == null
) {
console.log("~386~ ERROR: outstream_for_folders is undefined() : ");
}
} catch (err) {
console.log(`~389~ ERROR: ` + err);
}
try {
console.log("~393 calling processRecursiveFolder()");
processRecursiveFolder(100001, dan_gs.startdirPath);
} catch (err) {
console.log(`~400~ ERROR: ` + err);
}
try {
dan_close_output_Streams();
} catch (err) {
console.log(`~402~ ERROR: ` + err);
}
}
console.log("~406 calling main()");
main();
console.log("~408 Exitting");
You are calling the function
dan_close_output_Streams
with an undefined (missing) argument. To fix, go to line 401 and changewith