错误: 类型错误: 无法读取属性"..."的 undefined using .end() 和 fs.createwritestream()



错误

  • 我得到错误:错误:类型错误:无法读取未定义的属性"outstream_for_folders">
  • 我不明白为什么我会犯这个错误。我添加了很多"未定义"的错误检查,但我仍然会收到错误,所以我认为这可能与promise或;批处理fs写流";图书馆
  • 如果查看输出,您可以遵循变量"outstream_for_folders"的所有用法
  • 错误在线路上被捕获~363~~370~

注意:只需在该页面上搜索">~363";或">~370";迅速查找下面的相关代码;

配置:

  • 我使用的是节点版本12。不幸的是,我无法更新服务器以使用新版本的节点
  • 我正在使用">批处理fs写流";版本";1.0.3";。https://www.npmjs.com/package/batching-fs-writestream
  • 可运行代码在https://codesandbox.io/s/error-using-fs-createwritestream-end-2o5rnw?file=/main.js
  • 我为变量outstream_for_folders的所有使用添加了console.log()跟踪点

项目信息

  • 这是一个目录树助行器,用于写入两个输出文件
  1. md5_folders.ndjson保存所有找到的文件夹
  2. md5_files.ndjson保存找到的所有文件夹
  • 我需要使用流,因为这最终会在有很多文件的服务器上运行
  • 稍后,输出的ndjson文件将由另一个应用程序进行第二次处理,以使用md5哈希查找重复文件
  • 则另一个web应用程序将这些重复文件列表呈现给web GUI应用程序以供用户选择用户想要删除或保留哪些重复文件

控制台输出和错误

sandbox@sse-sandbox-2o5rnw:/sandbox$ node main.js
~406      calling main()
~37      calling  outstream_for_files= batching(tempStr)
~63~     calling  outstream_for_files.on(error)...
~86      calling  outstream_for_files.on(finish)...
~393     calling processRecursiveFolder()
~363~ ERROR: TypeError: Cannot read property 'outstream_for_folders' of undefined
~367     calling outstream_for_files.end():
~370~ ERROR: TypeError: Cannot read property 'outstream_for_files' of undefined
~408      Exitting
~283     calling dan_output_json_to_Stream(outstream_for_files...
~120     calling  .write(...)
~283     calling dan_output_json_to_Stream(outstream_for_files...
~120     calling  .write(...)
~283     calling dan_output_json_to_Stream(outstream_for_files...
~120     calling  .write(...)
~283     calling dan_output_json_to_Stream(outstream_for_files...
~120     calling  .write(...)
~283     calling dan_output_json_to_Stream(outstream_for_files...
~120     calling  .write(...)
~283     calling dan_output_json_to_Stream(outstream_for_files...
~120     calling  .write(...)
~120     calling  .write(...)
~120     calling  .write(...)
~120     calling  .write(...)
~120     calling  .write(...)
~120     calling  .write(...)
~120     calling  .write(...)
~120     calling  .write(...)
~120     calling  .write(...)
sandbox@sse-sandbox-2o5rnw:/sandbox$

main.js

//"use strict";
// see https://codesandbox.io/s/error-using-fs-createwritestream-end-2o5rnw?file=/main.js
const fs = require("fs");
const crypto = require("crypto");
//const stream = require('stream');
//const util = require('util');
const path = require("path");
const batching = require("batching-fs-writestream");
var dan_gs = {
outstream_for_folders: null,
outstream_for_files: null,
last_id: 1,
md5hashtable: ["n/a"],
startdirPath: `${path.join(__dirname, "testdir")}`,
pathToStreams: `${path.join(__dirname, "z_md5")}`
};
// =============== dan Utils =========================
//const dan_utils= require('./dan_utils');
function dan_init_output_Streams(gsArg) {
var tempStr = "";
try {
if (!fs.existsSync(gsArg.pathToStreams)) {
fs.mkdirSync(gsArg.pathToStreams, { recursive: true }, (err) => {
console.log(
"~28~ ERROR: cound not mkdirSync(" + gsArg.pathToStreams + ") :" + err
);
throw err;
});
}
//console.log( `~34~ pathToStreams is ` + gsArg.pathToStreams );
tempStr = `${path.join(gsArg.pathToStreams, "md5_folders.ndjson")}`;
console.log("~37      calling  outstream_for_files= batching(tempStr) ");
gsArg.outstream_for_folders = batching(tempStr);
if (gsArg.outstream_for_folders === null) {
console.log("~43~ outstream_for_folders is null() : ");
}
} catch (err) {
console.log(`~46~ ERROR: ` + err);
}
try {
tempStr = `${path.join(gsArg.pathToStreams, "md5_files.ndjson")}`;
//console.log( `~24~ tempStr is ` + tempStr );
gsArg.outstream_for_files = batching(tempStr);
} catch (err) {
console.log(`~44~ ERROR: ` + err);
}
//    -----   -----   -----   -----   -----   -----   -----
try {
if (
typeof gsArg.outstream_for_folders === "undefined" ||
gsArg.outstream_for_folders === undefined ||
gsArg.outstream_for_folders == null
) {
console.log("~61~ ERROR: outstream_for_folders is undefined() : ");
} else {
console.log("~63~     calling  outstream_for_files.on(error)...");
gsArg.outstream_for_folders.on("error", (err) => {
console.log(`~65~ ERROR occured in outstream_for_folders  !` + err);
});
}
gsArg.outstream_for_files.on("error", (err) => {
console.log(`~70~ ERROR: occured in outstream_for_files  !` + err);
});
} catch (err) {
console.log(`~73~ ERROR: ` + err);
}
//    -----   -----   -----   -----   -----   -----   -----
// Once ndjson has flushed all data to the output stream, let's indicate done.
try {
if (
typeof gsArg.outstream_for_folders === "undefined" ||
gsArg.outstream_for_folders === undefined ||
gsArg.outstream_for_folders == null
) {
console.log("~84~ ERROR: outstream_for_folders is undefined() : ");
} else {
console.log("~86      calling  outstream_for_files.on(finish)...");
gsArg.outstream_for_folders.on("finish", function () {
console.log("~88 outstream_for_folders  Finished!");
console.log("- - - - - - - - - - - - - - - - - - - - - - -");
});
}
gsArg.outstream_for_files.on("finish", function () {
console.log("~94 outstream_for_files Finished!");
console.log("- - - - - - - - - - - - - - - - - - - - - - -");
});
} catch (err) {
console.log(`~98~ ERROR: ` + err);
}
return gsArg;
}
async function dan_output_json_to_Stream(outstreamArg, jsonObjOrStr) {
let tempJsonStr = "{}"; 
try {
//console.log( '~108 isobject(): ' + isObject(jsonObjOrStr)  + '  ' + typeof(jsonObjOrStr));
if (isObject(jsonObjOrStr)) {
tempJsonStr = JSON.stringify(jsonObjOrStr);
} else if (
typeof jsonObjOrStr === "string" ||
jsonObjOrStr instanceof String
) {
tempJsonStr = jsonObjOrStr;
} else {
console.log("~118~ ERROR:  " + typeof jsonObjOrStr);
}
console.log("~120     calling  .write(...)  ");
outstreamArg.write(tempJsonStr);
} catch (e) {
console.log("~123~ ERROR: (outstreamArg.write(jsonObj)) " + e);
}
}
function dan_create_json_for_folder(
folderid,
parentid,
pathStr,
foldernameStr,
fs_stat_obj
) {
let jsonObj = {
folderid: folderid,
pid: parentid,
p: pathStr + "\",
n: foldernameStr,
a: fs_stat_obj.atimeMs,
m: fs_stat_obj.mtimeMs,
b: fs_stat_obj.birthtimeMs,
z: "01_init",
x: ""
};
//let jsonStr = JSON.stringify(jsonObj);
//console.log ('n~147~ jsonStr is ' + jsonStr );
return jsonObj;
}
function dan_create_json_for_file(
fileid,
parentid,
pathStr,
filenameStr,
md5_Str,
sh256_Str,
fs_stat_obj
) {
let jsonObj = {
fileid: fileid,
pid: parentid,
p: pathStr,
n: filenameStr,
size: fs_stat_obj.size,
md5: md5_Str,
sha: "",
a: fs_stat_obj.atimeMs,
m: fs_stat_obj.mtimeMs,
b: fs_stat_obj.birthtimeMs,
z: "01_init",
x: "",
dups: "",
flag2Bdeleted: "F"
};
//let jsonStrjsonStr = JSON.stringify(jsonObj);
//console.log ('n~181~ jsonStr is ' + jsonStr );
return jsonObj;
}
function dan_calculateMD5(filePath) {
return new Promise((resolve, reject) => {
let hash = crypto.createHash("md5");
//console.log( `~190~ dan_calculateMD5( ` + filePath + ')'  );
let stream = fs.createReadStream(filePath);
stream.on("error", function (err) {
reject(err);
});
stream.on("data", function (data) {
hash.update(data, "utf8");
});
stream.on("end", function () {
stream.close();
// Return ( hash value in hex)
resolve(hash.digest("hex"));
});
});
}
function dan_validateMD5(hash) {
return RegExp("^[a-f0-9]{32}$", "gm").test(hash);
}
//https://codeburst.io/node-js-fs-module-write-streams-657cdbcc3f47
function isObject(objValue) {
return (
objValue && typeof objValue === "object" && objValue.constructor === Object
);
}
// =============== end of dan Util =========================
// SEE  https://stackoverflow.com/questions/66758627/calculating-md5-hashes-of-multiple-files
//
// also see https://stackoverflow.com/questions/5827612/node-js-fs-readdir-recursive-directory-search/5827895#5827895
//
// for fs.stat() - https://www.brainbell.com/javascript/fs-stats-structure.html
//      npm install util
//      npm install path
//      npm install stream
//      npm install batching-fs-writestream
//
//      cd D:Program Filesnodejsdan_nodejs
//      node dan_dir_to_json.js
function processRecursiveFolder(parent_id, routePath) {
//var fileName;
fs.readdir(routePath, (err, files) => {
// Get files in path
files.forEach((item) => {
let filePath = path.join(routePath, item); // Join root dir with path of folder
//console.log ('n~35~ filePath is ' + filePath );
//console.log ('~36~ routePath is ' + routePath );
//console.log ('~37~ item is ' + item );
fs.stat(filePath, async (err, stat) => {
// Get stats of dir
//console.log ('n~80~ fs.stat() is ' + JSON.stringify(stat) );
if (stat.isDirectory()) {
// If dir is folder, run recursively
try {
//console.log (`n~83~ DIR: $(routePath) is a directory, recurse`);
dan_gs.last_id = dan_gs.last_id + 1;
let tempJsonObj1 = dan_create_json_for_folder(
dan_gs.last_id,
parent_id,
routePath,
item,
stat
);
let tempJsonStr1 = JSON.stringify(tempJsonObj1) + "n";
//console.log( "~62~" + tempJsonStr );
if (
dan_gs.outstream_for_folders === undefined ||
typeof dan_gs.outstream_for_folders === "undefined" ||
dan_gs.outstream_for_folders == null
) {
console.log(
"~279~ ERROR: outstream_for_folders is undefined() : "
);
} else {
console.log(
"~283     calling dan_output_json_to_Stream(outstream_for_files..."
);
dan_output_json_to_Stream(
dan_gs.outstream_for_folders,
tempJsonStr1
);
}
processRecursiveFolder(dan_gs.last_id, filePath);
} catch (err) {
console.log(`~72~ ERROR: ` + err);
}
} else {
// Continue
let fileName = path
.basename(filePath)
.replace(path.extname(filePath), ""); // Get fileName without extension
//console.log ('n~89~ ' + item + ' is a file, calc MD5  ' + fileName );
await dan_calculateMD5(filePath)
.then(function (fileHash) {
if (dan_validateMD5(fileHash)) {
try {
dan_gs.last_id = dan_gs.last_id + 1;
//console.log('n~88 File: ' + filePath );
//console.log('n~88 Hash: ~IS~ ' + fileHash );
let tempJsonObj2 = dan_create_json_for_file(
dan_gs.last_id,
parent_id,
routePath,
item,
fileHash,
"",
stat
);
let tempJsonStr2 = JSON.stringify(tempJsonObj2) + "n";
//console.log( "~94~" + tempJsonStr );
dan_output_json_to_Stream(
dan_gs.outstream_for_files,
tempJsonStr2
);
dan_gs.md5hashtable.push(fileHash);
} catch (err) {
console.log(`~100~ ERROR: ` + err);
}
} else {
throw new Error(
"~103~ ERROR: Unable to calculate hash for file: " +
fileName +
"nError: " +
fileHash +
"n"
);
}
})
.catch(function (err) {
throw new Error("~107~ ERROR: " + err + "n");
});
}
});
});
});
}
function dan_close_output_Streams(gsArg) {
//    -----   -----   -----   -----   -----   -----   -----
// Once we've written each record in the record-set, we have to end the stream so that
// the  stream knows to flush and close the file output stream
try {
if (
typeof gsArg.outstream_for_folders === "undefined" ||
gsArg.outstream_for_folders === undefined ||
gsArg.outstream_for_folders == null
) {
console.log("~357~ ERROR: outstream_for_folders is undefined() : ");
} else {
console.log("~359~ calling outstream_for_files.end() : ");
gsArg.outstream_for_folders.end();
}
} catch (err) {
console.log(`~363~ ERROR: ` + err);
}
try {
console.log("~367     calling outstream_for_files.end() : ");
gsArg.outstream_for_files.end();
} catch (err) {
console.log(`~370~ ERROR: ` + err);
}
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// - - - - - - - -      main()       - - - - - - - - - - - - - -
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
async function main() {
try {
dan_gs = dan_init_output_Streams(dan_gs);
if (
typeof dan_gs.outstream_for_folders === "undefined" ||
dan_gs.outstream_for_folders === undefined ||
dan_gs.outstream_for_folders == null
) {
console.log("~386~ ERROR: outstream_for_folders is undefined() : ");
}
} catch (err) {
console.log(`~389~ ERROR: ` + err);
}
try {
console.log("~393     calling processRecursiveFolder()");
processRecursiveFolder(100001, dan_gs.startdirPath);
} catch (err) {
console.log(`~400~ ERROR: ` + err);
}
try {
dan_close_output_Streams();
} catch (err) {
console.log(`~402~ ERROR: ` + err);
}
}
console.log("~406      calling main()");
main();
console.log("~408      Exitting");

您正在使用未定义(缺少)的参数调用函数dan_close_output_Streams。要修复,请转到第401行并更改

dan_close_output_Streams();

带有

dan_close_output_Streams(dan_gs);

最新更新