NodeJS fs Stats.isFile() not defined - node.js

I'm new to NodeJS and I#m trying to read a directory recursively this is my code
var fs = require('fs');
var readDir = function (dir, calback) {
fs.readdir(dir, function (err, files) {
if (err)
console.log(err);
for (var file in files) {
fs.stat(dir + "/" + file, function (err, stats) {
if (err)
console.log(err);
if (stats.isFile()) {
calback(dir + "/" + file);
}
if (stats.isDirectory()) {
walk(file, calback);
}
});
}
});
};
This is my ErrorMessage
C:\Users\Lukas\Desktop\Enide-Studio-05-kepler-win32\ws\PlayerTest\hello-world-server.js:24
if (fs.stats.isFile()) {
^
TypeError: Cannot call method 'isFile' of undefined
at C:\Users\Lukas\Desktop\Enide-Studio-05-kepler-win32\ws\PlayerTest\hello-world-server.js:24:30
at Object.oncomplete (fs.js:107:15)
What is my mistake??

You are not exiting your function after encountering an error.
fs.stat(dir + "/" + file, function (err, stats) {
if (err) {
console.log(err);
return; // exit here since stats will be undefined
}
if (stats.isFile()) {
calback(dir + "/" + file);
}
if (stats.isDirectory()) {
walk(file, calback);
}
});

Related

TypeError: res.json is not a function when using require('fs');

Trying to pass contents for files I am reading via res.json. I think I am over writing my res function, but I dont see a fix.
app.get('/uploads/', (res, req) => {
dirname = './client/uploads'
fs.readdir(dirname, function(err, filenames) {
console.log(filenames)
if (err) {
console.log(err);
return;
}
filenames.forEach(function(filename) {
if (filename != '.DS_Store'){
fs.readFile(dirname + "/" + filename, 'utf-8', function(err, content) {
res.json({content: content})
if (err) {
//onError(err);
console.log(err)
return;
}
});
}
});
});
})
You mis-matched the arguments of /uploads route handler, req is the first argument
app.get('/uploads/', (req, res) => {
//...
})

Lambda Function Error : EROFS: read-only file system, open './tmp/test.zip' Process exited before completing request

I have download a zip file from s3 bucket then extracting the zip file
and finally upload one file to s3 bucket in Lambda function using
Node JS.But am getting the error
==> Error: EROFS: read-only file system, open './tmp/test.zip'
"Process exited before completing> request"
exports.handler = function (callback) {
downloadZipFile(params, downloadPath, function (err) {
if (err) {
callback(err);
} else {
processZipFile(downloadPath, function (err) {
if (err) {
callback(err);
} else {
callback(null);
}
});
}
});
};
function downloadZipFile(params, downloadPath, callback) {
const file = fs.createWriteStream(downloadPath);
s3.getObject(params)
.on('httpData', function (chunk) {
file.write(chunk);
})
.on('success', function () {
callback(null);
})
.on('error', function (err) {
callback(err);
})
.on('complete', function () {
file.end();
})
.send();
}
function processZipFile(filePath) {
const stats = fs.statSync(filePath)
const fileSizeInBytes = stats.size
if (fileSizeInBytes > 0) {
var srcPath = filePath;
var destPath = "./tmp";
targz.decompress({
src: srcPath,
dest: destPath
}, function (err) {
if (err) {
console.log(err);
} else {
console.log("Done!");
UploadFile();
}
});
}
}
function UploadFile() {
var body = fs.createReadStream('./tmp/SampleFile.txt')
var srcfileKey = "SampleFile.txt";
// Upload the stream
var s3obj = new AWS.S3({ params: { Bucket: bucketName, Key: srcfileKey } });
s3obj.upload({ Body: body }, function (err, data) {
if (err) {
console.log("An error occurred", err);
}
console.log("Uploaded the file at", data.Location);
})
}
You need to change the file path to just /tmp instead of ./tmp. Lambda only allows you to write to the /tmp directory.

Cant set headers after they are sent node.js

I am trying to combine multiple textfiles,convert them in a single zip file using zip archiver.
exports.downloadFilesInZip = function(req, res, next) {
var respObj = {};
var file_names = [];
var projectId = 111;
var file_ids = 11111;
console.log(projectId);
db.getConnection(function (err, connection) {
if (err) {
debug(err);
next(err);
}
else {
var updateQuery = "select data from file_data where file_id IN (?)";
console.log(updateQuery);
connection.query(updateQuery,[file_ids], function (err, results) {
console.log("inside" + updateQuery);
if (err) {
connection.release();
console.log("error" + JSON.stringify(err));
debug(err);
next(err);
}
else {
async.eachSeries(results,function(item,loopCallBack){
var text = "";
console.log("hllllllll");
console.log(item.data);
console.log(JSON.parse(item.data));
document_text = JSON.parse(item.data);
console.log("dssddssdsdsdsdsd"+document_text);
for(var j=0; j < document_text.length ;j++)
{
text += document_text[j]['text'];
}
//file_names.push(convertStringToTextFile(text));
convertStringToTextFile(text,function(err,file_name){
if(err){
console.log(err);
loopCallBack(err);
}
else {
file_names.push(file_name);
loopCallBack();
}
})
},function(err){
if(err){
console.log(err);
next(err);
}
else {
var updateQuery = "select name from project where id in (?)";
console.log(updateQuery);
connection.query(updateQuery,[projectId], function (err, results) {
console.log("inside" + updateQuery);
connection.release();
if (err) {
console.log("error" + JSON.stringify(err));
debug(err);
next(err);
}
else {
var fileName_link = JSON.stringify(results[0].name);
console.log("projectname"+fileName_link);
convertTextFilesToZip(file_names,fileName_link, function (err, filename) {
if (err) {
console.log(err);
next(err);
}
else {
console.log("filename link" + filename);
res.json({
status: 0,
file_link: filename
});
}
});
}
});
}
});
}
});
}
});
}
}
convertStringToTextFile = function(text,cb){
var json_filename = 'tmp/file_'+uuid.v4().replace('-','')+'.txt';
fs.writeFile(json_filename, text , function (err) {
if (err) {
debug(err);
cb(err);
}
else{
cb(null,json_filename);
}
});
};
convertTextFilesToZip = function(textFiles,file_link,cb){
console.log("textfiles"+textFiles);
var filename = 'reports/'+JSON.parse(file_link)+'_extractedText.zip';
var output = fs.createWriteStream(filename);
output.on('close', function() {
console.log(zipArchive.pointer() + ' total bytes');
console.log('archiver has been finalized and the output file descriptor has closed.');
});
zipArchive.on('error', function(err) {
cb(err);
});
zipArchive.pipe(output);
zipArchive.bulk([
{ expand: true, src: textFiles }
]);
zipArchive.finalize();
cb(null,filename);
}
It works okay the first time and after that it throws this error.I have checked other posts in which res is returned twice but i couldn't find it.It says that can't set headers after they are sent.I think the problem is in the convertTextFilesToZip function but i cant seem to pinpoint the exact location which is generating the error.ANy help is appreciated.
Error: Can't set headers after they are sent.
at ServerResponse.OutgoingMessage.setHeader (_http_outgoing.js:350:11)
at ServerResponse.header (/Users/zeeshandar/Desktop/Agreements_info/agreements_info/node_modules/express/lib/response.js:700:10)
at ServerResponse.send (/Users/zeeshandar/Desktop/Agreements_info/agreements_info/node_modules/express/lib/response.js:154:12)
at fn (/Users/zeeshandar/Desktop/Agreements_info/agreements_info/node_modules/express/lib/response.js:934:10)
at View.exports.renderFile [as engine] (/Users/zeeshandar/Desktop/Agreements_info/agreements_info/node_modules/jade/lib/index.js:374:12)
at View.render (/Users/zeeshandar/Desktop/Agreements_info/agreements_info/node_modules/express/lib/view.js:93:8)
at EventEmitter.app.render (/Users/zeeshandar/Desktop/Agreements_info/agreements_info/node_modules/express/lib/application.js:566:10)
at ServerResponse.res.render (/Users/zeeshandar/Desktop/Agreements_info/agreements_info/node_modules/express/lib/response.js:938:7)
at /Users/zeeshandar/Desktop/Agreements_info/agreements_info/app.js:207:13
at Layer.handle_error (/Users/zeeshandar/Desktop/Agreements_info/agreements_info/node_modules/express/li b/router/layer.js:58:5)
Making my comment into an answer since it appears to have led to the solution.
The variable zipArchive is not initialized in convertTextFilesToZip() therefore you are reusing that variable from one function call to the next and that seems unlikely to be the right implementation.
Also, I would expect your method calls to zipArchive to be asynchronous and it doesn't look like your are coding for that since the callback is called before you have any sort of completion notification.

Node async waterfall callback was already called

I'm trying to read some files from dir with async.waterfall, it seems to me that I'm doing stuff right, but I get the specified error and the readData function is never called. What's wrong?
var fs = require("fs");
var async = require("async");
var folder = "./files/";
try {
async.waterfall([
function readDir(cb) {
fs.readdir(folder, function(err, files) {
cb(err, files);
});
},
function loopFiles(files, cb) {
files.forEach(function(fn) {
console.log("loop " + fn);
cb(null, fn);
});
},
function check(fn, cb) {
console.log("check "+fn);
fs.stat(folder + fn, function(err, stats) {
console.log(stats.isFile());
cb(err, stats, fn);
});
},
function readData(stats, fn, cb) {
console.log("read "+fn);
if (stats.isFile()) {
fs.readFile(folder + fn, "utf-8", function(err, data) {
cb(err, data);
});
}
}
], function(err, result) {
if (err) {
throw err;
}
console.log(result);
});
} catch (err) {
console.log(err);
}
The problem is that you're calling cb(null, fn) in loopFiles() multiple times if files.length > 1. You will probably need to perform a separate async.waterfall() or use some other async.* method for each file.
One other problem is in readData() where you aren't calling cb() in the case that stats.isFile() evaluates to false.

async.each confusion in NodeJS

I'm taking my first steps in NodeJS, and I'm having an issue with the async-module. I had the following code which works fine:
var http = require('http');
var fs = require('fs');
var async = require('async');
function load_albums(callback) {
fs.readdir("albums", function(err, content) {
console.log(content);
if(err) {
callback(err);
return;
}
var directories = [];
(function iterator(index) {
if(index == content.length) {
callback(null, directories);
return;
}
fs.stat("albums/" + content[index], function(err, stats) {
if (err) {
callback(err);
}
if(stats.isDirectory()) {
directories.push(content[index]);
}
console.log(index);
iterator(index + 1);
});
})(0);
});
}
function handle_request(request, response) {
load_albums(function(err, albums) {
if (err) {
response.writeHead(503, {"Content-Type": "application/json"});
response.end(JSON.stringify(err) + "\n");
return;
}
var out = { error: null,
data: { albums: albums}};
response.writeHead(200, { "Content-Type" : "application/json" });
response.end(JSON.stringify(out) + "\n");
});
}
var s = http.createServer(handle_request);
s.listen(8080);
This works fine, and gives the expected output:
{"error":null,"data":{"albums":["testdir1","testdir2"]}}
However, I intended to replace the iterator with the async.each function.
I ended up with this:
function load_albums(callback) {
fs.readdir("albums", function(err, content) {
console.log(content);
if(err) {
callback(err);
return;
}
var directories = [];
async.each(content, function(item, callback2) {
fs.stat("albums/" + item, function(err,stats) {
if(stats.isDirectory()) {
directories.push(item);
}
callback2();
});
});
callback(null, directories);
});
}
However, this doesn't seem to work, as "albums" seems to be empty now:
{"error":null,"data":{"albums":[]}}
What am I missing here? I guess it has something to do with calling the fs.stats() function, but I'm unsure about what I'm doing wrong.
async.each() takes three arguments. You are not passing the last one which is the one that tells you when it is done. You also haven't implemented error handling on fs.stat(). You can change to this:
function load_albums(callback) {
fs.readdir("albums", function(err, content) {
console.log(content);
if(err) {
callback(err);
return;
}
var directories = [];
async.each(content, function(item, callback2) {
fs.stat("albums/" + item, function(err,stats) {
if (!err && stats.isDirectory()) {
directories.push(item);
}
callback2(err);
});
}, function(err) {
callback(err, directories);
});
});
}
As answered by #jfriend00 final callback is third parameter of asyn.each. Currently this callback is running without waiting for async.each to complete.
Also you're serving albums for all the request. They should be served on a particular resource URL like /albums or /albums/
I have made these modifications to the code, now it loads albums on http://localhost:8080/albums otherwise it returns 'No Content.
var http = require('http');
var fs = require('fs');
var async = require('async');
function load_albums(loadCompleteCallback) {
fs.readdir("albums", function(err, content) {
console.log(content);
if(err) {
callback(err);
return;
}
var directories = [];
async.each(content, function(item, doneCallback) {
fs.stat("albums/" + item, function(err,stats) {
if(stats.isDirectory()) {
directories.push(item);
}
return doneCallback(err);
});
}
, function (err) {
loadCompleteCallback(err, directories);
});
});
}
function handle_request(request, response) {
console.log("requested path: " + request.url);
if(request.url.match(/^\/albums[\/]?$/) ) {
load_albums(function(err, albums) {
if (err) {
response.writeHead(503, {"Content-Type": "application/json"});
response.end(JSON.stringify(err) + "\n");
return;
}
var out = { error: null,
data: { albums: albums}};
response.writeHead(200, { "Content-Type" : "application/json" });
response.end(JSON.stringify(out) + "\n");
});
} else {
response.writeHead(200, { "Content-Type" : "application/json" });
response.end("No Content\n");
}
}
var s = http.createServer(handle_request);
s.listen(8080);
console.log("server running at : http://localhost:" + 8080);

Resources