Node.js: return console.log('...')?

I was looking at how to write files in node, and I found this block of code:
var fs = require('fs');
fs.writeFile("/tmp/test", "Hey there!", function(err) {
  if (err) {
    return console.log(err);
  }
  console.log("The file was saved!");
});
Now, inside the if(err){} block, where is this console.log(err) being returned to? How does the error handling work in node?

The return value isn't going anywhere useful; the return is there only to exit the callback early, breaking out of the rest of its logic.
Error handling in Node is mostly callback-based, as you see here.
For example:
var fs = require('fs');
fs.writeFile("/tmp/test", "Hey there!", function(err) {
  if (err) {
    /* Handle error appropriately */
  } else {
    /* Code that relies on /tmp/test to exist. */
  }
});

So basically, you are telling fs.writeFile to call this function when it is finished:
function(err) {
  if (err) {
    return console.log(err);
  }
  console.log("The file was saved!");
}
Normally the callbacks do not care about what you return, so the return in the error case just means the code does not proceed, and the second console.log is never printed.
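In other words, return console.log(err) is just a compact way to log and then bail out of the callback; console.log returns undefined, and nothing inspects the callback's return value anyway. The two forms below behave identically:

if (err) {
  return console.log(err); // log, then exit the callback in one line
}

// ...is equivalent to:
if (err) {
  console.log(err);
  return; // the returned value is ignored by fs.writeFile
}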

Related

Returning a 404 code the proper way instead of an empty array

I have a quite simple application: the idea is that a user has a unique code whose value is stored in one Mongo collection; in another we keep some data which we need to return if the key was found in the first collection.
As you have probably noticed, I'm using Node.js with MongoDB, Mongoose, and Express.
I have a problem with the method below:
exports.getCompanyByKey = function(req, res) {
  console.log(req.params.keyvalue);
  var query = Company.where({keyValue: req.params.keyvalue});
  query.findOne(function(err, company) {
    if (err) {
      res.send(err);
    } else {
      SampleData.findOne({}, function(err, sample_data) {
        if (err)
          res.send(err);
        res.json(sample_data);
      });
    }
  });
};
The problem is that it will always return the data, because on a miss it doesn't throw an error but returns an empty result. So is there a good and proper way to throw a 404 error without a statement such as if (length < 1) res.status(404).send('Error message')?
I simply want to minimize the number of if statements.
Maybe there is some other way to implement error handling for Mongoose which, instead of returning an empty result, will give us an error code with a message?
It's not exactly clear what you're asking, but if you want to make an error condition out of something that is not normally an error, then an if statement (or some other test like that) is required to test for that specific condition.
You could make your own function for querying that turns an empty response into an error and you could "hide" the if condition in that function if you want, but it's still an if condition that tests for your specific condition.
So, to return a 404 if no document was found, you would just add an if statement (as you already appear to know):
exports.getCompanyByKey = function(req, res) {
  console.log(req.params.keyvalue);
  var query = Company.where({keyValue: req.params.keyvalue});
  query.findOne(function(err, company) {
    if (err) {
      res.status(500).send(err);
    } else {
      SampleData.findOne({}, function(err, sample_data) {
        if (err) {
          res.status(500).send(err);
        } else {
          // findOne yields a single document, or null when nothing matched
          if (sample_data) {
            res.json(sample_data);
          } else {
            res.status(404).send("no data");
          }
        }
      });
    }
  });
};
FYI, you also need to make sure you are properly setting a status code when there's an error and that you are never sending multiple responses to the same request (even when there's an error). I've also fixed several cases of those issues in your code.
This could likely be written more cleanly, with the responses consolidated, by using the promise interface to your database and sending an error response in a single .catch().
For example, you could simplify your code by creating a utility function for .findOne() that detects and sends an error response automatically:
function findOne(res, db, q, cb) {
  db.findOne(q, function(err, data) {
    if (err) {
      res.status(500).send(err);
      cb(err);
    } else if (!data) {
      // no matching document: treat the empty result as a 404
      res.status(404).send("no data");
      cb(new Error("no data"));
    } else {
      cb(null, data);
    }
  });
}
Then, your function could be simplified to this:
exports.getCompanyByKey = function(req, res) {
  var query = Company.where({keyValue: req.params.keyvalue});
  query.findOne(function(err, company) {
    if (err) {
      res.status(500).send(err);
    } else {
      findOne(res, SampleData, {}, function(err, sample_data) {
        // any error response has already been sent
        if (!err) {
          res.json(sample_data);
        }
      });
    }
  });
};
Again, this would be cleaner using your DB's promise interface.
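For illustration, a minimal sketch of what that might look like, assuming a Mongoose version whose .exec() returns a promise (the model names match the question; everything else is an assumption):

exports.getCompanyByKey = function(req, res) {
  Company.findOne({keyValue: req.params.keyvalue}).exec()
    .then(function(company) {
      // the original code ignores `company` and fetches the sample data next
      return SampleData.findOne({}).exec();
    })
    .then(function(sample_data) {
      if (!sample_data) {
        // turn the empty result into a 404 in one place
        return res.status(404).send("no data");
      }
      res.json(sample_data);
    })
    .catch(function(err) {
      // both queries share this single consolidated error response
      res.status(500).send(err);
    });
};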

async.<fn>Limit stops after the first loop iteration

This question is related to an answer to my previous question. There @robertklep recommended that I use mapLimit() instead of .map(), because .map() can't handle a large series of data, and with that solution everything worked fine. But now I have restructured my code, and none of the .<fn>Limit() functions run past the first loop iteration. Am I missing something here?
var proccesBook = function(file, cb) {
  testFile(file, function(epub) {
    if (epub) {
      getEpuData(file, function(data) {
        insertBookInDB(data)
      })
    } else {
      cb(file)
    }
  })
}

async.mapLimit(full_files_path, 10, proccesBook, function(err) {
  if (err) {
    console.log('Corrupted file', err);
  } else {
    console.log('Processing complete');
  }
})
// ---> only runs for the first 10 items
Your primary issue is that you don't call cb in the success branch of proccesBook. Your control flow must guarantee that the callback is called exactly once for each worker function invocation.
Other asides:
You don't seem to need the results, so eachLimit is fine.
You only need mapLimit if you need the results of each worker.
You need to follow the standard error-first convention when calling the callback. Don't do cb(file), as that will be interpreted as an error and abort the remaining processing (a sketch of the convention follows the corrected code below).
var proccesBook = function(file, cb) {
  testFile(file, function(epub) {
    if (epub) {
      getEpuData(file, function(data) {
        insertBookInDB(data)
        cb() // This is what you were missing
      })
    } else {
      cb()
    }
  })
}

async.eachLimit(full_files_path, 10, proccesBook, function(err) {
  if (err) {
    console.log('Corrupted file', err);
  } else {
    console.log('Processing complete');
  }
})
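If you also want the final callback to know which file was corrupted, follow the error-first convention by wrapping the file in an Error. A minimal sketch (note that with eachLimit the first error stops the remaining processing):

var proccesBook = function(file, cb) {
  testFile(file, function(epub) {
    if (epub) {
      getEpuData(file, function(data) {
        insertBookInDB(data)
        cb() // success: no error argument
      })
    } else {
      // error-first: pass an Error object, not the bare file path
      cb(new Error('Corrupted file: ' + file))
    }
  })
}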

nodejs: how to return a callback for an fs failure in my module

I am just two days into node.js. I am attempting to write my own module, which uses fs (filesystem) api. The code is as below:
var fs = require('fs');
var path = require('path');

// pkg file name constant
const PKG_FILE = 'pkg.json';

module.exports = function(srcDir, targetDir, callback) {
  console.log('checking dir: ', srcDir, '...');
  fs.readdir(srcDir, function(err, files) {
    // early return, in case of error.
    if (err) return callback(err);
    // look for the pkg.json file and, if found, iterate through
    // the list of components
    var pkgFile = path.join(srcDir, PKG_FILE);
    console.log(pkgFile);
    var pkg = fs.readFile(pkgFile, function(err, data) {
      if (err) return callback(err);
      var obj = JSON.parse(data);
      for (var attr in obj) {
        console.log(attr, " : ", obj[attr]);
      }
    });
  });
}
In the above code, I am trying to return to the caller, via the statement
if (err) return callback(err);
However, when an fs error occurs, I get a different error indicating that callback is undefined. Exact error is below:
rvnath@rv ~/projects/comviva/mbs/node/siteright $ node pkgtest.js riu3
checking dir: riu3 ...
/home/rvnath/projects/comviva/mbs/node/siteright/libs/pkgcreator.js:17
if (err) return callback(err);
^
TypeError: undefined is not a function
at /home/rvnath/projects/comviva/mbs/node/siteright/libs/pkgcreator.js:17:20
at FSReqWrap.oncomplete (fs.js:95:15)
Does this mean that I am not supposed to call "return callback(err)" in nested closures?
Thanks to anyone for correcting me.
regards.
EDIT
I am using my module as below.
rvnath@rv ~/projects/comviva/mbs/node/siteright $ cat pkgtest.js
var packager = require('./libs/pkgcreator');

packager(process.argv[2], function(err, files) {
  //if (err) {
  //  console.log("some error occurred", err);
  //  return;
  //}
  console.log("files listing...");
  files.forEach(function(file) {
    console.log(file);
  });
});
In case of success, I get the result correctly.
checking dir: /home/rvnath/projects/comviva/mbs/ecp/ecp_6.0 ...
/home/rvnath/projects/comviva/mbs/ecp/ecp_6.0/pkg.json
reading module: smp3
reading module: pushp
reading module: bulkp
reading module: drp
reading module: mop
reading module: ecp_ws
reading module: ecp_admin
but for failure, it says callback undefined.
As the error indicates, you are not passing a callback to the function which you are exporting -- it is indeed undefined: your module takes (srcDir, targetDir, callback), but you are only passing two arguments, so your function lands in targetDir and callback is left undefined. Simply add a function as the third argument when calling your module, and the problem should be gone.
packager(process.argv[2], function(err, files) {
  // your code
}, function(err) {
  // The actual callback
  console.log('Sadly, something went wrong: ' + err);
});
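Alternatively, if the second command-line argument was meant to be the target directory, you can keep a single callback and pass targetDir explicitly. A minimal sketch, assuming the module signature shown above (note that, as written, the module only invokes the callback on errors, so the success path would still need to call callback(null, files) somewhere):

packager(process.argv[2], process.argv[3], function(err, files) {
  if (err) {
    console.log("some error occurred", err);
    return;
  }
  console.log("files listing...");
  files.forEach(function(file) {
    console.log(file);
  });
});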

Using node.js async.series, but it doesn't work

I want to use async to make Node.js do things in order: first query two rates from MongoDB, then use these rates to compute two new rates:
async.series([
  function(callback) {
    db.collection('heros', function(err, collection) {
      if (err) {
        console.log(err);
      }
      if (!err) {
        console.log("2 collection fetched!");
        collection.findOne({'id': win}, function(err, result) {
          if (err) throw err;
          rate1 = result.rate;
          console.log("win-rate:" + rate1);
        });
        collection.findOne({'id': lose}, function(err, result) {
          if (err) throw err;
          rate2 = result.rate;
          console.log("lose-rate:" + rate2);
        });
      }
    });
    callback(null);
  },
  function(callback) {
    var Ea = 1/(1 + Math.pow(10, (rate2 - rate1)/400));
    var Eb = 1/(1 + Math.pow(10, (rate1 - rate2)/400));
    var ra = rate1 + 16*(1 - Ea);
    var rb = rate2 + 16*(0 - Eb);
    console.log("ra:" + ra);
    console.log("rb:" + rb);
    callback(null);
  },
  function(callback) {
    db.collection('heros', function(err, collection) {
      if (!err) {
        collection.update({'id': win}, {$set: {rate: ra}}, function(err, result) {
          if (err) throw err;
          if (!err) {
            console.log("update successful");
          }
        });
        collection.update({'id': lose}, {$set: {rate: rb}}, function(err, result) {
          if (err) throw err;
          if (!err) {
            console.log("update successful");
          }
        });
      }
    });
    callback(null);
  }
]);
But when I run it, it shows an error message:
ReferenceError: ra is not defined
It seems that Node.js jumps to computing or updating without waiting for the queries to complete.
You're declaring the variables like ra inside of a function block, so they're scoped to that function and unavailable elsewhere. You'd need to put them somewhere more accessible. For example, you could put them in a global variable:
var ra;
async.series([ ... ]);
Further, when you use series, you should call the callback function only when all of the work for that step has completed.
For example, one task should look like this:
db.collection('heros', function(err, collection) {
  if (err) {
    console.log(err); // log the error
    callback(err);    // call the async callback with the error
    return;           // stop
  }
  console.log("2 collection fetched!");
  collection.findOne({'id': win}, function(err, result) {
    if (err) { callback(err); return; } // again, handle the error
    rate1 = result.rate;                // grab the result
    console.log("win-rate:" + rate1);   // log it
    callback(null, rate1);              // now indicate completion
  });
});
As you've got multiple async function calls within a single task, you may want to split them into multiple tasks or consider using the parallel function as well. You could even use parallel within one of the tasks in the series call to handle the case where you have two findOne calls that need to be made, as sketched below.
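A minimal sketch of that idea, reusing the question's db, win, lose, rate1, and rate2 variables: the two findOne calls run concurrently inside the first series task, and the task's callback fires only after both results have arrived.

async.series([
  function(callback) {
    db.collection('heros', function(err, collection) {
      if (err) return callback(err);
      // both lookups run concurrently; the final function fires when both finish
      async.parallel({
        win: function(cb) { collection.findOne({'id': win}, cb); },
        lose: function(cb) { collection.findOne({'id': lose}, cb); }
      }, function(err, results) {
        if (err) return callback(err);
        rate1 = results.win.rate;
        rate2 = results.lose.rate;
        callback(null); // now it is safe to move on to the compute step
      });
    });
  }
  // ...the compute and update tasks follow, unchanged
]);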

Reading and returning multiple files in Node.js using fs.readFile

I'm writing a simple request handler to return a pair of css files. Using fs.readFileSync this was easy. However, I'm having difficulty accomplishing the same task using the async version of readFile. Below is my code. Having my response.write() method calls split among two different callbacks seems to be problematic. Can someone point out what I've done wrong? Interestingly this code works if I put response.end() inside of the first else statement. However, that creates a problem in that the second css file does not get returned (because response.end() has already been fired).
function css(response) {
  response.writeHead(200, {"Content-Type": "text/css"});
  fs.readFile('css/bootstrap.css', function(error, content) {
    if (error) {
      console.log(error);
    } else {
      response.write(content);
    }
  });
  fs.readFile('css/bootstrap-responsive.css', function(error, content) {
    if (error) {
      console.log(error);
    } else {
      response.write(content)
    }
  });
  response.end();
}
The primary issue with what you have is that response.end() gets called right away. You need to only call it after the files have done their response.write calls.
The easiest way would be to use a control flow library. Managing multiple asynchronous callbacks is generally complicated.
https://github.com/joyent/node/wiki/modules#wiki-async-flow
I'm going to use the async library because it's the one I know best.
var fs = require('fs');
var async = require('async');

function css(response) {
  response.writeHead(200, {"Content-Type": "text/css"});
  async.eachSeries(
    // Pass items to iterate over
    ['css/bootstrap.css', 'css/bootstrap-responsive.css'],
    // Pass iterator function that is called for each item
    function(filename, cb) {
      fs.readFile(filename, function(err, content) {
        if (!err) {
          response.write(content);
        }
        // Calling cb makes it go to the next item.
        cb(err);
      });
    },
    // Final callback after each item has been iterated over.
    function(err) {
      response.end()
    }
  );
}
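For context, here is a minimal sketch of how such a handler might be wired into a bare http server; the route check is a made-up placeholder:

var http = require('http');

http.createServer(function(request, response) {
  if (request.url === '/css') {
    // stream both stylesheets in order, then end the response
    css(response);
  } else {
    response.writeHead(404);
    response.end();
  }
}).listen(8080);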
If you want to accomplish this without a library, or just want another way, this is how I would do it more directly. Basically you keep a count and call end once both file reads have finished. (Note that with this approach the writes happen in whichever order the reads complete, so the two files may be concatenated out of order.)
function css(response) {
  response.writeHead(200, {"Content-Type": "text/css"});
  var count = 0;
  var handler = function(error, content) {
    count++;
    if (error) {
      console.log(error);
    } else {
      response.write(content);
    }
    if (count == 2) {
      response.end();
    }
  }
  fs.readFile('css/bootstrap.css', handler);
  fs.readFile('css/bootstrap-responsive.css', handler);
}
You can simply rely on native JavaScript Promises (ES6). The code can be as simple as follows:
var promises = ['file1.css', 'file2.css'].map(function(_path) {
  return new Promise(function(_path, resolve, reject) {
    fs.readFile(_path, 'utf8', function(err, data) {
      if (err) {
        console.log(err);
        resolve(""); // following the same code flow
      } else {
        resolve(data);
      }
    });
  }.bind(this, _path));
});
Promise.all(promises).then(function(results) {
  // Put your callback logic here
  response.writeHead(200, {"Content-Type": "text/css"});
  results.forEach(function(content) { response.write(content); });
  response.end();
});
There's a simple, common solution that reads them all and reports back with a single callback.
You can place it anywhere in your project and reuse it in many different cases.
var FS = require('fs');

/**
 * Abstract helper to asynchronously read a bulk of files
 * Note that `cb` will receive an array of per-file errors and an array of file data
 * Keys in the resulting arrays will be the same as in `paths`
 *
 * @param {Array} paths - file paths array
 * @param {Function} cb
 * @param {Array} errors - a list of file reading errors
 * @param {Array} data - a list of file content data
 */
function FS_readFiles(paths, cb) {
  var result = [], errors = [], l = paths.length;
  paths.forEach(function(path, k) {
    FS.readFile(path, function(err, data) {
      // decrease the number of files still pending
      --l;
      // record either the error or the content, keyed by index
      err && (errors[k] = err);
      !err && (result[k] = data);
      // invoke cb once all files have been read
      !l && cb(errors.length ? errors : undefined, result);
    });
  });
}
Just pass it a list of files and it will return each of them to you as a buffer.
Simple example:
var cssFiles = [
  'css/bootstrap.css',
  'css/bootstrap-responsive.css'
];

function css(response) {
  FS_readFiles(cssFiles, function(errors, data) {
    response.writeHead(200, {"Content-Type": "text/css"});
    data.forEach(function(v) {
      response.write(v);
    });
    response.end();
  });
}
Off-topic: by the way, responses like this are better cached on a front-end proxy server like nginx or Varnish, since the files never change.
const fs = require('fs');

function readFilePromise(fileName) {
  return new Promise(function(resolve, reject) {
    fs.readFile(fileName, 'utf-8', function(err, data) {
      if (err) {
        reject(err)
      } else {
        resolve(data)
      }
    })
  })
}

Promise.all([readFilePromise("abc.txt"), readFilePromise("dec.txt")]).then(function(out) {
  console.log(out)
})
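As an aside, on Node 10 and later you don't need a hand-rolled wrapper at all, since fs ships a built-in promise API. A minimal sketch using the same placeholder file names as above:

const fsp = require('fs').promises;

Promise.all([
  fsp.readFile("abc.txt", "utf-8"),
  fsp.readFile("dec.txt", "utf-8")
]).then(function(out) {
  console.log(out); // contents of both files, in input order
});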
Async is an awesome lib. However, the standard for these things is moving in the direction of promises for handling multiple asynchronous operations; in fact, in ECMAScript 6 they are a standard part of the language. There are several libraries that implement promises, including jQuery. However, for Node, I like to use 'q'.
Here is the same code using promises. One note: you might want to move the first writeHead call to coincide with the first successful read.
var Q = require('q');

function css(response) {
  response.writeHead(200, {"Content-Type": "text/css"});
  var defer = Q.defer();
  fs.readFile('css/bootstrap.css', function(error, content) {
    if (error) {
      defer.reject(error)
    } else {
      response.write(content);
      defer.resolve();
    }
  });
  defer.promise.then(function() { // this gets executed when the first read succeeds and is written
    var secondDefer = Q.defer();
    fs.readFile('css/bootstrap-responsive.css', function(error, content) {
      if (error) {
        secondDefer.reject(error);
      } else {
        response.write(content);
        secondDefer.resolve();
      }
    });
    return secondDefer.promise;
  },
  function(error) { // this gets called when the first read fails
    console.log(error);
    // other error handling
  }).
  done(function() {
    response.end();
  },
  function(error) { // this is the error handler for when the second read fails
    console.log(error);
    response.end(); // gotta call end anyway
  });
}
