I'd like to use promises with Node.js, but it doesn't seem to work as expected.
I'd like to see LOG 1, LOG 2 and LOG 3 for each file, in that order, and then LOG 4 at the end. But LOG 4 never appears, and I get this:
LOG 1
LOG 3
LOG 1
LOG 3
LOG 1
LOG 3
SUCCESS LOG 2
SUCCESS LOG 2
SUCCESS LOG 2
SUCCESS LOG 2
My code:
var filePromise = _.map(files, function(file) {
    var filePath = './sql/' + file;
    fs.lstat(filePath, function(err, stats) {
        if (stats.isFile() && file !== '.gitignore' && file !== 'index.js') {
            fs.readFile(filePath, 'utf-8', function(err, data) {
                console.log('LOG 1');
                db.query(data).then(function() {
                    console.log(colors.green('SUCCESS LOG 2'));
                }).catch(function() {
                    console.log(colors.red('ERROR LOG 2'));
                });
                console.log('LOG 3');
            });
        }
    });
});

Promise.all(filePromise).then(function() {
    console.log(colors.green('LOG 4'));
});
In order for filePromise to contain a collection of promises, you need to return a promise for each iteration of your map call. Only then will the call to Promise.all be evaluated correctly.
var filePromise = _.map(files, function(file) {
    return new Promise(function(resolve, reject) {
        var filePath = './sql/' + file;
        fs.lstat(filePath, function(err, stats) {
            if (stats.isFile() && file !== '.gitignore' && file !== 'index.js') {
                fs.readFile(filePath, 'utf-8', function(err, data) {
                    console.log('LOG 1');
                    db.query(data).then(function() {
                        console.log(colors.green('SUCCESS LOG 2'));
                        resolve();
                    }).catch(function() {
                        console.log(colors.red('ERROR LOG 2'));
                        reject();
                    });
                    console.log('LOG 3');
                });
            } else {
                reject(new Error("failed condition"));
            }
        });
    });
});

Promise.all(filePromise).then(function() {
    console.log(colors.green('LOG 4'));
});
This does not fix the ordering of your console logs; that is a separate issue. Your console logs are not actually out of order: they are printing exactly as written. The cause is the async call you are making to db.query, which, when triggered, does not block waiting for a response. Instead, the next line (console.log('LOG 3')) executes immediately after the function call returns, before the query's then callback fires.
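If you want LOG 3 to print after LOG 2, move it into the promise chain so it only runs once the query has settled; a minimal sketch based on the corrected snippet above:

db.query(data).then(function() {
    console.log(colors.green('SUCCESS LOG 2'));
    console.log('LOG 3'); // now runs only after the query succeeds
    resolve();
}).catch(function() {
    console.log(colors.red('ERROR LOG 2'));
    reject();
});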
Side note: make sure to handle the err arguments passed to the inline callback functions of fs.lstat and fs.readFile. Failure to do so will result in the promise never settling in the event of an error.
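For example, a minimal sketch for fs.lstat (the same applies to fs.readFile):

fs.lstat(filePath, function(err, stats) {
    if (err) {
        return reject(err); // settle the promise so Promise.all doesn't hang forever
    }
    // ... rest of the callback
});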
Related
I'm creating a REST API with Node.js without the Express framework, and I'm using fs as the database. I'm having a problem with the GET method; all other methods work fine. When I try to get all the JSON files from a directory, it only works the first time. The second and every following time nothing happens, not even an error: it just hangs for a few minutes, after which I get net::ERR_EMPTY_RESPONSE in Chrome and TypeError: "NetworkError when attempting to fetch resource." in Firefox. Getting a single JSON file works fine. I think the problem is in the back end, because I have the same problems with Postman.
I'm using Node 10.14.1. As I understand it, when I try to get all the files from the directory a second time, my server.js file doesn't send any response, and the app stops before chosenHandler in server.js (line 64).
GET handler:
// Resources - get
// Required data: none
// Optional data: ID
handlers._resources.get = (data, callback) => {
    // Check that the ID is valid
    checkId(data.queryStringObject.id)
    if (resourceDBId) {
        // Lookup the resource
        _data.read('resources', resourceDBId, (err, data) =>
            !err && data
                ? callback(ok, data)
                : callback(notFound, {Error: 'User doesn\'t exist'}))
    } else {
        _data.readAll('resources', (err, data) => {
            if (!err && data) {
                if (data.last) {
                    resourcesData.push(data.data)
                    callback(ok, resourcesData)
                }
                if (!data.last) resourcesData.push(data.data)
            } else callback(internalServerError, {Error: 'Can\'t get all resources'})
        })
    }
}
All the code is here: https://github.com/FreeDevStan/sale
The readAll method in the lib/data.js file needs to initialize i.
Because i is used without being declared, it is treated as a global variable: it keeps its value from the previous request, so the loop is never entered again, the callback never fires, and no response can be sent.
I recommend you change it as below.
lib.readAll = (dir, callback) => {
    fs.readdir(lib.baseDir + dir, (err, data) => {
        if (!err && data) {
            let i = 0;
            while (i < data.length) {
                if (i < data.length - 1) {
                    fs.readFile(lib.baseDir + dir + '/' + data[i], 'utf-8', (err, content) => {
                        let parsedContent = helpers.parseJsonToObject(content)
                        err ? callback(err, content) : callback(false, {last: false, data: parsedContent})
                    })
                } else if (i === data.length - 1) {
                    fs.readFile(lib.baseDir + dir + '/' + data[i], 'utf-8', (err, content) => {
                        let parsedContent = helpers.parseJsonToObject(content)
                        err ? callback(err, content) : callback(false, {last: true, data: parsedContent})
                    })
                }
                i++
            }
        } else callback(err, data)
    })
}
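Be aware that this version still invokes callback once per file and assumes the last file's readFile finishes last, which Node does not guarantee. A minimal alternative sketch that collects everything and calls back exactly once (note: this is an assumption on my part and changes the callback contract to a single array, so the GET handler would need adjusting; lib.baseDir and helpers.parseJsonToObject are as above):

lib.readAll = (dir, callback) => {
    fs.readdir(lib.baseDir + dir, (err, files) => {
        if (err || !files) return callback(err, files)
        let remaining = files.length
        let results = []
        if (remaining === 0) return callback(false, results)
        files.forEach((file, k) => {
            fs.readFile(lib.baseDir + dir + '/' + file, 'utf-8', (err, content) => {
                if (err) return callback(err, content)
                // keep results in the same order as the directory listing
                results[k] = helpers.parseJsonToObject(content)
                if (--remaining === 0) callback(false, results)
            })
        })
    })
}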
I have the following code:
function get_status() {
    try {
        /* GET - status */
        async.map(['http://url1.com/', 'http://url2.com/', 'http://url3.com/'], function(value, callback) {
            /* GET - request status */
            request.post({
                url: value,
                form: { 'mycustomdata': "" + mycustomdata + "" },
                method: 'POST'
            }, function(err, res, body) {
                /* CHECK - response */
                if (!err && typeof body !== 'undefined' && res.statusCode == 200) {
                    console.log('get status success...')
                    callback();
                } else {
                    callback('failed to get status');
                }
            })
        }, function(err, results) {
            if (err) {
                console.log(err);
                return false;
            } else {
                console.log('finished...')
            }
        })
    } catch (err) {
        console.log(err);
    }
}
What I need: when async.map has finished processing all three URLs (some respond, some don't), the final callback that outputs 'finished...' should run, and from there the function should call itself again, so that it runs each time all URLs have been processed.
In short: when async.map has processed all the URLs, it should output 'finished...' to the console and run the same function again.
But all I'm getting in the console is:
get status success...
failed to get status
get status success...
'finished...' is never logged, so I can't call the function itself. Could you please enlighten me on how this needs to be written?
You're only ever passing the err param to the final callback. I believe, from my last use of async.js, that you need to pass null as the first param to continue to the next function, here being the final function; otherwise it skips to the final function as an error.
callback(null, results);
If you want it to call itself again, that's just simple recursion. Wrap the async.map in a function, and when console.log('finished...') runs, call the function again.
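A minimal sketch of that recursion (getStatusForUrl here is a hypothetical name for the function(value, callback) iterator from your snippet):

function get_status() {
    async.map(urls, getStatusForUrl, function(err, results) {
        if (err) console.log(err);
        console.log('finished...');
        get_status(); // process the whole batch again
    });
}

In practice you may want a delay (e.g. setTimeout) before the recursive call so you don't hammer the servers.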
OK, I made a full example so that you can see what the problem is, and you can try to change the code:
var async = require('async'),
    request = require('request');

var myUrls = ['http://56.123.65.86:8080/api/server_status/',
    'http://88.96.42.122:8080/api/server_status/',
    'http://12.23.32.25:8080/api/server_status/',
    'http://251.214.44.58:8080/api/server_status/',
    'http://122.23.32.54:8080/api/server_status/']

async.map(myUrls, function(url, callback) {
    console.log('getting url...' + url)
    request(url, function(error, response, html) {
        // Some processing is happening here before the callback is invoked
        if (typeof response !== 'undefined') {
            console.log('response ok...' + url)
            return callback(null, html)
        } else {
            console.log('response failed...' + url)
            return callback(error, html);
        }
    });
}, function(err, results) {
    if (results) {
        console.log('all finished...')
    }
});
I am getting this:
getting url...http://56.123.65.86:8080/api/server_status/
getting url...http://88.96.42.122:8080/api/server_status/
getting url...http://12.23.32.25:8080/api/server_status/
getting url...http://251.214.44.58:8080/api/server_status/
getting url...http://122.23.32.54:8080/api/server_status/
And I need to get this:
getting url...http://56.123.65.86:8080/api/server_status/
getting url...http://88.96.42.122:8080/api/server_status/
getting url...http://12.23.32.25:8080/api/server_status/
getting url...http://251.214.44.58:8080/api/server_status/
getting url...http://122.23.32.54:8080/api/server_status/
all finished...
I never get all finished... — it stays at getting url... forever, and I can't see why all finished... isn't triggered after all the items are processed.
I tried to define a local variable and then call a Lambda function that populates the value into my local variable:
var listOfAliases = null;

lambda.invoke(params, function(err, data) {
    if (err) {
        //context.fail(err);
        console.log(`This is the ERROR execution =${err} =================================`);
        prompt(err);
    } else {
        //context.succeed('Data loaded from DB: ' + data.Payload);
        listOfAliases = JSON.stringify(data.Payload);
        console.log(`This is the VALIDE execution =${data.Payload} =================================`); // I can see this in the log with proper values
        console.log(`This is the VALIDE execution(listOfAliases) =${listOfAliases} =================================`); // I can see this in the log with proper values
    }
    callback(null, JSON.parse(data.Payload));
});

console.log(`This is the DB execution listOfAliases=${listOfAliases} =================================`); // I can see this in the log with NULL value
The problem here is that lambda.invoke executes asynchronously, and your last console.log executes before the invoke callback function completes.
If you need to access the result outside once the asynchronous call completes, you could use a promise.
var promise = new Promise(function(resolve, reject) {
    lambda.invoke(params, function(err, data) {
        if (err) {
            reject(err);
        } else {
            resolve(JSON.stringify(data.Payload));
        }
    });
});

promise.then(function(listOfAliases) {
    console.log('This is the DB execution listOfAliases ' + listOfAliases);
});
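Since lambda.invoke can fail, you'll likely also want a rejection handler on the promise; a minimal sketch:

promise.then(function(listOfAliases) {
    console.log('This is the DB execution listOfAliases ' + listOfAliases);
}).catch(function(err) {
    console.log('lambda.invoke failed: ' + err);
});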
This feels like an obvious question but it's perplexing me: I want a Node function that downloads a resource at a URI. I need it to work for several different content types without the user needing to specify which type it is.
I know how to pipe request to fs.createWriteStream when you know it's going to be an image, but not how to handle it when you've already invoked the callback from request. Here's where I am:
var request = require('request'),
    fs = require('graceful-fs');

function cacheURI(uri, cache_path, cb) {
    request(uri, function(err, resp, body) {
        var content_type = resp.headers['content-type'].toLowerCase().split("; ")[0],
            type = content_type.split("/")[0],
            sub_type = content_type.split("/")[1];
        if (sub_type == "json") {
            body = JSON.parse(body);
        }
        if (type == "image") {
            // this is where the trouble starts
            var ws = fs.createWriteStream(cache_path);
            ws.write(body);
            ws.on('close', function() {
                console.log('image done');
                console.log(resp.socket.bytesRead);
                ws.end();
                cb()
            });
        } else {
            // this works fine for text resources
            fs.writeFile(cache_path, body, cb);
        }
    });
}
This answer to a previous question suggests the following:
request.get({url: 'https://someurl/somefile.torrent', encoding: 'binary'}, function (err, response, body) {
    fs.writeFile("/tmp/test.torrent", body, 'binary', function(err) {
        if (err)
            console.log(err);
        else
            console.log("The file was saved!");
    });
});
But I can't pass "binary" to request if I don't yet know the type of response I'll get.
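One workaround, not shown in the snippets above: request accepts encoding: null, which makes body a Buffer for every content type, so you can write it to disk unmodified and only decode the types you want to parse. A rough sketch adapted from the question's cacheURI:

request({url: uri, encoding: null}, function(err, resp, body) {
    // body is a Buffer here, regardless of content type
    var content_type = resp.headers['content-type'].toLowerCase().split("; ")[0];
    if (content_type.split("/")[1] == "json") {
        var parsed = JSON.parse(body.toString('utf-8')); // decode only where parsing is needed
    }
    fs.writeFile(cache_path, body, cb); // a Buffer is safe to write for binary and text alike
});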
UPDATE
Per the suggested answer, changing "close" to "finish" in the event handler does fire the callback:
if (opts.image) {
    var ws = fs.createWriteStream(opts.path);
    ws.on('finish', function() {
        console.log('image done');
        console.log(resp.socket.bytesRead);
    });
    //tried as buffer as well
    //ws.write(new Buffer(body));
    ws.write(body);
    ws.end();
}
This does write the image file, but not correctly.
As suggested here, try using the finish event (if you have Node >= v0.10):
ws.on('finish', function() {
    console.log('image done');
    console.log(resp.socket.bytesRead);
    cb()
});
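For reference, 'finish' only fires after end() has been called and the data has been flushed, so the full sequence would look like this (using ws, body and cb from the question):

var ws = fs.createWriteStream(cache_path);
ws.on('finish', function() {
    console.log('image done');
    cb();
});
ws.write(body);
ws.end(); // triggers 'finish' once the write has been flushed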
I'm writing a simple request handler to return a pair of CSS files. Using fs.readFileSync this was easy. However, I'm having difficulty accomplishing the same task using the async version of readFile. Below is my code. Having my response.write() calls split between two different callbacks seems to be problematic. Can someone point out what I've done wrong? Interestingly, this code works if I put response.end() inside the first else statement. However, that creates a problem in that the second CSS file does not get returned (because response.end() has already been fired).
function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    fs.readFile('css/bootstrap.css', function(error, content) {
        if (error) {
            console.log(error);
        } else {
            response.write(content);
        }
    });
    fs.readFile('css/bootstrap-responsive.css', function(error, content) {
        if (error) {
            console.log(error);
        } else {
            response.write(content)
        }
    });
    response.end();
}
The primary issue with what you have is that response.end() gets called right away. You need to call it only after the files have finished their response.write calls.
The easiest way would be to use a control flow library. Managing multiple asynchronous callbacks is generally complicated.
https://github.com/joyent/node/wiki/modules#wiki-async-flow
I'm going to use the async library because it's the one I know best.
var fs = require('fs');
var async = require('async');

function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    async.eachSeries(
        // Pass items to iterate over
        ['css/bootstrap.css', 'css/bootstrap-responsive.css'],
        // Pass iterator function that is called for each item
        function(filename, cb) {
            fs.readFile(filename, function(err, content) {
                if (!err) {
                    response.write(content);
                }
                // Calling cb makes it go to the next item.
                cb(err);
            });
        },
        // Final callback after each item has been iterated over.
        function(err) {
            response.end()
        }
    );
}
If you want to accomplish this without a library, or just want another way, this is how I would do it more directly: keep a count and call end once both file reads have finished. Note that, unlike the eachSeries version, this does not guarantee the two files are written in order, since the readFile callbacks can fire in either order.
function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    var count = 0;
    var handler = function(error, content) {
        count++;
        if (error) {
            console.log(error);
        } else {
            response.write(content);
        }
        if (count == 2) {
            response.end();
        }
    }
    fs.readFile('css/bootstrap.css', handler);
    fs.readFile('css/bootstrap-responsive.css', handler);
}
You can simply rely on native Promises (an ES2015 language feature, not HTML5). The code can be as simple as follows:
var promises = ['file1.css', 'file2.css'].map(function(_path) {
    return new Promise(function(_path, resolve, reject) {
        fs.readFile(_path, 'utf8', function(err, data) {
            if (err) {
                console.log(err);
                resolve(""); // following the same code flow
            } else {
                resolve(data);
            }
        });
    }.bind(this, _path));
});

Promise.all(promises).then(function(results) {
    // Put your callback logic here
    response.writeHead(200, {"Content-Type": "text/css"});
    results.forEach(function(content) { response.write(content) });
    response.end();
});
There's a simple, common solution to get them all with one callback.
You can place it anywhere in your project and reuse it in many different cases.
var FS = require('fs');

/**
 * Abstract helper to asyncly read a bulk of files
 * Note that `cb` will receive an array of errors for each file and an array of file data
 * Keys in the resulting arrays will be the same as in `paths`
 *
 * @param {Array} paths - file paths array
 * @param {Function} cb
 *   @param {Array} errors - a list of file reading errors
 *   @param {Array} data - a list of file content data
 */
function FS_readFiles(paths, cb) {
    var result = [], errors = [], l = paths.length;
    paths.forEach(function (path, k) {
        FS.readFile(path, function (err, data) {
            // decrease the count of files still pending
            --l;
            // store the error or the data under the same key as in `paths`
            err && (errors[k] = err);
            !err && (result[k] = data);
            // invoke cb once all files have been read
            !l && cb(errors.length ? errors : undefined, result);
        });
    });
}
Just pass it a bulk of files and it will return each of them to you as a buffer.
Simple example:
var cssFiles = [
    'css/bootstrap.css',
    'css/bootstrap-responsive.css'
];

function css(response) {
    FS_readFiles(cssFiles, function (errors, data) {
        response.writeHead(200, {"Content-Type": "text/css"});
        data.forEach(function (v) {
            response.write(v);
        });
        response.end();
    });
}
Off-topic: by the way, requests like this are better cached on a front-end proxy server like nginx or Varnish, since the content never changes.
const fs = require('fs');

function readFilePromise(fileName) {
    return new Promise(function (resolve, reject) {
        fs.readFile(fileName, 'utf-8', function (err, data) {
            if (err) {
                reject(err)
            } else {
                resolve(data)
            }
        })
    })
}

Promise.all([readFilePromise("abc.txt"), readFilePromise("dec.txt")]).then(function (out) {
    console.log(out)
})
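On Node >= 8, util.promisify can build an equivalent wrapper for you; a minimal sketch:

const util = require('util');
const fs = require('fs');

// promisified fs.readFile: resolves with the file contents, rejects on error
const readFilePromise = util.promisify(fs.readFile);

Promise.all([readFilePromise("abc.txt", "utf-8"), readFilePromise("dec.txt", "utf-8")]).then(function (out) {
    console.log(out)
})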
Async is an awesome lib. However, the standard for these things is moving in the direction of promises for handling multiple asynchronous operations; in fact, in ECMAScript 6 promises will be a standard part of the language. There are several libraries that implement promises, including jQuery, but for Node I like to use 'q'.
Here is the same code using promises. One note: you might want to move the first writeHead call to coincide with the first successful read.
var Q = require('q');

function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    var defer = Q.defer();
    fs.readFile('css/bootstrap.css', function(error, content) {
        if (error) {
            defer.reject(error)
        } else {
            response.write(content);
            defer.resolve();
        }
    });
    defer.promise.then(function() { // this gets executed when the first read succeeds and is written
        var secondDefer = Q.defer();
        fs.readFile('css/bootstrap-responsive.css', function(error, content) {
            if (error) {
                secondDefer.reject(error);
            } else {
                response.write(content);
                secondDefer.resolve();
            }
        });
        return secondDefer.promise;
    },
    function(error) { // this gets called when the first read fails
        console.log(error);
        // other error handling
    }).done(function() {
        response.end();
    },
    function(error) { // this is the error handler for when the second read fails
        console.log(error);
        response.end(); // gotta call end anyway
    });
}