I have created a Node.js server and wrapped an HTTP GET in a promise. I call that function inside the server's request handler, but it shows the error options.uri is a required argument.
var http = require('http');
var request = require('request');
var rese = null;

function initialize() {
    var options = {
        host: 'httpbin.org',
        path: '/json',
    };
    return new Promise(function (resolve, reject) {
        request.get(options, function (err, res, body) {
            if (err) {
                reject(err);
            } else {
                resolve(JSON.parse(body));
            }
            /** res.on("data", function(chunk) {
                //console.log("BODY: " + chunk);
                result = chunk;
            }); */
        });
    });
}

http.createServer(function (req, res) {
    if (req.url == '/my') {
        /** result = res.on("data", function(chunk) {
            console.log("BODY: " + chunk);
        }); */
        var initializePromise = initialize();
        initializePromise.then(function (res) {
            rese = result;
            console.log("Initialized user details");
            // Use user details from here
            console.log(userDetails);
        }, function (err) {
            console.log(err);
        });
        //res.end(result);
    } else {
        res.end('please find the correct path');
    }
}).listen(2000);
Error: options.uri is a required argument
Your get call should include a URI. The request API's get call structure is as follows:
request.get("http://httpbin.org/json", options, function (err, res, body) {
    if (err) {
        reject(err);
    } else {
        resolve(JSON.parse(body));
    }
    /** res.on("data", function(chunk) {
        //console.log("BODY: " + chunk);
        result = chunk;
    }); */
});
Try making these changes; it should work.
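For reference, here is a minimal sketch of a corrected initialize() that keeps the options object but gives request the uri it expects (host/path belong to the core http module, not the request library). The json: true flag is optional; it just asks request to parse the JSON response body for you:

var request = require('request');

function initialize() {
    // request needs a full uri/url, not host + path
    var options = {
        uri: 'http://httpbin.org/json',
        json: true // let request parse the JSON response body
    };
    return new Promise(function (resolve, reject) {
        request.get(options, function (err, res, body) {
            if (err) {
                reject(err);
            } else {
                resolve(body); // already parsed because of json: true
            }
        });
    });
}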
I have the code below, and callFB (assigned to the variable promiseFeedback) seems to be called even when an error occurs while creating the document, and I don't know why. It should only be called if there is no err in createDocument.
Is anyone able to clear up why?
if (json) {
    createDocument(documentUrl, context, json, function(res) {
        var promiseFeedback = callFB(context, res);
        var collection = `mydb`;
        client.createDocument(collection, res, (err, result) => {
            if (err) {
                context.log(err);
                return context.done();
            } else {
                Promise.all([promiseFeedback]).then(function(results) {
                    context.log("promiseFeedback: " + results[0]);
                    context.done();
                });
            }
        });
    });
}
function callFB(context, res) {
    return new Promise((resolve, reject) => {
        var requestUrl = url.parse( URL );
        var requestBody = {
            "id": res.id
        };
        var body = JSON.stringify( requestBody );
        const requestOptions = {
            hostname: requestUrl.hostname,
            path: requestUrl.path,
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Content-Length': Buffer.byteLength(body),
            }
        };
        var request = https.request(requestOptions, function(res) {
            var data = "";
            res.on('data', function (chunk) {
                data += chunk;
            });
            res.on('end', function () {
                resolve(true);
            });
        }).on('error', function(error) {
            context.log("request error:", error);
            resolve(false);
        });
        request.write(body);
        request.end();
    });
}
var promiseFeedback = callFB (context, res);
This statement executes callFB immediately; it does not just give the promise another name. The call sits outside the client.createDocument callback where err is checked and Promise.all([promiseFeedback]) is used, so it runs no matter what the result of client.createDocument is.
To fix this, either:
Remove var promiseFeedback = callFB(context, res); and change Promise.all([promiseFeedback]) to callFB(context, res). You don't need Promise.all when there is only one promise to resolve.
Or simply move var promiseFeedback = callFB(context, res); into the else branch.
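As a rough sketch of the second option (moving the call into the else branch so callFB only starts once the document has been created), assuming the same client, collection, context, res and callFB as in the code above:

client.createDocument(collection, res, (err, result) => {
    if (err) {
        context.log(err);
        return context.done();
    }
    // callFB is only invoked after the document has been created
    var promiseFeedback = callFB(context, res);
    promiseFeedback.then(function (feedback) {
        context.log("promiseFeedback: " + feedback);
        context.done();
    });
});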
I need to implement a system which:
1. Gets data from a parent collection.
2. Checks if a particular key is found in Redis.
3. If not, makes an HTTP call, gets the JSON data, and sets the cache.
4. If yes, gets the data from the cache.
5. Saves the data into a child collection for the parent id.
I have a working solution using callbacks, something like this:
MongoClient.connect(dsn).then(function(db) {
    parentcollection.findOne({"_id" : new ObjectId(pid)}, function(err, data) {
        var redis = require("redis"),
            client = redis.createClient();
        client.on("error", function (err) {
            console.log("Error " + err);
        });
        // If not set
        client.get(cacheKey, function(err, data) {
            // data is null if the key doesn't exist
            if (err || data === null) {
                var options = {
                    host: HOST,
                    port: 80,
                    path: URI
                };
                var req = http.get(options, function(res) {
                    res.setEncoding('utf8');
                    res.on('data', function (chunk) {
                        body += chunk;
                        //console.log('CHUNK: ' + chunk);
                    });
                    res.on('end', function () {
                        data = JSON.parse(body);
                        // Get childdata after processing data
                        childcollection.save(childdata, {w:1}, function(cerr, inserted) {
                            db.close();
                        });
                    });
                });
            } else {
                // Get childdata from cache data
                childcollection.save(childdata, {w:1}, function(cerr, inserted) {
                    db.close();
                });
            }
        });
    });
});
I want to use promises (the native ones, not external libraries like bluebird or request) instead of callbacks. I checked the manuals and am thinking I would need to implement it like this:
var promise1 = new Promise((resolve, reject) => {
    MongoClient.connect(dsn).then(function(db) {
        parentcollection.findOne({"_id" : new ObjectId(pid)}, function(err, data) {
        });
    }}.then(function(data){
        var promise2 = new Promise((resolve, reject) => {
            var redis = require("redis"),
                client = redis.createClient();
            client.on("error", function (err) {
                console.log("Error " + err);
            });
            // If not set
            client.get(cacheKey, function(err, data) {
                // data is null if the key doesn't exist
                if (err || data === null) {
                    var options = {
                        host: HOST,
                        port: 80,
                        path: URI
                    };
                    var promise3 = new Promise((resolve, reject) => {
                        var req = http.get(options, function(res) {
                            res.setEncoding('utf8');
                            res.on('data', function (chunk) {
                                body += chunk;
                                //console.log('CHUNK: ' + chunk);
                            });
                            res.on('end', function () {
                                data = JSON.parse(body);
                                // Get childdata after processing data
                            });
                        })
                    }).then(function(data){
                        childcollection.save(childdata, {w:1}, function(cerr, inserted) {
                            db.close();
                        });
                    });
                } else {
                    // Get childdata from cache data
                    childcollection.save(childdata, {w:1}, function(cerr, inserted) {
                        db.close();
                    });
                }
            });
        }}.then(function(data){
        });
    });
This looks as dirty as callback hell. Is there a better approach that doesn't nest promises like the above?
One issue is that you never call the resolve functions provided to the promise constructor callbacks. Without calling them, promises never resolve.
I would suggest creating those new promises in separate, reusable functions. Also, some MongoDB methods already return promises when you don't provide the callback argument.
You could do it like below.
// Two promisifying functions:
function promiseClientData(client, key) {
    return new Promise(function (resolve, reject) {
        return client.get(key, function (err, data) {
            return err ? reject(err) : resolve(data); // fulfil the promise
        });
    });
}

function promiseHttpData(options) {
    return new Promise(function (resolve, reject) {
        return http.get(options, function(res) {
            var body = ''; // You need to initialise this...
            res.setEncoding('utf8');
            res.on('data', function (chunk) {
                body += chunk;
                //console.log('CHUNK: ' + chunk);
            });
            res.on('end', function () {
                var data = JSON.parse(body);
                resolve(data); // fulfil the promise
            });
        }).on('error', reject); // reject on request errors
    });
}
// Declare the db variable outside of the promise chain to avoid
// having to pass it through
var db;

// The actual promise chain:
MongoClient.connect(dsn).then(function (dbArg) {
    db = dbArg;
    return parentcollection.findOne({"_id" : new ObjectId(pid)}); // returns a promise
}).then(function (data) {
    var redis = require("redis"),
        client = redis.createClient();
    client.on("error", function (err) {
        console.log("Error " + err);
    });
    // Get somehow cacheKey...
    // ...
    return promiseClientData(client, cacheKey);
}).then(function (data) {
    // If not set: data is null if the key doesn't exist
    // Throwing an error will trigger the next `catch` callback
    if (data === null) throw "key does not exist";
    return data;
}).catch(function (err) {
    var options = {
        host: HOST,
        port: 80,
        path: URI
    };
    return promiseHttpData(options);
}).then(function (data) {
    // Get childdata by processing data (in either case)
    // ....
    // ....
    return childcollection.save(childdata, {w:1}); // returns a promise
}).then(function () {
    db.close();
});
I assume that the promises returned by MongoDB are fully compliant. If in doubt, you can turn them into native JavaScript promises by calling Promise.resolve() on them, for instance like this:
return Promise.resolve(parentcollection.findOne({"_id" : new ObjectId(pid)}));
or:
return Promise.resolve(childcollection.save(childdata, {w:1}));
I am new to the async library. I have used async.eachSeries and async.waterfall for each iteration. I see that async.waterfall runs only once.
Here is my code:
var fs = require('fs'),
    async = require('async'),
    Client = require('node-rest-client').Client;

// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.

async.eachSeries(jsonOutput.results, function(account, callback) {
    var dataObject = {};
    dataObject.updatetime = new Date();
    var setAccountInfoURL = ""; // Data Update REST API Request
    async.waterfall([
        function setAccountInfo(updateCallback) {
            // client.get(setAccountInfoURL, function (data, response) {
            //     var jsonOutput = JSON.parse(data.toString('utf8'));
            updateCallback(null, "output", account);
            // });
        },
        function saveAccountInfo(jsonOutput, account, updateCallback) {
            var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
            fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
                if (err) {
                    console.log(err);
                }
                console.log("JSON saved to " + "debuginfo.json");
                updateCallback(null);
            });
        }
    ], function asyncComplete(err) {
        if (err) {
            console.warn('Error setting account info.', err);
        }
        console.log('async completed');
    });
}, function(err) {
    if (err) {
        console.log('error in loop');
    }
    console.log('loop completed');
});
Output:
124
JSON saved to debuginfo.json
async completed
Any help is really appreciated.
I found my mistake. I missed calling the eachSeries callback for each iteration, right after the waterfall completes.
var fs = require('fs'),
    async = require('async'),
    Client = require('node-rest-client').Client;

// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.

async.eachSeries(jsonOutput.results, function(account, callback) {
    var dataObject = {};
    dataObject.updatetime = new Date();
    var setAccountInfoURL = ""; // Data Update REST API Request
    async.waterfall([
        function setAccountInfo(updateCallback) {
            // client.get(setAccountInfoURL, function (data, response) {
            //     var jsonOutput = JSON.parse(data.toString('utf8'));
            updateCallback(null, "output", account);
            // });
        },
        function saveAccountInfo(jsonOutput, account, updateCallback) {
            var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
            fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
                if (err) {
                    console.log(err);
                }
                console.log("JSON saved to " + "debuginfo.json");
                updateCallback(null);
            });
        }
    ], function asyncComplete(err) {
        if (err) {
            console.warn('Error setting account info.', err);
        }
        console.log('async completed');
        callback(null); // this is the change.
    });
}, function(err) {
    if (err) {
        console.log('error in loop');
    }
    console.log('loop completed');
});
I'm taking my first steps in Node.js, and I'm having an issue with the async module. I had the following code, which works fine:
var http = require('http');
var fs = require('fs');
var async = require('async');

function load_albums(callback) {
    fs.readdir("albums", function(err, content) {
        console.log(content);
        if (err) {
            callback(err);
            return;
        }
        var directories = [];
        (function iterator(index) {
            if (index == content.length) {
                callback(null, directories);
                return;
            }
            fs.stat("albums/" + content[index], function(err, stats) {
                if (err) {
                    callback(err);
                }
                if (stats.isDirectory()) {
                    directories.push(content[index]);
                }
                console.log(index);
                iterator(index + 1);
            });
        })(0);
    });
}

function handle_request(request, response) {
    load_albums(function(err, albums) {
        if (err) {
            response.writeHead(503, {"Content-Type": "application/json"});
            response.end(JSON.stringify(err) + "\n");
            return;
        }
        var out = { error: null,
                    data: { albums: albums } };
        response.writeHead(200, { "Content-Type" : "application/json" });
        response.end(JSON.stringify(out) + "\n");
    });
}

var s = http.createServer(handle_request);
s.listen(8080);
This works fine, and gives the expected output:
{"error":null,"data":{"albums":["testdir1","testdir2"]}}
However, I intended to replace the iterator with the async.each function.
I ended up with this:
function load_albums(callback) {
    fs.readdir("albums", function(err, content) {
        console.log(content);
        if (err) {
            callback(err);
            return;
        }
        var directories = [];
        async.each(content, function(item, callback2) {
            fs.stat("albums/" + item, function(err, stats) {
                if (stats.isDirectory()) {
                    directories.push(item);
                }
                callback2();
            });
        });
        callback(null, directories);
    });
}
However, this doesn't seem to work, as "albums" seems to be empty now:
{"error":null,"data":{"albums":[]}}
What am I missing here? I guess it has something to do with calling the fs.stat() function, but I'm unsure about what I'm doing wrong.
async.each() takes three arguments. You are not passing the last one, which is the one that tells you when the iteration is done. You also haven't implemented error handling on fs.stat(). You can change to this:
function load_albums(callback) {
    fs.readdir("albums", function(err, content) {
        console.log(content);
        if (err) {
            callback(err);
            return;
        }
        var directories = [];
        async.each(content, function(item, callback2) {
            fs.stat("albums/" + item, function(err, stats) {
                if (!err && stats.isDirectory()) {
                    directories.push(item);
                }
                callback2(err);
            });
        }, function(err) {
            callback(err, directories);
        });
    });
}
As answered by #jfriend00 final callback is third parameter of asyn.each. Currently this callback is running without waiting for async.each to complete.
Also you're serving albums for all the request. They should be served on a particular resource URL like /albums or /albums/
I have made these modifications to the code, now it loads albums on http://localhost:8080/albums otherwise it returns 'No Content.
var http = require('http');
var fs = require('fs');
var async = require('async');

function load_albums(loadCompleteCallback) {
    fs.readdir("albums", function(err, content) {
        console.log(content);
        if (err) {
            loadCompleteCallback(err);
            return;
        }
        var directories = [];
        async.each(content, function(item, doneCallback) {
            fs.stat("albums/" + item, function(err, stats) {
                if (stats.isDirectory()) {
                    directories.push(item);
                }
                return doneCallback(err);
            });
        }, function (err) {
            loadCompleteCallback(err, directories);
        });
    });
}

function handle_request(request, response) {
    console.log("requested path: " + request.url);
    if (request.url.match(/^\/albums[\/]?$/)) {
        load_albums(function(err, albums) {
            if (err) {
                response.writeHead(503, {"Content-Type": "application/json"});
                response.end(JSON.stringify(err) + "\n");
                return;
            }
            var out = { error: null,
                        data: { albums: albums } };
            response.writeHead(200, { "Content-Type" : "application/json" });
            response.end(JSON.stringify(out) + "\n");
        });
    } else {
        response.writeHead(200, { "Content-Type" : "application/json" });
        response.end("No Content\n");
    }
}

var s = http.createServer(handle_request);
s.listen(8080);
console.log("server running at : http://localhost:" + 8080);
I've set up a NodeJS server which can be accessed by a client. Every once in a while it's necessary to let the server connect to a second server and feed the information retrieved back to the client.
Connecting to the second server is the easy part, but to be honest I have no idea how to send it back to the client. res.write seems to be forbidden during the connection with the second server.
The connection from the client is handled by handleGetRequest. The connection with the second server starts at http.get.
var http = require('http');
var url = require('url');

var server = http.createServer(function(req, res) {
    var url_parsed = url.parse(req.url, true);
    if (req.method === 'GET') {
        handleGetRequest(res, url_parsed);
    } else {
        res.end('Method not supported');
    }
});

handleGetRequest = function(res, url_parsed) {
    if (url_parsed.path == '/secondary') {
        var OPTIONS = {
            hostname: "localhost",
            port: "8900",
            path: "/from_primary"
        };
        http.get(OPTIONS, function(resget) {
            resget.on('data', function(chunk) {
                // either store 'chunk' for later use or send directly
            });
        }).on('error', function(e) {
            console.log("Error " + e.message);
        });
    } else {
        res.writeHead(404);
    }
    res.end('Closed');
};

server.listen(8000);
How do I send the chunk from http.request to the client?
I think passing a callback to handleGetRequest will fix this issue:
if (req.method === 'GET') {
    handleGetRequest(url_parsed, function (err, response) {
        if (err) {
            // Note: sendStatus/json are Express-style helpers; with the plain
            // http module you would use res.writeHead(...) and res.end(...)
            return res.sendStatus(500);
        }
        res.json(response);
    });
} else {
    res.end('Method not supported');
}

handleGetRequest = function (url_parsed, callback) {
    // OPTIONS ...
    http.get(OPTIONS, function(resget) {
        var data = '';
        resget.on('data', function(chunk) {
            data += chunk;
        });
        resget.on('end', function() {
            callback(null, data);
        });
    }).on('error', function(e) {
        callback(e);
    });
};
Thanks to #TalgatMedetbekov for the suggestions. I managed to implement it like this:
var http = require('http');
var url = require('url');

var server = http.createServer(function(req, res) {
    var url_parsed = url.parse(req.url, true);
    if (req.method === 'GET') {
        handleGetRequest(res, url_parsed);
    } else {
        res.end('Method not supported');
    }
});

handleGetSecondaryRequest = function(callback, res) {
    var OPTIONS = {
        hostname: "localhost",
        port: "8900",
        path: "/from_primary"
    };
    var data = null;
    http.get(OPTIONS, function(func, data) {
        func.on('data', function(chunk) {
            data += chunk;
        });
        func.on('end', function() {
            callback(res, data);
        });
    }).on('error', function(e) {
        callback(res, e);
    });
};

var secReqCallback = function(res, recData) {
    res.write(recData);
    res.end("END");
};

handleGetRequest = function(res, url_parsed) {
    if (url_parsed.path == '/secondary') {
        handleGetSecondaryRequest(secReqCallback, res);
    } else {
        res.writeHead(404);
    }
};

server.listen(8000);
It works, kind of. There's an 'undefined' in front of the string whose cause I can't find, but the basic functionality works perfectly.
The callback construction is necessary to deal with the asynchronous nature of Node.js.
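For what it's worth, the stray 'undefined' most likely comes from the second parameter in http.get(OPTIONS, function(func, data) { ... }): the response callback only receives one argument, so that inner data starts out undefined (shadowing the outer variable), and data += chunk then produces a string beginning with "undefined". A minimal sketch of the handler without the shadowed parameter, keeping the same OPTIONS and callback convention as above:

handleGetSecondaryRequest = function(callback, res) {
    var OPTIONS = {
        hostname: "localhost",
        port: "8900",
        path: "/from_primary"
    };
    http.get(OPTIONS, function(response) {
        var data = ""; // accumulate into an empty string, not null/undefined
        response.on('data', function(chunk) {
            data += chunk;
        });
        response.on('end', function() {
            callback(res, data);
        });
    }).on('error', function(e) {
        callback(res, e.message);
    });
};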