Sending multiple responses from Node.js using python-shell - node.js

I am trying to execute a few Python scripts inside Node.js. The code is shown below. What I am trying to do is execute different Python scripts inside a for loop, one by one, and send a JSON response to the client as soon as each script finishes.
var PythonShell = require('python-shell');
var express = require('express'), app = express();
app.get('/', function (req, res) {
    res.setHeader('Content-Type', 'text/html');
    pl_list = ["test", "test2"];
    for (var i = 0; i <= pl_list.length - 1; i++) {
        output = "";
        var pyshell = new PythonShell('./' + pl_list[i] + '.py');
        pyshell.on('message', function (message) {
            console.log(message);
            output += message;
        });
        pyshell.end(function (err) {
            if (err) {
                console.log('error occured ---- ' + err);
            } else {
                console.log('update finished');
                res.write(JSON.stringify({"finsihed": true, "product_line": pl_list[i]}));
            }
        });
    }
    //res.end()
});
app.listen(5000, function () {
console.log('The web server is running. Please open http://localhost:5000/ in your browser.');
});
Unfortunately, I am getting the response as {"finsihed":true}. The actual output should be
{"finsihed":true, "product_line":"test"}{"finsihed":true, "product_line":"test2"}
Can anybody tell me what I am doing wrong here? Thanks in advance!

The execution of your Python scripts is asynchronous, so by the time you write the response to the client with this line, the value of i has already changed:
res.write(JSON.stringify({"finsihed":true, "product_line":pl_list[i]}));
Just log the value of i with console.log before the above line and you will see that i equals 2 both times (the for loop has already finished and incremented it). And because pl_list[2] is undefined, JSON.stringify drops the "product_line" attribute from the serialized object.
If you want to "save" the value of i for each iteration, you need a closure.
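A minimal sketch of the same trap, with setTimeout standing in for the asynchronous end callback:
for (var i = 0; i < 2; i++) {
    setTimeout(function () {
        // by the time this runs, the loop has finished and i is 2 for both callbacks
        console.log(i);
    }, 0);
}
// prints 2 twice; with a closure (or a block-scoped let), each callback keeps its own value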
This code should work:
var PythonShell = require('python-shell');
var express = require('express'),
app = express();
app.get('/', function (req, res) {
res.setHeader('Content-Type', 'text/html');
var nbFinishedScripts = 0;
pl_list = ["test", "test2"]
for (var i = 0; i <= pl_list.length - 1; i++) {
output = "";
var pyshell = new PythonShell('./' + pl_list[i] + '.py')
pyshell.on('message', function (message)
{
console.log(message);
output += message;
});
// closure
(function (i) {
return function () {
pyshell.end(function (err) {
if (err) {
console.log('error occured ---- ' + err);
} else {
console.log('update finished');
res.write(JSON.stringify({
"finsihed": true,
"product_line": pl_list[i]
}));
}
nbFinishedScripts++;
// end the response when the number of finished scripts equals the number of scripts
if (nbFinishedScripts === pl_list.length) {
res.end();
}
});
};
})(i)(); // immediately invoke the function
}
});
app.listen(5000, function () {
console.log('The web server is running. Please open http://localhost:5000/ in your browser.');
});
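As an aside (assuming a Node version with ES2015 block scoping), declaring the loop variable with let gives every iteration its own binding, so the IIFE is no longer needed; a sketch of just the changed loop header:
for (let i = 0; i < pl_list.length; i++) {
    // same body as above, minus the wrapping (function (i) { ... })(i)();
    // each callback created here closes over this iteration's own i
}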
Edit - a version of the same idea using Promises:
var PythonShell = require('python-shell');
var express = require('express'),
app = express();
var executePythonScript = function (script) {
return new Promise(function (resolve, reject) {
var pyshell = new PythonShell('./' + script + '.py');
pyshell.end(function (err) {
if (err) {
reject(err);
} else {
resolve(script);
}
});
});
};
app.get('/', function (req, res) {
res.setHeader('Content-Type', 'text/html');
var pl_list = ["test", "test2"];
Promise
.all(pl_list.map(executePythonScript))
.then(function (scripts) {
scripts.forEach(function (script) {
res.write(JSON.stringify({
finsihed: true,
product_line: script
}));
});
res.end();
})
.catch(function (err) {
res.end();
});
});
app.listen(5000, function () {
console.log('The web server is running. Please open http://localhost:5000/ in your browser.');
});
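Note that Promise.all only resolves once every script has finished, so nothing is written until the slowest one is done. If you want each result flushed to the client as soon as its script completes (closer to the original intent), here is a sketch using the same executePythonScript helper inside the route handler:
var writes = pl_list.map(function (script) {
    return executePythonScript(script).then(function (name) {
        // write each result as soon as its own script finishes
        res.write(JSON.stringify({ finsihed: true, product_line: name }));
    });
});
// end the response when all scripts have finished, or as soon as one of them fails
Promise.all(writes).then(function () { res.end(); }, function () { res.end(); });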

Related

Node.JS downloading hundreds of files simultaneously

I am trying to download more than 100 files at the same time. When I execute the download function, my MacBook freezes (it becomes unable to execute new tasks); on Windows it doesn't freeze, but nothing downloads either. In both cases there is no download progress (the network stays idle).
Here is my download module:
var express = require('express');
var router = express.Router();
var fs = require('fs');
var youtubedl = require('youtube-dl');
var links = require('../models/Links');
router.get('/', function (req, res, next) {
links.find({dlStatus: false}, function (err, docs) {
if (err) {
console.log(err);
res.end();
} else if (!docs) {
console.log('No incomplete downloads!');
res.end();
} else {
for (var i = 0; i < docs.length; i++) {
//todo scraping
var video = youtubedl(docs[i].url, [], {cwd: __dirname});
// Will be called when the download starts.
video.on('info', function (info) {
console.log('Download started');
console.log(info);
});
video.pipe(fs.createWriteStream('./downloads/' + docs[i].id + '-' + i + '.mp4'));
video.on('complete', function complete(info) {
links.findOneAndUpdate({url: info.webpage_url}, {dlStatus: true}, function (err, doc) {
if (err)console.log(err);
else console.log('Download completed!')
});
});
}
}
});
});
module.exports = router;
Now can anyone please help me here? I am using this module for downloading files.
The solution is to use the async library in this case.
Try it this way, with async.each():
var express = require('express');
var router = express.Router();
var fs = require('fs');
var youtubedl = require('youtube-dl');
var links = require('../models/Links');
var async = require('async')
router.get('/', function (req, res, next) {
links.find({dlStatus: false}, function (err, docs) {
if (err) {
console.log(err);
res.end();
} else if (!docs) {
console.log('No incomplete downloads!');
res.end();
} else {
async.each(docs,function(doc,cb){
var video = youtubedl(doc.url, [], {cwd: __dirname});
// Will be called when the download starts.
video.on('info', function (info) {
console.log('Download started');
console.log(info);
});
video.pipe(fs.createWriteStream('./downloads/' + doc.id + '.mp4')); // one file per document; docs[i] and i are not available inside async.each
video.on('complete', function complete(info) {
links.findOneAndUpdate({url: info.webpage_url}, {dlStatus: true}, function (err, doc) {
if (err){
console.log(err);
cb(err);
}
else {
console.log('Download completed!');
cb()
}
});
});
},function(err){
if(err)
return console.log(err);
console.log("Every thing is done,Here!!");
})
}
});
});
module.exports = router;
And you can throttle the downloads into batches too, using async.eachLimit().
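For example, to run at most five downloads at a time (the limit of 5 is just an illustrative value), a sketch:
async.eachLimit(docs, 5, function (doc, cb) {
    // same per-document logic as in async.each above
    var video = youtubedl(doc.url, [], {cwd: __dirname});
    video.pipe(fs.createWriteStream('./downloads/' + doc.id + '.mp4'));
    video.on('complete', function (info) {
        links.findOneAndUpdate({url: info.webpage_url}, {dlStatus: true}, function (err) {
            cb(err);
        });
    });
}, function (err) {
    if (err) return console.log(err);
    console.log('All downloads finished');
});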

async.eachSeries runs only once with async.waterfall inside for each iteration

I am new to the async library. I am using async.eachSeries with an async.waterfall inside each iteration, but the waterfall seems to run only once.
Here is my code:
var fs = require('fs'),
async = require('async'),
Client = require('node-rest-client').Client;
// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.
async.eachSeries(jsonOutput.results, function(account, callback) {
var dataObject = {};
dataObject.updatetime = new Date();
var setAccountInfoURL = ""; // Data Update REST API Request
async.waterfall([
function setAccountInfo(updateCallback) {
// client.get(setAccountInfoURL, function (data, response) {
// var jsonOutput = JSON.parse(data.toString('utf8'));
updateCallback(null, "output", account)
// });
},
function saveAccountInfo(jsonOutput, account, updateCallback) {
var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
if(err) {
console.log(err);
}
console.log("JSON saved to " + "debuginfo.json");
updateCallback(null);
});
}
],function asyncComplete(err) {
if (err) {
console.warn('Error setting account info.', err);
}
console.log('async completed');
});
}, function(err){
if (err) {
console.log('error in loop');
}
console.log('loop completed');
});
Output:
124
JSON saved to debuginfo.json
async completed
Any help is really appreciated.
I found my mistake: I was missing the call to the eachSeries callback after each iteration, once its waterfall completed.
var fs = require('fs'),
async = require('async'),
Client = require('node-rest-client').Client;
// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.
async.eachSeries(jsonOutput.results, function(account, callback) {
var dataObject = {};
dataObject.updatetime = new Date();
var setAccountInfoURL = ""; // Data Update REST API Request
async.waterfall([
function setAccountInfo(updateCallback) {
// client.get(setAccountInfoURL, function (data, response) {
// var jsonOutput = JSON.parse(data.toString('utf8'));
updateCallback(null, "output", account)
// });
},
function saveAccountInfo(jsonOutput, account, updateCallback) {
var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
if(err) {
console.log(err);
}
console.log("JSON saved to " + "debuginfo.json");
updateCallback(null);
});
}
],function asyncComplete(err) {
if (err) {
console.warn('Error setting account info.', err);
}
console.log('async completed');
callback(null); // this is the change.
});
}, function(err){
if (err) {
console.log('error in loop');
}
console.log('loop completed');
});
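As a small simplification (a sketch with the same behaviour, minus the extra logging), the eachSeries callback can be passed directly as the waterfall's final callback, since waterfall invokes it with the error (or null) once the chain finishes:
async.eachSeries(jsonOutput.results, function (account, callback) {
    async.waterfall([
        // ... the same setAccountInfo and saveAccountInfo steps as above ...
    ], callback); // waterfall reports (err) here, which moves eachSeries on to the next item
}, function (err) {
    if (err) console.log('error in loop');
    console.log('loop completed');
});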

Parse Objects not created from node in heroku

I have the following code running on a Node server on Heroku. The trouble I am having is that the application frequently fails to create a new parse.com object on POST. What is strange is that this code works 100% of the time on my local machine; running it through Heroku introduces the issue.
I run a Heroku log tail when the application posts and it does not show any exceptions/errors, so I'm stumped as to what to look for.
BTW - I realize this code isn't the prettiest; this is my first attempt to get a Node/Heroku/Parse application up and running.
var http = require('http');
var url = require('url');
var path = require('path');
var fs = require('fs');
var Parse = require('parse/node').Parse;
var mime = require('mime');
var server = http.createServer(router).listen(process.env.PORT || 5000);
Parse.initialize("key", "key");
console.log("Parse initialized");
function router (req, res) {
var pathname = url.parse(req.url, true).pathname;
if (pathname.slice(0, 4) === '/api') {
apiHandler(req, res);
} else {
if (pathname[pathname.length - 1] === '/')
pathname += 'index.html';
staticFileHandler(pathname, res);
}
}
function staticFileHandler (pathname, res) {
fs.readFile(__dirname + '/public_html' + pathname, function (err, data) {
if (err) return errHandler(err, res);
console.log('[200]: ' + pathname);
res.setHeader('Content-Type', mime.lookup(path.extname(pathname)));
res.end(data);
});
}
function errHandler (err, res) {
if (err.code === 'ENOENT') {
res.statusCode = 404;
res.end('File not found!');
console.log('[404]: File not found: ' + err.path);
} else {
console.error(err);
}
}
function apiHandler (req, res) {
if (req.method === 'GET') {
//send back a list of todos
// var toDo = new Parse.Object("ToDo");
var parseQuery = new Parse.Query("ToDo");
parseQuery.find({
success: function(toDoList){
res.setHeader('Content-Type', mime.lookup('json'));
res.end(JSON.stringify(toDoList));
},
error: function(toDoList, error) {
// error is an instance of Parse.Error.
console.log('Error encountered while getting Parse objects: ' + error.message);
}
});
} else if (req.method === "POST"){
var body = "";
req.on('data', function (chunk) {
body += chunk;
});
var today = new Date();
req.on('end', function () {
var toDo = new Parse.Object("ToDo");
toDo.set('Description', body);
toDo.set('Done', false);
toDo.set('DueDate', today);
toDo.save(null, {
success: function(toDo) {
// Execute any logic that should take place after the object is saved.
console.log('New object created with objectId: ' + toDo.id);
},
error: function(toDo, error) {
// Execute any logic that should take place if the save fails.
// error is a Parse.Error with an error code and message.
console.log('Failed to create new object, with error code: ' + error.message);
}
});
});
res.end();
}
}
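One observation, not a confirmed fix: in the POST branch, res.end() runs synchronously, before toDo.save has completed, so the response is finished while the save is still in flight. A sketch that only ends the response once the save callback fires, using the same success/error callback style as the code above:
req.on('end', function () {
    var toDo = new Parse.Object("ToDo");
    toDo.set('Description', body);
    toDo.set('Done', false);
    toDo.set('DueDate', today);
    toDo.save(null, {
        success: function (saved) {
            console.log('New object created with objectId: ' + saved.id);
            res.end(saved.id); // respond only after the save has finished
        },
        error: function (saved, error) {
            console.log('Failed to create new object, with error code: ' + error.message);
            res.statusCode = 500;
            res.end();
        }
    });
});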

NodeJS data throughput

I've set up a NodeJS server which can be accessed by a client. Every once in a while it's necessary to let the server connect to a second server and feed the information retrieved back to the client.
Connecting to the second server is the easy part, but to be honest I have no idea how to send the retrieved data back to the client. res.write seems to be forbidden while the connection with the second server is open.
The connection from the client is handled by handleGetRequest. The connection with the second server starts at http.get.
var http = require('http');
var url = require('url');
var server = http.createServer(function(req, res) {
var url_parsed = url.parse(req.url, true);
if (req.method ==='GET') {
handleGetRequest(res, url_parsed);
} else {
res.end('Method not supported');
}
});
handleGetRequest = function(res, url_parsed) {
if (url_parsed.path == '/secondary') {
var OPTIONS = {
hostname: "localhost",
port: "8900",
path: "/from_primary"
}
http.get(OPTIONS, function(resget) {
resget.on('data', function(chunk) {
// either store 'chunk' for later use or send directly
});
}).on('error', function(e) {
console.log("Error " + e.message);
});
} else {
res.writeHead(404);
}
res.end('Closed');
};
server.listen(8000);
How do I send the chunk from http.request to the client?
I think passing a callback to handleGetRequest will fix this issue:
if (req.method === 'GET') {
handleGetRequest(url_parsed, function (err, response) {
if (err) {
return res.sendStatus(500);
}
res.json(response);
});
} else {
res.end('Method not supported');
}
handleGetRequest = function (url_parsed, callback) {
// OPTIONS ...
http.get(OPTIONS, function(resget) {
var data = '';
resget.on('data', function(chunk) {
data += chunk;
});
resget.on('end', function() {
callback(null, data);
});
}).on('error', function(e) {
callback(e);
});
}
Thanks to @TalgatMedetbekov for the suggestions. I managed to implement it like this:
var http = require('http');
var url = require('url');
var server = http.createServer(function(req, res) {
var url_parsed = url.parse(req.url, true);
if (req.method ==='GET') {
handleGetRequest(res, url_parsed);
} else {
res.end('Method not supported');
}
});
handleGetSecondaryRequest = function(callback, res) {
var OPTIONS = {
hostname: "localhost",
port: "8900",
path: "/from_primary"
}
var data = null;
http.get(OPTIONS, function(func, data) {
func.on('data', function(chunk) {
data += chunk;
});
func.on('end', function() {
callback(res, data);
});
}).on('error', function(e) {
callback(res, e);
})
};
var secReqCallback = function(res, recData)
{
res.write(recData);
res.end("END");
};
handleGetRequest = function(res, url_parsed) {
if (url_parsed.path == '/secondary') {
handleGetSecondaryRequest(secReqCallback, res);
} else {
res.writeHead(404);
}
};
server.listen(8000);
It works, kind of. There's an 'undefined' in front of the string which I can't find the cause for, but the basic functionality works perfectly.
The callback construction is necessary to cope with the asynchronous nature of NodeJS.
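Regarding the stray 'undefined': it most likely comes from the second parameter in http.get(OPTIONS, function(func, data) {...}). The http.get callback only receives the response object, so that inner data parameter shadows the outer variable and starts out undefined, and data += chunk then concatenates onto the string "undefined". A sketch of the corrected callback inside handleGetSecondaryRequest:
http.get(OPTIONS, function (func) {
    var data = '';               // accumulate locally, starting from an empty string
    func.on('data', function (chunk) {
        data += chunk;
    });
    func.on('end', function () {
        callback(res, data);
    });
}).on('error', function (e) {
    callback(res, e);
});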

Using supertest to check didFlash after a redirect

I'm trying to test what happens when the user controller's destroy callback receives an error. When destroy receives an error, it does the following:
flash('error', 'Can not destroy user');
redirect(path_to.users);
This is the test so far:
it('should fail on DELETE /users/:id if destroy receives an error', function (done) {
var User = app.models.User;
var user = new UserStub();
User.find = sinon.spy(function (id, callback) {
callback(null, user);
});
user.destroy = sinon.spy(function (callback) {
callback(new Error());
});
request(app)
.del('/users/55')
.end(function (err, res) {
res.header.location.should.include('/users');
app.didFlash('error').should.be.true;
done();
});
});
I've seen this question, and the res.header portion works as expected. However, I'm still confused about how to test the flash that happens after that redirect.
I ended up changing the users_controller to use the following code for a destroy callback (the redirect was having other issues):
if (error) {
flash('error', 'Can not destroy user');
} else {
flash('info', 'User successfully removed');
}
send(302, "'" + pathTo.users + "'");
The init.js file used with mocha.js has a few pieces in it when initializing the app object (some irrelevant code was omitted):
global.getApp = function(done) {
var app = require('compound').createServer();
app.renderedViews = [];
app.flashedMessages = {};
app._render = app.render;
app.render = function (viewName, opts, fn) {
app.renderedViews.push(viewName);
// Deep-copy flash messages
var flashes = opts.request.session.flash;
for(var type in flashes) {
app.flashedMessages[type] = [];
for(var i in flashes[type]) {
app.flashedMessages[type].push(flashes[type][i]);
}
}
return app._render.apply(this, arguments);
};
app.use(function (req, res, next) {
app._request = req;
next();
});
app.didFlash = function (type) {
var flashes = app._request.session.flash;
return !!(app.flashedMessages[type] || (flashes && flashes[type]));
};
return app;
};
The original way of checking for didFlash was limited to only rendering, but this checks if a flash message is created before a redirect or send.
