Current POST request:
app.post('/rImage', rawBody, function (req, res) {
    // ---- check here whether this process is already running? ----
    var data = req.rawBody;
    var escapedData = data.replace(/([<>|&^])/g, "\^$1"); // Windows CMD escaping
    exec('cscript /nologo C:/PS/Automation/render.vbs C:/PS/Automation/image.jsx"' + escapedData + '"',
        function (error, stdout, stderr) {
            console.log('stdout: ' + escapedData);
            if (error !== null) {
                console.log('exec error: ' + error);
            }
            res.send(stdout);
            // request finished
        });
});
How could I implement a queue system for this, so that the next request fires off only when the last one has finished?
Thanks in advance!
There might be libraries for this. However, you could hack it like this:
var queue = [];
var active = false;

var check = function () {
    if (!active && queue.length > 0) {
        var f = queue.shift();
        f();
    }
};

app.post("..", body, function (req, res) {
    queue.push(function () {
        active = true;
        exec("yourprogram arg1 arg2..", function () {
            // this is the async callback
            active = false;
            check();
        });
    });
    check();
});
Here's a fiddle that shows the functionality: https://jsfiddle.net/fc15afw5/1/
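For concreteness, here is a minimal sketch of how the queue above could be combined with the /rImage route from the question. The cscript command and the rawBody middleware are taken from the question; everything else is an untested adaptation, not a drop-in fix:
var queue = [];
var active = false;

function check() {
    if (!active && queue.length > 0) {
        var job = queue.shift();
        job();
    }
}

app.post('/rImage', rawBody, function (req, res) {
    var escapedData = req.rawBody.replace(/([<>|&^])/g, "\^$1"); // Windows CMD escaping, as in the question
    queue.push(function () {
        active = true;
        exec('cscript /nologo C:/PS/Automation/render.vbs C:/PS/Automation/image.jsx "' + escapedData + '"',
            function (error, stdout, stderr) {
                if (error !== null) {
                    console.log('exec error: ' + error);
                }
                res.send(stdout);  // answer this request
                active = false;    // mark the worker as free
                check();           // start the next queued render, if any
            });
    });
    check();
});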
I am new to Node.js and these days I am learning the http module.
I wrote a JS program for Node that checks whether the request's Content-Type is application/json; if so, it should log the body from the 'readable' event.
What happens is: on a single request the 'readable' event is called twice; the first time it prints the value, and the second time it returns null.
Here is the code:
var connect = require('connect');

function jsonParse(req, res, next) {
    if (req.headers['content-type'] == 'application/json' && req.method == 'POST') {
        var readData = '';
        req.on('readable', function() {
            console.log('inside readable ' + req.read());
            readData += req.read();
        });
        req.on('end', function() {
            try {
                req.body = JSON.parse(readData);
            } catch (e) {
            }
            next();
        });
    } else {
        next();
    }
}

connect()
    .use(jsonParse)
    .use(function(req, res) {
        if (req.body) {
            res.end('JSON parsed !' + req.body);
        } else {
            res.end('no json detected !');
        }
    }).listen(3000);
I am calling this with a JSON POST request, and the output is:
inside readable {
"foo":"asdf"
}
inside readable null
Please guide me on how I can handle this. Thanks in advance.
You should only read from the request while data is actually available, i.e. while req.read() does not return null; the 'readable' event also fires once more at the end of the stream, which is why your second call returns null. You can handle this with a while loop.
Replace:
var readData = '';
req.on('readable', function() {
    console.log('inside readable ' + req.read());
    readData += req.read();
});
With:
var readData = '';
req.on('readable', function() {
    var chunk;
    while (null !== (chunk = req.read())) {
        readData += chunk;
    }
});
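As a side note (an alternative sketch, not part of the answer above): you can avoid the null handling entirely by accumulating the body with 'data' events, which only fire when a chunk is actually available:
var readData = '';
req.setEncoding('utf8');            // deliver chunks as strings instead of Buffers
req.on('data', function (chunk) {   // 'data' fires only when there is data to consume
    readData += chunk;
});
req.on('end', function () {
    // readData now contains the complete request body
});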
I am trying to execute a few Python scripts from Node.js. The code is shown below. What I am trying to do is execute a different Python script inside a for loop, one after another, and send a JSON response to the client as soon as each script finishes.
var PythonShell = require('python-shell');
var express = require('express'), app = express();

app.get('/', function (req, res) {
    res.setHeader('Content-Type', 'text/html');
    pl_list = ["test", "test2"]
    for (var i = 0; i <= pl_list.length - 1; i++) {
        output = "";
        var pyshell = new PythonShell('./' + pl_list[i] + '.py')
        pyshell.on('message', function (message) {
            console.log(message);
            output += message;
        });
        pyshell.end(function (err) {
            if (err) {
                console.log('error occured ---- ' + err);
            } else {
                console.log('update finished');
                res.write(JSON.stringify({"finsihed": true, "product_line": pl_list[i]}));
            }
        });
    }
    //res.end()
});

app.listen(5000, function () {
    console.log('The web server is running. Please open http://localhost:5000/ in your browser.');
});
Unfortunately I am getting the response as {"finsihed":true}. The actual output should be:
{"finsihed":true, "product_line":"test"}{"finsihed":true, "product_line":"test2"}
Can anybody tell me what I am doing wrong here? Thanks in advance!
The execution of your Python scripts is asynchronous, so by the time you write the response to the client with this line, the value of i has already changed:
res.write(JSON.stringify({"finsihed":true, "product_line":pl_list[i]}))
Just display the value of i with console.log before the above line and you will see that i equals 2 both times (because the for loop has already incremented it past the end). And because pl_list[2] is undefined, the JSON serialization drops the "product_line" attribute.
If you want to "save" the value of i for each iteration, you have to use a closure.
This code should work:
var PythonShell = require('python-shell');
var express = require('express'),
app = express();
app.get('/', function (req, res) {
    res.setHeader('Content-Type', 'text/html');
    var nbFinishedScripts = 0;
    pl_list = ["test", "test2"]
    for (var i = 0; i <= pl_list.length - 1; i++) {
        output = "";
        var pyshell = new PythonShell('./' + pl_list[i] + '.py')
        pyshell.on('message', function (message) {
            console.log(message);
            output += message;
        });
        // closure
        (function (i) {
            return function () {
                pyshell.end(function (err) {
                    if (err) {
                        console.log('error occured ---- ' + err);
                    } else {
                        console.log('update finished');
                        res.write(JSON.stringify({
                            "finsihed": true,
                            "product_line": pl_list[i]
                        }));
                    }
                    nbFinishedScripts++;
                    // end the response when the number of finished scripts equals the number of scripts
                    if (nbFinishedScripts === pl_list.length) {
                        res.end();
                    }
                });
            };
        })(i)(); // immediately invoke the returned function
    }
});
app.listen(5000, function () {
console.log('The web server is running. Please open http://localhost:5000/ in your browser.');
});
Edit: an alternative using Promises:
var PythonShell = require('python-shell');
var express = require('express'),
app = express();
var executePythonScript = function (script) {
    return new Promise(function (resolve, reject) {
        var pyshell = new PythonShell('./' + script + '.py');
        pyshell.end(function (err) {
            if (err) {
                reject(err);
            } else {
                resolve(script);
            }
        });
    });
};

app.get('/', function (req, res) {
    res.setHeader('Content-Type', 'text/html');
    var pl_list = ["test", "test2"];
    Promise
        .all(pl_list.map(executePythonScript))
        .then(function (scripts) {
            scripts.forEach(function (script) {
                res.write(JSON.stringify({
                    finsihed: true,
                    product_line: script
                }));
            });
            res.end();
        })
        .catch(function (err) {
            res.end();
        });
});
app.listen(5000, function () {
console.log('The web server is running. Please open http://localhost:5000/ in your browser.');
});
I have the following code running on a Node server at Heroku. The trouble I am having is that the application frequently fails to create a new Parse.com object on POST. What is strange is that this code works 100% of the time on my local machine; running through Heroku introduces the issue.
I ran a Heroku log tail when the application posts and it does not show any exceptions/errors, so I'm stumped as to what to look for.
BTW - I realize this code isn't the prettiest; this is my first attempt to get a Node/Heroku/Parse application up and running.
var http = require('http');
var url = require('url');
var path = require('path');
var fs = require('fs');
var Parse = require('parse/node').Parse;
var mime = require('mime');
var server = http.createServer(router).listen(process.env.PORT || 5000);
Parse.initialize("key", "key");
console.log("Parse initialized");
function router (req, res) {
var pathname = url.parse(req.url, true).pathname;
if (pathname.slice(0, 4) === '/api') {
apiHandler(req, res);
} else {
if (pathname[pathname.length - 1] === '/')
pathname += 'index.html';
staticFileHandler(pathname, res);
}
}
function staticFileHandler (pathname, res) {
fs.readFile(__dirname + '/public_html' + pathname, function (err, data) {
if (err) return errHandler(err, res);
console.log('[200]: ' + pathname);
res.setHeader('Content-Type', mime.lookup(path.extname(pathname)));
res.end(data);
});
}
function errHandler (err, res) {
if (err.code === 'ENOENT') {
res.statusCode = 404;
res.end('File not found!');
console.log('[404]: File not found: ' + err.path);
} else {
console.error(err);
}
}
function apiHandler (req, res) {
if (req.method === 'GET') {
//send back a list of todos
// var toDo = new Parse.Object("ToDo");
var parseQuery = new Parse.Query("ToDo");
parseQuery.find({
success: function(toDoList){
res.setHeader('Content-Type', mime.lookup('json'));
res.end(JSON.stringify(toDoList));
},
error: function(toDoList, error) {
// error is an instance of Parse.Error.
console.log('Error encountered while getting Parse objects: ' + error.message);
}
});
} else if (req.method === "POST"){
var body = "";
req.on('data', function (chunk) {
body += chunk;
});
var today = new Date();
req.on('end', function () {
var toDo = new Parse.Object("ToDo");
toDo.set('Description', body);
toDo.set('Done', false);
toDo.set('DueDate',today )
toDo.save(null, {
success: function(toDo) {
// Execute any logic that should take place after the object is saved.
console.log('New object created with objectId: ' + toDo.id);
},
error: function(toDo, error) {
// Execute any logic that should take place if the save fails.
// error is a Parse.Error with an error code and message.
console.log('Failed to create new object, with error code: ' + error.message);
}
});
});
res.end();
}
}
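One detail worth pointing out in the POST branch, as an observation rather than a confirmed cause of the Heroku failures: res.end() is called before toDo.save() completes, so the client never learns whether the save succeeded. A sketch that defers the response until Parse replies:
req.on('end', function () {
    var toDo = new Parse.Object("ToDo");
    toDo.set('Description', body);
    toDo.set('Done', false);
    toDo.set('DueDate', new Date());
    toDo.save(null, {
        success: function (saved) {
            res.end('Created object with id ' + saved.id); // respond only once the save is confirmed
        },
        error: function (obj, error) {
            res.statusCode = 500;
            res.end('Failed to create new object: ' + error.message);
        }
    });
});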
I would like to print documents through HTTP requests in Node.js. Is there any way to send print jobs to and query a CUPS server using Node.js? I found this project while exploring around; is it the only/correct way to do that?
You could use the shell to do so. I built a project some time ago where I needed to read a certain hashtag from Instagram and print the photos uploaded with that hashtag, using a Raspberry Pi and a photo printer.
var fs = require('fs'),
    exec = require('child_process').exec;

exec("lp /path/to/somepic.jpg");

// get printer jobs
exec("lpq", function (error, stdout, stderr) {
    console.log('stdout: ' + stdout);
    console.log('stderr: ' + stderr);
    if (error !== null) {
        console.log('exec error: ' + error);
    }
});
The command lp /path/to/somepic.jpg sends /path/to/somepic.jpg to the default printer. The command lpq displays the printer queue. For more options, read the CUPS documentation.
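To tie this back to the original question about printing from HTTP requests, a rough sketch of an endpoint that pipes the request body to a temporary file and hands it to lp could look like this (the route, file naming and error handling are assumptions, not part of the answer above):
var http = require('http'),
    fs = require('fs'),
    exec = require('child_process').exec;

http.createServer(function (req, res) {
    if (req.method === 'POST' && req.url === '/print') {
        var tmpFile = '/tmp/print-job-' + Date.now();
        var out = fs.createWriteStream(tmpFile);
        req.pipe(out);
        out.on('finish', function () {
            // send the uploaded file to the default CUPS printer
            exec('lp ' + tmpFile, function (error, stdout, stderr) {
                if (error !== null) {
                    res.writeHead(500, {'Content-Type': 'text/plain'});
                    return res.end(stderr || String(error));
                }
                res.end(stdout); // e.g. "request id is MyPrinter-42 (1 file(s))"
            });
        });
    } else {
        res.writeHead(404);
        res.end();
    }
}).listen(3000);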
The following snippet seems useful. I have not tried it, as I am no longer working on this problem, but it may be helpful for others. Original source: https://gist.github.com/vodolaz095/5325917
var ipp = require('ipp'); //get it from there - https://npmjs.org/package/ipp - $npm install ipp
var request = require('request'); //get it from there - https://npmjs.org/package/request - $npm install request
var fs = require('fs');
function getPrinterUrls(callback) {
var CUPSurl = 'http://localhost:631/printers';//todo - change of you have CUPS running on other host
request(CUPSurl, function (error, response, body) {
if (!error && response.statusCode == 200) {
var printersMatches = body.match(/<TR><TD><A HREF="\/printers\/([a-zA-Z0-9-^"]+)">/gm);//i know, this is terrible, sorry(
var printersUrls = [];
var i;
if (printersMatches) {
for (i = 0; i < printersMatches.length; i++) {
var a = (/"\/printers\/([a-zA-Z0-9-^"]+)"/).exec(printersMatches[i]);
if (a) {
printersUrls.push(CUPSurl + '/' + a[1]);
}
}
}
}
callback(error, printersUrls);
});
};
function doPrintOnSelectedPrinter(printer, bufferToBePrinted, callback) {
printer.execute("Get-Printer-Attributes", null, function(err, printerStatus){
if(printerStatus['printer-attributes-tag']['printer-state']=='idle'){
//printer ready to work
//*/
printer.execute("Print-Job",
{
"operation-attributes-tag":{
"requesting-user-name":"nap",
"job-name":"testing"
},
"job-attributes-tag":{},
data:bufferToBePrinted
},
function (err, res) {
if (res.statusCode == 'successful-ok') {
var jobUri = res['job-attributes-tag']['job-uri'];
var tries = 0;
var t = setInterval(function () {
printer.execute("Get-Job-Attributes",
{"operation-attributes-tag":{'job-uri':jobUri}},
function (err2, job) {
// console.log(job);
if (err2) throw err2;
tries++;
if (job && job["job-attributes-tag"]["job-state"] == 'completed') {
clearInterval(t);
// console.log('Testins if job is ready. Try N '+tries);
callback(null, job);//job is succesefully printed!
}
if (tries > 50) {//todo - change it to what you need!
clearInterval(t);
printer.execute("Cancel-Job", {
"operation-attributes-tag":{
//"job-uri":jobUri, //uncomment this
//*/
"printer-uri":printer.uri, //or uncomment this two lines - one of variants should work!!!
"job-id":job["job-attributes-tag"]["job-id"]
//*/
}
}, function (err, res) {
if (err) throw err;
console.log('Job with id '+job["job-attributes-tag"]["job-id"]+'is being canceled');
});
callback(new Error('Job is canceled - too many tries and job is not printed!'), null);
}
});
}, 2000);
} else {
callback(new Error('Error sending job to printer!'), null);
}
});
//*/
} else {
callback(new Error('Printer '+printerStatus['printer-attributes-tag']['printer-name']+' is not ready!'),null);
}
});
}
function doPrintOnAllPrinters(data, callback) {
var b = new Buffer(data, 'binary');
getPrinterUrls(function (err, printers) {
if (err) throw err;
if (printers) {
for (var i = 0; i < printers.length; i++) {
var printer = ipp.Printer(printers[i]);
doPrintOnSelectedPrinter(printer, b, callback);
}
} else {
throw new Error('Unable to find printer. Do you have printer installed and accessible via CUPS?');
}
});
}
/*
Example of usage
*/
fs.readFile('package.json', function (err, data) {
doPrintOnAllPrinters(data, function (err, job) {
if (err) {
console.error('Error printing');
console.error(err);
} else {
console.log('Printed. Job parameters are: ');
console.log(job);
}
}
);
});
I have a Web server that reads and writes to a data file on disk. I'd like the file to only be written to by a single Web request at a time.
Here's an example program that illustrates my problem. It keeps a state file in "/tmp/rw.txt" and increments the integer contents on each Web hit. Running this program, and then running something like ab -n 10000 -c 1000 http://localhost:3000/, shows that the same value is read from the file by multiple hits, and it's written multiple times.
NOTE: I know about flock() and fs-ext. However, flock() will lock the file to the current process; since all the access here is in the same process, flock() doesn't work (and complicates this example considerably).
Also note that I'd usually use express, async, etc. to get most of this done; just sticking to the basics for the sake of example.
var http = require("http"),
fs = require("fs");
var stateFile = "/tmp/rw.txt";
var server = http.createServer(function(req, res) {
var writeNum = function(num) {
var ns = num.toString(10);
console.log("Writing " + ns);
fs.writeFile(stateFile, ns, function(err) {
if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
res.writeHead(200, {"Content-Type": "text/plain"});
res.end(ns);
}
});
};
switch (req.url) {
case "/reset":
writeNum(0);
break;
case "/":
fs.readFile(stateFile, function(err, data) {
if (err && err.code == "ENOENT") {
// First time, set it to zero
writeNum(0);
} else if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
writeNum(parseInt(data, 10) + 1);
}
});
break;
default:
res.writeHead(404, {"Content-Type": "text/plain"});
res.end("No such resource: " + req.url);
}
});
server.listen(3000);
Storing data in files is not the preferred way in a multi-user environment like a web server; databases are more suitable for this. But if you really want to stick with a file, I suggest using a buffer, that is, an in-memory object that you read from and write to, plus a separate function that periodically dumps its contents to disk, like this:
var server = http.createServer(function(req, res) {
    // ----- cut ----
    switch (req.url) {
        case "/reset":
            value = 0;
            break;
        case "/":
            value++;
            break;
        default:
            res.writeHead(404, {"Content-Type": "text/plain"});
            res.end("No such resource: " + req.url);
    }
    // ----- cut ----
});

(function() {
    var cb = arguments.callee;
    fs.writeFile(stateFile, value, function(err) {
        // handle error
        setTimeout(cb, 100); // schedule next call
    });
})();
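For reference, here is a self-contained sketch of the same buffer idea with the cut parts filled in; the response handling and the flush interval are assumptions:
var http = require("http"),
    fs = require("fs");

var stateFile = "/tmp/rw.txt";
var value = 0; // in-memory buffer; the file is only a periodic snapshot

var server = http.createServer(function (req, res) {
    switch (req.url) {
        case "/reset":
            value = 0;
            break;
        case "/":
            value++;
            break;
        default:
            res.writeHead(404, {"Content-Type": "text/plain"});
            return res.end("No such resource: " + req.url);
    }
    res.writeHead(200, {"Content-Type": "text/plain"});
    res.end(value.toString(10));
});
server.listen(3000);

// flush the in-memory value to disk roughly every 100 ms
(function flush() {
    fs.writeFile(stateFile, value.toString(10), function (err) {
        if (err) console.error(err);
        setTimeout(flush, 100);
    });
})();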
And another way to do this (in case there is something behind your question, so you don't accept simple solutions ;)) is to create a processing queue. Below is a simple queue pattern: it executes requests in the order they were submitted and returns the error (if any) to the provided callback once the function has executed.
var Queue = function(fn) {
    var queue = [];
    var processingFn = fn;
    var iterator = function(callback) {
        return function(err) {
            queue.shift(); // remove processed value
            callback(err);
            var next = queue[0];
            if (next)
                processingFn(next.arg, iterator(next.cb));
        };
    };
    this.emit = function(obj, callback) {
        var empty = !queue.length;
        queue.push({ arg: obj, cb: callback });
        if (empty) { // start processing
            processingFn(obj, iterator(callback));
        }
    };
};
function writeNum(inc, continueCb) {
    fs.readFile(stateFile, function(err, data) {
        if (err)
            return continueCb(err);
        var value = parseInt(data, 10) || 0; // data is a Buffer; parse it before doing arithmetic
        fs.writeFile(stateFile, String(value + inc), function(err) {
            continueCb(err);
        });
    });
}

var writer = new Queue(writeNum);

// on each request
writer.emit(1, function(err) {
    if (err) {
        // respond with error
    } else {
        // value written
    }
});
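A short sketch of how this queue could be plugged into the "/" route of the server from the question (the surrounding handler is assumed, not part of the answer above):
var writer = new Queue(writeNum);

var server = http.createServer(function (req, res) {
    if (req.url === "/") {
        writer.emit(1, function (err) {
            if (err) {
                res.writeHead(500, {"Content-Type": "text/plain"});
                res.end(err.message);
            } else {
                res.writeHead(200, {"Content-Type": "text/plain"});
                res.end("written");
            }
        });
    } else {
        res.writeHead(404, {"Content-Type": "text/plain"});
        res.end("No such resource: " + req.url);
    }
});
server.listen(3000);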
I wasn't able to find a library that did what I want, so I created one here:
https://npmjs.org/package/schlock
Here's the above example program using the read/write locking. I also used "Step" to make the whole thing more readable.
var http = require("http"),
fs = require("fs"),
Schlock = require("schlock"),
Step = require("step");
var stateFile = "/tmp/rw.txt";
var schlock = new Schlock();
var server = http.createServer(function(req, res) {
var num;
Step(
function() {
schlock.writeLock(stateFile, this);
},
function(err) {
if (err) throw err;
fs.readFile(stateFile, this);
},
function(err, data) {
if (err && err.code == "ENOENT") {
num = 0;
} else if (err) {
throw err;
} else {
num = parseInt(data, 10) + 1;
}
fs.writeFile(stateFile, num.toString(10), this);
},
function(err) {
if (err) throw err;
schlock.writeUnlock(stateFile, this);
},
function(err) {
if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
res.writeHead(200, {"Content-Type": "text/plain"});
res.end(num.toString(10));
}
}
);
});
server.listen(3000);
You can use fs.writeFileSync and fs.readFileSync.
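This answer is terse; what it implies is that synchronous reads and writes cannot interleave within the single Node process, at the cost of blocking the event loop. A minimal sketch of the increment using the sync API:
function incrementSync() {
    var num = 0;
    try {
        num = parseInt(fs.readFileSync(stateFile, "utf8"), 10) + 1;
    } catch (e) {
        // first run: the state file does not exist yet, start from 0
    }
    fs.writeFileSync(stateFile, num.toString(10));
    return num;
}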
One solution I just found via npm search is the locker server: https://github.com/bobrik/locker .
I think it's a good solution to the problem, and it's about how I'd design it.
The big problem is that it handles the general case (multiple processes using a resource) which requires a separate server.
I'm still looking for an in-process solution that does about the same thing.