Perform print operation on cups using Node.js - node.js

I would like to print documents through HTTP requests in Node.js. Is there a way to send print jobs to, and query, a CUPS server using Node.js? I found this project while exploring — is it the only (or correct) way to do that?

You could use the shell to do so. I built a project some time ago where I needed to read a certain hashtag from Instagram and print the photos uploaded with that hashtag, using a Raspberry Pi and a photo printer.
// Print a file and inspect the print queue by shelling out to the CUPS
// command-line tools (lp / lpq).
var fs = require('fs'), // kept from the original project; unused in this snippet
    exec = require('child_process').exec;

// Send the picture to the default printer. Report failures instead of
// silently discarding them (the original call had no callback at all).
exec("lp /path/to/somepic.jpg", function (error, stdout, stderr) {
    if (error !== null) {
        console.log('lp error: ' + error);
    }
});

// get printer jobs
exec("lpq", function (error, stdout, stderr) {
    console.log('stdout: ' + stdout);
    console.log('stderr: ' + stderr);
    if (error !== null) {
        console.log('exec error: ' + error);
    }
});
The command lp /path/to/somepic.jpg sends /path/to/somepic.jpg to the default printer. The command lpq displays the printer queue. For better use, read the CUPS documentation.

Following snippet seems useful. Not tried it as I am no longer working on this problem!
It may be helpful for others. Original source: https://gist.github.com/vodolaz095/5325917
var ipp = require('ipp'); //get it from there - https://npmjs.org/package/ipp - $npm install ipp
var request = require('request'); //get it from there - https://npmjs.org/package/request - $npm install request
var fs = require('fs');
// Scrape the CUPS web interface for the list of installed printers and hand
// their URLs to the callback.
// callback(error, printersUrls) - printersUrls is always an array (possibly
// empty). In the original, it was declared inside the success branch, so on
// any HTTP error the callback received `undefined`.
function getPrinterUrls(callback) {
    var CUPSurl = 'http://localhost:631/printers'; // todo - change if you have CUPS running on another host
    request(CUPSurl, function (error, response, body) {
        var printersUrls = []; // hoisted so the callback never sees `undefined`
        if (!error && response.statusCode == 200) {
            // Pull printer names out of the HTML table. Regex-parsing HTML is
            // fragile, but CUPS's markup here is simple and stable.
            var printersMatches = body.match(/<TR><TD><A HREF="\/printers\/([a-zA-Z0-9-^"]+)">/gm);
            if (printersMatches) {
                for (var i = 0; i < printersMatches.length; i++) {
                    var a = (/"\/printers\/([a-zA-Z0-9-^"]+)"/).exec(printersMatches[i]);
                    if (a) {
                        printersUrls.push(CUPSurl + '/' + a[1]);
                    }
                }
            }
        }
        callback(error, printersUrls);
    });
}
// Send bufferToBePrinted to the given ipp.Printer and poll every 2 seconds
// until the job reports `completed`; after too many polls the job is
// cancelled. callback(err, job) is invoked exactly once.
//
// Fixes over the original:
//  - the `err` from Get-Printer-Attributes was never checked (any IPP failure
//    crashed on the `printerStatus[...]` access);
//  - `throw` inside async callbacks (which would kill the process) is replaced
//    by reporting through `callback`;
//  - the completed-job and tries>50 branches were independent `if`s, so one
//    poll could invoke `callback` twice; now they are mutually exclusive.
function doPrintOnSelectedPrinter(printer, bufferToBePrinted, callback) {
    printer.execute("Get-Printer-Attributes", null, function (err, printerStatus) {
        if (err) {
            return callback(err, null); // the IPP request itself failed
        }
        if (printerStatus['printer-attributes-tag']['printer-state'] == 'idle') {
            // printer ready to work
            printer.execute("Print-Job",
                {
                    "operation-attributes-tag": {
                        "requesting-user-name": "nap",
                        "job-name": "testing"
                    },
                    "job-attributes-tag": {},
                    data: bufferToBePrinted
                },
                function (err, res) {
                    if (!err && res && res.statusCode == 'successful-ok') {
                        var jobUri = res['job-attributes-tag']['job-uri'];
                        var tries = 0;
                        var t = setInterval(function () {
                            printer.execute("Get-Job-Attributes",
                                { "operation-attributes-tag": { 'job-uri': jobUri } },
                                function (err2, job) {
                                    if (err2) {
                                        clearInterval(t);
                                        return callback(err2, null);
                                    }
                                    tries++;
                                    if (job && job["job-attributes-tag"]["job-state"] == 'completed') {
                                        clearInterval(t);
                                        callback(null, job); // job is successfully printed!
                                    } else if (tries > 50) { // todo - change the retry limit to what you need!
                                        clearInterval(t);
                                        printer.execute("Cancel-Job", {
                                            "operation-attributes-tag": {
                                                //"job-uri":jobUri, //uncomment this
                                                "printer-uri": printer.uri, // or this pair - one of the variants should work!!!
                                                "job-id": job["job-attributes-tag"]["job-id"]
                                            }
                                        }, function (cancelErr) {
                                            // Only log here: `callback` has already been
                                            // invoked below, and it must fire exactly once.
                                            if (cancelErr) {
                                                console.log('Cancel-Job failed: ' + cancelErr);
                                                return;
                                            }
                                            console.log('Job with id ' + job["job-attributes-tag"]["job-id"] + ' is being canceled');
                                        });
                                        callback(new Error('Job is canceled - too many tries and job is not printed!'), null);
                                    }
                                });
                        }, 2000);
                    } else {
                        callback(new Error('Error sending job to printer!'), null);
                    }
                });
        } else {
            callback(new Error('Printer ' + printerStatus['printer-attributes-tag']['printer-name'] + ' is not ready!'), null);
        }
    });
}
// Print `data` on every printer CUPS knows about.
// callback(err, job) is invoked (once per printer) by doPrintOnSelectedPrinter,
// or once with an error if printer discovery fails.
function doPrintOnAllPrinters(data, callback) {
    // Buffer.from replaces the deprecated (and unsafe) `new Buffer(...)`.
    var b = Buffer.from(data, 'binary');
    getPrinterUrls(function (err, printers) {
        if (err) {
            // Report through the callback instead of throwing inside an async
            // callback (a throw here would crash the whole process).
            return callback(err, null);
        }
        if (printers && printers.length) {
            for (var i = 0; i < printers.length; i++) {
                var printer = ipp.Printer(printers[i]);
                doPrintOnSelectedPrinter(printer, b, callback);
            }
        } else {
            callback(new Error('Unable to find printer. Do you have printer installed and accessible via CUPS?'), null);
        }
    });
}
/*
 Example of usage: print this project's package.json on every printer.
*/
fs.readFile('package.json', function (err, data) {
    // Without this check a read failure left `data` undefined, and the
    // Buffer conversion downstream crashed with a confusing error.
    if (err) throw err;
    doPrintOnAllPrinters(data, function (err, job) {
        if (err) {
            console.error('Error printing');
            console.error(err);
        } else {
            console.log('Printed. Job parameters are: ');
            console.log(job);
        }
    });
});

Related

How to use a nested loop in request.head - Node JS

I am trying to move download images from parse and save it to my local. I have this piece of code that does the job for me. This works well when there is only one request but when I put in a loop, it doesn't hold good.
`for(var i = 0; i < 5; i++) {
console.log(i);//to debug
// NOTE(review): var is function-scoped, so every async callback below closes
// over the SAME i / filename / uri bindings; by the time the HTTP responses
// arrive, the loop has finished and all callbacks see only the values from
// the last iteration - which is why only one image gets written.
var filename = results_jsonObj[i].imageFile.name;
var uri = results_jsonObj[i].imageFile.url;
request.head(uri, function(err, res, body){
if (err){
console.log(err);
// NOTE(review): "item" is not defined anywhere in this snippet - this line
// would throw a ReferenceError if the error branch ran. TODO confirm against
// the full program.
console.log(item);
return;
}else {
console.log(i); //to debug
var stream = request(uri);
stream.pipe(
fs.createWriteStream("images/"+filename)
.on('error', function(err){
// NOTE(review): "callback" and "error" are also undefined in this scope;
// presumably copied from a larger program - verify against the full source.
callback(error, filename);
stream.read();
})
)
}
});
}`
Irrespective of the loop condition I have, only one image downloads to the mentioned directory.
Below is the output:
The input is from a Json file and I have the request, fs, parse module included in the node js program.
Any help on how to go about this?
I have got this fixed now. As advised in the comments it was async which helped me do the trick.
// Iterate over 900 JSON records; for each record, walk its properties with
// async.forEachOf and download the image referenced by the imageFile entry.
for(var i = 0; i < 900; i++) {
async.forEachOf(results_jsonObj[i], function(value, key, callback){
var image = {};
image.key = key;
image.value = value;
// filename/uri are only assigned when the current property is imageFile;
// NOTE(review): for every other key they stay undefined, yet request.head
// below still runs with that undefined uri - TODO confirm this is intended.
if(image.key == 'imageFile')
{
var filename = image.value.name;
var uri = image.value.url;
// console.log(filename, uri);
}
request.head(uri, function(err, res, body){
if (err){
console.log(err);
// console.log(item);
return;
}else {
// console.log(i,res.headers['content-type']); //to debug
var stream = request(uri);
stream.pipe(
fs.createWriteStream("images/"+filename)
.on('error', function(err){
callback(error, filename);
stream.read();
})
)
}
});
// NOTE(review): callback() fires synchronously here, before request.head has
// completed, so async.forEachOf considers the item done immediately.
callback();
}, function(err){
if (err) {
console.log('one of the api failed, the whole thing will fail now');
}
});
}

Return value with async function

Right now I have a piece of code which makes two http requests in parallel.
var request = require('request');
var async = require('async');
var fs = require('fs');
module.exports = {
req : function(options) {
//populate list with 2 lookups
var lookup_list = [];
for (var i = 0; i < 2; i++) {
if(i == 1) {
options.timeout = 200;
}
lookup_list.push(options);
}
//iterate through list with and do request calls
async.map(lookup_list, function(options, next) {
request(options, function (error, response, body) {
if (!error && response.statusCode == 200) {
var body = JSON.parse(body);
next(null, body);
} else {
next(error || response.statusCode);
}
});
//after both requests are finished, deal with responses
}, function(err, results) {
if (!err) {
console.log(results);
/*
fs.appendFile('log.txt', JSON.stringify(results[1]) + "\n",function callback(error){
if (error) {
return console.error('write failed: ', error);
}
});
*/
} else {
console.error('request failed: ', err);
}
});
}
}
I want a to return the array results from the async.map callback when parallel.req() is called.
I've tried to provide a callback for req() and have it be called in the callback for async.map but no luck. I also tried other returns and callbacks elsewhere but have had no success. I would like some guidance to resolve this issue.

How to queue the post request in Node.js?

Current post request:
// Express route: renders an image from the raw POST body by shelling out to
// a cscript/VBS automation script, then returns the script's stdout.
app.post('/rImage', rawBody, function (req, res) {
// ---- check here, if this process is running ? ----
var data = req.rawBody;
var escapedData = data.replace(/([<>|&^])/g, "\^$1"); // Windows CMD escaping
// NOTE(review): there is no space between image.jsx and the opening quote of
// the data argument, so the payload is glued onto the script-path argument -
// confirm against the working deployment whether that is intentional.
exec('cscript /nologo C:/PS/Automation/render.vbs C:/PS/Automation/image.jsx"'+escapedData + '"',
function (error, stdout, stderr) {
console.log('stdout: ' + escapedData);
if(error !== null) {
console.log('exec error: ' + error);
}
// Answer the HTTP request with whatever the script printed.
res.send(stdout);
//request finished
});
});
How could I implement a queue system to this, so that the next request fires off when the last one is finished?
Thanks in advance!
There might be libraries for this. However, you could hack it like this:
// Minimal FIFO gate: at most one exec() runs at a time; each queued job
// starts as soon as the previous one finishes.
var queue = [];
var active = false;
var check = function () {
    // Guard clauses: bail out when a job is already running or nothing waits.
    if (active) {
        return;
    }
    if (queue.length === 0) {
        return;
    }
    var job = queue.shift();
    job();
};
app.post("..", body, function (req, res) {
    // Wrap the work in a closure and enqueue it rather than running it now.
    queue.push(function () {
        active = true;
        exec("yourprogram arg1 arg2..", function () {
            // this is the async callback
            active = false;
            check(); // kick off the next queued job, if any
        });
    });
    check(); // runs the job immediately when the queue was idle
});
Here's a fiddle that shows the functionality: https://jsfiddle.net/fc15afw5/1/

How can I synchronize access to a file within a single process in node.js?

I have a Web server that reads and writes to a data file on disk. I'd like a file only be written to in a single Web request.
Here's an example program that illustrates my problem. It keeps a state file in "/tmp/rw.txt" and increments the integer contents on each Web hit. Running this program, and then running something like ab -n 10000 -c 1000 http://localhost:3000/, shows that the same value is read from the file by multiple hits, and it's written multiple times.
NOTE: I know about flock() and fs-ext. However, flock() will lock the file to the current process; since all the access here is in the same process, flock() doesn't work (and complicates this example considerably).
Also note that I'd usually use express, async, etc. to get most of this done; just sticking to the basics for the sake of example.
// Minimal counter server used to demonstrate the read/write race: GET /
// reads /tmp/rw.txt, increments the number and writes it back; GET /reset
// writes 0. Under concurrent load several requests can read the same value
// before any of them writes - that interleaving is the problem being shown.
var http = require("http"),
fs = require("fs");
var stateFile = "/tmp/rw.txt";
var server = http.createServer(function(req, res) {
// Persist `num` to the state file and answer the request with its decimal form.
var writeNum = function(num) {
var ns = num.toString(10);
console.log("Writing " + ns);
fs.writeFile(stateFile, ns, function(err) {
if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
res.writeHead(200, {"Content-Type": "text/plain"});
res.end(ns);
}
});
};
switch (req.url) {
case "/reset":
writeNum(0);
break;
case "/":
// Read-increment-write with no locking: the race window is between
// readFile returning and writeFile completing.
fs.readFile(stateFile, function(err, data) {
if (err && err.code == "ENOENT") {
// First time, set it to zero
writeNum(0);
} else if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
writeNum(parseInt(data, 10) + 1);
}
});
break;
default:
res.writeHead(404, {"Content-Type": "text/plain"});
res.end("No such resource: " + req.url);
}
});
server.listen(3000);
Storing data in files is not a preferred way in multi-user environment like web server. Databases are more suitable for this. But if you really want to stick with file, I suggest to use a buffer. That is, a memory object to which you write/read, and a separate function that periodically dumps its content to the disk, like this:
// Sketch only (the "cut" markers elide boilerplate; this is not runnable as
// written): keep the counter in an in-memory variable and let the request
// handlers touch only that variable...
var server = http.createServer(function(req, res) {
----- cut ----
switch (req.url) {
case "/reset":
value = 0;
break;
case "/":
value ++;
break;
default:
res.writeHead(404, {"Content-Type": "text/plain"});
res.end("No such resource: " + req.url);
}
}
----- cut ----
}
// ...while a single self-rescheduling writer flushes the variable to disk
// roughly every 100 ms, so only one writeFile is ever in flight.
(function() {
// NOTE(review): arguments.callee is forbidden in strict mode; a named
// function expression would be the safer way to self-reference.
var cb = arguments.callee;
fs.writeFile(stateFile, value, function(err) {
// handle error
setTimeout(cb, 100); // schedule next call
});
})();
And another way to do this (in case there is something behind your question so you don't accept simple solutions ;)) is to create a processing queue. Below is a simple queue pattern, it executes requests in the order they submitted and returns error (if any) to the provided callback once the function is executed.
// Serializing work queue: `fn(arg, done)` is invoked for one submitted item
// at a time, in submission order. Each item's own callback receives the
// error (if any) that `fn` reported for that item.
var Queue = function (fn) {
    var pending = [];       // head of the array = the item currently being processed
    var processingFn = fn;

    // Build the completion handler for the item at the head of the queue.
    var makeDone = function (callback) {
        return function (err) {
            pending.shift();   // the current item is finished
            callback(err);     // report its outcome to its submitter
            var head = pending[0];
            if (head) {
                // More work queued up while we were busy: start the next item.
                processingFn(head.arg, makeDone(head.cb));
            }
        };
    };

    // Submit an item; processing starts immediately when the queue is idle,
    // otherwise the item waits its turn.
    this.emit = function (obj, callback) {
        var wasIdle = pending.length === 0;
        pending.push({ arg: obj, cb: callback });
        if (wasIdle) {
            processingFn(obj, makeDone(callback));
        }
    };
};
// Read the counter from stateFile, add `inc`, and write the sum back.
// A missing file counts as 0 (matching the ENOENT handling of the server
// example above); any other error aborts via continueCb(err).
//
// Fixes over the original: `data || 0` kept the Buffer returned by readFile,
// so `value + inc` concatenated strings ("5" + 1 -> "51") instead of adding;
// and the very first call failed outright because ENOENT was not special-cased.
function writeNum(inc, continueCb) {
    fs.readFile(stateFile, function (err, data) {
        if (err && err.code !== "ENOENT") {
            return continueCb(err);
        }
        // parseInt converts the Buffer contents to a number; fall back to 0
        // for a missing or non-numeric file.
        var value = err ? 0 : (parseInt(data, 10) || 0);
        fs.writeFile(stateFile, String(value + inc), function (err) {
            continueCb(err);
        });
    });
}
// Single shared Queue instance: funneling every increment through it means
// writeNum's read-modify-write never interleaves with another one.
var writer = new Queue(writeNum);
// on each request
writer.emit(1, function(err) {
if(err) {
// respond with error
}
else {
// value written
}
});
I wasn't able to find a library that did what I want, so I created one here:
https://npmjs.org/package/schlock
Here's the above example program using the read/write locking. I also used "Step" to make the whole thing more readable.
// The same counter server, now serialized with the "schlock" in-process
// read/write lock; "step" flattens the callback chain. Each request acquires
// the write lock, does read-increment-write, releases the lock, and only
// then responds.
var http = require("http"),
fs = require("fs"),
Schlock = require("schlock"),
Step = require("step");
var stateFile = "/tmp/rw.txt";
var schlock = new Schlock();
var server = http.createServer(function(req, res) {
var num;
Step(
function() {
// 1. acquire the write lock keyed by the state file's name
schlock.writeLock(stateFile, this);
},
function(err) {
// Step routes a thrown error to the err argument of the next function.
if (err) throw err;
// 2. read the current value
fs.readFile(stateFile, this);
},
function(err, data) {
if (err && err.code == "ENOENT") {
// missing file means we start counting from zero
num = 0;
} else if (err) {
throw err;
} else {
num = parseInt(data, 10) + 1;
}
// 3. write the incremented value back
fs.writeFile(stateFile, num.toString(10), this);
},
function(err) {
if (err) throw err;
// 4. release the lock before responding
schlock.writeUnlock(stateFile, this);
},
function(err) {
// 5. respond: 500 for any error along the chain, 200 with the new value otherwise
if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
res.writeHead(200, {"Content-Type": "text/plain"});
res.end(num.toString(10));
}
}
);
});
server.listen(3000);
You can use fs.writeFileSync and readFileSync.
One solution I just found via npm search is the locker server: https://github.com/bobrik/locker .
I think it's a good solution to the problem, and it's about how I'd design it.
The big problem is that it handles the general case (multiple processes using a resource) which requires a separate server.
I'm still looking for an in-process solution that does about the same thing.

Node.js login/register Redis problems

};
// Question's broken login (kept as posted; the answer below explains the fixes).
var login = function(name, socket, passcode, callback) {
// NOTE(review): with a redis client the callback signature is (err, reply),
// so `res` and `pin` here actually receive the error slot - TODO confirm
// which client `db` is.
db.get(name + ':name', function(res) {
db.get(name + ':pin', function(pin) {
if (name === res) {
if (passcode === pin) {
players[sockets.indexOf(socket)] = name;
}
}
// NOTE(review): `callback` is never invoked on any path, so the caller
// never learns whether the login succeeded or failed.
});
});
};
// Question's register handler (kept as posted).
var register = function(name, socket, passcode, callback) {
if (name === null) {
callback('Name null!');
return;
}
if (name === '') {
callback('Name empty!');
return;
}
// NOTE(review): if `db` is a redis client, the first callback argument is the
// error, not the value - so `data` here would hold the error slot and the
// taken-name check below would not test what it appears to. TODO confirm.
db.get(name + ':name', function(data) {
if (data !== null) {
callback('This name has been taken!');
}
else {
db.set(name + ':name', name, function() {
db.set(name + ':pin', passcode, function() {
players[sockets.indexOf(socket)] = name;
callback('Logged in as: ' + players[sockets.indexOf(socket)]);
});
});
}
});
};
Why does the login part let me login with any PIN to any user, and the register part not callback?
They are called via
login(args[1], socket, args[2], function(data) {
callback(null, data)
}
db is a redis client.
Full code: github.com/creativemud, file server.js
You never execute the callback inside login. You need to call callback when you're done.
Also, you seem to lack some error handling. Is db a redis client? In that case, the first parameter to every callback is an error, which is custom in node.js land. I would do something like:
// Authenticate `name` with `passcode`. On success the socket slot is mapped
// to the player name and callback(null, name) fires; every failure path
// reports through callback(err) instead of being silently dropped.
var login = function (name, socket, passcode, callback) {
    db.get(name + ':name', function (err, storedName) {
        if (err) {
            return callback(err);
        }
        if (storedName !== name) {
            return callback(new Error('invalid result'));
        }
        db.get(name + ':pin', function (err, storedPin) {
            if (err) {
                return callback(err);
            }
            if (storedPin !== passcode) {
                return callback(new Error('invalid passcode'));
            }
            // Credentials check out: remember which player owns this socket.
            players[sockets.indexOf(socket)] = name;
            callback(null, name);
        });
    });
};
Might not be very clean, but I hope you catch the drift.
Edit: You don't show how you call these functions. Socket.io has pretty nice support for authentication, you might want to check it out: Authorization and handshaking.

Resources