I'm very new to Node and have run into issues with running some functions sequentially and with the use of callbacks. I have tried to use the async module, but I think I'm missing something because it runs my functions out of order.
In the simplest terms, I would like to retrieve some data in the form of URL params and then:
1. write them to a file
2. lpr print them
3. delete the file
My current code deletes the file before printing, i.e. step 3 before step 2. Would anyone offer some advice on how best to perform these in order? Thanks in advance.
router.get('/zplprint/:barcode/:zpl', function(req, res) {
    var zpl = req.params.zpl;
    var filename = appDir + "/uploads/" + req.params.barcode + ".zpl";
    console.log(req.params.zpl);
    res.send("received zpl: " + req.params.zpl);
    async.series([
        function(callback) {
            fs.writeFile(filename, zpl, function(err) {
                if (err) {
                    callback(err);
                    return;
                }
                console.log("The file was saved! to " + filename);
                callback();
            });
        },
        function(callback) {
            CupsPrinterName = nconf.get('Print:PrinterName');
            console.log(CupsPrinterName);
            var cmd = 'lpr -P ' + CupsPrinterName + ' -o raw ' + filename;
            exec(cmd, function(error, stdout, stderr) {
                // command output is in stdout
                console.log(cmd);
                console.log("file printed");
            });
            callback();
        },
        function(callback) {
            fs.unlink(filename, function(err) {
                console.log(' deleting ' + filename);
            });
            callback();
        }
    ]);
});
You are calling callback() at the same level as exec(). exec() is asynchronous, so "file printed" is logged after the file has already been deleted, because callback() was invoked outside of exec()'s completion handler rather than when the command actually finishes. Try calling the callback after printing:
function(callback) {
    CupsPrinterName = nconf.get('Print:PrinterName');
    console.log(CupsPrinterName);
    var cmd = 'lpr -P ' + CupsPrinterName + ' -o raw ' + filename;
    exec(cmd, function(error, stdout, stderr) {
        // command output is in stdout
        console.log(cmd);
        console.log("file printed");
        callback();
    });
},
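The same issue exists in the delete step: its callback() is also called outside of fs.unlink. A sketch of that task with the callback moved into the completion handler, following the same pattern:

function(callback) {
    fs.unlink(filename, function(err) {
        if (err) {
            callback(err);
            return;
        }
        console.log('deleted ' + filename);
        callback();
    });
}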
var Q = require('q');

router.get('/zplprint/:barcode/:zpl', function(req, res) {
    var zpl = req.params.zpl;
    var filename = appDir + "/uploads/" + req.params.barcode + ".zpl";
    console.log(req.params.zpl);
    res.send("received zpl: " + req.params.zpl);

    function first() {
        var d1 = Q.defer();
        fs.writeFile(filename, zpl, function(err) {
            if (err) {
                d1.reject(err);
                return;
            }
            console.log("The file was saved! to " + filename);
            d1.resolve("success");
        });
        return d1.promise;
    }

    function second() {
        var d2 = Q.defer();
        CupsPrinterName = nconf.get('Print:PrinterName');
        console.log(CupsPrinterName);
        var cmd = 'lpr -P ' + CupsPrinterName + ' -o raw ' + filename;
        exec(cmd, function(error, stdout, stderr) {
            if (error) {
                d2.reject(error);
                return;
            }
            console.log(cmd);
            console.log("file printed");
            d2.resolve();
        });
        return d2.promise;
    }

    function third() {
        var d3 = Q.defer();
        fs.unlink(filename, function(err) {
            if (err) {
                d3.reject(err);
                return;
            }
            console.log('deleting ' + filename);
            d3.resolve();
        });
        return d3.promise;
    }

    first().then(second).then(third).fail(function(err) {
        console.log("Error !!!!", err);
    });
});
Prerequisite:
npm install q
In my opinion, this is the cleanest way to structure a sequence of promises.
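As a side note on the design, Q can also wrap Node-style callback APIs directly with Q.nfcall, which shortens the chain. A rough sketch of the same flow under that approach (reusing filename and zpl from above; treat the exact wrapping as an assumption to verify against the Q docs):

var Q = require('q');

var cmd = 'lpr -P ' + nconf.get('Print:PrinterName') + ' -o raw ' + filename;

Q.nfcall(fs.writeFile, filename, zpl)
    .then(function() {
        console.log("The file was saved! to " + filename);
        // exec also takes a Node-style (error-first) callback, so it can be wrapped the same way
        return Q.nfcall(exec, cmd);
    })
    .then(function() {
        console.log("file printed");
        return Q.nfcall(fs.unlink, filename);
    })
    .then(function() {
        console.log("deleted " + filename);
    })
    .fail(function(err) {
        console.log("Error !!!!", err);
    });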
coin.js
app.post('/upload', upload.single('userfile'), function(req, res) {
    var filename = __dirname + '/' + req.file.path;
    var s = fs.ReadStream(filename);
    s.on('data', function(data) {
        shasum.update(data);
    });
    s.on('end', function() {
        var hash = shasum.digest('hex'); // this var
        console.log("Hash : " + hash + ' ' + filename);
        fs.unlink(filename, function(err) { // delete the file
            if (err) throw err;
            console.log('successfully deleted ' + filename);
        });
        res.send('Uploaded : ' + hash + " " + filename);
    });
});

app.get('/stampid', function(req, res) {
    client.stampDocument(hash, function(err, stampId) { // I want to call hash here
        res.render('coin2', {si: stampId});
    });
});
I have this code, and I want to use the hash variable inside the /stampid handler below. I don't know what to do and would appreciate a fixed version of the code.
Use a global variable:
var hash; // <----- declare it here

app.post('/upload', upload.single('userfile'), function(req, res) {
    var filename = __dirname + '/' + req.file.path;
    var s = fs.ReadStream(filename);
    s.on('data', function(data) {
        shasum.update(data);
    });
    s.on('end', function() {
        hash = shasum.digest('hex'); // assign to the shared variable
        console.log("Hash : " + hash + ' ' + filename);
        fs.unlink(filename, function(err) { // delete the file
            if (err) throw err;
            console.log('successfully deleted ' + filename);
        });
        res.send('Uploaded : ' + hash + " " + filename);
    });
});

app.get('/stampid', function(req, res) {
    client.stampDocument(hash, function(err, stampId) { // hash is now accessible here
        res.render('coin2', {si: stampId});
    });
});
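One caveat with the global: hash stays undefined until the first upload has finished, so /stampid can be called before there is anything to stamp. A minimal guard, assuming an Express version that supports res.status():

app.get('/stampid', function(req, res) {
    if (!hash) {
        // no upload has completed yet, so there is nothing to stamp
        return res.status(400).send('No document has been uploaded yet');
    }
    client.stampDocument(hash, function(err, stampId) {
        res.render('coin2', {si: stampId});
    });
});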
I am building a web app with Node.js, and I am at a point where I need to produce a tar archive of a directory of PDFs. The application is running on a VM running Ubuntu 14.04 server. My code to do this is shown below:
function tarDirectory(path, token, callback) {
    var exec = require('child_process').exec;
    var cmd = 'cd ' + path + ' && tar -cvf genericName-' + token + '.tar' + ' ' + token;
    exec(cmd, function(error, stdout, stderr) {
        console.log(stdout);
        console.log(stderr);
        if (error) {
            console.error(error);
        }
        if (callback) callback();
    });
}
and this tarDirectory function is called by the following code:
router.post('/files/generate', function(req, res, next) {
    IDList = req.body['IDs[]'];
    token = req.body['token'];
    // if just a single file being generated
    if (typeof req.body['IDs[]'] === "string") {
        filehelper.generateFile(IDList[0], req.app.locals.site.basedir + "temp/", token);
    }
    // if multiple files being generated
    else {
        IDList.forEach(function(id) {
            filehelper.generateFile(id, req.app.locals.site.basedir + "temp/", token);
        });
    }
    filehelper.tarDirectory(req.app.locals.site.basedir + "temp/", token, res.end);
});
The code expects a post request with dynamic data that is generated by a button click in my web app, and will then create files based on the data and tar it into a directory. This all works fine and good... the first time. When I click the button for the first time in a while, the tar is produced, and when I open it, the client-side PDFs are identical to the ones on the server. When I click again within an hour or so, though, I receive a tar file, but when I open the archive and unpack it, the PDFs are all corrupted and about half the expected byte size. I am at a loss here... I had a suspicion that it might be related to improper handling of stream closing, but I'm not sure.
This is the code that generates the PDFs into a directory, which is then tarred after the generation:
function generateFile(id, path, token) {
    var dirPath = path + token;
    var filePath = path + token + "/file" + id + ".pdf";
    console.log("creating file for: " + id);
    try {
        fs.statSync(dirPath).isDirectory();
    } catch (err) {
        fs.mkdirSync(dirPath);
    }
    // start the file pdf generation
    file = new PDFDocument();
    output = fs.createWriteStream(filePath);
    output.on('close', function() {
        return;
    });
    file.pipe(output);
    // handle the intricacies of the file generation
    file.text("file" + id + ".pdf");
    // end the file
    file.end();
}
Are the PDF files themselves okay before compressing?
In your generateFile function you have a WriteStream, which is asynchronous, but you are calling the function as if it were synchronous and starting the .tar compression without waiting for the PDF generation to complete, which may cause this issue.
As a recommendation: wrap generateFile in a promise, or iterate asynchronously, and start the compression only after all file generation has completed.
Example with bluebird:
var Promise = require('bluebird');

function generateFile(id, path, token) {
    return new Promise(function(resolve, reject) {
        var dirPath = path + token;
        var filePath = path + token + "/file" + id + ".pdf";
        console.log("creating file for: " + id);
        try {
            fs.statSync(dirPath).isDirectory();
        } catch (err) {
            fs.mkdirSync(dirPath);
        }
        // start the file pdf generation
        file = new PDFDocument();
        output = fs.createWriteStream(filePath);
        output.on('close', function() {
            return resolve();
        });
        output.on('error', function(error) {
            return reject(error);
        });
        file.pipe(output);
        // handle the intricacies of the file generation
        file.text("file" + id + ".pdf");
        // end the file
        file.end();
    });
}
PDF generation and compression:
var Promise = require('bluebird');
....
//IDList.forEach(function(id) {
//    filehelper.generateFile(id, req.app.locals.site.basedir + "temp/", token);
//});
// replace with
Promise.map(IDList, function(id) {
    return filehelper.generateFile(id, req.app.locals.site.basedir + "temp/", token);
})
.then(function() {
    // all files are ready, start compressing
})
.catch(function(error) {
    // we have an error
});
So I implemented the promises that Nazar suggested. The total operation now works mostly fine if I am generating just one file, but if I generate more I get the same corrupt PDFs.
My code to generate the individual files:
function generateFile(id, path, token) {
    return new Promise(function(resolve, reject) {
        var dirPath = path + token;
        var filePath = path + token + "/file" + id + ".pdf";
        console.log("creating file for: " + id);
        try {
            fs.statSync(dirPath).isDirectory();
        } catch (err) {
            fs.mkdirSync(dirPath);
        }
        // start the file pdf generation
        file = new PDFDocument();
        output = fs.createWriteStream(filePath);
        // stream handling
        output.on('finish', function() {
            console.log(fs.statSync(filePath)["size"]);
            return resolve();
        });
        output.on('error', function(error) {
            return reject(error);
        });
        // pipe the generated PDF to the output file
        file.pipe(output);
        // handle the intricacies of the transcript generation
        file.text("file" + id + ".pdf");
        // end the file
        file.end();
    });
}
My code to tar the directory:
function tarDirectory(path, token) {
    return new Promise(function(resolve, reject) {
        var exec = require('child_process').exec;
        var cmd = 'cd ' + path + ' && tar -cvf Files-' + token + '.tar' + ' ' + token;
        exec(cmd, function(error, stdout, stderr) {
            if (stdout != "") console.log(stdout);
            if (stderr != "") console.log(stderr);
            if (error) return reject(error);
            return resolve();
        });
    });
}
and my code that calls the two helper functions:
// submit request to generate files
router.post('/files/generate', function(req, res, next) {
    IDList = req.body['IDs[]'];
    token = req.body['token'];
    // convert single fileID into list because Promise.map() needs iterable
    if (typeof IDList === "string") {
        IDList = [IDList];
    }
    Promise.map(IDList, function(id) {
        filehelper.generateFile(id, req.app.locals.site.basedir + "temp/", token);
    })
    .then(function() {
        return filehelper.tarDirectory(req.app.locals.site.basedir + "temp/", token);
    })
    .then(function() {
        res.end();
    })
    .catch(function(error) {
        throw new Error('Something went wrong while generating the tar file! :(\n' + error);
    });
});
Any further insights as to what I could be doing wrong here are much appreciated.
I'd like to know how to execute async functions. My goal is to upload files and compress them... but it doesn't work, because my files have not finished uploading when I write the .zip, so I get an empty .zip file...
var asyncTasks = [];

selectedPhotos.forEach(function(id) {
    asyncTasks.push(function(callback) {
        var newFileName = pathDir + '/' + id + '.jpg';
        api.media(id, function(err, media, remaining, limit) {
            gm()
                .in('-page', '+0+0')
                .in('./public/images/instabox.jpg')
                .in('-page', '+10+10')
                .in(media.images.thumbnail.url)
                .mosaic()
                .minify()
                .write(newFileName, function(err) {
                    if (!err) console.log('done');
                    if (err) console.log(err);
                });
        });
        callback();
    });
});

async.parallel(asyncTasks, function() {
    var admZip = new AdmZip();
    var pathDir = './public/uploads/' + reference;
    admZip.addLocalFolder(pathDir);
    var willSendthis = admZip.toBuffer();
    admZip.writeZip('./public/uploads/' + reference + '.zip');
});
You're calling the callback() too early. Move callback(); inside of your .write() callback like so:
.write(newFileName, function(err) {
    if (!err) console.log('done');
    if (err) console.log(err);
    callback(err);
});
When you execute the callback, that signifies that the task is finished. So without the above change, you're basically telling async that you're done immediately.
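It also helps to let task errors reach the final callback, so the zip is not written when one of the images failed. A sketch of the parallel call with error handling (same AdmZip steps as above):

async.parallel(asyncTasks, function(err) {
    if (err) {
        // at least one task passed an error to its callback
        console.log('one of the image tasks failed:', err);
        return;
    }
    var admZip = new AdmZip();
    var pathDir = './public/uploads/' + reference;
    admZip.addLocalFolder(pathDir);
    admZip.writeZip('./public/uploads/' + reference + '.zip');
});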
I wrote a function to copy a directory to another one, but there's a problem: I use a callback function to report the copied size, and this callback fires too early (before the end of the copy). I think the problem is that the process is asynchronous. Can you help me?
var fs = require('fs');

var copyDir = function copyDir(from, to, callback) {
    if (!fs.existsSync(to)) {
        fs.mkdirSync(to);
    }
    console.log(from + " ==> " + to);
    var count = 0;
    fs.readdir(from, function(err, files) {
        for (var i = 0; i < files.length; i++) {
            var f = from + "/" + files[i];
            var d = f.replace(from, to);
            console.log(f + " (" + i + ")" + " : " + d);
            if (!fs.existsSync(d)) {
                if (!fs.statSync(f).isFile()) {
                    //fs.mkdirSync(f.replace(from, to));
                    count += fs.statSync(f).size;
                    console.log(f + " will make an inception!");
                    copyDir(f, f.replace(from, to), function(err, cp) { callback(err, cp); });
                } else {
                    var size = fs.statSync(f).size;
                    copyFile(f, f.replace(from, to), function(err) {
                        if (err) callback(err, count);
                    });
                    count += size;
                    callback(null, count);
                }
            }
        }
    });
};

function copyFile(source, target, cb) {
    fs.readFile(source, function(err, data) {
        if (err) throw err;
        fs.writeFileSync(target, data, function(err, data) {
            if (err) throw err;
            cb(null, fs.statSync(source).size); // This callback comes before the copy ends.
        });
    });
}

exports.copyDir = copyDir;
copyDir is called by:
io.sockets.on('connection', function(socket) {
    console.log('connection');
    socket.on('startCopy', function(data) {
        sizeDir('templates', function(e, r) {
            copyDir('templates', 'tmp', function(err, cp) {
                console.log("copy % " + Math.round(100 * cp / r));
                socket.emit('copy', {prog: Math.round(100 * cp / r)});
            });
        });
    });
});
You can rewrite your else block as follows:
(function() {
    var size = fs.statSync(f).size;
    copyFile(f, f.replace(from, to), function(err) {
        if (err) {
            callback(err, count);
            return;
        }
        count += size;
        callback(null, count);
    });
})();
But you have a lot of synchronous functions in your code, and you should be aware of the caveats of that approach. This article may be helpful.
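For context, the immediately-invoked wrapper matters because var is function-scoped: without it, every copyFile callback in the loop would close over the same size value from the last iteration. A minimal illustration of the difference:

// Without a wrapper, every callback sees the loop's final value of i:
for (var i = 0; i < 3; i++) {
    setTimeout(function() { console.log(i); }, 0); // logs 3, 3, 3
}

// Wrapping each iteration captures the current value:
for (var i = 0; i < 3; i++) {
    (function(n) {
        setTimeout(function() { console.log(n); }, 0); // logs 0, 1, 2
    })(i);
}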
I'm new to Node.js and I'm trying to read a directory recursively. This is my code:
var fs = require('fs');

var readDir = function (dir, calback) {
    fs.readdir(dir, function (err, files) {
        if (err)
            console.log(err);
        for (var file in files) {
            fs.stat(dir + "/" + file, function (err, stats) {
                if (err)
                    console.log(err);
                if (stats.isFile()) {
                    calback(dir + "/" + file);
                }
                if (stats.isDirectory()) {
                    walk(file, calback);
                }
            });
        }
    });
};
This is my error message:
C:\Users\Lukas\Desktop\Enide-Studio-05-kepler-win32\ws\PlayerTest\hello-world-server.js:24
if (fs.stats.isFile()) {
^
TypeError: Cannot call method 'isFile' of undefined
at C:\Users\Lukas\Desktop\Enide-Studio-05-kepler-win32\ws\PlayerTest\hello-world-server.js:24:30
at Object.oncomplete (fs.js:107:15)
What is my mistake?
You are not exiting your function after encountering an error.
fs.stat(dir + "/" + file, function (err, stats) {
    if (err) {
        console.log(err);
        return; // exit here since stats will be undefined
    }
    if (stats.isFile()) {
        calback(dir + "/" + file);
    }
    if (stats.isDirectory()) {
        walk(file, calback);
    }
});
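Beyond the missing return, two smaller issues are worth noting: for (var file in files) iterates array indices rather than filenames, and the recursive call refers to walk although the function is named readDir. A sketch of the whole function with those points addressed, keeping the original callback-per-file design:

var fs = require('fs');
var path = require('path');

var readDir = function (dir, calback) {
    fs.readdir(dir, function (err, files) {
        if (err) {
            console.log(err);
            return;
        }
        files.forEach(function (file) {
            var fullPath = path.join(dir, file);
            fs.stat(fullPath, function (err, stats) {
                if (err) {
                    console.log(err);
                    return;
                }
                if (stats.isFile()) {
                    calback(fullPath);
                }
                if (stats.isDirectory()) {
                    readDir(fullPath, calback); // recurse with the same function name
                }
            });
        });
    });
};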