I need to make two resized images from a single source image.
var fs = require('fs')
  , gm = require('gm')
  , async = require('async');

var worker = function(filename) {
  var img = gm(fs.createReadStream(filename));
  img.flip();

  this.run = function() {
    async.series([
      function(callback) {
        img.resize(640, 480);
        img.toBuffer(function(err, buffer) {
          if (err) {
            callback(err);
            return;
          }
          callback(null, true);
        });
      },
      function(callback) {
        img.resize(320, 240);
        img.toBuffer(function(err, buffer) {
          if (err) {
            callback(err);
            return;
          }
          callback(null, true);
        });
      }
    ], function(err, results) {
      console.log(err, results);
    });
  };
};

new worker('test.jpg').run();
This code generates an error:
Error: gm().stream() or gm().write() with a non-readable stream.
If I replace fs.createReadStream with a filename, then everything works just fine. It looks like gm doesn't store the source image from the stream in its internal buffer. Is it a bug, or is there something else I should know about using it properly?
Note: async is used because in the real project I need both results in order to perform other actions with them.
Try this; I don't think you need to use write streams in this case:
gm(imagePath)
  .resize(640, 480)
  .autoOrient()
  .flip()
  .write(newImagePath, function(e) {
    if (e) throw e;
    gm(newImagePath)
      .resize(320, 240)
      .write(newImagePathSmall, function(e) {
        if (e) throw e;
        console.log('resized successfully');
      });
  });
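If the source really has to come from a stream (say, it never touches disk), one workaround is to buffer the original once and give each output size its own gm instance, since a readable stream can only be consumed once. A rough sketch, assuming your gm version accepts a Buffer source:

var fs = require('fs')
  , gm = require('gm')
  , async = require('async');

fs.readFile('test.jpg', function(err, source) {
  if (err) throw err;
  // Each task builds a fresh gm instance from the same buffer,
  // so nothing is consumed twice.
  async.series([
    function(callback) {
      gm(source).flip().resize(640, 480).toBuffer(callback);
    },
    function(callback) {
      gm(source).flip().resize(320, 240).toBuffer(callback);
    }
  ], function(err, buffers) {
    // buffers[0] and buffers[1] hold the two resized images.
    console.log(err, buffers && buffers.length);
  });
});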
I am new to Node.js and I'm trying to concatenate multiple CSS files on the fly while coding. The package chokidar allows me to call a function when a file is modified, but I have a problem with the execution.
var goconcat =
  fs.readdir(paths, function (err, files) {
    if (err) { console.log(err); }
    fs.unlink(paths + 'concat.css', function (err) {
      if (err) throw err;
      var list = files.map(function (files) {
        return path.join(paths, files);
      });
      concat(list, paths + 'concat.css', function(err) {
        if (err) throw err;
      });
    });
  });
I want to first delete the previous file, then read the directory, and then write a new "concat.css". However, I get an error:
Error: ENOENT: no such file or directory, open 'public/css/concat.css'
at error (native)
It appears that the function concat() is executed before the directory update and not after, and therefore it is trying to concatenate a file that has just been deleted. Why?
I know that Node.js executes these functions asynchronously, but I can't find a way to solve this problem. I tried async, but I can't declare a variable between two functions and I couldn't manage to make it work.
If the call cannot live inside a callback, the setTimeout(fn, 0) trick may help make sure it's executed after the variable assignment:
var goconcat =
  fs.readdir(paths, function (err, files) {
    if (err) { console.log(err); }
    fs.unlink(paths + 'concat.css', function (err) {
      if (err) throw err;
      var list = files.map(function (files) {
        return path.join(paths, files);
      });
      setTimeout(function() {
        concat(list, paths + 'concat.css', function(err) {
          if (err) throw err;
        });
      }, 0);
    });
  });
The problem you're having is that your concat function is being invoked before unlink has finished deleting the file. You can prevent this with nested callbacks; however, you can probably get better control flow if you use a module like async, and save yourself from dealing with Callback Hell.
Below is an example of how you can use the async module.
var fs = require('fs');
var async = require('async');

var myDir = __dirname + '/data';

async.waterfall([function(callback) {
  fs.readdir(myDir, 'utf-8', function(error, files) {
    if (error) {
      return callback(error);
    }
    return callback(null, files);
  });
}, function(files, callback) {
  fs.open(myDir + '/myFile', 'wx', function(error, f) {
    if (error && error.code === 'EEXIST') {
      return callback(null, 'EEXIST');
    }
    return callback(null, 'CREATE');
  });
}, function(fileStatus, callback) {
  if (fileStatus === 'EEXIST') {
    console.log('File exists. Deleting file...');
    fs.unlink(myDir + '/myFile', function(error) {
      if (error) {
        return callback(error);
      } else {
        return callback(null);
      }
    });
  } else {
    console.log('File does not exist...');
    return callback(null);
  }
}, function(callback) {
  fs.writeFile(myDir + '/myFile', "Hello World", function(err) {
    if (err) {
      return callback(err);
    }
    return callback(null, 'File Created');
  });
}], function(error, results) {
  console.error(error);
  console.log(results);
});
The waterfall function runs the tasks array of functions in series, each passing their results to the next in the array. However, if any of the tasks pass an error to their own callback, the next function is not executed, and the main callback is immediately called with the error.
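A minimal sketch of that short-circuit behaviour (the task bodies are hypothetical):

var async = require('async');

async.waterfall([
  function(callback) {
    callback(new Error('boom')); // this task fails immediately
  },
  function(callback) {
    console.log('never runs'); // skipped because the previous task errored
    callback(null);
  }
], function(err) {
  console.error(err.message); // 'boom' lands straight in the main callback
});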
I'm resizing an image using the gm library, but it creates an empty image. I've tried different ways of writing the result, but every time I get an empty image.
gm(readStream)
  .size({bufferStream: true}, function(err, size) {
    this.resize(100, 100, '%')
      .stream()
      .on('end', function() {
        console.log('done');
      })
      .on('error', function(err) {
        console.log(err);
      })
      .pipe(writeStream);
  });
I also tried this way, but the result was the same:
gm(srcPath)
  .resize(100, 100)
  .stream(function(err, stdout, stderr) {
    if (err) {
      console.log('Error:', err);
    }
    // ws = fs.createWriteStream( output );
    var i = [];
    stdout.on('data', function(data) {
      // console.log('data');
      i.push(data);
    });
    stdout.on('close', function() {
      console.log('close');
      var image = Buffer.concat(i);
      writeStream.write(image.toString('base64'), 'base64');
      writeStream.end();
    });
  });
And this way too:
gm(srcPath)
  .resize(100, 100)
  .write(writeStream, function (err) {
    var thumbUrl, error;
    if (err) {
      error = err;
    } else {
      thumbUrl = thumbPath.replace(/\\/g, '/');
      thumbUrl = thumbUrl.replace(/^\.\/public/, '');
    }
    callback(error, thumbUrl);
  });
This way returns an empty object ({}) as the error every time!
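For comparison, a minimal streaming pattern along the lines of gm's documentation, piping straight into a file write stream and waiting for the destination's 'finish' event rather than the source's 'end' event; a sketch with hypothetical paths:

var fs = require('fs');
var gm = require('gm');

var out = fs.createWriteStream('./thumb.jpg'); // hypothetical output path

gm('./source.jpg')
  .resize(100, 100)
  .stream()
  .pipe(out);

// 'finish' fires once all data has been flushed to the destination file.
out.on('finish', function() {
  console.log('done');
});
out.on('error', function(err) {
  console.log(err);
});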
OK, I have a Node.js app and I'm trying to download lots of images from a web server (about 500 for now, but the number will increase). The problem I get is an "Unhandled stream error in pipe Error: EMFILE", because it seems that too many files get opened at the same time.
So I'm trying to use async.queue to process files in batches of 20, but I still get the error.
SomeModel.find({}, function(err, photos) {
  if (err) {
    console.log(err);
  } else {
    photos.forEach(function(photo) {
      var url = photo.PhotoURL;
      var image = url.replace('http://someurl.com/media.ashx?id=', '').replace('&otherstuffattheend', '.jpg');
      photo.target = image;
      var q = async.queue(function (task) {
        request
          .get(task.PhotoURL)
          .on('response', function(response) {
            console.log(task.PhotoURL + ' : ' + response.statusCode, response.headers['content-type']);
            console.log(task.target);
          })
          .on('error', function(err) {
            console.log(err);
          })
          .pipe(fs.createWriteStream(task.target));
      }, 20);
      q.push(photo, function(err) {
        if (err) {
          console.log(err);
        }
      });
      q.drain = function() {
        console.log('Done.');
      };
    });
  }
});
What am I doing wrong? Many thanks for your time and help.
The problem is that you're creating a new queue for each photo and each queue receives just one photo. Instead, only create a queue once (outside of the forEach()) and push the photo objects to it. You're also missing the callback in your task handler. For example:
var q = async.queue(function(task, cb) {
  request
    .get(task.PhotoURL)
    .on('response', function(response) {
      console.log(task.PhotoURL + ' : ' + response.statusCode, response.headers['content-type']);
      console.log(task.target);
      // the call to `cb` could instead be made on the file stream's `finish` event
      // if you want to wait until it all gets flushed to disk before consuming the
      // next task in the queue
      cb();
    })
    .on('error', function(err) {
      console.log(err);
      cb(err);
    })
    .pipe(fs.createWriteStream(task.target));
}, 20);

q.drain = function() {
  console.log('Done.');
};
photos.forEach(function(photo) {
  var url = photo.PhotoURL;
  var image = url.replace('http://someurl.com/media.ashx?id=', '').replace('&otherstuffattheend', '.jpg');
  photo.target = image;
  q.push(photo, function(err) {
    if (err) {
      console.log(err);
    }
  });
});
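As the comment in the task handler suggests, here is a sketch of the variant that frees a queue slot only after each file has been flushed to disk, by calling cb from the write stream's 'finish' event:

var q = async.queue(function(task, cb) {
  var ws = fs.createWriteStream(task.target);
  ws.on('finish', function() {
    cb(); // the next task starts only once the file is fully written
  });
  ws.on('error', function(err) {
    cb(err);
  });
  request
    .get(task.PhotoURL)
    .on('error', function(err) {
      cb(err); // note: if the request fails, 'finish' never fires on ws
    })
    .pipe(ws);
}, 20);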
I'm trying to design a webpage. I have a function that I call to get all info needed for an individual's home page. A snippet of the code is:
exports.getHomePageData = function(userId, cb) {
  var pageData = {};
  pageData.userFullName = dbUtil.findNameByUserId(userId, function(err) {
    if (err) cb(err);
  });
  pageData.classes = dbUtil.findUserClassesByUserId(userId, function(err) {
    if (err) cb(err);
  });
  cb(pageData);
}
The problem I'm having is that cb(pageData) is being called before I've even finished setting the elements.
I've seen that people use the async library to solve this, but I was wondering if there was any other way for me to do it without needing more modules.
One possible approach:
exports.getHomePageData = function(userId, cb) {
  var pageData = {},
      filler = function() {
        if ('userFullName' in pageData
            && 'classes' in pageData)
          cb(null, pageData);
      };
  dbUtil.findNameByUserId(userId, function(err, name) {
    if (err) {
      cb(err);
      return;
    }
    pageData.userFullName = name;
    filler();
  });
  dbUtil.findUserClassesByUserId(userId, function(err, classes) {
    if (err) {
      cb(err);
      return;
    }
    pageData.classes = classes;
    filler();
  });
}
It looks like dbUtil.findUserClassesByUserId and dbUtil.findNameByUserId are asynchronous methods; that usually indicates that they do not return a value, and instead use the callback to give you the data.
Both functions are most likely expecting a signature like the following:
function(err, data) {
  // if err is set, an error occurred; otherwise data is set with the result
}
Thus, your function should look like this instead:
exports.getHomePageData = function(userId, cb) {
  dbUtil.findNameByUserId(userId, function(err, userFullName) {
    if (err) {
      cb(err);
      return;
    }
    dbUtil.findUserClassesByUserId(userId, function(err, classes) {
      if (err) {
        cb(err);
        return;
      }
      var pageData = {
        userFullName: userFullName,
        classes: classes
      };
      cb(pageData);
    });
  });
}
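If your Node version ships with built-in Promises, a third option that still avoids extra modules is to wrap both calls and combine them with Promise.all; a sketch:

exports.getHomePageData = function(userId, cb) {
  // Wrap each callback-style call in a built-in Promise (Node 4+).
  var namePromise = new Promise(function(resolve, reject) {
    dbUtil.findNameByUserId(userId, function(err, name) {
      if (err) reject(err); else resolve(name);
    });
  });
  var classesPromise = new Promise(function(resolve, reject) {
    dbUtil.findUserClassesByUserId(userId, function(err, classes) {
      if (err) reject(err); else resolve(classes);
    });
  });
  Promise.all([namePromise, classesPromise]).then(function(results) {
    cb(null, { userFullName: results[0], classes: results[1] });
  }, function(err) {
    cb(err);
  });
};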
I'm trying to move an uploaded file from /tmp to the home directory using Node.js/Express:
fs.rename('/tmp/xxxxx', '/home/user/xxxxx', function(err) {
  if (err) res.json(err);
  console.log('done renaming');
});
But it didn't work, and no error was encountered. When the new path is also in /tmp, it works fine.
I'm using Ubuntu; home is on a different partition. Any fix?
Thanks
Yes, fs.rename does not move files between two different disks/partitions. This is the correct behaviour: fs.rename provides functionality identical to rename(2) on Linux.
Read the related issue posted here.
To get what you want, you would have to do something like this:
var source = fs.createReadStream('/path/to/source');
var dest = fs.createWriteStream('/path/to/dest');
source.pipe(dest);
source.on('end', function() { /* copied */ });
source.on('error', function(err) { /* error */ });
Another way is to use fs.writeFile; an fs.unlink in the callback will then remove the temp file from the tmp directory.
var oldPath = req.files.file.path;
var newPath = ...;

fs.readFile(oldPath, function(err, data) {
  fs.writeFile(newPath, data, function(err) {
    fs.unlink(oldPath, function(err) {
      if (err) throw err;
      res.send("File uploaded to: " + newPath);
    });
  });
});
An updated ES6 solution, ready to use with promises and async/await:
function moveFile(from, to) {
  const source = fs.createReadStream(from);
  const dest = fs.createWriteStream(to);

  return new Promise((resolve, reject) => {
    source.on('end', resolve);
    source.on('error', reject);
    source.pipe(dest);
  });
}
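A hypothetical usage with async/await (note that, like the stream snippet above, this copies the file without deleting the source):

// Paths are placeholders.
async function handleUpload() {
  try {
    await moveFile('/tmp/xxxxx', '/home/user/xxxxx');
    console.log('done');
  } catch (err) {
    console.error(err);
  }
}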
This example is taken from Node.js in Action:
A move() function that renames, if possible, or falls back to copying:
var fs = require('fs');

module.exports = function move(oldPath, newPath, callback) {
  fs.rename(oldPath, newPath, function (err) {
    if (err) {
      if (err.code === 'EXDEV') {
        copy();
      } else {
        callback(err);
      }
      return;
    }
    callback();
  });

  function copy() {
    var readStream = fs.createReadStream(oldPath);
    var writeStream = fs.createWriteStream(newPath);

    readStream.on('error', callback);
    writeStream.on('error', callback);

    readStream.on('close', function () {
      fs.unlink(oldPath, callback);
    });

    readStream.pipe(writeStream);
  }
}
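A hypothetical usage of that helper (the module path is a placeholder):

var move = require('./move');

move('/tmp/xxxxx', '/home/user/xxxxx', function(err) {
  if (err) throw err;
  console.log('moved: renamed, or copied and unlinked');
});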