I'm resizing an image using the gm library, but it creates an empty image. I have tried different ways of writing the result, but I get an empty image every time.
gm(readStream)
  .size({ bufferStream: true }, function (err, size) {
    this.resize(100, 100, '%')
      .stream()
      .on('end', function () {
        console.log('done');
      })
      .on('error', function (err) {
        console.log(err);
      })
      .pipe(writeStream);
  });
I also tried this approach, but the result was the same.
gm(srcPath)
  .resize(100, 100)
  .stream(function (err, stdout, stderr) {
    if (err) {
      console.log('Error:', err);
    }
    // ws = fs.createWriteStream( output );
    var chunks = [];
    stdout.on('data', function (data) {
      chunks.push(data);
    });
    stdout.on('close', function () {
      console.log('close');
      var image = Buffer.concat(chunks);
      writeStream.write(image.toString('base64'), 'base64');
      writeStream.end();
    });
  });
And this way too:
gm(srcPath)
  .resize(100, 100)
  .write(writeStream, function (err) {
    var thumbUrl, error;
    if (err) {
      error = err;
    } else {
      thumbUrl = thumbPath.replace(/\\/g, '/');
      thumbUrl = thumbUrl.replace(/^\.\/public/, '');
    }
    callback(error, thumbUrl);
  });
This way returns an empty object ({}) as the error every time!
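For what it's worth, gm's write() expects a destination file path rather than a writable stream, so passing writeStream to it may be why an error comes back at all (an Error object serialized as JSON often prints as an empty {}, since its properties are non-enumerable). A minimal sketch writing straight to a path, reusing the thumbPath variable from the snippet above:

// Sketch: gm's write() takes a file path, not a stream.
// thumbPath is assumed to be the destination file path.
gm(srcPath)
  .resize(100, 100)
  .write(thumbPath, function (err) {
    if (err) {
      return callback(err);
    }
    var thumbUrl = thumbPath.replace(/\\/g, '/').replace(/^\.\/public/, '');
    callback(null, thumbUrl);
  });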
I am working with the Node.js filesystem module. I have implemented writing and reading a file, and now I'm trying to rename one. It actually renames the file, but it throws the error below and my localhost stops running.
This is the error:
_http_outgoing.js:690
throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be one of type string or Buffer. Received type object
file1:

var fs = require('fs');

// write
function write(fileName, content) {
  return new Promise(function (resolve, reject) {
    fs.writeFile(`./test/${fileName}`, content, function (err, done) {
      if (err) {
        reject(err);
        return;
      }
      resolve(done);
      // console.log('file created >>', done);
    });
  });
}

function readFile(fileName, cb) {
  fs.readFile(`./test/${fileName}`, 'utf-8', function (err, done) {
    if (err) {
      cb(err);
      return;
    }
    cb(null, done);
  });
}

function rename(oldname, newname, cb) {
  fs.rename(`./bands/${oldname}`, `./bands/${newname}`, function (err, done) {
    if (err) {
      cb(err);
      return;
    }
    cb(null, done);
  });
}

rename('pinkfloyd.txt', 'tool.txt', function (err, done) {
  if (err) {
    console.log('error in renaming');
  } else {
    console.log('renamed >>');
  }
});

readFile('kisor.txt', function (err, done) {
  if (err) {
    console.log('error in file reading >>', err);
  } else {
    console.log('success >>', done);
  }
});

write('kisor.txt', 'i am javascript')
  .then(function (data) {
    console.log('write success', data);
  })
  .catch(function (err) {
    console.log('error in write >>', err);
  });

// object shorthand
module.exports = {
  write, readFile, rename
};
I have imported the exported functions from file1 in this file:
// var { write, readFile } = require('./file'); // object destructuring
var fileOp = require('./file1');
// console.log('file operation >>', fileOp);

fileOp.write('brodway.txt', 'i am infosys of nepal')
  .then(function (data) {
    console.log('done >>>', data);
  })
  .catch(function (err) {
    console.log('err', err);
  });

fileOp.readFile('kisor.txt', function (err, done) {
  if (err) {
    console.log('err', err);
  } else {
    console.log('success >>', done);
  }
});
And lastly, here is the server:
var http = require('http');
var fileOp = require('./file1');

var server = http.createServer(function (request, response) {
  // this function will be executed whenever a client is connected
  // request or 1st argument is the http request object
  // response or 2nd argument is the http response object
  var url = request.url;
  if (url == '/write') {
    fileOp.write('abcd.js', 'hi')
      .then(function (data) {
        response.end('data', data);
      })
      .catch(function (err) {
        response.end(err);
      });
  } else if (url == '/read') {
    fileOp.readFile('abcd.js', function (err, done) {
      if (err) {
        response.end(err);
      } else {
        response.end('done' + done);
      }
    });
  } else if (url == '/rename') {
    fileOp.rename('pinkfloyd.txt', 'tool.txt', function (err, done) {
      if (err) {
        response.end(err);
      } else {
        response.end('done', done);
      }
    });
  } else {
    response.end('form default page');
  }
  console.log('client connected to server');
  console.log('request url >>', request.url);
  // the request/response cycle must be completed
  // response.end('hi from node server'); a response cannot be sent more than once
});

server.listen(8080, function (err, done) {
  if (err) {
    console.log('server listening failed');
  } else {
    console.log('server listening at port 8080');
    console.log('press CTRL + C to exit from server');
  }
});
In this section of code:
fileOp.rename('pinkfloyd.txt', 'tool.txt', function (err, done) {
  if (err) {
    response.end(err);
  } else {
    response.end('done', done);
  }
});
You are calling response.end('done', done). But the fs.rename() callback does not have a done argument; it only has the err argument, because there is no data to communicate back: the rename either succeeded or it didn't. So done will be undefined, which means you're calling:
response.end('done', undefined);
The function signature for response.end() is this:
response.end([data[, encoding]][, callback])
So, you're trying to send undefined for the encoding. That is not correct.
Your rename function should be changed from this:
function rename(oldname, newname, cb) {
  fs.rename(`./bands/${oldname}`, `./bands/${newname}`, function (err, done) {
    if (err) {
      cb(err);
      return;
    }
    cb(null, done);
  });
}
to this:
function rename(oldname, newname, cb) {
  fs.rename(`./bands/${oldname}`, `./bands/${newname}`, function (err) {
    if (err) {
      cb(err);
      return;
    }
    cb(null); // no 2nd argument here
  });
}
or even simpler:
function rename(oldname, newname, cb) {
  fs.rename(`./bands/${oldname}`, `./bands/${newname}`, cb);
}
FYI, it appears you're using .end() improperly in a bunch of places in your code where you do this:
response.end('done', someData);
That's not how it works. You aren't emitting an event; you're ending the http response, and you need to follow this function signature:
response.end([data[, encoding]][, callback])
So, you'd probably just do response.end(data), if data was a string.
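Applied to the /rename route, a minimal sketch (assuming the simplified rename above, whose callback only reports an error) might look like this:

// Sketch: .end() only accepts a string or Buffer as data,
// so convert the Error to a string before sending it.
fileOp.rename('pinkfloyd.txt', 'tool.txt', function (err) {
  if (err) {
    response.end('rename failed: ' + err.message);
  } else {
    response.end('done');
  }
});

Note that the /write and /read branches have the same issue: response.end(err) passes an Error object where a string or Buffer is required, which is exactly what the ERR_INVALID_ARG_TYPE message ("Received type object") is complaining about.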
I am uploading an image from Node.js.
Control never reaches req.on('end'); nothing inside it gets printed, and I could not identify where the problem is. I am making an API, called from JS, to upload an image to a specific location on the server.
app.post('/tde/api/photo/:widgetId/:choosenFileName', function (req, res) {
  console.log("In file Upload..");
  console.log(req.params.widgetId);
  console.log(req.params.choosenFileName);
  res.writeHead(200, { 'Content-Type': 'application/binary' });
  var filedata = '';
  var chunks = [];
  // req.setEncoding('binary');
  req.on('data', function (chunk) {
    // filedata += chunk;
    chunks.push(chunk);
  });
  req.on('end', function (chunk) {
    var dir = 'uploads/' + req.params.widgetId;
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir);
      console.log("directory created..");
    }
    fs.readdir(dir, function (err, filenames) {
      if (err) {
        onError(err);
        return;
      }
      filenames.forEach(function (filename) {
        console.log(filename);
        fs.unlink(dir + '/' + filename, function (err) {
          if (err) {
            return console.error(err);
          }
          console.log("File deleted successfully!");
        });
      });
      // fs.writeFile('uploads/' + req.params.widgetId + '/sanmoy.jpg', chunk, function(err) {
      var fileName = req.params.choosenFileName;
      var widgetId = req.params.widgetId;
      // fs.writeFile('uploads/' + widgetId + '/' + fileName, filedata, 'binary', function(err) {
      var buffer = Buffer.concat(chunks);
      fs.writeFile('uploads/' + widgetId + '/' + fileName, buffer, function (err) {
        if (err) {
          return console.error(err);
        }
        console.log("writing file success!");
      });
    });
  });
  res.end("File is uploaded");
});
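One thing worth noting, independent of why 'end' never fires: res.end("File is uploaded") runs synchronously, before any of the asynchronous work inside the 'end' handler could complete. A sketch (same route and params as above, untested) that only responds once the file has actually been written:

req.on('end', function () {
  var dir = 'uploads/' + req.params.widgetId;
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir);
  }
  var buffer = Buffer.concat(chunks);
  fs.writeFile(dir + '/' + req.params.choosenFileName, buffer, function (err) {
    if (err) {
      console.error(err);
      return res.end('upload failed');
    }
    res.end('File is uploaded'); // respond only after the write completes
  });
});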
OK, I have a NodeJS app and I'm trying to download lots of images from a web server (about 500 for now, but the number will increase). The problem I get is an "Unhandled stream error in pipe Error: EMFILE", because it seems that too many files get opened at the same time.
So I'm trying to use async.queue to process files in batches of 20. But I still get the error.
SomeModel.find({}, function (err, photos) {
  if (err) {
    console.log(err);
  } else {
    photos.forEach(function (photo) {
      var url = photo.PhotoURL;
      var image = url.replace('http://someurl.com/media.ashx?id=', '').replace('&otherstuffattheend', '.jpg');
      photo.target = image;
      var q = async.queue(function (task) {
        request
          .get(task.PhotoURL)
          .on('response', function (response) {
            console.log(task.PhotoURL + ' : ' + response.statusCode, response.headers['content-type']);
            console.log(task.target);
          })
          .on('error', function (err) {
            console.log(err);
          })
          .pipe(fs.createWriteStream(task.target));
      }, 20);
      q.push(photo, function (err) {
        if (err) {
          console.log(err);
        }
      });
      q.drain = function () {
        console.log('Done.');
      };
    });
  }
});
What am I doing wrong? Many thanks for your time and help.
The problem is that you're creating a new queue for each photo and each queue receives just one photo. Instead, only create a queue once (outside of the forEach()) and push the photo objects to it. You're also missing the callback in your task handler. For example:
var q = async.queue(function (task, cb) {
  request
    .get(task.PhotoURL)
    .on('response', function (response) {
      console.log(task.PhotoURL + ' : ' + response.statusCode, response.headers['content-type']);
      console.log(task.target);
      // the call to `cb` could instead be made on the file stream's `finish` event
      // if you want to wait until it all gets flushed to disk before consuming the
      // next task in the queue
      cb();
    })
    .on('error', function (err) {
      console.log(err);
      cb(err);
    })
    .pipe(fs.createWriteStream(task.target));
}, 20);

q.drain = function () {
  console.log('Done.');
};
photos.forEach(function(photo) {
var url = photo.PhotoURL;
var image = url.replace('http://someurl.com/media.ashx?id=', '').replace('&otherstuffattheend', '.jpg');
photo.target = image;
q.push(photo, function(err) {
if (err) {
console.log(err);
}
});
});
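As the comment in the task handler suggests, a variant that only takes the next task once the file is fully flushed to disk would call cb from the write stream's finish event instead. A rough sketch (untested; a production version should also guard against cb being called twice when both the request and the stream emit errors):

var q = async.queue(function (task, cb) {
  var out = fs.createWriteStream(task.target);
  out.on('finish', function () {
    cb(); // file fully flushed to disk, consume the next task
  });
  out.on('error', function (err) {
    cb(err);
  });
  request
    .get(task.PhotoURL)
    .on('error', function (err) {
      cb(err);
    })
    .pipe(out);
}, 20);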
Users upload files into my Express app. I need to calculate the hash of each uploaded file and then write the file to disk, using the calculated hash as the filename. I am trying to do it with the following code:
function storeFileStream(file, next) {
  createFileHash(file, function (err, hash) {
    if (err) {
      return next(err);
    }
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function (err) {
      return next(err);
    });
    stream.on('finish', function () {
      return next();
    });
    file.pipe(stream);
  });
}

function createFileHash(file, next) {
  var hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  file.on('error', function (err) {
    return next(err);
  });
  file.on('end', function (data) {
    hash.end();
    return next(null, hash.read());
  });
  file.pipe(hash);
}
The problem is that after I calculate the file hash, the written file size is 0. What is the best way to solve this task?
Update
Following @poke's suggestion, I tried duplicating my stream. Now my code is:
function storeFileStream(file, next) {
  var s1 = new pass(); // pass is presumably require('stream').PassThrough
  var s2 = new pass();
  file.pipe(s1);
  file.pipe(s2);
  createFileHash(s1, function (err, hash) {
    if (err) {
      return next(err);
    }
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function (err) {
      return next(err);
    });
    stream.on('finish', function () {
      return next();
    });
    s2.pipe(stream);
  });
}

function createFileHash(file, next) {
  var hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  file.on('error', function (err) {
    return next(err);
  });
  file.on('end', function (data) {
    hash.end();
    return next(null, hash.read());
  });
  file.pipe(hash);
}
The problem with this code is that the end and finish events are not emitted. If I comment out file.pipe(s2);, the events are emitted, but then I get my original problem again.
This code fixes the problem:
var s1 = new passThrough(),
    s2 = new passThrough();

file.on('data', function (data) {
  s1.write(data);
  s2.write(data);
});
file.on('end', function () {
  s1.end();
  s2.end();
});
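This works because each incoming chunk is written to both PassThrough streams by hand, so the hashing branch and the file-writing branch each receive an independent, complete copy of the data, and ending both streams explicitly lets the hash emit end and the write stream emit finish.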
The correct and simple way should be as follows: we should resume the PassThrough streams.
function storeFileStream(file, directory, version, reject, resolve) {
  const fileHashSource = new PassThrough();
  const writeSource = new PassThrough();
  file.pipe(fileHashSource);
  file.pipe(writeSource);

  // this is the key point, see https://nodejs.org/api/stream.html#stream_three_states
  fileHashSource.resume();
  writeSource.resume();

  createFileHash(fileHashSource, function (err, hash) {
    if (err) {
      return reject(err);
    }
    const fileName = path.join(directory, version + '_' + hash.slice(0, 8) + '.zip');
    const writeStream = fs.createWriteStream(fileName);
    writeStream.on('error', function (err) {
      return reject(err);
    });
    writeStream.on('finish', function () {
      return resolve();
    });
    writeSource.pipe(writeStream);
  });
}

function createFileHash(readStream, next) {
  const hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  hash.on('error', function (err) {
    return next(err);
  });
  hash.on('finish', function (data) {
    return next(null, hash.read());
  });
  readStream.pipe(hash);
}
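A hypothetical call site, wrapping the reject/resolve callbacks in a Promise (file, directory, and version stand in for whatever your upload handler provides):

function storeFile(file, directory, version) {
  return new Promise(function (resolve, reject) {
    storeFileStream(file, directory, version, reject, resolve);
  });
}

// e.g. storeFile(uploadStream, config.storagePath, 'v1').then(...).catch(...);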
You could use the async module (not tested but should work):
async.waterfall([
  function (done) {
    var hash = crypto.createHash('sha1');
    hash.setEncoding('hex');
    file.on('error', function (err) {
      done(err);
    });
    file.on('end', function (data) {
      hash.end();
      done(null, hash.read());
    });
    file.pipe(hash);
  },
  function (hash, done) {
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function (err) {
      done(err);
    });
    stream.on('finish', function () {
      done(null);
    });
    // note: at this point `file` has already ended, so this step still
    // needs a second copy of the data (see the PassThrough approaches above)
    file.pipe(stream);
  }
], function (err) {
  console.log("Everything is done!");
});
I need to make two resized images from a single source image.
var fs = require('fs'),
    gm = require('gm'),
    async = require('async');

var worker = function (filename) {
  var img = gm(fs.createReadStream(filename));
  img.flip();

  this.run = function () {
    async.series([
      function (callback) {
        img.resize(640, 480);
        img.toBuffer(function (err, buffer) {
          if (err) {
            callback(err);
            return;
          }
          callback(null, true);
        });
      },
      function (callback) {
        img.resize(320, 240);
        img.toBuffer(function (err, buffer) {
          if (err) {
            callback(err);
            return;
          }
          callback(null, true);
        });
      }
    ],
    function (err, results) {
      console.log(err, results);
    });
  };
};

new worker('test.jpg').run();
This code generates an error:
Error: gm().stream() or gm().write() with a non-readable stream.
If I replace fs.createReadStream with a filename, then everything works just fine. It looks like gm doesn't store the source image from the stream in its internal buffer. Is it a bug, or should I know something else about using it properly?
Note: async is used because in the real project I need both results to perform other actions with them.
Try this; I don't think you need to use write streams in this case:
gm(imagePath)
  .resize(640, 480)
  .autoOrient()
  .flip()
  .write(newImagePath, function (e) {
    if (e) throw e;
    gm(newImagePath)
      .resize(320, 240)
      .write(newImagePathSmall, function (e) {
        if (e) throw e;
        console.log('resized successfully');
      });
  });
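If re-resizing the already-resized 640x480 output is a quality concern, a variant sketch (reusing the async dependency from the question; newImagePath and newImagePathSmall are assumed output paths) could produce both sizes independently from the original:

// Sketch: both sizes come from the original image, so the thumbnail
// is not a second-generation resize of the 640x480 output.
async.series([
  function (cb) { gm(imagePath).resize(640, 480).flip().write(newImagePath, cb); },
  function (cb) { gm(imagePath).resize(320, 240).flip().write(newImagePathSmall, cb); }
], function (err) {
  if (err) throw err;
  console.log('both sizes written');
});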