I'm using node-resemble-js to compare two PNG images.
The comparison itself runs without issue and I get a sensible result back; however, I'm having trouble outputting the image diff.
var express = require('express');
var fs = require('fs');
var resemble = require('node-resemble-js');
var router = express.Router();

router.get('/compare', function (req, res, next) {
  compareImages(res);
});

var compareImages = function (res) {
  resemble.outputSettings({
    largeImageThreshold: 0
  });
  var diff = resemble('1.png')
    .compareTo('2.png')
    .ignoreColors()
    .onComplete(function (data) {
      console.log(data);
      var png = data.getDiffImage();
      fs.writeFile('diff.png', png.data, null, function (err) {
        if (err) {
          throw 'error writing file: ' + err;
        }
        console.log('file written');
      });
      res.render('compare');
    });
};

module.exports = router;
It writes to diff.png as expected, but the result isn't a valid image.
Any ideas where I'm going wrong? I feel like I'm pretty close but unsure of the final piece.
Thanks
Looks like there is a pack() method that needs to be called, which does some work and then streamifies the data. In that case you can buffer the stream and then call writeFile like this:
var png = data.getDiffImage();
var buf = Buffer.alloc(0); // starting buffer for Buffer.concat (new Buffer() is deprecated)
var strm = png.pack();

strm.on('data', function (dat) {
  buf = Buffer.concat([buf, dat]);
});

strm.on('end', function () {
  fs.writeFile('diff.png', buf, null, function (err) {
    if (err) {
      throw 'error writing file: ' + err;
    }
    console.log('file written');
  });
});
or you can just pipe it like this, which is a little simpler:
png.pack().pipe(fs.createWriteStream('diff.png'))
Honestly, your approach made sense to me (grab the Buffer and write it), but it seems the data Buffer attached to what comes back from getDiffImage isn't really the final, encoded PNG. The docs are a bit thin, but there's some info here: https://github.com/lksv/node-resemble.js/issues/4
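If you go the pipe route, one detail worth adding: in the original route handler, res.render('compare') runs before the file has finished writing. A minimal sketch (inside onComplete, same names as above) that only renders once the write stream reports finish:
var out = fs.createWriteStream('diff.png');
data.getDiffImage().pack().pipe(out);

out.on('finish', function () {
  console.log('file written');
  res.render('compare'); // only render once diff.png is actually on disk
});

out.on('error', function (err) {
  console.error('error writing file:', err);
  res.status(500).end();
});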
I'm learning how to use the csv-parse module for nodejs. I wrote this code and it works perfectly:
var fs = require('fs');
const fileName = './spreadsheet.csv';
const assert = require('assert');
const { parse } = require('csv-parse');
const records = [];

// Initialize the parser
const parser = parse({
  delimiter: ','
});

// Use the readable stream api to consume records
parser.on('readable', function () {
  let record;
  while ((record = parser.read()) !== null) {
    records.push(record);
  }
});

// Catch any error
parser.on('error', function (err) {
  console.error(err.message);
});

fs.readFile(fileName, 'utf8', function (err, f) {
  if (err) {
    return console.error(err);
  }
  const rows = f.split("\r\n");
  for (let x in rows) {
    parser.write(rows[x] + "\n");
  }
  parser.end();
  console.log(records);
});
But right now, I depend on the fs module and fs.readFile to consume my CSV file. Does csv-parse have an option to read from a file directly? I ask because, as you can see in my code, I have to specify my own line-break characters, which could differ between CSV files. I thought the csv-parse module might have something that more readily addresses this.
The parser object will do most of the work for you. It is expecting the data to arrive on its stream interface and it will do everything else. All you have to do is open a stream and then pipe it to the parser like this:
fs.createReadStream(fileName).pipe(parser);
And, here it is combined with your code:
const fs = require('fs');
const fileName = './spreadsheet.csv';
const { parse } = require('csv-parse');
const records = [];

// Initialize the parser
const parser = parse({
  delimiter: ','
});

// Use the readable stream api to consume records
parser.on('readable', function () {
  let record;
  while ((record = parser.read()) !== null) {
    records.push(record);
  }
});

// Catch any error
parser.on('error', function (err) {
  console.error(err.message);
});

parser.on('end', function () {
  console.log(records);
});

// open the file and pipe it into the parser
fs.createReadStream(fileName).pipe(parser);
P.S. It's amazing that such a simple example of getting the CSV data from a file is not shown in the documentation (at least not anywhere I could find it). I'm also surprised they don't offer an option where the parser automatically collects the data from the stream, instead of requiring you to implement the readable event handler. Odd, for such an otherwise complete package.
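For what it's worth, csv-parse also accepts a callback (at least in recent versions), which removes the need to wire up the readable handler yourself; treat this as a sketch rather than the canonical usage:
const fs = require('fs');
const { parse } = require('csv-parse');

// with a callback, the parser collects all records and delivers them at once
const parser = parse({ delimiter: ',' }, function (err, records) {
  if (err) return console.error(err.message);
  console.log(records);
});

fs.createReadStream('./spreadsheet.csv').pipe(parser);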
How can I stream a response using an in-memory DB?
I'm using LokiJS as an in-memory DB. There is a particular resource where I must return the entire contents of a table (it cannot be paginated), and that table can grow to 500,000 items or so, which is about 300 MB.
In other cases, I have used fs.createReadStream to get a file and stream it back to the user:
fs.createReadStream('zips.json')
  .on('data', function () {
    res.write(...)
  })
  .on('end', function () {
    res.end();
  })
This has worked great for large files, but how can I do something equivalent using an in-memory DB?
const items = lokiDb.addCollection('items');
items.insert('a bunch of items ...');
// I would now like to stream items via res.write
res.write(items)
Currently, res.write(items) will cause memory problems as Node is trying to return the entire response at once.
As far as I can tell, there is no native stream provider in Loki, though I may have missed it. What you may want to do instead is listen to the 'insert' event on the collection and write that, like so:
const items = lokiDb.addCollection('items');

items.on('insert', (results) => {
  // results is the inserted document (or array of documents); serialize it before writing
  res.write(JSON.stringify(results));
});

items.insert('a bunch of items ...');
If I'm correct, basically your problem is that fs read streams only read from files, while you want to read from an in-memory data structure. A solution might be to define your own readable stream class, overriding the stream.Readable._read method:
"use strict";

var util = require('util');
var stream = require('stream');

function InMemoryStream(userDefinedOptions, resource) {
  // per-instance options and read cursors, so multiple streams don't interfere
  var options = {
    highWaterMark: 16384,
    encoding: null,
    objectMode: false
  };
  if (userDefinedOptions) {
    for (var key in userDefinedOptions) {
      options[key] = userDefinedOptions[key];
    }
  }
  this.resource = resource;
  this.begin = 0;
  this.end = 0;
  stream.Readable.call(this, options);
}

util.inherits(InMemoryStream, stream.Readable);

InMemoryStream.prototype._read = function (size) {
  if (this.begin >= this.resource.length) {
    return this.push(null); // nothing left: signal end of stream
  }
  this.end += size;
  this.push(this.resource.slice(this.begin, this.end));
  this.begin += size;
};

exports.InMemoryStream = InMemoryStream;

exports.readStream = function (userDefinedOptions, resource) {
  return new InMemoryStream(userDefinedOptions, resource);
};
You convert your in-memory data structure (in the following example, the Buffer returned by fs.readFile) to a read stream, and pipe it through to a write stream, as follows:
"use strict";
var fs = require('fs');
var InMemoryStream = require('/home/regular/javascript/poc/inmemorystream.js');

var config = {
  encoding: null,
  fileToRead: 'raphael.js',
  fileToWrite: 'secondraphael.js'
};

fs.readFile(config.fileToRead, function (err, data) {
  if (err) return console.log('Error when opening file', err);

  var inMemoryStream = InMemoryStream.readStream({ encoding: config.encoding }, data);
  var writestream = fs.createWriteStream(config.fileToWrite);

  inMemoryStream.on('error', function (err) {
    console.log('in memory stream error', err);
  });
  inMemoryStream.pipe(writestream);
});
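If the hand-rolled stream class feels heavy, newer Node versions (12+) ship stream.Readable.from, which wraps any iterable in a readable stream. A rough sketch for the Loki case from the question, assuming items is the collection and that items.find() returns every document in it:
const { Readable } = require('stream');

// lazily serialize the documents as a JSON array, one element per chunk
function* toJsonChunks(docs) {
  yield '[';
  let first = true;
  for (const doc of docs) {
    yield (first ? '' : ',') + JSON.stringify(doc);
    first = false;
  }
  yield ']';
}

const docs = items.find(); // assumed: returns all documents in the collection
Readable.from(toJsonChunks(docs)).pipe(res);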
I am using Node.js with Express.
I am trying to delete a file after sending it to the client with Express.
function deleteFile (file) {
  fs.unlink(file, function (err) {
    if (err) {
      logger.error(err);
    }
  });
}

app.get("/deleteFileAfterDownload", function (req, res) {
  var fileName = "a.pdf"
  var stream = fs.createReadStream(fileName);
  var streamClosed = false;

  req.on('end', function () {
    if (!streamClosed) {
      stream.emit('close');
      // I tried stream.destroy() but that is also not working
    }
  });

  stream.on('close', function () {
    streamClosed = true;
    deleteFile(fileName);
  });

  req.on('data', function () {});
  stream.pipe(res);
});
But the file is not getting deleted. It seems the process is still holding the file open, because the file only gets deleted once I end the process.
Can anybody tell me why?
If I am doing it wrong, please tell me a good way.
Please add a log in deleteFile to make sure it is actually called.
Then try simplifying it:
var fileName = "a.pdf";
var stream = fs.createReadStream(fileName);

stream.pipe(res);
res.once("finish", function () {
  deleteFile(fileName);
});
The previous example only deletes the file if the download finished.
If you want to delete the file unconditionally, try the following:
var fileName = "a.pdf";
var stream = fs.createReadStream(fileName);

stream.pipe(res).once("close", function () {
  stream.close();
  deleteFile(fileName);
});
stream.close() is important here, because the read stream is not closed automatically if the pipe is aborted.
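A variant of the same idea, not from the original answer, is stream.pipeline (available since Node 10), which reports both success and failure of the transfer in a single callback; this sketch reuses deleteFile and logger from the question:
const { pipeline } = require('stream');

app.get("/deleteFileAfterDownload", function (req, res) {
  var fileName = "a.pdf";
  pipeline(fs.createReadStream(fileName), res, function (err) {
    if (err) {
      logger.error(err); // download aborted or read error
    }
    deleteFile(fileName); // delete in either case
  });
});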
I am uploading a file using the restify API.
It gives me an error,
{
"code": "InternalError",
"message": "This socket is closed."
}
The read API still works great, but I'm unable to upload the image. What could be the possible issue?
The code I was using:
exports.uploadfile = function (req, res, next) {
  var file_name = req.files.image.name;
  var file_name_arr = file_name.split('.');
  var imageExtension = file_name_arr.pop();
  var imageName = Date.now() + "." + imageExtension;
  var newPath = process.cwd() + "/images/" + imageName;

  fs.readFile(req.files.image.path, function (err, data) {
    fs.writeFile(newPath, data, function (err) {
      if (err) return next(err);
      res.json({
        image: imageName
      });
      return next();
    });
  });
};
After digging around a bit, I found the answer. The above code reads the entire file into memory and then writes it to disk, which is slow. Instead, I used streaming to write the file, which works like a charm.
Replace the fs.readFile block in the above code with:
var rr = fs.createReadStream(req.files.image.path).pipe(fs.createWriteStream(newPath));

rr.on('error', function (err) {
  console.log(err);
});

rr.on('finish', function () {
  // do your work here (e.g. send the JSON response)
});
reference link: https://nodejs.org/api/stream.html
Another way is to use fs.rename, which is faster than the method above because it doesn't have to read the file at all (note that fs.rename takes a callback):
fs.rename(req.files.image.path, newPath, function (err) {
  if (err) return next(err);
  // the file has been moved; respond as before
});
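One caveat, not from the original answer: fs.rename cannot move a file across filesystems (it fails with EXDEV), which can happen when the upload middleware writes its temp files to a different mount than the images directory. A sketch of a fallback, using a hypothetical moveUpload helper:
function moveUpload(srcPath, destPath, done) {
  fs.rename(srcPath, destPath, function (err) {
    if (!err) return done();
    if (err.code !== 'EXDEV') return done(err);
    // source and target are on different filesystems: copy, then remove the temp file
    fs.createReadStream(srcPath)
      .pipe(fs.createWriteStream(destPath))
      .on('error', done)
      .on('finish', function () {
        fs.unlink(srcPath, done);
      });
  });
}

// usage inside the handler above
moveUpload(req.files.image.path, newPath, function (err) {
  if (err) return next(err);
  res.json({ image: imageName });
  return next();
});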
I want to stream multiple files, one after another, to the browser. To illustrate, think of having multiple CSS files which should be delivered concatenated as one.
The code I am using is:
var directory = path.join(__dirname, 'css');

fs.readdir(directory, function (err, files) {
  async.eachSeries(files, function (file, callback) {
    if (!endsWith(file, '.css')) { return callback(); } // (1)

    var currentFile = path.join(directory, file);
    fs.stat(currentFile, function (err, stats) {
      if (stats.isDirectory()) { return callback(); } // (2)

      var stream = fs.createReadStream(currentFile).on('end', function () {
        callback(); // (3)
      });
      stream.pipe(res, { end: false }); // (4)
    });
  }, function () {
    res.end(); // (5)
  });
});
The idea is that I
1. filter out all files that do not have the file extension .css,
2. filter out all directories,
3. proceed with the next file once a file has been read completely,
4. pipe each file to the response stream without closing it, and
5. end the response stream once all files have been piped.
The problem is that only the first .css file gets piped and all remaining files are missing. It's as if (3) jumped directly to (5) after the first (4).
The interesting thing is that if I replace line (4) with
stream.on('data', function (data) {
  console.log(data.toString('utf8'));
});
everything works as expected: I see multiple files. If I then change this code to
stream.on('data', function (data) {
  res.write(data.toString('utf8'));
});
all files except the first are missing again.
What am I doing wrong?
PS: The error happens using Node.js 0.8.7 as well as using 0.8.22.
UPDATE
Okay, it works if you change the code as follows:
var directory = path.join(__dirname, 'css');

fs.readdir(directory, function (err, files) {
  var concatenated = '';
  async.eachSeries(files, function (file, callback) {
    if (!endsWith(file, '.css')) { return callback(); }

    var currentFile = path.join(directory, file);
    fs.stat(currentFile, function (err, stats) {
      if (stats.isDirectory()) { return callback(); }

      var stream = fs.createReadStream(currentFile).on('end', function () {
        callback();
      }).on('data', function (data) {
        concatenated += data.toString('utf8');
      });
    });
  }, function () {
    res.write(concatenated);
    res.end();
  });
});
But: why? Why can't I call res.write multiple times, instead of first collecting all the chunks and then writing them all at once?
Consider also using multistream, which allows you to combine and emit multiple streams one after another.
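A rough sketch of what that could look like for the CSS case from the question, reusing its files, directory, path, and endsWith helper (and skipping the directory check for brevity):
var MultiStream = require('multistream');

var cssStreams = files
  .filter(function (file) { return endsWith(file, '.css'); })
  .map(function (file) {
    return fs.createReadStream(path.join(directory, file));
  });

// multistream emits the streams one after another, then ends the destination
new MultiStream(cssStreams).pipe(res);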
The code was perfectly fine; it was the unit test that was wrong ...
Fixed that, and now it works like a charm :-)
This may help someone else:
const fs = require("fs");
const pth = require("path");

let readerStream1 = fs.createReadStream(pth.join(__dirname, "a.txt"));
let readerStream2 = fs.createReadStream(pth.join(__dirname, "b.txt"));
let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));

// only readable streams have a "pipe" method;
// keep the writer open after the first stream ends, then append the second one
readerStream1.pipe(writerStream, { end: false });
readerStream1.on("end", function () {
  readerStream2.pipe(writerStream); // this pipe closes the writer when done
});
I also checked Rocco's answer and it's working like a charm:
// npm i --save multistream
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");

let streams = [
  fs.createReadStream(pth.join(__dirname, "a.txt")),
  fs.createReadStream(pth.join(__dirname, "b.txt"))
];
let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));

// new multi(streams).pipe(process.stdout);
new multi(streams).pipe(writerStream);
And to send the results to the client:
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");
const exp = require("express");

const app = exp();
app.listen(3000);

app.get("/stream", (q, r) => {
  new multi([
    fs.createReadStream(pth.join(__dirname, "a.txt")),
    fs.createReadStream(pth.join(__dirname, "b.txt"))
  ]).pipe(r);
});