NodeJS writeStream empty file

I am trying to use nodeJS to save a processed image stored in a base64 string.
var buff = new Buffer(base64data,'base64');
console.log(base64data);
var stream = fs.createWriteStream('/path/to/thefile.png');
stream.write(buff)
stream.end()
However, the resulting file is empty.
When I take the output of console.log(base64data); and decode it locally, it produces a valid png binary, so why is the file empty?
The file is a 3600x4800 px png file (i.e. it's huge), could this be a factor?
Also, I tried writeFile as well, no luck.
And yes, fs is require('fs')
Thanks

You call stream.end() too soon, before anything has been written; remember that the write is asynchronous.
var buff = new Buffer(base64data,'base64');
console.log(base64data);
var stream = fs.createWriteStream('/path/to/thefile.png');
// a writable stream never emits 'end'; pass a callback to write() so
// end() runs only after the write has been flushed
stream.write(buff, function() {
  stream.end();
});

Better:
var buff = new Buffer(base64data,'base64');
console.log(base64data);
var stream = fs.createWriteStream('/path/to/thefile.png');
stream.write(buff);
stream.end();
stream.on('finish', () => {
  // 'All writes are now complete.'
});
stream.on('error', (error) => {...});
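For reference, new Buffer(...) is deprecated in current Node.js. A minimal sketch of the same save using Buffer.from and fs.writeFile, reusing the question's base64data and placeholder path:
var fs = require('fs');

// Buffer.from replaces the deprecated new Buffer(...) constructor
var buff = Buffer.from(base64data, 'base64');

// writeFile opens, writes and closes the file in one call
fs.writeFile('/path/to/thefile.png', buff, function(err) {
  if (err) return console.error(err);
  console.log('file saved');
});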

Related

Are there any Node libraries to create memory streams similar to file streams?

I use the officegen library to create a docx file, then I need to zip it and return it as the response. I can easily save the docx file, create a zip file from it and return that, but is there an alternative way to write the docx file to something like a memory stream instead of a file stream and create the zip from that?
I tried the following code with the memory-streams library, but it is not working; I think its WritableStream is not quite like a file stream. If you replace the WritableStream with the output of createWriteStream, it works.
var streams = require('memory-streams');
var AdmZip = require('adm-zip');
var writer = new streams.WritableStream();
writer.on('close', function() {
  var zip = new AdmZip();
  zip.addFile("test2.docx", writer.toBuffer());
  var willSendthis = zip.toBuffer();
  return res.end(Buffer.from(willSendthis, 'binary'));
});
writer.on('error', function() {
  console.log("error");
});
doc.on('error', function() {
  console.log("error");
});
doc.generate(writer);
This is not something very specific to officegen; it just needs a writable stream that behaves like a file stream, including a 'close' event.
I think the following library is better than the one you are using.
https://www.npmjs.com/package/memorystream
Example:
var MemoryStream = require('memorystream');
var memStream = new MemoryStream(['Hello',' ']);
var data = '';
memStream.on('data', function(chunk) {
  data += chunk.toString();
});
memStream.write('World');
memStream.on('end', function() {
  // outputs 'Hello World!'
  console.log(data);
});
memStream.end('!');
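If you would rather avoid an extra dependency, a plain PassThrough from Node core can play the same role, since it is writable and buffers whatever is written to it. A minimal sketch, assuming the same doc, AdmZip and res objects as in the question, and assuming officegen ends the stream when generation completes (as it does for file streams):
var stream = require('stream');
var AdmZip = require('adm-zip');

var writer = new stream.PassThrough();
var chunks = [];

// collect everything officegen writes into memory
writer.on('data', function(chunk) {
  chunks.push(chunk);
});

// 'end' fires once officegen has finished and ended the stream
writer.on('end', function() {
  var zip = new AdmZip();
  zip.addFile("test2.docx", Buffer.concat(chunks));
  res.end(zip.toBuffer());
});

doc.on('error', function(err) {
  console.log(err);
});

doc.generate(writer);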

How to combine video upload chunks Node.js

I'm trying to upload a large (8.3 GB) video to my Node.js (Express) server in chunks using busboy. How do I receive each chunk (busboy is doing this part) and piece them together into one whole video?
I have been looking into readable and writable streams, but I never get the whole video; I keep overwriting parts of it and end up with about 1 GB.
Here's my code:
req.busboy.on('file', (fieldname, file, filename) => {
  logger.info(`Upload of '${filename}' started`);
  const video = fs.createReadStream(path.join(`${process.cwd()}/uploads`, filename));
  const fstream = fs.createWriteStream(path.join(`${process.cwd()}/uploads`, filename));
  if (video) {
    video.pipe(fstream);
  }
  file.pipe(fstream);
  fstream.on('close', () => {
    logger.info(`Upload of '${filename}' finished`);
    res.status(200).send(`Upload of '${filename}' finished`);
  });
});
After 12+ hours, I figured it out using pieces from an article that was given to me. I came up with this code:
//busboy is middleware on my index.js
const path = require('path');
const fs = require('fs-extra');
const streamToBuffer = require('fast-stream-to-buffer');

//API function called first
uploadVideoChunks(req, res) {
  req.pipe(req.busboy);
  req.busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    const fileNameBase = filename.replace(/\.[^/.]+$/, '');
    //save all the chunks to a temp folder with .tmp extensions
    streamToBuffer(file, function (error, buffer) {
      const chunkDir = `${process.cwd()}/uploads/${fileNameBase}`;
      fs.outputFileSync(path.join(chunkDir, `${Date.now()}-${fileNameBase}.tmp`), buffer);
    });
  });
  req.busboy.on('finish', () => {
    res.status(200).send(`Finished uploading chunk`);
  });
}

//API function called once all chunks are uploaded
saveToFile(req, res) {
  const { filename, profileId, movieId } = req.body;
  const uploadDir = `${process.cwd()}/uploads`;
  const fileNameBase = filename.replace(/\.[^/.]+$/, '');
  const chunkDir = `${uploadDir}/${fileNameBase}`;
  let outputFile = fs.createWriteStream(path.join(uploadDir, filename));
  fs.readdir(chunkDir, function(error, filenames) {
    if (error) {
      throw new Error('Cannot get upload chunks!');
    }
    //loop through the temp dir and write to the stream to create a new file
    filenames.forEach(function(tempName) {
      const data = fs.readFileSync(`${chunkDir}/${tempName}`);
      outputFile.write(data);
      //delete the chunk we just handled
      fs.removeSync(`${chunkDir}/${tempName}`);
    });
    outputFile.end();
  });
  outputFile.on('finish', async function () {
    //delete the temp folder once the file is written
    fs.removeSync(chunkDir);
  });
}
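A streaming variant of saveToFile is sketched below, assuming the same chunk-directory layout and the same fs-extra and path imports as above; it pipes each temp chunk into the output file instead of loading it fully into memory with readFileSync, and sorts the names so the timestamp-prefixed chunks are concatenated in upload order:
// sketch: combine the .tmp chunks by piping, so no chunk is ever held fully in memory
async function combineChunks(chunkDir, outputPath) {
  const outputFile = fs.createWriteStream(outputPath);
  // the Date.now() prefixes sort lexicographically in upload order
  const names = fs.readdirSync(chunkDir).sort();

  for (const name of names) {
    await new Promise((resolve, reject) => {
      fs.createReadStream(path.join(chunkDir, name))
        .on('error', reject)
        .on('end', resolve)
        .pipe(outputFile, { end: false }); // keep outputFile open between chunks
    });
  }

  outputFile.end();
  fs.removeSync(chunkDir); // clean up the temp chunks (fs-extra)
}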
Use streams
multer allows you to easily handle file uploads as part of an Express route. This works great for small files that don't leave a significant memory footprint.
The problem with loading a large file into memory is that you can actually run out of memory and cause your application to crash.
Use a multipart/form-data request. This can be handled by assigning a readStream to that field in your request options.
Streams are extremely valuable for optimizing performance.
Try this code sample (writePath, totalSize and cb are assumed to be defined in the surrounding scope); I think it will work for you.
busboy.on("file", function(fieldName, file, filename, encoding, mimetype){
const writeStream = fs.createWriteStream(writePath);
file.pipe(writeStream);
file.on("data", data => {
totalSize += data.length;
cb(totalSize);
});
file.on("end", () => {
console.log("File "+ fieldName +" finished");
});
});
You can also refer to this link to resolve the problem:
https://github.com/mscdex/busboy/issues/143
I think multer is a good fit for this; did you try multer?
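For reference, a minimal multer sketch, assuming an Express app and a form field named 'video' (both names are illustrative): multer streams each upload to disk for you, so the whole file never sits in memory.
const express = require('express');
const multer = require('multer');

const app = express();
// multer writes incoming files straight to the uploads/ directory
const upload = multer({ dest: 'uploads/' });

// 'video' is the assumed name of the form field carrying the file
app.post('/upload', upload.single('video'), (req, res) => {
  // req.file.path is where multer stored the uploaded file
  res.status(200).send(`Upload of '${req.file.originalname}' finished`);
});

app.listen(3000);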

downloading torrent from peers in nodejs

I use this piece of code to download torrent:
var torrentStream = require('torrent-stream');
var engine = torrentStream('magnet:?xt=urn:btih:44A91362AFFF802F9058993B109C544ACC6B4813');
engine.on('ready', function(e) {
  engine.files.forEach(function(file) {
    console.log('filename:', file.name);
    var stream = file.createReadStream();
    // stream is a readable stream containing the file content
  });
});
This torrent is downloaded correctly by uTorrent, but it doesn't work in Node.js (nothing happens). Any ideas why? Maybe the p2p network was not bootstrapped? How can I do this?
Thanx
Of course nothing happens, because you don't do anything with the stream in the example.
If you want to save it to a file, you can create a write stream:
var writeStream = fs.createWriteStream('file2.txt')
stream.pipe(writeStream)
or you can use the stream with events:
var data = ''
stream.on('data', chunk => {
  data += chunk
})
stream.on('end', () => {
  console.log(data)
})
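Putting that together with the question's code, a minimal sketch (same magnet link and torrent-stream setup as above) that actually consumes each file's stream by piping it to disk; for simplicity it writes each file by its bare name into the current directory:
var fs = require('fs');
var torrentStream = require('torrent-stream');

var engine = torrentStream('magnet:?xt=urn:btih:44A91362AFFF802F9058993B109C544ACC6B4813');

engine.on('ready', function() {
  engine.files.forEach(function(file) {
    console.log('filename:', file.name);
    // creating and consuming the read stream is what makes
    // torrent-stream actually download the file's pieces
    file.createReadStream().pipe(fs.createWriteStream(file.name));
  });
});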

Pipe image through graphics magic only half complete

I am trying to set up a transform stream to pipe an image through GM (https://github.com/aheckmann/gm), so I can do something like:
readStream.pipe(resize()).pipe(writeStream);
I have used through2 along with gm to try and achieve this. It works but only parses half the image, leaving a large portion just grey.
'use strict';
var fs = require('fs')
, gm = require('gm').subClass({imageMagick: true})
, through2 = require('through2');
let readStream = fs.createReadStream('landscape.jpg');
let writeStream = fs.createWriteStream('landscape-out.jpg');
let resize = function(width, height) {
  return through2(function(chunk, enc, callback) {
    gm(chunk)
      .resize(800)
      .gravity('Center')
      .crop(800, 500, 0, 0)
      .stream((err, stdout, stderr) => {
        stdout.on('data', chunk => {
          this.push(chunk);
        });
        stdout.on('end', () => {
          callback();
        });
      });
  });
}
readStream.pipe(resize()).pipe(writeStream);
In through2(function(chunk, enc, callback){ ... }), chunk is only a small part of the image, so the behavior you got is normal: you are resizing individual chunks of the image, not the image itself.
That said, from the docs:
// GOTCHA:
// when working with input streams and any 'identify'
// operation (size, format, etc), you must pass "{bufferStream: true}" if
// you also need to convert (write() or stream()) the image afterwards
// NOTE: this buffers the readStream in memory!
var readStream = fs.createReadStream('/path/to/my/img.jpg');
gm(readStream)
  .size({bufferStream: true}, function(err, size) {
    this.resize(size.width / 2, size.height / 2)
    this.write('/path/to/resized.jpg', function (err) {
      if (!err) console.log('done');
    });
  });
But it's going to buffer the picture in memory, so it's not optimal.
Since you are using ImageMagick, you should just let it handle that part of the processing and then stream the output.
From the doc
var writeStream = fs.createWriteStream('/path/to/my/reformatted.png');
gm('/path/to/my/img.jpg')
  .stream('png')
  .pipe(writeStream);
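Applying that to the original snippet, a sketch (same input and output paths as the question) that hands gm the whole read stream instead of wrapping it in a per-chunk transform:
'use strict';
const fs = require('fs');
const gm = require('gm').subClass({ imageMagick: true });

const readStream = fs.createReadStream('landscape.jpg');
const writeStream = fs.createWriteStream('landscape-out.jpg');

// gm consumes the entire read stream, so ImageMagick sees the whole image
gm(readStream)
  .resize(800)
  .gravity('Center')
  .crop(800, 500, 0, 0)
  .stream((err, stdout, stderr) => {
    if (err) return console.error(err);
    stdout.pipe(writeStream); // pipe the converted image to disk
  });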

How to chain write stream, immediately with a read stream in Node.js 0.10?

The following line will download an image file from a specified url variable:
var filename = path.join(__dirname, url.replace(/^.*[\\\/]/, ''));
request(url).pipe(fs.createWriteStream(filename));
And these lines will take that image and save to MongoDB GridFS:
var gfs = Grid(mongoose.connection.db, mongoose.mongo);
var writestream = gfs.createWriteStream({ filename: filename });
fs.createReadStream(filename).pipe(writestream);
Chaining pipe like this throws Error: 500 Cannot Pipe. Not Pipeable.
request(url).pipe(fs.createWriteStream(filename)).pipe(writestream);
This happens because the image file is not ready to be read yet, right? What should I do to get around this problem?
Using the following: Node.js 0.10.10, mongoose, request and gridfs-stream libraries.
request(url).pipe(fs.createWriteStream(filename)).pipe(writestream);
is the same as this:
var fileStream = fs.createWriteStream(filename);
request(url).pipe(fileStream);
fileStream.pipe(writestream);
So the issue is that you are attempting to .pipe one WriteStream into another WriteStream.
// create 'fs' module variable
var fs = require("fs");
// open the streams
var readerStream = fs.createReadStream('inputfile.txt');
var writerStream = fs.createWriteStream('outputfile.txt');
// pipe the read and write operations
// read input file and write data to output file
readerStream.pipe(writerStream);
I think the confusion in chaining the pipes is caused by the fact that the pipe method implicitly "makes choices" on its own about what to return. That is:
readableStream.pipe(writableStream) // Returns writable stream
readableStream.pipe(duplexStream) // Returns readable stream
But the general rule is that you can only pipe from a Readable Stream into a Writable Stream; in other words, only Readable Streams have the pipe() method.
You cannot keep chaining off the WriteStream because it is not duplex; therefore, for a gzipped archive, you would do something like this:
var zlib = require('zlib'); // provides the gunzip transform stream

request.get(url, {
  gzip: true,
  encoding: null
})
  .pipe(fs.createWriteStream(tmpPath))
  .on('close', function() {
    console.info("downloaded %s", tmpPath);
    fs.createReadStream(tmpPath)
      .pipe(zlib.createGunzip())
      .pipe(fs.createWriteStream(destPath))
      .on('close', function() {
        console.info("unarchived %s", destPath);
      })
      .on('error', (error) => {
        console.warn("gzip error: %s", error);
      });
  })
  .on('error', (error) => {
    console.warn("download error: %s", error);
  });
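Back to the original question, a minimal sketch of one way around the error, using the same request, fs, gfs, url and filename from above: let the download finish before reading the file back into GridFS.
var fileStream = fs.createWriteStream(filename);

// download the image to disk first
request(url).pipe(fileStream);

// only read the file back once it is fully written
fileStream.on('close', function() {
  var writestream = gfs.createWriteStream({ filename: filename });
  fs.createReadStream(filename).pipe(writestream);
});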
