Reading child process stdout 100 bytes at a time

I'm spawning a child that produces lots of data (I'm using 'ls -lR /' here as an example). I want to asynchronously read the child's stdout 100 bytes at a time.
So I want to do: get100().then(process100).then(get100).then(process100).then(...
For some reason, this code only loops three times and then I stop getting 'readable' events. I can't figure out why.
var Promise = require('bluebird');
var spawn = require("child_process").spawn;
var exec = spawn("ls", ["-lR", "/"]);

var get100 = function () {
    return new Promise(function(resolve, reject) {
        var tryTransfer = function() {
            var block = exec.stdout.read(100);
            if (block) {
                console.log("Got 100 Bytes");
                exec.stdout.removeAllListeners('readable');
                resolve();
            } else console.log("Read Failed - not enough bytes?");
        };
        exec.stdout.on('readable', tryTransfer);
    });
};

var forEver = Promise.method(function(action) {
    return action().then(forEver.bind(null, action));
});

forEver(
    function() { return get100(); }
);

Using event-stream, you can emit 100-byte blocks of data from the spawned process as long as there is data to read (streams are asynchronous):
var es = require('event-stream');
var spawn = require("child_process").spawn;
var exec = spawn("ls", ["-lR", "/"]);

var stream = es.readable(function (count, next) {
    var block;
    // read 100 bytes at a time
    while (block = exec.stdout.read(100)) {
        // if you have tons of data, it's not a good idea to log here
        // console.log("Got 100 Bytes");
        // emit the block
        this.emit('data', block.toString()); // block is a Buffer (byte array); you may or may not need toString()
    }
    // no more data left to read
    this.emit('end');
    next();
}).on('data', function(data) {
    // data is the 100-byte block, do what you want with it here
    // the stream is pausable and resumable at will
    stream.pause();
    doStuff(data, function() {
        stream.resume();
    });
});
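For comparison, here is a minimal sketch using only the built-in stream API (no event-stream), where process100 is a placeholder for whatever per-block processing the question's process100 would do: the key is to drain read(100) in a loop inside a single 'readable' handler instead of attaching and removing a listener for every block.

var spawn = require("child_process").spawn;
var exec = spawn("ls", ["-lR", "/"]);

// placeholder for whatever per-block work you need to do
function process100(block) {
    console.log("Got %d bytes", block.length);
}

exec.stdout.on('readable', function () {
    var block;
    // read(100) returns null until 100 bytes are buffered; once the stream has
    // ended it returns whatever is left, so the short final block is handled too
    while ((block = exec.stdout.read(100)) !== null) {
        process100(block);
    }
});

exec.stdout.on('end', function () {
    console.log("done");
});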

Related

How to disconnect a socket after streaming data?

I am making use of "socket.io-client" and "socket.io-stream" to make a request and then stream some data. I have the following code that handles this logic.
Client Server Logic
router.get('/writeData', function(req, res) {
    var io = req.app.get('socketio');
    var nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
    var nameNodeData = {};

    async.waterfall([
        checkForDataNodes,
        readFileFromS3
    ], function(err, result) {
        if (err !== null) {
            res.json(err);
        } else {
            res.json("Finished Writing to DN's");
        }
    });

    function checkForDataNodes(cb) {
        nameNodeSocket.on('nameNodeData', function(data) {
            nameNodeData = data;
            console.log(nameNodeData);
            cb(null, nameNodeData);
        });
        if (nameNodeData.numDataNodes === 0) {
            cb("No datanodes found");
        }
    }

    function readFileFromS3(nameNodeData, cb) {
        for (var i in nameNodeData['blockToDataNodes']) {
            var IP = nameNodeData['blockToDataNodes'][i]['ipValue'];
            var dataNodeSocket = io.connect('http://' + IP + ":5000");
            var ss = require("socket.io-stream");
            var stream = ss.createStream();
            var byteStartRange = nameNodeData['blockToDataNodes'][i]['byteStart'];
            var byteStopRange = nameNodeData['blockToDataNodes'][i]['byteStop'];
            paramsWithRange['Range'] = "bytes=" + byteStartRange.toString() + "-" + byteStopRange.toString();
            //var file = require('fs').createWriteStream('testFile' + i + '.txt');
            var getFileName = nameNodeData['blockToDataNodes'][i]['key'].split('/');
            var fileData = {
                'mainFile': paramsWithRange['Key'].split('/')[1],
                'blockName': getFileName[1]
            };
            ss(dataNodeSocket).emit('sendData', stream, fileData);
            s3.getObject(paramsWithRange).createReadStream().pipe(stream);
            //dataNodeSocket.disconnect();
        }
        cb(null);
    }
});
Server Logic (that gets the data)
var dataNodeIO = require('socket.io')(server);
var ss = require("socket.io-stream");

dataNodeIO.on('connection', function(socket) {
    console.log("Succesfully connected!");
    ss(socket).on('sendData', function(stream, data) {
        var IP = data['ipValue'];
        var blockName = data['blockName'];
        var mainFile = data['mainFile'];

        dataNode.makeDir(mainFile);
        dataNode.addToReport(mainFile, blockName);
        stream.pipe(fs.createWriteStream(mainFile + '/' + blockName));
    });
});
How can I properly disconnect the connections in readFileFromS3()? I have noticed that calling dataNodeSocket.disconnect() at the end does not work, as I can no longer verify that the data was received on the second server. But if I comment it out, I can see the data being streamed to the second server.
My objective is to close the connections on the client-server side.
It appears that the main problem with closing the socket is that you weren't waiting for the stream to be done writing before trying to close the socket. So, because the writing is all asynchronous and finishes sometime later, you were trying to close the socket before the data had been written.
Also, because you were putting asynchronous operations inside a for loop, you were running all your operations in parallel, which may not be what you want: it makes error handling more difficult and puts more load on the server.
Here's the code I would suggest that does the following:
Create a function streamFileFromS3() that streams a single file and returns a promise that will notify when it's done.
Use await in a for loop with that streamFileFromS3() to serialize the operations. You don't have to serialize them, but then you would have to change your error handling to figure out what to do if one errors while the others are already running and you'd have to be more careful about concurrency issues.
Use try/catch to catch any errors from streamFileFromS3().
Add error handling on the stream.
Change all occurrences of data['propertyName'] to data.propertyName. The only time you need to use brackets is if the property name contains a character that is not allowed in a JavaScript identifier or if the property name is in a variable. Otherwise, the dot notation is preferred.
Add socket.io connection error handling logic for both socket.io connections.
Set the returned status to 500 when there's an error processing the request.
So, here's the code for that:
const ss = require("socket.io-stream");

router.get('/writeData', function(req, res) {
    const io = req.app.get('socketio');

    function streamFileFromS3(ip, data) {
        return new Promise((resolve, reject) => {
            const dataNodeSocket = io.connect(`http://${ip}:5000`);
            dataNodeSocket.on('connect_error', reject);
            dataNodeSocket.on('connect_timeout', () => {
                reject(new Error(`timeout connecting to http://${ip}:5000`));
            });
            dataNodeSocket.on('connection', () => {
                // dataNodeSocket connected now
                const stream = ss.createStream().on('error', reject);

                paramsWithRange.Range = `bytes=${data.byteStart}-${data.byteStop}`;
                const filename = data.key.split('/')[1];
                const fileData = {
                    'mainFile': paramsWithRange.Key.split('/')[1],
                    'blockName': filename
                };

                ss(dataNodeSocket).emit('sendData', stream, fileData);

                // get S3 data and pipe it to the socket.io stream
                s3.getObject(paramsWithRange).createReadStream().on('error', reject).pipe(stream);

                stream.on('close', () => {
                    dataNodeSocket.disconnect();
                    resolve();
                });
            });
        });
    }

    function connectError(msg) {
        res.status(500).send(`Error connecting to ${NAMENODE_ADDRESS}`);
    }

    const nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
    nameNodeSocket.on('connect_error', connectError).on('connect_timeout', connectError);

    nameNodeSocket.on('nameNodeData', async (nameNodeData) => {
        try {
            for (let item of nameNodeData.blockToDataNodes) {
                await streamFileFromS3(item.ipValue, item);
            }
            res.json("Finished Writing to DN's");
        } catch(e) {
            res.status(500).json(e);
        }
    });
});
Other notes:
I don't know what paramsWithRange is, as it is not declared here. When you were doing everything in parallel it was being shared among all the connections, which is asking for a concurrency issue. In my serialized implementation it's probably safe to share, but as written it's a concurrency problem waiting to happen.
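If you want to avoid sharing it entirely, one sketch (assuming only Bucket, Key, and Range matter for the getObject call; the real shape of paramsWithRange isn't shown here) is to build a fresh params object per block inside streamFileFromS3():

// hypothetical sketch: derive per-block params instead of mutating a shared paramsWithRange
function buildParamsForBlock(baseParams, data) {
    return {
        Bucket: baseParams.Bucket,   // assumed fields; adjust to whatever your params actually contain
        Key: baseParams.Key,
        Range: `bytes=${data.byteStart}-${data.byteStop}`
    };
}

// inside streamFileFromS3(ip, data):
//   const params = buildParamsForBlock(paramsWithRange, data);
//   s3.getObject(params).createReadStream().on('error', reject).pipe(stream);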

Intro to node.js - Print data from 3 urls (http.get)

I'm doing an introduction to node.js using learnyounode. I wonder if you could help me get my head around this: asynchronous behavior.
So, here is the problem:
This problem is the same as the previous problem (HTTP COLLECT) in that you need to use http.get(). However, this time you will be provided with three URLs as the first three command-line arguments.
You must collect the complete content provided to you by each of the URLs and print it to the console (stdout). You don't need to print out the length, just the data as a String; one line per URL. The catch is that you must print them out in the same order as the URLs are provided to you as command-line arguments.
and here is my (bad) solution which, in fact, doesn't work.
var http = require('http');
var message = [];

for (var i = 2; i < 5; i++)
    http.get(process.argv[i], function (res) {
        res.setEncoding('utf8');
        res.on('data', function(line) {
            message[i] += line.toString();
        });
        res.on('end', function(line) {
            for (var i = 0; i < 3; i++)
                console.log(message[i]);
        });
    });
UPDATE
So I tried a similar approach to your solution.
Here goes:
var http = require('http');
var count = 0;
var message = ["", "", ""];

for (var i = 2; i < 5; i++) {
    http.get(process.argv[i], function (res) {
        res.setEncoding('utf8');
        res.on('data', function(line) {
            message[count] += line.toString();
        });
        res.on('end', function(line) {
            count++;
            if (count !== 3)
                return;
            else
                printOutput();
        });
    });
}

function printOutput() {
    for (var i = 0; i < 3; i++)
        console.log(message[i]);
}
But the output is jumbled :/ (not in the right order):
CURRENT: "He has not got the skite and watch out for the bogged Trent from punchy blue with the dry to the Vinnie's It'll be flanno
where flat out like the slabs..."
EXPECTED: "He's got a massive coldie my watch out for the smoko We're jackaroo going on she'll be right servo dramas.."
CURRENT ". He has not got a banana bender piece of piss the dry as a budgie smugglers Come a flamin clacker you little bog standard
ripper The cross them to his blood's worth bottling flamin the cunning
of a rip snorter.."
EXPECTED: "He has not got the skite and watch out for the bogged Trent from punchy blue with the dry to the Vinnie's It'll be flanno
where flat out like the slabs..."
CURRENT: "He's got a massive coldie my watch out for the smoko We're jackaroo going on she'll be right servo dramas.."
EXPECTED: "He has not got a banana bender piece of piss the dry as a budgie smugglers Come a flamin clacker you little bog standard
ripper The cross them to his blood's worth bottling flamin the cunning
of a rip snorter..."
CURRENT: ""
EXPECTED ""
A cleaner way to do it asynchronously is to put all the Promises in an array and call Promise.all() on that array:
var http = require('http');

promises = [
    promiseLoad(process.argv[2]),
    promiseLoad(process.argv[3]),
    promiseLoad(process.argv[4])
];

Promise.all(promises).then(function(res) {
    console.log(res);
});

function promiseLoad(url) {
    var body = '';
    return new Promise(function(resolve, reject) {
        http.get(url, function(res) {
            res.on('data', function(d) {
                body += d;
            });
            res.on('end', function() {
                resolve(body);
            });
        });
    });
}
You have to wait for the prior request to reach the 'end' event before processing the next request, hence the asynchronous challenge. This can be accomplished via callbacks, or promises.
Promise implementation:
var http = require('http');

// each .then gets a function, so the next request only starts after the previous one resolves
promiseLoad(process.argv[2])
    .then(function () { return promiseLoad(process.argv[3]); })
    .then(function () { return promiseLoad(process.argv[4]); });

function promiseLoad(url) {
    var body = '';
    return new Promise(function(resolve, reject) {
        http.get(url, function(res) {
            res.on('data', function(d) {
                body += d;
            });
            res.on('end', function() {
                console.log(body);
                resolve();
            });
        });
    });
}
I will leave the callback implementation to you as an exercise; a rough sketch follows. As a starting point, the next request has to be fired only once the 'end' event is fired.
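A minimal callback-based sketch of that idea (loadUrl and loadInOrder are hypothetical helper names, not part of the exercise): each request is started only from the previous request's 'end' handler.

var http = require('http');

// load one URL, then call done(err, body) when its 'end' event fires
function loadUrl(url, done) {
    http.get(url, function (res) {
        var body = '';
        res.setEncoding('utf8');
        res.on('data', function (chunk) { body += chunk; });
        res.on('end', function () { done(null, body); });
    }).on('error', done);
}

// fire the next request only after the previous one has ended
function loadInOrder(urls, index) {
    if (index >= urls.length) return;
    loadUrl(urls[index], function (err, body) {
        if (err) return console.error(err);
        console.log(body);
        loadInOrder(urls, index + 1);
    });
}

loadInOrder(process.argv.slice(2), 0);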
UPDATE:
To load these truly asynchronously and at the same time, your code will work with minor modifications. You need to simply wait for end to be called 3 times and only log at that point indicating that all loading is complete:
var http = require('http');
var count = 0;
var message = ["", "", ""];

for (let i = 2; i < 5; i++)
    http.get(process.argv[i], function (res) {
        res.setEncoding('utf8');
        // `let` keeps a separate `i` per iteration; map argv index 2..4 to message index 0..2
        var correctIndex = i - 2;
        res.on('data', function(line) {
            message[correctIndex] += line.toString();
        });
        res.on('end', function(line) {
            count++;
            if (count !== 3) return;
            for (var i = 0; i < 3; i++)
                console.log(message[i]);
        });
    });
First, I want to say that the answer already here that says to use Promise.all() is the way that I would suggest. However, I want to point out a particular scenario where it might not cover your needs.
Consider that you have four requests:
Service | Time to complete (s)
--------|---------------------
   A    |  3
   B    |  1
   C    |  5
   D    |  4
And you're going to use a load handler similar to what has already been mentioned:
// Url loader
function load(url) {
    var message = "";
    return new Promise(function (resolve, reject) {
        http.get(url, function (res) {
            // Add message piece
            res.on("data", function (data) {
                message += data;
            });
            // Resolve whole message
            res.on("end", function (data) {
                resolve(message);
            });
        });
    });
}
Printing After Everything Finishes
If you use Promise.all(), you are going to have to wait for all of the requests to finish before you see any output. So if we output a timestamp with our data, we will get the following:
Code
/*
    Wait for all promises to complete and then
    print out all of their collected data
*/
Promise.all(promises).then(function (res) {
    res.forEach(function (data) {
        timestamp(data);
    });
});
Output
[14:9:4.106] Start
[14:9:10.335] aaaa
[14:9:10.336] bbbb
[14:9:10.336] cccc
[14:9:10.336] dddd
So it takes about six seconds after we start before we see any output from our services.
Printing As Soon As Possible
Comparatively, if we wanted to print output while we are getting results from our service calls, we need to print each result as its service finishes, but not until all "prior" services are done. With that in mind, we could do something like this:
Code
promises[0].then(function (dataA) {
    timestamp(dataA);
    promises[1].then(function (dataB) {
        timestamp(dataB);
        promises[2].then(function (dataC) {
            timestamp(dataC);
            promises[3].then(function (dataD) {
                timestamp(dataD);
            });
        });
    });
});
Output
[14:16:19.245] Start
[14:16:22.974] aaaa
[14:16:22.975] bbbb
[14:16:25.474] cccc
[14:16:25.474] dddd
Here, we see the start, then only 3 seconds later we print out both Service A and Service B. We see A because its service just resolved and B because its service was already done, but we didn't want to print until A was finished. Similarly, C and D show up about 2 seconds after B.
Now, that code is somewhat verbose, so we could write a recursive function to handle all that nesting for us.
// Function to print an array of promises in order
function cascadeInOrder(promiseArr) {
    var curr = 0;
    // This closure is going to recursively print out our promises
    function nexter(data) {
        if (data) {
            timestamp(data);
        }
        // Have the next promise print its data whenever it is done
        if (curr + 1 < promiseArr.length) {
            promiseArr[curr += 1].then(nexter);
        }
    }
    // Wait for our first promise to finish and have it kick off the next
    promiseArr[curr].then(nexter);
}
I haven't really run into many use cases where we need to make "synchronous" use of asynchronous data, but I'm sure there is a potential need for it somewhere.
Test Code Used:
Change the method variable if you want to use the other methods.
/*global Promise*/
"use strict";

// Provide response times for fake services
function getUrlTiming(url) {
    var timing = 0;
    switch (url) {
    case "a":
        timing = 3000;
        break;
    case "b":
        timing = 1000;
        break;
    case "c":
        timing = 5000;
        break;
    case "d":
        timing = 4000;
        break;
    default:
        timing = 0;
        break;
    }
    return timing;
}

// Service to wrap events
function Service() {
    this.listeners = [];
}

Service.prototype = {
    on: function (event, cb) {
        if (!this.listeners[event]) {
            this.listeners[event] = [];
        }
        this.listeners[event].push(cb);
    },
    emit: function (event, details) {
        if (this.listeners[event]) {
            this.listeners[event].forEach(function (cb) {
                cb(details);
            });
        }
    }
};

// Make a fake http module
var http = {
    get: function (url, cb) {
        // Make an event emitter
        var req = new Service();
        // If we got a callback
        if (cb && (typeof cb === "function")) {
            // Call it to set up listeners
            cb(req);
        }
        // Make a promise to resolve after the service finishes
        return new Promise(function (resolve, reject) {
            var network,
                message = "",
                part = 0,
                maxParts = 4;
            /*
                Create a network simulation to send a message in parts
                until the request finishes
            */
            network = setInterval(function () {
                // If the message isn't complete
                if (part < 4) {
                    // Add to the whole message tracker
                    message += url;
                    // Emit that we got data
                    req.emit("data", url);
                    // Increment how far in the message we are
                    part += 1;
                } else {
                    // Stop transmitting
                    clearInterval(network);
                    // Emit the end of the request
                    req.emit("end", message);
                    // Resolve the request
                    resolve(url);
                }
            }, (getUrlTiming(url) / maxParts));
        });
    }
};

// Url loader
function load(url) {
    var message = "";
    return new Promise(function (resolve, reject) {
        http.get(url, function (res) {
            // Add message piece
            res.on("data", function (data) {
                message += data;
            });
            // Resolve whole message
            res.on("end", function (data) {
                resolve(message);
            });
        });
    });
}

// Get a readable time
function getTime() {
    var now = new Date();
    return (now.getHours() + ":" + now.getMinutes() + ":" + now.getSeconds() + "." + now.getMilliseconds());
}

// Print a timestamped message
function timestamp(message) {
    console.log("[%s] %s", getTime(), message);
}

// Function to print an array of promises in order
function cascadeInOrder(promiseArr) {
    var curr = 0;
    // This closure is going to recursively print out our promises
    function nexter(data) {
        if (data) {
            timestamp(data);
        }
        // Have the next promise print its data whenever it is done
        if (curr + 1 < promiseArr.length) {
            promiseArr[curr += 1].then(nexter);
        }
    }
    // Wait for our first promise to finish and have it kick off the next
    promiseArr[curr].then(nexter);
}

/*
    No matter what, we want all of our requests to
    start right now, and effectively at the same time.
    We don't want to start one after another finishes
*/
var promises = [
    load("a"),
    load("b"),
    load("c"),
    load("d")
];

/*
    Which method we want to use to test our stuff.
    Change between [1, 2, 3] for each method listed
    below: 1 for Promise.all(), 2 for ASAP printing,
    and 3 for the verbose version of 2.
*/
var method = 3;

// Note when we started
timestamp("Start");

if (method === 1) {
    /*
        Wait for all promises to complete and then
        print out all of their collected data
    */
    Promise.all(promises).then(function (res) {
        res.forEach(function (data) {
            timestamp(data);
        });
    });
} else if (method === 2) {
    /*
        Print each one's data as soon as it is
        available, but make sure to do it in order
    */
    cascadeInOrder(promises);
} else if (method === 3) {
    /*
        This is the same as the "cascadeInOrder" function,
        except written without recursion and more verbosely.
    */
    promises[0].then(function (dataA) {
        timestamp(dataA);
        promises[1].then(function (dataB) {
            timestamp(dataB);
            promises[2].then(function (dataC) {
                timestamp(dataC);
                promises[3].then(function (dataD) {
                    timestamp(dataD);
                });
            });
        });
    });
}
@Luís Melo:
Here's my solution after going through this thread:
var http = require('http');
var bl = require('bl')

promises = [
    promiseLoad(process.argv[2]),
    promiseLoad(process.argv[3]),
    promiseLoad(process.argv[4])
];

Promise.all(promises).then(function(res) {
    for (i = 0; i < promises.length; i++) {
        console.log(res[i]);
    }
});

function promiseLoad(url) {
    var body = '';
    return new Promise(function(resolve, reject) {
        http.get(url, function (response) {
            response.setEncoding('utf8');
            response.pipe(bl(function (err, data) {
                resolve(data.toString())
            }))
        })
    });
}
Here's the official solution in case you want to compare notes:
var http = require('http')
var bl = require('bl')
var results = []
var count = 0

function printResults () {
    for (var i = 0; i < 3; i++) {
        console.log(results[i])
    }
}

function httpGet (index) {
    http.get(process.argv[2 + index], function (response) {
        response.pipe(bl(function (err, data) {
            if (err) {
                return console.error(err)
            }
            results[index] = data.toString()
            count++
            if (count === 3) {
                printResults()
            }
        }))
    })
}

for (var i = 0; i < 3; i++) {
    httpGet(i)
}

ReadStream: Internal buffer does not fill up anymore

I have a fs.ReadStream object that points to a pretty big file. Now I would like to read 8000 bytes from the ReadStream, but the internal buffer is only 6000 bytes. So my approach would be to read those 6000 bytes and wait for the internal buffer to fill up again by using a while-loop that checks whether the internal buffer length is not 0 anymore.
Something like this:
BinaryObject.prototype.read = function(length) {
    var value;
    // Check whether we have enough data in the internal buffer
    if (this.stream._readableState.length < length) {
        // Not enough data - read the full internal buffer to
        // force the ReadStream to fill it again.
        value = this.read(this.stream._readableState.length);
        while (this.stream._readableState.length === 0) {
            // Wait...?
        }
        // We should have some more data in the internal buffer
        // here... Read the rest and add it to our `value` buffer
        // ... something like this:
        //
        // value.push(this.stream.read(length - value.length))
        // return value
    } else {
        value = this.stream.read(length);
        this.stream.position += length;
        return value;
    }
};
The problem is that the buffer is never filled again; the script just idles in the while loop.
What is the best approach to do this?
It's quite simple: a synchronous while-loop never yields back to the event loop, so the internal buffer can never be refilled. You don't need to do any buffering on your side:
var fs = require('fs'),
    rs = fs.createReadStream('/path/to/file');

var CHUNK_SIZE = 8192;

rs.on('readable', function () {
    var chunk;
    while (null !== (chunk = rs.read(CHUNK_SIZE))) {
        console.log('got %d bytes of data', chunk.length);
    }
});

rs.on('end', function () {
    console.log('end');
});
If CHUNK_SIZE is larger than what is currently in the internal buffer, read() will return null and the stream will buffer some more before emitting 'readable' again. You can also configure the size of the internal buffer by passing:
var rs = fs.createReadStream('/path/to/file', { highWaterMark: CHUNK_SIZE });
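If you specifically need fixed-size records (the question's 8000 bytes), a minimal sketch along the same lines: read(size) returns null until size bytes are buffered, except after the stream has ended, when it returns whatever is left, so the short final record falls out of the same loop.

var fs = require('fs');

var RECORD_SIZE = 8000;
var rs = fs.createReadStream('/path/to/file');

rs.on('readable', function () {
    var record;
    // null means "not enough buffered yet" - wait for the next 'readable'
    while (null !== (record = rs.read(RECORD_SIZE))) {
        // the final record may be shorter than RECORD_SIZE once the stream has ended
        console.log('got a record of %d bytes', record.length);
    }
});

rs.on('end', function () {
    console.log('done');
});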
Below is a sample for reading a file as a stream.
var fs = require('fs'),
    readStream = fs.createReadStream(srcPath);

readStream.on('data', function (chunk) {
    console.log('got %d bytes of data', chunk.length);
});

readStream.on('readable', function () {
    var chunk;
    while (null !== (chunk = readStream.read())) {
        console.log('got %d bytes of data', chunk.length);
    }
});

readStream.on('end', function () {
    console.log('got all bytes of data');
});

How to concurrently write a stream and read the data in node.js?

I have a node.js stream that I am temporarily writing to an array like this:
var tempCrossSection = [];

stream.on('data', function(data) {
    tempCrossSection.push(data);
});
Then I am periodically taking the data in that array (and clearing it) and doing some processing on it like this:
var crossSection = [];

setInterval(function() {
    crossSection = tempCrossSection;
    tempCrossSection = [];
    someOtherFunction(crossSection, function(data) {
        console.log(data);
    });
}, 30000);
The problem is that I get odd behavior in the order in which the stream data is written to the array, and in the number of setInterval callbacks that fire, as the stream rate increases and/or the someOtherFunction callback takes too long.
How should I implement this so that the stream writes data to the array correctly (in order) and the data processing is conducted once per setInterval callback?
There are a few issues with your code. First of all, you are sharing too much state.
For example, crossSection should be defined solely inside the anonymous interval function.
Why is "crossSection" captured in a closure? If someOtherFunction runs for a long period you might indeed run into some kind of race condition.
var source = [];

stream.on('data', function(data) {
    source.push(data);
});

setInterval(function() {
    var target = source;
    source = [];
    someOtherFunction(target, function(data) {
        console.log(data);
    });
}, 30000);
If you have access to someOtherFunction then I would rewrite the whole thing like this
var source = [];

stream.on('data', function(data) {
    source.push(data);
});

setInterval(function() {
    var processing = true;
    while (processing) {
        var elem = source.shift();
        someOtherFunction(elem, function(data) {
            console.log(data);
        });
        processing = checkForBreakConditionAndReturnFalseIfBreak();
    }
}, 30000);
Still, you might run into some issues if the number of elements is too big and someOtherFunction takes too long. So I'd probably do something like this:
var source = [];
var timerId = 0;

stream.on('data', function(data) {
    source.push(data);
});

function processSource() {
    clearTimeout(timerId);
    var processing = true;
    while (processing) {
        var elem = source.shift();
        someOtherFunction(elem, function(data) {
            console.log(data);
        });
        processing = checkForBreakConditionAndReturnFalseIfBreak();
    }
    setTimeout(processSource, calcTimeoutForNextProcessingDependentOnPastData());
}

setTimeout(processSource, 30000); // initial timeout
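One caveat with the loops above: someOtherFunction takes a callback, so it is presumably asynchronous, and the while loop fires all of those calls without waiting for any of them. If the processing itself also needs to be serialized, a sketch under that assumption (reusing the answer's placeholder helpers) is to shift one element per callback:

// drain `source` one element at a time, waiting for each someOtherFunction callback
function drainSource(done) {
    if (source.length === 0 || !checkForBreakConditionAndReturnFalseIfBreak()) {
        return done();
    }
    var elem = source.shift();
    someOtherFunction(elem, function (data) {
        console.log(data);
        drainSource(done); // only move on once this element has been processed
    });
}

function processSource() {
    drainSource(function () {
        timerId = setTimeout(processSource, calcTimeoutForNextProcessingDependentOnPastData());
    });
}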

Node.js: How to read a stream into a buffer?

I wrote a pretty simple function that downloads an image from a given URL, resizes it, and uploads it to S3 (using 'gm' and 'knox'). I have no idea if I'm reading the stream into a buffer correctly. (Everything is working, but is it the correct way?)
Also, I want to understand something about the event loop: how do I know that one invocation of the function won't leak anything, or change the 'buf' variable of another invocation that is already running? (Or is this scenario impossible because the callbacks are anonymous functions?)
var http = require('http');
var https = require('https');
var s3 = require('./s3');
var gm = require('gm');

module.exports.processImageUrl = function(imageUrl, filename, callback) {
    var client = http;
    if (imageUrl.substr(0, 5) == 'https') { client = https; }

    client.get(imageUrl, function(res) {
        if (res.statusCode != 200) {
            return callback(new Error('HTTP Response code ' + res.statusCode));
        }

        gm(res)
            .geometry(1024, 768, '>')
            .stream('jpg', function(err, stdout, stderr) {
                if (!err) {
                    var buf = new Buffer(0);
                    stdout.on('data', function(d) {
                        buf = Buffer.concat([buf, d]);
                    });
                    stdout.on('end', function() {
                        var headers = {
                            'Content-Length': buf.length
                            , 'Content-Type': 'Image/jpeg'
                            , 'x-amz-acl': 'public-read'
                        };
                        s3.putBuffer(buf, '/img/d/' + filename + '.jpg', headers, function(err, res) {
                            if (err) {
                                return callback(err);
                            } else {
                                return callback(null, res.client._httpMessage.url);
                            }
                        });
                    });
                } else {
                    callback(err);
                }
            });
    }).on('error', function(err) {
        callback(err);
    });
};
Overall I don't see anything that would break in your code.
Two suggestions:
The way you are combining Buffer objects is suboptimal, because it has to copy all the pre-existing data on every 'data' event. It would be better to put the chunks in an array and concat them all at the end.
var bufs = [];
stdout.on('data', function(d) { bufs.push(d); });
stdout.on('end', function() {
    var buf = Buffer.concat(bufs);
});
For performance, I would look into whether the S3 library you are using supports streams. Ideally you wouldn't need to create one large buffer at all; instead you would pass the stdout stream directly to the S3 library, as sketched below.
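A rough sketch of that idea, assuming your ./s3 wrapper exposes a knox-style putStream(stream, path, headers, callback) (the wrapper's real API isn't shown in the question):

// inside the .stream('jpg', ...) callback, instead of buffering stdout:
var headers = {
    'Content-Type': 'image/jpeg',
    'x-amz-acl': 'public-read'
    // no Content-Length here: we don't know it without buffering, so this
    // only works if the wrapper/S3 client accepts a stream of unknown length
};
s3.putStream(stdout, '/img/d/' + filename + '.jpg', headers, function(err, res) {
    if (err) return callback(err);
    callback(null, res.client._httpMessage.url);
});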
As for the second part of your question, that isn't possible. When a function is called, it is allocated its own private context, and everything defined inside of that will only be accessible from other items defined inside that function.
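A tiny illustration of that point (not from the original code): each invocation gets its own buf, so concurrent invocations cannot see or modify each other's variable.

function makeCollector(label) {
    var buf = [];                 // a fresh `buf` per invocation
    return function (chunk) {
        buf.push(chunk);
        console.log(label, buf);  // only this invocation's buf is visible here
    };
}

var a = makeCollector('first');
var b = makeCollector('second');
a('x');  // first [ 'x' ]
b('y');  // second [ 'y' ]  - the two bufs never mix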
Update
Dumping the file to the filesystem would probably mean less memory usage per request, but file IO can be pretty slow so it might not be worth it. I'd say that you shouldn't optimize too much until you can profile and stress-test this function. If the garbage collector is doing its job you may be overoptimizing.
With all that said, there are better ways anyway, so don't use files. Since all you want is the length, you can calculate that without needing to append all of the buffers together, so then you don't need to allocate a new Buffer at all.
var pause_stream = require('pause-stream');

// Your other code.

var bufs = [];
stdout.on('data', function(d) { bufs.push(d); });
stdout.on('end', function() {
    var contentLength = bufs.reduce(function(sum, buf) {
        return sum + buf.length;
    }, 0);

    // Create a stream that will emit your chunks when resumed.
    var stream = pause_stream();
    stream.pause();
    while (bufs.length) stream.write(bufs.shift());
    stream.end();

    var headers = {
        'Content-Length': contentLength,
        // ...
    };

    s3.putStream(stream, ....);
});
JavaScript snippet
function stream2buffer(stream) {
    return new Promise((resolve, reject) => {
        const _buf = [];
        stream.on("data", (chunk) => _buf.push(chunk));
        stream.on("end", () => resolve(Buffer.concat(_buf)));
        stream.on("error", (err) => reject(err));
    });
}
TypeScript snippet
async function stream2buffer(stream: Stream): Promise<Buffer> {
    return new Promise<Buffer>((resolve, reject) => {
        const _buf = Array<any>();
        stream.on("data", chunk => _buf.push(chunk));
        stream.on("end", () => resolve(Buffer.concat(_buf)));
        stream.on("error", err => reject(`error converting stream - ${err}`));
    });
}
You can easily do this using node-fetch if you are pulling from http(s) URIs.
From the readme:
fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
    .then(res => res.buffer())
    .then(buffer => console.log(buffer));
Note: this solely answers "How to read a stream into a buffer?" and ignores the context of the original question.
ES2018 Answer
Since Node 11.14.0, readable streams support async iterators.
const buffers = [];

// node.js readable streams implement the async iterator protocol
for await (const data of readableStream) {
    buffers.push(data);
}

const finalBuffer = Buffer.concat(buffers);
Bonus: In the future, this could get better with the stage 2 Array.fromAsync proposal.
// 🛑 DOES NOT WORK (yet!)
const finalBuffer = Buffer.concat(await Array.fromAsync(readableStream));
You can convert your readable stream to a buffer and integrate it in your code in an asynchronous way like this.
async function streamToBuffer(stream) {
    return new Promise((resolve, reject) => {
        const data = [];

        stream.on('data', (chunk) => {
            data.push(chunk);
        });

        stream.on('end', () => {
            resolve(Buffer.concat(data));
        });

        stream.on('error', (err) => {
            reject(err);
        });
    });
}
the usage would be as simple as:
// usage
const myStream // your stream
const buffer = await streamToBuffer(myStream) // this is a buffer
I suggest loganfsmyth's method, using an array to hold the data.
var bufs = [];
stdout.on('data', function(d) { bufs.push(d); });
stdout.on('end', function() {
    var buf = Buffer.concat(bufs);
});
In my current working example, I am working with GridFS and npm's Jimp.
var bucket = new GridFSBucket(getDBReference(), { bucketName: 'images' });
var dwnldStream = bucket.openDownloadStream(info[0]._id); // original size
var data = [];

dwnldStream.on('data', function(chunk) {
    data.push(chunk);
});

dwnldStream.on('end', function() {
    var buff = Buffer.concat(data);
    console.log("buffer: ", buff);
    jimp.read(buff)
        .then(image => {
            console.log("read the image!");
            IMAGE_SIZES.forEach((size) => {
                resize(image, size);
            });
        });
});
I did some other research with a string method, but that did not work, perhaps because I was reading from an image file; the array method did work.
const DISCLAIMER = "DONT DO THIS";
var data = "";

stdout.on('data', function(d) {
    data += d;
});

stdout.on('end', function() {
    var buf = Buffer.from(data);
    //// do work with the buffer here
});
When I did the string method I got this error from npm's Jimp:
buffer: <Buffer 00 00 00 00 00>
{ Error: Could not find MIME for Buffer <null>
Basically, I think the type coercion from binary to string didn't work so well.
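A quick way to see why (an illustration, not from the original post): round-tripping arbitrary bytes through a UTF-8 string is lossy, so the rebuilt buffer no longer matches the original image bytes.

// bytes that are not valid UTF-8 sequences get replaced during the string conversion
const original = Buffer.from([0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10]); // e.g. a JPEG header
const roundTripped = Buffer.from(original.toString('utf8'));

console.log(original);      // <Buffer ff d8 ff e0 00 10>
console.log(roundTripped);  // different bytes (invalid sequences become U+FFFD)
console.log(original.equals(roundTripped)); // false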
I suggest having an array of buffers and concatenating into the resulting buffer only once, at the end. It's easy to do manually, or one could use node-buffers.
I just want to post my solution. The previous answers were pretty helpful for my research. I use length-stream to get the size of the stream, but the problem is that its callback is fired near the end of the stream, so I also use stream-cache to cache the stream and pipe it to the res object once I know the content-length. In case of an error, the error is passed to the callback:
var StreamCache = require('stream-cache');
var lengthStream = require('length-stream');

var _streamFile = function(res, stream, cb) {
    var cache = new StreamCache();

    var lstream = lengthStream(function(length) {
        res.header("Content-Length", length);
        cache.pipe(res);
    });

    stream.on('error', function(err) {
        return cb(err);
    });

    stream.on('end', function() {
        return cb(null, true);
    });

    return stream.pipe(lstream).pipe(cache);
}
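Hypothetical usage of that helper in an Express-style route (the route and file path are made up for illustration):

// stream a file to the response with a correct Content-Length header
router.get('/download', function (req, res) {
    var fileStream = require('fs').createReadStream('/path/to/file');
    _streamFile(res, fileStream, function (err) {
        if (err) {
            console.error(err);
            if (!res.headersSent) res.status(500).end();
        }
    });
});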
In TypeScript, [].push(bufferPart) is not compatible, so:
getBufferFromStream(stream: Part | null): Promise<Buffer> {
    if (!stream) {
        throw 'FILE_STREAM_EMPTY';
    }
    return new Promise(
        (r, j) => {
            let buffer = Buffer.from([]);
            stream.on('data', buf => {
                buffer = Buffer.concat([buffer, buf]);
            });
            stream.on('end', () => r(buffer));
            stream.on('error', j);
        }
    );
}
You can do this by:
async function toBuffer(stream: ReadableStream<Uint8Array>) {
    const list = []
    const reader = stream.getReader()
    while (true) {
        const { value, done } = await reader.read()
        if (value)
            list.push(value)
        if (done)
            break
    }
    return Buffer.concat(list)
}
or using the buffer stream consumer (available from node:stream/consumers in newer Node versions):
const { buffer } = require('node:stream/consumers');
const buf = await buffer(stream);
You can check the "content-length" header at res.headers. It will give you the length of the content you will receive (how many bytes of data the server will send), for example:
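A small sketch of that (the URL is a placeholder); note that the header is optional, so don't rely on it always being present:

// a sketch: read the announced body length (if the server sends it), then collect the body
const http = require('http');

http.get('http://example.com/file.bin', (res) => {
    const contentLength = parseInt(res.headers['content-length'], 10);
    console.log('expecting %d bytes', contentLength);

    const chunks = [];
    res.on('data', (chunk) => chunks.push(chunk));
    res.on('end', () => {
        const buf = Buffer.concat(chunks);
        console.log('received %d bytes', buf.length);
    });
});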
