node.js: create a connected writable and readable stream pair

I am trying to create a function that returns a connected writable and readable stream pair. eg:
const { writable, readable } = createStreamPair();
where each end exposes only the appropriate interface (writable instanceof stream.Readable === false and readable instanceof stream.Writable === false), unlike a PassThrough stream.
use case:
createWriteStream(filePath) {
const { writable, readable } = createStreamPair();
writeFile(filePath, readable);
return writable;
}
How can I create my createStreamPair() function?
Edit1
A naive approach that obviously does not work (the bare Readable and Writable have no _read/_write implementations, and the pipe direction is the reverse of what the use case needs) ...
function createStreamPair() {
var readable = new stream.Readable();
var writable = new stream.Writable();
readable.pipe(writable);
return { writable, readable }
}

The Node.js test suite uses a helper that creates two Duplex streams; writes to one can be read from the other, and vice versa: https://github.com/nodejs/node/blob/master/test/common/duplexpair.js
It isn't part of the Node.js standard library, but you can write your own.
I'll present a slightly modified, annotated version here:
const Duplex = require('stream').Duplex;
const assert = require('assert');
// Define some unique property names.
// The actual value doesn't matter,
// so long as they're not used by Node.js for anything else.
const kCallback = Symbol('Callback');
const kOtherSide = Symbol('Other');
// Define a class `DuplexSocket` that extends `Duplex`
class DuplexSocket extends Duplex {
constructor() {
// Let Node.js initialize everything it needs to
super();
// Define two values we will be using
// kCallback holds a temporary reference to a pending write callback
this[kCallback] = null;
// kOtherSide will be the reference to the other side of the stream
this[kOtherSide] = null;
}
_read() {
// This is called when this side is asked for more data (the consumer has drained what was pushed)
// If the other side set a callback for us to call,
// then first clear that reference
// (it might be immediately set to a new value again),
// then call the function.
const callback = this[kCallback];
if (callback) {
this[kCallback] = null;
callback();
}
}
_write(chunk, encoding, callback) {
// This is called when someone writes to the stream
// Ensure there's a reference to the other side before trying to call it
assert.notStrictEqual(this[kOtherSide], null);
// Ensure that the other-side callback is empty before setting it
// If push immediately calls _read, this should never be a problem
assert.strictEqual(this[kOtherSide][kCallback], null);
if (chunk.length === 0) {
// A zero-length push would not cause the other side to call back, so complete the write on the next tick
process.nextTick(callback);
} else {
// Save the callback so it is invoked once the other side's _read runs
this[kOtherSide][kCallback] = callback;
// And finally, send the other side the data to be read
this[kOtherSide].push(chunk);
}
}
_final(callback) {
// Ask the other side to let us know it received our EOF request
this[kOtherSide].on('end', callback);
// And finally, pushing null signals the end of the stream
this[kOtherSide].push(null);
}
}
function makeDuplexPair() {
// Create the two endpoints of the pair
const clientSide = new DuplexSocket();
const serverSide = new DuplexSocket();
// Set the other-side reference
clientSide[kOtherSide] = serverSide;
serverSide[kOtherSide] = clientSide;
// Both instances behave the same, so choice of name doesn't matter,
// so long as they're distinguishable.
return { clientSide, serverSide };
}
module.exports = makeDuplexPair;
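A quick usage sketch (assuming the code above is saved as duplexpair.js next to your script):
const makeDuplexPair = require('./duplexpair');

const { clientSide, serverSide } = makeDuplexPair();

// Anything written to one side can be read from the other
serverSide.on('data', (chunk) => {
  console.log('server side received: %s', chunk);
});

clientSide.write('hello');
clientSide.end();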
Here's another way of creating two streams, one Readable and one Writable in this case (using the Readable and Writable constructors from the stream module):
const { Readable, Writable } = require('stream');

function makeAsymmetricalStreamPair() {
  let readableCallback = null;

  const readableSide = new Readable();
  readableSide._read = function _read() {
    // Downstream wants more data: release the pending write callback, if any
    if (!readableCallback) return;
    const callback = readableCallback;
    readableCallback = null;
    callback();
  };

  const writableSide = new Writable();
  writableSide._write = function _write(chunk, enc, callback) {
    if (readableCallback) throw new Error('previous write has not been consumed yet');
    if (chunk.length === 0) {
      process.nextTick(callback);
    } else {
      // Hold the callback until the readable side is read from again
      readableCallback = callback;
      readableSide.push(chunk);
    }
  };
  writableSide._final = function _final(callback) {
    readableSide.on('end', callback);
    readableSide.push(null);
  };

  return { readableSide, writableSide };
}

As of today you can use stream.PassThrough.
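For completeness, a PassThrough-based sketch; note that a PassThrough is both Readable and Writable, so it does not satisfy the instanceof checks from the question:
const { PassThrough } = require('stream');

function createStreamPair() {
  const pass = new PassThrough();
  // Both properties refer to the same Duplex object here
  return { writable: pass, readable: pass };
}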

Related

passing function to a class in nodejs

I have a function that I need to pass to a class I have defined in Node.js.
The use case is that I want to give the implementer of the class control over what to do with the data received from the createCall function. I don't mind if the method becomes a member function of the class. Any help would be appreciated.
//Function to pass. Defined by the person using the class in their project.
var someFunction = function(data){
console.log(data)
}
//And I have a class i.e. the library.
class A {
constructor(user, handler) {
this.user = user;
this.notificationHandler = handler;
}
createCall(){
var result = new Promise (function(resolve,reject) {
resolve(callApi());
});
//doesn't work. Keeps saying notificationHandler is not a function
result.then(function(resp) {
this.notificationHandler(resp);
}) ;
//I want to pass this resp back to the function I had passed in the
// constructor.
//How do I achieve this.
}
callApi(){ ...somecode... }
}
// The user creates an object of the class like this
var obj = new A("abc#gmail.com", someFunction);
obj.createCall(); // This call should execute the logic inside someFunction after the resp is received.
Arrow functions (if your Node version supports them) are convenient here:
class A {
constructor(user, handler) {
this.user = user;
this.notificationHandler = handler;
}
createCall() {
var result = new Promise(resolve => {
// we're fine here, `this` is the current A instance
resolve(this.callApi());
});
result.then(resp => {
this.notificationHandler(resp);
});
}
callApi() {
// Some code here...
}
}
Inside arrow functions, this refers to the lexical context in which the function was defined, in our case the current instance of A. The old-school (ECMAScript 5) way would be:
createCall() {
// save current instance in a variable for further use
// inside callback functions
var self = this;
var result = new Promise(function(resolve) {
// here `this` is completely irrelevant;
// we need to use `self`
resolve(self.callApi());
});
result.then(function(resp) {
self.notificationHandler(resp);
});
}
Check here for details: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions#No_separate_this
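A third option, if you prefer not to introduce a self variable, is Function.prototype.bind; a minimal sketch of the same method:
createCall() {
  var result = new Promise(function(resolve) {
    resolve(this.callApi());
  }.bind(this)); // bind the executor to the current A instance

  result.then(function(resp) {
    this.notificationHandler(resp);
  }.bind(this)); // bind the then-callback as well
}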

Sending extra parameters to handler

Basically I have this
CSGOCli.playerProfileRequest(CSGOCli.ToAccountID(userId));
But I want some extra information I want in the event listener. So I thought I could change it to this:
CSGOCli.playerProfileRequest(CSGOCli.ToAccountID(row.steam64ID), "checkRank", myData[0]);
This is the event listener right now:
CSGOCli.on("playerProfile", function(profile) {
So I'm guessing it would have to look like this:
CSGOCli.on("playerProfile", function(profile, eventType, userData) {
Now the handler that handles these functions is this:
CSGO.CSGOClient.prototype.playerProfileRequest = function(accountId, event_type, my_data, req_level, callback) {
callback = callback || null;
if (!this._gcReady) {
if (this.debug) {
util.log("GC not ready");
}
return null;
}
if (this.debug) {
util.log("Sending player profile request");
}
var payload = new protos.CMsgGCCStrike15_v2_ClientRequestPlayersProfile({
account_id: accountId,
request_level: req_level || 32
});
this._gc.send({msg:CSGO.ECSGOCMsg.k_EMsgGCCStrike15_v2_ClientRequestPlayersProfile, proto: {}}, payload.toBuffer(), callback);
};
var handlers = CSGO.CSGOClient.prototype._handlers;
handlers[CSGO.ECSGOCMsg.k_EMsgGCCStrike15_v2_PlayersProfile] = function onPlayerProfileResponse(message) {
var playerProfileResponse = protos.CMsgGCCStrike15_v2_PlayersProfile.decode(message);
if (this.debug) {
util.log("Received player profile");
}
this.emit("playerProfile", playerProfileResponse, eventType, userData);
};
As you can see, in the last line (this.emit) I have already changed how I want it to look, but this is where my problem comes in. How can I edit this handler so that the emitter also passes along my extra info from the request function?
I'm sorry if I'm being unclear here.
EDIT:
Solved by just using global variables.
So:
var event_type = null;
var myData = null;
Without editing the handler, I set these variables in the request function and then use them later in the event listener.
Not a very elegant solution, but it worked for me.
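If the globals ever become a problem, one alternative (a sketch, assuming only one profile request is in flight at a time) is to capture the extra data in a closure and attach a one-time listener with once(), leaving the handler untouched; handleProfile below is just an illustrative name:
function requestProfileWithContext(steam64Id, eventType, userData) {
  // once() removes the listener after the first "playerProfile" event,
  // so each request keeps its own eventType/userData
  CSGOCli.once("playerProfile", function(profile) {
    handleProfile(profile, eventType, userData);
  });
  CSGOCli.playerProfileRequest(CSGOCli.ToAccountID(steam64Id));
}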

Javascript accessing object and array defined in modular function

This is a bit foreign to me and I'm probably not understanding it correctly. This is what I have:
var imgModule = (function() {
var imgLocations = {};
var images = [];
imgLocations.setImage = function(img, location) {
imgLocations[img] = location;
}
imgLocations.getImg = function(img) {
return imgLocations[img];
}
imgLocations.setImageArray = function(img) {
images.push(img);
}
imgLocations.getImageArray = function() {
return images;
}
return imgLocations;
}());
I want to be able to access the imgLocations Object and images array from outside this function. The setting functions work, but
document.getElementById("but").onclick = function() {
console.log(imgModule.imgLocations.getImageArray());
console.log(imgModule.imgLocations.getImg(imgName));
}
Both return "undefined". How do I access these variables? And how can I improve this function? Please be patient with me and explain what I'm doing wrong :) I'm trying to learn it the right way instead of defining a global variable outside all functions.
The reason this isn't working is that your imgModule returns the imgLocations object itself. That being the case, imgModule actually is the imgLocations object, so you would access your methods like so:
imgModule.setImage()
imgModule.getImg()
imgModule.getImageArray()
imgModule.setImageArray()
And as @gillesc stated, if you want to keep the current syntax of imgModule.imgLocations.getImg(), you could return imgLocations like so:
return {
imgLocations: imgLocations
}
Doing so would allow you to add more functionality to your module:
return {
imgLocations: imgLocations,
otherObject: otherObject
}
...
imgModule.otherObject.someFunctionCall();
The problem is you are returning the object created and are not setting it as a property of an object.
So in your case this is how it would work.
document.getElementById("but").onclick = function() {
console.log(imgModule.getImageArray());
console.log(imgModule.getImg(imgName));
}
What you need to do is return it like this
return {
imgLocations: imgLocations
}
That is what you need if you want to keep the API you are intending to create and still have access to the array, which you cannot do currently.
You don't access imgModule.imgLocations; since what you return is imgLocations itself, you should access the methods as:
document.getElementById("but").onclick = function() {
console.log(imgModule.getImageArray());
console.log(imgModule.getImg(imgName));
}
It seems you are trying to use the module pattern.
For a deeper understanding, I recommend the following article:
The Module Pattern, by Addy Osmani
and pay attention to the example with a counter:
var testModule = (function () {
var counter = 0;
return {
incrementCounter: function () {
return counter++;
},
resetCounter: function () {
console.log( "counter value prior to reset: " + counter );
counter = 0;
}
};
})();
// Usage:
// Increment our counter
testModule.incrementCounter();
// Check the counter value and reset
// Outputs: counter value prior to reset: 1
testModule.resetCounter();
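Applied to the original imgModule, the same pattern would look roughly like this (keeping the question's method names):
var imgModule = (function() {
  // Private state, only reachable through the returned API
  var imgLocations = {};
  var images = [];

  return {
    setImage: function(img, location) {
      imgLocations[img] = location;
    },
    getImg: function(img) {
      return imgLocations[img];
    },
    setImageArray: function(img) {
      images.push(img);
    },
    getImageArray: function() {
      return images;
    }
  };
}());

// Usage:
// imgModule.setImage("logo", "img/logo.png");
// imgModule.getImg("logo");       // "img/logo.png"
// imgModule.getImageArray();      // []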

How do you implement a stream that properly handles backpressure in node.js?

I can't for the life of me figure out how to implement a stream that properly handles backpressure. Should you never use pause and resume?
I have this implementation I'm trying to get to work correctly:
var StreamPeeker = exports.StreamPeeker = function(myStream, callback) {
stream.Readable.call(this, {highWaterMark: highWaterMark})
this.stream = myStream
myStream.on('readable', function() {
var data = myStream.read(5000)
//process.stdout.write("Eff: "+data)
if(data !== null) {
if(!this.push(data)) {
process.stdout.write("Pause")
this.pause()
}
callback(data)
}
}.bind(this))
myStream.on('end', function() {
this.push(null)
}.bind(this))
}
util.inherits(StreamPeeker, stream.Readable)
StreamPeeker.prototype._read = function() {
process.stdout.write("resume")
//this.resume() // putting this in for some reason causes the stream to not output???
}
It correctly sends output, but doesn't correctly produce backpressure. How can I change it to properly support backpressure?
OK, I finally figured it out after lots of trial and error. A couple of guidelines:
Never ever use pause or resume (otherwise it'll go into legacy "flowing" mode)
Never add a "data" event listener (otherwise it'll go into legacy "flowing" mode)
It's the implementor's responsibility to keep track of when the source is readable
It's the implementor's responsibility to keep track of when the destination wants more data
The implementation should not read any data until the _read method is called
The argument to read tells the source to give it that many bytes; it's probably best to pass the argument given to this._read into the source's read method. This way you should be able to configure how much to read at a time at the destination, and the rest of the stream chain should be automatic.
So this is what I changed it to:
Update: I created a Readable that is much easier to implement with proper back-pressure, and should have just as much flexibility as node's native streams.
var stream = require('stream')
var Readable = stream.Readable
var util = require('util')
// an easier Readable stream interface to implement
// requires that subclasses:
// implement a _readSource function that
// * gets the same parameter as Readable._read (size)
// * should return either data to write, or null if the source doesn't have more data yet
// call 'sourceHasData(hasData)' when the source starts or stops having data available
// calls 'end()' when the source is out of data (forever)
var Stream666 = {}
Stream666.Readable = function() {
stream.Readable.apply(this, arguments)
if(this._readSource === undefined) {
throw new Error("You must define a _readSource function for an object implementing Stream666")
}
this._sourceHasData = false
this._destinationWantsData = false
this._size = undefined // can be set by _read
}
util.inherits(Stream666.Readable, stream.Readable)
Stream666.Readable.prototype._read = function(size) {
this._destinationWantsData = true
if(this._sourceHasData) {
pushSourceData(this, size)
} else {
this._size = size
}
}
Stream666.Readable.prototype.sourceHasData = function(_sourceHasData) {
this._sourceHasData = _sourceHasData
if(_sourceHasData && this._destinationWantsData) {
pushSourceData(this, this._size)
}
}
Stream666.Readable.prototype.end = function() {
this.push(null)
}
function pushSourceData(stream666Readable, size) {
var data = stream666Readable._readSource(size)
if(data !== null) {
if(!stream666Readable.push(data)) {
stream666Readable._destinationWantsData = false
}
} else {
stream666Readable._sourceHasData = false
}
}
// creates a stream that can view all the data in a stream and passes the data through
// correctly supports backpressure
// parameters:
// stream - the stream to peek at
// callback - called when there's data sent from the passed stream
var StreamPeeker = function(myStream, callback) {
Stream666.Readable.call(this)
this.stream = myStream
this.callback = callback
myStream.on('readable', function() {
this.sourceHasData(true)
}.bind(this))
myStream.on('end', function() {
this.end()
}.bind(this))
}
util.inherits(StreamPeeker, Stream666.Readable)
StreamPeeker.prototype._readSource = function(size) {
var data = this.stream.read(size)
if(data !== null) {
this.callback(data)
return data
} else {
this.sourceHasData(false)
return null
}
}
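A usage sketch for the rewritten StreamPeeker (input.txt is just an assumed example file; stream and util are required as above):
var fs = require('fs')

// Peek at every chunk flowing from the file to stdout,
// while back-pressure is still propagated end to end
var source = fs.createReadStream('input.txt')
var peeker = new StreamPeeker(source, function(chunk) {
  process.stderr.write('saw ' + chunk.length + ' bytes\n')
})
peeker.pipe(process.stdout)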
Old Answer:
// creates a stream that can view all the data in a stream and passes the data through
// correctly supports backpressure
// parameters:
// stream - the stream to peek at
// callback - called when there's data sent from the passed stream
var StreamPeeker = exports.StreamPeeker = function(myStream, callback) {
stream.Readable.call(this)
this.stream = myStream
this.callback = callback
this.reading = false
this.sourceIsReadable = false
myStream.on('readable', function() {
this.sourceIsReadable = true
this._readMoreData()
}.bind(this))
myStream.on('end', function() {
this.push(null)
}.bind(this))
}
util.inherits(StreamPeeker, stream.Readable)
StreamPeeker.prototype._read = function() {
this.reading = true
if(this.sourceIsReadable) {
this._readMoreData()
}
}
StreamPeeker.prototype._readMoreData = function() {
if(!this.reading) return;
var data = this.stream.read()
if(data !== null) {
if(!this.push(data)) {
this.reading = false
}
this.callback(data)
}
}

Why doesn't this program output any data?

I'm messing with the Node.js 0.10 Stream classes to try and figure out how to use them. I'm not sure why this experiment isn't working. It's supposed to output letters of the alphabet to an HTTP response object, but does not. I've annotated the source with a few comments.
Thank you!
var Readable = require('stream').Readable
, inherits = require('util').inherits
, http = require('http');
/**
* A stream that streams the English alphabet
*/
function AlphabetStream() {
Readable.call(this);
this.code = this.offset = 'a'.charCodeAt(0);
this.last = 'z'.charCodeAt(0);
}
inherits(AlphabetStream, Readable);
AlphabetStream.prototype._read = function(size) {
for (var i = 0; i < size; i++)
this.push(this.next_char());
this.push(null);
};
AlphabetStream.prototype.next_char = function() {
var cycle = this.last+1;
return String.fromCharCode((++this.code % cycle) + this.offset);
};
/**
* An HTTP server, prints the first n letters of the English alphabet
*/
var server = http.createServer(function(req, res) {
// $ curl localhost:3001/?size=11
var size = require('url').parse(req.url, true).query.size;
if (size) {
var rs = new AlphabetStream;
rs.pipe(res); // This calls `_read()` with `size` 16kb
rs.read(parseInt(size)); // This also calls `_read()` with `size` 16kb
}
res.end(''); // Nothing gets printed, despite the pipe and the reading.
});
server.listen(3001, function() {
console.log('Listening on 3001');
});
Have a look at this piece of code:
if (size) {
var rs = new AlphabetStream;
rs.pipe(res); // This calls `_read()` with `size` 16kb
rs.read(parseInt(size)); // This also calls `_read()` with `size` 16kb
}
res.end(''); // Nothing gets printed, despite the pipe and the reading.
You end the response (last line) before the actual piping can take place (this happens because .pipe is asynchronous). What you should do is something like this:
if (size) {
var rs = new AlphabetStream;
rs.pipe(res);
rs.read(parseInt(size));
} else {
// NOTE THE ELSE STATEMENT
res.end('');
}
The .pipe function will take care of ending the destination stream (i.e. the response) unless explicitly stated otherwise; see the docs:
http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
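For example, if you did want to keep the response open after the stream is done, you could pass end: false and end the response yourself (a sketch):
rs.pipe(res, { end: false });
rs.on('end', function() {
  res.end('\n-- done --'); // append a trailer after the piped data
});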
EDIT: As for why 16kb? Well, I had to do some tests and it seems that this is the default behaviour of .pipe (and I'm not sure how to change that, to be honest). First of all, note that this line:
rs.read(parseInt(size));
is totally useless (you can remove it). .pipe will take care of reading the data. Now the default behaviour is to read chunks of 16kb of data. So in order to do what you are trying to do you should probably pass size to the constructor of AlphabetStream, like this:
function AlphabetStream(size) {
Readable.call(this);
this.code = this.offset = 'a'.charCodeAt(0);
this.last = 'z'.charCodeAt(0);
this.size = size; // <--- store size here
}
inherits(AlphabetStream, Readable);
AlphabetStream.prototype._read = function(size) {
// this allows the stream to be a true stream
// it reads only as much data as it can
// but .read can be called multiple times without issues
// with a predefined limit
var chunk = Math.min(size, this.size);
this.size -= chunk;
for (var i = 0; i < chunk; i++) {
this.push(this.next_char());
}
if (!this.size) {
// end the stream only when the limit is reached
this.push(null);
}
};
After all, a stream should not depend on how much data you read. Then you do:
if (size) {
var rs = new AlphabetStream(parseInt(size));
rs.pipe(res);
} else {
res.end('');
}
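To sanity-check the revised stream outside of HTTP, you could also pipe it straight to stdout (a quick sketch; it simply emits whatever next_char() produces):
var rs = new AlphabetStream(5);
rs.pipe(process.stdout); // streams 5 characters, then ends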
