I am receiving a base64-encoded string in my Node.js server in chunks, and I want to convert it into a stream that can be read by another process, but I can't figure out how to do it. Currently, I have this code:
const stream = Readable.from(Buffer.from(data, 'base64'));
But this creates a new stream instance each time; what I would like to do is keep appending to the open stream until no more data is received from my front end. How do I create an appendable stream that I can add to and that can be read by another process?
--- Additional information ---
The client connects to the Node.js server via a websocket. I read the "data" from the payload of the received websocket message:
socket.on('message', async function (res) {
  try {
    let payload = JSON.parse(res);
    let payloadType = payload['type'];
    let data = payload['data'];
    // ... handle the chunk
  } catch (err) {
    console.error(err);
  }
});
--- Edit ---
I am getting this error message after pushing to the stream.
Error [ERR_METHOD_NOT_IMPLEMENTED]: The _read() method is not implemented
at Readable._read (internal/streams/readable.js:642:9)
at Readable.read (internal/streams/readable.js:481:10)
at maybeReadMore_ (internal/streams/readable.js:629:12)
at processTicksAndRejections (internal/process/task_queues.js:82:21) {
code: 'ERR_METHOD_NOT_IMPLEMENTED'
}
This is the code that reads from it and is connected to the stream:
const getAudioStream = async function* () {
  for await (const chunk of micStream) {
    if (chunk.length <= SAMPLE_RATE) {
      yield {
        AudioEvent: {
          AudioChunk: encodePCMChunk(chunk),
        },
      };
    }
  }
};
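For context, one common way to get an appendable stream is a PassThrough that is written to on every websocket message and ended when the client is done. Below is a minimal sketch, assuming the same socket handler as above and a hypothetical payload.done flag as the end-of-stream signal:

const { PassThrough } = require('stream');

// One long-lived stream that downstream consumers can read from
const audioStream = new PassThrough();

socket.on('message', function (res) {
  const payload = JSON.parse(res);

  if (payload.done) {
    // Hypothetical end-of-stream signal from the front end
    audioStream.end();
    return;
  }

  // Append each decoded chunk to the open stream
  audioStream.write(Buffer.from(payload.data, 'base64'));
});

A PassThrough implements both _read() and _write(), so writing to it avoids the ERR_METHOD_NOT_IMPLEMENTED error, and the same instance could then stand in for micStream in getAudioStream above.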
I'm trying to pipe one Axios response stream into multiple files. It's not working, and I can reproduce the issue with the simple code below:
Will work:
const { PassThrough } = require('stream')
const inputStream = new PassThrough()
inputStream.write('foo')
// Now I have a stream with content
inputStream.pipe(process.stdout)
inputStream.pipe(process.stderr)
// will print 'foofoo', for both stdout and stderr
Will not work:
const { PassThrough } = require('stream')
const inputStream = new PassThrough()
inputStream.write('foo')
inputStream.pipe(process.stdout)
setImmediate(() => {
  inputStream.pipe(process.stderr)
})
// Will print only 'foo'
The question is: can I say that the existing content in the stream will be piped only if the two pipe commands execute in the same event-loop iteration?
Doesn't that make the situation non-deterministic?
By the time the callback scheduled with setImmediate is executed, the stream data has already been flushed. This can be checked via the .readableLength stream property.
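As a rough illustration of that check (a sketch, not part of the original answer):

const { PassThrough } = require('stream')

const inputStream = new PassThrough()
inputStream.write('foo')
console.log(inputStream.readableLength)   // 3: 'foo' is still buffered

inputStream.pipe(process.stdout)

setImmediate(() => {
  // By now the buffered data has been flushed to stdout
  console.log(inputStream.readableLength) // 0
})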
You can use cork and uncork in order to control when the buffered stream data is flushed.
const { PassThrough } = require('stream')
const inputStream = new PassThrough()
inputStream.cork()
inputStream.write('foo')
inputStream.pipe(process.stdout)
setImmediate(() => {
  inputStream.pipe(process.stderr)
  inputStream.uncork()
})
I have a need to pipe a readable stream into both a buffer (to be converted into a string) and a file. The stream is coming from node-fetch.
Node.js streams have two modes: paused and flowing. From what I understand, as soon as a 'data' listener is attached, the stream switches to flowing mode. I want to make sure the way I am reading a stream will not lose any bytes.
Method 1: piping and reading from 'data':
fetch(url).then(
  response =>
    new Promise(resolve => {
      const buffers = []
      const dest = fs.createWriteStream(filename)
      response.body.pipe(dest)
      response.body.on('data', chunk => buffers.push(chunk))
      dest.on('close', () => resolve(Buffer.concat(buffers).toString()))
    })
)
Method 2: using passthrough streams:
const { PassThrough } = require('stream')
fetch(url).then(
  response =>
    new Promise(resolve => {
      const buffers = []
      const dest = fs.createWriteStream(filename)
      const forFile = new PassThrough()
      const forBuffer = new PassThrough()
      response.body.pipe(forFile).pipe(dest)
      response.body.pipe(forBuffer)
      forBuffer.on('data', chunk => buffers.push(chunk))
      dest.on('close', () => resolve(Buffer.concat(buffers).toString()))
    })
)
Is the second method required so that no data is lost? Is the second method wasteful, since two more streams could be buffered? Or is there another way to fill a buffer and a write stream simultaneously?
You won't miss any data, since .pipe internally calls src.on('data') and writes any chunk to the target stream.
So any chunk written to your dest stream will also be emitted to response.body's 'data' listener, where you're buffering the chunks.
In any case, you should listen to 'error' events and reject if any error occurs.
And while your second method will work, you don't need it.
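For example, Method 1 with that error handling added might look roughly like this (a sketch based on the question's code, assuming fetch and fs are already required):

fetch(url).then(
  response =>
    new Promise((resolve, reject) => {
      const buffers = []
      const dest = fs.createWriteStream(filename)
      response.body.pipe(dest)
      response.body.on('data', chunk => buffers.push(chunk))
      response.body.on('error', reject)   // network / read errors
      dest.on('error', reject)             // filesystem / write errors
      dest.on('close', () => resolve(Buffer.concat(buffers).toString()))
    })
)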
This is a chunk of code from the .pipe function
src.on('data', ondata);
function ondata(chunk) {
  debug('ondata');
  var ret = dest.write(chunk);
  debug('dest.write', ret);
  if (ret === false) {
    // If the user unpiped during `dest.write()`, it is possible
    // to get stuck in a permanently paused state if that write
    // also returned false.
    // => Check whether `dest` is still a piping destination.
    if (((state.pipesCount === 1 && state.pipes === dest) ||
         (state.pipesCount > 1 && state.pipes.indexOf(dest) !== -1)) &&
        !cleanedUp) {
      debug('false write response, pause', state.awaitDrain);
      state.awaitDrain++;
    }
    src.pause();
  }
}
So, I've created a read stream that first connects to an SFTP and starts reading from a file. At any point, my code can unpipe that readstream and do something else. For example, I might use this to get the first few rows of a CSV and stop reading.
The problem is, I don't know how to listen for the unpipe event in my readStream constructor so that I can correctly close the SFTP connection. I use a flush method in write streams; is there something like that for read streams?
Here's a simplified portion of my readStream constructor:
const Client = require('ssh2').Client,
      nom = require('noms');

function getStream (get) {
  const self = this;
  const conn = new Client();
  let client,
      fileData,
      buffer,
      totalBytes = 0,
      bytesRead = 0;

  let read = function (size, next) {
    const read = this;
    // Read each chunk of the file
    client.read(fileData, buffer, bytesRead, size, bytesRead,
      function (err, byteCount, buff, pos) {
        bytesRead += byteCount;
        read.push(buff);
        next();
      }
    );
  };

  let before = function (start) {
    // setup the connection BEFORE we start _read
    conn.on('ready', function () {
      conn.sftp(function (err, sftp) {
        sftp.open(get, 'r', function (err, fd) {
          sftp.fstat(fd, function (err, stats) {
            client = sftp;
            fileData = fd;
            totalBytes = stats.size;
            buffer = new Buffer(totalBytes);
            start();
          });
        });
      });
    }).connect(credentials);
  };

  return nom(read, before);
}
Later I might call myStream.pipe(writeStream) and then myStream.unpipe(). But because I have no way of listening for that unpipe event, the reading stops, but the SFTP connection stays open and eventually times out.
Any ideas?
So, after doing more research, I learned that ReadStreams are not passed the unpipe event when you call readStream.unpipe(writeStream). That event is passed to just the writeStream. In order to listen for the unpipe, you need to explicitly emit an event on the readStream, like so:
readStream.emit('unpipe');
You can listen for this event anywhere, inside or outside your stream constructor, which is really convenient. So, that would make the code above look like this:
function getStream (get) {
  /**
   * ... stuff
   * ... read()
   * ... before()
   * ... etc
   */
  let readStream = nom(read, before);

  readStream.on('unpipe', function () {
    console.log('called unpipe on read stream');
  });

  return readStream;
}
Moral of the story: streams already have the EventEmitter class methods, so you can emit and listen for custom events out of the box.
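For illustration, the calling side might then look roughly like this (a sketch; the remote path and writeStream are placeholders, not from the original post):

const myStream = getStream('/remote/data.csv');  // hypothetical path
myStream.pipe(writeStream);

// ... later, once the first few rows have been read
myStream.unpipe(writeStream);
myStream.emit('unpipe');  // let the read stream know so it can clean up

Inside the 'unpipe' handler in the constructor, the SFTP connection could then be closed (e.g. with conn.end()).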
I have an infinite data stream from a forked process. I want this stream to be processed by a module and sometimes I want to duplicate the data from this stream to be processed by a different module (e.g. monitoring a data stream but if anything interesting happens I want to log the next n bytes to file for further investigation).
So let's suppose the following scenario:
I start the program and start consuming the readable stream.
2 seconds later I want to process the same data for 1 second with a different stream reader.
Once the time is up I want to close the second consumer, but the original consumer must stay untouched.
Here is a code snippet for this:
var stream = process.stdout;
stream.pipe(detector); // Using the first consumer

function startAnotherConsumer() {
  stream2 = new PassThrough();
  stream.pipe(stream2);
  // use stream2 somewhere else
}

function stopAnotherConsumer() {
  stream.unpipe(stream2);
}
My problem here is that unpiping stream2 doesn't close it. If I call stream.end() after the unpipe command, then it crashes with the error:
events.js:160
throw er; // Unhandled 'error' event
^
Error: write after end
at writeAfterEnd (_stream_writable.js:192:12)
at PassThrough.Writable.write (_stream_writable.js:243:5)
at Socket.ondata (_stream_readable.js:555:20)
at emitOne (events.js:101:20)
at Socket.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:176:18)
at Socket.Readable.push (_stream_readable.js:134:10)
at Pipe.onread (net.js:548:20)
I even tried pausing the source stream to let the second stream's buffer flush, but it didn't work either:
function stopAnotherConsumer() {
  stream.pause();
  stream2.once('unpipe', function () {
    stream.resume();
    stream2.end();
  });
  stream.unpipe(stream2);
}
Same error as before here (write after end).
How to solve the problem? My original intent is to duplicate the streamed data from one point, then close the second stream after a while.
Note: I tried to use this answer to make it work.
As there were no answers, I'm posting my (patchwork) solution. If anyone has a better one, don't hold it back.
A new Stream:
const Writable = require('stream').Writable;
const Transform = require('stream').Transform;

class DuplicatorStream extends Transform {
  constructor(options) {
    super(options);
    this.otherStream = null;
  }

  attachStream(stream) {
    if (!(stream instanceof Writable)) {
      throw new Error('DuplicatorStream argument is not a writable stream!');
    }
    if (this.otherStream) {
      throw new Error('A stream is already attached!');
    }
    this.otherStream = stream;
    this.emit('attach', stream);
  }

  detachStream() {
    if (!this.otherStream) {
      throw new Error('No stream to detach!');
    }
    let stream = this.otherStream;
    this.otherStream = null;
    this.emit('detach', stream);
  }

  _transform(chunk, encoding, callback) {
    if (this.otherStream) {
      this.otherStream.write(chunk);
    }
    callback(null, chunk);
  }
}

module.exports = DuplicatorStream;
And the usage:
const { PassThrough } = require('stream');
const DuplicatorStream = require('./DuplicatorStream'); // the class from above

var stream = process.stdout;
var stream2;

var duplicatorStream = new DuplicatorStream();
stream.pipe(duplicatorStream);   // Inserting my duplicator stream in the chain
duplicatorStream.pipe(detector); // Using the first consumer

function startAnotherConsumer() {
  stream2 = new PassThrough();
  duplicatorStream.attachStream(stream2);
  // use stream2 somewhere else
}

function stopAnotherConsumer() {
  duplicatorStream.once('detach', function () {
    stream2.end();
  });
  duplicatorStream.detachStream();
}
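To drive the scenario described above (start duplicating 2 seconds in, stop 1 second later), the two helpers could be triggered with plain timers, for example:

// Hypothetical timing taken from the scenario above
setTimeout(() => {
  startAnotherConsumer();                 // begin duplicating the data
  setTimeout(stopAnotherConsumer, 1000);  // stop the second consumer after 1 second
}, 2000);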
var http = require('http');
var map = require('through2-map');

var uc = map(function (ch) {
  return ch.toString().toUpperCase();
});

var server = http.createServer(function (request, response) {
  request.on('data', function (chunk) {
    if (request.method == 'POST') {
      // change the data from request to uppercase letters and
      // pipe to response.
    }
  });
});

server.listen(8000);
I have two questions about the code above. First, I read the documentation for request; it says that request is an instance of IncomingMessage, which implements Readable Stream. However, I couldn't find the .on method in the Stream documentation, so I don't know what chunk in the callback passed to request.on does. Secondly, I want to do some manipulation of the data from the request and pipe it to the response. Should I pipe from chunk or from request? Thank you for your consideration!
Is chunk a stream?
Nope. The stream is the flow along which the chunks of the whole data are sent.
A simple example: if you read a 1 GB file, a stream will read it in chunks of, say, 10 KB; each chunk will go through your stream, from beginning to end, in the right order.
I use a file as an example, but a socket, a request, or any other stream is based on the same idea.
Also, whenever someone sends a request to this server, would that entire thing be a chunk?
In the particular case of HTTP, only the body is a stream: the posted files/data of the request, or the body of the response. Headers are treated as objects applied to the request before the body is written to the socket.
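As a tiny sketch of that distinction (not from the original answer): the headers are already a parsed object by the time the request callback runs, while the body arrives as a stream of chunks.

const http = require('http');

http.createServer(function (request, response) {
  // Headers are plain objects, available immediately
  console.log(request.headers['content-type']);

  // Only the body is a stream; echo it back chunk by chunk
  request.pipe(response);
}).listen(8000);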
Here is a small example to help you with some concrete code:
var through2 = require('through2');
var Readable = require('stream').Readable;

var s1 = through2(function transform(chunk, enc, cb) {
  console.log("s1 chunk %s", chunk.toString());
  cb(null, chunk.toString() + chunk.toString());
});

var s2 = through2(function transform(chunk, enc, cb) {
  console.log("s2 chunk %s", chunk.toString());
  cb(null, chunk);
});

s2.on('data', function (data) {
  console.log("s2 data %s", data.toString());
});

s1.on('end', function () {
  console.log("s1 end");
});

s2.on('end', function () {
  console.log("s2 end");
});

var rs = new Readable();
rs.push('beep '); // this is a chunk
rs.push('boop');  // this is a chunk
rs.push(null);    // this is a signal to end the stream

rs.on('end', function () {
  console.log("rs end");
});

console.log(
  ".pipe always returns the piped stream: %s", rs.pipe(s1) === s1
);

s1.pipe(s2);
I would suggest you read more:
https://github.com/substack/stream-handbook
http://maxogden.com/node-streams.html
https://github.com/maxogden/mississippi
All streams are instances of EventEmitter (docs); that is where the .on method comes from.
Regarding the second question, you MUST pipe from the Stream object (request in this case). The "data" event emits data as a Buffer or a String (the "chunk" argument in the event listener), not a stream.
Manipulating streams is usually done by implementing a Transform stream (docs). There are many npm packages available that make this process simpler (like through2-map and the like), though in reality they produce Transform streams.
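For comparison, the same uppercasing logic written as a plain Transform stream (a sketch of roughly what through2-map builds for you):

const { Transform } = require('stream');

const uc = new Transform({
  transform(chunk, encoding, callback) {
    // Uppercase each chunk and pass it downstream
    callback(null, chunk.toString().toUpperCase());
  }
});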
Consider the following:
var http = require('http');
var map = require('through2-map');

// Transform stream to uppercase
var uc = map(function (ch) {
  return ch.toString().toUpperCase();
});

var server = http.createServer(function (request, response) {
  // Pipe from the request to our transform stream
  request
    .pipe(uc)
    // pipe from the transform stream to the response
    .pipe(response);
});

server.listen(8000);
You can test by running curl:
$ curl -X POST -d 'foo=bar' http://localhost:8000
# logs FOO=BAR