fs.createReadStream('b.mp3') reduce latency - node.js

I'm playing around with events triggering sounds (500 ms long), so I use the lame library.
var lame = require('lame');
var fs = require('fs');
var Speaker = require('speaker');

while (listening) {
  if (eventIsFired) {
    fs.createReadStream('b.mp3')
      .pipe(new lame.Decoder())
      .pipe(new Speaker());
  }
}
Is there any way to preload the stream/file so I won't need to load it on every single event? It actually blocks my whole while loop, and making it async didn't work. How can I reduce latency and make this more efficient?

You can cache the mp3 file in a buffer, then convert that buffer to a readable stream whenever you need it.
var lame = require('lame');
var fs = require('fs');
var Speaker = require('speaker');
var Readable = require('stream').Readable;

var mp3Buffer = fs.readFileSync('b.mp3');

while (listening) {
  if (eventIsFired) {
    bufferToReadableStream(mp3Buffer)
      .pipe(new lame.Decoder())
      .pipe(new Speaker());
  }
}

function bufferToReadableStream(buffer) {
  let stream = new Readable();
  stream.push(buffer);
  stream.push(null);
  return stream;
}
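If you are on a newer Node.js (12+, or 10.17+), stream.Readable.from() can replace the helper function entirely; a minimal sketch, reusing the cached mp3Buffer, lame.Decoder and Speaker from the code above:

const { Readable } = require('stream');

// Readable.from() turns an iterable into a readable stream.
// Wrapping the buffer in an array yields it as a single chunk.
Readable.from([mp3Buffer])
  .pipe(new lame.Decoder())
  .pipe(new Speaker());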

How to convert buffer to stream in Nodejs

I'm running into a problem converting a buffer into a stream in Node.js. Here is the code:
var fs = require('fs');
var b = Buffer([80,80,80,80]);
var readStream = fs.createReadStream({path:b});
The code raises an exception:
TypeError: path must be a string or Buffer
However, the Node.js documentation says that a Buffer is accepted by fs.createReadStream():
fs.createReadStream(path[, options])
  path <string> | <Buffer> | <URL>
  options <string> | <Object>
Could anybody answer this question? Thanks very much!
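(For reference: the error occurs because an object, {path: b}, is being passed rather than the Buffer itself, and in any case the Buffer accepted by fs.createReadStream() is a file path encoded as bytes, not file contents, so Buffer([80,80,80,80]) would be treated as a path named "PPPP". A hypothetical path-as-Buffer call looks like the sketch below; to stream in-memory data, convert the buffer to a readable stream as the answers that follow show.)

const fs = require('fs');

// The Buffer here encodes a *path* ('./some-file.txt' is a hypothetical file), not file contents.
fs.createReadStream(Buffer.from('./some-file.txt')).pipe(process.stdout);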
Node.js 8+
convert Buffer to Stream
const { Readable } = require('stream');

/**
 * @param {Buffer} binary
 * @returns {Readable} readableInstanceStream
 */
function bufferToStream(binary) {
  const readableInstanceStream = new Readable({
    read() {
      this.push(binary);
      this.push(null);
    }
  });

  return readableInstanceStream;
}
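A quick usage sketch of the helper above (the file name is just a placeholder):

const fs = require('fs');

const cached = fs.readFileSync('b.mp3');   // load the file into memory once
const readable = bufferToStream(cached);    // replay the buffer as a stream

readable.on('data', (chunk) => console.log('got %d bytes', chunk.length));
readable.on('end', () => console.log('done'));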
Node 0.10+
convert Buffer to Stream
var Readable = require('stream').Readable;

function bufferToStream(buffer) {
  var stream = new Readable();
  stream.push(buffer);
  stream.push(null);
  return stream;
}
const { Readable } = require('stream');

class BufferStream extends Readable {
  constructor(buffer) {
    super();
    this.buffer = buffer;
  }

  _read() {
    this.push(this.buffer);
    this.push(null);
  }
}

function bufferToStream(buffer) {
  return new BufferStream(buffer);
}
I have rewritten the solution from Alex Dykyi in a functional style:
var Readable = require('stream').Readable;

// Note: push(null) returns false, so use the comma operator (not &&)
// to make sure the reduce expression evaluates to the stream itself.
var stream = [file_buffer, null].reduce(
  (stream, data) => (stream.push(data), stream),
  new Readable()
);

nodejs memory buffer synchronized .pipe()

I am trying to migrate pdfkit from png-js to pngjs2, because png-js doesn't support interlaced PNGs. I need to load the PNG file in a sync way. I'm trying to do it this way:
var fs = require('fs'),
    PNG = require('pngjs2').PNG;
var stream = require('stream');

var bufferStream = new stream.PassThrough();
var buf = fs.readFileSync('./logs/rid12.png');
bufferStream.end(buf);

var png = null;
bufferStream.pipe(new PNG())
  .on('parsed', function() {
    console.log("here");
    png = this;
  });
console.log("there", png);
"there" happens before "here", so png is null. Is it possible to pipe inmemory buffer to PNG parser so that I don't have to make callback architecture?
There is no way to make an async method synchronous.
There are libraries such as https://github.com/scriby/asyncblock which may be able to fake it.
asyncblock(function(flow) {
  var png = null;
  flow.sync(bufferStream.pipe(new PNG())
    .on('parsed', function() {
      console.log("here");
      png = this;
      flow.callback();
    })
  );
  // png not null
});
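These days the more common way to structure this is to wrap the 'parsed' event in a Promise and await it; it still isn't synchronous, but it avoids nesting callbacks. A sketch, assuming the same pngjs2 parser and file as above:

const fs = require('fs');
const stream = require('stream');
const PNG = require('pngjs2').PNG;

function parsePng(buf) {
  return new Promise(function(resolve, reject) {
    const bufferStream = new stream.PassThrough();
    bufferStream.end(buf);
    bufferStream.pipe(new PNG())
      .on('parsed', function() { resolve(this); })  // `this` is the parsed PNG instance
      .on('error', reject);
  });
}

(async function() {
  const buf = fs.readFileSync('./logs/rid12.png');
  const png = await parsePng(buf);
  console.log('parsed', png.width, png.height);     // png is no longer null here
})();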

One symbol at a time stream with node

I'm trying to implement a stream which returns one symbol from a file on each 'data' event.
I ended up with the code below:
var util = require('util'),
    fs = require('fs'),
    Readable = require('stream').Readable;

var SymbolReadStream = function(filename, options) {
  Readable.call(this);
  this._readable = fs.createReadStream(filename, options).pause();
  self = this;
  this._readable.on('readable', function() {
    var chunk;
    chunk = self._readable.read(1);
    // I believe the problem is here
    self._readable.pause();
  });
};

util.inherits(SymbolReadStream, Readable); // inherit the prototype methods

SymbolReadStream.prototype._read = function() {
  this._readable.resume();
};

var r = new SymbolReadStream("test.txt", {
  encoding: 'utf8',
});
r.on('data', function(el) {
  console.log(el);
});
But this code doesn't work. Please help.
Is there an easier way to achieve the behavior?
This post gives a great clue on how to answer your question.
Also, you should take a look at pipe, which would be a cleaner way to accomplish what you're trying to do: piping an adapter onto the file stream instead of wrapping it up.
That said, personally I wouldn't reinvent the wheel here, and would just search for modules that can accomplish this, especially "split" modules, making them split on every char instead of on new lines. As an example, event-stream has a split method that "takes the same arguments as string.split except it defaults to '\n' instead of ','". So the logic would be to try myStream.pipe(es.split('')), but the module treats this like myStream.pipe(es.split()), which breaks on lines. So here's my solution, using a regex to say "break on each char":
var es = require('event-stream');
var fs = require('fs');
var symbolStream = fs.createReadStream(filename, options).pipe(es.split(/(?!$)/));
EDIT: event-stream seems to use the split module internally, so you can even try
var split = require('split');
var fs = require('fs');
var symbolStream = fs.createReadStream(filename, options).pipe(split(/(?!$)/));
(a loose test in that module is responsible for converting '' to \r\n)
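The "adapter" idea mentioned above can also be hand-rolled as a small Transform stream instead of pulling in a module; a sketch, not from the original answer, that uses a StringDecoder so multi-byte characters aren't broken across Buffer chunks (the file name comes from the question's code):

var fs = require('fs');
var Transform = require('stream').Transform;
var StringDecoder = require('string_decoder').StringDecoder;
var util = require('util');

function CharStream(options) {
  Transform.call(this, options);
  this._decoder = new StringDecoder('utf8');
}
util.inherits(CharStream, Transform);

CharStream.prototype._transform = function(chunk, encoding, callback) {
  // Decode the incoming chunk, then push one character downstream at a time.
  var text = this._decoder.write(chunk);
  for (var i = 0; i < text.length; i++) {
    this.push(text[i]);
  }
  callback();
};

var symbolStream = fs.createReadStream('test.txt')
  .pipe(new CharStream({ encoding: 'utf8' }));

symbolStream.on('data', function(ch) {
  console.log(ch);   // one character per 'data' event
});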
In your stream implementation there is no emitting of a 'data' event to the handler. Because of that, console.log is never called. After adding the events, the data will be streamed symbol by symbol. Example below:
var util = require('util'),
    fs = require('fs'),
    Readable = require('stream').Readable;

function SymbolReadStream(filename, options) {
  if (!(this instanceof SymbolReadStream)) {
    return new SymbolReadStream(filename, options);
  }
  Readable.call(this);
  this._readable = fs.createReadStream(filename, options);
}

util.inherits(SymbolReadStream, Readable); // inherit the prototype methods

SymbolReadStream.prototype._read = function() {
  var self = this;
  this._readable.on('readable', function() {
    var chunk;
    while (null !== (chunk = self._readable.read(1))) {
      self.emit('data', chunk);
    }
  });
  this._readable.on('end', function() {
    self.emit('end');
  });
};

var r = new SymbolReadStream("test.txt", {
  encoding: 'utf8',
});

r.on('data', function(el) {
  console.log(el);
});
r.on('end', function(el) {
  console.log('done');
});

Collect data from a readable stream into a variable

I'm trying to implement a writable stream that will save the data that is written to it into a variable. This is the implementation of the writable stream:
var util = require('util');
var Writable = require('stream').Writable;

function Collector() {
  Writable.call(this, {objectMode: true});
  this.entities = [];
}

util.inherits(Collector, Writable);

Collector.prototype._write = function (chunk, encoding, callback) {
  this.entities.push(chunk);
  callback();
};

module.exports = Collector;
and this is how I'm trying to test it out:
var fs = require('fs');
var Tokenizer = require('./tokenizer');
var Processor = require('../parser');
var Collector = require('./collector.js');

var tokenizer = new Tokenizer();
var processor = new Processor();
var collector = new Collector();

var readable = fs.createReadStream('./test/fixtures/test.dxf');
readable.pipe(tokenizer)
  .pipe(parser)
  .pipe(processor); // if this is piped to stdout, lots of data
console.log(collector.entities); // logs an empty array
I'm not sure why, but the entities property is empty after everything has been piped. If I console.log this.entities within the _write function, the data is available.
Ultimately I want to be able to call a function that returns an array whose elements are made up of data chunks from Processor. Collector was some hacking to see how I could do it, but I haven't gotten very far.
How can I store chunks from a readable stream in a variable and return them from a function?
It returns an empty array because the streaming has not finished yet. You should pipe the data into the collector and listen for its finish event in order to properly get your entities array:
var fs = require('fs');
var Tokenizer = require('./tokenizer');
var Processor = require('../parser');
var Collector = require('./collector.js');

var tokenizer = new Tokenizer();
var processor = new Processor();
var collector = new Collector();

var readable = fs.createReadStream('./test/fixtures/test.dxf');
readable.pipe(tokenizer)
  .pipe(parser)
  .pipe(processor)
  .pipe(collector)
  .on('finish', function() {
    console.log(collector.entities);
  });
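Since the ultimate goal was "a function that returns an array", one option (a sketch built on the same Collector and pipeline as above; `parser` is whatever it refers to in the question's snippet) is to wrap the pipeline in a Promise that resolves with the collected entities:

function collectEntities(path) {
  return new Promise(function(resolve, reject) {
    var collector = new Collector();
    fs.createReadStream(path)
      .pipe(new Tokenizer())
      .pipe(parser)
      .pipe(new Processor())
      .pipe(collector)
      .on('finish', function() { resolve(collector.entities); })  // all chunks collected
      .on('error', reject);                                       // collector errors only, for brevity
  });
}

// usage
collectEntities('./test/fixtures/test.dxf').then(function(entities) {
  console.log(entities.length);
});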

Expressjs/connect - Simulate slow file upload

Is there a way of setting a mode or value in Connect or Express to simulate slow file uploads?
First, install the limiter module, then create a Transform stream that will throttle any stream:
var util = require('util');
var Transform = require('stream').Transform;
var TokenBucket = require('limiter').TokenBucket;

function BucketStream(rate, interval, parentBucket, options) {
  Transform.call(this, options);
  this.bucket = new TokenBucket(Number.POSITIVE_INFINITY, rate, interval, parentBucket);
}
util.inherits(BucketStream, Transform);

BucketStream.prototype._transform = function(chunk, encoding, callback) {
  this.bucket.removeTokens(chunk.length, function(err) {
    callback(err, chunk);
  });
};
Then, since the request is a readable stream:
var bucketStream = new BucketStream(1024 * 500, 'second'); // 500KB/sec
req.pipe(bucketStream);
Now read from bucketStream (instead of from req) as fast as you want; you'll only get 500KB/sec.
I did this off the top of my head, so beware :)
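As a usage sketch (the route path and output file are hypothetical), the throttled stream can be dropped into an Express route that receives a raw upload:

var express = require('express');
var fs = require('fs');

var app = express();

app.post('/upload', function(req, res) {
  // Throttle the incoming request body to ~500KB/sec before writing it to disk.
  var bucketStream = new BucketStream(1024 * 500, 'second');
  req.pipe(bucketStream).pipe(fs.createWriteStream('/tmp/upload.bin'))
    .on('finish', function() {
      res.sendStatus(200);
    });
});

app.listen(3000);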
