I have an object in node that has .stdin, .stderr and .stdout objects attached to it. I have stdout and stderr working as Writable streams and I'm having some trouble getting stdin to work as a Readable stream.
In the use case I'm building for I could do the following:
var stream = require('stream');
var util = require('util');
var Readable = stream.Readable;
var My_Stdin = function(){
    Readable.call(this, {objectMode: true});
};
util.inherits(My_Stdin, Readable);

My_Stdin.prototype._read = function(data) {
    this.emit(data);
};

function myObject(){
    this.stdin = new My_Stdin;
    this.stdin.on('data', function(data){
        process.stdout.write(data);
    });
}

var object = new myObject;
process.stdin.pipe(object.stdin);
Right now I'm getting an error that says:
_stream_readable.js:525
var ret = dest.write(chunk);
^
TypeError: dest.write is not a function
How do I properly implement a stdin on my object similar to child_process.spawn?
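A quick note on the error: process.stdin.pipe(object.stdin) writes into object.stdin, so that stream has to be writable, and a plain Readable has no write method (child_process.spawn's stdin is a Writable for the same reason). A minimal sketch, assuming a PassThrough (which is both writable and readable) is acceptable in place of the custom class:

var stream = require('stream');

function MyObject() {
    // PassThrough is writable, so other streams can pipe into it,
    // and readable, so the data can be consumed again downstream
    this.stdin = new stream.PassThrough();
}

var object = new MyObject();
object.stdin.on('data', function (data) {
    process.stdout.write(data);
});

process.stdin.pipe(object.stdin);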
I'm trying to download files from GitHub with the fetch() function, and then save the fetched file stream to disk with the fs module.
When I do, I get this error:
TypeError [ERR_INVALID_ARG_TYPE]: The "transform.writable" property must be an instance of WritableStream. Received an instance of WriteStream
My problem is that I don't know the difference between WriteStream and WritableStream, or how to convert one into the other.
This is the code I run:
var fs = require('fs');

async function downloadFile(link, filename = "download") {
    var response = await fetch(link);
    var body = await response.body;
    var filepath = "./" + filename;
    var download_write_stream = fs.createWriteStream(filepath);
    console.log(download_write_stream.writable);
    await body.pipeTo(download_write_stream);
}
Node.js: v18.7.0
You can use Readable.fromWeb to convert body, which is a ReadableStream from the web streams API, into a NodeJS Readable stream that can be used with the fs methods.
Note that readable.pipe returns the destination stream immediately, before the data has finished flowing. To wait for it to finish, you can use the promise version of stream.finished, or you can add listeners for the 'finish' and 'error' events to detect success or failure.
const fs = require('fs');
const { Readable } = require('stream');
const { finished } = require('stream/promises');
async function downloadFile(link, filepath = './download') {
    const response = await fetch(link);
    const body = Readable.fromWeb(response.body);
    const download_write_stream = fs.createWriteStream(filepath);
    await finished(body.pipe(download_write_stream));
}
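For example, a hypothetical call (the URL and target path here are placeholders, not from the original question):

downloadFile('https://nodejs.org/dist/index.json', './index.json')
    .then(() => console.log('saved'))
    .catch(console.error);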
Good question. Web streams are relatively new and are a different way of handling streams. The WritableStream documentation shows that you can create one as follows:
import {
    WritableStream
} from 'node:stream/web';

const stream = new WritableStream({
    write(chunk) {
        console.log(chunk);
    }
});
Then, you could create a custom stream that writes each chunk to disk. An easy way could be:
import fs from 'node:fs';

const download_write_stream = fs.createWriteStream('./the_path');

const stream = new WritableStream({
    write(chunk) {
        download_write_stream.write(chunk);
    },
});

async function downloadFile(link, filename = 'download') {
    const response = await fetch(link);
    const body = await response.body;
    await body.pipeTo(stream);
}
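One detail the snippet above leaves out is closing the file: the underlying fs stream is never ended, so its file descriptor stays open. A hedged variant that ends the file stream once the web stream has delivered everything could look like this:

const stream = new WritableStream({
    write(chunk) {
        download_write_stream.write(chunk);
    },
    close() {
        // end the fs stream once the web stream signals it is done
        download_write_stream.end();
    },
});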
I am working with the Node csv-parse module, and my read stream comes from stdin. I am piping that into the parser. Inside the parser's readable handler I make an async HTTP call to upload the data, and during that time I want the parser to pause reading until the async call has finished executing.
var parse = require('csv-parse');
var output = [];

// Create the parser
var parser = parse({delimiter: ',', columns: true, trim: true});

parser.on('readable', function () {
    while (record = parser.read()) {
        console.log('Still reading');
        var jsonRec = convertIpToInt(record);
        var jsonData = JSON.stringify(jsonRec);
        output.push(new Buffer(jsonData + "\n"));
        //parser.pause();
        var assertPromise = uploadBatches1(indexName, function() {
            //parser.resume();
            console.log('Returned');
        });
    }
});

function uploadBatches1(indexToAssert, cb) {
    // uploads the data received from the parser
}
Since csv-parse exposes a readable stream, you can pause the stream with parser.pause().
Likewise, when your upload is complete, you can call parser.resume(). You can also check whether you need to resume beforehand using parser.isPaused().
You can read more about readable streams and what the pause method does here:
https://nodejs.org/api/stream.html#stream_readable_pause
Based on the above, you should handle the parser's 'data' event instead of using the while (record = parser.read()) loop. Pausing the stream does nothing if you are actively calling the read method yourself.
Consider restructuring your code like so, and then pipe your source (stdin or a file read stream) into the parser; a one-line example of feeding the parser follows the snippet:
var parse = require('csv-parse');
var output = [];

// Create the parser
var parser = parse({delimiter: ',', columns: true, trim: true});

parser.on('data', function (record) {
    console.log('Still reading');
    var jsonRec = convertIpToInt(record);
    var jsonData = JSON.stringify(jsonRec);
    output.push(Buffer.from(jsonData + "\n")); // Buffer.from instead of the deprecated new Buffer
    parser.pause();
    var assertPromise = uploadBatches1(indexName, function() {
        parser.resume();
        console.log('Returned');
    });
});

function uploadBatches1(indexToAssert, cb) {
    // uploads the data received from the parser
}
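For completeness, a minimal way to feed the parser, assuming the input really does come from stdin as the question states:

process.stdin.pipe(parser);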
I'm trying to implement a stream that returns one symbol from a file on each 'data' event.
I ended up with the code below:
var util = require('util'),
    fs = require('fs'),
    Readable = require('stream').Readable;

var SymbolReadStream = function(filename, options) {
    Readable.call(this);
    this._readable = fs.createReadStream(filename, options).pause();
    self = this;
    this._readable.on('readable', function() {
        var chunk;
        chunk = self._readable.read(1);
        // I believe the problem is here
        self._readable.pause();
    });
};
util.inherits(SymbolReadStream, Readable); // inherit the prototype methods

SymbolReadStream.prototype._read = function() {
    this._readable.resume();
};

var r = new SymbolReadStream("test.txt", {
    encoding: 'utf8',
});
r.on('data', function(el) {
    console.log(el);
});
but this code doesn't work. Please help.
Is there an easier way to achieve the behavior?
This post gives a great clue on how to answer your question.
Also, you should take a look at pipe, which would be a cleaner way to accomplish what you're trying to do: piping an adapter to the file stream instead of wrapping it up.
That said, personally I wouldn't reinvent the wheel here, and would just look for modules that can accomplish this, especially "split" modules, making them split on every character instead of on new lines. As an example, event-stream has a split method that "takes the same arguments as string.split except it defaults to '\n' instead of ','". So the logic would be to try myStream.pipe(es.split('')), but the module treats this like myStream.pipe(es.split()), which splits on lines. So here's my solution, using a regex to say "break on each char":
var es = require('event-stream');
var fs = require('fs');
var symbolStream = fs.createReadStream(filename, options).pipe(es.split(/(?!$)/));
EDIT: event-stream seems to use split module internally, so you can even try
var split = require('split');
var fs = require('fs');
var symbolStream = fs.createReadStream(filename, options).pipe(split(/(?!$)/));
(a loose truthiness test inside the module is what converts '' back into the default \r\n matcher)
In your stream implementation, no 'data' event is ever emitted to the handler, which is why console.log is never called. After adding the events, the symbols will be streamed one by one. Example below:
var util = require('util'),
    fs = require('fs'),
    Readable = require('stream').Readable;

function SymbolReadStream(filename, options) {
    if (!(this instanceof SymbolReadStream)) {
        return new SymbolReadStream(filename, options);
    }
    Readable.call(this);
    this._readable = fs.createReadStream(filename, options);
}
util.inherits(SymbolReadStream, Readable); // inherit the prototype methods

SymbolReadStream.prototype._read = function() {
    var self = this;
    this._readable.on('readable', function() {
        var chunk;
        while (null !== (chunk = self._readable.read(1))) {
            self.emit('data', chunk);
        }
    });
    this._readable.on('end', function() {
        self.emit('end');
    });
};

var r = new SymbolReadStream("test.txt", {
    encoding: 'utf8',
});
r.on('data', function(el) {
    console.log(el);
});
r.on('end', function(el) {
    console.log('done');
});
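For reference, a variant that stays inside the Readable contract would push one character per _read call instead of emitting 'data' manually. This is only a sketch of the idea (the class name is mine), not something tested against the original files:

var util = require('util'),
    fs = require('fs'),
    Readable = require('stream').Readable;

function CharReadStream(filename, options) {
    Readable.call(this, { encoding: 'utf8' });
    this._readable = fs.createReadStream(filename, options);
    // when the inner stream ends, signal end-of-stream downstream
    this._readable.on('end', this.push.bind(this, null));
}
util.inherits(CharReadStream, Readable);

CharReadStream.prototype._read = function() {
    var chunk = this._readable.read(1); // ask the inner stream for one character
    if (chunk !== null) {
        this.push(chunk);               // hand exactly one symbol downstream
    } else {
        // nothing buffered yet; try again once the inner stream becomes readable
        this._readable.once('readable', this._read.bind(this));
    }
};

new CharReadStream("test.txt", { encoding: 'utf8' })
    .on('data', function (el) { console.log(el); })
    .on('end', function () { console.log('done'); });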
I'm trying to implement a writable stream that will save the data that is written to it into a variable. This is the implementation of the writable stream:
var util = require('util');
var Writable = require('stream').Writable;

function Collector()
{
    Writable.call(this, {objectMode: true});
    this.entities = [];
}
util.inherits(Collector, Writable);

Collector.prototype._write = function (chunk, encoding, callback)
{
    this.entities.push(chunk);
    callback();
};

module.exports = Collector;
and this is how I'm trying to test it out:
var fs = require('fs');
var Tokenizer = require('./tokenizer');
var Processor = require('../parser');
var Collector = require('./collector.js');

var tokenizer = new Tokenizer();
var processor = new Processor();
var collector = new Collector();

var readable = fs.createReadStream('./test/fixtures/test.dxf');

readable.pipe(tokenizer)
    .pipe(parser)
    .pipe(processor); // if this is piped to stdout, lots of data

console.log(collector.entities); // logs an empty array
I'm not sure why, but the entities property is empty after everything has been piped. If I console.log this.entities inside the _write function, the data is available.
Ultimately I want to be able to call a function that returns an array whose elements are made up of data chunks from Processor. Collector was some hacking to see how I could do it, but I haven't gotten very far.
How can I store chunks from a readable stream in a variable and return them from a function?
It returns an empty array because your streaming has not finished yet. You should listen for the finish event in order to properly get your entities array:
var fs = require('fs');
var Tokenizer = require('./tokenizer');
var Processor = require('../parser');
var Collector = require('./collector.js');

var tokenizer = new Tokenizer();
var processor = new Processor();
var collector = new Collector();

var readable = fs.createReadStream('./test/fixtures/test.dxf');

readable.pipe(tokenizer)
    .pipe(parser)
    .pipe(processor)
    .pipe(collector) // the collector has to be part of the pipeline to receive any chunks
    .on('finish', function() {
        console.log(collector.entities);
    });
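Since the goal is a function that returns the collected array, here is a sketch of one way to wrap the pipeline in a Promise. The collectEntities name is mine, and the tokenizer/processor stages are assumed to match the question's setup:

var fs = require('fs');
var Tokenizer = require('./tokenizer');
var Processor = require('../parser');
var Collector = require('./collector.js');

function collectEntities(path) {
    return new Promise(function (resolve, reject) {
        var collector = new Collector();
        fs.createReadStream(path)
            .pipe(new Tokenizer())
            .pipe(new Processor())
            .pipe(collector)
            .on('finish', function () { resolve(collector.entities); }) // resolve once every chunk has been written
            .on('error', reject); // note: earlier stages would need their own error handlers
    });
}

// usage:
collectEntities('./test/fixtures/test.dxf').then(function (entities) {
    console.log(entities);
});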
I'm working on the nodeschool stream adventure and got to the duplex redux challenge.
I solved the challenge using the duplexer module but wanted to solve it using native stream functionality.
Here is the solution using duplexer:
var duplexer = require('duplexer');
var through = require('through');

module.exports = function (counter) {
    var counts = {};
    var input = through(write, end);
    return duplexer(input, counter);

    function write (row) {
        counts[row.country] = (counts[row.country] || 0) + 1;
    }
    function end () { counter.setCounts(counts) }
};
So the exported function is handed the counter stream and returns a duplex stream. I'm trying to solve this again with:
var duplexer = require('duplexer');
var stream = require('stream');

module.exports = function (counter) {
    var counts = {};

    var ts = stream.Transform({ objectMode: true });
    ts._transform = function(obj, enc, next) {
        var country = obj.country;
        var count = counts[country] || 0;
        counts[obj.country] = count + 1;
        next();
    };

    ts.on('finish', function() {
        counter.setCounts(counts);
    });

    counter.pipe(ts);
    return ts;
};
When run as is, it results in no output at all, so I changed next() to next(null, JSON.stringify(counts)) (the stringify is because the next stream is not in object mode). I get output, but it's wrong, and it throws stream.push() after EOF.
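For what it's worth, one native way to mirror what duplexer(input, counter) does is a stream.Duplex whose writable side counts rows and whose readable side re-emits whatever counter produces. This is only a sketch of the idea and has not been run against the exercise's verifier:

var stream = require('stream');

module.exports = function (counter) {
    var counts = {};

    var duplex = new stream.Duplex({
        objectMode: true,
        // writable side: count each incoming row by country
        write: function (row, enc, next) {
            counts[row.country] = (counts[row.country] || 0) + 1;
            next();
        },
        // once the writable side ends, hand the totals to counter
        final: function (done) {
            counter.setCounts(counts);
            done();
        },
        // readable side: data is pushed from counter's events below
        read: function () {}
    });

    // mirror counter's output on the duplex's readable side (no backpressure handling)
    counter.on('data', function (chunk) { duplex.push(chunk); });
    counter.on('end', function () { duplex.push(null); });

    return duplex;
};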