I use this piece of code to download a torrent:
var torrentStream = require('torrent-stream');
var engine = torrentStream('magnet:?xt=urn:btih:44A91362AFFF802F9058993B109C544ACC6B4813');
engine.on('ready', function() {
  engine.files.forEach(function(file) {
    console.log('filename:', file.name);
    var stream = file.createReadStream();
    // stream is a readable stream containing the file's content
  });
});
This torrent is downloaded correctly by uTorrent, but it doesn't work in Node.js (nothing happens). Any ideas why? Maybe the p2p network was not bootstrapped? How can I do this?
Thanks
Of course nothing happens, because you don't do anything with the stream in the example.
If you want to save it to a file you can create a write stream:
var fs = require('fs');
var writeStream = fs.createWriteStream('file2.txt');
stream.pipe(writeStream);
or you can use the stream with events:
var data = '';
stream.on('data', chunk => {
  data += chunk;
});
stream.on('end', () => {
  console.log(data);
});
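Putting it together, here is a minimal sketch (the output paths are simply the torrent's own file names) that saves every file in the torrent to disk:
var fs = require('fs');
var torrentStream = require('torrent-stream');

var engine = torrentStream('magnet:?xt=urn:btih:44A91362AFFF802F9058993B109C544ACC6B4813');
engine.on('ready', function() {
  engine.files.forEach(function(file) {
    console.log('downloading:', file.name);
    // piping the read stream into a file actually consumes (and thus downloads) the data
    file.createReadStream().pipe(fs.createWriteStream(file.name));
  });
});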
Related
I use the officegen library to create a docx file, then I need to zip it and return it as the response. I can easily save the docx file, then create a zip file and return that, but is there any alternative way to write the docx to something like a memory stream instead of a file stream, and create the zip file from that?
I tried the following code with the memory-streams library; it is not working. I think its WritableStream is not similar to a file stream, because if you replace the WritableStream with the output of createWriteStream, it works.
var streams = require('memory-streams');
var AdmZip = require('adm-zip');

var writer = new streams.WritableStream();
writer.on('close', function() {
  var zip = new AdmZip();
  zip.addFile("test2.docx", writer.toBuffer());
  var willSendthis = zip.toBuffer();
  return res.end(Buffer.from(willSendthis, 'binary'));
});
writer.on('error', function() {
  console.log("error");
});
doc.on('error', function() {
  console.log("error");
});
doc.generate(writer);
This is not something very specific to officegen; it just needs a writable stream similar to a file stream, one that emits a 'close' event.
I think the following library is better than the one you are using.
https://www.npmjs.com/package/memorystream
Example:
var MemoryStream = require('memorystream');
var memStream = new MemoryStream(['Hello', ' ']);
var data = '';
memStream.on('data', function(chunk) {
  data += chunk.toString();
});
memStream.write('World');
memStream.on('end', function() {
  // outputs 'Hello World!'
  console.log(data);
});
memStream.end('!');
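Applied to the question, here is a sketch of the officegen-to-zip flow with memorystream (doc and res are the officegen document and Express response from the question):
var MemoryStream = require('memorystream');
var AdmZip = require('adm-zip');

var writer = new MemoryStream();
var chunks = [];
writer.on('data', function(chunk) {
  chunks.push(chunk); // buffer the docx bytes as officegen writes them
});
writer.on('end', function() {
  var zip = new AdmZip();
  zip.addFile('test2.docx', Buffer.concat(chunks));
  res.end(zip.toBuffer()); // send the zip without touching the filesystem
});
doc.generate(writer);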
So I'm trying to take a file used as a starting template, add data to it in the stream (without altering the original file), and serve it to the client without saving a new file on the server (I'm currently using the express module as well).
So far I pass the data in a POST request and add it to the end of the stream. Unfortunately, when you pipe the read stream to a write stream you have to specify the output file and location for the write stream. Is there any way around that? Can you set the output file location as the relevant port?
This is what I currently have (getting error: Cannot pipe, not readable):
app.post("/output_xml", function(req, res) {
  var data = validateJSON(req.body);
  stream_xml(data, res);
});

function stream_xml(data, res)
{
  var read_stream = fs.createReadStream(__dirname + '/Static/input_template.xml');
  var write_stream = fs.createWriteStream(__dirname + '/Static/output.xml'); // trying to prevent saving a file to the server though
  read_stream.pipe(write_stream);
  read_stream.on('end', () => {
    write_stream.write(data);
    write_stream.write("\nAdding more stuff");
  });
  write_stream.pipe(res);
}
Would I be able to swap the write_stream line for anything like:
var write_stream = fs.createWriteStream('http://localhost:3000/output_xml/output.xml')
You cannot pipe from a write stream, but you can certainly pipe from a transform/duplex stream.
So you can do something like:
const { Transform } = require('stream');

const custom_response = new Transform({
  transform(chunk, encoding, callback) {
    this.push(chunk, encoding);
    callback();
  },
  flush(callback) {
    this.push(data);
    this.push("\nAdding more stuff");
    callback();
  },
});

read_stream.pipe(custom_response).pipe(res);
An alternative to stream.Transform may be stream.PassThrough, which takes the same parameters as Transform but only requires the flush method to be specified.
https://nodejs.org/api/stream.html#stream_implementing_a_transform_stream
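For example, the same custom_response can be written with PassThrough (a sketch reusing read_stream, data, and res from the question):
const { PassThrough } = require('stream');

const custom_response = new PassThrough({
  flush(callback) {
    // runs after the template has passed through, so the extra data lands at the end
    this.push(data);
    this.push("\nAdding more stuff");
    callback();
  },
});

read_stream.pipe(custom_response).pipe(res);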
After I emit an error event in MyWritableStream, data transmission stops. What do I need to do to resume the data transfer?
var readable = fs.createReadStream('test.txt');
var writable = new MyWritableStream();
writable.on('error', function(error) {
  console.log('error', error);
  // How can I resume?
});
writable.on('finish', function() {
  console.log('finished');
});
readable.pipe(writable);
I know this question is old, but you might wanna check out https://github.com/miraclx/xresilient
I built this for this exact same reason (works best with seekable streams).
You define a function that returns a readable stream, the library measures the number of bytes that have passed through until an error is met.
Once the readable stream encounters an error event, it recalls the defined function with the number of bytes read so you can index the stream source.
Example:
const fs = require('fs');
const xresilient = require('xresilient');
const readable = xresilient(({bytesRead}) => {
  return generateSeekableStreamSomehow({start: bytesRead});
}, {retries: 5});
const writable = fs.createWriteStream('file.test');
readable.pipe(writable);
File streams are indexable with the start option of the fs.createReadStream() function.
HTTP Requests are indexable with the Range HTTP Header.
Check it out.
https://www.npmjs.com/package/xresilient
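For example, a sketch of a resumable file-to-file copy using that start option (source.bin and copy.bin are placeholder names):
const fs = require('fs');
const xresilient = require('xresilient');

const readable = xresilient(({bytesRead}) => {
  // recreate the read stream from the last good byte offset on each retry
  return fs.createReadStream('source.bin', {start: bytesRead});
}, {retries: 5});

readable.pipe(fs.createWriteStream('copy.bin'));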
I am not sure if it is normal practice, but I can't see another solution for now, and it works for me. If you can advise a more accurate solution, please do.
We can track the readable stream instance using the 'pipe' event on the writable one:
var util = require('util');
var Writable = require('stream').Writable;

function WriteableStream(options) {
  Writable.call(this, options);
  this.source = null;
  var instance = this;
  this.on('pipe', function(source) {
    instance.source = source;
  });
}
util.inherits(WriteableStream, Writable);
So, when we emit the error event and the readable stream is unpiped automatically, we can re-pipe it ourselves:
WriteableStream.prototype._write = function(chunk, encoding, done) {
  this.emit('error', new Error('test')); // unpipes the readable
  done();
};
WriteableStream.prototype.resume = function() {
  this.source.pipe(this); // re-pipes the readable
};
Finally, we will use it the following way:
var readable = fs.createReadStream(file);
var writeable = new WriteableStream();
writeable.on('error', function(error) {
  console.log('error', error);
  writeable.resume();
});
readable.pipe(writeable);
The following line will download an image file from a specified url variable:
var filename = path.join(__dirname, url.replace(/^.*[\\\/]/, ''));
request(url).pipe(fs.createWriteStream(filename));
And these lines will take that image and save to MongoDB GridFS:
var gfs = Grid(mongoose.connection.db, mongoose.mongo);
var writestream = gfs.createWriteStream({ filename: filename });
fs.createReadStream(filename).pipe(writestream);
Chaining pipe like this throws Error: 500 Cannot Pipe. Not Pipeable.
request(url).pipe(fs.createWriteStream(filename)).pipe(writestream);
This happens because the image file is not ready to be read yet, right? What should I do to get around this problem?
Using the following: Node.js 0.10.10, mongoose, request and gridfs-stream libraries.
request(url).pipe(fs.createWriteStream(filename)).pipe(writestream);
is the same as this:
var fileStream = fs.createWriteStream(filename);
request(url).pipe(fileStream);
fileStream.pipe(writestream);
So the issue is that you are attempting to .pipe one WriteStream into another WriteStream.
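A minimal fix, keeping the intermediate file, is to wait for the download's write stream to finish and only then open the read stream for GridFS (a sketch based on the snippets above):
request(url)
  .pipe(fs.createWriteStream(filename))
  .on('close', function() {
    // the image is fully on disk now, so it is safe to read it back
    fs.createReadStream(filename).pipe(gfs.createWriteStream({ filename: filename }));
  });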
// create 'fs' module variable
var fs = require("fs");
// open the streams
var readerStream = fs.createReadStream('inputfile.txt');
var writerStream = fs.createWriteStream('outputfile.txt');
// pipe the read and write operations
// read input file and write data to output file
readerStream.pipe(writerStream);
I think the confusion in chaining pipes is caused by the fact that the pipe method implicitly "makes choices" on its own about what to return. That is:
readableStream.pipe(writableStream) // returns the writable stream, which ends the chain
readableStream.pipe(duplexStream) // returns the duplex stream, whose readable side can be piped again
But the general rule says that "you can only pipe a Readable Stream into a Writable Stream." In other words, only Readable Streams have the pipe() method.
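For instance, with only core modules, a duplex stream in the middle (zlib's Gzip here) keeps the chain alive, while the final writable stream ends it:
const fs = require('fs');
const zlib = require('zlib');

fs.createReadStream('in.txt')
  .pipe(zlib.createGzip())                  // duplex: pipe() returns it, so the chain continues
  .pipe(fs.createWriteStream('in.txt.gz'))  // writable: the chain must end here
  .on('finish', () => console.log('done'));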
You cannot chain another pipe off the WriteStream because it is not duplex. Instead, wait for the write to finish, then read the file back; for a gzipped archive:
var zlib = require('zlib');

request.get(url, {
  gzip: true,
  encoding: null
})
.pipe(fs.createWriteStream(tmpPath))
.on('close', function() {
  console.info("downloaded %s", tmpPath);
  fs.createReadStream(tmpPath)
    .pipe(zlib.createGunzip())
    .pipe(fs.createWriteStream(destPath))
    .on('close', function() {
      console.info("unarchived %s", destPath);
    })
    .on('error', (error) => {
      console.warn("gzip error:", error);
    });
})
.on('error', (error) => {
  console.warn("download error:", error);
});
I am trying to use Node.js to save a processed image stored in a base64 string.
var buff = new Buffer(base64data,'base64');
console.log(base64data);
var stream = fs.createWriteStream('/path/to/thefile.png');
stream.write(buff)
stream.end()
However, the resulting file is empty.
When I take the output of console.log(base64data); and decode it locally, it produces a valid png binary, so why is the file empty?
The file is a 3600x4800 px png file (i.e. it's huge), could this be a factor?
Also, I tried writeFile as well, no luck.
And yes, fs is require('fs')
Thanks
Your stream.end() happens too soon, before anything has been written; the write is asynchronous, remember. End the stream only after the write has flushed:
var buff = Buffer.from(base64data, 'base64');
console.log(base64data);
var stream = fs.createWriteStream('/path/to/thefile.png');
stream.write(buff, function() {
  // the write has been flushed; now it is safe to end the stream
  stream.end();
});
Better:
var buff = Buffer.from(base64data, 'base64');
console.log(base64data);
var stream = fs.createWriteStream('/path/to/thefile.png');
stream.write(buff);
stream.end();
stream.on('finish', () => {
  // all writes are now complete
});
stream.on('error', (error) => {...});
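Since the question mentions trying writeFile as well, make sure its callback error is actually checked; a sketch:
var fs = require('fs');
fs.writeFile('/path/to/thefile.png', Buffer.from(base64data, 'base64'), function(err) {
  if (err) return console.error('write failed:', err);
  console.log('all bytes are on disk');
});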