Can I redirect Node.js eval output to a string, or capture it some other way?
Edit: I need to return it as a string as the response for my web application (POST data).
var http = require('http');

var server = http.createServer(function(req, res) {
  var script = '';
  if (req.method === 'POST' && req.url === '/doeval') {
    req.on('data', function(chunk) {
      script += chunk;
    });
    req.on('end', function() {
      // script = 'console.log("aaaa")';
      var result = eval(script);
      res.write(String(result));
      res.end();
    });
  }
});

server.listen(80);
output:
result: undefined
expected:
result: aaaa
Note: Running untrusted code is extremely dangerous and you should be extremely careful with whatever solution you choose. The one proposed here has flaws as well. To be safer, consider using a container, such as Docker or LXC.
Don't use eval in this case. It's the wrong tool for the job. Do you really want arbitrary code to be evaled in your webserver context? What if the sent code is throw new Error('haha') or process.exit()? It's a huge security risk! Write the code to a temporary file, spawn a node process that executes the file and read its output. See https://nodejs.org/api/child_process.html .
Example:
var http = require('http');
var fs = require('fs');
var childProcess = require('child_process');

var server = http.createServer(function(req, res) {
  var script = '';
  if (req.method === 'POST' && req.url === '/doeval') {
    req.on('data', function(chunk) {
      script += chunk;
    });
    req.on('end', function() {
      var tempFileName = Date.now() + '.js';
      fs.writeFile(tempFileName, script, function(err) {
        if (err) {
          // handle error
        }
        childProcess.execFile('node', [tempFileName], function(err, stdout, stderr) {
          if (err) {
            // handle err
          }
          // Delete tempfile...
          res.write(stdout);
          res.end();
        });
      });
    });
  }
});

server.listen(80);
There are existing npm packages that help with creating and cleaning up temporary files. Also have a look at the other methods of child_process.
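As an illustration, here is a minimal sketch using the tmp package (an assumption on my part; any temp-file helper would do), which creates a unique file and hands you a cleanup callback. It assumes script holds the request body collected as in the example above:

var tmp = require('tmp'); // npm install tmp
var fs = require('fs');
var childProcess = require('child_process');

// tmp.file creates a unique temporary file and gives us a cleanup callback.
tmp.file({ postfix: '.js' }, function(err, tempPath, fd, cleanup) {
  if (err) throw err;
  fs.writeFile(tempPath, script, function(err) {
    if (err) { cleanup(); throw err; }
    childProcess.execFile('node', [tempPath], function(err, stdout, stderr) {
      cleanup(); // removes the temp file whatever the outcome
      // use stdout/stderr here
    });
  });
});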
But, this is not secure yet because the subprocess will run with the same privileges as your server (which by the looks of it runs as root :-/ ).
You should set the owner (and the group) of the file and the subprocess to nobody or some other system user that basically doesn't have any rights to access anything. Or chroot the subprocess.
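As a minimal sketch of dropping privileges (assuming a Linux system where uid/gid 65534 is the nobody user; check /etc/passwd on yours), child_process accepts uid and gid options:

childProcess.execFile('node', [tempFileName], {
  uid: 65534, // run as "nobody" (assumed uid)
  gid: 65534, // run with group "nobody" (assumed gid)
  timeout: 5000 // also kill runaway scripts after 5 seconds
}, function(err, stdout, stderr) {
  // handle err / use stdout as before
});

Note that the parent process needs sufficient privileges (e.g. root) to switch the child to another user.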
Note: this was downvoted, but read the entire answer before jumping to conclusions.
First of all, this looks like a completely insecure functionality that can potentially open your system to countless vulnerabilities. But it's an interesting question, so I'll answer how you can do what you ask for; I strongly suggest you reconsider your requirements nonetheless.
That having been said, you could pass a fake console object to your evaluated script by wrapping it in a closure, in a similar way to how modules are wrapped when they are required.
Instead of eval you can use the vm module to run the script in a separate V8 context with no access to the file system or even require().
Note that I don't recommend saving it to a file and running it as a child process, because that way the script will have access to your file system, which is a serious vulnerability. The only setup in which I would ever consider running untrusted code as a standalone process is inside a container that has no access to the network or any shared storage outside of that container.
With eval
For example:
const util = require('util');
const script = 'console.log("aaaa");';
let result = '';
const cons = {
  log: (...args) => result += (util.format(...args) + '\n'),
};
eval(`((console) => { ${script} })`)(cons);
console.log('result:', result);
This will work if everything is synchronous. If the console.log happens asynchronously then you will have to add some way of waiting for the changes.
So this will not work:
const util = require('util');
const script = 'setTimeout(() => console.log("aaaa"), 1000);';
let result = '';
const cons = {
  log: (...args) => result += (util.format(...args) + '\n'),
};
eval(`((console) => { ${script} })`)(cons);
console.log('result:', result);
but this will:
const util = require('util');
const script = 'setTimeout(() => console.log("aaaa"), 1000);';
let result = '';
const cons = {
  log: (...args) => result += (util.format(...args) + '\n'),
};
eval(`((console) => { ${script} })`)(cons);
setTimeout(() => console.log('result:', result), 1500);
because it waits longer before inspecting the collected output than it takes the evaluated code to create that output.
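A more robust variant (just a sketch, and it assumes you can require submitted scripts to signal completion) is to pass a done callback into the wrapper and wait on a promise instead of guessing a delay:

const util = require('util');

// Assumption: the evaluated script calls done() when it is finished.
const script = 'setTimeout(() => { console.log("aaaa"); done(); }, 1000);';

let result = '';
const cons = {
  log: (...args) => result += (util.format(...args) + '\n'),
};

// Resolve once the evaluated code signals that it is done.
const finished = new Promise((resolve) => {
  eval(`((console, done) => { ${script} })`)(cons, resolve);
});

finished.then(() => console.log('result:', result));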
Without eval
You can run that code in a separate V8 context that has no access to other modules, file system, etc. For example:
const vm = require('vm');
const util = require('util');
const script = 'console.log("aaaa");';
let result = '';
const cons = {
  log: (...args) => result += (util.format(...args) + '\n'),
};
const context = vm.createContext({console: cons});
vm.runInContext(script, context);
console.log('result:', result);
Handling syntax errors
You can catch errors (both syntax errors and runtime errors like the one below) to make sure that a bad script will not crash your application:
const vm = require('vm');
const util = require('util');
const script = 'console.lo("aaaa");';
let result = '';
const cons = {
  log: (...args) => result += (util.format(...args) + '\n'),
};
const context = vm.createContext({console: cons});
try {
  vm.runInContext(script, context);
  console.log('result:', result);
} catch (err) {
  console.log('error:', err.message);
}
Now instead of crashing it will print:
error: console.lo is not a function
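On a related note (not shown in the original answer), vm.runInContext also accepts a timeout option, which guards against scripts that never terminate, something the try/catch above cannot handle on its own:

const vm = require('vm');

const context = vm.createContext({});
try {
  // Assumption: 100ms is an acceptable budget for the untrusted script.
  vm.runInContext('while (true) {}', context, { timeout: 100 });
} catch (err) {
  console.log('error:', err.message); // prints a "Script execution timed out" error
}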
Ultimately, I'd like to have an extra feature in my app if the app is running on AWS EC2.
How do I check and set a variable to indicate if it is on AWS or not? I found this thread to do the check, but upon startup how do I set a variable across the app like a boolean? Something like:
let checkAWS;
metadata.isEC2().then(function (onEC2) {
  checkAWS = true;
  console.log("EC2: " + onEC2);
});

let app = express();
app.locals.isAWS = checkAWS;
console.log(checkAWS);
Every time, I always get the same output:
undefined
EC2: true
I am using the isAWS variable in my .ejs file to decide on that functionality.
metadata.isEC2() is asynchronous so its value is available some time later when the .then() handler runs. Meanwhile, you're trying to use the value of checkAWS BEFORE that .then() handler has run.
Since you want access to that value before you start your Express server, you should only start it inside the .then() handler, like this:
let isAWS;
metadata.isEC2().then(function (onEC2) {
  console.log("EC2: " + onEC2);
  isAWS = true;
}).catch(err => {
  isAWS = false;
}).finally(() => {
  const app = express();
  app.locals.isAWS = isAWS;
  // do the rest of your app initialization here
  app.listen(...);
});

// do not use app here (it won't be defined here)
If you're trying to export app for other modules to use, you will have to take a slightly different approach. We'd have to see your broader code context to know what to recommend for that. But, basically the idea here is that you shouldn't start your server until you have the asynchronously retrieved isAWS result.
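One possible shape for that (a sketch under the assumption that consumers can await a promise, with hypothetical file names) is to export a promise that resolves to the configured app:

// app.js (hypothetical module layout)
const express = require('express');

module.exports = metadata.isEC2()
  .then(() => true)
  .catch(() => false)
  .then((isAWS) => {
    const app = express();
    app.locals.isAWS = isAWS;
    return app;
  });

// consumer.js
require('./app.js').then((app) => {
  app.listen(3000);
});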
Also, you should know that the metadata.isEC2() call you're using looks for a known endpoint on an EC2 instance to detect EC2. If that endpoint does not exist, this will take 500ms to time out and hit the .catch() branch above. If it is an EC2 instance, it will be quick.
Note, it might be simpler to just check for the presence of some environment variables that are automatically set by the AWS environment such as AWS_REGION or AWS_EXECUTION_ENV. Those can be checked for synchronously.
let app = express();
app.locals.isAWS = !!process.env.AWS_REGION;
For future reference in case anyone else is looking, this is what worked for me in order to be able to tell if you are running on AWS.
let isAwsSet = false; // has the app checked whether it is running on AWS by setting app.locals.isAWS
app.locals.isAWS = false;

app.get('/home', function(request, response) {
  if (!isAwsSet) {
    urlExists().then(function (onAWS) {
      app.locals.isAWS = true;
      isAwsSet = true;
      response.render('home');
    }).catch(function(error) {
      app.locals.isAWS = false;
      isAwsSet = true;
      response.render('home');
    });
  } else {
    response.render('home');
  }
});
function urlExists() {
  // assumes: const http = require('http');
  return new Promise((resolve, reject) => {
    // https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
    const options = {
      method: 'HEAD',
      host: '169.254.169.254',
      path: '/latest/meta-data/',
      port: 80,
      timeout: 1500
    };
    const req = http.request(options, (res) => {
      // reject on bad status
      if (res.statusCode !== 200) {
        return reject(new Error('statusCode=' + res.statusCode));
      }
      resolve(res.statusCode === 200);
    });
    req.on('timeout', () => {
      req.destroy();
    });
    req.on('error', function(err) {
      reject(err);
    });
    req.end();
  });
}
I am making use of "socket.io-client" and "socket.io-stream" to make a request and then stream some data. I have the following code that handles this logic.
Client Server Logic
router.get('/writeData', function(req, res) {
  var io = req.app.get('socketio');
  var nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
  var nameNodeData = {};

  async.waterfall([
    checkForDataNodes,
    readFileFromS3
  ], function(err, result) {
    if (err !== null) {
      res.json(err);
    } else {
      res.json("Finished Writing to DN's");
    }
  });

  function checkForDataNodes(cb) {
    nameNodeSocket.on('nameNodeData', function(data) {
      nameNodeData = data;
      console.log(nameNodeData);
      cb(null, nameNodeData);
    });
    if (nameNodeData.numDataNodes === 0) {
      cb("No datanodes found");
    }
  }

  function readFileFromS3(nameNodeData, cb) {
    for (var i in nameNodeData['blockToDataNodes']) {
      var IP = nameNodeData['blockToDataNodes'][i]['ipValue'];
      var dataNodeSocket = io.connect('http://' + IP + ":5000");
      var ss = require("socket.io-stream");
      var stream = ss.createStream();
      var byteStartRange = nameNodeData['blockToDataNodes'][i]['byteStart'];
      var byteStopRange = nameNodeData['blockToDataNodes'][i]['byteStop'];
      paramsWithRange['Range'] = "bytes=" + byteStartRange.toString() + "-" + byteStopRange.toString();
      //var file = require('fs').createWriteStream('testFile' + i + '.txt');
      var getFileName = nameNodeData['blockToDataNodes'][i]['key'].split('/');
      var fileData = {
        'mainFile': paramsWithRange['Key'].split('/')[1],
        'blockName': getFileName[1]
      };
      ss(dataNodeSocket).emit('sendData', stream, fileData);
      s3.getObject(paramsWithRange).createReadStream().pipe(stream);
      //dataNodeSocket.disconnect();
    }
    cb(null);
  }
});
Server Logic (that gets the data)
var dataNodeIO = require('socket.io')(server);
var ss = require("socket.io-stream");

dataNodeIO.on('connection', function(socket) {
  console.log("Successfully connected!");
  ss(socket).on('sendData', function(stream, data) {
    var IP = data['ipValue'];
    var blockName = data['blockName'];
    var mainFile = data['mainFile'];

    dataNode.makeDir(mainFile);
    dataNode.addToReport(mainFile, blockName);
    stream.pipe(fs.createWriteStream(mainFile + '/' + blockName));
  });
});
How can I properly disconnect the connections in function readFileFromS3? I have noticed that using dataNodeSocket.disconnect() at the end does not work, as I cannot verify the data was received on the 2nd server. But if I comment it out, I can see the data being streamed to the second server.
My objective is to close the connections on the client server side.
It appears that the main problem with closing the socket is that you weren't waiting for the stream to be done writing before trying to close the socket. So, because the writing is all asynchronous and finishes sometime later, you were trying to close the socket before the data had been written.
Also, because you were putting asynchronous operations inside a for loop, you were running all your operations in parallel, which may not be exactly what you want: it makes error handling more difficult and puts more load on the server.
Here's the code I would suggest that does the following:
Create a function streamFileFromS3() that streams a single file and returns a promise that will notify when it's done.
Use await in a for loop with that streamFileFromS3() to serialize the operations. You don't have to serialize them, but then you would have to change your error handling to figure out what to do if one errors while the others are already running and you'd have to be more careful about concurrency issues.
Use try/catch to catch any errors from streamFileFromS3().
Add error handling on the stream.
Change all occurrences of data['propertyName'] to data.propertyName. The only time you need to use brackets is if the property name contains a character that is not allowed in a Javascript identifier or if the property name is in a variable. Otherwise, the dot notation is preferred.
Add socket.io connection error handling logic for both socket.io connections.
Set returned status to 500 when there's an error processing the request
So, here's the code for that:
const ss = require("socket.io-stream");

router.get('/writeData', function(req, res) {
  const io = req.app.get('socketio');

  function streamFileFromS3(ip, data) {
    return new Promise((resolve, reject) => {
      const dataNodeSocket = io.connect(`http://${ip}:5000`);
      dataNodeSocket.on('connect_error', reject);
      dataNodeSocket.on('connect_timeout', () => {
        reject(new Error(`timeout connecting to http://${ip}:5000`));
      });
      dataNodeSocket.on('connect', () => {
        // dataNodeSocket connected now
        const stream = ss.createStream().on('error', reject);

        paramsWithRange.Range = `bytes=${data.byteStart}-${data.byteStop}`;
        const filename = data.key.split('/')[1];
        const fileData = {
          'mainFile': paramsWithRange.Key.split('/')[1],
          'blockName': filename
        };
        ss(dataNodeSocket).emit('sendData', stream, fileData);

        // get S3 data and pipe it to the socket.io stream
        s3.getObject(paramsWithRange).createReadStream().on('error', reject).pipe(stream);
        stream.on('close', () => {
          dataNodeSocket.disconnect();
          resolve();
        });
      });
    });
  }

  function connectError(msg) {
    res.status(500).send(`Error connecting to ${NAMENODE_ADDRESS}`);
  }

  const nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
  nameNodeSocket.on('connect_error', connectError).on('connect_timeout', connectError);
  nameNodeSocket.on('nameNodeData', async (nameNodeData) => {
    try {
      for (let item of nameNodeData.blockToDataNodes) {
        await streamFileFromS3(item.ipValue, item);
      }
      res.json("Finished Writing to DN's");
    } catch (e) {
      res.status(500).json(e);
    }
  });
});
Other notes:
I don't know what paramsWithRange is, as it is not declared here. When you were doing everything in parallel, it was getting shared among all the connections, which is asking for a concurrency issue. In my serialized implementation it's probably safe to share, but the way it is now bothers me as it's a concurrency issue waiting to happen; a per-iteration copy avoids that.
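A minimal sketch of that per-iteration copy (assuming a hypothetical baseParams object holding the bucket/key defaults) is to build a fresh params object on every call:

// Hypothetical defaults; in the real code these come from wherever
// paramsWithRange was originally constructed.
const baseParams = { Bucket: 'my-bucket', Key: 'folder/file.txt' };

function streamFileFromS3(ip, data) {
  // Each call gets its own params object, so concurrent calls can no
  // longer overwrite each other's Range header.
  const params = Object.assign({}, baseParams, {
    Range: `bytes=${data.byteStart}-${data.byteStop}`
  });
  // ... use params instead of the shared paramsWithRange ...
}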
I am just learning NodeJS and I come from a Java/Scala background.
I am writing a service that communicates with Amazon SNS and handles endpoints/tokens.
Basically there is a list of SNS applications that I have in my environment, and this list is rarely modified, so I would like to pre-load its values into a variable or constant on server startup.
The SNS-SDK provided by Amazon has this function for listing the applications:
listPlatformApplications(params, callback)
So what I naively tried to do was this:
var applications = [];

var loadApplications = function() {
  sns.listPlatformApplications({}, function(err, data) {
    if (err) {
      console.log(err);
    } else {
      return data['PlatformApplications'].map(function (app) {
        return app['PlatformApplicationArn'];
      });
    }
  });
};

loadApplications();
And basically what happens is that some calls come in before this callback finishes, when the list is still empty.
How would I go about pre-loading this data, or any other data, before the server starts responding to requests?
Or maybe my reasoning for this is wrong, and there is another, more idiomatic approach to handle this in NodeJS?
If you absolutely must use a callback instead of a promise (spoiler alert: you don't), you have to call the server startup command within the SNS callback. So you would do something like this:
var startServer = require('my-server.js');
var applications = [];

var loadApplications = function() {
  sns.listPlatformApplications({}, function(err, data) {
    if (err) {
      console.log(err);
    } else {
      var snsData = data['PlatformApplications'].map(function (app) {
        return app['PlatformApplicationArn'];
      });
      startServer(snsData);
    }
  });
};

loadApplications();
== RECOMMENDED SOLUTION ==
If instead, you can use promises (which you absolutely should!), you could start the SNS request at server start and await the result whenever needed. Your code would look something like this:
const startServer = require('my-server.js');

const loadApplications = async () => {
  const data = await sns.listPlatformApplications({}).promise();
  const snsData = data['PlatformApplications'].map(function (app) {
    return app['PlatformApplicationArn'];
  });
  return snsData;
};

const applications = loadApplications();
startServer(applications);

// inside my-server.js
const startServer = async (applications) => {
  const doSomethingWithApplications = await applications;
  ...
}
I'm facing a weird situation. I wrote an application that performs an HTTP GET request every five minutes. Something like this:
// pingy
'use strict';
var https = require('https'),
    url = require('url');

exports.ping = function ping (callback) {
  var options = url.parse('https://host.tld/ping');
  callback = callback || function () {};

  https.get(options, function handler (response) {
    var body = '';
    response
      .on('readable', function onReadable() {
        body = body + response.read();
      })
      .on('end', function onEnd() {
        return callback(null, body);
      })
      .on('error', function onError (err) {
        return callback(err);
      });
  });
};

// in other module
var pingy = require('./lib/pingy');

setInterval(pingy.ping, 300000);
Pretty straightforward. The thing is that after some time, the "rss" value from process.memoryUsage() climbs and climbs. It looks like the created ClientRequest objects are never GCed. Although I'm using https in this example, the same happens with the http module.
Do you have any idea what is wrong here?
EDIT:
I've solved the problem above (see my comment below). Now I'm facing a different problem, which is really, really hard to track down (I used node-webkit-agent to analyze the memory usage, but nothing looks special; the heap looks stable to me). The scenario is also nothing special: I'm copying roughly 200 images from source to dest via streams (see code below). What happens is that the "RSS" increases as well. I'm pretty sure that there is something wrong with my code regarding how I pipe the files. Don't get me wrong, I have no problem with high memory usage; what I have a problem with is that the memory is never freed. In order to verify that the memory will be cleared at some point in the future, I start an http.createServer after every single file has been copied. Even after a couple of hours the "rss" value stays the same.
So, well, again, do you have any idea what is wrong here? Thanks in advance for every hint! :)
'use strict';

var http = require('http'),
    fs = require('fs'),
    path = require('path'),
    util = require('util'),
    directory = '/path/to/your/files/',
    jobs = [];

function printMemoryUsage () {
  var memory = process.memoryUsage();

  memory.rss = (memory.rss / 1024) | 0;
  memory.heapTotal = (memory.heapTotal / 1024) | 0;
  memory.heapUsed = (memory.heapUsed / 1024) | 0;

  console.log(JSON.stringify(memory));
}

function pipeFile() {
  var infile = jobs.pop(),
      outfile = jobs.pop(),
      instream = fs.createReadStream(infile),
      outstream = fs.createWriteStream(outfile);

  instream.pipe(outstream);

  instream.on('close', outstream.end.bind(outstream));
  outstream.on('finish', function onFinish () {
    // console.log('Finished %s -> %s', infile, outfile);
    instream.destroy();
    outstream.destroy();
    next();
  });
}

function next() {
  if (jobs.length) {
    setTimeout(pipeFile, 2000);
  } else {
    http.createServer(function (req, res) {
      res.writeHead(200, {'Content-Type': 'text/plain'});
      res.end('Fooboot\n');
    }).listen(1337, '127.0.0.1');
  }
}

fs.readdir(directory, function (err, files) {
  files.forEach(function onIteration (file) {
    jobs.push(path.join(__dirname, file)); // outfile
    jobs.push(path.join(directory, file)); // infile
  });

  next();
});

setInterval(printMemoryUsage, 3000);
These are the memory footprints:
{"rss":13904,"heapTotal":6963,"heapUsed":1758}
{"rss":16012,"heapTotal":6963,"heapUsed":2016}
{"rss":26040,"heapTotal":6963,"heapUsed":2265}
{"rss":31468,"heapTotal":6963,"heapUsed":2453}
{"rss":41080,"heapTotal":6963,"heapUsed":2712}
{"rss":46620,"heapTotal":6963,"heapUsed":2844}
{"rss":49260,"heapTotal":6963,"heapUsed":1999}
{"rss":49524,"heapTotal":6963,"heapUsed":2249}
{"rss":49524,"heapTotal":6963,"heapUsed":2362}
{"rss":49788,"heapTotal":6963,"heapUsed":2621}
{"rss":49788,"heapTotal":6963,"heapUsed":2755}
{"rss":52692,"heapTotal":6963,"heapUsed":2001}
{"rss":52692,"heapTotal":6963,"heapUsed":2138}
{"rss":52692,"heapTotal":6963,"heapUsed":2270}
{"rss":52692,"heapTotal":6963,"heapUsed":2483}
{"rss":52692,"heapTotal":6963,"heapUsed":2600}
{"rss":52692,"heapTotal":6963,"heapUsed":2796}
{"rss":52956,"heapTotal":6963,"heapUsed":1951}
{"rss":52956,"heapTotal":6963,"heapUsed":2079}
{"rss":52956,"heapTotal":6963,"heapUsed":2343}
{"rss":52956,"heapTotal":6963,"heapUsed":2462}
{"rss":52956,"heapTotal":6963,"heapUsed":2689}
{"rss":52956,"heapTotal":6963,"heapUsed":2831}
{"rss":53136,"heapTotal":9011,"heapUsed":1927}
{"rss":53136,"heapTotal":9011,"heapUsed":2176}
{"rss":53136,"heapTotal":9011,"heapUsed":2273}
{"rss":53136,"heapTotal":9011,"heapUsed":2447}
{"rss":53136,"heapTotal":9011,"heapUsed":2545}
{"rss":53136,"heapTotal":9011,"heapUsed":2627}
{"rss":53136,"heapTotal":9011,"heapUsed":2804}
{"rss":53136,"heapTotal":9011,"heapUsed":2890}
{"rss":59732,"heapTotal":9011,"heapUsed":3100}
{"rss":65012,"heapTotal":9011,"heapUsed":3211}
{"rss":73496,"heapTotal":9011,"heapUsed":3409}
{"rss":79304,"heapTotal":9011,"heapUsed":3536}
{"rss":83792,"heapTotal":9011,"heapUsed":3633}
{"rss":95408,"heapTotal":9011,"heapUsed":3865}
{"rss":98840,"heapTotal":9011,"heapUsed":1824}
{"rss":98840,"heapTotal":9011,"heapUsed":2003}
{"rss":98840,"heapTotal":9011,"heapUsed":2205}
{"rss":98840,"heapTotal":9011,"heapUsed":2297}
{"rss":98840,"heapTotal":9011,"heapUsed":2491}
{"rss":98840,"heapTotal":9011,"heapUsed":2608}
{"rss":98840,"heapTotal":9011,"heapUsed":2717}
{"rss":98840,"heapTotal":9011,"heapUsed":2919}
{"rss":99368,"heapTotal":9011,"heapUsed":3036}
{"rss":99368,"heapTotal":9011,"heapUsed":3247}
{"rss":99632,"heapTotal":9011,"heapUsed":3351}
{"rss":99632,"heapTotal":9011,"heapUsed":3452}
{"rss":99896,"heapTotal":9011,"heapUsed":3606}
{"rss":99896,"heapTotal":9011,"heapUsed":3686}
{"rss":105968,"heapTotal":9011,"heapUsed":3824}
{"rss":106760,"heapTotal":9011,"heapUsed":1936}
{"rss":106760,"heapTotal":9011,"heapUsed":2022}
{"rss":106760,"heapTotal":9011,"heapUsed":2187}
{"rss":106760,"heapTotal":9011,"heapUsed":2279}
{"rss":106760,"heapTotal":9011,"heapUsed":2474}
{"rss":106760,"heapTotal":9011,"heapUsed":2614}
{"rss":106760,"heapTotal":9011,"heapUsed":2690}
{"rss":106760,"heapTotal":9011,"heapUsed":2854}
{"rss":106760,"heapTotal":9011,"heapUsed":2953}
{"rss":106760,"heapTotal":9011,"heapUsed":3241}
{"rss":106760,"heapTotal":9011,"heapUsed":3391}
{"rss":106760,"heapTotal":9011,"heapUsed":3535}
{"rss":107288,"heapTotal":9011,"heapUsed":3797}
{"rss":108248,"heapTotal":9011,"heapUsed":1908}
I want to stream multiple files, one after another, to the browser. To illustrate, think of having multiple CSS files which shall be delivered concatenated as one.
The code I am using is:
var directory = path.join(__dirname, 'css');

fs.readdir(directory, function (err, files) {
  async.eachSeries(files, function (file, callback) {
    if (!endsWith(file, '.css')) { return callback(); } // (1)

    var currentFile = path.join(directory, file);
    fs.stat(currentFile, function (err, stats) {
      if (stats.isDirectory()) { return callback(); } // (2)

      var stream = fs.createReadStream(currentFile).on('end', function () {
        callback(); // (3)
      });
      stream.pipe(res, { end: false }); // (4)
    });
  }, function () {
    res.end(); // (5)
  });
});
The idea is that I:
1. filter out all files that do not have the file extension .css.
2. filter out all directories.
3. proceed with the next file once a file has been read completely.
4. pipe each file to the response stream without closing it.
5. end the response stream once all files have been piped.
The problem is that only the first .css file gets piped, and all remaining files are missing. It's as if (3) would directly jump to (5) after the first (4).
The interesting thing is that if I replace line (4) with
stream.on('data', function (data) {
console.log(data.toString('utf8'));
});
everything works as expected: I see multiple files. If I then change this code to
stream.on('data', function (data) {
res.write(data.toString('utf8'));
});
all files except the first are missing again.
What am I doing wrong?
PS: The error happens using Node.js 0.8.7 as well as using 0.8.22.
UPDATE
Okay, it works if you change the code as follows:
var directory = path.join(__dirname, 'css');

fs.readdir(directory, function (err, files) {
  var concatenated = '';
  async.eachSeries(files, function (file, callback) {
    if (!endsWith(file, '.css')) { return callback(); }

    var currentFile = path.join(directory, file);
    fs.stat(currentFile, function (err, stats) {
      if (stats.isDirectory()) { return callback(); }

      var stream = fs.createReadStream(currentFile).on('end', function () {
        callback();
      }).on('data', function (data) { concatenated += data.toString('utf8'); });
    });
  }, function () {
    res.write(concatenated);
    res.end();
  });
});
But: why? Why can't I call res.write multiple times, instead of first summing up all the chunks and then writing them all at once?
Consider also using multistream, which allows you to combine and emit multiple streams one after another.
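For the CSS case in the question, a minimal sketch (assuming the directory and files variables from the question's code) could look like this:

var MultiStream = require('multistream'); // npm install multistream
var fs = require('fs');
var path = require('path');

// One readable stream per CSS file, emitted strictly in order.
var streams = files
  .filter(function (file) { return file.slice(-4) === '.css'; })
  .map(function (file) {
    return fs.createReadStream(path.join(directory, file));
  });

new MultiStream(streams).pipe(res); // ends res after the last stream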
The code was perfectly fine; it was the unit test that was wrong ...
Fixed that, and now it works like a charm :-)
May help someone else:
const fs = require("fs");
const pth = require("path");

let readerStream1 = fs.createReadStream(pth.join(__dirname, "a.txt"));
let readerStream2 = fs.createReadStream(pth.join(__dirname, "b.txt"));
let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));

// only readable streams have a "pipe" method;
// { end: false } keeps the writer open when the first stream finishes,
// and chaining on 'end' writes the files in order instead of interleaved
readerStream1.pipe(writerStream, { end: false });
readerStream1.on('end', () => {
  readerStream2.pipe(writerStream); // this pipe ends the writer when done
});
I also checked Rocco's answer and it's working like a charm:
// npm i --save multistream
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");

let streams = [
  fs.createReadStream(pth.join(__dirname, "a.txt")),
  fs.createReadStream(pth.join(__dirname, "b.txt"))
];

let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));

//new multi(streams).pipe(process.stdout);
new multi(streams).pipe(writerStream);
and to send the results to the client:
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");
const exp = require("express");

const app = exp();
app.listen(3000);

app.get("/stream", (q, r) => {
  new multi([
    fs.createReadStream(pth.join(__dirname, "a.txt")),
    fs.createReadStream(pth.join(__dirname, "b.txt"))
  ]).pipe(r);
});