How do I run multiple node streams in sequence? - node.js

Given two streams, stream1, stream2, how can I run them in sequence, throwing if any fails?
I'm looking for something simpler than this:
// Question code: run stream2 only after stream1 ends, forwarding errors
// from either stream to done(). Note stream2's 'end' handler is only
// attached once stream1 has already ended.
stream1.on('end',function(){
stream2.on('end',done);
});
stream1.on('error',function(error){done(error);});
stream2.on('error',function(error){done(error);});
Thanks.

There are several ways to do that; check the next link, which will help you write Node code in an elegant way:
Node.js FLOW
Hope it is what you need.

Depending on your use case you could combine the two streams into one by using the multistream module.
Multistreams are constructed from an array of streams
// Concatenate several readables with the third-party `multistream` module;
// the combined stream emits each source's data in order.
var MultiStream = require('multistream')
var fs = require('fs')
var streams = [
fs.createReadStream(__dirname + '/numbers/1.txt'), // contains a single char '1'
fs.createReadStream(__dirname + '/numbers/2.txt'), // contains a single char '2'
fs.createReadStream(__dirname + '/numbers/3.txt') // contains a single char '3'
]
MultiStream(streams).pipe(process.stdout) // => 123
If combining streams does not fit your use case, you can build the end-event signalling yourself:
const fs = require('fs');
// Two independent readers; onEnd (defined below) reports when both have
// ended, or as soon as either errors.
var number1 = fs.createReadStream('./numbers1.txt')
.on('data', d => console.log(d.toString()));
var number2 = fs.createReadStream('./numbers2.txt')
.on('data', d => console.log(d.toString()));
onEnd([number1, number2], function(err) {
console.log('Ended with', err);
});
/**
 * Invokes `cb` exactly once: with no arguments after every stream in
 * `streams` has emitted 'end', or with the first emitted error.
 * Fixes: strict equality instead of `==`; an empty `streams` array now
 * completes immediately instead of never calling back; `errored` is a
 * plain boolean rather than null/true.
 * @param {Array} streams - emitters of 'end' / 'error' events
 * @param {Function} cb - node-style callback, called at most once
 */
function onEnd(streams, cb) {
  const count = streams.length;
  let ended = 0;
  let errored = false;

  // Nothing to wait for — report success right away.
  if (count === 0) {
    cb();
    return;
  }

  function shouldEnd() {
    ended++;
    if (errored) { return; }
    if (count === ended) {
      cb();
    }
  }

  function endWithError(err) {
    // Only the first error is reported; later events are ignored.
    if (errored) { return; }
    errored = true;
    cb(err);
  }

  streams.forEach((s) => s
    .on('end', shouldEnd)
    .on('error', endWithError)
  );
}
The onEnd function can be used to wait for an array of streams to end, or — if an error event is emitted — to be notified with the first such error.

Try doing it with async functions:
const { createReadStream } = require("fs")
/**
 * Streams 1.txt, 2.txt and 3.txt to stdout one after another, awaiting
 * each transfer before starting the next.
 */
async function main() {
  for (const name of ["/1.txt", "/2.txt", "/3.txt"]) {
    const source = createReadStream(__dirname + name);
    await pipe(source, process.stdout);
  }
}
/**
 * Pipes `tap` into `sink` without ending the sink, resolving when the
 * source has emitted 'end' and rejecting on a source error.
 * Fixes: dropped the redundant `async` around `new Promise` (the function
 * already returns a Promise), and attaches the handlers with `once` so
 * they are removed after firing instead of accumulating.
 * @param {stream.Readable} tap - source stream
 * @param {stream.Writable} sink - destination; left open for the next source
 * @returns {Promise<void>}
 */
function pipe(tap, sink) {
  return new Promise((resolve, reject) => {
    // end: false keeps the sink open so the next source can pipe into it.
    tap.pipe(sink, { end: false })
    tap.once("end", resolve)
    tap.once("error", reject)
  })
}

Try doing it with Promises:
// Wrap each stream's completion in a Promise and chain them with .then so
// stream2 is only dealt with after stream1 has finished.
function doStream1(cb) {
// put those operation on stream1 in the callback function...
cb && cb();
var p = new Promise(function(resolve, reject) {
stream1.on( 'end', resolve );
stream1.on( 'error', reject );
});
return p;
}
function doStream2(cb) {
// some operation on stream2 on callback
cb && cb();
var p = new Promise(function(resolve, reject) {
stream2.on( 'end', resolve );
stream2.on( 'error', reject );
});
return p;
}
// NOTE(review): `cb` is never defined at this call site — presumably the
// reader is expected to supply their own callback; verify before use.
doStream1(cb).then(function() {
return doStream2(cb);
}).catch(function(e) {
// error handling is here
});

Try the code below(sequenceStream function). The thing which I am worried about is error handling, but it should work.
Also, if you use Node < 10.* then you need end-of-stream instead of stream.finished
const stream = require('stream');
const process = require('process')
const { promisify } = require('util')
/**
 * Builds a Readable that emits each item of `lst` (stringified), then EOF.
 * Fix: reads through a private cursor so the caller's array is no longer
 * mutated (the original drained it with `shift()`).
 * @param {Array} lst - values to emit, one push per read
 * @returns {stream.Readable}
 */
const fromList = (lst) => {
  let index = 0;
  return new stream.Readable({
    read() {
      if (index < lst.length) {
        this.push(String(lst[index++]))
      } else {
        // Signal end-of-stream.
        this.push(null)
      }
    }
  });
};
// Promise-returning wrapper around stream.finished.
const _finished = promisify(stream.finished)

/**
 * Concatenates `streams` into a single PassThrough: each source is piped in
 * only after the previous one has finished, and only the last source is
 * allowed to end the output.
 * Fix: a source error now destroys the resulting stream with that error;
 * the original called `resultingStream.write(err)`, which throws a
 * TypeError because an Error object is not a valid chunk.
 * @param {stream.Readable[]} streams - sources to concatenate in order
 * @returns {stream.PassThrough}
 */
const sequenceStream = (streams) => {
  const resultingStream = new stream.PassThrough()
  let isNext = Promise.resolve()
  for (const [i, curStream] of streams.entries()) {
    isNext = isNext.then(() => {
      // Only the final source may close the combined stream.
      curStream.pipe(resultingStream, { end: i === streams.length - 1 })
      return _finished(curStream)
    }).catch((err) => {
      resultingStream.destroy(err)
    })
  }
  return resultingStream
}
// Demo: the three sources are emitted back-to-back through the combined
// stream, printing "12345678910".
sequenceStream([
fromList([1, 2, 3, 4]),
fromList([5, 6, 7, 8]),
fromList([9, 10])
]).pipe(process.stdout)

Related

Nodejs Stream chaining promises write only the first one

i'm trying to make a Stream that is writing to multiples files using an async Generator.
Using reduce promises are executed sequentially so the desired output is :
$ wc -l *
42 f1.csv
98 f2.csv
78 f3.csv
but i'm getting the following result :
$ wc -l *
42 f1.csv // only the first one is good
0 f2.csv // nothing in
0 f3.csv // same
AsyncGenerator() is a ElasticSearch Scroll Search (Doc)
I send him different query on each promise.
const { Readable } = require('stream');
const fs = require('fs');
const util = require('util');
const stream = require('stream');
// Promise-returning stream.pipeline, usable with await.
const pipeline = util.promisify(stream.pipeline);
/**
 * Async generator over an ElasticSearch scroll search: yields every hit of
 * the initial response, then keeps scrolling until a page comes back empty
 * or a response carries no _scroll_id.
 */
async function* Query(params) {
  let response = await ESClient.search(params);
  for (;;) {
    const sourceHits = response.hits.hits;
    if (sourceHits.length === 0) {
      break;
    }
    yield* sourceHits;
    if (!response._scroll_id) {
      break;
    }
    response = await ESClient.scroll({
      scrollId: response._scroll_id,
      scroll: params.scroll
    });
  }
}
/**
 * Wraps Query with the fixed apache-logs scroll parameters and re-yields
 * everything it produces.
 */
async function* AsyncGenerator() {
  yield* Query({
    index: 'apache-logs',
    scroll: '5s',
    _source: ["foo"],
    size: 2048,
    body: { query } // ElasticSearch DSL Query
  });
}
/**
 * Streams the generator output through the json2csv transform into
 * `<file>.csv`, resolving when the pipeline has fully flushed.
 * Fix: the original wrapped the awaited pipeline in `new Promise(async ...)`,
 * so a pipeline failure surfaced as an unhandled rejection and the returned
 * promise never settled; awaiting the pipeline directly lets errors
 * propagate to the caller.
 * @param {string} file - output file name without extension
 * @returns {Promise<void>}
 */
async function WriteToFile(file) {
  const input = Readable.from(AsyncGenerator());
  const output = fs.createWriteStream(file + '.csv');
  await pipeline(
    input,
    json2csv,
    output
  );
}
const lists = ["f1", "f2", "f3"];
// Chain the writes sequentially: each reduce step awaits the previous
// file's promise before starting the next write.
const last = lists.reduce(async (previous, next) => {
await previous;
return WriteToFile(next);
}, Promise.resolve());
last.then(_ => {
console.log("DONE");
}).catch((e) => {
console.log(e);
});
(Lorem ipsum lalalaalalal (sry stackoverflow is asking to provide more details but i dont have lol))
Hi, I am not sure because my knowledge of Node.js is limited, but I think you are only calling the function which creates your .csv once:
// (The asker's reduce chain, quoted by the answer for reference.)
const lists = ["f1", "f2", "f3"];
const last = lists.reduce(async (previous, next) => {
await previous;
return WriteToFile(next);
}, Promise.resolve());

Get value out of function in Node.js

I don't know about the correct title for my problem. I just need a value going out of a function, just like return but I think it's not same.
i have code snippet from controller in adonisjs framework:
// Question code: zlib.unzip is asynchronous, so nmeadata is still "" when
// view.render runs below — this is the bug being asked about.
var nmeadata = "";
jsondata.forEach(element => {
var x = element.nmea
var buff = new Buffer(x, 'base64')
zlib.unzip(buff, (err, res) => {
if(err)
{
//
}
else
{
nmeadata += "greed island"
nmeadata += res.toString()
}
})
});
// NOTE(review): this renders before any unzip callback has fired.
return view.render('admin.index', {
data: datanmea.toJSON(),
nmea: nmeadata
})
I need the unzipped string data that the zlib callback appends to nmeadata, so I can send it to the view. But at the moment I cannot even display a simple output like 'greed island' in my view.
thank you.
UPDATE
Still not working after using promises:
// First attempt using an array of promises (reported as still not working;
// the closing brace of the class is missing from this fragment).
class NmeaController {
async index({view})
{
const datanmea = await NmeaModel.all()
const jsondata = datanmea.toJSON()
var promises = [];
var nmeadata = "";
jsondata.forEach(element => {
promises.push(
new Promise(resolve => {
let x = element.nmea
let buff = new Buffer(x, 'base64')
zlib.unzip(buff,
(err, res) => {
if (err) {
//
} else {
nmeadata += "test add text"
// nmeadata += res.toString()
}
// I also tried resolve() and resolve("any text")
resolve(nmeadata);
})
}
)
)
});
// NOTE(review): all promises are awaited here, yet the asker reports the
// rendered output is still empty — the cause is not visible in this code.
await Promise.all(promises);
return view.render('admin.index', {
data: datanmea.toJSON(),
nmea: nmeadata
});
}
UPDATE AUGUST 22 2019
i'm already tried solution from maksbd19 but still not working
// Second attempt, following maksbd19's suggestion: each promise resolves
// with its own string and the results are joined after Promise.all.
class NmeaController {
async index({view})
{
const datanmea = await NmeaModel.all()
const jsondata = datanmea.toJSON()
var promises = [];
var nmeadata = "";
jsondata.forEach(element => {
promises.push(
new Promise(resolve => {
let x = element.nmea
let buff = new Buffer(x, 'base64')
zlib.unzip(buff,
(err, res) => {
if (err) {
// since you are interested in the text only, so no need to reject here
return resolve("");
}
return resolve("greed island")
})
}
)
)
});
const result = await Promise.all(promises); // this will be an array of results of each promises respectively.
nmeadata = result.join(""); // process the array of result
return view.render('admin.index', {
data: datanmea.toJSON(),
nmea: nmeadata
});
}
}
I'd suggest two things-
modify zlib.unzip callback function to resolve properly;
// Replacement zlib.unzip callback: settle the promise exactly once in
// every branch by returning the resolve call.
(err, res) => {
if (err) {
// since you are interested in the text only, so no need to reject here
return resolve("");
}
return resolve(res.toString())
}
retrieve the final data from the result of Promise.all
// Collect every resolved string, then concatenate them in order.
const result = await Promise.all(promises); // this will be an array of results of each promises respectively.
nmeadata = result.join(""); // process the array of result
In this approach every promise will resolve and finally you will get the expected result in array.

how to stop process nodejs within promises

I've created nodejs to trigger(with cronjobs) firebase realtime database as follow:
// Firebase Admin database handle and the collection root being scanned.
var db = admin.database();
var ref = db.ref('myusers');
// Per-user update promises, filled inside the `once` callback below.
var promises = [];
/**
 * Unlocks a user that has been locked (isLocked === 1) for more than five
 * minutes by setting isLocked back to 2.
 * Fix: the original resolved 'done' immediately, before the Firebase write
 * had completed, so Promise.all (and any process.exit after it) could run
 * while updates were still in flight; the promise now settles only after
 * the update finishes.
 * @param {number} isLocked - lock state flag (1 = locked)
 * @param {*} locked - moment-parsable timestamp of when the lock was taken
 * @param {string} msisdn - user key under 'myusers'
 * @returns {Promise<string>} resolves with 'done'
 */
function updateUnlocked(isLocked, locked, msisdn) {
  if (isLocked === 1) {
    var lockedMinutes = moment().diff(moment(locked), 'minutes');
    if (lockedMinutes > 5) {
      // Firebase's update() returns a promise; chain on it so completion
      // is only reported once the write has been committed.
      return db.ref('myusers/' + msisdn)
        .update({isLocked: 2})
        .then(function () { return 'done'; });
    }
  }
  return Promise.resolve('done');
}
// NOTE(review): Promise.all below runs synchronously, before this async
// 'value' callback has populated `promises`, so it resolves with an empty
// array — the answers below address exactly this.
ref.once('value', function(snapshot) {
snapshot.forEach(childSnapshot => {
promises.push(updateUnlocked(childSnapshot.val().isLocked, childSnapshot.val().locked, childSnapshot.key));
});
});
Promise.all(promises).then(function(data) {
console.log(data);
}).catch(function(err) {
console.log('error');
});
Please let me know where can I add process.exit(). Thanks.
You must wait for the "once" callback to get executed. Else the promise array is empty and the process could exit immediately.
// Answer: move Promise.all inside the `once` callback so the promise array
// is fully populated before it is awaited; exit only after it settles.
var db = admin.database();
var ref = db.ref('myusers');
function updateUnlocked(isLocked, locked, msisdn) {
// (body elided in the original answer)
...
}
ref.once('value', function(snapshot) {
// NOTE(review): DataSnapshot exposes forEach rather than map — presumably
// the intent is forEach + push into an array; verify before use.
const promises = snapshot.map(childSnapshot => {
return updateUnlocked(childSnapshot.val().isLocked, childSnapshot.val().locked, childSnapshot.key);
})
Promise.all(promises).then(() => {
console.log('done')
process.exit(0)
}).catch(err => {
console.log('error', err)
process.exit(1)
})
});
Demonstrating the control flow.
// Demonstrates the control flow: the promise array is built inside the
// (simulated) async callback, so Promise.all sees every promise.
setTimeout(() => {
  const values = [1, 2, 3]
  const pending = values.map((value) => {
    return new Promise(resolve => resolve(value))
  })
  Promise.all(pending).then(() => {
    console.log('done. process.exit(0) here')
  })
}, 200)
If you want to exit on successful completion then refer below code:
// Exit with status 0 once every update promise has settled successfully.
Promise.all(promises).then(function(data) {
console.log(data);
process.exit(0);
}).catch(function(err) {
console.log('error');
});
If you want to exit on error as well then add process.exit(1) in catch block.

Unable to get callback from function

Trying to get the variable random_song from the function functions.random_song
Function:
// Question code: the `return` below returns from the fs.readFile callback,
// not from random_song itself, so calling random_song always yields
// undefined — this is the bug being asked about.
functions.random_song = () => {
fs.readFile('auto_playlist.txt', 'utf8', function(err, data) {
if (err) {
// `.red` presumably comes from a string-coloring package — verify.
console.log(`${`ERR`.red} || ${`Error fetching song from auto playlist ${err}`.red}`);
}
let songs = data.split('\n');
songs.splice(-1, 1);
var random = Math.floor(Math.random()*songs.length);
let random_song = (songs[random]);
return random_song;
})
}
Attempting to callback the random song
// Attempted callback-style call — but the definition above takes no
// parameters, so the arrow function is never invoked.
functions.random_song(song => {
console.log(song)
})
The code just returns undefined. Any ideas?
Your function:
// Callback-based fix: accept a node-style callback and invoke it from
// inside the fs.readFile callback.
functions.random_song = (callback) => {
fs.readFile('auto_playlist.txt', 'utf8', function(err, data) {
if (err) {
// Do stuff
return callback(err);
}
// Do stuff
// NOTE(review): random_song must be computed from `data` here — the
// answer elides that step.
callback(null, random_song);
})
}
And then call it:
// Node-style invocation: first argument is the error, second the song.
functions.random_song((err, song) => {
// Do stuff
});
You may want to read more about using Promise/async-await instead of callback.
// Synchronous variant: read the playlist, drop the trailing empty line
// after the final newline, and return one random entry.
functions.random_song = () => {
  const lines = fs.readFileSync('auto_playlist.txt', 'utf8').split('\n');
  lines.splice(-1, 1);
  const pick = Math.floor(Math.random() * lines.length);
  return lines[pick];
}
console.log(functions.random_song())
got it working, thanks for the help <3
Following is the code which use Promise to handle file read and data processing task asynchronously
const fs = require('fs')
const functions = {}

/**
 * Resolves with one random line of auto_playlist.txt (the trailing empty
 * line after the final newline is discarded), or rejects with the fs error.
 * Fix: the error branch now returns after reject(); the original fell
 * through and crashed on `data.split` with `data` undefined.
 * @returns {Promise<string>}
 */
functions.random_song = () => {
  return new Promise((resolve, reject) => {
    fs.readFile('auto_playlist.txt', 'utf8', function (err, data) {
      if (err) {
        console.log(err)
        return reject(err)
      }
      let songs = data.split('\n');
      songs.splice(-1, 1);
      var random = Math.floor(Math.random() * songs.length);
      resolve(songs[random]);
    })
  })
}

functions.random_song()
  .then(song => console.log('Song Name', song))
  .catch(err => console.error('Error fetching songs', err))
// Runs before the promise settles — readFile is asynchronous.
console.log('It will be executed before promise is resolved')
fs.readFile is asynchronous, so your function returns before the file has been read. Use fs.readFileSync instead, like this:
// Synchronous answer: readFileSync guarantees the data is available before
// the function returns, so a plain return works.
functions.random_song = () => {
  const contents = fs.readFileSync('auto_playlist.txt', 'utf8');
  const songs = contents.split('\n');
  songs.splice(-1, 1);
  const index = Math.floor(Math.random() * songs.length);
  return songs[index];
}

Nodejs check file exists, if not, wait till it exist

I'm generating files automatically, and I have another script which will check if a given file is already generated, so how could I implement such a function:
function checkExistsWithTimeout(path, timeout)
which will check if a path exists, if not, wait for it, util timeout.
Assuming you're planning on using Promises since you did not supply a callback in your method signature, you could check if the file exists and watch the directory at the same time, then resolve if the file exists, or the file is created before the timeout occurs.
/**
 * Resolves once filePath is readable — checked immediately via fs.access
 * and thereafter via a watcher on the containing directory — or rejects
 * after `timeout` ms if the file never appears.
 */
function checkExistsWithTimeout(filePath, timeout) {
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => {
      watcher.close();
      reject(new Error('File did not exists and was not created during the timeout.'));
    }, timeout);

    // Shared success path: cancel the deadline, stop watching, resolve.
    const settle = () => {
      clearTimeout(timer);
      watcher.close();
      resolve();
    };

    fs.access(filePath, fs.constants.R_OK, (err) => {
      if (!err) {
        settle();
      }
    });

    const watched = path.basename(filePath);
    const watcher = fs.watch(path.dirname(filePath), (eventType, filename) => {
      if (eventType === 'rename' && filename === watched) {
        settle();
      }
    });
  });
}
fs.watch() API is what you need.
Be sure to read all the caveats mentioned there before you use it.
import fs from 'node:fs'; //es6
//or
const fs = require('fs'); //commonjs
/**
 * Polls every 100 ms until filePath exists as a regular file or the
 * timeout elapses.
 * @param {String} filePath
 * @param {Number} timeout - milliseconds; values below 1000 are raised to 1000
 * @returns {Promise<Boolean>} true if the file appeared, false on timeout
 */
/**
 * Waits for filePath to exist as a regular file, polling every 100 ms.
 * Fixes: the existence check now runs before the deadline check, so a file
 * that appears right at the timeout is reported as found (the original
 * resolved false first in that tick); fs errors inside the interval are
 * handled (the original try/catch could never catch them); the timeout
 * parameter is no longer mutated.
 * @param {String} filePath
 * @param {Number} timeout - ms to wait; values below 1000 are raised to 1000
 * @returns {Promise<Boolean>} true once the file exists, false on timeout
 */
const holdBeforeFileExists = async (filePath, timeout) => {
  const limit = timeout < 1000 ? 1000 : timeout // keep the original 1 s floor
  return new Promise(resolve => {
    let waited = 0
    const probe = () => {
      let found = false
      try {
        found = fs.existsSync(filePath) && fs.lstatSync(filePath).isFile()
      } catch (error) {
        // Treat fs errors as "not there yet" rather than crashing the timer.
        found = false
      }
      if (found) {
        clearInterval(inter)
        resolve(true)
        return
      }
      waited = waited + 100
      if (waited >= limit) {
        clearInterval(inter)
        // maybe exists, but my time is up!
        resolve(false)
      }
    }
    const inter = setInterval(probe, 100)
  })
}
// Usage: wait up to 3 s for the generated file before acting on it.
(async()=>{
const maxTimeToCheck = 3000; //3 second
const fileCreated = '/path/filename.ext';
const isFile = await holdBeforeFileExists(fileCreated, maxTimeToCheck);
//Result boolean true | false
})();
It works well — try it before leaving negative comments.
express js:
// Express usage: hold the response until the generated file exists, then
// send it, or report failure after the 3 s budget.
router.get('some_url', async(req, res)=>{
const fileCreated = someFunctionCreateFileWithResultStringPathName();
const maxTimeToCheck = 3000; //3 second
const isFile = await holdBeforeFileExists(fileCreated, maxTimeToCheck);
if(isFile){
res.sendFile(fileCreated)
} else {
res.send('Failed to generate file, because use a bad function to generate file. or too long to create a file');
}
})
Here is the solution:
// Wait for file to exist, checks every 2 seconds by default
function getFile(path, timeout=2000) {
const intervalObj = setInterval(function() {
const file = path;
const fileExists = fs.existsSync(file);
console.log('Checking for: ', file);
console.log('Exists: ', fileExists);
if (fileExists) {
clearInterval(intervalObj);
}
}, timeout);
};
You could implement it like this if you have node 6 or higher.
const fs = require('fs')
/**
 * Polls for `path` (via fs.access) every timeout/6 ms and resolves with the
 * path when it becomes accessible; rejects with a PATH_CHECK_TIMED_OUT
 * error after `timeout` ms.
 * Fix: the timeout handler called clearTimeout on an undefined `timerId`,
 * throwing a ReferenceError instead of rejecting; it now cancels the
 * pending poll timer (`intervalTimerId`).
 * @param {string} path
 * @param {number} timeout - total ms to wait
 * @returns {Promise<string>}
 */
function checkExistsWithTimeout(path, timeout) {
  return new Promise((resolve, reject) => {
    const timeoutTimerId = setTimeout(handleTimeout, timeout)
    const interval = timeout / 6
    let intervalTimerId

    function handleTimeout() {
      // was: clearTimeout(timerId) — an undefined name
      clearTimeout(intervalTimerId)
      const error = new Error('path check timed out')
      error.name = 'PATH_CHECK_TIMED_OUT'
      reject(error)
    }

    function handleInterval() {
      fs.access(path, (err) => {
        if(err) {
          // Not there yet: schedule the next poll.
          intervalTimerId = setTimeout(handleInterval, interval)
        } else {
          clearTimeout(timeoutTimerId)
          resolve(path)
        }
      })
    }

    intervalTimerId = setTimeout(handleInterval, interval)
  })
}
Here another version that works for me :
/**
 * Polls every timeout/10 ms for `path`; resolves true as soon as the file
 * exists, or false once the accumulated wait reaches `timeout` ms.
 */
async function checkFileExist(path, timeout = 2000) {
  const step = timeout / 10;
  let elapsed = 0;
  return new Promise((resolve) => {
    const poller = setInterval(() => {
      elapsed += step;
      const exists = fs.existsSync(path);
      if (exists || elapsed >= timeout) {
        clearInterval(poller);
        resolve(exists);
      }
    }, step);
  });
}
You can simply use it :
await checkFileExist("c:/tmp/myfile.png");
// Checks that a file with the given extension appears in the current
// user's Downloads folder. `browser.sleep` is presumably Protractor —
// verify in the surrounding test harness.
function verifyFileDownload(extension) {
browser.sleep(150000); //waiting for file to download
const fs = require('fs');
let os = require('os');
var flag = true;
console.log(os.userInfo());
fs.readdir('/Users/' + require("os").userInfo().username + '/Downloads/', (error, file) => {
if (error) {
throw error;
}
console.log('File name' + file);
for (var i = 0; i < file.length; i++) {
const fileParts = file[i].split('.');
const ext = fileParts[fileParts.length - 1];
if (ext === extension) {
flag = false;
}
}
if (!flag) {
return;
}
// NOTE(review): `error` is null on this success path, so this throws
// null as a failure sentinel — confirm that is intended.
throw error;
});
};
This is very much a hack, but works for quick stuff.
// Busy-waits for `ms` milliseconds. Blocks the event loop — as the answer
// says, a hack suitable only for quick experiments.
function wait (ms) {
  const deadline = Date.now() + ms;
  while (Date.now() < deadline) {
    // spin until the deadline passes
  }
}

Resources