Exit process when pipe finishes - node.js

I'm trying to import users from a CSV file with node.js on the CLI.
But I don't know how to exit the program properly when all the tasks are complete.
Here is my code, using Sails.js and node-csv:
"use strict";
const Sails = require('sails');
const fs = require('fs');
const csv = require('csv');

const filename = './test.csv';

Sails.load({
  models: { migrate: 'safe' },
  log: { level: 'info' },
  hooks: { grunt: false, sockets: false, pubsub: false },
}, (errSails, sails) => {
  if (errSails) process.exit(1);

  let input = fs.createReadStream(filename);
  let parser = csv.parse();
  let transformer = csv.transform((record) => {
    sails.log.debug('record : ', record);
    sails.log.info('Transformer : searching user...');
    return User.findOne({ email: record.email })
      .then((user) => {
        sails.log.info('Transformer : search complete');
        if (!user) {
          sails.log.info('Transformer : no users found');
          return User.create(record);
        }
        return record;
      })
      .catch((err) => sails.log.error(err));
  });

  parser
    .on('readable', () => sails.log.info('Parser readable'))
    .on('finish', () => sails.log.info('Parser finish'))
    .on('error', (err) => sails.log.error('Parser error : ', err));

  transformer
    .on('finish', () => sails.log.info('Transformer finish'))
    .on('error', (err) => sails.log.error('Transformer error : ', err));

  input.pipe(parser).pipe(transformer).pipe(process.stdout);

  //--- Exit program
  // process.exit();
});
Here is what appears in the terminal:
debug: record : { ... }
info: Transformer : searching user...
info: Parser readable
info: Parser finish
info: Transformer finish
info: Transformer : search complete
info: Transformer : no users found
Without process.exit();, the program continues forever.
If I uncomment the process.exit(); line, the program terminates immediately without parsing anything. But if I put this line inside the transformer's finish event, the program exits right after info: Transformer finish, without the last two lines.
I want to know how and where I should put process.exit(); so that I can be sure all the users in my CSV file have been created in the database before the program exits.

First, don't call process.exit() when an error happens; use throw new Error instead. Read more here.
Now, about your problems. The second one: you have an asynchronous flow, but you put your exit command outside of it, which is why the program exits before anything is parsed. For the first problem, to exit when everything is done, move the exit into the transformer's finish handler, something like this:
.on('finish', () => {
  sails.log.info('Transformer finish');
  process.exit(0); // exit code 0: success
})
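Note that, as your log output shows, the transformer's finish event can fire before the pending User.findOne()/User.create() promises settle. Since the transform handler already returns a promise per record, one option (a minimal sketch of my own, not from the original answer, and untested against Sails) is to collect those promises and only exit once they have all resolved:
const pending = []; // one promise per CSV record

let transformer = csv.transform((record) => {
  const p = User.findOne({ email: record.email })
    .then((user) => user ? record : User.create(record))
    .catch((err) => sails.log.error(err));
  pending.push(p);
  return p;
});

transformer.on('finish', () => {
  // 'finish' only means parsing is done; wait for the database work too
  Promise.all(pending).then(() => process.exit(0));
});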

Related

How can I send an "end" event to a Node.js stream

I'm working with a Pelias project, using the wof-admin-lookup package to handle data read from a file.
In one case there is no valid data to push to the stream, and wof-admin-lookup never ends.
Here is my code:
const stream = recordStream.create(filePath)
  .pipe(blacklistStream())
  .pipe(adminLookup.create())
  .pipe(model.createDocumentMapperStream())
  .pipe(peliasDbclient())

stream
  .on('data', data => {
    count++
  })
  .on('finish', () => {
    console.log(`Imported ${count} addresses`)
    resolve()
  })
  .on('error', (e) => {
    reject(e)
  })
Here is the code in wof-admin-lookup:
module.exports = function(pipResolver, config) {
  if (!pipResolver) {
    throw new Error('valid pipResolver required to be passed in as the first parameter');
  }

  // pelias 'imports.adminLookup' config section
  config = config || {};

  const pipResolverStream = createPipResolverStream(pipResolver, config);
  const end = createPipResolverEnd(pipResolver);

  const stream = parallelTransform(config.maxConcurrentReqs || 1, pipResolverStream);
  stream.on('end', end);

  return stream;
};
Although the console logs "Imported 0 addresses", the pipResolverStream stays open forever unless I shut it down manually with Ctrl+C.
Update: this only happens when no data passes through the stream.
"the end event will never trigger without something like < /dev/null to generate that EOF. Otherwise the program waits for the terminal to send a ^D."
node.js: How to detect an empty stdin stream?
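In other words, a downstream finish event only fires after the source signals EOF. A minimal standalone sketch (my own, not Pelias-specific) demonstrating this:
const { Readable, PassThrough } = require('stream');

const source = new Readable({ read() {} }); // never produces data on its own
const sink = new PassThrough();

source.pipe(sink);
sink.on('finish', () => console.log('finish fired'));

// Without this line the process hangs and 'finish' never fires:
source.push(null); // signal EOF
So if your record source can legitimately produce zero records, make sure it still ends (pushes null) rather than staying open.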

MongoDB change stream times out if the database is down for some time

I am using a MongoDB change stream in nodejs. Everything works fine, but if the database goes down and takes more than 5 seconds to come back up, the change stream throws a timeout error. Here is my change stream watcher code:
Service.prototype.watcher = function(db) {
  let collection = db.collection('tokens');
  let changeStream = collection.watch({ fullDocument: 'updateLookup' });
  let resumeToken, newChangeStream;

  changeStream.on('change', next => {
    resumeToken = next._id;
    console.log('data is ', JSON.stringify(next));
    changeStream.close();
    // console.log('resumeToken is ', JSON.stringify(resumeToken))
    newChangeStream = collection.watch({ resumeAfter: resumeToken });
    newChangeStream.on('change', next => {
      console.log('insert called ', JSON.stringify(next));
    });
  });
};
However, on the database end I have handled disconnects and reconnects using this code:
this.db.on('reconnected', function () {
  console.info('MongoDB reconnected!');
});

this.db.on('disconnected', function() {
  console.warn('MongoDB disconnected!');
});
But I am not able to make the change stream watcher stop when the database is down and start again when it reconnects. Is there any better way to do this?
What you want to do is to encapsulate the watch() call in a function. This function will then call itself on error, to rewatch the collection using a previously saved resume token. What is missing from the code you have is the error handler. For example:
const MongoClient = require('mongodb').MongoClient
const uri = 'mongodb://localhost:27017/test?replicaSet=replset'

var resume_token = null

run()

function watch_collection(con, db, coll) {
  console.log(new Date() + ' watching: ' + coll)
  con.db(db).collection(coll).watch({resumeAfter: resume_token})
    .on('change', data => {
      console.log(data)
      resume_token = data._id
    })
    .on('error', err => {
      console.log(new Date() + ' error: ' + err)
      watch_collection(con, db, coll) // rewatch with all three arguments
    })
}

async function run() {
  con = await MongoClient.connect(uri, {"useNewUrlParser": true})
  watch_collection(con, 'test', 'test')
}
Note that watch_collection() contains the watch() method along with its handler. On change, it will print the change and store the resume token. On error, it will call itself to rewatch the collection again.
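One possible refinement (my suggestion, not part of the original answer): delay the rewatch slightly so that a database that stays down does not trigger a tight retry loop:
.on('error', err => {
  console.log(new Date() + ' error: ' + err)
  // back off for a second before rewatching
  setTimeout(() => watch_collection(con, db, coll), 1000)
})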
This is the solution I developed. Add a stream.on('error') handler so the process will not crash when there is an error, and restart the stream on database reconnect. Also save the resume token to a file on every event; this helps when the application crashes or is stopped and later restarted. If x records were added in the meantime, on restart you just read the last resume token from the file and start the watcher from there, so it picks up every record inserted after that point and none are missed. Here is the code:
var rsToken;
try {
  rsToken = await this.getResumetoken();
} catch (error) {
  rsToken = null;
}

if (!rsToken)
  changeStream = collection.watch({ fullDocument: 'updateLookup' });
else
  changeStream = collection.watch({ fullDocument: 'updateLookup', resumeAfter: rsToken });

changeStream.on('change', next => {
  resumeToken = next._id;
  this.saveTokenInfile(resumeToken);
  cs_processor.process(next);
});

changeStream.on('error', err => {
  console.log('changestream error');
});
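The getResumetoken() and saveTokenInfile() helpers are not shown in the answer. A minimal sketch of what they might look like, persisting the token as JSON via fs (the file path is an assumption):
const fs = require('fs').promises;
const TOKEN_FILE = './resume-token.json'; // hypothetical location

async function getResumetoken() {
  // Throws if the file does not exist yet; the caller above falls back to null
  const raw = await fs.readFile(TOKEN_FILE, 'utf8');
  return JSON.parse(raw);
}

function saveTokenInfile(token) {
  // Fire-and-forget write; losing one token at worst replays a few events
  fs.writeFile(TOKEN_FILE, JSON.stringify(token));
}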

How can I test that a function fails with an error, using chai and mocha

Hello guys, I'm a newbie with testing.
I'm using socket.io and I want to simulate my function throwing an error when something goes wrong during the insertion.
socket.on('request', function(request) {
  bookingService.addBooking(request)
    .then(function (booking) {
      winston.info('Client Order saved');
      io.emit('order to driver', {user: request[2], random: request[3]});
    })
    .catch(function (err) {
      winston.error(' Client error on save order ==> ' + err);
    });
});
addBooking
function addBooking(msgParam) {
  var deferred = Q.defer();
  db.booking.insert(
    {
      user: msgParam[2],
      adress: msgParam[0],
      random: msgParam[3],
      driver: [],
      isTaken: false,
      isDone: false,
      declineNbr: 0,
      createdOn: msgParam[1],
      createdBy: msgParam[2],
      updatedOn: null,
      updatedBy: []
    },
    function (err, doc) {
      if (err) return deferred.reject(err.name + ': ' + err.message);
      deferred.resolve();
    });
  return deferred.promise;
}
I tried to just test the addBooking function:
it('should throw an error if something wrong happened on adding a new order', function(done) {
  (bookingService.addBooking(request)).should.throw();
  done();
});
but I get this error
AssertionError: expected { state: 'pending' } to be a function
You can use the following syntax with chai:
it("throw test", () => {
expect(()=>{myMethodThatWillThrowError()}).to.throw();
});
For promises, you can use the following pattern:
it("should throw on unsuccessfull request", (done: MochaDone) => {
repo.patch({
idOrPath: "Root/Sites/Default_Site",
content: ConstantContent.PORTAL_ROOT,
}).then(() => {
done("Should throw"); // The test will fail with the "Should throw" error
}).catch(() => {
done(); // The test will report success
});
});
You are checking the promise and not the result
This error:
AssertionError: expected { state: 'pending' } to be a function
... means you are checking the promise returned from the addBooking function and not the resolved/rejected result of the promise.
With chai-as-promised you can do that easily!
For example, these should work (from the documentation):
return promise.should.be.rejected;
return promise.should.be.rejectedWith(Error); // other variants of Chai's `throw` assertion work too.
or in your specific case (after installing and connecting chai-as-promised), this should work:
(bookingService.addBooking(request)).should.be.rejected
(maybe should.throw() will work with chai-as-promised too, I'm less familiar with it)
Check it out here: chai-as-promised
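For completeness, the full wiring might look like this (a sketch; badRequest stands in for whatever invalid payload triggers the rejection):
const chai = require('chai');
const chaiAsPromised = require('chai-as-promised');

chai.use(chaiAsPromised);
chai.should();

it('should reject if something goes wrong on adding a new order', function() {
  // Returning the assertion hands the promise to mocha, so no done() is needed
  return bookingService.addBooking(badRequest).should.be.rejected;
});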

Using Electron / Node.js, how can I detect all installed browsers on macOS?

I'm trying to find a way to list all installed web browsers in my macOS Electron app. What would be the best way to do this? Or... I'm happy to maintain a list of possible browsers, but I need a way to check that they are present.
You'll need to create a child process which executes a command to retrieve the currently installed applications. Luckily, macOS offers the system_profiler utility for exactly that, and even better, it supports XML export via the -xml argument. But be aware it is far from the fastest call.
You'll need to gather the buffer chunks from the subprocess callback, encode them as UTF-8, and then parse the XML string with something like xml2js. After that it is a simple check of whether each browser's name appears in the result.
Updated code by Will Stone
import jp from 'jsonpath' // for easier json traversal
import { spawn } from 'child_process'
import parser from 'xml2json'

const sp = spawn('system_profiler', ['-xml', 'SPApplicationsDataType'])

let profile = ''
const browsers = [
  'Brave',
  'Chromium',
  'Firefox',
  'Google Chrome',
  'Maxthon',
  'Opera',
  'Safari',
  'SeaMonkey',
  'TorBrowser',
  'Vivaldi'
]

sp.stdout.setEncoding('utf8')
sp.stdout.on('data', data => {
  profile += data // gather chunked data
})
sp.stderr.on('data', data => {
  console.log(`stderr: ${data}`)
})
sp.on('close', code => {
  console.log(`child process exited with code ${code}`)
})
sp.stdout.on('end', function() {
  profile = parser.toJson(profile, { object: true })
  const installedBrowsers = jp
    .query(profile, 'plist.array.dict.array[1].dict[*].string[0]')
    .filter(item => browsers.indexOf(item) > -1)
  console.log(installedBrowsers)
  console.log('Finished collecting data chunks.')
})
Initial code:
const { spawn } = require('child_process');
const xml2js = require('xml2js');

const parser = new xml2js.Parser();
const sp = spawn('system_profiler', ['-xml', 'SPApplicationsDataType']);

sp.stdout.on('data', (data) => {
  parser.parseString(data, function(err, result) {
    console.log(result)
  });
});
sp.stderr.on('data', (data) => {
  console.log(`stderr: ${data}`);
});
sp.on('close', (code) => {
  console.log(`child process exited with code ${code}`);
});

createWriteStream 'close' event not being triggered

I am trying to extract images from a csv file by doing the following:
1. Parsing/streaming in a large CSV file using csv-parse and the fs createReadStream method
2. Grabbing each line for processing using stream-transform
3. Extracting the image and other row data for processing using the async waterfall method
4. Downloading and writing the image to the server using request and the fs createWriteStream method
For some reason, after the data gets piped into createWriteStream, there is some event in which an async callback never gets called. I have run this same code using only request, without piping to createWriteStream, and it works. I've also run createWriteStream with a drain event, and then somehow it works? Can anyone explain this to me?
In the code below, request tries to pipe 14,970 images, but the createWriteStream close or finish events only fire 14,895 times, with error firing 0 times. Could this be a draining issue? Could highWaterMark be exceeded, with a write failure occurring undetected?
Here is my CSV line-handling code:
var first = true;
var parser = parse();
var transformer = transform((line, complete) => {
  if (!first)
    extractData(line, complete)
  else {
    first = false;
    complete(null);
  }
},
() => {
  console.log('Done: parseFile');
});

fs.createReadStream(this.upload.location).pipe(parser).pipe(transformer);
And here is the extractData function that doesn't always invoke its required async callback:
extractData(line, complete) {
  var now = new Date();
  var image = {
    createdAt: now,
    updatedAt: now
  };

  async.waterfall([
    next => { // Data Extraction
      async.forEachOf(line, (data, i, complete) => {
        if (i === 2) image.src = data;
        if (i === 3) image.importSrc = data;
        complete(null);
      }, err => {
        if (err) throw err;
        next(null);
      });
    },
    next => { // Download Image
      var file = fs.createWriteStream('public/' + image.src);
      var sendReq = request.get(image.importSrc);

      sendReq.on('response', response => {
        if (response.statusCode !== 200) {
          this.upload.report.image.errors++;
          return next(null);
        }
      });
      sendReq.on('error', err => {
        this.upload.report.image.errors++;
        next(null);
      });

      sendReq.pipe(file);

      file.on('finish', () => {
        this.upload.report.image.inserts++;
        file.close(next); // Close file and callback
      });
      file.on('error', err => {
        this.upload.report.image.errors++;
        next(null);
      });
    }
  ], err => {
    if (err) throw err;
    complete(null);
  });
}
As suggested by @mscdex, I've also tried swapping out finish for his suggested close approach.
file.close(next); is unnecessary as the file stream is closed automatically by default. What you can do instead is to listen for the close event to know when the file descriptor for the stream has been closed. So replace the entire finish event handler with:
file.on('close', () => {
  this.upload.report.image.inserts++;
  next(null);
});
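A side note on the question's code (an observation of mine, not part of the original answer): when response.statusCode !== 200, next(null) is called from the response handler, yet the request keeps piping into the file, so the file's close/finish handler can call next a second time, and an async.waterfall callback must only be invoked once. A small guard avoids that:
// Wrap a callback so it can only fire once per waterfall step
function once(fn) {
  let called = false;
  return (...args) => {
    if (called) return;
    called = true;
    fn(...args);
  };
}

// In the Download Image step:
// const done = once(next);
// ...then call done(null) everywhere next(null) is currently called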
