How to read line by line and detect eof (end of file)? - node.js

Followup to a solution to reading a file line by line, as described here: https://stackoverflow.com/a/16013228/570796
// Build a line-by-line reader over a file stream (terminal:false because
// the input is a file, not an interactive TTY).
var fs = require('fs'),
readline = require('readline'),
instream = fs.createReadStream('/path/to/file');
var rl = readline.createInterface(
{
input: instream,
terminal: false
});
// 'line' fires once per input line; there is no per-line EOF flag here,
// which is exactly what the question asks about.
rl.on('line', function(line)
{
console.log(line);
// if(instream.isEnd()) ...
});
How do I detect if I reached the end of the file?
I understand that there is an event on the ReadStream on('end', () => {/*...*/}) But I need a solution where I can check it through an if statement.

Here's a solution:
let ended = false;
instream.on('end', () => { ended = true });
rl.on('line', function(line) {
if (ended) {
//...
However, there's a reasonable chance you don't actually need this, and your application could be structured differently.
I'm not sure whether the line event can even happen after the end event.
Turns out my suspicions were true, so you need to do it this way around.
let lastLine;
rl.on('line', line => { lastLine = line })
instream.on('end', () => {
assert.notStrictEqual(lastLine, undefined, 'There were no lines!');
// ...
});

You can also use the close event.
const fs = require("fs");
const readline = require('readline');
const readInterface = readline.createInterface({
input: fs.createReadStream("path/to/file.txt"),
output: process.stdout,
terminal: false,
})
rl.on("line", function(line){
console.log(line);
}).on("close", function() {
console.log("EOF");
})
The close event will run when there is no more data left to be read from the file.

Another (elegant) approach could be implementing a Promise.
You could furthermore add a reject("Error while reading File"), linked to input.on('error') while reading. But it's not 100% required for your problem.
var fs = require('fs');
var input = require('fs').createReadStream('./inputFile.txt')
var promise = new Promise(function(resolve, reject) {
var lineReader = require('readline').createInterface({
input: input
});
input.on('end', () => {
resolve("I reached the end!");
});
lineReader.on('line', (line) => {
// DO STH. WITH EACH LINE (IF DESIRED)
});
});
promise.then((resolveResult) => {
// DO WHATEVER YOU WANT TO DO AFTER
}
For more information about promises, I'd check the following introduction:
https://developers.google.com/web/fundamentals/primers/promises

Related

Node.js readline property doesn't ask for input

Node.js readline property doesn't stop for input, instead continues program, causing app to crash. While trying to solve this I found out that apparently node does the whole code simultaneously and doesn't because of that stop for input. I found out ways to run this code but they didn't work for me.
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
var token;
var pass;
rl.question('token: ', (tok) => {
token = tok;
rl.close();
});
rl.question('pass: ', (pas) => {
pass = pas;
rl.close();
});
What can I do to solve this?
I hope you are looking for something like below:
const readLine = require('readline');
const util = require('util')
const { promisify } = util;
const readline = readLine.createInterface({
input: process.stdin,
output: process.stdout,
});
// Prepare readline.question for promisification
readline.question[util.promisify.custom] = (question) => {
return new Promise((resolve) => {
readline.question(question, resolve);
});
};
let questionPs = promisify(readline.question);
async function askQuestions (questions,readline) {
let answers= [];
for(let i=0;i<2;i++){
let tmp = await questionPs(q[i]);
answers.push(tmp)
}
console.log(answers);
readline.close();
}
// Usage example:
let q = ['token:','pass:']
askQuestions(q,readline)

readline doesn't stop line reading after rl.close() emit in nodejs

I have the following file I want to read line by line and stop reading it once I have found "nameserver 8.8.8.8".
nameserver 8.8.8.8
nameserver 45.65.85.3
nameserver 40.98.3.3
I am using nodejs and the readline module to do so
const readline = require('readline');
const fs = require('fs');
// Scan file_to_read.conf for the exact line "nameserver 8.8.8.8" and
// report on 'close' whether it was found.
function check_resolv_nameserver(){
// flag indicates whether namerserver_line was found or not
var nameserver_flag = false;
const rl = readline.createInterface({
input: fs.createReadStream('file_to_read.conf')
});
rl.on('line', (line) => {
console.log(`Line from file: ${line}`);
if (line === 'nameserver 8.8.8.8'){
console.log('Found the right file. Reading lines should stop here.');
nameserver_flag = true;
// NOTE: rl.close() does not suppress lines readline has already
// buffered from the stream — later lines may still be emitted,
// which is exactly the behavior the question observes.
rl.close();
}
});
rl.on('close', function(){
if (nameserver_flag === true){
console.log('Found nameserver 8.8.8.8');
}
else {
console.log('Could not find nameserver 8.8.8.8');
}
});
}
check_resolv_nameserver();
Since I emit a close event with rl.close() as soon as I read the first match, I would expect my Code to read only the first line and then stop reading further. But instead my output looks like this
Line from file: nameserver 8.8.8.8
Found the right file. Reading lines should stop here.
Found nameserver 8.8.8.8
Line from file: nameserver 45.65.85.3
Line from file: nameserver 40.98.3.3
How can I make readline stop after first match and let me proceed with a something else?
for those of you who can't make the linereader stop, do this (in your readline callback):
lineReader.close()
lineReader.removeAllListeners()
It appears readline buffers some lines, so you'll have to add your own check.
Example:
#! /usr/bin/node
const fs = require('fs')
const readline = require('readline')
const reader = readline.createInterface({
input: fs.createReadStream('test.js')
})
let wasRead = false
reader.on('line', line => {
if (wasRead) return undefined
console.log('hello world')
wasRead = true
reader.close()
})
You should close the stream as well:
const readline = require('readline');
const fs = require('fs');
const readStream = fs.createReadStream('file_to_read.conf');
// More code here ...
const rl = readline.createInterface({
input: readStream
});
// Rest of your code
rl.close();
readStream.destroy();
I searched for a long time and couldn't get this working, so I finally got what I wanted thanks to a node module: line-reader.
It's good as it can read from file but also from buffer.
Here is a simple code sample where you can read 2 lines then stop.
const lineReader = require('line-reader');
const stream = require('stream');
let bufferStream = new stream.PassThrough();
bufferStream.end(yourBuffer);
let lineNumber = 0;
lineReader.eachLine(bufferStream, function(line) {
lineNumber++;
if (lineNumber === 1 || lineNumber === 2) {
// Perform whatever
} else {
// returning false breaks the reading
return false;
}
}, async function finished (err) {
if (err) {
// throw error or whatever
}
// Do after reading processing here
});
EDIT: I Found a clean way to achieve everything exactly as planned :
1st create a splitter to read string chunks
// Transform stream that re-chunks its input into fixed-size pieces of
// `splitSize` bytes. stopIt() makes _transform stop emitting further
// chunks, which is used to abort reading early.
// NOTE(review): `Transform` must already be in scope here
// (e.g. require('stream').Transform) — confirm against the full file.
class Splitter extends Transform {
constructor(options){
super(options);
this.splitSize = options.splitSize;
// Accumulates incoming bytes until at least one full chunk is available.
this.buffer = Buffer.alloc(0);
// Checked by the emit loop below; cleared by stopIt().
this.continueThis = true;
}
stopIt() {
// Stop emitting chunks; any still-buffered data is silently dropped.
this.continueThis = false;
}
_transform(chunk, encoding, cb){
this.buffer = Buffer.concat([this.buffer, chunk]);
// Emit as many complete splitSize-byte chunks as are buffered.
// NOTE(review): the `=== 1` alternative looks like a special case for a
// trailing single byte — confirm it matches the intended framing.
while ((this.buffer.length > this.splitSize || this.buffer.length === 1) && this.continueThis){
try {
let chunk = this.buffer.slice(0, this.splitSize);
this.push(chunk);
this.buffer = this.buffer.slice(this.splitSize);
// 26 is the SUB/EOF control byte in some legacy encodings.
if (this.buffer[0] === 26){
console.log('EOF : ' + this.buffer[0]);
}
} catch (err) {
console.log('ERR OCCURED => ', err);
break;
}
}
console.log('WHILE FINISHED');
cb();
}
}
Then pipe it to your stream :
let bufferStream = new stream.PassThrough();
bufferStream.end(hugeBuffer);
let splitter = new Splitter({splitSize : 170}); // In my case I have 170 length lines, so I want to process them line by line
let lineNr = 0;
bufferStream
.pipe(splitter)
.on('data', async function(line){
line = line.toString().trim();
splitter.pause(); // pause stream so you can perform long time processing with await
lineNr++;
if (lineNr === 1){
// DO stuff with 1st line
} else {
splitter.stopIt(); // Break the stream and stop reading so we just read 1st line
}
splitter.resume() // resumestream so you can process next chunk
}).on('error', function(err){
console.log('Error while reading file.' + err);
// whatever
}).on('end', async function(){
console.log('end event');
// Stream has ended, do whatever...
});
This code enables perfect read streams, line by line. No need to use the Splitter if the whole file is not so long
What worked for me was adding a resume inside the pause handler; it allows me to edit the file after it has been read.
var lineReader = require('readline').createInterface({
input: require('fs').createReadStream(require('path').resolve('test.js'))
});
lineReader.on('line', function (line) { console.log(line) }) /*loop all lines*/
.on('pause', function () {
/* resume after read lines is finished to close file */
lineReader.resume();
})
.on('close', function () {
/*action after file read is close*/
console.log('Close ok')
});
You can declare a line event listener and remove it when needed.
const lineEventListener = (line) => {
// do sth
// Close
rl.close();
rl.removeListener('line', lineEventListener);
}
rl.on('line', lineEventListener);

How to get synchronous readline, or "simulate" it using async, in nodejs?

I am wondering if there is a simple way to get "synchronous" readline or at least get the appearance of synchronous I/O in node.js
I use something like this but it is quite awkward
var readline = require('readline');
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
var i = 0;
var s1 = '';
var s2 = '';
rl.on('line', function(line){
if(i==0) { s1 = line; }
else if(i==1) { s2 = line; }
i++;
})
rl.on('close', function() {
//do something with lines
})'
Instead of this I would prefer if it were as easy as something like
var s1 = getline(); // or "await getline()?"
var s2 = getline(); // or "await getline()?"
Helpful conditions:
(a) Prefer not using external modules or /dev/stdio filehandle, I am submitting code to a code submission website and these do not work there
(b) Can use async/await or generators
(c) Should be line based
(d) Should not require reading entire stdin into memory before processing
Just in case someone stumbles upon here in future
Node 11.7 added support for this using async await
const readline = require('readline');
//const fileStream = fs.createReadStream('input.txt');
const rl = readline.createInterface({
input: process.stdin, //or fileStream
output: process.stdout
});
for await (const line of rl) {
console.log(line)
}
Remember to wrap it in async function(){} otherwise you will get a reserved_keyword_error.
const start = async () =>{
for await (const line of rl) {
console.log(line)
}
}
start()
To read an individual line, you can use the async iterator manually
const it = rl[Symbol.asyncIterator]();
const line1 = await it.next();
You can just wrap it in a promise -
const answer = await new Promise(resolve => {
rl.question("What is your name? ", resolve)
})
console.log(answer)
Like readline module, there is another module called readline-sync, which takes synchronous input.
Example:
const reader = require("readline-sync"); //npm install readline-sync
let username = reader.question("Username: ");
const password = reader.question("Password: ",{ hideEchoBack: true });
if (username == "admin" && password == "foobar") {
console.log("Welcome!")
}
I think this is what you want :
const readline = require('readline');
const rl = readline.createInterface({ input: process.stdin , output: process.stdout });
const getLine = (function () {
const getLineGen = (async function* () {
for await (const line of rl) {
yield line;
}
})();
return async () => ((await getLineGen.next()).value);
})();
const main = async () => {
let a = Number(await getLine());
let b = Number(await getLine());
console.log(a+b);
process.exit(0);
};
main();
Note: this answer use experimental features and need Node v11.7
Try this. It's still not a perfect replication of a synchronous line reading function -- e.g. async functions still happen later, so some of your calling code may execute out of order, and you can't call it from inside a normal for loop -- but it's a lot easier to read than the typical .on or .question code.
// standard 'readline' boilerplate
const readline = require('readline');
const readlineInterface = readline.createInterface({
input: process.stdin,
output: process.stdout
});
// new function that promises to ask a question and
// resolve to its answer
function ask(questionText) {
return new Promise((resolve, reject) => {
readlineInterface.question(questionText, (input) => resolve(input) );
});
}
// launch your program since `await` only works inside `async` functions
start()
// use promise-based `ask` function to ask several questions
// in a row and assign each answer to a variable
async function start() {
console.log()
let name = await ask("what is your name? ")
let quest = await ask("what is your quest? ")
let color = await ask("what is your favorite color? ")
console.log("Hello " + name + "! " +
"Good luck with " + quest +
"and here is a " + color + " flower for you.");
process.exit()
}
UPDATE: https://www.npmjs.com/package/readline-promise implements it (source code here: https://github.com/bhoriuchi/readline-promise/blob/master/src/index.js#L192 ). It implements several other features as well, but they seem useful too, and not too overengineered, unlike some other NPM packages that purport to do the same thing. Unfortunately, I can't get it to work due to https://github.com/bhoriuchi/readline-promise/issues/5 but I like its implementation of the central function:
// Promise-based wrapper around readlineInterface.question: resolves with
// the user's answer to `questionText`.
function ask(questionText) {
  return new Promise((resolve) => readlineInterface.question(questionText, resolve));
}
Using generators your example would look like this:
var readline = require('readline');
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
var i = 0;
var s1 = '';
var s2 = '';
// Generator that consumes lines pushed into it via iter.next(line).
// Each `yield` suspends until the next 'line' event, so the body reads
// like sequential, blocking getline() calls.
var iter=(function* () {
s1 = yield;
i++;
s2 = yield;
i++;
// Swallow any remaining lines, only counting them.
while (true) {
yield;
i++;
}
})(); iter.next(); // prime the generator so it is parked at the first yield
rl.on('line', line=>iter.next(line))
rl.on('close', function() {
//do something with lines
})
So yield here acts as if it were a blocking getline() and you can handle lines in the usual sequential fashion.
UPD:
And an async/await version might look like the following:
var readline = require('readline');
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
var i = 0;
var s1 = '';
var s2 = '';
// Holds the resume-callback of whichever `await getline()` is pending.
var continuation;
// getline() returns a bare thenable whose then() merely records the
// resolve function; the 'line' handler below calls it with the next line.
// This hand-rolls a one-shot promise per await without new allocations.
var getline = (() => {
var thenable = {
then: resolve => {
continuation = resolve;
}
};
return ()=>thenable;
})();
// Sequential-looking consumer: each await suspends until a line arrives.
(async function() {
s1 = await getline();
i++;
s2 = await getline();
i++;
while (true) {
await getline();
i++;
}
})();
rl.on('line', line=>continuation(line))
rl.on('close', function() {
//do something with lines
})
In both of these "synchronous" versions, i is not used for distinguishing lines and only useful for counting the total number of them.
Here's an example but it requires reading entire stdin before giving results however which is not ideal
// NOTE(review): assumes `readline` was required earlier in the file.
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
// Buffers ALL of stdin, then resolves with a pull-style iterator whose
// next() hands back one line at a time (null when exhausted). The whole
// input is held in memory before the promise settles.
function lineiterator() {
var currLine = 0;
var lines = [];
return new Promise(function(resolve, reject) {
rl.on('line', function (line){
lines.push(line)
})
rl.on('close', function () {
resolve({
next: function() {
return currLine < lines.length ? lines[currLine++]: null;
}
});
})
})
}
Example
lineiterator().then(function(x) {
console.log(x.next())
console.log(x.next())
})
$ echo test$\ntest | node test.js
test
test
The simplest (and preferred) option is available in the docs. https://nodejs.org/api/readline.html#rlquestionquery-options-callback
const util = require('util');
const question = util.promisify(rl.question).bind(rl);
async function questionExample() {
try {
const answer = await question('What is you favorite food? ');
console.log(`Oh, so your favorite food is ${answer}`);
} catch (err) {
console.error('Question rejected', err);
}
}
questionExample();
Since I don't know how many strings you need I put them all in an Array
Don't hesitate to comment if you need a more detailed answer or if my answer is not exact :
var readline = require('readline');
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
var i = 0;
var strings = [];
rl.on('line', function(line) {
// 2 lines below are in case you want to stop the interface after 10 lines
// if (i == 9)
// rl.close()
strings[i] = line
i++
}).on('close', function() {
console.log(strings)
})
// this is in case you want to stop the program when you type ctrl + C
process.on('SIGINT', function() {
rl.close()
})
We can use promise and process.stdin events together to simulate a synchronous input system
const { EOL } = require("os");
const getLine = async () =>
(
await new Promise((resolve) => {
process.stdin.on("data", (line) => {
resolve("" + line);
});
})
).split(EOL)[0];
const line = await getLine();
console.log(line);

Nodejs - read line by line from file, perform async action for each line and resume

I'm trying to read a file line by line, perform some action that has a callback and when the function finishes to resume line reading. For example:
var fs = require('fs');
var readline = require('readline');
var stream = require('stream');
var instream = fs.createReadStream('./phrases.txt');
var outstream = new stream;
var rl = readline.createInterface(instream, outstream);
rl.on('line', function (line) {
rl.pause();
setTimeout(function () {
console.log('resuming');
rl.resume();
}, 2000);
});
I was under the impression the example above should basically read a line, wait for 2 seconds, console.log and then continue to the next line. What really happens is that it waits for the first 2 seconds and then spews out lots of console.log
Line by Line module helps you reading large text files, line by line, without buffering the files into memory.
You can process the lines asynchronously. This is the example provided:
var LineByLineReader = require('line-by-line'),
lr = new LineByLineReader('big_file.txt');
lr.on('error', function (err) {
// 'err' contains error object
});
lr.on('line', function (line) {
// pause emitting of lines...
lr.pause();
// ...do your asynchronous line processing..
setTimeout(function () {
// ...and continue emitting lines.
lr.resume();
}, 100);
});
lr.on('end', function () {
// All lines are read, file is closed now.
});
Solution without installing any external library. You only need the native node.js "readline" module. Just do the following:
import fs from "fs";
import path from "path";
import readline from "readline";
import { fileURLToPath } from "url";

// BUG FIX: `path` was used without being imported, and `__dirname` does
// not exist in ES modules — derive it from import.meta.url instead.
const __dirname = path.dirname(fileURLToPath(import.meta.url));

const readInterface = readline.createInterface({
  input: fs.createReadStream(path.join(__dirname, 'myfile.txt'))
});

// Process lines sequentially, awaiting an async operation per line.
for await (const line of readInterface){
  await someAsynchronousOperation();
}
Source (Official documentation): https://nodejs.org/api/readline.html#rlsymbolasynciterator
A very nice line-reader module exists,
https://github.com/nickewing/line-reader
simple code:
var lineReader = require('line-reader');
lineReader.eachLine('file.txt', function(line, last) {
// do whatever you want with line...
console.log(line);
if(last){
// or check if it's the last one
}
});
also "java-style" interface for more control:
lineReader.open('file.txt', function(reader) {
if (reader.hasNextLine()) {
reader.nextLine(function(line) {
console.log(line);
});
}
});
Another cool solution:
var fs = require('fs'),
sleep = require('sleep'),
readline = require('readline');
var rd = readline.createInterface({
input: fs.createReadStream('phrases.txt'),
output: process.stdout,
terminal: false
});
rd.on('line', function(line) {
console.log('-------')
console.log(line);
sleep.sleep(2)
});
// Stream `fileName` and emit a "line" event (with a Buffer payload, no
// trailing '\n') for every newline-terminated line, plus a final event
// for an unterminated last line. Returns the EventEmitter.
function createLineReader(fileName){
  var EM = require("events").EventEmitter
  var ev = new EM()
  var stream = require("fs").createReadStream(fileName)
  var remainder = null; // partial line carried over between chunks
  stream.on("data",function(data){
    if(remainder != null){ //append newly received data chunk
      // FIX: `new Buffer(size)` is deprecated and unsafe (uninitialized
      // memory) — Buffer.concat does the same join safely.
      data = Buffer.concat([remainder, data])
    }
    // Scan for '\n' (byte 10) and emit each complete line.
    var start = 0;
    for(var i=0; i<data.length; i++){
      if(data[i] == 10){ //\n new line
        var line = data.slice(start,i)
        ev.emit("line", line)
        start = i+1;
      }
    }
    // Keep any trailing partial line for the next chunk.
    if(start<data.length){
      remainder = data.slice(start);
    }else{
      remainder = null;
    }
  })
  stream.on("end",function(){
    // Flush the last line if the file does not end with a newline.
    if(null!=remainder) ev.emit("line",remainder)
  })
  return ev
}
//---------main---------------
fileName = process.argv[2]
lineReader = createLineReader(fileName)
lineReader.on("line",function(line){
console.log(line.toString())
//console.log("++++++++++++++++++++")
})
Here is a simple solution in typescript using line-reader that can run in nodejs 8:
import lineReader from 'line-reader';
function readLines(filename: string, processLine: (line: string) => Promise<void>): Promise<void> {
return new Promise((resolve, reject) => {
lineReader.eachLine(filename, (line, last, callback) => {
if (!callback) throw new Error('panic');
processLine(line)
.then(() => last ? resolve() : callback())
.catch(reject);
});
});
}
async function echo(): Promise<void> {
await readLines('/dev/stdin', async (line) => {
console.log(line);
});
}
echo();
Note that it does not buffer the whole file before executing, therefore it is suitable for processing large text files.
I suggest to use stdio for this kind of things, as input stream is paused and resumed automatically and you don't need to worry about your system resources. You'll be able to read really huge files with just a few MBs of memory:
This example prints a line every 2 seconds:
$ node myprogram.js < file.txt
import { read } from 'stdio';
async function onLine (line) {
console.log(line);
await sleep(2000);
}
read(onLine)
.then(() => console.log('finished'));
Note I'm using an asynchronous sleep to represent any asynchronous task. It is not included in Node.js by default but it would be as follows:
const sleep = (delay) => new Promise((resolve) => setTimeout(resolve, delay));
const readline = require('readline');
const fs = require('fs');
const rl = readline.createInterface({
input: fs.createReadStream('sample.txt')
});
rl.on('line', (line) => {
console.log(`Line from file: ${line}`);
});
source: https://nodejs.org/api/readline.html#readline_example_read_file_stream_line_by_line

How to read from stdin line by line in Node

I'm looking to process a text file with node using a command line call like:
node app.js < input.txt
Each line of the file needs to be processed individually, but once processed the input line can be forgotten.
Using the on-data listener of stdin, I get the input stream chunked by byte size, so I set this up.
process.stdin.resume();
process.stdin.setEncoding('utf8');
var lingeringLine = "";
process.stdin.on('data', function(chunk) {
lines = chunk.split("\n");
lines[0] = lingeringLine + lines[0];
lingeringLine = lines.pop();
lines.forEach(processLine);
});
process.stdin.on('end', function() {
processLine(lingeringLine);
});
But this seems so sloppy. Having to massage around the first and last items of the lines array. Is there not a more elegant way to do this?
You can use the readline module to read from stdin line by line:
const readline = require('readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
rl.on('line', (line) => {
console.log(line);
});
rl.once('close', () => {
// end of input
});
// Work on POSIX and Windows
var fs = require("fs");
var stdinBuffer = fs.readFileSync(0); // STDIN_FILENO = 0
console.log(stdinBuffer.toString());
readline is specifically designed to work with terminal (that is process.stdin.isTTY === true). There are a lot of modules which provide split functionality for generic streams, like split. It makes things super-easy:
process.stdin.pipe(require('split')()).on('data', processLine)
function processLine (line) {
console.log(line + '!')
}
#!/usr/bin/env node
const EventEmitter = require('events');
// Wrap process.stdin in an EventEmitter that emits one 'line' event per
// input line (handling both \n and \r\n), flushing any unterminated
// final line on 'end'.
function stdinLineByLine() {
  const stdin = new EventEmitter();
  let buff = '';

  process.stdin
    .on('data', data => {
      buff += data;
      // BUG FIX: `lines` was an implicit global in the original — declare it.
      const lines = buff.split(/\r\n|\n/);
      buff = lines.pop(); // keep the trailing partial line for next chunk
      lines.forEach(line => stdin.emit('line', line));
    })
    .on('end', () => {
      // Flush the final unterminated line, if any.
      if (buff.length > 0) stdin.emit('line', buff);
    });

  return stdin;
}
const stdin = stdinLineByLine();
stdin.on('line', console.log);
read stream line by line,should be good for large files piped into stdin, my version:
var n=0;
function on_line(line,cb)
{
////one each line
console.log(n++,"line ",line);
return cb();
////end of one each line
}
var fs = require('fs');
var readStream = fs.createReadStream('all_titles.txt');
//var readStream = process.stdin;
readStream.pause();
readStream.setEncoding('utf8');
var buffer=[];
readStream.on('data', (chunk) => {
const newlines=/[\r\n]+/;
var lines=chunk.split(newlines)
if(lines.length==1)
{
buffer.push(lines[0]);
return;
}
buffer.push(lines[0]);
var str=buffer.join('');
buffer.length=0;
readStream.pause();
on_line(str,()=>{
var i=1,l=lines.length-1;
i--;
function while_next()
{
i++;
if(i<l)
{
return on_line(lines[i],while_next);
}
else
{
buffer.push(lines.pop());
lines.length=0;
return readStream.resume();
}
}
while_next();
});
}).on('end', ()=>{
if(buffer.length)
var str=buffer.join('');
buffer.length=0;
on_line(str,()=>{
////after end
console.error('done')
////end after end
});
});
readStream.resume();
Explanation:
To cut the input correctly on UTF-8 character boundaries (and not in the middle of a multi-byte sequence), set the stream encoding to 'utf8'; this ensures each emitted chunk ends on a complete character.
When data is received, the input is paused. This blocks further input until all lines of the current chunk are consumed, and prevents the buffer from overflowing when the line-processing function is slower than the input.
If a chunk arrives without any newline, accumulate it and return; once a chunk contains more than one line, join the accumulated buffer with the first fragment and process that as a complete line.
After all the split lines have been consumed, push the last (possibly partial) line back into the buffer and resume the paused stream.
es6 code
var n=0;
async function on_line(line)
{
////one each line
console.log(n++,"line ",line);
////end of one each line
}
var fs = require('fs');
var readStream = fs.createReadStream('all_titles.txt');
//var readStream = process.stdin;
readStream.pause();
readStream.setEncoding('utf8');
var buffer=[];
readStream.on('data', async (chunk) => {
const newlines=/[\r\n]+/;
var lines=chunk.split(newlines)
if(lines.length==1)
{
buffer.push(lines[0]);
return;
}
readStream.pause();
// let i=0;
buffer.push(lines[0]); // take first line
var str=buffer.join('');
buffer.length=0;//clear array, because consumed
await on_line(str);
for(let i=1;i<lines.length-1;i++)
await on_line(lines[i]);
buffer.push(lines[lines.length-1]);
lines.length=0; //optional, clear array to hint GC.
return readStream.resume();
}).on('end', async ()=>{
if(buffer.length)
var str=buffer.join('');
buffer.length=0;
await on_line(str);
});
readStream.resume();
I did not test the es6 code
In my case the program (elinks) returned lines that looked empty, but in fact had special terminal characters, color control codes and backspace, so grep options presented in other answers did not work for me. So I wrote this small script in Node.js. I called the file tight, but that's just a random name.
#!/usr/bin/env node
// Strip terminal control output from a line: apply backspaces and drop
// ANSI escape sequences (ESC ... 'm'), returning only the characters
// that would actually be rendered.
function visible(a) {
  var R = ''
  for (var i = 0; i < a.length; i++) {
    if (a[i] == '\b') {
      // BUG FIX: the original did `R -= 1`, which coerces the string to
      // NaN; a backspace should remove the previously kept character.
      R = R.slice(0, -1); continue;
    }
    if (a[i] == '\u001b') {
      // Skip an ANSI escape sequence up to its terminating 'm'.
      while (a[i] != 'm' && i < a.length) i++
      if (a[i] == undefined) break
    }
    else R += a[i]
  }
  return R
}
// True when the line, after terminal escapes are stripped by visible(),
// contains nothing but spaces (i.e. it would render as blank).
function empty(a) {
  const rendered = visible(a)
  for (const ch of rendered) {
    if (ch != ' ') return false
  }
  return true
}
var readline = require('readline')
var rl = readline.createInterface({ input: process.stdin, output: process.stdout, terminal: false })
rl.on('line', function(line) {
if (!empty(line)) console.log(line)
})
if you want to ask the user number of lines first:
//array to save line by line
let xInputs = [];
// Read a single line from stdin: builds a fresh readline interface,
// closes it after the first 'line' event, stores the line in xInputs and
// resolves with it.
// NOTE(review): creating a new interface per line works but is wasteful;
// kept as-is to preserve the author's approach.
const getInput = async (resolve)=>{
const readline = require('readline').createInterface({
input: process.stdin,
output: process.stdout,
});
readline.on('line',(line)=>{
readline.close();
xInputs.push(line);
resolve(line);
})
}
// Chain `numberOfInputLines` getInput calls sequentially via a promise
// chain, then invoke `callback` once all lines are stored in xInputs.
const getMultiInput = (numberOfInputLines,callback)=>{
let i = 0;
let p = Promise.resolve();
for (; i < numberOfInputLines; i++) {
p = p.then(_ => new Promise(resolve => getInput(resolve)));
}
p.then(()=>{
callback();
});
}
//get number of lines
const readline = require('readline').createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
readline.on('line',(line)=>{
getMultiInput(line,()=>{
//get here the inputs from xinputs array
});
readline.close();
})
process.stdin.pipe(process.stdout);

Resources