I'm trying to understand process.stdin.
For example, I need to show array elements in the console, and I should allow the user to choose which element will be shown.
I have this code:
var arr = ['elem1','elem2','elem3','elem4','elem5'],
lastIndx = arr.length-1;
showArrElem();
function showArrElem () {
console.log('press number from 0 to ' + lastIndx +', or "q" to quit');
process.stdin.on('readable', function (key) {
var key = process.stdin.read();
if (!process.stdin.isRaw) {
process.stdin.setRawMode( true );
} else {
var i = String(key);
if (i == 'q') {
process.exit(0);
} else {
console.log('you press ' +i); // null
console.log('e: ' +arr[i]);
showArrElem();
};
};
});
};
Why the "i" is null when i type number a second time? How to use "process.stdin.on" correctly?
You're attaching a new readable listener to process.stdin after every input character, which causes process.stdin.read() to be invoked more than once per character. process.stdin is an instance of stream.Readable, and its read() method returns null when there's no data left in the internal buffer. To work around this, attach the listener once.
process.stdin.setRawMode(true);
process.stdin.on('readable', function () {
var key = String(process.stdin.read());
showArrEl(key);
});
function showArrEl (key) {
console.log(arr[key]);
}
Alternatively, you can attach a one-time listener using process.stdin.once('readable', ...).
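Putting the pieces together, a version of the question's showArrElem built on once('readable') could look like the sketch below (same variable names as the question; setRawMode still assumes stdin is a TTY). Because a one-time listener removes itself after firing, re-arming it inside the function never stacks up extra listeners:
var arr = ['elem1', 'elem2', 'elem3', 'elem4', 'elem5'],
    lastIndx = arr.length - 1;

process.stdin.setRawMode(true);
showArrElem();

function showArrElem() {
  console.log('press a number from 0 to ' + lastIndx + ', or "q" to quit');
  // one-time listener: removed automatically after it fires
  process.stdin.once('readable', function () {
    var key = String(process.stdin.read());
    if (key === 'q') {
      process.exit(0);
    }
    console.log('you pressed ' + key);
    console.log('e: ' + arr[key]);
    showArrElem(); // re-arm for the next keypress
  });
}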
This is typically how I get input from stdin in Node.js. It's the ES5 version; I don't use ES6 yet.
function processThis(input) {
console.log(input); //your code goes here
}
process.stdin.resume();
process.stdin.setEncoding("ascii");
_input = "";
process.stdin.on("data", function (input) {
_input += input;
});
process.stdin.on("end", function () {
processThis(_input);
});
Hope this helps.
Related
A little info: I have an arp.js file which takes a subnet address ("192.168.2"), runs arp -a, and stores all the returned strings in an array.
I can't figure out why my arpList function is returning an undefined value in my index.js file.
All the console.logs show the correct values inside arp.js when it is called from index.js, but the ipObj comes up undefined, even though the console.log just before the return of ipObj works.
Any help would be greatly appreciated.
var { spawn } = require('child_process');
const arpLs = spawn('arp', ['-a']);
var bufferData;
module.exports = {
arpList: function (subnet) {
arpLs.stdout.on('data', data => {
bufferData += data
})
arpLs.stderr.on('data', data => {
console.log('error: ' + data);
});
arpLs.on('exit', function (code) {
if (code != 0) {
console.log("Error exiting"); //if error occurs
}
console.log("exit start 1"); // checking internal processes at stages
var dataArray = bufferData.split(' ');
var ipArray = [];
for (i = 0; i < dataArray.length; i++) {
if (dataArray[i].includes(subnet)) {
ipArray.push(dataArray[i]);
console.log("loop working");
}
}
var ipObj = { "lanIps": ipArray };
console.log("Object is there: "+ipObj)
return ipObj; // this obj should be returned to the index.js call using
})
},
sayMyName: function () {
return "Hello";
}
}
//arpList(ipSubnet);
//INDEX.js
//the index page looks like this
//var arp = require('./arp.js);
//var ipSubnet = "192.168.2";
//var lanIps = arp.arpList(ipSubnet);
//console.log(lanIps);
I ended up adding a callback parameter to arpList: function (subnet, callback).
Then, instead of returning the value, I pass it into the callback.
On the index.js side, instead of
var lanIps = arp.arpList(value)
I used
arp.arpList(value, function (res) { lanIps = res; });
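For reference, a sketch of what that change can look like, using an error-first callback (the usual Node convention) and spawning arp inside the function so each call gets fresh output (names follow the question's code):
// arp.js
var { spawn } = require('child_process');

module.exports = {
  arpList: function (subnet, callback) {
    var arpLs = spawn('arp', ['-a']);
    var bufferData = '';
    arpLs.stdout.on('data', function (data) {
      bufferData += data;
    });
    arpLs.on('exit', function (code) {
      if (code !== 0) {
        return callback(new Error('arp exited with code ' + code));
      }
      var ipArray = bufferData.split(' ').filter(function (entry) {
        return entry.includes(subnet);
      });
      callback(null, { lanIps: ipArray });
    });
  }
};

// index.js
var arp = require('./arp.js');
arp.arpList('192.168.2', function (err, lanIps) {
  if (err) throw err;
  console.log(lanIps); // available here, once arp has exited
});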
return ipObj; // this obj should be returned to the index.js call using
It won't be returned. The reference says nothing about a return value. Node-style callbacks rarely work like that, because they are potentially asynchronous and the returned value cannot be taken into account.
This is a special case of this well-known problem. The process is asynchronous and finishes after the arp.arpList(ipSubnet) call, so there's nothing to assign to lanIps. This is a use case for promises; there are already third-party promisified counterparts like child-process-promise.
The problem can also be solved by moving to a synchronous API. child_process functions have synchronous counterparts, including spawnSync.
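As a rough sketch of the promise-based route (hand-rolling the wrapper instead of pulling in child-process-promise; the parsing mirrors the question's code):
// arp.js, promise flavour
var { spawn } = require('child_process');

function arpList(subnet) {
  return new Promise(function (resolve, reject) {
    var arpLs = spawn('arp', ['-a']);
    var bufferData = '';
    arpLs.stdout.on('data', function (data) { bufferData += data; });
    arpLs.on('exit', function (code) {
      if (code !== 0) {
        return reject(new Error('arp exited with code ' + code));
      }
      resolve({
        lanIps: bufferData.split(' ').filter(function (entry) {
          return entry.includes(subnet);
        })
      });
    });
  });
}

// index.js
arpList('192.168.2').then(function (lanIps) {
  console.log(lanIps);
});
The synchronous route is even shorter: spawnSync('arp', ['-a']).stdout.toString() gives you the same text to parse, at the cost of blocking the event loop while arp runs.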
In the code below, I am trying to get a value from a 'nodehun' method called spellSuggestions. The documentation says the syntax for this method is: dict.spellSuggestions(w, handleMisspellings);
where w is a list of words and handleMisspellings is a function (posted below). I can see the output from handleMisspellings on the console, but for the life of me I cannot find a way to return a variable from the call dict.spellSuggestions(w, handleMisspellings). After setting a var equal to dict.spellSuggestions(w, handleMisspellings), the return value is undefined. Please help!
var debugFlag = process.argv.indexOf('debug') > -1;
var nodehun = require('./../build/' + (debugFlag ? 'Debug' : 'Release') + '/nodehun');
var fs = require('fs');
var dict = new nodehun(fs.readFileSync(__dirname+'/dictionaries/en_US.aff'),fs.readFileSync(__dirname+'/dictionaries/en_US.dic'));
//var words = ['original', 'roach', 'erasee', 'come', 'consol', 'argumnt', 'gage',
// 'libary', 'lisence', 'principal', 'realy', 'license', 'suprise', 'writting'];
var handleMisspellings = function(err, correct, suggestions, origWord, callback) {
if (err) throw err;
if (correct) {
console.log(origWord + ' is spelled correctly!');
}
else {
console.log(origWord + ' not recognized. Suggestions: ' + suggestions);
}
var value = {
err: err,
correct: correct,
suggestions: suggestions,
origWord: origWord
};
console.log('VALUE+++++: ' + value);
return value;
}
var foo = function(words) {
words.forEach(function(w) {
dict.spellSuggestions(w, handleMisspellings);
some = dict;
console.log(JSON.stringify(some, null, 2));
});
}
module.exports = {
foo: foo
}
Thanks Dave. I eventually discovered the practical use of callback functions.

For each method whose data I wanted to access outside of the method, I declared an individual wrapper function. The wrapper accepts two arguments: the first is the input that drives the method call, and the second is literally 'callback'. Inside the wrapper I package whatever data I want into a JSON object and hand it back with 'return callback(val)'. When calling the wrapper, I pass the input of choice as the first argument and 'function(return_variable)' as the second; inside that function the desired data can be accessed, or even passed along to another callback.

The final code performs a loop over a list of words, creates metadata about each word, and appends the unique data for each word to that word in a single array. The final array is a single object containing all the input words and all the data about them. It required four individual functions (one of which is recursive) plus a function call.

Please see the code snippet of the wrapper described above [doCall]. Note that execution begins at the call of 'analyze' [commented out here] and works its way up through the earlier function declarations. I hope this helps someone else understand the practical use of callbacks. Please ask if you have any questions, and thanks again =D.
function doCall(word, callback) {
dict.spellSuggestions(word, function(err, correct, suggestions, origWord) {
if (err) throw err;
// if (correct)
// console.log(origWord + ' is spelled correctly!');
// else
// console.log(origWord + ' not recognized. Suggestions: ' + suggestions);
var val = {
err: err,
correct: correct,
origWord: origWord,
suggestions: suggestions
}
return callback(val);
});
}
function addMember(array, index, callback){
doCall(array[index], function(val){
// console.log(val);
// console.log(index);
// console.log(array[index]);
// console.log(val.origWord);
array[val.origWord] = val;
// console.log(array[val.origWord]);
index = index + 1;
return callback(array, index);
});
}
function Loop(array, index, callback) {
addMember(array, index, function(array2, index2){
// console.log(index);
// console.log(index2);
if(index2 === array2.length) {
return callback(array2);
}
else{
Loop(array2, index2, callback);
}
});
}
function analyze(array, index, callback){
Loop(array, index, function(complete_array){
console.log('!!!!!!!!!!!!!!!!!' + complete_array);
return callback(complete_array);
});
}
/*
analyze(words, 0, function(complete_array){
// for(i = 0; i < complete_array.length; i++) {
console.log(complete_array);
// }
});
*/
module.exports = {
analyze
}
Is there a way to consume a Node.js stream as a coroutine?
Example: a stream of Fibonacci numbers.
fibonacci.on('data', cb);
//The callback (cb) is like
function cb(data)
{
//something done with data here ...
}
Expectation
function* fibonacciGenerator()
{
fibonacci.on('data', cb);
//Don't know what has to be done further...
};
var fibGen = fibonacciGenerator();
fibGen.next().value(cb);
fibGen.next().value(cb);
fibGen.next().value(cb);
.
.
.
Take as many numbers from the generator as desired. The Fibonacci series is just an example; in reality the stream could be of anything: a file, a MongoDB query result, etc.
Maybe something like this:
Make the 'stream.on' function a generator.
Place yield inside the callback function.
Obtain the generator object.
Call next to take the next value from the stream.
Is it at least possible? If yes, how, and if not, why not? Maybe a dumb question :)
If you don't want to use a transpiler (e.g. Babel) or wait until async/await makes it to Node.js, you can implement it yourself using generators and promises.
The downside is that your code must live inside a generator.
First, you can make a helper that receives a stream and returns a function that, when called, returns a promise for the next "event" of the stream (e.g. data).
function streamToPromises(stream) {
return function() {
if (stream.isPaused()) {
stream.resume();
}
return new Promise(function(resolve) {
stream.once('data', function() {
resolve.apply(stream, arguments);
stream.pause();
});
});
}
}
It pauses the stream when you're not using it, and resumes it when you ask it the next value.
Next, you have a helper that receives a generator as an argument, and every time it yields a promise, it resolves it and passes its result back to the generator.
function run(fn) {
var gen = fn();
var promise = gen.next().value;
var tick = function() {
promise.then(function() {
var step = gen.next.apply(gen, arguments);
if (step.done) {
return; // the generator has finished, stop ticking
}
promise = step.value;
tick();
}).catch(function(err) {
// TODO: Handle error.
});
}
tick();
}
Finally, you would do your own logic inside a generator, and run it with the run helper, like this:
run(function*() {
var nextFib = streamToPromises(fibonacci);
var n;
n = yield nextFib();
console.log(n);
n = yield nextFib();
console.log(n);
});
Your own generator will yield promises, pausing its execution and passing the control to the run function.
The run function will resolve the promise and pass its value back to your own generator.
That's the gist of it. You'd need to modify streamToPromises to check for other events as well (e.g. end or error).
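One possible shape for that, as a sketch: resolve with null on end and reject on error, and teach the run helper to treat a null value as end-of-stream:
function streamToPromises(stream) {
  return function() {
    if (stream.isPaused()) {
      stream.resume();
    }
    return new Promise(function(resolve, reject) {
      function cleanup() {
        stream.removeListener('data', onData);
        stream.removeListener('end', onEnd);
        stream.removeListener('error', onError);
      }
      function onData(chunk) {
        cleanup();
        stream.pause();
        resolve(chunk);
      }
      function onEnd() {
        cleanup();
        resolve(null); // no more data
      }
      function onError(err) {
        cleanup();
        reject(err);
      }
      stream.once('data', onData);
      stream.once('end', onEnd);
      stream.once('error', onError);
    });
  };
}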
const { Readable } = require('stream');
class FibonacciGeneratorReader extends Readable {
_isDone = false;
_fibCount = null;
_gen = function *() {
let prev = 0, curr = 1, count = 1;
while (this._fibCount === -1 || count++ < this._fibCount) {
yield curr;
[prev, curr] = [curr, prev + curr];
}
return curr;
}.bind(this)();
constructor(fibCount) {
super({
objectMode: true,
read: size => {
if (this._isDone) {
this.push(null);
} else {
let fib = this._gen.next();
this._isDone = fib.done;
this.push(fib.value.toString() + '\n');
}
}
});
this._fibCount = fibCount || -1;
}
}
new FibonacciGeneratorReader(10).pipe(process.stdout);
Output should be:
1
1
2
3
5
8
13
21
34
55
I'm just learning Node.js and wanted to write a simple test program that copies a file from a source folder to a destination folder. I piped an fs.ReadStream to an fs.WriteStream and that worked perfectly. I next tried to use non-flowing mode, but the following program fails 99% of the time on larger files (anything over 1 MB). I'm assuming that, given certain timing, the event queue becomes empty and so the process exits. Should the following program work?
var sourcePath = "./source/test.txt";
var destinationPath = "./destination/test.txt";
// Display number of times 'readable' callback fired
var callbackCount = 0;
process.on('exit', function() {
console.log('Readable callback fired %d times', callbackCount);
})
var fs = require('fs');
var sourceStream = fs.createReadStream(sourcePath);
var destinationStream = fs.createWriteStream(destinationPath);
copyStream(sourceStream, destinationStream);
function copyStream(src, dst) {
var drained = true;
// read chunk of data when ready
src.on('readable', function () {
++callbackCount;
if (!drained) {
dst.once('drain', function () {
writeToDestination();
});
} else {
writeToDestination();
}
function writeToDestination() {
var chunk = src.read();
if (chunk !== null) {
drained = dst.write(chunk);
}
}
});
src.on('end', function () {
dst.end();
});
}
NOTE: If I remove the drain-related code the program always works, but the Node.js documentation indicates that you should wait for a drain event if the write function returns false.
So should the above program work as is? If not, how should I reorganize it to work with both readable and drain events?
It looks like you're most of the way there; there are just a couple of things you need to change.
When writing to dst you need to keep reading from src until either you get a null chunk, or dst.write() returns false.
Instead of listening for all readable events on src, you should only be listening for those events when it's ok to write to dst and you currently have nothing to write.
Something like this:
function copyStream(src, dst) {
function writeToDestination() {
var chunk = src.read(),
drained = true;
// write until dst is saturated or there's no more data available in src
while (drained && (chunk !== null)) {
if (drained = dst.write(chunk)) {
chunk = src.read();
}
}
if (!drained) {
// if dst is saturated, wait for it to drain and then write again
dst.once('drain', function() {
writeToDestination();
});
} else {
// if we ran out of data in src, wait for more and then write again
src.once('readable', function() {
++callbackCount;
writeToDestination();
});
}
}
// trigger the initial write when data is available in src
src.once('readable', function() {
++callbackCount;
writeToDestination();
});
src.on('end', function () {
dst.end();
});
}
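As a side note on the design choice: pipe(), which the question already uses, implements exactly this readable/drain dance internally, and on newer Node (10+) stream.pipeline does the same while also forwarding errors and cleaning up both streams. A minimal sketch, reusing the question's paths:
var fs = require('fs');
var { pipeline } = require('stream');

pipeline(
  fs.createReadStream(sourcePath),
  fs.createWriteStream(destinationPath),
  function (err) {
    if (err) console.error('copy failed', err);
    else console.log('copy done');
  }
);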
I have some Node code I'm trying to set up, but it seems like it's processing one item before another is complete. I'm wondering if I'm right about that, and if so, whether there is some kind of workaround.
var input = function(text){
process.stdout.write(text);
process.stdin.setEncoding('utf-8')
process.stdin.once("data", function(input_data){
return input_data;
}).resume();
}
var test = function(var1, var2){
var var1 = (var1 == null) ? input("Var One: ") : var1;
var var2 = (var2 == null) ? input("Var Two: ").split(" ") : var1;
console.log(var1, var2);
}
When I execute test(null, null), I expected it to ask me for Var One, define var1 as the input data, then prompt me with Var Two and split that input on spaces into a list. However, that did not work; it just errored with "Cannot call method of undefined".
I came from Python, where this was possible and nothing else executed until the previously defined step was completed, so I'm wondering if Node has something similar. Note that I am not using a server or website for this; I'm just testing code on my computer.
I'm fairly new to node myself, but here's my understanding of how it will execute:
test(null, null);
// enter test() function
// see that var1 == null, run input("Var One: ")
// write "Var One: " to stdout
// set stdin encoding to utf-8
// set an event listener for 'data' on stdin, and provide a callback for that event
... let's pause there for a moment, because this is the important bit.
When you set a listener and a callback, you've entered the land of asynchronicity. Your code will continue to march on without waiting and do the next things that you've told it to do. Which, in this case, is just sending back an undefined return from input(), and then moving on to handle var2 similarly, where you try to call undefined.split(), and the whole process pukes.
If, on the other hand, you just remove .split(), you'll probably get an output like this:
Var One: Var Two : [waiting for input]
At this point, when you enter the first value, it'll take it and return it from your callback to... nowhere... then it'll wait for your next value and do the same.
This is where you have to start to break your mind away from your Python background and procedural, synchronous habits.
I'll risk beating a dead horse, and comment up your code directly:
var input = function(text){
process.stdout.write(text);
process.stdin.setEncoding('utf-8')
// .once() is an asynchronous call
process.stdin.once("data", function(input_data){
// the return value of this anonymous function isn't used anywhere
return input_data;
}).resume();
// there is no return value of the "input" function
}
Functionally, what you're looking for is something like the following (though this is ugly; something like async.waterfall makes this sort of structure much more palatable, as sketched after this example, and there may be even better ways to do it that I haven't learned yet):
function test(var1, var2) {
if (!var1 || !var2) {
process.stdin.resume();
process.stdin.setEncoding('utf-8');
if (!var1) {
process.stdout.write('Var One: ');
process.stdin.once('data', function(input_data) {
// var1 & var2 get pulled in from the parent context
var1 = input_data;
if (!var2) {
process.stdout.write('Var Two: ');
process.stdin.once('data', function(input_data) {
var2 = input_data;
console.log(var1, var2);
});
}
else {
console.log(var1, var2);
}
});
}
else if (!var2) {
process.stdout.write('Var Two: ');
process.stdin.once('data', function(input_data) {
var2 = input_data;
console.log(var1, var2);
});
}
else {
// there is no else, either var1 or var2 is missing
// per the first conditional
}
}
else {
console.log(var1, var2);
}
}
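To make the async.waterfall suggestion above concrete, here's a rough sketch of the same flow using it (this assumes the async package from npm is installed; the prompt helper is hypothetical glue around process.stdin):
var async = require('async');

// hypothetical helper: writes the prompt, hands the next chunk of
// stdin to cb(null, answer), then pauses stdin again
function prompt(text, cb) {
  process.stdout.write(text);
  process.stdin.setEncoding('utf-8');
  process.stdin.once('data', function (data) {
    process.stdin.pause();
    cb(null, data.trim());
  }).resume();
}

function test(var1, var2) {
  async.waterfall([
    function (next) {
      if (var1) return next(null, var1);
      prompt('Var One: ', next);
    },
    function (v1, next) {
      if (var2) return next(null, v1, var2);
      prompt('Var Two: ', function (err, v2) {
        next(err, v1, v2);
      });
    }
  ], function (err, v1, v2) {
    if (err) throw err;
    console.log(v1, v2);
  });
}

test(null, null);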
Here is how you can do it:
function input (text, val, cb) {
if (val) return cb(null, val)
process.stdout.write(text)
process.stdin.setEncoding('utf-8')
process.stdin.once('data', function(data){
process.stdin.pause()
cb(null, data)
}).resume()
}
function test (var1, var2) {
input('Var One: ', var1, function (err, var1) {
input('Var Two: ', var2, function (err, var2) {
console.log(var1)
console.log(var2)
})
})
}
test(null, null)
Basically, since stdin is async, so is the input function, so you need to use a callback-based style. This works, though I strongly recommend not using stdin this way. Try the readline core module or one of the userland modules on npm.
You can see that writing callback-based code can get a little messy (aka callback hell). Here is a fancier way to address this, using the co module (you need Node 0.11.x and the --harmony-generators flag):
var co = require('co')
function input (text) {
return function (cb) {
process.stdout.write(text)
process.stdin.setEncoding('utf-8')
process.stdin.once('data', function(data){
process.stdin.pause()
cb(null, data)
}).resume()
}
}
function * test (var1, var2) {
var1 = var1 || (yield input('Var One: '))
var2 = var2 || (yield input('Var Two: '))
console.log(var1)
console.log(var2)
}
co(test)(null, null)
I would use this built-in Node.js module: http://nodejs.org/api/readline.html
var readline = require('readline');
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
rl.question("What do you think of node.js? ", function(answer) {
// TODO: Log the answer in a database
console.log("Thank you for your valuable feedback:", answer);
rl.question("What do you think of JavaScript?", function(answer2) {
console.log("Answer2 is ", answer2);
});
rl.close();
});
There is a relatively new language derived from CoffeeScript that makes async code more readable. It basically looks like sync code, and you don't have to nest indents for callbacks all the time. It's called ToffeeScript (https://github.com/jiangmiao/toffee-script). Coming from Python, you may appreciate it.
readline = require 'readline'
rl = readline.createInterface { input: process.stdin, output: process.stdout }
answer = rl.question! "What do you think of node.js?"
console.log "Thank you for your valuable feedback:", answer
answer2 = rl.question! "What do you think of ToffeeScript?"
console.log "Thanks again."
rl.close()