Node.js file/folder auto completion on user input - node.js

I'm creating a cmd program in Node.js that receives user input and one of those inputs is a folder.
Now I want to make it easier for the user to choose a folder (like the cmd autocompletion for files when using commands such as 'cd'), rather than actually type the whole path.
Is there any best practice for doing that?
Thanks in advance!

I saw someone implement that in cofmon before, so these links can be helpful:
https://github.com/rbrcurtis/cofmon/blob/master/cofmon.coffee#L6
https://nodejs.org/api/readline.html#readline_use_of_the_completer_function
https://stackoverflow.com/a/31434172/883571
https://gist.github.com/DTrejo/901104

Ok, so after looking at what jiyinyiyong answered I was able to get what I wanted.
Basically this is it:
var readline = require('readline');
var fs = require('fs');

// Tab-completion callback for readline: completes file/directory names
// relative to __dirname, letting the user descend into subdirectories.
function completer(line) {
  // Portion of the line up to and including the last '/', e.g. 'src/lib/'.
  var currAddedDir = (line.indexOf('/') !== -1) ? line.substring(0, line.lastIndexOf('/') + 1) : '';
  // Partial entry being typed after the last '/'.
  var currAddingDir = line.substr(line.lastIndexOf('/') + 1);
  var path = __dirname + '/' + currAddedDir;
  var completions = fs.readdirSync(path);
  var hits = completions.filter(function (c) { return c.indexOf(currAddingDir) === 0; });
  var strike = [];
  // Single match: complete the whole path and append '/' so the user can keep
  // descending. (Bug fix: the original pushed the undefined `currAddedPath`,
  // which threw a ReferenceError whenever exactly one entry matched.)
  if (hits.length === 1) strike.push(currAddedDir + hits[0] + '/');
  // readline expects [matches, lineBeingCompleted].
  return (strike.length) ? [strike, line] : [hits.length ? hits : completions, line];
}

var rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
  completer: completer
});

rl.question('whatever', function (answer) {
  // Do whatever with the answer.
});

Related

error by using in PDFTron:' NetworkError(`Unsupported protocol ${this._url.protocol}`'

I am trying to convert a PDF file to a PDF/A-3 file using PDFTron.
I added the current url_path.
My code is below:
// Convert a local PDF to PDF/A using PDFTron PDFNet.
// NOTE(review): relies on `PDFNet` being initialized elsewhere, and this
// statement must run inside an async context (it uses await).
var input_url = './utils/';
var input_filename = 'test.pdf';
var output_filename = 'test_pdfa.pdf';
var convert = true;
var pwd = '';
var exceptions;
var max_ref_objs = 10;
var url_input = input_url + input_filename;
console.log('Converting input document: ' + input_filename);
// Bug fix: createFromUrl() only accepts http/https URLs, so a relative path
// like './utils/test.pdf' fails with "Unsupported protocol". For a local
// file, createFromFile() is the correct API; it also puts the declared
// `convert` and `pwd` variables to use instead of hard-coded literals.
// TODO(review): confirm the exact createFromFile parameter order against the
// PDFNet Node.js API docs for the version in use.
var pdfa = await PDFNet.PDFACompliance.createFromFile(convert, url_input, pwd, PDFNet.PDFACompliance.Conformance.e_Level2B, exceptions, max_ref_objs);
I get the error:
'NetworkError(Unsupported protocol ${this._url.protocol})',
Does anyone know what the problem is,
and why it doesn't recognize the location?
I changed the code to the version shown here.
Now it's working!!

const usage inside for loop, why this behaviour?

I have a for loop in my nodejs code
// Persists an incoming document: builds a unique output folder, then writes
// each base64-encoded page out as an image file (png/jpg by sniffed type).
// NOTE(review): snippet is truncated ("...") and depends on outer-scope names
// (co, moment, fs, config, imageType, documentCounter) not shown here.
const saveDocument = co.wrap(function *(documentData, user, locale) {
var now = moment();
var creationDateLongString = now.format("YYYYMMDDHHmmss");
var creationDateShortString = now.format("YYYYMMDD");
// Folder name <code>_<timestamp>_<counter> keeps concurrent saves distinct.
var outputChildFolder = documentData.code + '_' + creationDateLongString + '_' + documentCounter;
var outputFolder = config.files.incomingDocumentsDir + '/' + outputChildFolder;
++documentCounter;
yield fs.mkdir(outputFolder)
var xmlFileName = documentData.code + "-" + creationDateLongString + ".xml";
var pdfFileName = documentData.code + "-" + creationDateLongString + ".pdf";
const pages = [];
// `const page` creates a fresh per-iteration binding -- this is the behavior
// the question asks about; under Node 4 it requires strict mode (see answer).
for(var index=0;index < documentData.pages.length; ++index) {
const page = documentData.pages[index];
// Decode the base64 page payload. (new Buffer() is deprecated in later Node
// versions -- Buffer.from() is the modern equivalent.)
var data = new Buffer(page, "base64");
var dataEncoding = imageType(data).mime === "image/png" ? "png" : "jpg";
var fileName = "page" + index + "." + dataEncoding;
var targetFilePath = outputFolder + "/" + fileName
yield fs.writeFile(targetFilePath,data);
pages.push(fileName);
}
...
}
What I don't understand is why in the above code page only gets assigned once, on the first iteration, and holds that same value during the other iterations. So if I have 5 pages I end up 5 times with the data of the first page in that variable.
I am running node 4 without any special arguments or postprocessor. Simply npm run ... which maps to a node src/main/myApp.js in my package.json
I am probably missing something simple here but I've never seen this before when doing client side ES6 code. The big difference of course being that the client side code goes through Babel + Webpack and the server side code is ran directly through node.
Small addendum: if you are wondering why the "old school" for syntax and not something along the lines of pages.forEach(...., it's because this is existing code where I just did a few minor modifications.
This will work as you are expecting in strict mode. Try adding...
"use strict";
You will only see this behavior in environments (like Node) that actually respect and enforce the keyword. Babel simply converts all let and const to var right now to provide ES5 compatibility. To demonstrate, take a look at this Babel example. You can see in the output that const has been changed to var

nodejs: each line in separate file

I want to split a file: each line into a separate file. The initial file is really big. I finished with the code below:
var fileCounter = -1;

// Opens the write stream for the next output chunk (data/part0.txt, ...).
function getWritable() {
  fileCounter++;
  // Bug fix: `writable` was assigned without var/let, leaking an implicit
  // global on every call.
  var writable = fs.createWriteStream('data/part' + fileCounter + '.txt', { flags: 'w' });
  return writable;
}

var readable = fs.createReadStream(file).pipe(split());
readable.on('data', function (line) {
  // Back-pressure: if write() reports a full buffer, pause the reader and
  // resume it once this line has been flushed.
  var flag = getWritable().write(line, function () {
    readable.resume();
  });
  if (!flag) {
    readable.pause();
  }
});
It works but it is ugly. Is there more nodish way to do that? maybe with piping and without pause/resume.
NB: it's not a question about lines/files/etc . The question is about streams and I just try to illustrate it with the problem
You can use Node's built-in readline module.
var fs = require('fs');
var readline = require('readline');

var fileCounter = -1;
var file = "foo.txt";

// Write each line of `file` to its own data/partN.txt file.
readline.createInterface({
  input: fs.createReadStream(file),
  terminal: false
}).on('line', function (line) {
  // Bug fix: increment BEFORE opening the stream; the original bumped the
  // counter afterwards, so the first line landed in 'data/part-1.txt'.
  fileCounter++;
  var writable = fs.createWriteStream('data/part' + fileCounter + '.txt', { flags: 'w' });
  writable.write(line);
  // NOTE(review): the stream is never end()ed; acceptable for a short-lived
  // script, but descriptors stay open until the process exits.
});
Note that this will lose the last line of the file if there is no newline at the end, so make sure your last line of data is followed by a newline.
Also note that the docs indicate that it is Stability index 2, meaning:
Stability: 2 - Unstable The API is in the process of settling, but has
not yet had sufficient real-world testing to be considered stable.
Backwards-compatibility will be maintained if reasonable.
How about the following? Did you try it? The pause and resume logic isn't really needed here.
var split = require('split');
var fs = require('fs');

var fileCounter = -1;
// Each 'data' event from split() is one line of the input file.
var readable = fs.createReadStream(file).pipe(split());
readable.on('data', function (line) {
  fileCounter++;
  var writable = fs.createWriteStream('data/part' + fileCounter + '.txt', { flags: 'w' });
  // Bug fix: end(line) writes the line, flushes, and then closes the stream.
  // The original write()+close() pair could close the descriptor before the
  // buffered data was flushed, truncating the output file.
  writable.end(line);
});
Piping dynamically would be hard...
EDIT: You could create a writable (so pipe()able) object that would, on('data') event, do the "create file, open it, write the data, close it" but it :
wouldn't be reusable
wouldn't follow the KISS principle
would require a special and specific logic for file naming (It would accept a string pattern as an argument in its constructor with a placeholder for the number. Etc...)
I really don't recommend that path, or you're going to take ages implementing a not-really-reusable module. Though, it would make a good writable-stream implementation exercise.

nodejs child_process.spawn msdeploy.exe with space in dest site

I am trying to use child_process.spawn with msdeploy.exe to automate deployement of some applications in IIS.
Whenever I have a space in my destination site name, msdeploy crashes.
var command = 'C:/Program Files/IIS/Microsoft Web Deploy V3/msdeploy.exe';
var args = [];
args.push('-verb=sync');
args.push('-source:iisApp=C:/Users/PATH_TO_DEPLOY/dist');
// spawn() passes each array element as ONE argv entry, so the embedded space
// needs no extra quoting here.
args.push('-dest:iisApp=Default Web Site/test');

// Bug fix: the original named this variable `process`, shadowing Node's
// global `process` object for the rest of the script.
var child = spawn(command, args);
child.stdout.on('data', function (data) { grunt.log.write(data) });
child.stderr.on('data', function (data) { grunt.log.error(data); });
child.on('exit', function (code) {
  if (code !== 0) {
    grunt.fail.warn('Something went wrong');
  }
  done();
});
I've tried some other alternatives, like putting '-dest:iisApp="Default Web Site/test"', but msdeploy gives me an error too.
This error is like: Argument '"-dest:iisApp=Default Web Site/haha"' not recognized. All arguments must begin with "-" char.
When I try to escape the space character, or put quotes as described above, I get a similar error.
Is this a bug in Node.js? Or have I made a mistake somewhere?
Thanks.
How to accomplish this:
var path = require('path');
var cp = require('child_process');

var full_cmd = '/path/to/dir with space/program.exe';
// Run from the executable's own directory so the command string itself needs
// no quoting -- spawn() does not accept double quotes in the command.
var cmd = '.' + path.sep + path.basename(full_cmd);
// Each argument may be wrapped in double-quotes (Windows only -- fails on Unix).
var cmd_args = ['"--import-path=/path/to/dir with space/import_file"'];
var cmd_opts = {
  cwd: path.dirname(full_cmd),
  encoding: 'utf8'
};
// Bug fix: use the built-in `process.platform` instead of the third-party
// `platform` package (whose `os` property is an object, not a function, so
// `platform.os()` would throw).
if (process.platform === 'win32') {
  // Pass argv through verbatim so our manual quoting survives.
  cmd_opts.windowsVerbatimArguments = true;
}
var proc = cp.spawn(
  cmd,
  cmd_args,
  cmd_opts
);
The only way this doesn't work is if 'program.exe' is named something like 'program name with space.exe'

node.js: using a function in several modules

I am trying to define a function, which can be used in all parts of my program. The program consists of several files with code, and I define the function in the first part like this:
file 1:
// file 1: open the debug log sink, define debug(), then kick off part2.
var debug_log = fs.createWriteStream('.//debug.log', { flags: 'w' });

// Append one formatted entry per line to debug.log.
var debug = function (entry) {
  debug_log.write(util.format(entry) + '\n');
};

var part2 = require('./somefile.js');
part2.part2(function (result) {
  // some work is done with the result
});
file 2:
// file 2: does some work, reports progress, and hands the result back.
function part2(callback){
//some work is done with initial data
debug('done'); //here is where I am trying to use the function -- but `debug`
               //is not defined in this file's scope, so this line throws a
               //ReferenceError (this is the bug the question is about)
callback(data); // NOTE(review): `data` is not defined in this snippet either;
                // presumably produced by the elided "some work" -- verify
}
exports.part2 = part2;
The important part is the function "debug" which I am using to log my results. I was using console.log before with some small changes like this:
var console_log = fs.createWriteStream('./' + '/console.log', {flags : 'w'});
var log_stdout = process.stdout;
console.log = function(d){
console_log.write(util.format(d) + '\n');
log_stdout.write(util.format(d) + '\n');
};
and it worked fine in every part of the program, so why doesn't the other (similar) function work? Is it because console.log was already defined beforehand (by default)?
Yes.
debug is not defined anywhere in file 2, so it does not work in file 2.
You can use events, this is a example from cordova code:
//file_events.js
// Shared singleton EventEmitter: every require() of this module receives the
// SAME instance, so an event emitted in one file is observable in another.
module.exports = new (require('events').EventEmitter)();
//main.js
var events = require('./file_events.js');
var debug_log = fs.createWriteStream('./' + '/debug.log', {flags : 'w'});
// Append one formatted entry per line to debug.log.
var debug = function(d){
debug_log.write(util.format(d) + '\n');
};
var part2 = require('./somefile.js');
part2.part2(function(result){
//some work is done with the result
});
// Route every "log" event through the debug writer.
// NOTE(review): this listener is registered AFTER part2.part2() runs; if
// part2 emits 'log' synchronously, that first event is missed -- verify.
events.on("log", debug)
//somefile.js
var events = require('./file_events.js');
function part2(callback){
//some work is done with initial data
events.emit('log', 'done'); //emit an event caught by the listener in main.js
callback(data); // NOTE(review): `data` is not defined in this snippet; verify
}
exports.part2 = part2;
PS: The code is not tested but should work with small fixes. The main strategy is to call the function via the events library.
I think you'll want to take a look at:
http://nodejs.org/api/modules.html
To access the debug function from other files, you'll want to expose your debug function in module.exports
In file1 (lets call it debug.js):
// debug.js -- writes formatted entries to ./debug.log and exposes debug()
// so that other modules can require() it.
var debug_log = fs.createWriteStream('.//debug.log', { flags: 'w' });

// Append one formatted entry per line to debug.log.
function debug(message) {
  debug_log.write(util.format(message) + '\n');
}

var part2 = require('./somefile.js');
part2.part2(function (result) {
  // some work is done with the result
});

module.exports = { debug: debug };
Then in file2:
// file 2: pulls the shared debug() helper out of debug.js via its exports.
var debug = require('./debug').debug; //assuming debug.js is in the same directory
function part2(callback){
//some work is done with initial data
debug('done'); //here is where I am trying to use the function
callback(data); // NOTE(review): `data` is not defined in this snippet
                // (carried over from the question's code); verify
}
exports.part2 = part2;

Resources