How to use fs.createReadStream() synchronously without a callback - node.js

I am planning to copy a file's contents out of a zip archive and place the binary contents in another file, which will be used when I require a package.
Example:
func_name(CURRENT_DIR);
const MainController = require('./src/controllers/mainController');
// mainController requires the binary file that is created by the func_name function
controller = new MainController(
    context,
    db2ConnectOutputChannel,
    undefined /*vscodeWrapper*/
);
context.subscriptions.push(controller);
controller.activate();
func_name definition:
var odbcBindingsNode;
var ODBC_BINDINGS = path.resolve(CURRENT_DIR, 'node_modules/ibm_db/build/Release/odbc_bindings.node');
odbcBindingsNode = 'build/Release/odbc_bindings_e' + electron_version + '.node';
var readStream = fs.createReadStream(BUILD_FILE);
readStream.pipe(unzipper.Parse())
    .on('entry', function (entry) {
        if (entry.path === odbcBindingsNode) {
            entry.pipe(fstream.Writer(ODBC_BINDINGS));
        } else {
            entry.autodrain();
        }
    })
    .on('error', function (e) {
        console.log('Installation Failed! \n', e);
    })
    .on('finish', function () {
        console.log("\n" +
            "===================================\n" +
            "installed successfully!\n" +
            "===================================\n");
    })
The problem is that the first function does not wait until the second one completes. Execution moves on to the next line and requires the mainController file, which in turn requires this .node file and fails because the .node file does not exist yet; it is only created after the require has already run. Is there a way to make this synchronous?
I tried a callback, but that fails because require cannot be used inside the callback.
Callback code:
function akhil(CURRENT_DIR) {
    var BUILD_FILE = path.resolve(CURRENT_DIR, 'folder/build.zip');
    var odbcBindingsNode;
    var ODBC_BINDINGS = path.resolve(CURRENT_DIR, 'folder/build/Release/odbc_bindings.node');
    odbcBindingsNode = 'build/Release/odbc_bindings_e' + electron_version + '.node';
    var readStream = fs.createReadStream(BUILD_FILE);
    /*
     * unzipper will parse the build.zip file content and
     * then check for the odbcBindingsNode (node binary);
     * when it finds that binary file, fstream.Writer will
     * write the same node binary under the name
     * odbc_bindings.node, and the other binary files and
     * build.zip will be discarded.
     */
    readStream.pipe(unzipper.Parse())
        .on('entry', function (entry) {
            if (entry.path === odbcBindingsNode) {
                entry.pipe(fstream.Writer(ODBC_BINDINGS));
            } else {
                entry.autodrain();
            }
        })
        .on('error', function (e) {
            console.log('Installation Failed! \n', e);
        })
        .on('finish', function () {
            console.log("\n" +
                "===================================\n" +
                "installed successfully!\n" +
                "===================================\n");
            console.log("This is rebuild");
            const MainController = require('./src/controllers/mainController');
            controller = new MainController(
                context,
                db2ConnectOutputChannel,
                undefined /*vscodeWrapper*/
            );
            context.subscriptions.push(controller);
            controller.activate();
        })
    return 1;
}

Using the ADM-zip package I was able to solve this issue.
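For reference, a minimal sketch of that approach (the paths and the extractBinding name mirror the question's callback code and are assumptions); adm-zip reads and writes synchronously, so a require on the next line only runs after the binary exists:
var fs = require('fs');
var path = require('path');
var AdmZip = require('adm-zip');
function extractBinding(CURRENT_DIR, electron_version) {
    var BUILD_FILE = path.resolve(CURRENT_DIR, 'folder/build.zip');
    var ODBC_BINDINGS = path.resolve(CURRENT_DIR, 'folder/build/Release/odbc_bindings.node');
    var odbcBindingsNode = 'build/Release/odbc_bindings_e' + electron_version + '.node';
    var zip = new AdmZip(BUILD_FILE);
    // readFile returns the zip entry's contents as a Buffer, synchronously
    var data = zip.readFile(odbcBindingsNode);
    // write the buffer out under the new name; once this returns,
    // the caller can safely require() the .node file
    fs.writeFileSync(ODBC_BINDINGS, data);
}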

Related

Why return doesn't work in NodeJS/Electron

I have a problem with my NodeJS script.
Basically I want to add every file path to an array then display it in the bash console.
But when I try, it gives me undefined.
Here is my code:
const { app, BrowserWindow } = require('electron');
const fs = require('fs');
const path = require('path');
function repList() {
    var directoryPath = path.join('Q:/Programmes');
    let forbiddenDir = [".VERSIONS", "INSTALL"];
    fs.readdir(directoryPath, function (err, files) { // Scans the files in the directory
        if (err) {
            return console.log('Unable to scan directory: ' + err);
        }
        else {
            files.forEach(function (file) { // Loops through each file
                var name = directoryPath + "/" + file;
                if (forbiddenDir.includes(file)) { // Don't accept the file if invalid
                    console.log(`${file} is a forbidden name.`);
                }
                else { // Filename is valid
                    fs.stat(name, (error, stats) => {
                        if (stats.isDirectory()) { // If directory...
                            tabRep.push(name); // ... add the full path to the tabRep array
                        }
                        else if (error) {
                            console.error(error);
                        }
                    });
                }
            }); // End of loop
            return tabRep; // <-- THIS RETURN DOESN'T WORK
        }
    });
}
app.whenReady().then(() => {
    console.log(repList());
})
It gives me this output instead of tabRep's elements:
undefined
.VERSIONS is a forbidden name.
INSTALL is a forbidden name.
Inside the Programmes folder:
\ Programmes
    \ .VERSIONS
    \ Folder1
    \ File1
    \ Folder2
    \ INSTALL
    \ FolderN
    \ FileN
If anyone could give me some help, it would be really appreciated.
fs.readdir() expects a callback function as its second parameter (you passed that). The return you point at is the return of the callback function, not the return of the repList() function. Please read about async functions and callbacks in JavaScript to fully understand this concept, as it is very important in JavaScript. Also, your function repList() does not return anything! And I think the declaration of the variable tabRep is missing.
Until you are comfortable with that, use the synchronous variant, fs.readdirSync(), like so:
const { app, BrowserWindow } = require('electron');
const fs = require('fs');
const path = require('path');
function repList() {
    var directoryPath = path.join('Q:/Programmes');
    let forbiddenDir = [".VERSIONS", "INSTALL"];
    const files = fs.readdirSync(directoryPath)
    const tabRep = []
    files.forEach(function (file) { // Loops through each file
        var name = directoryPath + "/" + file;
        if (forbiddenDir.includes(file)) { // Don't accept the file if invalid
            console.log(`${file} is a forbidden name.`);
        }
        else { // Filename is valid
            const stats = fs.statSync(name)
            if (stats.isDirectory()) { // If directory...
                tabRep.push(name); // ... add the full path to the tabRep array
            }
        }
    }); // End of loop
    return tabRep; // <-- THIS RETURN DOES WORK NOW since the function executes synchronously.
}

How to use readline in NodeJs on an event (wait stream close)

I'm creating a bot that, when it receives a message, starts reading a text file and replies to the message with the file's contents.
Unfortunately, I can't get out of this asynchronous hell and I only get errors, undefined, or pending promises.
My last experiment was this:
const fs = require('fs');
const readline = require('readline');
// bot.listen("message").reply(responseText())
function readFile(file) {
    var text = '';
    var readInterface = readline.createInterface({
        input: fs.createReadStream(file),
        terminal: false
    });
    readInterface.on('line', function (line) {
        linea = line.trim();
        console.log(linea);
        text += linea;
    }).on('close', function () {
        return text;
    });
}
async function responseText() {
    var content = await readFile("file.txt");
    content.then(function (data) {
        return data;
    })
}
What I would like, then, is to delay the response until I have the contents of the file.
I know that Node is based on async, but I can't figure out how to handle it!
Thanks all
If you want to use async/await, you need to create a promise and return it.
const fs = require('fs');
const readline = require('readline');
function readFile(file) {
    return new Promise((res, rej) => {
        try {
            var text = '';
            var readInterface = readline.createInterface({
                input: fs.createReadStream(file),
                terminal: false
            });
            readInterface
                .on('line', function (line) {
                    text += line.trim();
                })
                .on('close', function () {
                    res(text);
                });
        } catch (err) {
            rej(err)
        }
    });
}
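With that in place, an async function can simply await it; a sketch of how the asker's responseText could use it:
async function responseText() {
    // await unwraps the promise returned by readFile
    const content = await readFile("file.txt");
    return content; // no .then() needed inside an async function
}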
If you're using express.js, or any framework built on top of it, you can simply pipe the read stream to the response, since Express's responses are streams to begin with:
const es = require('event-stream')
...
let getFileStream = path => (
    fs.createReadStream(path)
        .pipe(es.split())
        .pipe(es.map(function (data, cb) {
            cb(null, inspect(JSON.parse(data)))
        }))
);
router.get('/message', function (req, res, next) {
    let file$ = getFileStream(yourFilePath)
    file$.on('error', next).pipe(res)
})
If you need to transform the file content, you can use a transform stream or, as shown in the example above, a synchronous event-stream mapping. The idea is to always work with the file content at the stream level, to avoid loading the entire file into memory.
You don't really want to buffer the whole file content in memory; that can quickly become a problem with huge files on a busy day. What you need is to pipe the file stream directly to the browser, and the same principle applies to any other kind of consumer.
Of course, if the mechanism is all internal, you should only pass the file path along (or the actual stream) until you need to actually open the file and do something with the content. In that case, you go back to your stream toolbox, whether it be the native node.js stream API implementation, the event-stream package, or some kind of observable library like rxjs.
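For illustration, a minimal transform stream in that spirit (a sketch; the upper-casing step is just a stand-in for whatever transformation you need):
const fs = require('fs');
const { Transform } = require('stream');
// a Transform stream rewrites chunks in flight, so the whole file
// never has to be buffered in memory
const upperCase = new Transform({
    transform(chunk, encoding, callback) {
        callback(null, chunk.toString().toUpperCase());
    }
});
// fs.createReadStream('file.txt').pipe(upperCase).pipe(res);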
I had a similar issue in an app that watches a directory for new files, reads the file(s) and returns derived data based on the file content. My Reader function is based on this async example from the nodejs docs. I return options, which contains the context, only after the file is read completely.
const { createReadStream } = require('fs')
const { createInterface } = require('readline')
const { once } = require('events')
// Reader.js
async function Reader (options) {
    let { file, cb } = options
    let fileStream = createReadStream(file)
    const readInterface = createInterface({
        input: fileStream,
        crlfDelay: Infinity
    })
    readInterface.on('line', (line) => {
        cb(line)
    })
    await once(readInterface, 'close')
    return options
}
module.exports = Reader
module.exports = Reader
I then have a file which imports my Reader and defines how to use it. I define a callback function to pass to the line event listener, and bind the callback to the options object that I pass to my Reader function. In the readFile function I make sure to return the call to Reader, which is a Promise.
/**
 * @desc callback to instruct what to do with each line read
 *
 * @param {*} line
 */
const readFileLine = function (line) {
    // accumulate each trimmed line onto the bound options object
    this.context += line.trim()
}
/**
 * @desc once the added file is ready to be processed, read it line by line
 * @listens {Event} listens for `process` event
 */
const readFile = (options) => {
    return Reader(options)
}
/**
 * @desc Call the file reader and do what you need with the response
 */
const getResponseFromFiles = (file) => {
    const opts = {}
    opts.cb = readFileLine.bind(opts)
    opts.context = ''
    opts.file = file
    readFile(opts)
        .then(data => {
            process.exitCode = 0
            console.log(data)
            return data
        })
        .catch(err => {
            process.exitCode = 1
            console.log(err.message)
        })
}

Gulp: Abnormal behavior of program

I'm new to Gulp and I'm having a problem with it. Here are the points I want done:
- I want to look up a file that has a .storyboard extension (this is already DONE).
- I want to perform a task whenever that file's content is changed.
- I want to watch that file, and when something in it changes, rewrite its content, removing all the content that was already in the file.
When I make changes to the .storyboard file, it just keeps on displaying the messages done and The file has been saved!
Here is my code:
// fs to read and write files, path for iterating directories
var fs = require('fs'),
    path = require('path');
// DOMParser to parse XML
var DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } });
// Gulp for detecting changes
var gulp = require('gulp');
var mainStoryBoardFile;
function crawl(dir) {
    // console.log('[+]', dir);
    var files = fs.readdirSync(dir);
    for (var file in files) {
        var next = path.join(dir, files[file]);
        // iterate through files to check whether next is a file or directory
        if (fs.lstatSync(next).isDirectory()) {
            // if it's a directory, dive into it
            crawl(next);
        } else if (next.indexOf('.storyboard') >= 0) {
            // if it's a file, check whether it is a .storyboard file
            mainStoryBoardFile = next;
            mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
        };
    }
}
// calling the function
crawl(__dirname);
var newFilePath = './data.xml'
var document;
var dataFound;
// What to do
gulp.task('read', function (done) {
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(
        dataFound.toString()
    );
    done();
});
gulp.task('write', function (done) {
    fs.writeFile(mainStoryBoardFile, '', function () { console.log('done') })
    fs.writeFile(mainStoryBoardFile, document, (err) => {
        if (err) throw err;
        console.log('The file has been saved!');
    });
    done();
});
gulp.task('watch', function (done) {
    gulp.watch(mainStoryBoardFile, gulp.series('read', 'write'));
});
Here is a solution to this problem. You can watch a single file for changes and run a function whenever it changes; in the XML case, when the file changes you can add new properties or attributes, or create new elements in the XML file.
// Dependencies
// fs to read and write files, path for iterating directories
var fs = require('fs'),
    path = require('path'),
    DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } }),
    gulp = require('gulp'),
    arrayOfControls = require('./object.json'),
    RandExp = require('randexp');
console.log("GulpService has been started\n");
function crawl(dir) {
    var files = fs.readdirSync(dir);
    for (var file in files) {
        var next = path.join(dir, files[file]);
        // iterate through files to check whether next is a file or directory
        if (fs.lstatSync(next).isDirectory()) {
            // if it's a directory, dive into it
            crawl(next);
        } else if (next.indexOf('.storyboard') >= 0) {
            // if it's a file, check whether it is a .storyboard file
            mainStoryBoardFile = next;
            mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
        }
    }
}
// calling the function
crawl(__dirname);
var mainStoryBoardFile;
var document, dataFound;
function readWrite() {
    crawl(__dirname);
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(
        dataFound.toString()
    );
    // writeFileSync is synchronous and takes no callback:
    // empty the file, then write the parsed document back
    fs.writeFileSync(mainStoryBoardFile, '');
    fs.writeFileSync(mainStoryBoardFile, document.toString());
    console.log('The file has been saved!');
}
var watcher = gulp.watch(mainStoryBoardFile);
watcher.on('change', function (path, stats) {
    readWrite();
    console.log('File ' + path + ' was changed');
    // remove and re-add the watch, so the write above
    // does not retrigger this handler
    watcher.unwatch(mainStoryBoardFile);
    watcher.add(mainStoryBoardFile);
});

Using gulp-watch with babel.js

Below is a Gulp ES6 transpilation task. It works fine, but I'm trying to replace gulp.watch with the gulp-watch plugin so new files will be caught. The problem is that gulp-watch isn't giving me what gulp.watch does in the callback, and I'm not sure what to do about it.
Here's my original working task:
var gulp = require('gulp'),
    rename = require('gulp-rename'),
    plumber = require('gulp-plumber'),
    gprint = require('gulp-print'),
    notify = require('gulp-notify'),
    babel = require('gulp-babel');
gulp.task('default', function () {
    return gulp.watch('../**/**-es6.js', function (obj) {
        if (obj.type === 'changed') {
            gulp.src(obj.path, { base: './' })
                .pipe(plumber({
                    errorHandler: function (error) { /* elided */ }
                }))
                .pipe(babel())
                .pipe(rename(function (path) {
                    path.basename = path.basename.replace(/-es6$/, '');
                }))
                .pipe(gulp.dest(''))
                .pipe(gprint(function (filePath) { return "File processed: " + filePath; }));
        }
    });
});
And here's all that I have so far with gulp-watch:
var gulp = require('gulp'),
    rename = require('gulp-rename'),
    plumber = require('gulp-plumber'),
    gprint = require('gulp-print'),
    notify = require('gulp-notify'),
    babel = require('gulp-babel'),
    gWatch = require('gulp-watch');
gulp.task('default', function () {
    return gWatch('../**/**-es6.js', function (obj) {
        console.log('watch event - ', Object.keys(obj).join(','));
        console.log('watch event - ', obj.event);
        console.log('watch event - ', obj.base);
        return;
        if (obj.type === 'changed') {
            gulp.src(obj.path, { base: './' })
                .pipe(plumber({
                    errorHandler: function (error) { /* elided */ }
                }))
                .pipe(babel())
                .pipe(rename(function (path) {
                    path.basename = path.basename.replace(/-es6$/, '');
                }))
                .pipe(gulp.dest(''))
                .pipe(gprint(function (filePath) { return "File processed: " + filePath; }));
        }
    });
});
The output of the logging is this:
watch event - history,cwd,base,stat,_contents,event
watch event - change
watch event - ..
How do I get gulp-watch to give me the info I had before, or, how can I change my task's code to get this working again with gulp-watch?
According to the tests, obj.relative should contain the relative filename, and obj.path still holds the absolute file path, just as it did in your original code. The callback also receives a Vinyl object, which is documented here: https://github.com/wearefractal/vinyl
You probably can't see those properties in your logs because Object.keys doesn't enumerate properties from the prototype chain.
Using a for..in loop, you should be able to see all the properties.
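For example, a quick way to dump everything, reusing the watch callback from the question (a sketch):
gWatch('../**/**-es6.js', function (obj) {
    // for..in walks the prototype chain, unlike Object.keys
    for (var key in obj) {
        console.log('watch event - ', key);
    }
});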

Need to ZIP an entire directory using Node.js

I need to zip an entire directory using Node.js. I'm currently using node-zip and each time the process runs it generates an invalid ZIP file (as you can see from this Github issue).
Is there another, better, Node.js option that will allow me to ZIP up a directory?
EDIT: I ended up using archiver
writeZip = function (dir, name) {
    var zip = new JSZip(),
        code = zip.folder(dir),
        output = zip.generate(),
        filename = ['jsd-', name, '.zip'].join('');
    fs.writeFileSync(baseDir + filename, output);
    console.log('creating ' + filename);
};
Sample values for the parameters:
dir = /tmp/jsd-<randomstring>/
name = <randomstring>
UPDATE: For those asking about the implementation I used, here's a link to my downloader:
I ended up using archiver lib. Works great.
Example
var file_system = require('fs');
var archiver = require('archiver');
var output = file_system.createWriteStream('target.zip');
var archive = archiver('zip');
output.on('close', function () {
    console.log(archive.pointer() + ' total bytes');
    console.log('archiver has been finalized and the output file descriptor has closed.');
});
archive.on('error', function (err) {
    throw err;
});
archive.pipe(output);
// append files from a sub-directory, putting its contents at the root of the archive
archive.directory(source_dir, false);
// append files from a sub-directory, naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');
archive.finalize();
I'm not going to show something new, just wanted to summarise the solutions above for those who like Promises as much as I do 😉.
const fs = require('fs');
const archiver = require('archiver');
/**
 * @param {String} sourceDir: /some/folder/to/compress
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectory(sourceDir, outPath) {
    const archive = archiver('zip', { zlib: { level: 9 } });
    const stream = fs.createWriteStream(outPath);
    return new Promise((resolve, reject) => {
        archive
            .directory(sourceDir, false)
            .on('error', err => reject(err))
            .pipe(stream);
        stream.on('close', () => resolve());
        archive.finalize();
    });
}
Hope it will help someone 🤞
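Usage might look like this (the paths are placeholders):
zipDirectory('/some/folder/to/compress', '/path/to/created.zip')
    .then(() => console.log('zip created'))
    .catch(err => console.error(err));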
Use Node's native child_process API to accomplish this.
No need for third-party libs. Two lines of code.
const child_process = require("child_process");
child_process.execSync(`zip -r <DESIRED_NAME_OF_ZIP_FILE_HERE> *`, {
    cwd: <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
});
The example above showcases the synchronous API. You can also use child_process.exec(command, options, callback) if you want async behavior. There are a lot more options you can specify other than cwd to further fine-tune your request.
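A non-blocking sketch of the same idea (the archive name and folder path are placeholders):
const { exec } = require("child_process");
// exec runs the shell command asynchronously and fires the callback when it exits
exec("zip -r archive.zip *", { cwd: "/folder/to/zip" }, (error, stdout, stderr) => {
    if (error) {
        console.error(`zip failed: ${stderr}`);
        return;
    }
    console.log("archive.zip created");
});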
If you don't have the zip utility:
This question specifically asks about the zip utility for archiving/compression purposes, so this example assumes you have the zip utility installed on your system. For completeness' sake, some operating systems may not have the utility installed by default. In that case you have at least three options:
1. Work with the archiving/compression utility that is native to your platform. Replace the shell command in the above Node.js code with the equivalent from your system. For example, Linux distros usually come with tar/gzip utilities:
tar -czf <DESIRED_NAME_OF_ZIP_FILE_HERE> <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
This is a nice option, as you don't need to install anything new onto your operating system or manage another dependency (kind of the whole point of this answer).
2. Obtain the zip binary for your OS/distribution. For example, on Ubuntu: apt install zip. The zip utility has been tried and tested for decades; it's fairly ubiquitous and a safe choice. Do a quick Google search or go to the creator, Info-ZIP's, website for downloadable binaries.
3. Use a third-party library/module (of which there are plenty on npm). I don't prefer this option. However, if you don't really care to understand the native methods and introducing a new dependency is a non-issue, this is also a valid option.
This is another library which zips the folder in one line:
zip-local
var zipper = require('zip-local');
zipper.sync.zip("./hello/world/").compress().save("pack.zip");
Archive.bulk is now deprecated; the new method to use for this is glob:
var fs = require('fs');
var archiver = require('archiver');
var fileName = 'zipOutput.zip';
var fileOutput = fs.createWriteStream(fileName);
var archive = archiver('zip');
fileOutput.on('close', function () {
    console.log(archive.pointer() + ' total bytes');
    console.log('archiver has been finalized and the output file descriptor has closed.');
});
archive.pipe(fileOutput);
archive.glob("../dist/**/*"); // some glob pattern here
archive.glob("../dist/.htaccess"); // another glob pattern
// add as many as you like
archive.on('error', function (err) {
    throw err;
});
archive.finalize();
To include all files and directories:
archive.bulk([
    {
        expand: true,
        cwd: "temp/freewheel-bvi-120",
        src: ["**/*"],
        dot: true
    }
]);
It uses node-glob (https://github.com/isaacs/node-glob) underneath, so any matching expression compatible with that will work.
To pipe the result to the response object (for scenarios where you need to download the zip rather than store it locally):
archive.pipe(res);
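In an Express app that might look like the following sketch (the route, folder, and file names are assumptions):
const archiver = require('archiver');
app.get('/download', function (req, res) {
    res.attachment('dist.zip'); // sets the Content-Disposition download header
    const archive = archiver('zip');
    archive.on('error', function (err) {
        res.status(500).send(err.message);
    });
    archive.pipe(res); // stream the zip straight to the client
    archive.glob('**/*', { cwd: '../dist' }); // same glob approach as above
    archive.finalize();
});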
Sam's hints for accessing the content of the directory worked for me.
src: ["**/*"]
I have found this small library that encapsulates what you need.
npm install zip-a-folder
const { zip } = require('zip-a-folder');
await zip('/path/to/the/folder', '/path/to/archive.zip');
https://www.npmjs.com/package/zip-a-folder
Adm-zip has problems just compressing an existing archive (https://github.com/cthackers/adm-zip/issues/64), as well as corruption when compressing binary files.
I've also run into compression corruption issues with node-zip (https://github.com/daraosn/node-zip/issues/4).
node-archiver is the only one that seems to work well for compression, but it doesn't have any uncompress functionality.
Since archiver has not been compatible with new versions of webpack for a long time, I recommend using zip-lib.
var zl = require("zip-lib");
zl.archiveFolder("path/to/folder", "path/to/target.zip").then(function () {
    console.log("done");
}, function (err) {
    console.log(err);
});
As of today, I'm using AdmZip and it works great:
import AdmZip = require('adm-zip');
export async function archiveFile() {
    try {
        const zip = new AdmZip();
        const outputDir = "/output_file_dir.zip";
        zip.addLocalFolder("./yourFolder");
        zip.writeZip(outputDir);
    } catch (e) {
        console.log(`Something went wrong ${e}`);
    }
}
An import-based (ES module) version of the answer at https://stackoverflow.com/a/51518100:
To zip a single directory:
import archiver from 'archiver';
import fs from 'fs';
export default zipDirectory;
/**
 * From: https://stackoverflow.com/a/51518100
 * @param {String} sourceDir: /some/folder/to/compress
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectory(sourceDir, outPath) {
    const archive = archiver('zip', { zlib: { level: 9 } });
    const stream = fs.createWriteStream(outPath);
    return new Promise((resolve, reject) => {
        archive
            .directory(sourceDir, false)
            .on('error', err => reject(err))
            .pipe(stream);
        stream.on('close', () => resolve());
        archive.finalize();
    });
}
To zip multiple directories:
import archiver from 'archiver';
import fs from 'fs';
export default zipDirectories;
/**
 * Adapted from: https://stackoverflow.com/a/51518100
 * @param {String[]} sourceDirs: list of folders to compress
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectories(sourceDirs, outPath) {
    const archive = archiver('zip', { zlib: { level: 9 } });
    const stream = fs.createWriteStream(outPath);
    return new Promise((resolve, reject) => {
        var result = archive;
        sourceDirs.forEach(sourceDir => {
            result = result.directory(sourceDir, false);
        });
        result
            .on('error', err => reject(err))
            .pipe(stream);
        stream.on('close', () => resolve());
        archive.finalize();
    });
}
You can try it in a simple way:
Install zip-dir:
npm install zip-dir
and use it:
var zipdir = require('zip-dir');
let foldername = src_path.split('/').pop()
zipdir(<<src_path>>, { saveTo: 'demo.zip' }, function (err, buffer) {
    // demo.zip has been written by this point; buffer holds the zip contents
});
I ended up wrapping archiver to emulate JSZip, as refactoring my whole project would take too much effort. I understand archiver might not be the best choice, but here you go.
// USAGE:
const zip = JSZipStream.to(myFileLocation)
    .onDone(() => {})
    .onError(() => {});
zip.file('something.txt', 'My content');
zip.folder('myfolder').file('something-inFolder.txt', 'My content');
zip.finalize();
// NodeJS file content:
var fs = require('fs');
var path = require('path');
var archiver = require('archiver');
function zipper(archive, settings) {
    return {
        output: null,
        streamToFile(dir) {
            const output = fs.createWriteStream(dir);
            this.output = output;
            archive.pipe(output);
            return this;
        },
        file(location, content) {
            if (settings.location) {
                location = path.join(settings.location, location);
            }
            archive.append(content, { name: location });
            return this;
        },
        folder(location) {
            if (settings.location) {
                location = path.join(settings.location, location);
            }
            return zipper(archive, { location: location });
        },
        finalize() {
            archive.finalize();
            return this;
        },
        onDone(method) {
            this.output.on('close', method);
            return this;
        },
        onError(method) {
            this.output.on('error', method);
            return this;
        }
    };
}
exports.JSZipStream = {
    to(destination) {
        console.log('stream to', destination);
        const archive = archiver('zip', {
            zlib: { level: 9 } // Sets the compression level.
        });
        return zipper(archive, {}).streamToFile(destination);
    }
};
