readFile synchronously in Node.js

I am new to Node.js and just started learning. I need to read 5 JSON files and place their contents in an array. I have created 2 functions: readDirectory and processFile.
let transactionArray = [];

router.get('/', (req, res) => {
  // joining path of directory
  const directoryPath = path.join(__dirname, '../data');
  readDirectory(directoryPath);
  res.send(JSON.stringify(transactionArray))
})
readDirectory takes the directory path and reads the filenames.
function readDirectory(directoryPath) {
  // passing directoryPath and callback function
  fs.readdir(directoryPath, function (err, files) {
    // handling error
    if (err) {
      return console.log('Unable to scan directory: ' + err);
    }
    // listing all files using map
    let fileSummary = files.map(file => {
      // get the filename
      let categoryName = ''
      if (file.includes('category1')) {
        categoryName = 'category1'
      } else if (file.includes('category2')) {
        categoryName = 'category2'
      } else {
        categoryName = 'Others'
      }
      // read the file
      const filePath = directoryPath + '/' + file
      fs.readFile(filePath, 'utf8', (err, fileContents) => {
        if (err) {
          console.error(err)
          return
        }
        try {
          let data = JSON.parse(fileContents, categoryName)
          processFile(data, categoryName);
        } catch (err) {
          console.error(err)
        }
      })
    })
  });
}
Then each file's data is processed by the processFile function.
function processFile(data, categoryName) {
  let paymentSource = ''
  if (categoryName == 'category1') {
    paymentSource = categoryName + ': ' + categoryName + ' ' + data.currency_code
  } else if (categoryName == 'category2') {
    paymentSource = categoryName + ': ' + data.extra.payer + '-' + data.currency_code
  } else {
    paymentSource = 'Others'
  }
  let transactionDetails = new Transaction(
    data.id,
    data.description,
    categoryName,
    data.made_on,
    data.amount,
    data.currency_code,
    paymentSource)
  transactionArray.push(transactionDetails)
  console.log(transactionArray);
}
The console log is something like this:
[{Transaction1}] [{Transaction1},{Transaction2}] [{Transaction1},{Transaction2},{Transaction3}]
but the result on the UI is only []
While debugging, I noticed that the files are not being read before the response is sent, so I tried using readFileSync, but it did not work. How can I make both functions run synchronously so the route does not return an empty array?

Do some playing around to understand what the fs functions do when they have callbacks, and when they're synchronous. Starting from the code that you have, we can make a few changes so that you don't have to use the synchronous functions from the file system library.
First of all, you need to wait for all the asynchronous tasks to complete before returning the response.
router.get('/', async (req, res) => {
  // joining path of directory
  const directoryPath = path.join(__dirname, '../data')
  readDirectory(directoryPath).then(() => {
    res.send(JSON.stringify(transactionArray))
  }).catch(err => {
    res.status(500).json(err)
  })
})
Secondly, to keep the code as-is and teach you something about promises, let's wrap the first function in a promise.
function readDirectory (directoryPath) {
  return new Promise((resolve, reject) => {
    // passing directoryPath and callback function
    fs.readdir(directoryPath, function (err, files) {
      // handling error: reject so the caller's .catch() sees it
      if (err) {
        console.log('Unable to scan directory: ' + err)
        return reject(err)
      }
      // listing all files using map and waiting for all of them
      Promise.all(
        files.map(file => {
          return new Promise((resolve, reject) => {
            // get the filename
            let categoryName = ''
            if (file.includes('category1')) {
              categoryName = 'category1'
            } else if (file.includes('category2')) {
              categoryName = 'category2'
            } else {
              categoryName = 'Others'
            }
            // read the file
            const filePath = directoryPath + '/' + file
            fs.readFile(filePath, 'utf8', (err, fileContents) => {
              if (err) {
                console.error(err)
                return reject(err)
              }
              try {
                const data = JSON.parse(fileContents)
                // resolve this file's promise once processFile has finished
                processFile(data, categoryName).then(resolve)
              } catch (err) {
                console.error(err)
                reject(err)
              }
            })
          })
        })
      ).then(() => {
        resolve()
      }).catch(err => {
        reject(err)
      })
    })
  })
}
Please refer to the bible (MDN) for JavaScript promises: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
And finally, wrap the processFile function in a promise:
function processFile (data, categoryName) {
  return new Promise((resolve, reject) => {
    let paymentSource = ''
    if (categoryName == 'category1') {
      paymentSource = categoryName + ': ' + categoryName + ' ' + data.currency_code
    } else if (categoryName == 'category2') {
      paymentSource = categoryName + ': ' + data.extra.payer + '-' + data.currency_code
    } else {
      paymentSource = 'Others'
    }
    const transactionDetails = new Transaction(
      data.id,
      data.description,
      categoryName,
      data.made_on,
      data.amount,
      data.currency_code,
      paymentSource)
    transactionArray.push(transactionDetails)
    console.log(transactionArray)
    resolve()
  })
}
What the heck am I doing? I'm just making your code execute its asynchronous tasks, but wait for them to be completed before moving on. Promises are a way to handle this. You could easily pull this off with the fs synchronous functions, but this way you can learn about promises!
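For reference, the same result can be reached more directly with fs.promises and async/await, without hand-rolled promise wrappers. This is only a minimal sketch under two assumptions that differ from the code above: the route builds its own array instead of the module-level transactionArray, and processFile is adapted to resolve with the Transaction it builds (readTransactions is a hypothetical name).

const fsp = require('fs').promises;
const path = require('path');

// Read every file in the directory, parse it, and return the transactions.
async function readTransactions(directoryPath) {
  const files = await fsp.readdir(directoryPath);
  const transactions = [];
  for (const file of files) {
    const categoryName = file.includes('category1') ? 'category1'
      : file.includes('category2') ? 'category2'
      : 'Others';
    const fileContents = await fsp.readFile(path.join(directoryPath, file), 'utf8');
    const data = JSON.parse(fileContents);
    // assumes processFile is adapted to resolve with the Transaction it builds
    transactions.push(await processFile(data, categoryName));
  }
  return transactions;
}

router.get('/', async (req, res) => {
  try {
    const transactions = await readTransactions(path.join(__dirname, '../data'));
    res.json(transactions);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});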

Related

Unzip multiple files with Node.js and StreamZip

I am trying to unzip multiple files using Node.js and StreamZip. This is what I am trying:
export const unpackZip = async (folder: string) => {
  const zipPath = await getFilesByExtension(folder, ".zip").then((zipFile) => {
    console.log("ZIPFILE", zipFile)
    return zipFile
  })
  console.log("DEBUG PATH: ", zipPath)
  let zip: StreamZip
  await Promise.all(zipPath.map((zipFile) => {
    return new Promise<string>((resolve, reject) => {
      zip = new StreamZip({ storeEntries: true, file: `${folder}/${zipFile}` });
      zip.on('error', function (err) { console.error('[ERROR]', err); });
      zip.on('ready', function () {
        console.log('All entries read: ' + zip.entriesCount);
        console.log(zip.entries());
      });
      zip.on('entry', function (entry) {
        const pathname = path.resolve('./tmp', entry.name);
        if (/\.\./.test(path.relative('./tmp', pathname))) {
          console.warn("[zip warn]: ignoring maliciously crafted paths in zip file:", entry.name);
          return;
        }
        if ('/' === entry.name[entry.name.length - 1]) {
          console.log('[DIR]', entry.name);
          return;
        }
        zip.extract(entry, `tmp/${entry.name}`, (err?: string, res?: number | undefined) => {
          resolve(entry.name)
        })
      })
    })
  }))
};
The problem is that it does indeed go through all the zip files in the folder (getFilesByExtension returns an array of filename strings like [asdf.zip, asdf1.zip, ...]),
but the actual file content from all the unpacked zips is from the first zip. A screenshot may say more than I can:
Can someone spot the problem in the code? I am kind of clueless where the issue could be :/
Any help would be awesome!! Thanks!!
Never mind... I cannot use map here. If I use a for loop and await my new Promise, it works:
for (const zipFile of zipPath) {
  await new Promise<string>((resolve, reject) => {
    zip = new StreamZip({ storeEntries: true, file: `${folder}/${zipFile}` });
    zip.on('error', function (err) { console.error('[ERROR]', err); });
    zip.on('ready', function () {
      console.log('All entries read: ' + zip.entriesCount);
      console.log(zip.entries());
    });
    zip.on('entry', function (entry) {
      const pathname = path.resolve('./tmp', entry.name);
      if (/\.\./.test(path.relative('./tmp', pathname))) {
        console.warn("[zip warn]: ignoring maliciously crafted paths in zip file:", entry.name);
        return;
      }
      if ('/' === entry.name[entry.name.length - 1]) {
        console.log('[DIR]', entry.name);
        return;
      }
      zip.extract(entry, `tmp/${entry.name}`, (err?: string, res?: number | undefined) => {
        resolve(entry.name)
      })
    })
  })
}
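For what it's worth, the Promise.all version probably failed because zip is declared with let outside the map callback, so every 'entry' handler closes over the one shared variable and extracts from whichever StreamZip instance was assigned last. A hedged sketch of how the map version might work if each iteration gets its own instance (the path-traversal and directory checks from above are omitted for brevity):

// Sketch only: a per-iteration `const zip`, so handlers don't share one instance.
await Promise.all(zipPath.map((zipFile) => new Promise((resolve, reject) => {
  const zip = new StreamZip({ storeEntries: true, file: `${folder}/${zipFile}` });
  zip.on('error', (err) => reject(err));
  zip.on('entry', (entry) => {
    // same path checks as in the code above would go here
    zip.extract(entry, `tmp/${entry.name}`, () => resolve(entry.name));
  });
})));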

Force Node.js to wait for a pipe stream in a loop?

I'm trying to import .csv files into SQLite using NodeJS:
function get_files_array(path) {
  let arr = [];
  const files = fs.readdirSync(path);
  for (const file of files) {
    arr.push(path + file);
  }
  return arr;
}

let file_path = "./insert.sql";
let files_array = "./lots_of_csv/"
for (const file of get_files_array(files_array)) {
  let csv2sql = CSV2SQL({
    tableName: table_name,
  });
  let rstream = fs.createReadStream(file);
  let wstream = fs.createWriteStream(file_path);
  let stream = rstream.pipe(csv2sql).pipe(wstream);
  stream.on("finish", () => {
    const dataSql = fs.readFileSync(file_path).toString();
    db.run("BEGIN TRANSACTION;");
    db.run(dataSql, [], (err) => {
      if (err) return console.error(err.message);
    });
    db.run("COMMIT;");
    db.all("select count(*) from table_name", [], (err, rows) => {
      if (err) return console.error(err.message);
      rows.forEach((row) => {
        console.log(row);
      });
    });
  });
}
The problem is that as soon as the stream.on("finish", ...) handler is registered, the loop moves on and starts processing the next file simultaneously, and it crashes.
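One way to make the loop wait, sticking to the same libraries used above: wrap each pipe in a promise that settles on 'finish' and await it inside a for...of in an async function, so only one CSV is converted and inserted at a time (this also stops every iteration from overwriting ./insert.sql while another is still reading it). A minimal sketch, assuming the same CSV2SQL, db, file_path and files_array as above:

async function importAll() {
  for (const file of get_files_array(files_array)) {
    await new Promise((resolve, reject) => {
      const csv2sql = CSV2SQL({ tableName: table_name });
      // the write stream emits 'finish' once the converted SQL is fully written
      const stream = fs.createReadStream(file).pipe(csv2sql).pipe(fs.createWriteStream(file_path));
      stream.on("error", reject);
      stream.on("finish", () => {
        const dataSql = fs.readFileSync(file_path).toString();
        db.run("BEGIN TRANSACTION;");
        db.run(dataSql, [], (err) => {
          if (err) return reject(err);
        });
        // resolve only after the commit, then the loop moves to the next file
        db.run("COMMIT;", (err) => (err ? reject(err) : resolve()));
      });
    });
  }
}

importAll().catch((err) => console.error(err.message));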

How to copy files with the same name but different formats (PDF, Excel, PPT, Word) to a folder? (Using Node)

I am trying to copy multiple files that share the same name but have different formats (file.ppt, file.pdf, etc.).
So far, when files share a name, I am only able to copy one of them (in whichever format) to the folder.
For example: if I have name1.ppt, name1.doc and name1.pdf, I need to copy all of them into the respective folder. But right now only one of the files with that name is being copied. I need to copy all of the files with the same name.
Please check my code below,
router.get('/segregate', async (req, res, next) => {
  removeDir('C:\\IND\\Processfile') // I am just resetting the folder.
  createDir();
  try {
    // console.log(res)
    return res.status(200).json({ status: 200, data: 'success' });
  } catch (err) {
    next(err);
  }
})
const removeDir = function(path) {
  if (fs.existsSync(path)) {
    const files = fs.readdirSync(path)
    if (files.length > 0) {
      files.forEach(function(filename) {
        if (fs.statSync(path + "/" + filename).isDirectory()) {
          removeDir(path + "/" + filename)
        } else {
          fs.unlinkSync(path + "/" + filename)
        }
      })
      fs.rmdirSync(path)
    } else {
      fs.rmdirSync(path)
    }
  } else {
    console.log("Directory path not found.")
  }
}
async function createDir() {
  console.log("Yes ssssssss");
  // let newList = [];
  let clearDirFlag = false;
  let masterData = await MasterModel.find().lean();
  // This is the folder where I need to copy the segregated files
  fs.mkdir("C:\\IND\\Processfile", (err) => {
    if (err) {
      return;
    }
    clearDirFlag = true;
    // console.log("clearDirFlagclearDirFlagclearDirFlagclearDirFlag ", clearDirFlag)
  });
  if (masterData.length != 0) {
    Object.keys(masterData).map(value => {
      let newList = []
      if (!masterData[value]['CLIENTCODE'].toString().match(/^[0-9a-z]+$/)) {
        // console.log("Yes ", data)
        let answer = masterData[value]['CLIENTCODE'].toString().replace(/[^0-9]/g, '');
        // console.log("answer ", answer);
        newList.push(
          {
            "clientcode": answer,
            "interm": masterData[value]['INTERMEDIARYNAME']
          }
        )
      } else {
        // console.log("No ")
        newList.push(
          {
            "clientcode": masterData[value]['CLIENTCODE'],
            "interm": masterData[value]['INTERMEDIARYNAME']
          }
        )
      }
      let filename;
      let interm;
      let boo = false;
      let name = newList[0]['clientcode'];
      var to_zip = fs.readdirSync("C:\\IND\\BGLHYD");
      to_zip.forEach((name1) => {
        if (name1.toString().includes(name)) {
          // console.log(name1);
          filename = name1;
          interm = newList[0]['interm'];
          boo = true;
        }
      });
      if (boo == true) {
        const pathToFile = "C:\\IND\\BGLHYD\\" + filename;
        // console.log("hello123", pathToFile);
        fs.mkdir("C:\\IND\\Processfile\\" + interm, (err) => {
          if (err) {
            return;
          }
        });
        const pathToNewDestination = "C:\\IND\\Processfile\\" + interm + "\\" + filename;
        let readStream = fs.createReadStream(pathToFile);
        readStream.once('error', (err) => {
          console.log(err);
        });
        readStream.once('end', () => {
          console.log('done copying');
        });
        readStream.pipe(fs.createWriteStream(pathToNewDestination));
      }
    })
  }
}
Here is the place where I am doing the copy functionality:
if (boo == true) {
  const pathToFile = "C:\\IND\\BGLHYD\\" + filename;
  // console.log("hello123", pathToFile);
  fs.mkdir("C:\\IND\\Processfile\\" + interm, (err) => {
    if (err) {
      return;
    }
  });
  const pathToNewDestination = "C:\\IND\\Processfile\\" + interm + "\\" + filename;
  let readStream = fs.createReadStream(pathToFile);
  readStream.once('error', (err) => {
    console.log(err);
  });
  readStream.once('end', () => {
    console.log('done copying');
  });
  readStream.pipe(fs.createWriteStream(pathToNewDestination));
}
Please help me to solve this issue.
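If it helps, one likely reason only one file per name is copied is that the to_zip.forEach keeps overwriting filename, so only the last match survives by the time the copy runs. Below is a hypothetical sketch that copies every matching file instead; it assumes the same name, newList and folder paths as in the createDir code above.

// Sketch only: copy every file whose name contains the client code,
// instead of remembering just the last match.
const interm = newList[0]['interm'];
const to_zip = fs.readdirSync("C:\\IND\\BGLHYD");
const matches = to_zip.filter((name1) => name1.toString().includes(name));
if (matches.length > 0) {
  const destDir = "C:\\IND\\Processfile\\" + interm;
  fs.mkdirSync(destDir, { recursive: true }); // make sure the folder exists before copying
  matches.forEach((filename) => {
    fs.createReadStream("C:\\IND\\BGLHYD\\" + filename)
      .once('error', (err) => console.log(err))
      .pipe(fs.createWriteStream(destDir + "\\" + filename));
  });
}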

Export CSV without crashing from Node.js

I need to export a CSV for a large amount of data, around 100k rows. I'm using json2csv, but sometimes it takes a long time and crashes. I'm using a Node.js stream, but with no luck. I have been googling but have no idea how to fix it. Please, can anyone guide me on how I can fix it?
Node JS
var formatedData = {......} // object with data
let filename = 'test' + '.csv';
let pathName = await writeToCSV(filename, formatedData, fields);
let readStream = fs.createReadStream(pathName);
res.setHeader('Content-disposition', `attachment; filename=${filename}`);
res.set('Content-Type', 'text/csv');
let downloadStream = readStream.pipe(res);
fields = null;
formatedData = null;
downloadStream.on('finish', function() {
  fs.unlink(pathName, function() {});
  downloadStream = null;
  readStream = null;
});
writeToCSV
function writeToCSV(filename, data, fields, option) {
  return new Promise((resolve, reject) => {
    if (typeof data !== 'object') {
      return reject(new Error('Data is not an object'));
    }
    let options = {
      fields
    };
    if (typeof option === 'object') {
      for (let key in option) {
        options[key] = option[key];
      }
    }
    let tmpPath = path.join(__dirname, '..', tmp);
    let pathFile = tmpPath + filename;
    return Promise.all([Promise.resolve(json2csv(data, options).split('\n')), checkTMPExist(tmpPath)]).then(data => {
      let csvFormat = data[0];
      let writeStream = fs.createWriteStream(pathFile);
      csvFormat.forEach((lines, index) => {
        if (index === csvFormat.length - 1) {
          writeStream.end(lines + '\n');
        } else {
          writeStream.write(lines + '\n');
        }
      });
      writeStream.on('finish', function() {
        this.end();
        return resolve(pathFile);
      });
      writeStream.on('error', function(err) {
        this.end();
        fs.unlink(pathFile, () => {});
        return reject(err);
      });
    }).catch(err => {
      fs.unlink(pathFile, () => {});
      return reject(err);
    });
  });
}
Front end Ajax call
function export_csv(url, fileName) {
  $.ajax({
    url: url,
    type: "GET",
    success: function (result) {
      var encodedUri = 'data:application/csv;charset=utf-8,' + encodeURIComponent(result);
      var link = document.createElement("a");
      link.setAttribute("href", encodedUri);
      link.setAttribute("download", fileName);
      document.body.appendChild(link);
      link.click();
    },
    error: function (xhr) {
      console.log("Export csv have some issue:" + JSON.stringify(xhr));
    }
  });
}
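One likely cause of the crash is that json2csv(data, options) builds the entire 100k-row CSV as a single in-memory string before it is split and written (and the Ajax call then buffers the whole response again in the browser as a data: URI). A minimal sketch of an alternative, assuming the data is an array of flat objects and the field names are known: write rows straight to the response in chunks so server memory stays bounded (streamCSV is a hypothetical helper, not part of json2csv).

// Hypothetical helper: stream rows directly to the HTTP response instead of
// building the whole CSV string first. Assumes `rows` is an array of flat
// objects and `fields` the column names, as in the code above.
function streamCSV(res, rows, fields, filename) {
  res.setHeader('Content-disposition', `attachment; filename=${filename}`);
  res.set('Content-Type', 'text/csv');
  const escape = (v) => `"${String(v == null ? '' : v).replace(/"/g, '""')}"`;
  res.write(fields.map(escape).join(',') + '\n'); // header row
  for (const row of rows) {
    res.write(fields.map((f) => escape(row[f])).join(',') + '\n');
  }
  res.end();
}

On the front end, pointing the link's href straight at the export URL is probably lighter than fetching the whole body with $.ajax and rebuilding it as a data: URI, which holds the entire file in browser memory a second time.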

How to convert all djvu files to pdf

Here's the answer: just use Node.js and ddjvu from the DJView lib.

Imports:
const fs = require('fs');
const os = require('os');
const {spawn} = require('child_process');
const path = require('path');
const maxProcess = os.cpus().length - 1; // count of processes - 1 for system needs
let nowPlayed = 0;
Method to convert a file and delete it when converted:
function chpoc(args) {
  console.log(args[1] + " start converting");
  spawn(`ddjvu`, ["-format=pdf", args[0], args[1] + ".pdf"]).on('close', (data) => {
    console.log(args[1] + ".pdf converted");
    fs.unlink(args[0], (err) => {
      if (err) throw err;
      console.log(args[0] + ' successfully deleted!');
      nowPlayed--;
    })
  });
}
Queue to limit the maximum number of conversions at one time:
let queue = [];

function startQueue() {
  if (nowPlayed < maxProcess && queue.length) {
    nowPlayed++;
    queue.pop()();
  }
}

setInterval(startQueue, 500)
Fill the queue and start it:
function workWithFile(filepath) {
  const args = filepath.match(/(.*)\.djvu/)
  if (args && args.length) {
    queue.push(() => {
      chpoc(args);
    });
  }
}
Show errors:
const eachCallback = function (err) {
  err && console.error(err);
}
Walk the directory tree and find the .djvu files:
let filePaths = [];

function getFiles(dirPath, callback) {
  fs.readdir(dirPath, function (err, files) {
    if (err) return callback(err);
    files.forEach((fileName) => {
      setTimeout(() => {
        let filePath = path.join(dirPath, fileName);
        if (filePath) {
          fs.stat(filePath, function (err, stat) {
            if (err) return eachCallback(err);
            if (stat.isDirectory()) {
              getFiles(filePath, callback);
            } else if (stat.isFile() && /\.djvu$/.test(filePath)) {
              filePaths.push(filePath);
              callback(filePath)
            }
          })
        }
      });
    });
  });
}
Initialize from the starting directory:
getFiles(__dirname, function (file) {
  workWithFile(file);
});
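A side note on the throttling: the setInterval poll works, but the same cap on concurrent conversions could be kept without polling by dequeuing the next job as soon as one finishes. A minimal sketch under the assumption that each queued entry accepts a done callback (which chpoc above would have to invoke from its 'close' handler); this is only an alternative, not the answer's code.

// Sketch only: event-driven dequeue instead of polling every 500 ms.
// Assumes each entry in `queue` is a function taking a `done` callback.
function startNext() {
  if (nowPlayed >= maxProcess || queue.length === 0) return;
  nowPlayed++;
  const job = queue.pop();
  job(() => {
    nowPlayed--;
    startNext(); // kick off the next conversion as soon as this one ends
  });
}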
