search keyword from the file and remove line in nodejs - node.js

Trying to search a keyword and remove that row from a file. actually it is working fine for me . but I want to show console message if keyword is not found.
from the code lastIndex is the array of search keyword. but how to show a message that the 'keyword not found' if array is empty or undefined!
files.forEach(path => {
var fs = require('fs')
fs.readFile(path, {encoding: 'utf-8'}, function(err, data) {
if (err) {
console.error(err);
return;
}
let dataArray = data.split('\n'); // convert file data in an array
const searchKeyword = 'Searching Word';
let lastIndex = -1; // let say, we have not found the keyword
for (let index=0; index<dataArray.length; index++) {
if (dataArray[index].includes(searchKeyword)) {
lastIndex = index;
var valueAtIndex = dataArray.at(lastIndex);
console.log(valueAtIndex)
break;
}
}
dataArray.splice(lastIndex, 1); // remove the keyword from the data Array
const updatedData = dataArray.join('\n')
fs.writeFile(path, updatedData, (err) => {
if (err) throw err;
console.log ('Successfully updated the file data for : '+path);
})
})
})

Related

Modify the value of a variable outside callback with the callback inside loop

I am new to Nodejs and I am facing with a problem: Modify the value of a variable outside callback with the callback inside a loop.
I am coding online-judgle project, this is my function to check output of a program with answer from database. I created result object to store amount of correct testcase.
function compareResult(fileName, problem, timeLimit, callback) {
const cp = require('child_process');
const exePath = 'submit\\' + fileName + '.exe';
const child = cp.spawn(exePath, ['--from=markdown', '--to=html'], {timeout: timeLimit});
MongoClient.connect(uri, function(err, db) {
if (err) throw err;
var dbo = db.db(dbName);
var query = { id_problem: problem, is_eg: "false" };
var proj = { projection: {input: 1, output: 1} };
dbo.collection("sample").find(query, proj).toArray(function(err, arr) {
if (err) throw err;
if (arr != null) {
var result = {
correct: 0,
total: arr.length
};
for (const json of arr) {
const answer = json['output'];
child.stdin.write(json['input']);
child.stdout.on('data', function(data) {
if (data == answer) {
result.correct += 1; // I want to modify result object here.
}
});
child.stdin.end();
};
console.log(result);
callback(result);
}
});
});
I want to modify result object in that place. How will I do it?
function compareResult(fileName, problem, timeLimit, callback) {
const cp = require('child_process');
const exePath = 'submit\\' + fileName + '.exe';
const child = cp.spawn(exePath, ['--from=markdown', '--to=html'], {timeout: timeLimit});
MongoClient.connect(uri, function(err, db) {
if (err) throw err;
var dbo = db.db(dbName);
var query = { id_problem: problem, is_eg: "false" };
var proj = { projection: {input: 1, output: 1} };
dbo.collection("sample").find(query, proj).toArray(function(err, arr) {
if (err) throw err;
if (arr != null) {
var result = {
correct: 0,
total: arr.length
};
for (const json of arr) {
const answer = json['output'];
child.stdin.write(json['input']);
child.stdout.on('data', function(data) {
if (data == answer) {
result.correct += 1;
}
// Decrement total here to track how many 'data' events have been emitted
result.total--;
if (result.total === 0) {
// All 'data' events have been emitted, so call the callback function
callback(result);
}
});
child.stdin.end();
};
}
});
});
}

NodeJS Appending File differently to source file

So I have some code that takes an input file, Replaces strings in that file and appends those replaced strings to another file.
The problem is that the append function outputs a slightly different file (not where the replaces are). The strings are replaced fine its just some of the lines are swapped and some lines do not have a break.
Code:
const settings = require('../settings.json')
const lineReader = require('line-reader');
const sleep = require('system-sleep')
const Promise = require('bluebird');
var eachLine = Promise.promisify(lineReader.eachLine);
const fs = require('fs')
function makeid(length) {
var result = '';
var characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
var charactersLength = characters.length;
for ( var i = 0; i < length; i++ ) {
result += characters.charAt(Math.floor(Math.random() *
charactersLength));
}
return result;
}
fs.stat('./unblacklisted.rbxlx', function(err, stat) {
if(err == null) {
fs.unlink('./unblacklisted.rbxlx', function(err) {
if (err) throw err
})
} else if(err.code === 'ENOENT') {
// file does not exist
} else {
console.log('Some other error: ', err.code);
}
})
eachLine(`./${settings['CorrectMapName/fileName']}`, function(line) {
if (typeof line.split('referent=')[1] !== 'undefined') {
let treatedline = line.replace(/(\r\n|\n|\r)/gm, "");
let test = treatedline.split('referent=')[1]
let hello = (treatedline.replace(test, `"RBX${Buffer.from(makeid(17), 'ascii').toString('hex').toUpperCase()}">`))
fs.appendFile('./unblacklisted.rbxlx',`${hello}\n`, function (err) {
if (err) throw err;
});
} else {
let treatedline = line.replace(/(\r\n|\n|\r)/gm, "");
fs.appendFile('./unblacklisted.rbxlx', `${treatedline}\n`, function (err) {
if (err) throw err;
return;
});
}
}).then(function() {
__callback()
})
The input and output files are XML and I will provide two pastebin URL's for the input file and output file
Input: https://pastebin.com/cHbzL1W6
Output: https://pastebin.com/C53YBwMy
These look very similar but if you run them through a file comparer some lines are switched and vise versa
Would love to fix this, Any help would be GREATLY appreciated
Using jifriend00's first comment I saw that fs.appendFile() was not appending Synchronously
I fixed this by using fs.appendFileSync()
If anyone reading has this problem just use fs.appendFileSync() :D

Converting an Excel file to CSV and reading its contents in JSON format

When an excel file is uploaded in my program, it needs to get converted to a CSV file to be read. The process is working fine and I am using asyc/await, however whenever I try to read the file using the csvtojson package in Node, the file does not get read properly. If I directly use a CSV file then it works fine. The issue arises when the conversion occurs.
product.js
const multer = require('multer');
const express = require('express');
const router = express.Router();
const csv = require('csvtojson');
const fs = require('fs');
const xlsx = require('node-xlsx');
router.post('/upload', upload.single('singleFile'), async (req, res) => {
let csvFilePath = req.file.path;
let fileType = req.file.mimetype;
const convertToCSV = async _ => {
console.log("2");
if (fileType === 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' ||
fileType === 'application/vnd.ms-excel') {
let obj = xlsx.parse(csvFilePath);
let rows = [];
let writeStr = "";
for (let i = 0; i < obj.length; i++) {
let sheet = obj[i];
for (let j = 0; j < sheet['data'].length; j++) {
rows.push(sheet['data'][j]);
}
}
//creates the csv string to write it to a file
for (let i = 0; i < rows.length; i++) {
writeStr += rows[i].join(",") + "\n";
}
console.log("3");
fs.writeFile("csv/out.csv", writeStr, function (err) {
if (err) {
return res.status(400).send({'error': err});
}
console.log("4");
console.log("out.csv was saved in the current directory!");
});
}
}
console.log("1");
await convertToCSV().then(async _ => {
console.log("5");
const jsonArray = await csv({flatKeys: true})
.fromFile(csvFilePath)
.then(async (jsonObj) => {
console.log("6");
console.log(jsonObj[0]);
...
//Few more functions
}).catch(err => {
return res.status(400).send(err);
});
});
});
My console log looks like this
1
2
3
5
4
out.csv was saved in the current directory!
6
{
'PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0000\u0000!\u0000b�h^\u0001\u0000\u0000�\u0004\u0000\u0000\u0013\u0000\b\u0002[Content_Types].xml �\u0004\u0002(�\u0000\u0002\u0000\u0000\
u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u00
Whereas if a new CSV file is uploaded or an existing CSV is used then the output for the console.log(jsonObj[0]); is
{
'Column1': 'Column 1 Data',
field2: 'field2 Data',
field3: 'field 3 Data',
Categories: 'categories',
....
}
I added await before fs.writeFile however the same issue arises. There are two files that get saved under the directory csv/
c33129f3bdef482657992dbf452d2c1b
out.csv
And the contents of the previous file are read (assuming that, since they are very similar) and out.csv is not read.
Update
Wrapped a promise around fs.writeFile and the console is ordered now, however the output for the data read is yet the same:
const convertToCSV = async _ => {
return new Promise(((resolve, reject) => {
console.log("2");
....
console.log("3");
fs.writeFile("csv/out.csv", writeStr, function (err) {
if (err) {
return res.status(400).send({'error': err});
}
console.log("4");
console.log("out.csv was saved in the current directory!");
resolve();
});
}
}));
}
Console Log
1
2
3
4
out.csv was saved in the current directory!
5
6
{
'PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0000\u0000!\u0000b�h^\u0001\u0000\u0000�\u0004\u0000\u0000\u0013\u0000\b\u0002[Content_Types].xml �\u0004\u0002(�\u0000\u0002\u0000\u0000\
u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u00
You don't wait fs.writeFile("csv/out.csv" that's why you get 5 before 4 in console.
You should wrap your function content into Promise:
const convertToCSV = async _ => {
return new Promise((resolve, reject) => {
console.log("2");
...
console.log("3");
fs.writeFile("csv/out.csv", writeStr, function (err) {
if (err) {
// I'd prefer to call "reject" here and add try/catch outside for sending 400
return resolve(res.status(400).send({'error': err}));
}
console.log("4");
console.log("out.csv was saved in the current directory!");
resolve();
});
})
)
Also you read csvFilePath that contains a filename of Excel file and not a CSV that was stored under csv/out.csv name.

Synchronous NodeJS batch job

I'm trying to write a batch script that will
Read XMLs from a directory
Parse each XML and find a value to use for DB(database) Lookup
Use the parsed value to DB lookup additional metadata
Populate XML with the metadata retrieved from DB lookup (step 4)
Write updated XML to complete directory
Close DB connection
The issue I'm running into is that I cannot control the code execution order so that I can close the DB connection at the end of the script. If I attempt to close the connection, I get a 'connection undefined' error. Below is my code for reference. Is there a good way to accomplish something like this in NodeJs, or should I look at doing this in Java or some other language?
enter code here
'use strict';
let fs = require('fs');
let xml2js = require('xml2js');
const oracledb = require('oracledb');
const dbConfig = require('./dbconfig.js');
function pad(number, length)
{
var str = '' + number;
while (str.length < length)
{
str = '0' + str;
}
return str;
}
async function run() {
try {
// Get a non-pooled connection
let connection;
if (!connection)
{
connection = await oracledb.getConnection(dbConfig);
console.log('Connection was successful!');
}
let directory = "EDI_XMLS";
let dirBuf = Buffer.from(directory);
//var f = 0;
let files = fs.readdirSync(directory);
console.log(files);
for (let f = 0; f < files.length; f++)
{
let parser = new xml2js.Parser();
var xml_json_data = "";
// read the file
await fs.readFile(directory + "/" + files[f], async function(err, data) {
// parse the file
await parser.parseString(data, async function(err, result) {
let results;
var line_count = result.page.GLLines[0].GLLine.length;
console.dir('Invoice: ' + result.page.InvoiceNumber[0]);
console.dir('Line Total: ' + line_count);
console.log('File: ' + f);
try
{ // Lookup Data
results = await connection.execute('SELECT BUSINESS_UNIT, OPERATING_UNIT, DEPTID,PRODUCT,a.effdt FROM SYSADM.PS_A_NSS_SHPTO_ACC#FDEV a where(a.a_ship_to_customer = :shipTo) order by a.effdt
desc',[pad(result.page.VoucherDescription[0], 10)], {
maxRows: 2
});
console.log(results.metaData);
console.log(results.rows);
}
catch (err)
{
console.error(err);
}
for (let i = 0; i < line_count; i++) // Populate data
{
result.page.GLLines[0].GLLine[i].GLBU[0] = results.rows[0][0];
result.page.GLLines[0].GLLine[i].OpUnit[0] = results.rows[0[1];
result.page.GLLines[0].GLLine[i].Department[0] = results.rows[0][2];
result.page.GLLines[0].GLLine[i].Product[0] = results.rows[0][3];
}
// Write to File
var builder = new xml2js.Builder();
var xml = builder.buildObject(result);
await fs.writeFile("complete/" + files[f], xml, function(err, data) {
if (err) console.log(err);
console.log("successfully written our update xml to file");
console.dir('BUs: ' + JSON.stringify(result.page));
}); //end write
}); //end parser
}); //end readfile
console.log('End');
} // async for
}
catch (err)
{
console.error(err);
}
finally
{
await connection.close();
console.log('Finally Done');
}
}
run();
console.log('completely Done');

Node.js reading textfiles in current directory and validate

This is actually the answer from my previous question.... the supplied code works for me. All I needed to do was retain a file counter (global) and in the read after validating add to array (global) which gets passed back to rendering process.
// Called from Render Process.
ipcMain.on('search-text-files', (event, arg) => {
const fs = require('fs');
const path = require('path');
var txtArr = [];
var fileName = '';
var fCtr = 0;
fs.readdir(__dirname+'/', function (err, items) {
if (err) {
throw err;
}
// loop through directory items
for (var i=0; i<items.length; i++) {
if (path.extname(items[i].toString() == '.txt') {
fctr+=1;
fileName = items[i].toString();
// read the file & validate
fs.readfile(__dirname+'/'+fileName, (err, data) {
if (err) {
throw err;
}
var checkArr[];
var curFile = '';
checkArr = data.toString().split('\r');
// access contents line by line
for (var line=0; line<checkArr.length; line++) {
... perform some sort of validation
... assign curFile from contents
}
if (file is valid) {
txtArr.push(curfile);
}
fCtr-=1;
if (fCtr == 0) {
event.sender.send('text-files-found', txtArr);
}
});
}
}
});
});

Resources