How to write data into a file in Node.js?

I have an Excel file containing the columns No, itemCode, startAt, and endAt.
I loop over the data using createReadStream, and I want to detect any invalid datetime in the file and write the error into a test.txt file.
Here's my current code:
const csv = require('csv-parser');
const fs = require('fs');
const moment = require('moment');
const yesql = require('yesql');
const _ = require('lodash');
const { exit } = require('process');
async function run() {
  const csvResults = [];
  fs.createReadStream('test.csv')
    .pipe(csv())
    .on('data', (data) => csvResults.push(data))
    .on('end', async () => {
      const groups = _.groupBy(csvResults, item => item['itemCode']);
      for (const itemCode in groups) {
        for (const item of groups[itemCode]) {
          const startDateTime = moment(item['startAt'], 'YYYY-MM-DD HH:mm:ss');
          const endDateTime = moment(item['endAt'], 'YYYY-MM-DD HH:mm:ss');
          if (startDateTime.isAfter(endDateTime)) {
            console.error(`Datetime invalid ${item['No']}`);
            // Want to write into the file if there's an error
            var stream = fs.createWriteStream("test.txt");
            stream.once('open', function(fd) {
              stream.write(`${item['No']} : ${startDateTime} ${endDateTime} \n`);
              stream.end();
            });
            continue;
          }
        }
      }
      exit();
    });
}
run();

Your for loop does NOT wait for the fs.createWriteStream() stuff to complete so you end up trying to write to the same file multiple times at the same time which creates a mess in that file. Likewise, you call exit() before your writes are even done also because the for loop doesn't wait for the stream to finish.
If what you want to do is to append each error condition to your file, then you can do it like this:
const csv = require('csv-parser');
const fs = require('fs');
const fsp = fs.promises;
const moment = require('moment');
const yesql = require('yesql');
const _ = require('lodash');
const { exit } = require('process');

async function run() {
  const csvResults = [];
  fs.createReadStream('test.csv')
    .pipe(csv())
    .on('data', (data) => csvResults.push(data))
    .on('end', async () => {
      const groups = _.groupBy(csvResults, item => item['itemCode']);
      for (const itemCode in groups) {
        for (const item of groups[itemCode]) {
          try {
            const startDateTime = moment(item['startAt'], 'YYYY-MM-DD HH:mm:ss');
            const endDateTime = moment(item['endAt'], 'YYYY-MM-DD HH:mm:ss');
            if (startDateTime.isAfter(endDateTime)) {
              console.error(`Datetime invalid ${item['No']}`);
              // append to the file whenever an invalid datetime is found
              await fsp.appendFile("test.txt", `${item['No']} : ${startDateTime} ${endDateTime} \n`);
            }
          } catch(e) {
            console.log(e);
            // not sure what you want to do here if you got an error while appending to the file
          }
        }
      }
      exit();
    });
}
run();
If you want to just record the first error in the file and then stop further processing, you can use fsp.writeFile() instead of fsp.appendFile() and put your call to exit() right after that call.
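For example, the check inside the loop would then look roughly like this (a sketch; only the if block changes from the code above):

if (startDateTime.isAfter(endDateTime)) {
  console.error(`Datetime invalid ${item['No']}`);
  // overwrite test.txt with just this first error, then stop processing
  await fsp.writeFile("test.txt", `${item['No']} : ${startDateTime} ${endDateTime} \n`);
  exit();
}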

Related

fs.watchFile() a JSON file until a specific value appears

So I have a JSON file that changes continuously, and I need to read it AFTER a value called auth-token is written to the file. Here is what I have now:
const json = fs.readFileSync("some-json.json")
const headers = JSON.parse(json);
return headers
But it reads the file before anything can be written to it. Is there any way that I can use fs.watchFile() and watch the file UNTIL the value is written?
Thanks
You can use fs.watch, although its behavior is a bit unreliable in that it can trigger multiple events for a single file change (but I don't think that would be a problem here).
Here is a small sample:
const { watch } = require('fs');
const { readFile } = require('fs/promises');

(async () => {
  const result = await new Promise((resolve) => {
    const watcher = watch('some-json.json', async (eventType, filename) => {
      try {
        const fileContent = await readFile(filename);
        const headers = JSON.parse(fileContent.toString());
        if (headers['auth-token']) { // or whatever test you need here
          watcher.close();
          resolve(headers);
        }
      } catch (e) {}
    });
  });
  console.log(result);
})();
Note that if your file gets modified many times before it contains the desired header, it might be preferable to replace the usage of fs.watch by a setInterval to read the file at regular intervals until it contains the value you expect.
Here is what it would look like:
const { readFile } = require('fs/promises');

(async () => {
  const waitingTime = 1000;
  const result = await new Promise((resolve) => {
    const interval = setInterval(async () => {
      try {
        const fileContent = await readFile('some-json.json');
        const headers = JSON.parse(fileContent.toString());
        if (headers['auth-token']) { // or whatever test you need here
          clearInterval(interval);
          resolve(headers);
        }
      } catch (e) {}
    }, waitingTime);
  });
  console.log(result);
})();
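For reference, the question title mentions fs.watchFile(), which also polls the file at a configurable interval under the hood, so the same idea could be expressed with it. A rough sketch along the same lines (assuming the same some-json.json file):

const { watchFile, unwatchFile } = require('fs');
const { readFile } = require('fs/promises');

(async () => {
  const result = await new Promise((resolve) => {
    // the listener fires whenever the polled file stats change
    watchFile('some-json.json', { interval: 1000 }, async () => {
      try {
        const headers = JSON.parse(await readFile('some-json.json', 'utf8'));
        if (headers['auth-token']) { // or whatever test you need here
          unwatchFile('some-json.json');
          resolve(headers);
        }
      } catch (e) {}
    });
  });
  console.log(result);
})();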

Add transform before write stream

I have a function e.g.
function writeToStream(writeStream) {
  writeStream.write("test\n");
}
How can I make this apply a transform before writing to the final destination? Note: I don't have control over the writeToStream function; I just want to apply a transformation to what it writes out.
const fs = require("fs");
const { Transform } = require("stream");

const upperCaseTr = new Transform({
  transform(chunk, encoding, callback) {
    // this is never called???
    callback(null, chunk.toString().toUpperCase());
  },
});

function writeToStream(writeStream) {
  writeStream.write("test\n");
}

const r = fs.createWriteStream("test.txt");
writeToStream(upperCaseTr.pipe(r));
With the above code, my custom Transform upperCaseTr is never called
This answer was very helpful in solving this: https://stackoverflow.com/a/50537771/2129219
Specifically, pipe() returns its destination stream, so upperCaseTr.pipe(r) just returns r; writeToStream then writes directly to r and upperCaseTr is never called.
Here is an answer that uses through2, similar to the linked answer
const fs = require("fs");
const through2 = require("through2");

function writeToStream(writeStream) {
  writeStream.write("test test test test test\n");
}

const stream = through2.obj((chunk, enc, callback) => {
  callback(null, chunk.toString().toUpperCase());
});

const out = fs.createWriteStream("test.txt");
stream.pipe(out);
writeToStream(stream);
Here is one using a Transform more similar to my original question
const fs = require("fs");
const { Transform } = require("stream");

function writeToStream(writeStream) {
  writeStream.write("test test test test test\n");
}

const upperCaseTr = new Transform({
  transform(chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase());
  },
});

const out = fs.createWriteStream("test.txt");
upperCaseTr.pipe(out);
writeToStream(upperCaseTr);
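If you also need to know when everything has been flushed to test.txt (for example before exiting the process), a small sketch of one way to do it is to end the transform once writeToStream is done and listen for the destination's finish event:

upperCaseTr.end(); // no more writes are coming from writeToStream
out.on('finish', () => {
  console.log('all transformed data has been flushed to test.txt');
});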

Recursively read image files in a provided folder

I am given a folder's absolute path and I want to extract the paths of all text files from the folder (recursively).
This is what I've tried:
const fs = require('fs-extra');
const path = require('path');

const getFolderImagesRecursive = async (folderPath) => {
  const directoryChildren = await fs.readdir(folderPath);
  return directoryChildren.reduce(async (finalArray, directoryChild) => {
    const fullPath = path.join(folderPath, directoryChild);
    const pathStat = await fs.lstat(fullPath);
    if (pathStat.isDirectory()) {
      const recursiveResult = await getFolderImagesRecursive(fullPath);
      return [
        ...finalArray,
        ...recursiveResult,
      ];
    } else if (fullPath.split('.').pop() === 'txt') {
      return [
        ...finalArray,
        fullPath,
      ];
    } else {
      return finalArray;
    }
  }, []);
}
For testing purposes I've created dummy folders with text files and nested folders inside. When I tried the function on the main test folder I got: TypeError: object is not iterable (cannot read property Symbol(Symbol.iterator)) on line 21.
Can anyone spot the error and how to fix it?
The problem is that array.reduce is a synchronous function: when you pass it an async callback, the accumulator it passes along is a promise, not an array. So after the first iteration finalArray is a promise that resolves to an array, and on line 21 you are trying to spread that promise, which is not iterable. You should rewrite your code using loops and the synchronous fs calls instead of async/await, like this:
const fs = require('fs-extra');
const path = require('path');

const getFolderImagesRecursive = (folderPath) => {
  const directoryChildren = fs.readdirSync(folderPath);
  const finalArray = [];
  directoryChildren.forEach(directoryChild => {
    const fullPath = path.join(folderPath, directoryChild);
    const pathStat = fs.lstatSync(fullPath);
    if (pathStat.isDirectory()) {
      const recursiveResult = getFolderImagesRecursive(fullPath);
      // concat() returns a new array without mutating, so push the results instead
      finalArray.push(...recursiveResult);
    } else if (fullPath.split('.').pop() === 'txt') {
      finalArray.push(fullPath);
    }
  });
  return finalArray;
}
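For completeness, if you would rather keep the async fs-extra calls, the reduce approach can also be made to work by awaiting the accumulator promise on every iteration; a rough sketch of that variant:

const getFolderImagesRecursive = async (folderPath) => {
  const directoryChildren = await fs.readdir(folderPath);
  return directoryChildren.reduce(async (finalArrayPromise, directoryChild) => {
    const finalArray = await finalArrayPromise; // the accumulator is a promise here
    const fullPath = path.join(folderPath, directoryChild);
    const pathStat = await fs.lstat(fullPath);
    if (pathStat.isDirectory()) {
      return [...finalArray, ...(await getFolderImagesRecursive(fullPath))];
    }
    if (fullPath.split('.').pop() === 'txt') {
      return [...finalArray, fullPath];
    }
    return finalArray;
  }, Promise.resolve([]));
};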

Node.js async function to parse CSV and return data to another file

I'm creating a small tool for internal users with Puppeteer.
Basically I've got a CSV file with some data I "read" and fill a form with.
As I try to clean up my project to make it reusable, I'm struggling a little bit:
I created a file named parsecsv.js
const config = require('../config.json');
const parse = require('csv-parse');
const fs = require('fs');

const processFile = async () => {
  const records = [];
  const parser = fs
    .createReadStream(config.sourceFile)
    .pipe(parse({
      // CSV options
      from_line: 1,
      delimiter: ";",
    }));
  let i = 1;
  for await (const record of parser) {
    records.push(record);
    i++;
  }
  return records;
}

const processFileData = async () => {
  const records = await processFile();
  console.info(records);
  return records;
}

module.exports = {
  processFile, processFileData
}
In another JS file I have:
const parseCSV = require('./src/ParseCsv');
const records = parseCSV.processFileData();
const data = parseCSV.processFile();
console.log(typeof records);
console.table(records);
console.log(typeof data);
console.table(data);
But I never get my data, only an empty object.
How can I get my data so that I'm able to "share" it with other functions?
Thanks
As your functions are async ones and they return promises, you can do something like:
const parseCSV = require('./src/ParseCsv');

(async () => {
  const records = await parseCSV.processFileData();
  const data = await parseCSV.processFile();
  console.log(typeof records);
  console.table(records);
  console.log(typeof data);
  console.table(data);
})()

How to read and remove some lines from a JSON file using Node.js

I want to extract some comment lines of data from a JSON file and, after turning them into a certain format, I need to remove them. My JSON file is shown below.
test.json:
#PATH:/test/
#DEVICES:div1
#TYPE:p1
{
  name:'Raj',
  address: {
    city:'bbsr'
  }
}
The above is a test JSON file. I need to read the lines that start with # and turn them into a separate object like const obj = {PATH:'/test/', DEVICES:'div1', TYPE:p1}, and finally remove those 3 lines from the original test.json file using Node.js.
The following snippet can be used to solve this problem. It can handle any number of similar comments.
const fs = require('fs');
const readline = require('readline');

const filepath = '/Users/.../nodeText.json'; // file to read from
const writePath = '/Users/.../nodeFileResult.json'; // file to write to
const inputStream = fs.createReadStream(filepath);
const startLine = 'const obj = {\n';
let content = '';

const readPromise = new Promise(function(resolve, reject) {
  const lineReader = readline.createInterface({
    input: inputStream
  });
  inputStream.on('end', () => {
    resolve('end');
  });
  lineReader.on('line', (line) => {
    let resultText = '';
    const replaceDict = {
      '#': '"',
      ':': '": "',
      '\n': '",\n'
    };
    const regexString = /#|:|\n/;
    const re = new RegExp(regexString, 'gim');
    if (line.startsWith('#')) {
      // append \n so it's a full line again and the regex can close the quoted value
      resultText = (line + `\n`).replace(re, (match) => {
        return replaceDict[match];
      });
    } else if (line.startsWith('{')) {
      // close the generated object before the original JSON body starts
      resultText = line.replace('{', `}, \n {`) + '\n';
    } else {
      resultText = line + '\n'; // keep the original line breaks of the JSON body
    }
    content = content + resultText;
  });
});

readPromise
  .then((result) => {
    if (result === 'end') {
      content = startLine + content;
      fs.writeFile(writePath, content, error => {
        if (error) {
          console.error(error);
          return;
        }
      });
    }
  });
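The snippet above writes the transformed content to a separate result file; if you also want to strip the # lines from test.json itself, as the question asks, a minimal sketch (assuming the same test.json) could look like this:

const { readFile, writeFile } = require('fs/promises');

(async () => {
  const original = await readFile('test.json', 'utf8');
  // keep every line that does not start with '#'
  const withoutComments = original
    .split('\n')
    .filter(line => !line.startsWith('#'))
    .join('\n');
  await writeFile('test.json', withoutComments);
})();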
