Breaking functions down in Node.js

In my attempt to become a better developer I'm trying to refactor the below Node code into 2 (or even 3) separate functions.
The code simply takes in a file, parses some of the data, and rewrites the parsed data to another file.
So my question is, can the code below be broken down to 2 functions (one function to read and parse, the other to write)? Can it be broken down even further to 3 functions (one to read and parse, one to write, one that calls the other two)?
I have attempted to refactor the code into 2 functions but am not having any luck.
const neatCsv = require('neat-csv');
const fs = require('fs');
const ObjectsToCsv = require('objects-to-csv');

fs.readFile('./someFile.csv', async (err, data) => {
  if (err) {
    console.error(err);
    return;
  }
  const neat = await neatCsv(data);
  const sortArray = neat.filter((obj) => obj.Type !== 'Name');
  const priceSortArray = sortArray.filter((obj) => +obj.Price > 5);
  const filterSortArray = priceSortArray.sort((a, b) => parseFloat(b.IV) - parseFloat(a.IV));
  (async () => {
    const csv = new ObjectsToCsv(filterSortArray);
    // Save to file:
    await csv.toDisk('./someOtherFile.csv');
  })();
});

Please try the approach below. I hope this is what you were asking for.
const neatCsv = require('neat-csv');
const fs = require('fs');
const ObjectsToCsv = require('objects-to-csv');

const readFile = (filePath) => {
  fs.readFile(filePath, async (err, data) => {
    if (err) {
      console.error(err);
      return;
    }
    const neat = await neatCsv(data);
    const sortArray = neat.filter((obj) => obj.Type !== 'Name');
    const priceSortArray = sortArray.filter((obj) => +obj.Price > 5);
    const filterSortArray = priceSortArray.sort((a, b) => parseFloat(b.IV) - parseFloat(a.IV));
    saveFile(filterSortArray);
  });
};

const saveFile = async (filterSortArray) => {
  const csv = new ObjectsToCsv(filterSortArray);
  // Save to file:
  await csv.toDisk('./someOtherFile.csv');
};

readFile('./someFile.csv');
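
And to answer the three-function part of the question: yes, the orchestration can be pulled out as well. Here is a minimal sketch of that split, assuming the same modules and file paths as above, and using fs.promises so each step can be awaited:

const neatCsv = require('neat-csv');
const fs = require('fs').promises;
const ObjectsToCsv = require('objects-to-csv');

// 1) Read and parse: returns the filtered, sorted rows.
const parseFile = async (filePath) => {
  const data = await fs.readFile(filePath);
  const rows = await neatCsv(data);
  return rows
    .filter((obj) => obj.Type !== 'Name')
    .filter((obj) => +obj.Price > 5)
    .sort((a, b) => parseFloat(b.IV) - parseFloat(a.IV));
};

// 2) Write: saves the rows to another file.
const saveFile = async (rows, filePath) => {
  const csv = new ObjectsToCsv(rows);
  await csv.toDisk(filePath);
};

// 3) Orchestrate: calls the other two.
const run = async () => {
  try {
    const rows = await parseFile('./someFile.csv');
    await saveFile(rows, './someOtherFile.csv');
  } catch (err) {
    console.error(err);
  }
};

run();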


Node JS get CSV

I have a Node.js project in which I am collecting JSON data over HTTP using the node-fetch module.
This is the way I have found to use node-fetch with async; if it is possible to improve this function, suggestions are welcome, as I am new to this module.
This is my code where I read the information:
const fetch = require('node-fetch');

(async () => {
  try {
    const res = await fetch('https://jsonplaceholder.typicode.com/users');
    const headerDate = res.headers && res.headers.get('date') ? res.headers.get('date') : 'no response date';
    const users = await res.json();
    for (const user of users) {
      console.log(`Got user with id: ${user.id}, name: ${user.name}`);
    }
  } catch (err) {
    console.log(err.message); // can be console.error
  }
})();
My problem: how can I extract all the information to CSV with a line limit? That is, if the CSV has a limit of 10 lines (the limit can vary) and the JSON information occupies 30 lines, 3 CSVs would be created to store all the information. I have added the json-2-csv module, but I don't know how to use it, or whether this module is necessary or something else is better.
const { Parser } = require("json2csv");
const fetch = require("node-fetch");
const fs = require("fs");
const csvLimit = 3;
const getJson = async () => {
const response = await fetch("https://jsonplaceholder.typicode.com/users");
const responseJson = await response.json();
return responseJson;
};
const jsonToCsv = async () => {
const json = await getJson();
const json2csvParser = new Parser();
let i = 0,
j = 0;
while (j < json.length) {
let csv = [];
let temp = [];
for (j = i * csvLimit; j < (i + 1) * csvLimit; j++) {
temp.push(json[j]);
}
csv.push(json2csvParser.parse(temp));
fs.writeFileSync(`file${(i * csvLimit) / 3}.csv`, csv);
i++;
}
};
jsonToCsv();
If you want only specific fields in the CSV file, you can pass the fields as a parameter in this way:
const json2csvParser = new Parser({fields})
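For example, to keep only a few columns of the users payload (these field names appear in the jsonplaceholder response):

const { Parser } = require("json2csv");

// Only these columns will appear in the generated CSV.
const fields = ["id", "name", "email"];
const json2csvParser = new Parser({ fields });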
I used the flat package to extract the field names from the keys of the first record of the JSON and then used the json-2-csv package to convert from JSON to CSV.
const converter = require("json-2-csv");
const fetch = require("node-fetch");
const fs = require("fs");
const flatten = require('flat');
const maxRecords = 3;
const getJson = async () => {
const response = await fetch("https://jsonplaceholder.typicode.com/users");
const responseJson = await response.json();
return responseJson;
};
const convertToCSV = async () => {
const json = await getJson();
let keys = Object.keys(flatten(json[0]));
let options = {
keys: keys
};
converter.json2csv(json, json2csvCallback, options);
};
let json2csvCallback = function (err, csv) {
if (err) throw err;
const headers = csv.split('\n').slice(0,1);
const records = csv.split('\n').slice(0,);
for(let i=1;i<records.length;i=i+maxRecords) {
let dataOut = headers.concat(records.slice(i, i+3)).join('\n');
let id = Math.floor(i/maxRecords)+1;
fs.writeFileSync('data' + id + '.csv', dataOut)
}
};
convertToCSV();

Chaining GET requests to WP REST API in Express

I am struggling to understand callbacks, promises, and async/await.
What I want to do is read a .csv file inside my project folder that contains 150+ post IDs.
For each one of those IDs I want to make an HTTPS GET request to fetch a JSON response from my Wordpress website.
Then for each one of those posts that gets returned I want to insert them in my Firestore database.
I'm struggling with how to properly set up the callback functions.
Please help.
Thanks in advance.
const express = require('express');
const router = express.Router();
const https = require("https");
const Recipe = require("../includes/newrecipe");

var admin = require('firebase-admin');
var serviceAccount = require("../service_key.json");

const collectionKey = "recipes"; //name of the collection

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  databaseURL: "<MY_FIRESTORE_URL>"
});

const firestore = admin.firestore();

const fs = require('fs');
const parse = require('csv-parser');

function prepareCsvData() {
  return new Promise((resolve, reject) => {
    //establish empty csvData array and filename to be referenced
    var csvData = [];
    var filename = 'wprm_recipe_ids.csv';
    //read the csv file and push the data object into the array
    fs.createReadStream(filename)
      .pipe(parse(['ID']))
      .on('data', (data) => csvData.push(data))
      .on('end', () => { resolve(csvData); });
  });
}
function getRecipeFromBlog(recipeId) {
  return new Promise((resolve, reject) => {
    //make the get request to my website to get the recipe
    https.get('<MY_WEBSITE_URL>' + recipeId, (response) => {
      var body = "";
      response.on('data', function (chunk) { body += chunk; });
      response.on('end', () => {
        var { recipe } = JSON.parse(body);
        //build new recipe to be exported
        var newRecipe = new Recipe(recipe);
        resolve(newRecipe);
      });
    });
  });
}
/* GET recipes. */
router.get('/', async (req, res, next) => {
  //first prepare the csv data
  //function returns a promise with the csv data
  //that I can then use in the next step
  const csvData = await prepareCsvData();
  for (var i = 0; i < csvData.length; i++) {
    getRecipeFromBlog(csvData[i].ID)
      .then((newRecipe) => {
        //when I have a recipe for a given recipe ID
        //update database in firestore
        firestore
          .collection(collectionKey)
          .doc("" + newRecipe.id)
          .set(newRecipe)
          .then(function () {
            console.log('document written');
          });
      });
  }
  res.send('done');
});
You need to do something like the code below. Play around with it and you should get it working. Let me know if that works!
router.get("/", async (req, res, next) => {
const csvData = await prepareCsvData();
const recipePromises = [];
// check if data is empty or not
if (!csvData.length) {
return res.send("Empty data");
}
csvData.forEach((element) => {
recipePromises.push(getRecipeFromBlog(element.id));
});
// await for all promises parallelly.
const result = await Promise.all(recipePromises);
// Get a new write batch
const batch = db.batch();
result.forEach((recipe) => {
const ref = db.collection("recipes").doc(`${recipe.id}`);
batch.set(ref, recipe);
});
// Commit the batch
await batch.commit();
res.send("done");
});
The OP code looks pretty close to working. Have the promise-returning functions been tested? Assuming they work, first decorate them as async...
async function prepareCsvData() {...}
async function getRecipeFromBlog(recipeId) {...}
Create another promise-returning function to insert many recipes into firebase...
async function writeRecipesToFB(recipes) {
  const collectionRef = firestore.collection(collectionKey);
  const promises = recipes.map(recipe => {
    return collectionRef.doc(`${recipe.id}`).set(recipe);
  });
  return Promise.all(promises);
}
As another answer suggests, Firestore's batch write is a good alternative...
async function writeRecipesToFB(recipes) {
  // as a single batched write
  const collectionRef = firestore.collection(collectionKey);
  const batch = firestore.batch();
  recipes.forEach(recipe => {
    const docRef = collectionRef.doc(`${recipe.id}`);
    batch.set(docRef, recipe);
  });
  return batch.commit();
}
Now the express function is easy to write...
router.get('/', async (req, res, next) => {
  const csvData = await prepareCsvData();
  const promises = csvData.map(row => {
    return getRecipeFromBlog(row.ID);
  });
  const recipes = await Promise.all(promises);
  await writeRecipesToFB(recipes);
  res.send('done');
});

How do you pipe to multiple streams? [duplicate]

This question already has answers here: How to pipe one readable stream into two writable streams at once in Node.js? (3 answers). Closed 4 years ago.
I'm new to Node and I'm trying to download a file, hash it, then save it to disk using the hash as the file name. I'm getting correct hashes (I think) but the files are blank.
const fs = require("fs-extra")
const fetch = require("node-fetch")
const crypto = require('crypto')
const response = await fetch(url)
const sha256 = crypto.createHash("sha256")
sha256.setEncoding('hex')
response.body.pipe(sha256)
await new Promise(resolve => response.body.on("end", resolve))
sha256.end()
const hash = sha256.read()
const file = fs.createWriteStream(hash + ".jpg")
response.body.pipe(file)
My trick for resolving your problem is to store the file under a unique name first (using the current timestamp), hash the stream from the response, and then rename the file to the hash.
I've tested this code and it's working well:
const fs = require("fs-extra")
const path = require('path');
const fetch = require("node-fetch")
const crypto = require('crypto')
const downloadImage = async (url) => {
try {
const response = await fetch(url);
/** Store file with name current timestamp */
const filename = "IMG_" + Date.now() + ".jpg";
const dest = path.join(__dirname, filename);
response.body.pipe(fs.createWriteStream(dest))
/** Hash file */
const sha256 = crypto.createHash("sha256")
sha256.setEncoding('hex')
response.body.pipe(sha256)
await new Promise(resolve => response.body.on("end", resolve))
sha256.end()
const hash = sha256.read()
/** Rename file with hash value */
await fs.rename(dest, path.join(__dirname, hash + ".jpg"))
} catch (err) {
console.log(err);
}
}
const url = "https://i0.wp.com/wptavern.com/wp-content/uploads/2016/07/stack-overflow.png?ssl=1";
downloadImage(url);
But you can also create a utility function for hashing a stream (the response you received); this is my recommendation for your code:
const fs = require("fs-extra")
const path = require('path');
const fetch = require("node-fetch")
const crypto = require('crypto')
function streamHash(stream, algorithm = 'md5') {
return new Promise((resolve, reject) => {
let shasum = crypto.createHash(algorithm);
try {
stream.on('data', function (data) {
shasum.update(data)
})
stream.on('end', function () {
const hash = shasum.digest('hex')
return resolve(hash);
})
} catch (error) {
return reject(error);
}
});
}
const downloadImage = async (url) => {
try {
const response = await fetch(url);
/** Store file with name current timestamp */
const filename = "IMG_" + Date.now() + ".jpg";
const dest = path.join(__dirname, filename);
response.body.pipe(fs.createWriteStream(dest))
/** Hash file */
const hash = await streamHash(response.body, 'sha256');
/** Rename file with hash value */
await fs.rename(dest, path.join(__dirname, hash + ".jpg"))
} catch (err) {
console.log(err);
}
}
const url = "https://i0.wp.com/wptavern.com/wp-content/uploads/2016/07/stack-overflow.png?ssl=1";
downloadImage(url);
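
For completeness, the linked duplicate's technique also applies directly here: a single readable stream can be piped into several writable streams at once, as long as every pipe is attached before the data starts flowing. A minimal sketch under that assumption, using the same modules as above (the temporary file name is arbitrary):

const fs = require("fs-extra");
const path = require("path");
const fetch = require("node-fetch");
const crypto = require("crypto");

const downloadAndHash = async (url) => {
  const response = await fetch(url);
  const dest = path.join(__dirname, "download.tmp");
  const sha256 = crypto.createHash("sha256");
  sha256.setEncoding("hex");
  // Attach both destinations before awaiting, so each one
  // receives every chunk of the response body.
  response.body.pipe(fs.createWriteStream(dest));
  response.body.pipe(sha256);
  await new Promise((resolve, reject) => {
    response.body.on("end", resolve);
    response.body.on("error", reject);
  });
  sha256.end();
  const hash = sha256.read();
  await fs.rename(dest, path.join(__dirname, hash + ".jpg"));
};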

How to make asynchronous code synchronous => how to test asynchronous operations (i.e. writing a file)

I am writing unit tests (Mocha/Chai) for a file manipulation function.
I am a beginner in Node.js but am used to other languages.
I use TypeScript, but the sample is in JS.
Here is my problem:
A function writes to a file.
When the program reads the file a bit later, the file is empty.
When I look after the program is done, the file is filled.
A quick code sample:
const fs = require("fs");
function test() {
let fss = fs.createWriteStream("file.txt", { flags: 'a+' });
fss.write("bla");
fss.end(() => {
fss.destroy();
});
}
test();
let val = fs.readFileSync('file.txt', 'utf8');
console.log(val); // empty
But it's not the first time I've run into this problem. What is the good practice: promisify, events? (I don't really like having everything as a callback.)
A couple of alternatives:
Call the code you need when done with the write stream
const fs = require("fs");
function test() {
let fss = fs.createWriteStream("file.txt", { flags: 'a+' });
fss.write("bla");
fss.end(() => {
fss.destroy();
let val = fs.readFileSync('file.txt', 'utf8');
// do stuff here with val
});
}
test();
Return a Promise in test
const fs = require("fs");
function test() {
return new Promise((resolve, reject) => {
let fss = fs.createWriteStream("file.txt", { flags: 'a+' });
fss.write("bla");
fss.end(() => {
fss.destroy();
resolve();
});
});
}
test().then(() => {
let val = fs.readFileSync('file.txt', 'utf8');
// do stuff here with val
});
Use the async / await syntax
const fs = require("fs");
async function test() {
await new Promise((resolve, reject) => {
let fss = fs.createWriteStream("file.txt", { flags: 'a+' });
fss.write("bla");
fss.end(() => {
fss.destroy();
resolve();
});
});
}
await test();
let val = fs.readFileSync('file.txt', 'utf8');
// do stuff here with val
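
A fourth option, assuming Node 11+ where the promise-based fs API is available: skip the manual stream handling entirely and use fs.promises, which resolves once the data has been flushed (appendFile mirrors the { flags: 'a+' } behaviour of the original stream):

const fs = require("fs");

async function test() {
  // appendFile resolves only after the write has completed.
  await fs.promises.appendFile("file.txt", "bla");
}

(async () => {
  await test();
  const val = await fs.promises.readFile("file.txt", "utf8");
  console.log(val); // filled
})();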

How to read file with async/await properly?

I cannot figure out how async/await works. I slightly understand it but I can't make it work.
function loadMonoCounter() {
  fs.readFileSync("monolitic.txt", "binary", async function (err, data) {
    return await new Buffer(data);
  });
}

module.exports.read = function () {
  console.log(loadMonoCounter());
};
I know, I could use readFileSync, but if I do, I know I'll never understand async/await and I'll just bury the issue.
Goal: Call loadMonoCounter() and return the content of a file.
That file is incremented every time incrementMonoCounter() is called (every page load). The file contains the dump of a buffer in binary and is stored on a SSD.
No matter what I do, I get an error or undefined in the console.
Since Node v11.0.0, fs promises are available natively without promisify:
const fs = require('fs').promises;

async function loadMonoCounter() {
  const data = await fs.readFile("monolitic.txt", "binary");
  return Buffer.from(data);
}
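
For illustration, here is one way the OP's read export could consume that function (a sketch, not part of the original answer):

module.exports.read = async function () {
  // await resolves the promise returned by loadMonoCounter
  const counter = await loadMonoCounter();
  console.log(counter);
};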
To use await/async you need methods that return promises. The core API functions don't do that without wrappers like promisify:
const fs = require('fs');
const util = require('util');

// Convert fs.readFile into Promise version of same
const readFile = util.promisify(fs.readFile);

function getStuff() {
  return readFile('test');
}

// Can't use `await` outside of an async function so you need to chain
// with then()
getStuff().then(data => {
  console.log(data);
});
As a note, readFileSync does not take a callback, it returns the data or throws an exception. You're not getting the value you want because that function you supply is ignored and you're not capturing the actual return value.
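To illustrate that point: used correctly, readFileSync takes no callback and simply returns the data. A minimal sketch with the same file name:

const fs = require('fs');

// The return value is the file content; errors are thrown, not passed back.
const data = fs.readFileSync('monolitic.txt', 'binary');
console.log(Buffer.from(data));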
This is the TypeScript version of Joel's answer. It is usable from Node 11.0:
import { promises as fs } from 'fs';

async function loadMonoCounter() {
  const data = await fs.readFile('monolitic.txt', 'binary');
  return Buffer.from(data);
}
You can use fs.promises, available natively since Node v11.0.0:
import fs from 'fs';

const readFile = async filePath => {
  try {
    const data = await fs.promises.readFile(filePath, 'utf8');
    return data;
  } catch (err) {
    console.log(err);
  }
};
You can easily wrap the readFile command with a promise like so:
const fs = require('fs');

async function readFile(path) {
  return new Promise((resolve, reject) => {
    fs.readFile(path, 'utf8', function (err, data) {
      if (err) {
        return reject(err);
      }
      resolve(data);
    });
  });
}
then use it (inside an async function):
const data = await readFile("path/to/file");
From Node v14.0.0:
const { readFile } = require('fs/promises');

const myFunction = async () => {
  const result = await readFile('monolitic.txt', 'binary');
  console.log(result);
};

myFunction();
To keep it succinct and retain all functionality of fs:
const fs = require('fs');
const fsPromises = fs.promises;

async function loadMonoCounter() {
  const data = await fsPromises.readFile('monolitic.txt', 'binary');
  return Buffer.from(data);
}
Importing fs and fs.promises separately gives access to the entire fs API while also keeping things more readable, so that something like the next example is easily accomplished.
// the 'next example'
fsPromises.access('monolitic.txt', fs.constants.R_OK | fs.constants.W_OK)
  .then(() => console.log('can access'))
  .catch(() => console.error('cannot access'));
There is a synchronous fs.readFileSync(path, options) method, but to stay asynchronous you can promisify fs.readFile:
const fs = require("fs");
const util = require("util");
const readFile = util.promisify(fs.readFile);
const getContent = async () => {
let my_content;
try {
const { toJSON } = await readFile("credentials.json");
my_content = toJSON();
console.log(my_content);
} catch (e) {
console.log("Error loading client secret file:", e);
}
};
I read the file by using a Promise. For me this works properly:
const fs = require('fs');

// function which returns a Promise
const read = (path, type) => new Promise((resolve, reject) => {
  fs.readFile(path, type, (err, file) => {
    if (err) return reject(err);
    resolve(file);
  });
});

// example of how to call this function
read('file.txt', 'utf8')
  .then((file) => console.log('your file is ' + file))
  .catch((err) => console.log('error reading file ' + err));

// another example, calling the function inside async
async function func() {
  let file = await read('file.txt', 'utf8');
  console.log('your file is ' + file);
}
You can find my approach below. First, I required fs as fsBase, then assigned its promises API to the fs variable.
const fsBase = require('fs');
const fs = fsBase.promises;

const fn = async () => {
  const data = await fs.readFile('example.txt', 'utf8');
  console.log(data);
};

fn();
This produces a string from the contents of your file; you don't need to use promises for this to work:
const fs = require('fs');
const data = fs.readFileSync("./path/to/file.json", "binary");
See this example: https://www.geeksforgeeks.org/node-js-fs-readfile-method/
// Include fs module
var fs = require('fs');

// Use fs.readFile() method to read the file
fs.readFile('demo.txt', (err, data) => {
  console.log(data);
});
