Export CSV without crash from Node.js

I need to export a CSV for a large amount of data, around 100k rows. I'm using json2csv, but it sometimes takes a long time and crashes. I tried Node.js streams with no luck, and my googling hasn't turned up a fix. Can anyone guide me on how to fix this?
Node.js
var formatedData = {......} // object with data
let filename = 'test' + '.csv';
let pathName = await writeToCSV(filename, formatedData, fields);
let readStream = fs.createReadStream(pathName);
res.setHeader('Content-disposition', `attachment; filename=${filename}`);
res.set('Content-Type', 'text/csv');
let downloadStream = readStream.pipe(res);
fields = null;
formatedData = null;
downloadStream.on('finish', function () {
    fs.unlink(pathName, function () {});
    downloadStream = null;
    readStream = null;
});
writeToCSV
function writeToCSV(filename, data, fields, option) {
    return new Promise((resolve, reject) => {
        if (typeof data !== 'object') {
            return reject(new Error('Data is not an object'));
        }
        let options = { fields };
        if (typeof option === 'object') {
            for (let key in option) {
                options[key] = option[key];
            }
        }
        let tmpPath = path.join(__dirname, '..', 'tmp'); // temp directory for generated files
        let pathFile = path.join(tmpPath, filename);
        return Promise.all([Promise.resolve(json2csv(data, options).split('\n')), checkTMPExist(tmpPath)]).then(data => {
            let csvFormat = data[0];
            let writeStream = fs.createWriteStream(pathFile);
            csvFormat.forEach((lines, index) => {
                if (index === csvFormat.length - 1) {
                    writeStream.end(lines + '\n');
                } else {
                    writeStream.write(lines + '\n');
                }
            });
            writeStream.on('finish', function () {
                return resolve(pathFile);
            });
            writeStream.on('error', function (err) {
                fs.unlink(pathFile, () => {});
                return reject(err);
            });
        }).catch(err => {
            fs.unlink(pathFile, () => {});
            return reject(err);
        });
    });
}
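Rather than materializing the whole CSV string and a temp file, json2csv (v4+) also ships a streaming Transform, so rows are converted and sent as they are read. A minimal sketch, assuming Node 12+ for Readable.from; streamCSVToResponse is a hypothetical helper, not part of the original code:

const { Readable } = require('stream');
const { Transform: Json2csvTransform } = require('json2csv'); // json2csv v4+ streaming API

function streamCSVToResponse(res, rows, fields, filename) {
    res.setHeader('Content-disposition', `attachment; filename=${filename}`);
    res.set('Content-Type', 'text/csv');
    // Readable.from turns the array into an object-mode stream, so rows are
    // converted one at a time instead of building one 100k-row string in memory.
    Readable.from(rows)
        .pipe(new Json2csvTransform({ fields }, { objectMode: true }))
        .on('error', (err) => res.destroy(err)) // abort the response if conversion fails mid-stream
        .pipe(res);
}

If the rows come from a database, piping the driver's query stream in place of Readable.from(rows) avoids ever holding all 100k rows in memory.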
Front-end Ajax call
function export_csv(url, fileName) {
    $.ajax({
        url: url,
        type: "GET",
        success: function (result) {
            var encodedUri = 'data:application/csv;charset=utf-8,' + encodeURIComponent(result);
            var link = document.createElement("a");
            link.setAttribute("href", encodedUri);
            link.setAttribute("download", fileName);
            document.body.appendChild(link);
            link.click();
        },
        error: function (xhr) {
            console.log("Export csv have some issue:" + JSON.stringify(xhr));
        }
    });
}
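Note that this data: URI approach re-buffers the entire CSV in browser memory after the server already streamed it. Since the endpoint sets Content-Disposition: attachment, the browser can download it directly; a sketch:

function export_csv(url) {
    // The server already marks the response as an attachment,
    // so a plain navigation lets the browser stream the download to disk.
    var link = document.createElement("a");
    link.href = url;
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
}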

Related

How can I convert code from Node.js to Angular including API features [closed]

I need to convert Node.js code into Angular. Please suggest the steps. The Node.js code gets data via an API; I need the same in Angular.
The task is to get data from Autodesk using their API and post it into a MySQL database (Workbench). This has already been done in Node.js, and I want to apply the same functionality in Angular.
Controller code: UpdateController.js
const fs = require('fs');
const path = require('path');
const csvtojson = require('csvtojson');
const xlsx = require('xlsx');
const mongoose = require('mongoose');
const openssl = require('openssl-nodejs');
const extract = require('extract-zip');
const SubscriptionsModel = require('../models/subscription');
const PriceListModel = require('../models/pricelist');
const AuthModel = require('../models/auth');
var SubscriptionController = require('../controllers/subscriptioncontroller');
const { sendPasswordNotificationAfterBatch } = require('../models/notification');
var auth = new AuthModel();
class UploadController {
getCSVData(file){
console.log("In getCSVData");
// csvtojson().fromFile() already returns a promise, so no wrapper is needed
return csvtojson().fromFile(file);
}
getXLSXData(filename){
console.log("In getXLSData");
return new Promise((resolve, reject) => {
var workbook = xlsx.readFile(filename);
var sheet_list = workbook.SheetNames;
var sheet = workbook.Sheets[sheet_list[0]];
var jsonObjects = xlsx.utils.sheet_to_json(sheet, { range: 7, raw: true, defval: null });
var response = [];
response = jsonObjects.filter(item => {
return item.SRP !== undefined && item.SRP !== null && item.DTP !== undefined && item.DTP !== null
});
resolve(response);
});
}
updatePricelist(filePath){
return new Promise((resolve, reject) => {
this.getXLSXData(filePath).then(res => {
if(res){
// Delete backup
//PriceListModel.collection.drop();
// Create collection for updated data
PriceListModel.insertMany(res, err => {
if(err){
reject(err);
}
resolve();
})
}
}).catch(error => {
reject(error);
})
});
}
importSubscriptions()
{
console.log("In Import Subscriptions");
var url = `v1/export/subscriptions`;
console.log(`Importing subscription data...`);
return new Promise((resolve,reject) =>{
var body = {
'startDateSince': '2000-01-01'
}
auth.post(url, body).then(function(res)
{
var d = JSON.parse(res);
console.log(d);
resolve(d);
}).catch(err => {
reject(err);
});
});
}
checkImportSubscriptionJobStatus(jobId, count){
var url = `/v1/export/subscriptions/${jobId}`;
console.log(`Checking import subscription job status ${++count}...`);
return new Promise((resolve,reject) =>{
auth.getUsingHttpPlugin(url).then((res) => {
if (res.statusCode == 303) {
console.log("Current status is " + res.statusCode);
resolve(res.headers["location"]);
}
else {
console.log(`Current status is ${res.statusCode}, will retry in 60 seconds again `);
setTimeout(() => {
this.checkImportSubscriptionJobStatus(jobId, count).then(fileUrl => {
resolve(fileUrl);
});
}, 60000);
}
}).catch(function(err){
reject(err);
});
});
}
downloadFile(jobId, fileUrl) {
console.log("Downloading file...");
return new Promise((resolve, reject) => {
var file = fs.createWriteStream(`uploads/subscriptions/${jobId}.csv.zip.enc`);
//console.log(file);
console.log(fileUrl);
auth.getPlainHttp(fileUrl).then(function (response) {
response.pipe(file);
// resolve only after the write stream has flushed to disk
file.on('finish', function () {
console.log(file.path);
resolve();
});
}).catch(err => {
console.log("In Catch");
reject(err);
});
});
}
decriptFieUsingOpenSSL(fileName, password) {
console.log("Decrypting file...");
return new Promise((resolve, reject) => {
var encFile = `../uploads/subscriptions/${fileName}.zip.enc`;
var zipFile = `../uploads/subscriptions/${fileName}.zip`;
var openSSLCmd = `enc -aes-256-cbc -md sha512 -d -in ${encFile} -out ${zipFile} -k ${password}`;
console.log('start running openssl command ' + openSSLCmd);
setTimeout(function () {
openssl(openSSLCmd, () => {
resolve();
})
}, 5000);
});
}
updateSubscriptionData(){
return new Promise((resolve, reject) => {
this.importSubscriptions().then(response => {
if(response.error !== undefined){
console.log(`Error Code: ${response.error.code}`);
console.log(`Message: ${response.error.message}`);
reject();
}else{
var id = response.id;
var password = response.password;
this.checkImportSubscriptionJobStatus(id, 0).then(fileUrl => {
this.downloadFile(id, fileUrl).then(() => {
var fileName = `${id}.csv`;
this.decriptFieUsingOpenSSL(fileName, password).then(() => {
console.log("File decrypted successfully...");
var targetFolder = path.join(__dirname, '..', 'uploads', 'subscriptions');
var zipFile = path.join(targetFolder, `${fileName}.zip`);
extract(zipFile, { dir : targetFolder }).then(() => {
console.log("Extracted file successfully");
var fullFileName = path.join(targetFolder, fileName);
this.getCSVData(fullFileName).then(res => {
if(res){
SubscriptionController.updateSubscriptions(res).then(data => {
resolve('Subscriptions data replicated successfully!!!');
console.log("CHECK NOTIFICATION");
sendPasswordNotificationAfterBatch();
}).catch(err => {
reject(err);
});
}
}).catch(error => {
reject(error);
})
})
});
});
});
}
}).catch(err => {
reject(err);
})
});
}
}
module.exports = new UploadController();
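As an aside, the recursive retry in checkImportSubscriptionJobStatus can be flattened with async/await. A sketch under the same 60-second polling assumption; pollExportJob and delay are hypothetical stand-ins, with auth passed in explicitly:

const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function pollExportJob(auth, jobId) {
    for (let count = 1; ; count++) {
        const res = await auth.getUsingHttpPlugin(`/v1/export/subscriptions/${jobId}`);
        if (res.statusCode == 303) {
            return res.headers['location'];
        }
        console.log(`Current status is ${res.statusCode}, retry ${count} in 60 seconds`);
        await delay(60000);
    }
}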
Model code: auth.js
var CryptoJS = require("crypto-js");
var request = require("request");
var httpRequest = require("http");
var httpsRequest = require("https");
var config = require('../config.json');
class AuthModel {
constructor() {
this.timestamp = Math.floor((new Date()).getTime() / 1000);
this.consumer_key = config.app.consmer_key;
this.consumer_secret = config.app.consumer_secret;
this.callback_url = config.app.callback_url;
this.partner_csn = config.app.partner_csn;
this.environment_url_stg = "enterprise-api-stg.autodesk.com"; //STG Environment
this.environment_url_prd = "enterprise-api.autodesk.com"; //prd Environment
this.access_token = '';
this.api_timestamp = '';
}
getbaseUrl()
{
var env = config && config.app.env;
console.log("ENVIRONMENT");
console.log(env);
if (env == 'prd') {
return this.environment_url_prd;
}
else {
return this.environment_url_stg;
}
}
get(url, headerData){
var self = this;
return new Promise((resolve, reject) =>{
self.getAccessToken().then(function(token){
var time = Math.floor(Date.now() / 1000);
var header = {
'CSN': self.partner_csn,
'signature': self.getAPISignature(token),
'timestamp': self.api_timestamp,
'Authorization': "Bearer " + token
};
var options = {
method: 'GET',
url: `https://${ self.getbaseUrl() }/${url}`,
headers:header
};
request(options, function (error, response, body) {
if (error) {
return reject(error);
}
resolve(body);
});
}).catch(function(err){
reject(err);
});
});
}
post(url, body){
console.log(url);
var self = this;
return new Promise((resolve, reject) =>{
self.getAccessToken().then(function(token){
var time = Math.floor(Date.now() / 1000);
var headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': `Bearer ${token}`,
'signature': self.getAPISignature(token),
'timestamp': self.api_timestamp,
'CSN': self.partner_csn
};
var options = {
method: 'POST',
url: `https://${ self.getbaseUrl() }/${url}`,
headers,
form: body
};
request(options, function (error, response) {
if (error){
return reject(error);
}
resolve(response.body);
});
}).catch(function(err){
reject(err);
});
});
}
getAPISignature(token)
{
this.api_timestamp = Math.floor(Date.now() / 1000);
var message = this.callback_url + token + this.api_timestamp;
var hash = CryptoJS.HmacSHA256(message, this.consumer_secret);
var hashInBase64 = CryptoJS.enc.Base64.stringify(hash);
return hashInBase64;
}
createSignature(time)
{
var message = this.callback_url + this.consumer_key + time;
console.log(message);
var hash = CryptoJS.HmacSHA256(message, this.consumer_secret);
console.log(this.consumer_key);
console.log(this.consumer_secret);
var hashInBase64 = CryptoJS.enc.Base64.stringify(hash);
return hashInBase64;
}
createAuthorization(){
var passwordSignature = this.consumer_key + ":" + this.consumer_secret;
console.log(passwordSignature);
var authorization = Buffer.from(passwordSignature).toString('base64')
console.log("Authorization");
console.log(authorization);
return "Basic " + authorization;
}
getAccessToken(){
console.log("IN GetAccessToken");
var self = this;
var time = Math.floor((new Date()).getTime() / 1000);
return new Promise((resolve, reject) =>{
var options = {
method: 'POST',
url:`https://${self.getbaseUrl()}/v2/oauth/generateaccesstoken?grant_type=client_credentials`,
headers: {
timestamp: time,
signature: self.createSignature(time),
Authorization : self.createAuthorization()
}
};
request(options, function (error, response, body) {
if (error) {
return reject(error);
}
resolve(JSON.parse(body).access_token);
console.log(JSON.parse(body).access_token);
});
});
}
getUsingHttpPlugin(url, headerData) {
var self = this;
return new Promise((resolve, reject) => {
self.getAccessToken().then(function (token) {
console.log("Get Access Token");
console.log(token);
var time = Math.floor(Date.now() / 1000);
var header = {
'CSN': self.partner_csn,
'signature': self.getAPISignature(token),
'timestamp': self.api_timestamp,
'Authorization': "Bearer " + token
};
var options = {
method: 'GET',
host: `${self.getbaseUrl()}`,
path: `${url}`,
headers: header
};
var s = httpRequest.request(options, (res) => {
resolve(res);
});
s.end();
}).catch(function (err) {
reject(err);
});
});
}
getPlainHttp(url) {
console.log("URL");
console.log(url);
return new Promise((resolve, reject) => {
console.log("In 1");
var s = httpsRequest.get(url, (res) => {
console.log("RESPONSE STATUS CODE");
console.log(res.statusCode);
if (res.statusCode > 200) {
return reject(res);
}
resolve(res);
})
s.end();
});
}
}
module.exports = AuthModel;
If any other Node code is needed from my side, I am happy to share it.
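One caution before "converting" this wholesale: auth.js holds consumer_secret, and anything shipped to an Angular app runs in the browser, where secrets are visible to every user. The usual pattern is to keep the Node service as-is and have Angular call it over HTTP. A minimal sketch, assuming a hypothetical /api/subscriptions/update route exposed by the Node app:

// update.service.ts
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Observable } from 'rxjs';

@Injectable({ providedIn: 'root' })
export class UpdateService {
    constructor(private http: HttpClient) {}

    // Triggers the existing Node-side updateSubscriptionData() flow.
    updateSubscriptions(): Observable<string> {
        return this.http.get('/api/subscriptions/update', { responseType: 'text' });
    }
}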

readFile synchronously in Node.js

I am new to Node.js and just started learning. I need to read 5 JSON files and place them in an array. I have created 2 functions: readDirectory and processFile.
let transactionArray = [];
router.get('/', (req,res) => {
//joining path of directory
const directoryPath = path.join(__dirname, '../data');
readDirectory(directoryPath);
res.send(JSON.stringify(transactionArray))
})
readDirectory will get the dir and will read the filenames.
function readDirectory(directoryPath){
//passsing directoryPath and callback function
fs.readdir(directoryPath, function (err, files) {
//handling error
if (err) {
return console.log('Unable to scan directory: ' + err);
}
//listing all files using map
let fileSummary = files.map(file => {
//get the filename
let categoryName = ''
if (file.includes('category1')) {
categoryName = 'category1'
} else if (file.includes('category2')) {
categoryName = 'category2'
} else {
categoryName = 'Others'
}
// read the file
const filePath = directoryPath +'/'+ file
fs.readFile(filePath, 'utf8', (err, fileContents) => {
if (err) {
console.error(err)
return
}
try {
let data = JSON.parse(fileContents, categoryName)
processFile(data, categoryName);
} catch(err) {
console.error(err)
}
})
})
});
}
Then it will read the file using function processFile.
function processFile(data, categoryName)
{
let paymentSource = ''
if (categoryName == 'category1'){
paymentSource = categoryName +': '+ categoryName +' '+ data.currency_code
} else if (categoryName == 'category2') {
paymentSource = categoryName +': '+ data.extra.payer +'-'+ data.currency_code
} else {
paymentSource = 'Others'
}
let transactionDetails = new Transaction(
data.id,
data.description,
categoryName,
data.made_on,
data.amount,
data.currency_code,
paymentSource)
transactionArray.push(transactionDetails)
console.log(transactionArray);
}
The console log is something like this:
[{Transaction1}] [{Transaction1},{Transaction2}] [{Transaction1},{Transaction2},{Transaction3}]
but the result on the UI is only []
While debugging, I noticed the files are not read synchronously, so I tried readFileSync, but it did not work. How can I make these functions run in order so the route does not return an empty array?
Do some playing around to understand what the fs functions do when they have callbacks, and when they're synchronous. From the code that you have, we can make a few changes so that you don't have to use the synchronous functions from the file system library.
First of all you need to wait for all the asynchronous tasks to complete before returning response.
router.get('/', async (req, res) => {
// joining path of directory
const directoryPath = path.join(__dirname, '../data')
readDirectory(directoryPath).then(() => {
res.send(JSON.stringify(transactionArray))
}).catch(err => {
res.status(500).json(err)
})
})
Secondly, to keep the code as is as to teach you something about promises, lets wrap the first function in a promise.
function readDirectory (directoryPath) {
return new Promise((resolve, reject) => {
// passsing directoryPath and callback function
fs.readdir(directoryPath, function (err, files) {
// handling error
if (err) {
return reject(new Error('Unable to scan directory: ' + err))
}
// listing all files using map
const fileSummary = Promise.all(
files.map(file => {
return new Promise((resolve, reject) => {
// get the filename
let categoryName = ''
if (file.includes('category1')) {
categoryName = 'category1'
} else if (file.includes('category2')) {
categoryName = 'category2'
} else {
categoryName = 'Others'
}
// read the file
const filePath = directoryPath + '/' + file
fs.readFile(filePath, 'utf8', (err, fileContents) => {
if (err) {
console.error(err)
return reject(err)
}
try {
const data = JSON.parse(fileContents)
processFile(data, categoryName).then(() => {
resolve()
})
} catch (err) {
console.error(err)
reject(err)
}
})
})
})
).then(() => {
resolve()
}).catch(err => {
reject(err)
})
})
})
}
Please refer to the bible (MDN) for JavaScript promises: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
And finally wrap the processFile function in a promise
function processFile (data, categoryName) {
return new Promise((resolve, reject) => {
let paymentSource = ''
if (categoryName == 'category1') {
paymentSource = categoryName + ': ' + categoryName + ' ' + data.currency_code
} else if (categoryName == 'category2') {
paymentSource = categoryName + ': ' + data.extra.payer + '-' + data.currency_code
} else {
paymentSource = 'Others'
}
const transactionDetails = new Transaction(
data.id,
data.description,
categoryName,
data.made_on,
data.amount,
data.currency_code,
paymentSource)
transactionArray.push(transactionDetails)
console.log(transactionArray)
resolve()
})
}
What the heck am I doing? I'm just making your code execute asynchronous tasks, but wait for them to complete before moving on. Promises are a way to handle this. You could easily pull this off with the fs synchronous functions, but this way you can learn about promises!
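For comparison, the same flow with fs.promises and async/await (stable since roughly Node 12) needs no manual Promise wrapping. A sketch that reuses the processFile above:

const fsp = require('fs').promises

async function readDirectory (directoryPath) {
  const files = await fsp.readdir(directoryPath)
  await Promise.all(files.map(async file => {
    let categoryName = 'Others'
    if (file.includes('category1')) categoryName = 'category1'
    else if (file.includes('category2')) categoryName = 'category2'
    const fileContents = await fsp.readFile(directoryPath + '/' + file, 'utf8')
    await processFile(JSON.parse(fileContents), categoryName)
  }))
}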

Node.js Scraper

I have written a scraper in TypeScript, running on node:10.12.0.
Issue: the code goes to sleep after a few hours, at random, and I have to restart it. My best guess is that it gets stuck on a URL request.
Tools/packages used:
Puppeteer
Cheerio
Typescript
Code:
import * as cheerio from "cheerio";
import * as request from "request";
import * as fs from "fs";
import * as shell from "shelljs";
import pup = require("puppeteer");
class App {
// @ts-ignore
public browser: pup.Browser;
public appendToFile(file: string, content: string): Promise < string > {
return new Promise < string > ((resolve, reject) => {
try {
fs.appendFileSync(file, content);
resolve("DONE");
} catch (e) {
reject(e);
}
});
}
public loadPage(url: string): Promise < any > {
return new Promise < any > ((resolve, reject) => {
request.get(url, async (err, res, html) => {
if (!err && res.statusCode === 200) {
resolve(html);
} else {
if (err) {
reject(err);
} else {
reject(res);
}
}
});
});
}
public step1(url: string): Promise < string > {
return new Promise < string > (async (resolve, reject) => {
let page: pup.Page | undefined;
try {
let next = false;
let urlLink = url;
let first = true;
let header = "unknown";
let f = url.split("/");
let folder = f[f.length - 3];
folder = folder || header;
let path = "data/" + folder;
shell.mkdir("-p", path);
page = await this.browser.newPage();
await page.goto(url, {
timeout: 0
});
let count = 1;
do {
next = false;
let res = await page.evaluate(() => {
let e = document.querySelectorAll(".ch-product-view-list-container.list-view li ul > li > h6 > a");
let p: string[] = [];
e.forEach((v) => {
p.push(("https://www.link.com") + (v.getAttribute("href") as string));
});
return p;
});
// for(const l of res) {
// try {
// await this.step2(l, "" , "")
// } catch(er) {
// this.appendToFile("./error.txt", l + "::" + url + "\n").catch(e=>e)
// }
// }
let p = [];
let c = 1;
for (const d of res) {
p.push(await this.step2(d, folder, c.toString()).catch((_e) => {
console.log(_e);
fs.appendFileSync("./error-2.txt", urlLink + " ### " + d + "\n");
}));
c++;
}
await Promise.all(p);
await this.appendToFile("./processed.txt", urlLink + ":" + count.toString() + "\n").catch(e => e);
count++;
console.log(urlLink + ":" + count);
let e = await page.evaluate(() => {
let ele = document.querySelector("#pagination-next") as Element;
let r = ele.getAttribute("style");
return r || "";
});
if (e === "") {
next = true;
await page.click("#pagination-next");
// console.log('waitng')
await page.waitFor(1000);
// console.log('done wait')
// await page.waitForNavigation({waitUntil: 'load'}).catch(e=> console.log(e));
// await Promise.all([
// page.click("#pagination-next"),
// page.waitForNavigation({ waitUntil: 'networkidle0'}), // ]);
}
} while (next);
// await page.close();
resolve("page all scrapped");
} catch (errrr) {
reject(errrr);
} finally {
if (page !== undefined) {
await page.close().catch(e => e);
}
}
});
}
public step2(url: string, folder: string, file: string): Promise < string > {
return new Promise < string > (async (resolve, reject) => {
try {
let html = await this.loadPage(url).catch(e => reject(e));
let $ = cheerio.load(html);
let ress: any = {};
let t = $(".qal_title_heading").text();
if (t) {
ress.header = t.replace(/"/g, "'").replace(/\n|\r|\t/g, "");
}
let d = $("div.ch_formatted_text.qal_thread-content_text.asker").html();
if (d) {
ress.body = d.replace(/"/g, "'").replace(/\n|\r|\t/g, "");
}
// let sprit = "-------------------------------";
let filename = "data" + file + ".json"; // ((t.replace(/[^\w\s]/gi, "")).substring(0,250)+".txt")
let data = JSON.stringify(ress); // t + sprit + d + "\n---end---\n";
await this.appendToFile("./data/" + folder + "/" + filename, data + ",\n")
.then((r) => {
resolve(r);
});
} catch (err) {
reject(err);
}
});
}
}
async function main() {
process.on("SIGTERM", () => {
console.log("SigTerm received");
process.exit(1);
});
process.on("SIGINT", () => {
console.log("SigInt received");
process.exit(1);
});
let path = "data/unknown";
shell.mkdir("-p", path);
let c = new App();
let list: string[] = [];
console.log(process.argv[2]);
require("fs").readFileSync(process.argv[2], "utf-8").split(/\r?\n/).forEach((line: string) => {
list.push(line);
});
console.log("total links->" + list.length);
c.browser = await pup.launch({
headless: true
});
for (const l of list) {
await c.step1(l).then(e => {
fs.appendFileSync("./processed.txt", l);
}).catch(e => {
fs.appendFileSync("./error.txt", l);
});
}
}
main();
Let me know if you need something else from me. Also this is all the code.
So, I figured out two problems.
Chrome (under Puppeteer) consumes a lot of CPU, and the usage trends upward: it starts out moderate and gradually climbs. In my case it began at about 4% and reached 100% after a day. I've submitted an issue on their GitHub.
I did not specify a timeout in request.
was:
request.get(url, async (err, res, html) => {
should be:
request.get(url, { timeout: 1500 }, async (err, res, html) => {
So far my code has been running fine for more than a day. The only remaining issue is the high CPU usage, but that's not my concern for now.
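For the hangs themselves, one defensive option is to race every network-bound await against a timer, so a stuck request rejects instead of stalling the loop forever. withTimeout below is a hypothetical helper, and the 30-second budget is an assumption:

function withTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
    return Promise.race([
        promise,
        new Promise<T>((_, reject) =>
            setTimeout(() => reject(new Error(`timed out after ${ms}ms`)), ms)),
    ]);
}

// usage inside step1's loop, for example:
// await withTimeout(this.step2(d, folder, c.toString()), 30000);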

Trouble with asynchronous functions in node.js

I'm very new to Node, and I'm trying to pull a list of IDs from an API, iterate through the list saving the output for each ID, and finally rename each generated file. The code below is the closest I've come. It works sometimes, but it frequently fails; I believe one function isn't waiting for the other to complete (e.g. it tries to read before a write finishes), and I'm sure I have other issues going on.
const apiKey = inputData.apiKey
var https = require('https');
var sync = require('sync');
var fs = require('fs');
var JSONfileloc = "./pdfs/file.json"
var queryurl = 'https://intakeq.com/api/v1/intakes/summary?startDate=2018-01-01'
var authHeaders = { 'X-Auth-Key': apiKey }
var queryOpts = { method: 'GET', headers: authHeaders}
function handleFile (error, file)
{
if (error) return console.error('Ran into a problem here', error)
}
fetch(queryurl, queryOpts)
.then
(function findAPI(res, err)
{
if( err )
{ console.log('I cant find the API '+err) }
return res.json()
{console.log('found the API!')}
}
)
.then (function itID(res, err)
{
if( err )
{ console.log('I cant iterate the API '+err) }
for(var i = 0; i < res.length; i++)
{
var intakeID=res[i].Id;
var APIoptions={ host:"intakeq.com", path:"/api/v1/intakes/"+ intakeID, headers: authHeaders };
var PDFoptions={ host:"intakeq.com", path:"/api/v1/intakes/"+ intakeID+'/pdf', headers: authHeaders };
console.log('Working on ID:'+intakeID)
var JSONrequest = https.get(APIoptions, writeJSON)
}})
//READ JSON FUNCTION
function readJSON (err, data)
{
if (err) throw err;
if(data.indexOf('New Patient Forms') >= 0)
var contents = fs.readFileSync(JSONfileloc, handleFile);
var jsonContent = JSON.parse(contents)
//pull PT Name
pName = (jsonContent.ClientName);
console.log('The Patient Name Is ' + jsonContent.ClientName)
//pull PT DOB
pDob = (jsonContent.Questions[3].Answer)
console.log('Patient DOB Is ' + jsonContent.Questions[3].Answer)
//pull Form Type
pForm = (jsonContent.QuestionnaireName)
console.log('The Form Submitted is ' + jsonContent.QuestionnaireName)
//rename and move JSON
fs.rename("./pdfs/file.json", './JSONLogs/'+pName+' '+pForm+' '+Date.now()+'.json', function(err) {
if ( err ) console.log('Problem renaming! ' + err)
else console.log('Copying & Renaming JSON File!');
})
};
//WRITE JSON FUNCTION
function writeJSON(response, err)
{
var JSONfile = fs.createWriteStream(JSONfileloc, handleFile);
if (err) throw err;
response.pipe(JSONfile);
console.log('JSON Created')
fs.readFile(JSONfileloc, readJSON)
}
The research I've done leads me to believe that async.forEach is probably the right approach here, but I've been having a hard time getting that to work properly. Thanks in advance and any suggestions are much appreciated.
const apiKey = inputData.apiKey
var https = require('https');
var sync = require('sync');
var fs = require('fs');
var JSONfileloc = "./pdfs/file.json"
var queryurl = 'https://intakeq.com/api/v1/intakes/summary?startDate=2018-01-01'
var authHeaders = {
'X-Auth-Key': apiKey
}
var queryOpts = {
method: 'GET',
headers: authHeaders
}
function handleFile(error, file) {
if (error) return console.error('Ran into a problem here', error)
}
fetch(queryurl, queryOpts)
.then(function findAPI(res) {
return res.json();
})
.then(function itID(res) {
const JSONRequests = [];
for (var i = 0; i < res.length; i++) {
var intakeID = res[i].Id;
var APIoptions = {
host: "intakeq.com",
path: "/api/v1/intakes/" + intakeID,
headers: authHeaders
};
var PDFoptions = {
host: "intakeq.com",
path: "/api/v1/intakes/" + intakeID + '/pdf',
headers: authHeaders
};
// https.get has response as a stream and not a promise
// This `httpsGet` function converts it to a promise
JSONRequests.push(httpsGet(APIoptions, i));
}
return Promise.all(JSONRequests);
})
function httpsGet(options, filename) {
return new Promise((resolve, reject) => {
https.get(options, (response) => {
// The writeJSON function, inlined for brevity
// Otherwise pass resolve to the separate writeJSON and call it in there
var JSONfile = fs.createWriteStream(filename + ".json");
response.pipe(JSONfile);
JSONfile.on('close', () => {
readJSON(filename + ".json").then(() => {
resolve();
})
})
})
})
}
//READ JSON FUNCTION
function readJSON(filename) {
// if (err) throw err;
var contents = fs.readFileSync(filename, 'utf-8'); // removed handleFile as readFileSync does not allow callbacks, added format
var jsonContent = JSON.parse(contents)
// Make your conditional checks here with the jsonContents
//pull PT Name
pName = (jsonContent.ClientName);
console.log('The Patient Name Is ' + jsonContent.ClientName)
//pull PT DOB
pDob = (jsonContent.Questions[3].Answer)
console.log('Patient DOB Is ' + jsonContent.Questions[3].Answer)
//pull Form Type
pForm = (jsonContent.QuestionnaireName)
console.log('The Form Submitted is ' + jsonContent.QuestionnaireName)
//rename and move JSON
return new Promise((resolve, reject) => {
fs.rename("./pdfs/file.json", './JSONLogs/' + pName + ' ' + pForm + ' ' + Date.now() + '.json', function (err) {
if (err) {
console.log('Problem renaming! ' + err);
reject(err);
} else {
console.log('Copying & Renaming JSON File!');
resolve();
}
})
})
};
Updated to convert https.get response stream to return a Promise which can be handled much better.
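One design note on the code above: Promise.all fires every request at once, which can trip API rate limits when the ID list is large. A small batching sketch (the batch size and the optionsFor helper are assumptions) processes a few IDs at a time:

async function inBatches(items, size, worker) {
    const results = [];
    for (let i = 0; i < items.length; i += size) {
        // wait for each batch to finish before starting the next
        results.push(...await Promise.all(items.slice(i, i + size).map(worker)));
    }
    return results;
}

// usage: await inBatches(ids, 5, (id) => httpsGet(optionsFor(id), id));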

Node stream hangs when emitting error

I have a stream that's checking a CSV. It works fine, except that when it emits an error it hangs, even after I send the response back.
export function ValidateCSV(options) {
let opt = options;
if (!(this instanceof ValidateCSV)) return new ValidateCSV(opt);
if (!opt) opt = {};
opt.objectMode = true;
opt.highWaterMark = 1000000;
Transform.call(this, opt);
}
util.inherits(ValidateCSV, Transform);
ValidateCSV.prototype.destroy = function () {
this.readable = false;
this.writable = false;
this.emit('end');
};
ValidateCSV.prototype._transform = function (chunk, encoding, done) {
// Do some stuff to the chunk
// Emit error
if (required.length > 0) {
this.emit('error', `The following columns are required: ${required.join(', ')}`);
}
done();
};
I was able to fix it by adding a destroy method, but it is still slow and hangs for a few seconds. Is there a better way to end/destroy a Transform stream?
ValidateCSV.prototype.destroy = function () {
this.readable = false;
this.writable = false;
this.emit('end');
};
EDIT:
Here is how I'm using the stream with busboy:
function processMultipart(req, res) {
const userId = req.query._userId;
const busboy = new Busboy({ headers: req.headers, limits: { files: 1 } });
const updateId = req.params.id;
// Transform stream to validate the csv
const validateCSV = new ValidateCSV();
validateCSV
.on('finish', () => {
// Process the csv
})
.on('error', (er) => {
//Do some logging
res.status(500).json(er).end();
});
// Multipart upload handler
busboy
.on('file', (fieldname, file, filename) => {
dataset.name = fieldname.length > 0 ?
fieldname : filename.substr(0, filename.indexOf('.csv'));
file
.on('error', (er) => {
//Send Error
})
.on('end', () => {
// Save dataset to mongo
if (dataset._update) {
res.status(200).json(dataset).end();
} else {
Dataset.create(dataset, (er) => {
if (er) {
res.status(500).json(er).end();
} else {
res.status(200).json(dataset).end();
}
});
}
}).pipe(validateCSV);
});
req.pipe(busboy);
}
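One conventional fix here is to pass the error to the _transform callback instead of calling this.emit('error', ...) and then done(): the stream then emits 'error' through its normal machinery, and modern Node (autoDestroy is the default since v14) tears it down without a hand-rolled destroy. A sketch using the ES6 class form, under the same elided validation logic:

const { Transform } = require('stream');

class ValidateCSV extends Transform {
    constructor(options = {}) {
        super({ objectMode: true, highWaterMark: 1000000, ...options });
    }

    _transform(chunk, encoding, done) {
        // ... do some stuff to the chunk and collect missing columns ...
        const required = []; // placeholder for the validation result above
        if (required.length > 0) {
            // the callback carries the error; no separate emit('error') + done()
            return done(new Error(`The following columns are required: ${required.join(', ')}`));
        }
        done(null, chunk);
    }
}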
