Appending array values to csv - node.js

I would like to take some output and append it to a CSV file.
This is the code I have so far:
async function writeData() {
  const csv = require('csv-parser');
  const fs = require('fs');
  const results = [];
  fs.createReadStream('C:\\Users\\User\\Documents\\testingclean.csv')
    .pipe(csv())
    .on('data', (data) => results.push(data))
    .on('end', async () => {
      const cookieJar = await getCookieJar();
      const promises = [];
      results.forEach((data) => {
        promises.push(boxrec.getPersonById(cookieJar, data.id));
      })
      try {
        const fighters = await Promise.all(promises);
        fighters.forEach((fighter) => {
          boxer = JSON.parse(JSON.stringify(fighter.output));
          fs.appendFile('C:\\Users\\User\\Documents\\newtest.csv', boxer, (err) => {
            if (err) console.error('Could not append data to csv');
            console.log('Data successfully appended');
          })
        });
      } catch (e) {
        console.log(e);
      }
    })
};
try {
  writeData();
} catch (error) {
  console.log("Error in writeData: " + error);
}
However, running this code does not produce the desired CSV output.
I am specifically writing to CSV because I have read that I cannot append to a JSON file (ideally I would want to write the data to JSON).
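As an aside on that JSON point: you cannot append to a single JSON array on disk without rewriting the whole file, but the JSON Lines format (one JSON object per line, usually a .jsonl or ndjson file) is append-friendly. A minimal sketch, assuming a hypothetical output path and a flat record object:

const fs = require('fs');

// JSON Lines: each record is one self-contained, newline-terminated
// JSON document, so appending never corrupts earlier records.
function appendRecord(path, record) {
  fs.appendFile(path, JSON.stringify(record) + '\n', (err) => {
    if (err) console.error('Could not append record:', err);
  });
}

appendRecord('C:\\Users\\User\\Documents\\fighters.jsonl', { id: 1, name: 'example' });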

If you don't have anything written to the CSV file, you should close the read stream explicitly at the end of your program:
var readStream = fs.createReadStream();
...
readStream.destroy();
I'm not sure, but this should be the full code:
async function writeData() {
  const csv = require('csv-parser');
  const fs = require('fs');
  const results = [];
  var readStream = fs.createReadStream('C:\\Users\\User\\Documents\\testingclean.csv');
  readStream
    .pipe(csv())
    .on('data', (data) => results.push(data))
    .on('end', async () => {
      const cookieJar = await getCookieJar();
      const promises = [];
      results.forEach((data) => {
        promises.push(boxrec.getPersonById(cookieJar, data.id));
      })
      try {
        const fighters = await Promise.all(promises);
        fighters.forEach((fighter) => {
          const boxer = JSON.parse(JSON.stringify(fighter.output));
          fs.appendFile('C:\\Users\\User\\Documents\\newtest.csv', boxer, (err) => {
            if (err) console.error('Could not append data to csv');
            else console.log('Data successfully appended');
          })
        });
      } catch (e) {
        console.log(e);
      }
      // Destroy the stream only once processing is done; destroying it
      // right after piping would cut the read short.
      readStream.destroy();
    });
};
try {
  writeData();
} catch (error) {
  console.log("Error in writeData: " + error);
}
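A note on the appending itself: fs.appendFile expects a string or Buffer, so passing the object produced by JSON.parse(JSON.stringify(fighter.output)) will not yield CSV rows. A minimal sketch of converting each record to a CSV line before appending, assuming fighter.output is a flat object whose values contain no commas, quotes, or newlines (use a CSV library such as csv-stringify otherwise):

const fs = require('fs');

// Join the object's values into one comma-separated, newline-terminated row.
function toCsvLine(obj) {
  return Object.values(obj).join(',') + '\n';
}

function appendCsvRow(path, obj) {
  fs.appendFile(path, toCsvLine(obj), (err) => {
    if (err) console.error('Could not append data to csv');
    else console.log('Data successfully appended');
  });
}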

Related

Send KnexJS error message to the frontend

I am having trouble sending an error to the front end when a CSV file is uploaded and the numbers already exist in the database. The backend logs an error that the primary key value already exists, but the code I have written tells the front end that the file uploaded just fine.
Code snippet:
router.post('/:program/upload', upload.single('testUpload'), (req, res, next) => {
  try {
    CSVtoPSQL(req.params.program, req.file.filename)
    return res.status(201).json({
      message: 'File Uploaded Just fine :)'
    });
  } catch (error) {
    return res.status(500).json({
      message: error
    })
  }
});

const CSVtoPSQL = (program, filePath) => {
  let stream = fs.createReadStream(path.resolve(__dirname, '../files', filePath));
  let csvData = [];
  let csvStream = csv
    .parse({ headers: false })
    .on('error', error => console.error(error))
    .on('data', (data) => {
      csvData.push(data.toString());
    })
    .on('end', () => {
      csvData.forEach(item => {
        queries.upload(program, item)
          .then(() => {
            console.log('QR Code Added: ' + item);
          }).catch((err) => {
            console.log(`oopsie: ${err}`);
          });
      })
    });
  stream.pipe(csvStream);
}
Pretty confident the issue is with my poor understanding of promises.
As expected, I wasn't handling my promises correctly. I've updated the code a bit, and now it responds with two arrays: successful uploads and errored uploads.
router.post('/:program/upload', upload.single('testUpload'), async (req, res, next) => {
  try {
    const result = await CSVtoPSQL(req.params.program, req.file.filename)
    return res.status(201).json(result);
  }
  catch (error) {
    return res.status(500).json({
      message: error,
    })
  }
});

const CSVtoPSQL = (program, filePath) => {
  let stream = fs.createReadStream(path.resolve(__dirname, '../files', filePath));
  let csvData = [];
  return new Promise((resolve) => {
    const results = {
      success: [],
      error: [],
    }
    let csvStream = csv
      .parse({ headers: false })
      .on('error', error => console.error(error))
      .on('data', (data) => {
        csvData.push(data.toString());
      })
      .on('end', async () => {
        await Promise.all(
          csvData.map(async (item) => {
            try {
              await queries.upload(program, item);
              results.success.push(item);
              console.log('QR Code Added: ' + item);
            }
            catch (error) {
              console.log(`oopsie: ${error}`)
              results.error.push(item);
            }
          })
        )
        resolve(results);
      });
    stream.pipe(csvStream);
  })
}
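A sketch of an alternative for the same collect-successes-and-errors step, assuming the same queries.upload API and Node 12.9+: Promise.allSettled records each outcome without a per-item try/catch:

async function uploadAll(program, csvData) {
  const results = { success: [], error: [] };
  // allSettled never rejects; each entry is {status: 'fulfilled' | 'rejected', ...}.
  const outcomes = await Promise.allSettled(
    csvData.map((item) => queries.upload(program, item))
  );
  outcomes.forEach((outcome, i) => {
    if (outcome.status === 'fulfilled') results.success.push(csvData[i]);
    else results.error.push(csvData[i]);
  });
  return results;
}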

createReadStream directly from a URL

How can I read a file from https://storage.googleapis.com without downloading it first?
let url = "https://storage.googleapis.com/shopify-tiers-assets-prod-us-east1/c84niyyf31t4bxhj7ulb2prf2r9p?GoogleAccessId=assets-us-prod%40shopify-tiers.iam.gserviceaccount.com&Expires=1611140532&Signature=bEiC5Ftxr8rCtiIfm3hyKKP%2B4EUf4TzPUAM3NPuO1jh7DkG1lr7DVQTMYd1rwC4DLMJAZfcQwca7X2Ab%2FWKNwbpfw%2FEjAIh%2B5qhsy77%2FP3BZFrAJjcpSHU6Mj2d3elv1cqTnVErPbig0TvB3caZ1P1apAVMXQP5WRHYGPcnXhV8g9%2B%2FRvQaO4myDS7qfhA89IicVI5e2pPwEMtUgeE6XQavY8ZdpzwLsO0XeAxsLBcH%2FezjIHPOShlRWN09OoqGwBWYcHOvULzA4Rt1fgtiejNI2vZo3FE806YWGW%2BbH%2BXzvFuNq7xMEDgvPIgkM9RPUNmcWSEzTo%2BHXAJ2Ph7%2FADw%3D%3D&response-content-disposition=attachment%3B+filename%3D%22bulk-102030409886.jsonl%22%3B+filename%2A%3DUTF-8%27%27bulk-102030409886.jsonl&response-content-type=application%2Fjsonl"
const { createReadStream } = require('fs');
const { createInterface } = require('readline');
const { once } = require('events');

async function processLineByLine(url) {
  try {
    const rl = createInterface({
      input: createReadStream(url),
      crlfDelay: Infinity
    });
    rl.on("line", (line) => {
      console.log(line);
    });
    await once(rl, "close");
    console.log("File processed.");
  } catch (err) {
    console.error(err);
  }
}
You could stream the file using got and then parse it with ndjson, since the file contains one JSON object per line.
const ndjson = require('ndjson');
const etl = require("etl");
const got = require("got");

const url = "https://storage.googleapis.com/shopify-tiers-assets-prod-us-east1/c84niyyf31t4bxhj7ulb2prf2r9p?GoogleAccessId=assets-us-prod%40shopify-tiers.iam.gserviceaccount.com&Expires=1611140532&Signature=bEiC5Ftxr8rCtiIfm3hyKKP%2B4EUf4TzPUAM3NPuO1jh7DkG1lr7DVQTMYd1rwC4DLMJAZfcQwca7X2Ab%2FWKNwbpfw%2FEjAIh%2B5qhsy77%2FP3BZFrAJjcpSHU6Mj2d3elv1cqTnVErPbig0TvB3caZ1P1apAVMXQP5WRHYGPcnXhV8g9%2B%2FRvQaO4myDS7qfhA89IicVI5e2pPwEMtUgeE6XQavY8ZdpzwLsO0XeAxsLBcH%2FezjIHPOShlRWN09OoqGwBWYcHOvULzA4Rt1fgtiejNI2vZo3FE806YWGW%2BbH%2BXzvFuNq7xMEDgvPIgkM9RPUNmcWSEzTo%2BHXAJ2Ph7%2FADw%3D%3D&response-content-disposition=attachment%3B+filename%3D%22bulk-102030409886.jsonl%22%3B+filename%2A%3DUTF-8%27%27bulk-102030409886.jsonl&response-content-type=application%2Fjsonl"

function getStream() {
  return got.stream(url);
}

getStream()
  .pipe(ndjson.parse())
  .pipe(etl.map(data => {
    // do something with data
    console.log(data);
    return data;
  }))
  .promise()
  .then(() => console.log("done"))
  .catch(err => console.error(err));
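If you would rather avoid extra dependencies, here is a minimal sketch using Node's built-in https module, reusing the url constant above: an HTTP response is itself a readable stream, so it can feed readline directly (fs.createReadStream only accepts file paths, which is why the original attempt fails):

const https = require('https');
const { createInterface } = require('readline');

https.get(url, (res) => {
  const rl = createInterface({ input: res, crlfDelay: Infinity });
  rl.on('line', (line) => {
    // Each line of the .jsonl file is one JSON document.
    console.log(JSON.parse(line));
  });
  rl.on('close', () => console.log('File processed.'));
}).on('error', (err) => console.error(err));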

Problem sending base64 data in a GET request

Hi, I am facing issues returning base64 data from a GET request.
I was successful in converting the image into base64 data and inserting it into receivedFile, but in the response the attachments come back as an empty array, while the rest of the data (i.e. user_id) comes through successfully.
Could you please help me resolve this issue?
Below is the code.
router.js
router.get('/users/data/expand/:nid', async (req, res) => {
  var idselected = req.params.nid;
  var dir = '\images';
  var receivedFile = [];
  try {
    const checkData = await user.find({"user_id": idselected});
    await checkData[0].attachments.forEach(element => {
      fs.readdir(dir, function(err, files) {
        if (err) {
          console.log(err)
        } else {
          files.forEach((filename) => {
            filename = element;
            fs.readFile(filename, 'base64', (err, base64Data) => {
              if (err) {
                console.log(err);
              }
              receivedFile.push(base64Data);
            })
          })
        }
      })
    })
    //issue is here: the attachments come back empty instead of base64 data
    const returnUser = new User({
      user_id: checkData.user_id,
      attachments: receivedFile
    })
    res.status(201).send(returnUser);
  }
  catch(e) {
    res.status(500).send(e)
  }
})
Well, it's always good to create helper functions and to promisify them so you can use async/await syntax.
I have changed your code. I didn't test it, but I think it should work:
router.get("/users/data/expand/:nid", async (req, res) => {
  var idselected = req.params.nid;
  var dir = "images";
  try {
    const checkData = await user.findOne({ user_id: idselected });
    let receivedFile = await Promise.all(
      checkData.attachments.flatMap(async element => {
        let files = await readDirectory(dir);
        return await Promise.all(
          files.map(filename => {
            filename = element;
            return readFile(filename);
          })
        );
      })
    );
    const returnUser = new User({
      user_id: checkData.user_id,
      attachments: receivedFile
    });
    let savedUser = await returnUser.save();
    res.status(201).send(savedUser);
  } catch (e) {
    res.status(500).send(e);
  }
});

function readDirectory(dir) {
  return new Promise((res, rej) => {
    fs.readdir(dir, function(err, files) {
      if (err) {
        rej(err);
      } else {
        res(files);
      }
    });
  });
}

function readFile(filename) {
  return new Promise((res, rej) => {
    fs.readFile(filename, "base64", (err, base64Data) => {
      if (err) {
        rej(err);
      } else {
        res(base64Data);
      }
    });
  });
}
I guess you are using Mongoose. There is a method called findOne, and you also forgot to save your model with returnUser.save().
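As a side note, on Node 10+ the hand-rolled wrappers are not strictly needed: fs.promises already exposes promise-based readdir and readFile. A minimal sketch of the read-attachments step under that assumption, with attachments holding plain file paths:

const fs = require('fs').promises;

// Read every attachment concurrently and resolve to an array of
// base64-encoded strings.
function readAttachmentsBase64(filenames) {
  return Promise.all(
    filenames.map((filename) => fs.readFile(filename, 'base64'))
  );
}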

Converting Object Promise to String in Js

const axios = require('axios');

const url = 'https://www.benzinga.com/stock/aapl/';
const url1 = 'https://www.benzinga.com/stock/msft/';
const url2 = 'https://www.benzinga.com/stock/mu/';

axios.all([
  axios.get(url),
  axios.get(url1),
  axios.get(url2)
])
  .then(axios.spread((url11, url12, url13) => {
    async function scraper() {
      try {
        var benzinga1 = url11;
        return benzinga1;
      } catch (err) {
        console.error(err);
      }
    };
    scraper();
    async function scraper1() {
      try {
        var benzinga2 = url12;
        return benzinga2;
      } catch (err) {
        console.error(err);
      }
    };
    scraper1();
    async function scraper2() {
      try {
        var benzinga3 = url13;
        return benzinga3;
      } catch (err) {
        console.error(err);
      }
    };
    scraper2();
    async function final() {
      try {
        console.log(mybenzinga1);
      } catch (err) {
        console.error(err);
      }
    };
    final();
  }))
  .catch((error) => {
    next(error);
  });
When saving url11 to a file, it writes [object Object]. How do I get a string out of the promise result?
I cannot understand how to convert it to a string and save the value to a file.
Try this:
const axios = require("axios");
const fs = require('fs').promises;

const url = 'https://www.benzinga.com/stock/aapl/';
const url1 = 'https://www.benzinga.com/stock/msft/';
const url2 = 'https://www.benzinga.com/stock/mu/';

axios.all([
  axios.get(url),
  axios.get(url1),
  axios.get(url2)
]).then((data) => {
  data.map(async (item, index) => {
    try {
      await fs.writeFile(`index${index}.html`, item.data);
    } catch (e) {
      console.log(e);
    }
  });
}).catch((error) => {
  next(error);
});
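The key point is that each axios response is an object whose body lives on the .data property; writing the response object itself is what produces [object Object]. A minimal async/await sketch of the same idea, assuming the same three URLs (Promise.all stands in for the older axios.all/axios.spread helpers):

const axios = require('axios');
const fs = require('fs').promises;

async function saveAll(urls) {
  const responses = await Promise.all(urls.map((u) => axios.get(u)));
  // response.data is the HTML body string; the response object itself
  // stringifies as "[object Object]".
  await Promise.all(
    responses.map((response, i) => fs.writeFile(`index${i}.html`, response.data))
  );
}

saveAll([url, url1, url2]).catch((err) => console.error(err));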

Streaming data from oracle db to browser with node.js

I am learning Node.js and databases. I am trying to stream heavy data (about 7,700,000 rows and 96 columns) from Oracle to the client, where I later use it for a virtual table. But the client shows only one row, and the Node console then displays the error "Cannot set headers after they are sent to the client". How do I stream the data to the client? Please help.
var oracledb = require('oracledb');
const cors = require('cors');
var express = require('express');
var app = express();
app.use(cors());

oracledb.outFormat = oracledb.ARRAY;

oracledb.getConnection({
  user: 'user',
  password: 'password',
  connectString: 'some string'
},
  (err, connection) => {
    if (err) {
      console.error(err.message);
      return;
    }
    var rowsProcessed = 0;
    var startTime = Date.now();
    var dataSize = 0;
    var stream = connection.queryStream(
      'SELECT * FROM table',
    );
    // stream.on('data', function (data) {
    //   rowsProcessed++;
    //   // console.log(JSON.stringify(data));
    //   // console.log(data);
    //   dataSize = dataSize + data.length;
    //   // oracleData.push(data);
    //   // console.log("pushing");
    //   // console.log(oracleData);
    //   // app.get('/data', (req, res) => {
    //   //   res.send(data);
    //   // })
    //   // console.log(data);
    // });
    app.get('/data', (req, res) => {
      stream.on('data', (data) => {
        rowsProcessed++;
        dataSize = dataSize + data.length;
        res.send(JSON.stringify(data));
      })
    })
    stream.on('end', function () {
      var t = ((Date.now() - startTime) / 1000);
      console.log('queryStream(): rows: ' + rowsProcessed +
        ', seconds: ' + t);
      // console.log(dataSize + ' bytes');
      connection.close(
        function (err) {
          if (err) {
            console.error(err.message);
          } else {
            console.log("connection closed")
          }
        }
      )
    })
  }
);

app.listen(5000, () => {
  console.log('Listening at 5000')
})
I tried the approach above, but it fails. The browser freezes if I send the entire dataset at once, which is why I am trying streaming; loading everything at once also makes Node run out of memory. How can I achieve this?
Thank you.
The first thing you'll want to do is organize your app a little better. Separation of concerns is important, you should have a connection pool, etc. Have a look at this series for some ideas: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
Once you get the organization figured out, incorporate this example of streaming a large result set out.
const oracledb = require('oracledb');

async function get(req, res, next) {
  try {
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});
    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write('[');
    let firstRow = true;
    stream.on('data', (row) => {
      // Write the separator before every row after the first, so the
      // array does not end with a trailing comma (invalid JSON).
      if (!firstRow) {
        res.write(',');
      }
      firstRow = false;
      res.write(JSON.stringify(row));
    });
    stream.on('end', () => {
      res.end(']');
    });
    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
    stream.on('error', async (err) => {
      next(err);
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
If you find you're doing this a lot, simplify things by creating a reusable transform stream:
const oracledb = require('oracledb');
const { Transform } = require('stream');

// Buffers one row so the comma separator is only written between rows,
// producing a valid JSON array.
class ToJSONArray extends Transform {
  constructor() {
    super({objectMode: true});
    this.push('[');
  }

  _transform(row, encoding, callback) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
      this.push(',');
    }
    this._prevRow = row;
    callback(null);
  }

  _flush(done) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
    }
    this.push(']');
    delete this._prevRow;
    done();
  }
}

async function get(req, res, next) {
  try {
    const toJSONArray = new ToJSONArray();
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});
    res.writeHead(200, {'Content-Type': 'application/json'});
    stream.pipe(toJSONArray).pipe(res);
    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
    stream.on('error', async (err) => {
      next(err);
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
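For completeness, a hypothetical wiring sketch, assuming the handler above is exported from controllers/employees.js and that a connection pool is created at startup so the plain oracledb.getConnection() calls can draw from it:

const express = require('express');
const oracledb = require('oracledb');
const employees = require('./controllers/employees'); // hypothetical path

async function start() {
  // Create the default pool once; getConnection() with no arguments uses it.
  await oracledb.createPool({
    user: 'user',
    password: 'password',
    connectString: 'some string'
  });

  const app = express();
  app.get('/data', employees.get);
  app.listen(5000, () => console.log('Listening at 5000'));
}

start().catch((err) => console.error(err));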
