Node.js CSV module

I'm using a CSV module as in the example below.
I was wondering if there is a way to make sure that the fields aren't all inserted as strings, e.g. that the number 1.23 is not inserted as the string "1.23".
It seems to make everything a string.
var csv = require('csv');
var async = require('async');

var q = async.queue(myCollection.insert.bind(myCollection), 50);

csv()
  .from.path(req.files.myCSV.path, {columns: true})
  .transform(function(data, index, cb){
    q.push(data, function (err, res) {
      if (err) return cb(err);
      cb(null, res[0]);
    });
  })
  .on('end', function () {
    q.drain = function() {};
  });

In csv.transform(), before q.push(), you can convert fields using e.g. parseInt:
data.fieldX = parseInt(data.fieldX, 10);
data.fieldY = parseFloat(data.fieldY);
You could also remove a field with delete data.fieldY; or add derived fields, e.g. data.fullName = data.first + ' ' + data.last;
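Putting the pieces together, a minimal sketch of the transform step with the conversions applied before the insert is queued (fieldX, fieldY, first and last are placeholder column names from the snippets above):

.transform(function(data, index, cb){
  // coerce selected columns before queueing the insert
  data.fieldX = parseInt(data.fieldX, 10);      // integer column
  data.fieldY = parseFloat(data.fieldY);        // floating-point column
  data.fullName = data.first + ' ' + data.last; // derived field
  q.push(data, function (err, res) {
    if (err) return cb(err);
    cb(null, res[0]);
  });
})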

Related

Saving JSON data correctly on nodeJS server

I'm still working on my SPA where I can track my expenses. Each expense item consists of a value, a date, a description and tags.
The client sends all this data to my server, where I want to save it in a JSON file.
My code looks like this right now (json.push isn't working):
client.on('message', function(value, date, descr, tags) {
  console.log('message: ' + value, date, descr, tags);
  var exp = new Object();
  exp.id = id;
  exp.value = value;
  exp.date = date;
  exp.tags = tags;
  expArr[exp.id] = exp;
  id++;
  console.log(exp.id);
  fs.readFile('expenses.json', function (err, data) {
    var json = JSON.parse(data);
    json.push(exp);
    console.log(json);
    fs.writeFile("expenses.json", JSON.stringify(exp), function(err) {
      if (err) throw err;
      console.log('The data was appended to file!');
    });
  });
});
My goal is that every newly added item gets appended to my JSON file.
In the end it should look like this, for example:
{"expArray": [{"id":0,"value":"200","date":"dqwd","tags":"cnelw"},
{"id":1,"value":"300","date":"dqwd","tags":"ncjlekw"},
{"id":2,"value":"22","date":"dqwd","tags":"dnkqoe"}
]}
I don't know if it's necessary to use an array there,
but I need to read the file again later and get the IDs of the items so I can delete or edit them on the client side.
Thanks for your help!
try this:
client.on('message', function(value, date, descr, tags) {
  console.log('message: ' + value, date, descr, tags);
  // build the expense object
  var exp = {id: id, value: value, date: date, tags: tags};
  expArr[exp.id] = exp;
  id++;
  console.log(exp.id);
  fs.readFile('expenses.json', function (err, data) {
    var json = JSON.parse(data);
    // push onto the expArray property, not onto the parsed object itself
    json.expArray.push(exp);
    console.log(json);
    // write the whole updated object back, not just the new expense
    fs.writeFile("expenses.json", JSON.stringify(json), function(err) {
      if (err) throw err;
      console.log('The data was appended to file!');
    });
  });
});
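Note that this assumes expenses.json already exists and contains an object with an expArray property, matching the target format from the question, e.g. a starting file of:

{"expArray": []}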

Function querying all elements from MongoDB does not populate array

So I've written a function that should query everything in the files collection of my MongoDB using Mongoose, but it doesn't. While each element is in fact being read, files.push() doesn't seem to have any effect on the array, as the array is still empty.
While I know that console.log() is not an effective way to debug, Express still does not render the contents of the array, so it is in fact not being populated.
Yes, it is being called as getAllFiles(Image).
Code below:
const Image = module.exports = mongoose.model('files', imageSchema);

function getAllFiles(collection) {
  let files = [];
  collection.find({}, (err, buns) => {
    buns.forEach((bun) => {
      let fin = bun.path.replace("public/", "");
      files.push(fin);
      console.log(fin);
    });
  });
  console.log(files);
  return files;
}
Terminal output (ignore extraneous output):
wildflower :: src/bunnydb » node app.js
(node:23296) DeprecationWarning: current URL string parser is deprecated, and will be removed in a future version. To use the new parser, pass option { useNewUrlParser: true } to MongoClient.connect.running on port 3000
[]
uploads/9160d961-3d9b-4dea-a39c-f79b86647408.jpg
Was able to fix it by adding a callback, since the query runs asynchronously:
function getAllFiles(collection, cb) {
  let files = [];
  collection.find({}, (err, buns) => {
    console.log('err: ' + err);
    console.log('buns: ' + buns);
    buns.forEach((bun) => {
      let fin = bun.path.replace("public/", "");
      files.push(fin);
      console.log('data: ' + fin);
    });
    // deliver the populated array through the callback
    cb(files);
  });
  // this runs before the query finishes, so files is still empty here
  console.log('arr: ' + files);
  return files;
}
On invocation, the callback argument can be used to do stuff with the files.
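For example, a minimal sketch of calling it from an Express handler (this assumes res is the Express response in scope; the 'gallery' template name is just a placeholder):

getAllFiles(Image, (files) => {
  // files is now the populated array of image paths
  res.render('gallery', { images: files });
});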

Nodejs - "Write After End" error while Exporting from Mongoose

I am trying to write the results of 3 Mongoose find queries into a single file test.txt using the event-stream module in Node.js.
My program is something like this…
var es = require('event-stream');
var fs = require('fs');
var wstream = fs.createWriteStream('TestFile.csv');

mongoose.connection.on('connected', function () {
  // First find query
  Model.find({"FirstId": {$exists: true}}).cursor()
    .pipe(es.map(function (data, callback) {
      var csv = json2csv({data: formated, fields: fields, hasCSVColumnTitle: false});
      callback(null, csv);
    }))
    .pipe(wstream);

  // Second find query
  Model.find({"SecondId": {$exists: true}}).cursor()
    .pipe(es.map(function (data, callback) {
      if (data.Email) {
        var csv = json2csv({data: formated, fields: fields, hasCSVColumnTitle: false});
        callback(null, csv);
      } else {
        callback();
      }
    }))
    .pipe(wstream);

  // Third find query
  Model.find({"ThirdId": {$exists: true}}).cursor()
    .pipe(es.map(function (data, callback) {
      if (data.Email) {
        var csv = json2csv({data: formated, fields: fields, hasCSVColumnTitle: false});
        callback(null, csv);
      } else {
        callback();
      }
    }))
    .pipe(wstream);
});
In this program I am able to write each find query's result separately into the file. But when I combine the three find queries in one program, it throws a "write after end" error.
Could anyone help me resolve this error?
Thank you all for your time!
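For what it's worth, the likely cause is that stream.pipe(dest) ends the destination when the source ends, so the first cursor to finish closes wstream and the remaining writes fail. A minimal sketch of one way around it, passing { end: false } and ending the write stream only after the last query has finished (the Email check and the formated variable from the question are omitted here for brevity):

function exportQuery(filter, done) {
  // { end: false } keeps the shared write stream open after this source finishes
  Model.find(filter).cursor()
    .pipe(es.map(function (data, callback) {
      callback(null, json2csv({data: data, fields: fields, hasCSVColumnTitle: false}));
    }))
    .on('end', done)
    .pipe(wstream, { end: false });
}

exportQuery({"FirstId": {$exists: true}}, function () {
  exportQuery({"SecondId": {$exists: true}}, function () {
    exportQuery({"ThirdId": {$exists: true}}, function () {
      wstream.end(); // close the file only after the last query has been written
    });
  });
});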

How to remove one line from a txt file

I have the following text file ("test.txt") that I want to manipulate in node.js:
world
food
I want to remove the first line so that food becomes the first line instead. How can I do that?
var fs = require('fs');

fs.readFile(filename, 'utf8', function(err, data) {
  if (err) {
    // check and handle err
  }
  // data is the file contents as a single string
  // .split('\n') turns it into an array of lines
  // .slice(1) drops the first element, i.e. the first line
  // .join('\n') re-concatenates the remaining lines into a string
  var linesExceptFirst = data.split('\n').slice(1).join('\n');
  fs.writeFile(filename, linesExceptFirst, function(err) {
    if (err) { /** check and handle err */ }
  });
});
I just came across the need to be able to exclude several lines in a file. Here's how I did it with a simple node function.
const fs = require('fs');

const removeLines = (data, lines = []) => {
  return data
    .split('\n')
    .filter((val, idx) => lines.indexOf(idx) === -1)
    .join('\n');
};

fs.readFile(fileName, 'utf8', (err, data) => {
  if (err) throw err;
  // remove the first line and the 5th and 6th lines in the file
  fs.writeFile(fileName, removeLines(data, [0, 4, 5]), 'utf8', function(err) {
    if (err) throw err;
    console.log("the lines have been removed.");
  });
});
use replace
const fs = require('fs');

function readWriteSync() {
  var data = fs.readFileSync(filepath, 'utf-8');
  // replace 'world' together with the newline character with an empty string
  var newValue = data.replace(/world\n/, '');
  fs.writeFileSync(filepath, newValue, 'utf-8');
}
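One caveat worth noting: if the file uses Windows line endings, the newline is \r\n, so the pattern could allow for that, e.g.:

// matches 'world' followed by either a Windows or Unix line ending
var newValue = data.replace(/world\r?\n/, '');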

How to export a CSV file from Node.js with PostgreSQL

The Node.js file is on Heroku and I'm using PostgreSQL as the database.
I would like to export a CSV file from a view:
// My PostgreSQL query
var copyTo = require('pg-copy-streams').to;
var csv = require('csv');
var fs = require('fs');
var stream = client.query(copyTo('COPY (SELECT * FROM v_metric) TO STDOUT WITH CSV HEADER DELIMITER as \'|\''));
// export in csv
csv().from(stream.pipe(process.stdout, { end: false })).to(fs.createWriteStream('sample.csv'));
I don't get any result; sample.csv is empty.
Any idea?
Thank you in advance.
You can use the pg-copy-streams npm module, made specifically for this:
let data = '', copyTo = require('pg-copy-streams').to;

pool.connect(function(pgErr, client, done) {
  if (pgErr) {
    // handle error
    return;
  }
  var stream = client.query(copyTo(`COPY (${query}) TO STDOUT WITH CSV HEADER`));
  stream.on('data', chunk => {
    data += chunk;
  });
  stream.on('end', response => {
    console.log(data);
    done();
  });
  stream.on('error', err => {
    done();
  });
});
Hope this helps.
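Since the copy-to stream is an ordinary readable stream, an alternative sketch is to pipe it straight into the target file instead of buffering it in a string (client, copyTo, query and done as in the snippet above):

var fs = require('fs');
var stream = client.query(copyTo(`COPY (${query}) TO STDOUT WITH CSV HEADER`));
// write the CSV output directly to disk as it arrives
stream.pipe(fs.createWriteStream('sample.csv'))
  .on('finish', function () { done(); });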
The SQL server's STDOUT will be different from your Node process's. You could try streaming the query results:
var query = client.query('SELECT * FROM v_metric');
query.on('row', function(row) {
//handle the row here
});
Then in your handler, if your data isn't complicated (i.e. no delimiters or double quotes in the values), you could skip using csv and iterate over the columns to convert each row into a string that you write to the write stream. It may be easier to keep the column names in an array, which you can join with ', ' to build the SQL and iterate over in the handler, but you could also extract the column names using Object.keys(row).
UPDATE: Example based on your comment:
var columns = ['country_cd','product_name','product_lvel','month_year','metric_name','val'];
var ws = fs.createWriteStream('sample.csv');
var query = client.query('SELECT ' + columns.join(', ') + ' FROM users');

query.on('row', function(row) {
  var values = [];
  // collect column values; if you need special formatting (i.e. dates), handle those columns individually instead of looping
  columns.forEach(function(col) {
    values.push(row[col]);
  });
  ws.write(values.join('|') + '\n'); // one CSV row per result row
});

query.on('end', function(result) {
  ws.end();
});
If you do want to use csv, you can create a stream that you write data to in the handler and pipe that to csv.
One other note: a comma (,) is the default delimiter, so if you want to use something else, like |, you will need to specify that in the options.
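For example, a sketch of setting the delimiter, assuming a recent csv package that exposes csv-stringify as csv.stringify (rows stands for the row objects collected from the 'row' events above):

var stringify = require('csv').stringify;
var fs = require('fs');

// rows: array of row objects collected from the query
stringify(rows, { delimiter: '|', header: true }, function (err, output) {
  if (err) throw err;
  fs.writeFile('sample.csv', output, function (err) {
    if (err) throw err;
  });
});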
