Node.js: Getting "[object Object]" when using fs.writeFileSync - node.js

I am reading a csv file, then writing another in the format I need. console.log shows the data I need, but the file I create shows [object Object] for each line.
I am not sure why the console.log shows the correct data but the file does not. I've read over the Node documentation but I cannot figure this out. Any information you can provide is appreciated.
Here is my code:
var fs = require("fs");
var parse = require('csv-parse');
//read file
var inputFile = 'EXAMPLE_UPSdailyShipment.csv';
// NOTE(review): fs.createReadStream takes (path, options) -- it does not
// accept a completion callback, so this function is never invoked and
// `content` is never assigned. fs.readFile was presumably intended here.
fs.createReadStream('EXAMPLE_UPSdailyShipment.csv', "utf8", function read(err, data) {
if (err) {
throw err;
}
// NOTE(review): implicit global -- `content` is never declared with var/let/const.
content = data;
});
var arr = [];
//data for new file
// csv-parse callback API: on success `data` is an array of rows, each row an
// array of column strings.
var parser = parse({
delimiter: ","
}, function(err, data) {
data.forEach(function(column) {
// create line object, data from parsed fields, PackageReference1 is order#
var line = {
"BuyerCompanyName": " ",
"OrderNumber": column[8],
"ShipperName": "UPS",
"TrackingNumber": column[9],
"DateShipped": column[0],
"ShipmentCost": column[2],
};
arr.push(line);
});
console.log(arr);
// BUG (the one being asked about): `arr` is an array of plain objects, not a
// string or Buffer. writeFileSync stringifies each object via String(), which
// yields "[object Object]" per element. Serialize first (JSON.stringify, or
// build CSV text) before writing.
fs.writeFileSync("newtemp3.csv", arr, 'utf8');
console.log("file read and new file created")
});
fs.createReadStream(inputFile).pipe(parser);

I think you just need to stringify the data first:
// Serialize the array first -- writeFileSync needs a string or Buffer:
fs.writeFileSync("newtemp3.csv", JSON.stringify(arr), 'utf8');
Hopefully this solves your problem.

Related

Does csv-parse allow you to read from file?

I'm learning how to use the csv-parse module for nodejs. I wrote this code and it works perfectly:
var fs = require('fs');
const fileName = './spreadsheet.csv';
const assert = require('assert');
const { parse } = require('csv-parse');
// Rows parsed from the CSV accumulate here.
const records = [];
// Initialize the parser
const parser = parse({
delimiter: ','
});
// Use the readable stream api to consume records
parser.on('readable', function(){
let record;
// parser.read() returns one parsed row (array of fields) per call,
// or null once the internal buffer is drained.
while ((record = parser.read()) !== null) {
records.push(record);
}
});
// Catch any error
parser.on('error', function(err){
console.error(err.message);
});
// Read the whole file into memory, then feed it to the parser line by line.
fs.readFile(fileName, 'utf8', function (err, f) {
if (err) {
return console.error(err);
}
// NOTE(review): hard-codes CRLF line endings -- files using bare "\n"
// would not split correctly (this is exactly the asker's complaint).
const rows = f.split("\r\n");
for(let x in rows) {
parser.write(rows[x]+"\n");
}
parser.end();
// NOTE(review): logged synchronously right after end(); presumably the
// writes above were processed synchronously so records is populated here,
// but listening for the parser's 'end' event would be safer -- confirm.
console.log(records);
});
But right now, I depend on the fs module and fs.readFile to consume my csv file. Does csv-parse have an option to read from a file? I ask because, as you can see in my code, I have to specify my own line-break characters, which could differ between csv files. I thought maybe the csv-parse module would have something that can more readily address such a situation?
The parser object will do most of the work for you. It is expecting the data to arrive on its stream interface and it will do everything else. All you have to do is open a stream and the pipe it to the parser like this:
// Stream the file straight into the parser -- no manual chunking or
// line-break handling needed.
fs.createReadStream(fileName).pipe(parser);
And, here it is combined with your code:
const fs = require('fs');
const fileName = './spreadsheet.csv';
const { parse } = require('csv-parse');

// Parsed rows collect here; they are printed once the parser signals 'end'.
const records = [];

// Initialize the parser
const parser = parse({
delimiter: ','
});

// Drain every row currently available on the parser's readable side.
parser.on('readable', () => {
for (let row = parser.read(); row !== null; row = parser.read()) {
records.push(row);
}
});

// Catch any error
parser.on('error', (err) => {
console.error(err.message);
});

// All input has been consumed: show the collected rows.
parser.on('end', () => {
console.log(records);
});

// open the file and pipe it into the parser
fs.createReadStream(fileName).pipe(parser);
P.S. It's amazing that such a simple example of getting the CSV data from a file is not shown in the documentation (at least not anywhere I could find it). I'm also surprised, they don't offer an option where they will automatically read the data from the stream, instead requiring you to implement the readable event handler. Odd, for such an otherwise complete package.

Insert new JSON data to the json file using nodejs [duplicate]

I am new to Node.js and JavaScript. I have a results.json file that I want to keep a running log of results from a script that pulls images from the web. However, my current script only overwrites the existing result. How do I build upon or add to the results.json so each subsequent result is logged in the results.json file? I would like it to be valid json.
Here is general example:
var currentSearchResult = someWebSearchResult
var fs = require('fs');
// NOTE(review): bracket mismatch -- the '[' is never closed, so as posted this
// line is a syntax error; null and "\t" sit inside the array literal instead
// of being JSON.stringify's replacer/indent arguments.
var json = JSON.stringify(['search result: ' + currentSearchResult + ': ', null, "\t");
// writeFile replaces the entire file on every call -- this is why each run
// overwrites the previous result instead of appending.
fs.writeFile("results.json", json);
And the results.json:
[
"search result: currentSearchResult"
]
If you want the file to be valid JSON, you have to open your file, parse the JSON, append your new result to the array, transform it back into a string and save it again.
var fs = require('fs')
var currentSearchResult = 'example'

// Read the existing JSON array, append the new result, and write it back.
fs.readFile('results.json', function (err, data) {
// Surface read failures (e.g. missing file) instead of handing undefined
// data to JSON.parse, which would throw a confusing error.
if (err) throw err
var json = JSON.parse(data)
json.push('search result: ' + currentSearchResult)
// fs.writeFile requires a completion callback -- omitting it throws a
// TypeError on modern Node, and errors would otherwise be silently lost.
fs.writeFile("results.json", JSON.stringify(json), function (err) {
if (err) throw err
})
})
In general, If you want to append to file you should use:
// appendFile adds to the end of the file (creating the file first if it does
// not exist). Note the result of repeated appends is not one valid JSON document.
fs.appendFile("results.json", json , function (err) {
if (err) throw err;
console.log('The "data to append" was appended to file!');
});
appendFile creates the file if it does not exist.
But if you want to append JSON data, first read the existing data, and then overwrite the file with the combined result.
// Read-modify-write: parse the stored array, push the new entry, rewrite the file.
fs.readFile('results.json', function (err, data) {
// Without this check, a read failure leaves `data` undefined and
// JSON.parse throws an unrelated-looking error.
if (err) throw err;
var json = JSON.parse(data);
json.push('search result: ' + currentSearchResult);
fs.writeFile("results.json", JSON.stringify(json), function(err){
if (err) throw err;
console.log('The "data to append" was appended to file!');
});
})
Promise based solution [Javascript (ES6) + Node.js (V10 or above)]
const fsPromises = require('fs').promises;

// Read myFile.json, push a new record onto its myArr array, then persist it.
// Each stage logs its own message so failures are easy to attribute.
fsPromises.readFile('myFile.json', 'utf8')
.then((raw) => {
const doc = JSON.parse(raw);
doc.myArr.push({name: "Krishnan", salary: 5678});
return fsPromises.writeFile('myFile.json', JSON.stringify(doc))
.then(() => { console.log('Append Success'); })
.catch((err) => { console.log("Append Failed: " + err); });
})
.catch((err) => { console.log("Read Error: " + err); });
If your project supports JavaScript ES2017 (ES8) or later, you could use async/await instead of raw promises.
I have created a data.js file to hold data displayed in a table format, including a text box to read data from the user. How do I display the data entered through the text box in the table?

How do I save a file to my nodejs server from web service call

My issue is this:
I have made a call to someones web service. I get back the file name, extension and the "bytes". Bytes actually come in as an array and at position 0 "Bytes[0]" is the following string:
JVBERi0xLjYKJeLjz9MKMSAwIG9iago8PC9EZWNvZGVQYXJtczw8L0sgLTEvQ29sdW1ucyAyNTUwL1Jvd3MgMzMwMD4+L1R5cGUvWE9iamVjdC9CaXRzUGVyQ29tcG9uZW50IDEvU3VidHlwZS9JbWFnZS9XaWR0aCAyNTUwL0NvbG9yU3BhY2UvRGV2aWNlR3JheS9GaWx0ZXIvQ0NJVFRGYXhEZWNvZGUvTGVuZ3RoIDI4Mzc0L0hlaWdodCAzMzAwPj5zdHJlYW0K////////y2IZ+M8+zOPM/HzLhzkT1NAjCCoEY0CMJNAjCR4c8HigRhBAi1iZ0eGth61tHhraTFbraRaYgQ8zMFyGyGM8ZQZDI8MjMI8M6enp9W6enp+sadIMIIEYwy/ggU0wwgwjWzSBUmwWOt/rY63fraTVNu6C7R7pN6+v///20v6I70vdBaPjptK8HUQfX9/17D/TMet+l06T//0v3/S9v+r98V0nH///7Ff+Ed3/v16X9XX/S/KP0vSb//W88ksdW18lzBEJVpPXT0k9b71///...
The string example above has been cut off for readability.
How do I take that string and save it as a readable file?
This case it's a pdf.
let pdfBytes = '{String shown above in example}'
You can use the Node.js File System Module to save the received buffer.
Assuming the encoding of your data is base64:
const fs = require('fs');

// The payload arrives as a base64 string; decode it while streaming to disk.
let pdfBytes = 'JVBERi0xLjYKJeLjz9...'

const out = fs.createWriteStream('filename.pdf');
// Attach the completion handler before writing so it cannot be missed.
out.on('finish', () => {
console.log('saved');
});
out.write(pdfBytes, 'base64');
out.end();
I am using the fs file system here to create and save the file. I use a lot of try catch in case anything goes wrong. This example shows how you could pass the data to a function that could then create the file for you.
const util = require('util');
const fs = require('fs');

const fsOpen = util.promisify(fs.open);
const fsWriteFile = util.promisify(fs.writeFile);
const fsClose = util.promisify(fs.close);

/**
 * Create a brand-new file at `path` containing `data`.
 * The 'wx' open flag guarantees an existing file is never overwritten.
 *
 * @param {string} path - destination path for the new file
 * @param {string|Buffer} data - contents to write
 * @returns {Promise<string>} resolves with 'File created'
 *   Rejections carry the same descriptive strings as the original snippet
 *   (kept for compatibility; new code should reject with Error objects).
 */
async function saveNewFile(path, data) {
// An async function replaces the original `new Promise(async (resolve,
// reject) => ...)` wrapper: an async executor is an anti-pattern because an
// unexpected throw inside it is lost instead of rejecting the promise.
let fileToCreate;
// Open the file for writing
try {
fileToCreate = await fsOpen(path, 'wx');
} catch (err) {
throw 'Could not create new file, it may already exist';
}
try {
// Write the new data to the file
try {
await fsWriteFile(fileToCreate, data);
} catch (err) {
throw 'Error writing to new file';
}
} finally {
// Always close the descriptor -- the original leaked it when the write
// failed, because it rejected before ever reaching fsClose.
try {
await fsClose(fileToCreate);
} catch (err) {
throw 'Error closing new file';
}
}
return 'File created';
};

// Data we want to use to create the file.
let pdfBytes = 'JVBERi0xLjYKJeLj...'
// Handle the returned promise -- otherwise a failure becomes an unhandled
// rejection (the original call site ignored it).
saveNewFile('./filename.pdf', pdfBytes).catch(console.error);

Nodejs - "Write After End" error while Exporting from Mongoose

I am trying to write the results of 3 Mongoose find query into a single file test.txt using event-stream nodejs.
My program is something like this…
var es = require('event-stream');
var fs = require('fs');
var wstream = fs.createWriteStream('TestFile.csv');
mongoose.connection.on('connected', function () {
// NOTE(review): each find() below is missing a closing '}' before ')', so as
// posted these lines are syntax errors; also MongoDB's operator is $exists,
// not $exist, and `formated`/`fields` are not defined in this snippet.
//
// ROOT CAUSE of "write after end": .pipe() ends its destination when the
// source finishes. The first query's stream finishes first and closes
// wstream; the remaining two pipes then write to an already-ended stream.
// Pipe each source with { end: false } and end wstream manually after the
// last query completes.
//First Find Query
Model.find({"FirstId":{$exist:true}).cursor()
.pipe(es.map(function (data, callback) {
var csv = json2csv({data:formated, fields:fields, hasCSVColumnTitle:false});
callback(null, csv)
}))
.pipe(wstream);
//Second find query
Model.find({"SecondId":{$exist:true}).cursor()
.pipe(es.map(function (data, callback) {
if(data.Email)
{
var csv = json2csv({data:formated, fields:fields, hasCSVColumnTitle:false});
}
else{
// NOTE(review): the else branch calls callback() here AND falls through to
// callback(null, csv) below -- the callback fires twice per skipped doc.
callback();
}
callback(null, csv)
}))
.pipe(wstream);
//Third find query
Model.find({"ThirdId":{$exist:true}).cursor()
.pipe(es.map(function (data, callback) {
if(data.Email)
{
var csv = json2csv({data:formated, fields:fields, hasCSVColumnTitle:false});
}
else{
// NOTE(review): same double-callback issue as the second query above.
callback();
}
callback(null, csv)
}))
.pipe(wstream);
});
In this program I am able to write each find query result separately into the file. But when I combine 3-find query in a program, it throws “write after end” error.
Could anyone help me to resolve this error?
Thank You all for your time!

Writing buffer response from resemble.js to file

I'm using node-resemble-js to compare two PNG images.
The comparison happens without issue and I get a successful/relevant response however I'm having trouble outputting the image diff.
var express = require('express');
var fs = require('fs');
var resemble = require('node-resemble-js');
var router = express.Router();

// GET /compare -- run the image diff and render the result page.
router.get('/compare', function(req, res, next) {
compareImages(res);
});

var compareImages = function (res) {
resemble.outputSettings({
largeImageThreshold: 0
});
// Compare 1.png against 2.png, ignoring color information.
var diff = resemble('1.png')
.compareTo('2.png')
.ignoreColors()
.onComplete(function(data){
console.log(data);
// NOTE(review): this is the source of the invalid image -- per the answer,
// getDiffImage() returns a PNG object whose .data buffer is apparently not
// yet an encoded PNG file; it must be pack()ed first.
var png = data.getDiffImage();
fs.writeFile('diff.png', png.data, null, function (err) {
if (err) {
throw 'error writing file: ' + err;
}
console.log('file written');
});
// Rendered immediately; does not wait for the file write to finish.
res.render('compare');
});
};
module.exports = router;
It writes to diff.png as expected however it's not creating a valid image.
Any ideas where I'm going wrong? Feel like I'm pretty close but just unsure of final piece.
Thanks
Looks like there is a pack() method that needs to be called, which does some work and then streamifies the data. In that case you can buffer the stream and then call writeFile like this:
var png = data.getDiffImage();

// pack() encodes the diff image into an actual PNG byte stream; collect the
// emitted chunks into a single Buffer, then write that to disk.
// Buffer.alloc(0) replaces the deprecated (and unsafe) `new Buffer([])`
// constructor -- same empty-buffer starting point.
var buf = Buffer.alloc(0)
var strm = png.pack()
strm.on('data', function (dat) {
buf = Buffer.concat([buf, dat])
})
strm.on('end', function() {
fs.writeFile('diff.png', buf, null, function (err) {
if (err) {
throw 'error writing file: ' + err;
}
console.log('file written');
})
})
or you can just pipe it like this, which is a little simpler:
// Or skip the manual buffering entirely and stream the encoded PNG to disk:
png.pack().pipe(fs.createWriteStream('diff.png'))
Honestly, your approach made sense to me (grab the Buffer and write it) but I guess that data Buffer attached to what comes back from getDiffImage isn't really the final png. Seems like the docs are a bit thin but there's some info here: https://github.com/lksv/node-resemble.js/issues/4

Resources