How to copy file content into a string? - node.js

I am trying to "translate" my old scripts done in Ruby to node.js. One of them is about CSV parsing, and I am stuck at step one - load file into a string.
This prints content of my file to the console:
fs = require('fs');
fs.readFile("2015 - Best of.csv", 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  console.log(data);
});
but for some reason I can't catch data into variable:
fs = require('fs');
var x = "";
fs.readFile("2015 - Best of.csv", 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  x = x + data;
});
console.log(x);
How do I store (function's) 'data' variable into (global) 'x' variable?

It is working.
The problem is you are logging x before it gets filled. Since the call is asynchronous, the x variable will only have the contents of the string inside the function.
You can also look at the fs.readFileSync function.
However, I would recommend you get more comfortable with node's async features.
Try this:
fs = require('fs');
var x = "";
fs.readFile("2015 - Best of.csv", 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  x = x + data;
  console.log(x);
});
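Since the answer mentions fs.readFileSync, here is a minimal sketch of that route (same file name as above; fine for a one-off script where blocking the process during the read is acceptable):

const fs = require('fs');

let x = '';
try {
  // readFileSync blocks until the whole file is read, so x is filled before the next line runs
  x = fs.readFileSync("2015 - Best of.csv", 'utf8');
} catch (err) {
  console.log(err);
}
console.log(x); // prints the file contents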

Related

How to set variable value inside read file function and use it outside in node js?

I'm trying to read the file content as a string and want to set the variable's value equal to it. But when I try to access the file_data variable outside the function, it is empty. Inside the fs.readFile callback, however, it works fine.
var fs = require('fs');
let file_data = '';
fs.readFile('text_file.txt', 'utf8', function(err, data) {
  if (err) throw err;
  console.log(data); // this works fine here
  file_data = data; // setting it here so that I can use it afterwards
});
// the following line gives blank output
console.log(file_data);
I'm a bit new at this, so please point out if there is something I need to read first before using functions like this.
fs.readFile() is an async function, so
// the following line gives blank output
console.log(file_data);
is processed before fs.readFile() responds, so you can log inside the callback:
fs.readFile('text_file.txt', 'utf8', function(err, data) {
  if (err) throw err;
  console.log(data); // this works fine here
  file_data = data; // setting it here so that I can use it afterwards
  // CONSOLE LOG HERE!! AND IT WORKS
  console.log(file_data);
});
Alternatively, you can use the synchronous version fs.readFileSync()
try {
  const data = fs.readFileSync('text_file.txt', 'utf8')
  console.log(data)
} catch (err) {
  console.error(err)
}
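Another option, assuming Node 10 or later, is the promise flavour of the same API via fs.promises, which keeps the code reading top to bottom without blocking the event loop (a sketch using the same file name):

const fsPromises = require('fs').promises;

async function readIt() {
  // await pauses this function (not the whole process) until the read finishes
  const file_data = await fsPromises.readFile('text_file.txt', 'utf8');
  console.log(file_data); // filled here, after the read has completed
  return file_data;
}

readIt().catch(err => console.error(err));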

Node.js function not running in order. Error: Unhandled stream error in pipe

I updated the function to create the CSV file but now I'm getting an error:
In upload function
internal/streams/legacy.js:57
throw er; // Unhandled stream error in pipe.
^
Error: ENOENT: no such file or directory, open 'C:\Users\shiv\WebstormProjects\slackAPIProject\billingData\CSV\1548963844106output.csv'
var csvFilePath = '';
var JSONFilePath = '';

function sendBillingData(){
  var message = '';
  axios.get(url, {
    params: {
      token: myToken
    }
  }).then(function (response) {
    message = response.data;
    fields = billingDataFields;
    // saveFiles(message, fields, 'billingData/');
    saveFilesNew(message, fields, 'billingData/');
    var file = fs.createReadStream(__dirname + '/' + csvFilePath); // <-- make sure this path is correct
    console.log(__dirname + '/' + csvFilePath);
    uploadFile(file);
  })
  .catch(function (error) {
    console.log(error);
  });
}
The saveFilesNew function is:
function saveFilesNew(message, options, folder){
  try {
    const passedData = message;
    var relevantData = '';
    if (folder == 'accessLogs/'){
      const loginsJSON = message.logins;
      relevantData = loginsJSON;
      console.log(loginsJSON);
    }
    if (folder == 'billingData/'){
      relevantData = passedData.members;
      const profile = passedData.members[0].profile;
    }
    // Save JSON to the output folder
    var date = Date.now();
    var directoryPath = folder + 'JSON/' + date + "output";
    JSONFilePath = directoryPath + '.json';
    fs.writeFileSync(JSONFilePath, JSON.stringify(message, null, 4), function(err) {
      if (err) {
        console.log(err);
      }
    });
    // parse JSON onto the CSV
    const json2csvParser = new Json2csvParser({ fields });
    const csv = json2csvParser.parse(relevantData);
    // console.log(csv);
    // function to process the CSV onto the file
    var directoryPath = folder + 'CSV/' + date + "output";
    csvFilePath = directoryPath + '.csv';
    let data = [];
    let columns = {
      real_name: 'real_name',
      display_name: 'display_name',
      email: 'email',
      account_type: 'account_type'
    };
    var id = passedData.members[0].real_name;
    console.log(id);
    console.log("messageLength is" + Object.keys(message.members).length);
    for (var i = 0; i < Object.keys(message.members).length; i++) {
      console.log("value of i is" + i);
      var display_name = passedData.members[i].profile.display_name;
      var real_name = passedData.members[i].profile.real_name_normalized;
      var email = passedData.members[i].profile.email;
      var account_type = 'undefined';
      console.log("name: " + real_name);
      if (passedData.members[i].is_owner){
        account_type = 'Org Owner';
      }
      else if (passedData.members[i].is_admin){
        account_type = 'Org Admin';
      }
      else if (passedData.members[i].is_bot){
        account_type = 'Bot'
      }
      else account_type = 'User';
      data.push([real_name, display_name, email, account_type]);
    }
    console.log(data);
    stringify(data, { header: true, columns: columns }, (err, output) => {
      if (err) throw err;
      fs.writeFileSync(csvFilePath, output, function(err) {
        console.log(output);
        if (err) {
          console.log(err);
        }
        console.log('my.csv saved.');
      });
    });
  } catch (err) {
    console.error(err);
  }
}
The upload file function is:
function uploadFile(file){
  console.log("In upload function");
  const form = new FormData();
  form.append('token', botToken);
  form.append('channels', 'testing');
  form.append('file', file);
  axios.post('https://slack.com/api/files.upload', form, {
    headers: form.getHeaders()
  }).then(function (response) {
    var serverMessage = response.data;
    console.log(serverMessage);
  });
}
So I think the error is caused because node is trying to upload the file before it has been created. I feel like this has something to do with the asynchronous nature of Node.js, but I fail to see how to fix the code. Please let me know how to correct this, and mention any improvements to the code structure/design too.
Thanks!
You don't wait for the callback provided to stringify to be executed, and it's where you create the file. (Assuming this stringify function really does accept a callback.)
Using callbacks (you can make this cleaner with promises and these neat async/await controls, but let's just stick to callbacks here), it should be more like:
function sendBillingData() {
  ...
  // this callback we'll use to know when the file writing is done, and to get the file path
  saveFilesNew(message, fields, 'billingData/', function(err, csvFilePathArgument) {
    // this we will execute when saveFilesNew calls it, not when saveFilesNew returns, see below
    uploadFile(fs.createReadStream(__dirname + '/' + csvFilePathArgument))
  });
}

// let's name this callback... "callback".
function saveFilesNew(message, options, folder, callback) {
  ...
  var csvFilePath = ...; // local variable only instead of your global
  ...
  stringify(data, { header: true, columns: columns }, (err, output) => {
    if (err) throw err; // or return callback(err);
    fs.writeFile(csvFilePath, output, function(err) { // NOT writeFileSync, or no callback needed
      console.log(output);
      if (err) {
        console.log(err);
        // callback(err); may be a useful approach for error-handling at a higher level
      }
      console.log('my.csv saved.'); // yes, NOW the CSV is saved, not before this executes! Hence:
      callback(null, csvFilePath); // no error, clean process, pass the file path
    });
  });
  console.log("This line is executed before stringify's callback is called!");
  return; // implicitly, yes, yet still synchronous and that's why your version crashes
}
Using callbacks that are called only when the expected events happen (a file is done writing, a buffer/string is done transforming...) allows JS to keep executing code in the meantime. And it does keep executing code, so when you need data from an async code, you need to tell JS you need it done before executing your piece.
Also, since you can pass data when calling back (it's just a function), here I could avoid relying on a global csvFilePath. Using higher-level variables makes things monolithic; for example, you could not move saveFilesNew to a dedicated file where you keep your toolkit of file-related functions.
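Since the answer notes this could be made cleaner with promises and async/await, here is a rough sketch of that variant. It assumes stringify follows the usual error-first callback convention (so util.promisify can wrap it), skips the JSON/json2csv steps, and reuses axios, stringify, url, myToken, billingDataFields and uploadFile from the question:

const util = require('util');
const fs = require('fs');
const fsp = fs.promises;
// works if stringify takes an (err, output) callback, as in the question's code
const stringifyAsync = util.promisify(stringify);

async function saveFilesNewAsync(message, fields, folder) {
  // build the rows the same way the original loop does
  const columns = { real_name: 'real_name', display_name: 'display_name', email: 'email', account_type: 'account_type' };
  const data = message.members.map(m => [
    m.profile.real_name_normalized,
    m.profile.display_name,
    m.profile.email,
    m.is_owner ? 'Org Owner' : m.is_admin ? 'Org Admin' : m.is_bot ? 'Bot' : 'User'
  ]);
  const csvFilePath = folder + 'CSV/' + Date.now() + 'output.csv';
  const output = await stringifyAsync(data, { header: true, columns: columns });
  await fsp.writeFile(csvFilePath, output); // resolves only once the CSV is on disk
  return csvFilePath; // hand the path back instead of writing to a global
}

async function sendBillingData() {
  const response = await axios.get(url, { params: { token: myToken } });
  const csvFilePath = await saveFilesNewAsync(response.data, billingDataFields, 'billingData/');
  uploadFile(fs.createReadStream(__dirname + '/' + csvFilePath)); // the file exists by now
}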
Finally, if your global process is like:
function aDayAtTheOffice() {
  sendBillingData();
  getCoffee();
}
then you don't need to wait for the billing data to be processed before starting making coffee. However, if your boss told you that you could NOT get a coffee until the billing data was settled, then your process would look like:
function aDayAtTheOffice() {
  sendBillingData(function (err) {
    // if (err) let's do nothing here: you wanted a coffee anyway, right?
    getCoffee();
  });
}
(Note that callbacks having potential error as first arg and data as second arg is a convention, nothing mandatory.)
IMHO you should read about scope (the argument callback could be accessed at a time where the call to saveFilesNew was already done and forgotten!), and about the asynchronous nature of No... JavaScript. ;) (Sorry, probably not the best links but they contain the meaningful keywords, and then Google is your buddy, your friend, your Big Brother.)

In nodejs how to read a file and move it to another folder if the file contains a specified text

So I have the following code
var processed;
fs.readFile(path, 'utf-8', function(err, data) {
  processed = false;
  // checking if text is in file and setting flag
  processed = true;
});
if (processed == true) {
  try {
    var fname = path.substring(path.lastIndexOf("\\") + 1);
    fs.moveSync(path, './processedxml/' + fname, {
      overwrite: true
    })
  } catch (err) {
    console.log("Error while moving file to processed folder " + err);
  }
}
But I don't get the desired output, because it looks like readFile is executed by a separate thread and so the value of "processed" is not reliable.
I am not very familiar with nodejs so any help will be greatly appreciated.
Yes, you are right, your executions are performed by different threads.
In this scenario, you'll need to use promises.
You can solve this easily by using "promise-fs" (you can use any other promise solution anyway).
Your code would be something like the following:
fs = require('promise-fs');

var fname = 'test.txt';
var toMove = false;

fs.readFile('test.txt', 'utf8')
  .then(function (content) {
    if (content.indexOf('is VALID') !== -1) {
      console.log('pattern found!');
      toMove = true;
    } else {
      toMove = false;
    }
    return toMove;
  })
  .then(function (toMove) {
    if (toMove) {
      var oldPath = 'test.txt';
      var newPath = '/tmp/moved/file.txt';
      fs.rename(oldPath, newPath, function (err) {
        if (err) throw err;
        console.log('Successfully renamed - moved!');
      });
    }
  })
  .catch(function (err) {
    console.log(err);
  });
Create a file "test.txt" and add the following contents:
this is text.file contents
token is VALID
The code above checks whether "is VALID" is present in the content; if it is, it moves the file "test.txt" from your current folder to a folder called "moved" in the "/tmp" directory, and also renames it to "file.txt".
Hope it helps you.
Regards
It looks like you're shadowing path, trying to use it as a variable and as a node module. The easiest way to make this work is to choose a different variable name for the file and move the processing logic into the callback of fs.readFile.
var path = require('path');
var fs = require('fs-extra');

var file = 'some/file/path/foo.xml';
var text = 'search text';

fs.readFile(file, 'utf-8', function (err, data) {
  if (err) {
    console.error(err);
  } else {
    // checking if text is in file and setting flag
    if (data.indexOf(text) > -1) {
      try {
        var fname = path.basename(file);
        fs.moveSync(file, './processedxml/' + fname, {
          overwrite: true
        })
      } catch (err) {
        console.log("Error while moving file to processed folder " + err);
      }
    }
  }
});
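If you would rather not block with moveSync, fs-extra also provides an asynchronous fs.move() that accepts a callback (or returns a promise when no callback is given). A hedged variant of the same logic, reusing the file and text variables from above:

var path = require('path');
var fs = require('fs-extra');

var file = 'some/file/path/foo.xml';
var text = 'search text';

fs.readFile(file, 'utf-8', function (err, data) {
  if (err) return console.error(err);
  if (data.indexOf(text) === -1) return; // text not found, nothing to move
  // fs-extra's move() does the same job as moveSync() without blocking
  fs.move(file, './processedxml/' + path.basename(file), { overwrite: true }, function (err) {
    if (err) {
      console.log("Error while moving file to processed folder " + err);
    } else {
      console.log("Moved " + file);
    }
  });
});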

How to remove one line from a txt file

I have the following text file ("test.txt") that I want to manipulate in node.js:
world
food
I want to remove the first line so that food becomes the first line instead. How can I do that?
var fs = require('fs')

fs.readFile(filename, 'utf8', function(err, data)
{
  if (err)
  {
    // check and handle err
  }
  // data is the file contents as a single unified string
  // .split('\n') splits it at each newline character, giving an array of lines
  // .slice(1) returns a new array that skips the first element, i.e. everything except the first line
  // .join('\n') re-concatenates the remaining lines into a single string
  var linesExceptFirst = data.split('\n').slice(1).join('\n');
  fs.writeFile(filename, linesExceptFirst, function(err) { if (err) { /* check and handle err */ } });
});
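For files too large to comfortably hold in memory, a possible alternative (a sketch, with an illustrative temporary file name) is to stream the lines with the built-in readline module, write everything except the first line to a temp file, then swap it into place:

const fs = require('fs');
const readline = require('readline');

const filename = 'test.txt';
const tmpName = filename + '.tmp'; // illustrative temp file name

const input = fs.createReadStream(filename);
const output = fs.createWriteStream(tmpName);
const rl = readline.createInterface({ input, crlfDelay: Infinity });

let lineNo = 0;
rl.on('line', (line) => {
  if (lineNo++ > 0) {        // skip the first line only
    output.write(line + '\n');
  }
});
rl.on('close', () => {
  output.end(() => {
    fs.rename(tmpName, filename, (err) => { // swap the temp file into place
      if (err) throw err;
      console.log('first line removed');
    });
  });
});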
I just came across the need to be able to exclude several lines in a file. Here's how I did it with a simple node function.
const fs = require('fs');

const removeLines = (data, lines = []) => {
  return data
    .split('\n')
    .filter((val, idx) => lines.indexOf(idx) === -1)
    .join('\n');
}

fs.readFile(fileName, 'utf8', (err, data) => {
  if (err) throw err;
  // remove the first line and the 5th and 6th lines in the file
  fs.writeFile(fileName, removeLines(data, [0, 4, 5]), 'utf8', function(err) {
    if (err) throw err;
    console.log("the lines have been removed.");
  });
})
use replace
const fs = require('fs');

function readWriteSync() {
  var data = fs.readFileSync(filepath, 'utf-8');
  // replace 'world' together with the new line character with empty
  var newValue = data.replace(/world\n/, '');
  fs.writeFileSync(filepath, newValue, 'utf-8');
}
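Note that /world\n/ will not match if the file uses Windows line endings (\r\n), or if "world" should only be removed when it is the very first line; a slightly more defensive variant of the same replace:

// anchor to the start of the file and allow an optional carriage return before the newline
var newValue = data.replace(/^world\r?\n/, '');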

Writing buffer response from resemble.js to file

I'm using node-resemble-js to compare two PNG images.
The comparison happens without issue and I get a successful/relevant response however I'm having trouble outputting the image diff.
var express = require('express');
var fs = require('fs');
var resemble = require('node-resemble-js');
var router = express.Router();

router.get('/compare', function(req, res, next) {
  compareImages(res);
});

var compareImages = function (res) {
  resemble.outputSettings({
    largeImageThreshold: 0
  });
  var diff = resemble('1.png')
    .compareTo('2.png')
    .ignoreColors()
    .onComplete(function(data){
      console.log(data);
      var png = data.getDiffImage();
      fs.writeFile('diff.png', png.data, null, function (err) {
        if (err) {
          throw 'error writing file: ' + err;
        }
        console.log('file written');
      });
      res.render('compare');
    });
};

module.exports = router;
It writes to diff.png as expected; however, the result is not a valid image.
Any ideas where I'm going wrong? I feel like I'm pretty close but just unsure of the final piece.
Thanks
Looks like there is a pack() method that needs to be called, which does some work and then streamifies the data. In that case you can buffer the stream and then call writeFile like this:
var png = data.getDiffImage();
var buf = Buffer.alloc(0); // Buffer.alloc replaces the deprecated new Buffer([])
var strm = png.pack();
strm.on('data', function (dat) {
  buf = Buffer.concat([buf, dat]);
});
strm.on('end', function() {
  fs.writeFile('diff.png', buf, null, function (err) {
    if (err) {
      throw 'error writing file: ' + err;
    }
    console.log('file written');
  });
});
or you can just pipe it like this, which is a little simpler:
png.pack().pipe(fs.createWriteStream('diff.png'))
Honestly, your approach made sense to me (grab the Buffer and write it) but I guess that data Buffer attached to what comes back from getDiffImage isn't really the final png. Seems like the docs are a bit thin but there's some info here: https://github.com/lksv/node-resemble.js/issues/4
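One more hedged tweak: the route above calls res.render('compare') before the diff has necessarily been flushed to disk. If the rendered page references diff.png, you can render from the write stream's finish event instead (a sketch of just the onComplete body, reusing res from compareImages):

.onComplete(function (data) {
  var out = fs.createWriteStream('diff.png');
  out.on('finish', function () {   // fires once diff.png is fully flushed to disk
    console.log('file written');
    res.render('compare');         // safe to reference diff.png from the page now
  });
  out.on('error', function (err) {
    console.error('error writing file: ' + err);
    res.status(500).end();
  });
  data.getDiffImage().pack().pipe(out);
});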
