I'm pretty new to Node.js, and I'm trying to read a file into a variable.
Here is my code.
var fs = require("fs"),
path = require("path"),
util = require("util");
var content;
console.log(content);
fs.readFile(path.join(__dirname,"helpers","test.txt"), 'utf8',function (err,data) {
    if (err) {
        console.log(err);
        process.exit(1);
    }
    content = util.format(data,"test","test","test");
});
console.log(content);
But every time I run the script I get
undefined and undefined
What am I missing? Help, please!
As stated in the comments under your question, Node is asynchronous, meaning that fs.readFile has not finished (and your callback has not run) by the time your second console.log is called.
If you move the log statement inside the callback, after the file has been read, you should see the contents printed:
var fs = require("fs"),
path = require("path"),
util = require("util");
var content;
console.log(content);
fs.readFile(path.join(__dirname, "helpers", "test.txt"), 'utf8', function (err, data) {
    if (err) {
        console.log(err);
        process.exit(1);
    }
    content = util.format(data, "test", "test", "test");
    console.log(content);
});
Even though this solves your immediate problem, without an understanding of the async nature of Node you're going to encounter a lot of issues.
This similar Stack Overflow answer goes into more detail about the other alternatives that are available.
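For instance, here is a minimal sketch (not from the linked answer) using the built-in fs.promises API with async/await, which sidesteps the callback-ordering problem entirely:
const fs = require("fs").promises;
const path = require("path");
const util = require("util");

async function main() {
    // await pauses this function until the file has actually been read
    const data = await fs.readFile(path.join(__dirname, "helpers", "test.txt"), "utf8");
    const content = util.format(data, "test", "test", "test");
    console.log(content);
}

main().catch((err) => {
    console.error(err);
    process.exit(1);
});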
The following code snippet uses a ReadStream. It reads your data in separate chunks; if your data file is small, it will arrive in a single chunk. However, this is an asynchronous task, so any work you want to do with the data needs to happen inside the ReadStream callbacks.
var fs = require('fs');
var readStream = fs.createReadStream(__dirname + '/readMe.txt', 'utf8');
/* include the file directory and file name instead of <__dirname + '/readMe.txt'> */
var content = '';
readStream.on('data', function(chunk){
    // accumulate every chunk; larger files arrive in more than one 'data' event
    content += chunk;
});
readStream.on('end', function(){
    // the full contents are only available once the stream has ended
    performTask();
});
function performTask(){
    console.log(content);
}
There is also another easy way: a synchronous read. As this is a synchronous task, you do not need to worry about execution order; the program only moves to the next line after the current line has finished, unlike the asynchronous version.
A clearer and more detailed answer is provided at the following link:
Get data from fs.readFile
var fs = require('fs');
var content = fs.readFileSync('readMe.txt','utf8');
/* include your file name instead of <'readMe.txt'> and make sure the file is in the same directory. */
or easily as follows:
const fs = require('fs');
const doAsync = require('doasync');
doAsync(fs).readFile('./file.txt')
    .then((data) => console.log(data));
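Note that, as written, readFile resolves with a Buffer rather than a string. Assuming doasync simply forwards extra arguments to the underlying fs.readFile (which is how its promisified wrappers generally behave), you can pass an encoding to get text:
doAsync(fs).readFile('./file.txt', 'utf8')
    .then((data) => console.log(data));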
I'm learning how to use the csv-parse module for Node.js. I wrote this code and it works perfectly:
var fs = require('fs');
const fileName = './spreadsheet.csv';
const assert = require('assert');
const { parse } = require('csv-parse');
const records = [];
// Initialize the parser
const parser = parse({
    delimiter: ','
});
// Use the readable stream api to consume records
parser.on('readable', function(){
    let record;
    while ((record = parser.read()) !== null) {
        records.push(record);
    }
});
// Catch any error
parser.on('error', function(err){
    console.error(err.message);
});
fs.readFile(fileName, 'utf8', function (err, f) {
    if (err) {
        return console.error(err);
    }
    const rows = f.split("\r\n");
    for(let x in rows) {
        parser.write(rows[x]+"\n");
    }
    parser.end();
    console.log(records);
});
But right now, I depend on the fs module and fs.readFile to consume my CSV file. Does csv-parse have an option to read from a file? I ask because, as you can see in my code, I have to specify my own line-break characters, which could differ between CSV files. I thought maybe the csv-parse module would have something that can more readily address such a situation.
The parser object will do most of the work for you. It expects the data to arrive on its stream interface and it will do everything else. All you have to do is open a stream and then pipe it to the parser, like this:
fs.createReadStream(fileName).pipe(parser);
And, here it is combined with your code:
const fs = require('fs');
const fileName = './spreadsheet.csv';
const { parse } = require('csv-parse');
const records = [];
// Initialize the parser
const parser = parse({
    delimiter: ','
});
// Use the readable stream api to consume records
parser.on('readable', function(){
    let record;
    while ((record = parser.read()) !== null) {
        records.push(record);
    }
});
// Catch any error
parser.on('error', function(err){
    console.error(err.message);
});
parser.on('end', function() {
    console.log(records);
});
// open the file and pipe it into the parser
fs.createReadStream(fileName).pipe(parser);
P.S. It's amazing that such a simple example of getting the CSV data from a file is not shown in the documentation (at least not anywhere I could find it). I'm also surprised they don't offer an option to collect the data from the stream automatically, instead requiring you to implement the readable event handler. Odd, for such an otherwise complete package.
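For what it's worth, since the parser is a standard Node transform stream, it appears you can listen for 'data' events instead of writing the readable handler yourself; a minimal sketch using the same file name as above:
const fs = require('fs');
const { parse } = require('csv-parse');

const records = [];
const parser = parse({ delimiter: ',' });

// each 'data' event delivers one parsed record (an array of fields)
parser.on('data', (record) => records.push(record));
parser.on('error', (err) => console.error(err.message));
parser.on('end', () => console.log(records));

fs.createReadStream('./spreadsheet.csv').pipe(parser);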
I want to download and save a text file using Node, but after downloading, the pipe method raises a memory-leak warning. By setting the emitter's listener limit to zero (0) the warning disappears, but the size of the file grows past 1 GB!
var download = function (uri, filename, callback) {
    var file = fs.createWriteStream(filename);
    http.get(uri, function (res) {
        res.pipe(file);
        res.on('end', function () {
            file.end();
        });
    });
};
ERROR: MaxListenersExceededWarning: Possible EventEmitter memory leak
detected. 11 end listeners added. Use emitter
I couldn't find any solution for saving a text file from the web. What's the best way to do this?
EDIT:
I moved the pipe method to solve the memory problem, but now the file contains binary data, not text. How can I save it as a text file?
One possible approach:
npm i request
then use the following code instead of yours:
var request = require('request');
var fs = require('fs');
var download = function (uri, filename, callback) {
    // pipe the HTTP response straight into a write stream for the target file
    var file = fs.createWriteStream(filename);
    request(uri).pipe(file);
};
This way it will handle memory/network/back-pressure on its own in nearly all cases.
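As an aside, the request package has since been deprecated. If you would rather stick to built-in modules, a rough equivalent sketch using stream.pipeline (which likewise handles back-pressure and cleans up on errors) could look like this; the function shape simply mirrors the download helper from the question:
const fs = require('fs');
const http = require('http');
const { pipeline } = require('stream');

const download = function (uri, filename, callback) {
    http.get(uri, function (res) {
        // pipeline streams the response into the file and reports errors via the callback
        pipeline(res, fs.createWriteStream(filename), callback);
    });
};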
This code writes a UTF-8 string to the text file at filepath, but it appears to be functionally equivalent to your latest update (edit 3).
If you're still getting binary/buffered output after trying this code, please provide the result of typeof res.
const http = require('http');
const fs = require('fs');

const downloadFile = (uri, filepath, callback) => {
    http.get(uri, res => {
        console.log(typeof res);
        // stream the response body straight into the file
        res.pipe(fs.createWriteStream(filepath))
            .on('finish', callback);
    });
};
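A hypothetical call (the URL and path are placeholders, not taken from the question):
downloadFile('http://example.com/notes.txt', './notes.txt', () => console.log('saved'));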
I have the following code
var express = require('express');
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var path = require("path");
var langs = fs.readdirSync({directory having multiple directories});
var arr = [];
for(var v in langs){
    var filename = config.path.storage + "/" + langs[v] +"/assets/_assets.json";
    fs.readFile(filename, "utf8", function(err, data) {
        if (err) throw err;
        res.write(data);
        //res.end();
    });
}
res.end();
What I want to do is, once all the files in the directories (lang in this case - langs contains: [ 'ar-dz', 'en-gb', 'en-us' ]) are read, I want to do a res.send() to the client.
I tried to promisify the 'fs' module via the 'bluebird' module. I assume the callback in fs.readFileAsync() is causing the problem; is there a way around it?
"Can't set headers after they are sent" is occurring because you are calling res.write() after you've called res.end(). This happens because the async response to fs.readFile() comes later, after your for loop is done. Instead, you need to use your promises to coordinate the async operations, and when they are all done you can send your response with all the data.
You can use your Bluebird promise library to do that like this:
fs.readdirAsync(...).map(function(file) {
    return fs.readFileAsync(file, 'utf8');
}).then(function(arrayOfData) {
    res.send(arrayOfData.join(""));
}).catch(function(err) {
    // send some error response here
});
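Put together with the paths from your question, a fuller sketch might look like this (config.path.storage, the route name, and the _assets.json layout are assumptions carried over from your snippet, not a tested solution):
var express = require('express');
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var path = require('path');

var app = express();

app.get('/assets', function(req, res) {
    var base = config.path.storage; // assumed: the directory that holds the language folders
    fs.readdirAsync(base).map(function(lang) {
        // read each language's asset file; .map resolves once every read has finished
        return fs.readFileAsync(path.join(base, lang, 'assets', '_assets.json'), 'utf8');
    }).then(function(arrayOfData) {
        res.send(arrayOfData.join(''));
    }).catch(function(err) {
        res.status(500).send(err.message);
    });
});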
I'm trying to write some Node.js code that adds to an existing file, but each time I start the script, it overwrites the content of my test file.
var fs = require('fs');
var writer = fs.createWriteStream('test.txt', {flags: 'w'});
writer.on('finish', function() {
    console.log('data has been saved successfully');
});
function writeInList(id) {
    console.log(id);
    writer.write(id+' \n');
}
for (var id = 0; id<10; id++){
    writeInList(id);
}
writer.end();
Of course I have searched for a solution:
Writing large files with Node.js
But I'm not able to make it work. Could anybody help me, please?
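A minimal sketch of the likely cause (an assumption, not an answer from that thread): fs.createWriteStream with {flags: 'w'} truncates the file on every run, whereas the append flag 'a' keeps the existing content and writes after it:
var fs = require('fs');

// 'a' opens the file for appending instead of truncating it on each run
var writer = fs.createWriteStream('test.txt', {flags: 'a'});

writer.on('finish', function() {
    console.log('data has been saved successfully');
});

for (var id = 0; id < 10; id++) {
    writer.write(id + ' \n');
}
writer.end();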
I'd like to watch a CSV file and get the newest records each time it changes. I'm running the following shell command to build a very simple CSV file and append a new line every second:
rm test.csv & x=0 && while true; do echo "${x},${x},${x}" >> test.csv; x=$(($x+1)); sleep 1; done
The following code prints all the records of the file until the first change and then just emits the dashed line, as if it's not re-reading the file:
'use strict';
var fs = require('fs'),
dataFile = __dirname + '/server/data/test.csv',
csv = require('csv');
var parser = csv.parse({delimiter: ','}, function(err, data){
    console.log(data);
});
var watcher = fs.watch(dataFile);
watcher.on('change', fileChange);
function fileChange(e, fn){
    if (e) console.error(e)
    fs.createReadStream(dataFile).pipe(parser);
    console.log('-------')
}
Shouldn't the fileChange function re-read the file on every change? My ultimate plan here is to get both the previous array of lines and the current one and use lodash's difference function to return only the differences. If there's a better way, I'm open to hearing it, though.
My guess is that fs.createReadStream() has opened the file and it's not being closed. So on the second event fs.createReadStream() fails. No bueno.
Try using fs.readFile() instead like this:
function fileChange(e, fn){
    if (e) console.error(e)
    // pass 'utf8' so data arrives as a string rather than a Buffer
    fs.readFile(dataFile, 'utf8', function (err, data) {
        if (err) throw err;
        console.log(data);
        console.log('-------')
    });
}
See the documentation here: http://nodejs.org/api/fs.html#fs_fs_readfile_filename_options_callback
I ended up solving the issue by stat-ing the file on each change and streaming only the bytes added since the last read:
'use strict';
var fs = require('fs'),
dataFile = __dirname + '/server/data/test.csv',
readSize = 0,
csv = require('csv');
var parser = csv.parse();
parser.on('readable', function(data){
    var record;
    while(record = parser.read()){
        console.log(record);
    }
});
var watcher = fs.watch(dataFile);
watcher.on('change', fileChange);
// fires when the watched file changes
function fileChange(e, fn){
    // get these synchronously
    var stats = fs.statSync(dataFile);
    // if it's smaller, wait half a second and try again
    if (stats.size <= readSize) {
        setTimeout(fileChange, 500);
        return;
    }
    // read the stream from the previous offset up to the new size
    var stream = fs.createReadStream(dataFile, {start: readSize, end: stats.size});
    stream.on('data', function(chunk){
        parser.write(chunk.toString());
    });
    readSize = stats.size;
}
Any feedback on why this may not work would be appreciated.