Unable to read content of .txt using fs - node.js

I am using the fs module to read a .txt file's content, but the result is always empty. My .txt file does have content in it. Could anyone give me a hand, please? This is my test code:
var fs = require("fs");
var content = "";
fs.readFile("2.txt", "utf8", function(err, data) {
    if (err) {
        return console.log("fail", err);
    }
    content = data;
});
console.log(content);
The content is empty in the console.

You are logging the result too early. readFile is asynchronous, so you should log the result inside its callback:
var fs = require("fs");
var content = "";
fs.readFile("2.txt", "utf8", function(err, data) {
    if (err) {
        return console.log("fail", err);
    }
    content = data;
    console.log(content);
});
// The console.log below would run right after the readFile call returns.
// It does not wait for the file to actually be read.
// console.log(content);
Or you can write the same logic like this:
const fs = require('fs');

async function main() {
    try {
        const content = await fs.promises.readFile('2.txt', 'utf8');
        console.log(content);
    } catch (ex) {
        console.trace(ex);
    }
}

main();
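As a side note, if blocking the event loop is acceptable (for example, in a small one-off script), the synchronous variant avoids the callback entirely:

const fs = require('fs');

try {
    // readFileSync returns the content directly and throws on error
    const content = fs.readFileSync('2.txt', 'utf8');
    console.log(content);
} catch (ex) {
    console.trace(ex);
}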

Related

Uglifyjs node js

This is the code I wrote to minify all the .js files in a directory:
var http = require('http');
var testFolder = './tests/';
var UglifyJS = require("uglify-js");
var fs = require('fs');
var glob = require("glob");
var fillnam = "";

hello();

function hello() {
    glob("gen/*.js", function (er, files) {
        //console.log(files);
        for (var i = 0; i < files.length; i++) {
            fillnam = files[i];
            console.log("File Name " + fillnam);
            fs.readFile(fillnam, 'utf8', function (err, data) {
                if (err) {
                    console.log(err);
                }
                console.log(fillnam + " " + data);
                var result = UglifyJS.minify(data);
                var gtemp_file = fillnam.replace(".js", "");
                console.log(gtemp_file);
                fs.writeFile(gtemp_file + ".min.js", result.code, function(err) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log("File was successfully saved.");
                    }
                });
            });
        }
    });
}

http.createServer(function (req, res) {
    res.writeHead(200, {'Content-Type': 'text/html'});
    res.end('Hello World!');
}).listen(8080);
As a result, each source file should get its own minified file with the same name plus .min.js in the same directory. But what I am getting is a single file with all the files' data overwritten. For example, if there are two files a.js and b.js in a directory with the contents:
var a=10;var b=20;
var name="stack";
what I'm getting is a single file a.min.js with the content:
var a=10tack;
Please help.
You need to collect all the file contents first, concatenate them, and then run UglifyJS.minify on the result to be able to save it as a single file.
Something like this (not fully tested):
const UglifyJS = require("uglify-js");
const fs = require('fs');
const readFile = require('util').promisify(fs.readFile);
const glob = require("glob");

function hello() {
    glob("gen/*.js", async (er, files) => {
        const data = [];
        for (const file of files) {
            const fileData = await readFile(file, {
                encoding: 'utf-8'
            });
            data.push(fileData);
        }
        const uglified = UglifyJS.minify(data.join('\n'));
        // minify() returns an object; write the generated code, not the object
        fs.writeFile('main.min.js', uglified.code, (err) => {
            if (err) console.log(err);
        });
    });
}

hello();
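If the goal is instead one .min.js per source file, as the question describes, the root cause in the original code is the shared fillnam variable: every readFile callback sees whatever file name the loop wrote last. A minimal per-file sketch, assuming uglify-js 3 (whose minify() accepts a code string and returns an object with code and error properties) and the same glob package, using block scoping to keep each file name with its callback:

const UglifyJS = require('uglify-js');
const glob = require('glob');
const fs = require('fs').promises;

glob('gen/*.js', async (er, files) => {
    for (const file of files) {
        // `file` is block-scoped, so it cannot be overwritten by a later iteration
        const source = await fs.readFile(file, 'utf8');
        const result = UglifyJS.minify(source);
        if (result.error) {
            console.log(file, result.error);
            continue;
        }
        await fs.writeFile(file.replace(/\.js$/, '.min.js'), result.code);
        console.log('Minified', file);
    }
});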

EROFS: read-only file system while creating node js function AWS Lambda

I have created a function on AWS Lambda using Node.js version 6.10.
I need to create a PDF file from an HTML string and send that file in an email.
Following is my code:
exports.handler = function index(event, context, callback) {
    var toAddress = event.to;
    var fromAddress = event.from;
    var subject = event.subject;
    var MailBody = event.mailBody;
    var PDFHTML = event.PDFHTML;
    var LabelHTML = event.LabelHtml;
    var options = {
        format: 'Legal',
        "header": {
            "height": "25mm"
        }
    };
    pdf.convertHTMLString(LabelHTML, '/tmp/LabelDetails.pdf', function(err, res1) {
        if (err) {
            console.log(err);
            callback(err, false);
        } else {
            pdf.convertHTMLString(PDFHTML, '/tmp/DiagramDetails.pdf', function(err, res1) {
                if (err) {
                    console.log(err);
                    callback(null, false);
                } else {
                    merge(['/tmp/LabelDetails.pdf', '/tmp/DiagramDetails.pdf'], '/tmp/Final.pdf', function(err) {
                        if (err) {
                            console.log(err);
                            callback(null, false);
                        } else {
                            /* Send mail code */
                            callback(null, true);
                        }
                    });
                }
            });
        }
    });
};

var fs = require("fs");
var pdf = require('html-to-pdf');
var merge = require('easy-pdf-merge');
var nodemailer = require('nodemailer');
var path = require("path");
When I try to convert the HTML string to a PDF file, it throws the error EROFS: read-only file system.
My simple Node.js code works perfectly fine on its own.
After doing more research on this issue I found out that AWS gives write permissions only on the /tmp folder. So I used file paths like /tmp/FileName.pdf, but the issue still seems to be there.
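A quick way to verify the /tmp constraint in isolation is a handler that does nothing but write there. If the sketch below succeeds while the PDF code fails, the EROFS error is likely coming from one of the libraries writing outside /tmp under the hood:

// Minimal Lambda handler that only writes to /tmp.
// On Lambda, /tmp is the sole writable location; writes anywhere else raise EROFS.
var fs = require('fs');

exports.handler = function (event, context, callback) {
    fs.writeFile('/tmp/probe.txt', 'ok', function (err) {
        if (err) return callback(err);
        callback(null, 'write to /tmp succeeded');
    });
};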

Streaming the results of multiple fs.readFile calls

I'm a newbie with Node.js streams. What I want to achieve is to stream the results of the readFile calls in a module I have, then consume that readable stream in my main app and listen for data events, so that every time readFile returns a result, a data event fires and the object is passed as a chunk. This is what I've got so far, and it's throwing an error...
function streamObjects(type, dirname) {
    var Readable = require('stream').Readable;
    var rs = new Readable({objectMode: true});
    fs.readdir(dirname, function(err, files) {
        if (err)
            console.log(err);
        for (var i = 0; i < 10; i++) {
            fs.readFile(path.resolve(dirname, files[i]), function(err, data) {
                if (err)
                    console.log(err);
                rs.push(JSON.parse(data));
            }); //end readFile
        } //end for loop
        return rs;
    });
}
You want to use an EventEmitter.
readFile is asynchronous, so its callbacks have not run yet at the point where you return the rs Readable (and that return is inside the readdir callback, so streamObjects itself returns nothing).
var util = require('util');
var EventEmitter = require('events').EventEmitter;
var fs = require('fs');
var path = require('path');

function streamObjects(type, dirname) {
    var self = this;
    fs.readdir(dirname, function (err, files) {
        if (err)
            return console.log(err);
        // `let` gives each readFile callback its own copy of i;
        // with `var`, every callback would see the loop's final value
        for (let i = 0; i < 10; i++) {
            fs.readFile(path.resolve(dirname, files[i]), function (err, data) {
                if (err)
                    return console.log(err);
                self.emit('data', files[i], JSON.parse(data));
            }); //end readFile
        } //end for loop
    });
}

util.inherits(streamObjects, EventEmitter);

module.exports = streamObjects;
From another file
var streamObjects = require('./streamObjects');
var streamObjectInstance = new streamObjects(type, dirName);
streamObjectInstance.on('data', yourFunctionHere);
I did not add error emits here, but you can emit an 'error' event in the same way when an error happens.
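If an actual object-mode Readable is preferred over an EventEmitter (as the original attempt suggests), the missing pieces are a read() implementation and an end-of-stream signal. A minimal sketch, assuming every file in the directory is JSON:

const { Readable } = require('stream');
const fs = require('fs');
const path = require('path');

function streamObjects(dirname) {
    // no-op read(): data is pushed as the readFile callbacks complete
    const rs = new Readable({ objectMode: true, read() {} });
    fs.readdir(dirname, (err, files) => {
        if (err) return rs.destroy(err);
        if (files.length === 0) return rs.push(null);
        let pending = files.length;
        files.forEach((file) => {
            fs.readFile(path.resolve(dirname, file), (err, data) => {
                if (err) return rs.destroy(err);
                rs.push(JSON.parse(data));
                if (--pending === 0) rs.push(null); // all files read: end the stream
            });
        });
    });
    return rs;
}

streamObjects('./data').on('data', (obj) => console.log(obj));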

Node.js concatenating files

I'm trying to concatenate multiple files and save them to a new file using Node.js createWriteStream and createReadStream. There is a small bug: I would like to print each file name before its contents, but the file names are always printed together at the top of the output file. Please see my current result below, and if you have any idea why, please help!
Thanks!
test.js
var async = require('async');
var fs = require('fs');
var path = require('path');

var SOURCE_FOLDER = '/tmp/test';
var SOURCE_FILE_PATTERN = /\.json$/;
var REPORT_FILE = path.join(SOURCE_FOLDER, 'output.html');

var writeStream = fs.createWriteStream(REPORT_FILE, {
    flags: 'w',
    encoding: 'UTF-8'
});

var appendReport = function appendReport(file, callback) {
    var readStream = fs.createReadStream(file, {
        flags: 'r',
        encoding: 'UTF-8'
    });
    readStream.pipe(writeStream, {
        end: false
    });
    readStream.on('end', callback);
};

fs.readdir(SOURCE_FOLDER, function (err, files) {
    if (err) {
        console.log(err);
    } else {
        writeStream.write("<html><body><pre>\n");
        async.forEach(files, function (file, callback) {
            var filePath = path.join(SOURCE_FOLDER, file);
            fs.stat(filePath, function (err, stats) {
                if (err) {
                    callback(err);
                } else if (stats.isFile() && file.match(SOURCE_FILE_PATTERN)) {
                    writeStream.write("\n" + filePath);
                    appendReport(filePath, callback);
                } else {
                    callback();
                }
            });
        }, function (err) {
            writeStream.write("\n</pre></body></html>");
        });
    }
});
My Current Result:
# node test.js; cat /tmp/test/output.html
<html><body><pre>
/tmp/test/a.json
/tmp/test/b.jsoncontent A
content B
</pre></body></html>
My Expected Result:
# node test.js; cat /tmp/test/output.html
<html><body><pre>
/tmp/test/a.json
content A
/tmp/test/b.json
content B
</pre></body></html>
The problem is that async.forEach === async.each, and async.each() calls the iterator for all items in parallel. What you want is async.eachSeries() instead, which processes the items one at a time.
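A trimmed-down version of the script above with that one change (the rest of the logic is unchanged; the two file names are hard-coded here only for brevity):

var async = require('async');
var fs = require('fs');
var path = require('path');

var SOURCE_FOLDER = '/tmp/test';
var writeStream = fs.createWriteStream(path.join(SOURCE_FOLDER, 'output.html'));

writeStream.write("<html><body><pre>\n");
// eachSeries waits for each file's callback before starting the next one,
// so each header line is immediately followed by that file's contents
async.eachSeries(['a.json', 'b.json'], function (file, callback) {
    var filePath = path.join(SOURCE_FOLDER, file);
    writeStream.write("\n" + filePath + "\n");
    var readStream = fs.createReadStream(filePath, { encoding: 'UTF-8' });
    readStream.pipe(writeStream, { end: false });
    readStream.on('end', callback);
}, function (err) {
    if (err) console.log(err);
    writeStream.write("\n</pre></body></html>");
});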

unzip the file and parse it to js

Hi, I tried to unzip a file from my C drive and I'm trying to parse it into a JavaScript object.
Here is the code:
here is the code
var AdmZip = require('adm-zip');
var fs = require('fs'), xml2js = require('xml2js');
var parser = new xml2js.Parser();
var paramdata = 'c:/sample/kusuma.zip';
console.log(paramdata);
var zip = new AdmZip(paramdata);
var zipEntries = zip.getEntries();
var obj = [];
var count = 0;
zipEntries.forEach(function(zipEntry) {
    var len = zipEntries.length;
    console.log(zipEntry.toString());
    console.log(zipEntry.entryName);
    fs.readFile("", function(err, data) {
        console.log(data);
        parser.parseString(data, function(err, result) {
            count++;
            console.log(count);
            obj.push(result);
            if (count === len) {
                console.log(obj);
                res.send(obj);
            }
        });
    });
});
Please check the code and give me some more examples.
Well, fs.readFile() is for reading files that are themselves directly on disk, which these aren't.
However, adm-zip is already reading in the contents of the .zip, so you shouldn't need fs. Each zipEntry has getData() and getDataAsync() methods that can be used to retrieve contents.
zipEntries.forEach(function (zipEntry) {
    zipEntry.getDataAsync(function (data) {
        parser.parseString(data, function (err, result) {
            console.log(result);
        });
    });
});
Also, as zipEntries is an Array, you can use .filter() to reduce it to only XML files.
var zipEntries = zip.getEntries().filter(function (zipEntry) {
    return !zipEntry.isDirectory && /\.xml$/.test(zipEntry.entryName);
});
You'll also want to determine len once from the collection rather than recomputing it on each iteration. You can then test against obj.length rather than having to keep count separately:
var len = zipEntries.length;
var obj = [];

zipEntries.forEach(function (zipEntry) {
    zipEntry.getDataAsync(function (data) {
        parser.parseString(data, function (err, result) {
            obj.push(result);
            if (obj.length === len) {
                res.send(obj);
            }
        });
    });
});
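Putting those pieces together, a complete sketch (res.send assumes this runs inside an Express route handler, as the original snippet implies; the getDataAsync callback shape follows the usage above):

var AdmZip = require('adm-zip');
var xml2js = require('xml2js');

var parser = new xml2js.Parser();
var zip = new AdmZip('c:/sample/kusuma.zip');

// keep only the XML entries
var zipEntries = zip.getEntries().filter(function (zipEntry) {
    return !zipEntry.isDirectory && /\.xml$/.test(zipEntry.entryName);
});

var len = zipEntries.length;
var obj = [];
zipEntries.forEach(function (zipEntry) {
    zipEntry.getDataAsync(function (data) {
        parser.parseString(data, function (err, result) {
            if (err) return console.log(err);
            obj.push(result);
            if (obj.length === len) {
                console.log(obj); // or res.send(obj) inside the route handler
            }
        });
    });
});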
