How to write unencoded HTML to a file? - node.js

I'm reading an HTML snippet from the file system; it contains just <h1>Hulton Archive</h1>. I then write a new XML file which must contain that HTML snippet in a certain element, using xml2js to build the XML. Here's what I have:
var fs = require('fs');
var xml2js = require('xml2js');
var builder = new xml2js.Builder();
var parseString = require('xml2js').parseString;

var result;
var inputFile = "html-snippet.html";
var outputFile = "test.xml";

fs.readFile(inputFile, "UTF-8", function (err, data) {
    if (err) {
        return console.log(err);
    }
    console.log(data);

    var obj = {name: "Super", Surname: "Man", age: data};
    var outputXML = builder.buildObject(obj);

    fs.writeFile(outputFile, outputXML, function(err) {
        if (err) {
            console.log(err);
        } else {
            console.log(outputFile + " was saved!");
        }
    });
});
The problem is that the HTML tags get entity-encoded in the output file; <h1>header</h1> is changed into &lt;h1&gt;header&lt;/h1&gt;. I want to preserve the HTML tags instead of encoding them in the output file.
I have tried writing this file using both XMLbuilder (https://github.com/oozcitak/xmlbuilder-js) and xml2js (https://github.com/Leonidas-from-XIV/node-xml2js). It seems like both of them were encoding the HTML on the output file.
How can I write out the XML file without encoding the HTML?

Using the ent package (npm install ent) you can decode the HTML entities after you generate the XML. Note that this produces "invalid" XML, since the raw HTML is not escaped; for most people, CDATA is probably the better choice.
var fs = require('fs');
var xml2js = require('xml2js');
var builder = new xml2js.Builder();
var parseString = require('xml2js').parseString;
var decode = require('ent').decode;

var result;
var inputFile = "html-snippet.html";
var outputFile = "test.xml";

fs.readFile(inputFile, "UTF-8", function (err, data) {
    if (err) {
        return console.log(err);
    }
    console.log(data);

    var obj = {name: "Super", Surname: "Man", age: data};
    var outputXML = decode(builder.buildObject(obj));

    fs.writeFile(outputFile, outputXML, function(err) {
        if (err) {
            console.log(err);
        } else {
            console.log(outputFile + " was saved!");
        }
    });
});
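If CDATA works for you, xml2js can emit it directly; a minimal sketch, assuming the Builder's cdata option (which wraps text values that would otherwise need escaping in a CDATA section):
var xml2js = require('xml2js');

// With cdata: true, values containing markup are wrapped in <![CDATA[ ... ]]>
// instead of being entity-encoded, so the output stays well-formed XML.
var builder = new xml2js.Builder({ cdata: true });
var obj = {name: "Super", Surname: "Man", age: "<h1>Hulton Archive</h1>"};

console.log(builder.buildObject(obj));
// ... <age><![CDATA[<h1>Hulton Archive</h1>]]></age> ...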

Related

Read content of posted .txt file with Azure function NodeJS

I want to return the content of an uploaded .txt file in the JSON response of an Azure function. I'm able to read the filename and type, but I also want the file's contents as a string in my JSON response. Currently the data field in the response stays empty:
{
    "name": "20200429112846_-_IB_records.txt",
    "type": "text/plain",
    "data": ""
}
My code is:
var multipart = require("parse-multipart");

module.exports = function (context, request) {
    // encode body to base64 string
    var bodyBuffer = Buffer.from(request.body);
    var boundary = multipart.getBoundary(request.headers['content-type']);

    // parse the body
    var parts = multipart.Parse(bodyBuffer, boundary);

    var fileContent = "";
    var fileBuffer = Buffer.from(parts[0].data);

    var fs = require('fs');
    fs.readFile(fileBuffer, 'utf8', function(err, data) {
        if (err) throw err;
        fileContent = data;
    });

    context.res = { body: { name: parts[0].filename, type: parts[0].type, data: fileContent } };
    context.done();
};
Anyone got an idea?
fs.readFile operates asynchronously, so
context.res = { body : { name : parts[0].filename, type: parts[0].type, data: fileContent}};
context.done();
is executed before the file has actually been read. One way to solve this is to move the context code into the readFile callback:
fs.readFile(fileBuffer, 'utf8', function(err, data) {
    if (err) throw err;
    fileContent = data;
    context.res = { body: { name: parts[0].filename, type: parts[0].type, data: fileContent } };
    context.done();
});
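A hedged alternative sketch: parse-multipart already hands back the uploaded file's contents as an in-memory Buffer (parts[0].data), so fs isn't needed at all and the Buffer can be converted to a string directly:
var multipart = require("parse-multipart");

module.exports = function (context, request) {
    var bodyBuffer = Buffer.from(request.body);
    var boundary = multipart.getBoundary(request.headers['content-type']);
    var parts = multipart.Parse(bodyBuffer, boundary);

    context.res = {
        body: {
            name: parts[0].filename,
            type: parts[0].type,
            // parts[0].data is the file content itself, not a path on disk,
            // so decode it in place instead of going through fs.readFile.
            data: parts[0].data.toString('utf8')
        }
    };
    context.done();
};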

Unable to read content of .txt using fs

I am using the fs module to read a .txt file's content, but the result is always empty. My .txt file does have content in it. Could anyone give me a hand, please? This is my test code:
var fs = require("fs");
var content = "";
fs.readFile("2.txt", "utf8", function(err, data){
if(err) {
return console.log("fail", err);
}
content = data;
});
console.log(content);
The content is empty in the console.
You are logging the result too early. fs.readFile is asynchronous, so you should log the result inside the readFile callback.
var fs = require("fs");
var content = "";
fs.readFile("2.txt", "utf8", function(err, data){
if(err) {
return console.log("fail", err);
}
content = data;
console.log(content);
});
// The console log below will be executed right after the readFile call.
// It won't wait the file to be actually read.
// console.log(content);
Or you can write the same logic like this:
const fs = require('fs');

async function main() {
    try {
        const content = await fs.promises.readFile('2.txt', 'utf8');
        console.log(content);
    } catch (ex) {
        console.trace(ex);
    }
}

main();

Uglifyjs node js

This is the code I wrote to minify all the .js files in a directory:
var http = require('http');
var testFolder = './tests/';
var UglifyJS = require("uglify-js");
var fs = require('fs');
var glob = require("glob");
var fillnam = "";

hello();

function hello() {
    glob("gen/*.js", function (er, files) {
        //console.log(files);
        for (var i = 0; i < files.length; i++) {
            fillnam = files[i];
            console.log("File Name " + fillnam);
            fs.readFile(fillnam, 'utf8', function (err, data) {
                if (err) {
                    console.log(err);
                }
                console.log(fillnam + " " + data);
                var result = UglifyJS.minify(data);
                var gtemp_file = fillnam.replace(".js", "");
                console.log(gtemp_file);
                fs.writeFile(gtemp_file + ".min.js", result.code, function(err) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log("File was successfully saved.");
                    }
                });
            });
        }
    });
}

http.createServer(function (req, res) {
    res.writeHead(200, {'Content-Type': 'text/html'});
    res.end('Hello World!');
}).listen(8080);
As a result, a minified .min.js file with the corresponding name should be created for each source file in the same directory.
But what I am getting is a single file in which the data from all files overwrites each other. For example, if there are two files in a directory, a.js and b.js, with contents:
var a=10;var b=20;
var name="stack";
what I'm getting is a single file a.min.js with content:
var a=10tack;
Please help.
If you want a single output file, you need to collect all file contents first, concatenate them, and then run UglifyJS.minify on the combined source before saving it.
Something like this (not fully tested):
const testFolder = './tests/';
const UglifyJS = require("uglify-js");
const fs = require('fs');
const readFile = require('util').promisify(fs.readFile);
const glob = require("glob");

function hello() {
    glob("gen/*.js", async (er, files) => {
        let data = [];
        for (const file of files) {
            const fileData = await readFile(file, {
                encoding: 'utf-8'
            });
            data.push(fileData);
        }
        const uglified = UglifyJS.minify(data.join('\n'));
        // minify() returns an object; the minified source is in .code
        fs.writeFile('main.min.js', uglified.code, (err) => {
            if (err) console.log(err);
        });
    });
}

hello();
hello();
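If the goal is instead one .min.js per source file, as the question describes, here is a rough sketch (untested, assuming uglify-js 3's minify(code) API). Each forEach callback gets its own file parameter, so later iterations don't overwrite it the way the shared fillnam variable did:
const UglifyJS = require("uglify-js");
const fs = require("fs");
const glob = require("glob");

glob("gen/*.js", function (er, files) {
    files.forEach(function (file) {
        fs.readFile(file, "utf8", function (err, data) {
            if (err) return console.log(err);
            // "file" is still the right name here, because each callback
            // closes over its own parameter rather than a shared variable.
            const result = UglifyJS.minify(data);
            const outFile = file.replace(/\.js$/, ".min.js");
            fs.writeFile(outFile, result.code, function (err) {
                if (err) console.log(err);
                else console.log(outFile + " was saved.");
            });
        });
    });
});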

unzip the file and parse it to js

Hi, I'm trying to unzip a file from my C drive and parse its contents into a JavaScript object.
Here is the code:
var AdmZip = require('adm-zip');
var fs = require('fs'), xml2js = require('xml2js');
var parser = new xml2js.Parser();

var paramdata = 'c:/sample/kusuma.zip';
console.log(paramdata);

var zip = new AdmZip(paramdata);
var zipEntries = zip.getEntries();
var obj = [];
var count = 0;

zipEntries.forEach(function(zipEntry) {
    var len = zipEntries.length;
    console.log(zipEntry.toString());
    console.log(zipEntry.entryName);
    fs.readFile("", function(err, data) {
        console.log(data);
        parser.parseString(data, function(err, result) {
            count++;
            console.log(count);
            obj.push(result);
            if (count === len) {
                console.log(obj);
                res.send(obj);
            }
        });
    });
});
Please check the code and provide me with some more examples.
Well, fs.readFile() is for reading files that are themselves directly on disk, which these aren't.
However, adm-zip is already reading in the contents of the .zip, so you shouldn't need fs. Each zipEntry has getData() and getDataAsync() methods that can be used to retrieve contents.
zipEntries.forEach(function (zipEntry) {
    zipEntry.getDataAsync(function (data) {
        parser.parseString(data, function (err, result) {
            console.log(result);
        });
    });
});
Also, as zipEntries is an Array, you can use .filter() to reduce it to only XML files.
var zipEntries = zip.getEntries().filter(function (zipEntry) {
    return !zipEntry.isDirectory && /\.xml$/.test(zipEntry.entryName);
});
You'll also want to determine len once from the collection rather than inside each iteration. You can then test it against obj.length rather than having to keep count separately:
var len = zipEntries.length;
var obj = [];

zipEntries.forEach(function (zipEntry) {
    zipEntry.getDataAsync(function (data) {
        parser.parseString(data, function (err, result) {
            obj.push(result);
            if (obj.length === len) {
                res.send(obj);
            }
        });
    });
});

How do I render an EJS template file in Node.js?

I'm using Node.js and trying to render an EJS template file. I figured out how to render strings:
var http = require('http');
var ejs = require('ejs');

var server = http.createServer(function(req, res) {
    res.end(ejs.render('Hello World'));
});

server.listen(3000);
How can I render an EJS template file?
There is a function in EJS to render files; you can just do:
ejs.renderFile(__dirname + '/template.ejs', function(err, data) {
    console.log(err || data);
});
Source: Official EJS documentation
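For example, inside a request handler (a sketch tying this back to the question's server):
var http = require('http');
var ejs = require('ejs');

http.createServer(function (req, res) {
    // renderFile reads and renders the template, then calls back with the HTML.
    ejs.renderFile(__dirname + '/template.ejs', function (err, html) {
        if (err) {
            res.writeHead(500);
            return res.end(err.message);
        }
        res.end(html);
    });
}).listen(3000);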
var fs = require('fs');
var templateString = fs.readFileSync('template.ejs', 'utf-8');
and then you do your thing:
var server = http.createServer(function(req, res) {
    res.end(ejs.render(templateString));
});
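A related variant (a sketch, assuming EJS's compile API): ejs.compile turns the template string into a reusable render function, so the template is parsed once at startup rather than on every request:
var fs = require('fs');
var ejs = require('ejs');
var http = require('http');

// Compile once; render is then just a plain function call per request.
var templateString = fs.readFileSync('template.ejs', 'utf-8');
var render = ejs.compile(templateString);

http.createServer(function (req, res) {
    res.end(render({ /* locals for the template go here */ }));
}).listen(3000);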
All you have to do is compile the file as a string (with optional local variables), like so:
var fs = require('fs'), ejs = require('ejs'), http = require('http'),
    server, filePath;

filePath = __dirname + '/sample.html'; // this is from your current directory

fs.readFile(filePath, 'utf-8', function(error, content) {
    if (error) { throw error; }
    // start the server once you have the content of the file
    http.createServer(function(req, res) {
        // render the file using some local params
        res.end(ejs.render(content, {
            users: [
                { name: 'tj' },
                { name: 'mape' },
                { name: 'guillermo' }
            ]
        }));
    }).listen(3000);
});
#ksloan's answer is really good. I also had the same use case and did a little bit of digging. The function renderFile() is overloaded; the one you will mostly need is:
renderFile(path: string, data, cb)
for example:
ejs.renderFile(__dirname + '/template.ejs', dataForTemplate, function(err, data) {
    console.log(err || data);
});
where dataForTemplate is an object containing values that you need inside the template.
There's a synchronous version of this pattern that tightens it up a little more.
var server = http.createServer(function(req, res) {
    var filePath = __dirname + '/sample.html';
    var template = fs.readFileSync(filePath, 'utf8');
    res.end(ejs.render(template, {}));
});
Note the use of readFileSync(). If you specify the encoding (utf8 here), the function returns a string containing your template.
#ksloan's answer should be the accepted one; it uses the EJS function intended precisely for this purpose.
Here is an example of how to use with Bluebird:
var Promise = require('bluebird');
var path = require('path');
var ejs = Promise.promisifyAll(require('ejs'));

ejs.renderFileAsync(path.join(__dirname, 'template.ejs'), {context: 'my context'})
    .then(function (tpl) {
        console.log(tpl);
    })
    .catch(function (error) {
        console.log(error);
    });
For the sake of completeness here is a promisified version of the currently accepted answer:
var ejs = require('ejs');
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var path = require('path');

fs.readFileAsync(path.join(__dirname, 'template.ejs'), 'utf-8')
    .then(function (tpl) {
        console.log(ejs.render(tpl, {context: 'my context'}));
    })
    .catch(function (error) {
        console.log(error);
    });
Use the ejs.renderFile(filename, data) function with async/await.
To render HTML files:
const renderHtmlFile = async () => {
    try {
        // Parameters used inside the HTML file
        let params = {firstName: 'John', lastName: 'Doe'};
        let html = await ejs.renderFile(__dirname + '/template.html', params);
        console.log(html);
    } catch (error) {
        console.log("Error occurred: ", error);
    }
}
To render EJS files:
const renderEjsFile = async () => {
    try {
        // Parameters used inside the EJS file
        let params = {firstName: 'John', lastName: 'Doe'};
        // Use a name other than "ejs" here so the ejs module isn't shadowed
        let html = await ejs.renderFile(__dirname + '/template.ejs', params);
        console.log(html);
    } catch (error) {
        console.log("Error occurred: ", error);
    }
}
