I am using reactjs.net with Webpack and I am trying to load all components that live in the same folder (or a subfolder) as my index.js.
//Components/index.js
var fs = require('fs');
var path = require('path');
var directory = __dirname;

function addFile(file, filePath, exports) {
    if (!file.endsWith(".jsx")) return exports;
    // strip the ".jsx" extension and use the bare file name as the key
    exports[file.substring(0, file.length - 4)] = require(filePath);
    return exports;
}

function addFolder(dir, exports) {
    fs.readdir(dir, function (err, files) {
        if (err) {
            console.error("Could not list the directory.", err);
            process.exit(1);
        }
        files.forEach(function (file) {
            var currentPath = path.join(dir, file);
            fs.stat(currentPath, function (error, stat) {
                if (error) {
                    console.error("Error stating file.", error);
                    return;
                }
                if (stat.isFile())
                    exports = addFile(file, currentPath, exports);
                else if (stat.isDirectory())
                    exports = addFolder(currentPath, exports);
            });
        });
        // note: readdir is asynchronous, so this return happens inside the
        // callback and addFolder itself returns undefined
        return exports;
    });
}

module.exports = addFolder(directory, {});
index.js is then required as follows:
//Content/client.js
var Components = require('expose-loader?Components!./../Components');
Naturally Webpack just bundles the whole addFolder function into the output, which of course will not work on the client side, since fs is not available there. Obviously I want the directory scan to happen when the bundle is built, so now I am curious how I can load all my components and expose them globally in the Components object, using the file name as the identifier, without listing each one manually.
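Webpack actually has a build-time mechanism for exactly this: require.context(directory, useSubdirectories, regExp). It is resolved when the bundle is built, so no fs calls are needed at runtime. A minimal sketch of what Components/index.js could look like instead; the regex and the relative folder root are assumptions based on the code above:

//Components/index.js
// require.context is evaluated by Webpack at build time, so every
// matching .jsx file in this folder and its subfolders gets bundled
var context = require.context('.', true, /\.jsx$/);
var components = {};
context.keys().forEach(function (key) {
    // key looks like './SubFolder/MyComponent.jsx'; keep only the file name
    var name = key.split('/').pop().replace(/\.jsx$/, '');
    components[name] = context(key);
});
module.exports = components;

The expose-loader require in Content/client.js can then stay exactly as it is.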
I have a problem with tar-fs. When I try to tar a folder containing multiple directories and files totalling about 37 GB, it completes without errors, but when I try to extract the files I only get files back up to approximately 8 GB. I'm missing files in the resulting tar file.
I have now replaced tar-fs with tar (node-tar) and this has solved the problem, which suggests that tar-fs is not working properly. This was the code using tar-fs:
const fs = require('fs');
const tar = require('tar-fs');
const { pipeline } = require('stream');

exports.compress = function (opts, callback) {
    // ensure callback
    callback = callback || function () {};
    // utility
    const error = function (err) {
        callback(err);
    };
    // ensure opts
    opts = opts || {};
    // ensure src and dest
    if (!opts.src) return error(new Error("No source for compress!"));
    if (!opts.dest) return error(new Error("No destination for compress!"));
    const tarstrm = tar.pack(opts.src, {});
    const deststrm = fs.createWriteStream(opts.dest);
    pipeline(tarstrm, deststrm, (err) => {
        if (err) {
            console.error('An error occurred:', err);
            error(err);
            // process.exitCode = 1;
        } else {
            // archive written successfully
            callback(null);
        }
    });
}
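For reference, the node-tar replacement could look roughly like this; tar.c (the create shorthand) returns a promise when a file option is given, and file/cwd are part of node-tar's documented options. The compress signature is kept from the code above; this is a sketch, not a drop-in tested implementation:

const tar = require('tar');

exports.compress = function (opts, callback) {
    callback = callback || function () {};
    opts = opts || {};
    if (!opts.src) return callback(new Error("No source for compress!"));
    if (!opts.dest) return callback(new Error("No destination for compress!"));
    // tar.c writes the archive to opts.dest, reading entries relative to opts.src
    tar.c({ file: opts.dest, cwd: opts.src }, ['.'])
        .then(() => callback(null))
        .catch(callback);
};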
So after checking with the code below whether any .xml files are present, how do I access the file? I need to access the file to parse its contents and display them.
var fileWatcher = require("chokidar");
var watcher = fileWatcher.watch("./*.xml", {
    ignored: /[\/\\]\./,
    usePolling: true,
    persistent: true,
});

// Add event listeners.
watcher.on("add", function (path) {
    console.log("File", path, "has been added");
});
I'm assuming, based on chokidar's docs, that it's used to watch for file changes in a directory?
If you want to open a file in Node.js, just use the filesystem ('fs') module.
const fs = require('fs')
//open file synchronously
let file;
try {
    file = fs.readFileSync(/* provide path to file */)
} catch (e) {
    // if the file does not exist
    file = {}
    console.log(e)
}

//asynchronously
fs.readFile(/* file path */, (err, data) => {
    if (err) throw err;
    // do stuff with data
});
EDIT: as a little extra, you can enable async/await for fs:
const fs = require('fs')
const { promisify } = require('util')
const readFileAsync = promisify(fs.readFile);
(async function () {
    try {
        const file = await readFileAsync(/* file path */)
    } catch (e) {
        // handle the error if the file does not exist...
    }
})();
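On Node 10 and later you can skip promisify entirely, since fs exposes a promise-based API directly (this assumes a reasonably recent Node version):

const fsp = require('fs').promises;

(async function () {
    try {
        const file = await fsp.readFile(/* file path */)
    } catch (e) {
        // handle the error if the file does not exist...
    }
})();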
If you want to open the file as soon as it is added, you could do this:
const fs = require('fs')
var fileWatcher = require("chokidar");
var watcher = fileWatcher.watch("./*.xml", {
    ignored: /[\/\\]\./,
    usePolling: true,
    persistent: true,
});

// Add event listeners.
watcher.on("add", function (path) {
    console.log("File", path, "has been added");
    fs.readFile(path, (err, data) => {
        if (err) throw err;
        // do stuff with data
    });
});
This is the code I wrote to minify all the .js files in a directory:
var http = require('http');
var testFolder = './tests/';
var UglifyJS = require("uglify-js");
var fs = require('fs');
var glob = require("glob");
var fillnam = "";

hello();

function hello() {
    glob("gen/*.js", function (er, files) {
        //console.log(files);
        for (var i = 0; i < files.length; i++) {
            fillnam = files[i];
            console.log("File Name " + fillnam);
            fs.readFile(fillnam, 'utf8', function (err, data) {
                if (err) {
                    console.log(err);
                }
                console.log(fillnam + " " + data);
                var result = UglifyJS.minify(data);
                var gtemp_file = fillnam.replace(".js", "");
                console.log(gtemp_file);
                fs.writeFile(gtemp_file + ".min.js", result.code, function (err) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log("File was successfully saved.");
                    }
                });
            });
        }
    });
}

http.createServer(function (req, res) {
    res.writeHead(200, { 'Content-Type': 'text/html' });
    res.end('Hello World!');
}).listen(8080);
As a result, a minified file with the same name plus .min.js should be created in the same directory for each input file. But what I am getting is a single file in which the data from all the files overwrites each other. For example, if there are two files in a directory, a.js and b.js, with contents:
var a=10;var b=20;
var name="stack";
What I'm getting is a single file, a.min.js, with the content:
var a=10tack;
Please help.
You need to collect all the file contents first, concatenate them, and then run UglifyJS.minify on the result to be able to save it as a single file.
Something like this (not fully tested):
const testFolder = './tests/';
const UglifyJS = require("uglify-js");
const fs = require('fs');
const readFile = require('util').promisify(fs.readFile);
const glob = require("glob");
function hello() {
    glob("gen/*.js", async (er, files) => {
        let data = [];
        for (const file of files) {
            const fileData = await readFile(file, {
                encoding: 'utf-8'
            });
            data.push(fileData);
        }
        const uglified = UglifyJS.minify(data.join('\n'));
        // minify() returns an object; the generated source is in .code
        fs.writeFile('main.min.js', uglified.code, (err) => {
            if (err) console.log(err);
        });
    });
}

hello();
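If, as the question describes, you instead want a separate .min.js per input file, the original bug is the shared fillnam variable: every readFile callback fires after the for loop has already finished, so they all see the last file name. Scoping the file name per iteration fixes it. A minimal sketch, untested but using the same APIs as above:

function minifyEach() {
    glob("gen/*.js", (er, files) => {
        files.forEach((file) => {
            // 'file' is scoped to this iteration, so each readFile
            // callback sees the right name
            fs.readFile(file, 'utf8', (err, data) => {
                if (err) return console.log(err);
                const result = UglifyJS.minify(data);
                fs.writeFile(file.replace(/\.js$/, '.min.js'), result.code, (err) => {
                    if (err) console.log(err);
                    else console.log(file + " was successfully minified.");
                });
            });
        });
    });
}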
I have created a function on AWS Lambda using Node.js version 6.10.
I need to create a PDF file from an HTML string and send that file in an email.
Following is my code:
exports.handler = function index(event, context, callback) {
    var toAddress = event.to;
    var fromAddress = event.from;
    var subject = event.subject;
    var MailBody = event.mailBody;
    var PDFHTML = event.PDFHTML;
    var LabelHTML = event.LabelHtml;
    var options = {
        format: 'Legal',
        "header": {
            "height": "25mm",
        }
    }
    pdf.convertHTMLString(LabelHTML, '/tmp/LabelDetails.pdf', function (err, res1) {
        if (err) {
            console.log(err);
            callback(err, false);
        } else {
            pdf.convertHTMLString(PDFHTML, '/tmp/DiagramDetails.pdf', function (err, res1) {
                if (err) {
                    console.log(err);
                    callback(null, false);
                } else {
                    merge(['/tmp/LabelDetails.pdf', '/tmp/DiagramDetails.pdf'], '/tmp/Final.pdf', function (err) {
                        if (err) {
                            console.log(err);
                            callback(null, false);
                        } else {
                            /* Send mail code */
                            callback(null, true);
                        }
                    });
                }
            });
        }
    });
};

var fs = require("fs");
var pdf = require('html-to-pdf');
var merge = require('easy-pdf-merge');
var nodemailer = require('nodemailer');
var path = require("path");
When I try to convert the HTML string to a PDF file, it throws the error EROFS: read-only file system.
The same code works perfectly fine as a plain Node.js script outside of Lambda.
After doing more research on this issue I found out that AWS grants write access only to the /tmp folder, so I used file paths of the form /tmp/FileName.pdf as shown above, but the issue is still there.
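One way to narrow this down is to confirm from inside the handler that /tmp itself is writable. If the check below succeeds but convertHTMLString still fails with EROFS, the library is most likely writing some intermediate file outside /tmp (for example next to its own module files), which is read-only on Lambda. A minimal sketch; the test path is illustrative:

var fs = require("fs");

exports.handler = function (event, context, callback) {
    try {
        // /tmp is the only writable location in the Lambda filesystem
        fs.writeFileSync('/tmp/write-test.txt', 'ok');
        console.log('/tmp is writable');
    } catch (e) {
        // if this throws, the problem is not the PDF library at all
        console.log('cannot write to /tmp:', e);
        return callback(e);
    }
    callback(null, true);
};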
I have created a function to require my modules (middlewares and controllers), but I want to initialize the middlewares first.
My code below initializes both kinds of modules at the same time:
function loadModule(app, modulePath) {
    var folder = path.join(__dirname, modulePath);
    fs.readdir(folder, function (err, files) {
        _.forEach(files, function (file) {
            var controllerPath = path.join(folder, file);
            fs.stat(controllerPath, function (err, stats) {
                if (stats.isFile() && file !== 'Controller.js') {
                    var loadingModulePath = './' + modulePath + '/' + file;
                    console.log('Loading module : ' + loadingModulePath);
                    var Module = require(loadingModulePath)(app);
                }
            });
        });
    });
}

loadModule(app, 'middlewares');
loadModule(app, 'controllers');
Issue: sometimes the controllers are initialized first, sometimes the middlewares...
Edit #1:
const express = require('express'),
    app = express(),
    async = require('async');

function loadModule(module, app) {
    var folder = path.join(__dirname, module);
    fs.readdir(folder, function (err, files) {
        _.forEach(files, function (file) {
            var controllerPath = path.join(folder, file);
            fs.stat(controllerPath, function (err, stats) {
                if (stats.isFile() && file !== 'Controller.js') {
                    var loadingModulePath = './' + module + '/' + file;
                    console.log('Loading module : ' + loadingModulePath);
                    var Module = require(loadingModulePath)(app);
                }
            });
        });
    });
}

async.series([
    function (callback, app) {
        loadModule('middlewares', app);
        callback(null);
    },
    function (callback, app) {
        loadModule('controllers', app);
        callback(null);
    }
], function (err, results) {
    console.log(err, results);
});
Issue with edit #1: app is undefined...
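Two things are going on here. First, async.series calls each task with only a callback argument, so the extra app parameter shadows the outer app with undefined, which is exactly the reported error. Second, loadModule only kicks off asynchronous readdir/stat calls and returns immediately, so calling callback(null) right after it does not actually wait for the middlewares to finish loading before the controllers start. Since this runs once at startup, the simplest fix is the synchronous fs API, which makes the ordering deterministic. A minimal sketch, assuming the same folder layout and Controller.js exclusion as above:

const express = require('express');
const path = require('path');
const fs = require('fs');
const app = express();

function loadModule(app, modulePath) {
    const folder = path.join(__dirname, modulePath);
    // readdirSync/statSync block until done, so the call order below
    // is the initialization order
    fs.readdirSync(folder).forEach(function (file) {
        const fullPath = path.join(folder, file);
        if (fs.statSync(fullPath).isFile() && file !== 'Controller.js') {
            console.log('Loading module : ./' + modulePath + '/' + file);
            require('./' + modulePath + '/' + file)(app);
        }
    });
}

loadModule(app, 'middlewares'); // guaranteed to finish first
loadModule(app, 'controllers');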