Initialize modules asynchronously - node.js

I have created a function to require my modules (middlewares and controllers), but I want to initialize the middlewares first.
My code below initializes both module folders at the same time:
var fs = require('fs'),
    path = require('path'),
    _ = require('lodash'); // or 'underscore'

function loadModule(app, modulePath) {
    var folder = path.join(__dirname, modulePath);
    fs.readdir(folder, function(err, files) {
        _.forEach(files, function(file) {
            var controllerPath = path.join(folder, file);
            fs.stat(controllerPath, function(err, stats) {
                if (stats.isFile() && file !== 'Controller.js') {
                    var loadingModulePath = './' + modulePath + '/' + file;
                    console.log('Loading module : ' + loadingModulePath);
                    var Module = require(loadingModulePath)(app);
                }
            });
        });
    });
}
loadModule(app, 'middlewares');
loadModule(app, 'controllers');
Issue: sometimes the controllers are initialized first, sometimes the middlewares...
Edit #1:
const express = require('express'),
    app = express(),
    async = require('async'),
    fs = require('fs'),
    path = require('path'),
    _ = require('lodash');

function loadModule(module, app) {
    var folder = path.join(__dirname, module);
    fs.readdir(folder, function(err, files) {
        _.forEach(files, function(file) {
            var controllerPath = path.join(folder, file);
            fs.stat(controllerPath, function(err, stats) {
                if (stats.isFile() && file !== 'Controller.js') {
                    var loadingModulePath = './' + module + '/' + file;
                    console.log('Loading module : ' + loadingModulePath);
                    var Module = require(loadingModulePath)(app);
                }
            });
        });
    });
}

async.series([
    function(callback, app) {
        loadModule('middlewares', app);
        callback(null);
    },
    function(callback, app) {
        loadModule('controllers', app);
        callback(null);
    }
], function(err, results) {
    console.log(err, results);
});
Issue with edit #1: app is undefined...
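A minimal sketch of one possible fix (not part of the original post), assuming the only requirement is that the middlewares folder finishes loading before the controllers folder. Note that async.series task functions receive only a callback, so the extra app parameter above shadows the outer app with undefined; the sketch below avoids the problem by closing over app and sequencing with fs.promises (Node 10+):

const fs = require('fs').promises;
const path = require('path');

// Loads every file in modulePath, awaiting each stat so the whole
// folder is guaranteed to be finished when the promise resolves.
async function loadModules(app, modulePath) {
    const folder = path.join(__dirname, modulePath);
    const files = await fs.readdir(folder);
    for (const file of files) {
        const stats = await fs.stat(path.join(folder, file));
        if (stats.isFile() && file !== 'Controller.js') {
            console.log('Loading module : ./' + modulePath + '/' + file);
            require('./' + modulePath + '/' + file)(app);
        }
    }
}

// Middlewares are fully loaded before the controllers start.
loadModules(app, 'middlewares')
    .then(() => loadModules(app, 'controllers'))
    .catch(err => console.error(err));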

Related

change data of a file which contain an array using node

I have created a file called config.js which looks like the following:
const config = {
    staticFiles: {
        name: [
            './',
            './index.html',
            './script.js',
            './icon.jpg'
        ]
    },
    outputFolderName: "D:\\DemoApp",
    sourceApplicationParentPath: "D:\\DemoApp\\"
};
Now I am reading the list of files from the sourceApplicationParentPath folder using Node and have to update the staticFiles array of the above file. I am not sure how I should do it. Can someone please help.
Thanks in advance.
config.js
const config = {
    staticFiles: {
        name: [
            './',
            './index.html',
            './script.js',
            './icon.jpg',
        ]
    },
    outputFolderName: 'D:\\DemoApp',
    sourceApplicationParentPath: 'D:\\DemoApp'
};
module.exports = config;
index.js
var fs = require('fs'),
    config = require('./config'),
    util = require('util');

fs.readdir(config.sourceApplicationParentPath, function(err, files) {
    if (err) return console.log(err.message);
    for (var i = 0; i < files.length; i++) {
        // only add files that are not already listed
        if (config.staticFiles.name.indexOf(`./${files[i]}`) == -1) {
            config.staticFiles.name.push('./' + files[i]);
        }
        // after the last file, serialize the updated object back to config.js
        if (i == (files.length - 1)) {
            var buffer = `const config = \n ${util.inspect(config, false, 2, false)}; \n module.exports = config;`;
            fs.writeFile('./config.js', buffer, function(err) {
                err || console.log('Data replaced \n');
            });
        }
    }
});
The above code is tested and working fine.
You can add or change an object, an array, or a value in config.js without creating duplicate entries.
config.js
const config = {
    staticFiles: {
        name: [
            './',
            './index.html',
            './script.js',
            './icon.jpg'
        ]
    },
    outputFolderName: "D:\\DemoApp",
    sourceApplicationParentPath: "D:\\DemoApp\\"
};
exports.config = config;
Code for the file from which you want to change the data:
var fs = require('fs');
var config = require('./config.js');
// path of the directory
var directoryPath = "D:\\DemoApp\\";
var data = config.config;
// passing directoryPath and a callback function
fs.readdir(directoryPath, function (err, files) {
    // handling error
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    var dataToUpdate = data.staticFiles.name;
    // listing all files using forEach
    files.forEach(function (file) {
        // do whatever you want to do with the file
        console.log(file);
        dataToUpdate.push(file);
    });
    data.staticFiles.name = dataToUpdate;
    var value = 'const config = ' + JSON.stringify(data) + ';' + '\n' + 'exports.config = config';
    fs.writeFile('./config.js', value, er => {
        if (er) {
            throw er;
        } else {
            console.log('success');
        }
    });
});
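Both answers rewrite config.js by serializing a live object back into JavaScript source. A simpler alternative (a sketch, not from either answer) is to keep the data in a config.json file instead, assuming nothing in the config needs to be executable code, since JSON round-trips cleanly:

var fs = require('fs');

// config.json is assumed to hold the same data that config.js did
var config = JSON.parse(fs.readFileSync('./config.json', 'utf8'));

fs.readdir(config.sourceApplicationParentPath, function (err, files) {
    if (err) throw err;
    files.forEach(function (file) {
        // skip entries that are already present
        if (config.staticFiles.name.indexOf('./' + file) === -1) {
            config.staticFiles.name.push('./' + file);
        }
    });
    fs.writeFileSync('./config.json', JSON.stringify(config, null, 2));
});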

Uglifyjs node js

This is the code I have written to minify all the JS files in a directory:
var http = require('http');
var testFolder = './tests/';
var UglifyJS = require("uglify-js");
var fs = require('fs');
var glob = require("glob");
var fillnam = "";

hello();

function hello() {
    glob("gen/*.js", function (er, files) {
        //console.log(files);
        for (var i = 0; i < files.length; i++) {
            fillnam = files[i];
            console.log("File Name " + fillnam);
            fs.readFile(fillnam, 'utf8', function (err, data) {
                if (err) {
                    console.log(err);
                }
                console.log(fillnam + " " + data);
                var result = UglifyJS.minify(data);
                var gtemp_file = fillnam.replace(".js", "");
                console.log(gtemp_file);
                fs.writeFile(gtemp_file + ".min.js", result.code, function(err) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log("File was successfully saved.");
                    }
                });
            });
        }
    });
}

http.createServer(function (req, res) {
    res.writeHead(200, {'Content-Type': 'text/html'});
    res.end('Hello World!');
}).listen(8080);
As a result, a minified file with the same base name and a .min.js extension should be created for each JS file in the directory.
But what I am getting is a single file in which all the files' data is overwritten. For example, if there are two files a.js and b.js in the directory with the contents:
var a=10;var b=20;
var name="stack";
What I'm getting is a single file a.min.js with the content:
var a=10tack;
Please help.
You need to collect all the file contents first, concatenate them, and then run UglifyJS.minify on the result to save it as a single file.
Something like this (not fully tested):
const testFolder = './tests/';
const UglifyJS = require("uglify-js");
const fs = require('fs');
const readFile = require('util').promisify(fs.readFile);
const glob = require("glob");

function hello() {
    glob("gen/*.js", async (er, files) => {
        let data = [];
        for (const file of files) {
            const fileData = await readFile(file, {
                encoding: 'utf-8'
            });
            data.push(fileData);
        }
        // minify() returns an object; the minified source is in .code
        const uglified = UglifyJS.minify(data.join('\n'));
        fs.writeFile('main.min.js', uglified.code, (err) => {
            if (err) console.log(err);
        });
    });
}
hello();
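If the goal is instead one .min.js per input file, as the question asked, the overwriting comes from the shared fillnam variable: by the time each readFile callback runs, the loop has already finished and fillnam holds the last file name. A sketch of a per-file version (uglify-js 3.x) that keeps the file name in a binding scoped to each iteration:

const UglifyJS = require("uglify-js");
const fs = require('fs');
const glob = require("glob");

glob("gen/*.js", (er, files) => {
    files.forEach((file) => {
        // 'file' is scoped to this iteration, so each callback
        // still sees the name it was started with
        fs.readFile(file, 'utf8', (err, data) => {
            if (err) return console.log(err);
            const result = UglifyJS.minify(data);
            if (result.error) return console.log(result.error);
            fs.writeFile(file.replace(/\.js$/, '.min.js'), result.code, (err) => {
                console.log(err || file + ' was successfully minified.');
            });
        });
    });
});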

How to know non blocking Recursive job is complete in nodejs

I have written this non-blocking Node.js sample recursive file search code; the problem is I am unable to figure out when the task is complete, e.g. to calculate the time taken for the task.
fs = require('fs');
searchApp = function() {
    var dirToScan = 'D:/';
    var stringToSearch = 'test';
    var scan = function(dir, done) {
        fs.readdir(dir, function(err, files) {
            files.forEach(function (file) {
                var abPath = dir + '/' + file;
                try {
                    fs.lstat(abPath, function(err, stat) {
                        if (!err && stat.isDirectory()) {
                            scan(abPath, done);
                        }
                    });
                }
                catch (e) {
                    console.log(abPath);
                    console.log(e);
                }
                matchString(file, abPath);
            });
        });
    };
    var matchString = function (fileName, fullPath) {
        if (fileName.indexOf(stringToSearch) != -1) {
            console.log(fullPath);
        }
    };
    var onComplte = function () {
        console.log('Task is completed');
    };
    scan(dirToScan, onComplte);
};
searchApp();
The above code does the search perfectly, but I am unable to figure out when the recursion will end.
It's not that straightforward; I guess you have to rely on a timer and promises.
fs = require('fs');
var Q = require('q');
searchApp = function() {
    var dirToScan = 'D:/';
    var stringToSearch = 'test';
    var promises = [];
    var traverseWait = 0;
    var onTraverseComplete = function() {
        Q.allSettled(promises).then(function() {
            console.log('Task is completed');
        });
    };
    // reset the timer on every file seen; when no new file has been
    // seen for 5 seconds, assume the traversal is done
    var waitForTraverse = function() {
        if (traverseWait) {
            clearTimeout(traverseWait);
        }
        traverseWait = setTimeout(onTraverseComplete, 5000);
    };
    var scan = function(dir) {
        fs.readdir(dir, function(err, files) {
            files.forEach(function (file) {
                var abPath = dir + '/' + file;
                var future = Q.defer();
                try {
                    fs.lstat(abPath, function(err, stat) {
                        if (!err && stat.isDirectory()) {
                            scan(abPath);
                        }
                    });
                }
                catch (e) {
                    console.log(abPath);
                    console.log(e);
                }
                matchString(file, abPath);
                future.resolve(abPath);
                promises.push(future.promise);
                waitForTraverse();
            });
        });
    };
    var matchString = function (fileName, fullPath) {
        if (fileName.indexOf(stringToSearch) != -1) {
            console.log(fullPath);
        }
    };
    scan(dirToScan);
};
searchApp();
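The timer approach can fire early on a slow disk and wastes five idle seconds on a fast one. A deterministic alternative (a sketch, not from the original answer) is to count pending fs operations and call the completion callback exactly when the counter drops back to zero:

var fs = require('fs');
var path = require('path');

function scan(dir, stringToSearch, done) {
    var pending = 1; // counts the readdir below
    function finish() {
        if (--pending === 0) done();
    }
    fs.readdir(dir, function (err, files) {
        if (err) return finish();
        files.forEach(function (file) {
            var abPath = path.join(dir, file);
            if (file.indexOf(stringToSearch) !== -1) console.log(abPath);
            pending++; // one lstat per entry
            fs.lstat(abPath, function (err, stat) {
                if (!err && stat.isDirectory()) {
                    pending++; // one recursive scan per directory
                    scan(abPath, stringToSearch, finish);
                }
                finish(); // the lstat itself is done
            });
        });
        finish(); // the readdir itself is done
    });
}

var start = Date.now();
scan('D:/', 'test', function () {
    console.log('Task is completed in ' + (Date.now() - start) + ' ms');
});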

Proper way of using a mongodb connection or express.js across multiple node.js files

I want to split my node.js file into multiple files, like:
user.js for dealing with user database CRUD
group.js for dealing with group database CRUD
database.js for initializing database
restful.js for initializing express.js
My concern is whether this is a common or good practice.
So the following are the files after splitting:
app.js
require('./restful');
require('./database');
config.js
module.exports = {
    database: {
        username: "mark",
        password: "pass",
        host: "127.0.0.1",
        port: "27017",
        name: "mydb"
    }
};
database.js
var config = require('./config');
var mongodb = require('mongodb');
var url = 'mongodb://' + config.database.username + ':' + config.database.password + '@' + config.database.host + ':' + config.database.port + '/' + config.database.name;

var Database = function() {
    var db;
    // always return the singleton instance, if it has been initialised once already.
    if (Database.prototype._singletonInstance) {
        return Database.prototype._singletonInstance;
    }
    this.getDatabase = function() {
        return db;
    };
    this.get = this.getDatabase;
    mongodb.MongoClient.connect(url, function(err, result) {
        if (err || result === undefined || result === null) {
            throw err;
        } else {
            db = result;
        }
    });
    Database.prototype._singletonInstance = this;
};
new Database();
console.log('MongoDb set up OK!');
exports.Database = Database;
restful.js
var express = require('express');
var bodyParser = require('body-parser');
var restful_express = express();
restful_express.use(bodyParser());

var allowCrossDomain = function(req, res, next) {
    res.header('Access-Control-Allow-Origin', '*');
    res.header('Access-Control-Allow-Methods', 'GET, POST');
    res.header('Access-Control-Expose-Headers', 'token');
    res.header('Access-Control-Allow-Headers', 'Content-Type, X-XSRF-TOKEN, Last-Event-ID, Content-Language, Accept-Language, Accept');
    res.header('X-XSS-Protection', 0);
    next();
};
restful_express.use(allowCrossDomain);

restful_express.listen(process.env.PORT || 7272, function(err) {
    console.log('ExpressJs listening at 7272');
});

var Restful = function() {
    // always return the singleton instance, if it has been initialised once already.
    if (Restful.prototype._singletonInstance) {
        return Restful.prototype._singletonInstance;
    }
    this.getRestful = function() {
        return restful_express;
    };
    Restful.prototype._singletonInstance = this;
};
new Restful();
console.log('Express.js Restful set up OK!');
exports.Restful = Restful;
user.js
var config = require('./config');
var database = require('./database');
var db = database.Database();
var restful = require('./restful').Restful().getRestful();

module.exports = function() {
    restful.get('/getUser/:email', function(req, res) {
        db.get().collection("user").findOne({ email: req.param('email') }, function(err, result) {
            if (result) {
                res.send({ name: result.name });
            } else {
                res.statusCode = 400;
                res.send({ msg: 'Email not found' });
            }
        });
    });
};
group.js
var config = require('./config');
var database = require('./database');
var db = database.Database();
var restful = require('./restful').Restful().getRestful();

module.exports = function() {
    restful.get('/getGroup/:name', function(req, res) {
        db.get().collection("group").findOne({ name: req.param('name') }, function(err, result) {
            if (result) {
                res.send({ name: result.name });
            } else {
                res.statusCode = 400;
                res.send({ msg: 'Group not found' });
            }
        });
    });
};
So database.js can be simplified to:
var config = require('./config');
var mongodb = require('mongodb');
var db;
var url = 'mongodb://' + config.database.username + ':' + config.database.password + '@' + config.database.host + ':' + config.database.port + '/' + config.database.name;

module.exports = {
    initDb: function () {
        mongodb.MongoClient.connect(url, function(err, result) {
            if (err || result === undefined || result === null) {
                throw err;
            } else {
                db = result;
            }
        });
    },
    getDb: function () {
        if (db === null || db === undefined) {
            this.initDb();
        }
        return db;
    }
};
It's a perfectly legal way, but quite an overcomplicated one.
A module is a singleton by default: regardless of how many times you call require('some_module'), its scaffolding code is executed only once. Try the following code in the REPL:
//index.js
var i = 0;
i++;
exports.show = function() {
    return i;
};
exports.increment = function() {
    return i++;
};
Then in the REPL:
> var one = require('./index.js');
> one.show();
1
> var two = require('./index.js');
> two.show();
1
> one.increment();
2
> two.show();
2
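Applying that observation to the question, a minimal sketch of a db.js that shares one connection through Node's module cache, without the singleton-constructor machinery (the callback style follows the 2.x mongodb driver usage already shown in the question):

// db.js — require it from any file; the module cache makes it a singleton
var mongodb = require('mongodb');
var db = null;

exports.connect = function (url, done) {
    if (db) return done(null, db);
    mongodb.MongoClient.connect(url, function (err, result) {
        if (err) return done(err);
        db = result;
        done(null, db);
    });
};

exports.get = function () {
    if (!db) throw new Error('call connect() first');
    return db;
};

user.js and group.js can then simply require('./db') and call get() once connect() has completed in app.js.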

node phantomjs seo running via node write the file

I'm wondering if this is the right way:
node.js
var fs = require('fs');
var path = require('path');
var childProcess = require('child_process');
var phantomjs = require('phantomjs');
var binPath = phantomjs.path;
var childArgs = [
    path.join(__dirname, 'phantomjs-runner.js'),
    'http://localhost:3000'
];
childProcess.execFile(binPath, childArgs, function(err, stdout, stderr) {
    if (err) {
    }
    if (stderr) {
    }
    fs.writeFile(__dirname + '/public/snapshots/index.html', stdout, function(err) {
        if (err) {
            console.log(err);
        } else {
            console.log("The file was saved!");
        }
    });
});
phantomjs-runner.js
var system = require('system');
var url = system.args[1] || '';
if (url.length > 0) {
    var page = require('webpage').create();
    page.open(url, function (status) {
        if (status == 'success') {
            var delay, checker = (function() {
                var html = page.evaluate(function () {
                    var body = document.getElementsByTagName('body')[0];
                    if (body.getAttribute('data-status') == 'ready') {
                        return document.getElementsByTagName('html')[0].outerHTML;
                    }
                });
                if (html) {
                    clearInterval(delay); // delay comes from setInterval below
                    console.log(html);
                    phantom.exit();
                }
            });
            delay = setInterval(checker, 100);
        }
    });
}
Maybe there could be a better way, like:
clearInterval(delay);
fs.writeFile
phantom.exit();
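For what it's worth, PhantomJS ships its own fs module, so the snapshot could indeed be written from inside phantomjs-runner.js. A sketch of that idea (the output file name is a placeholder):

// inside the if (html) branch of phantomjs-runner.js
var pfs = require('fs'); // PhantomJS's fs module, not Node's
if (html) {
    clearInterval(delay);
    pfs.write('snapshot.html', html, 'w'); // synchronous write in PhantomJS
    phantom.exit();
}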
How can I manage, i.e., different URLs and different files? I mean:
http://localhost:3000 → index.html
http://localhost:3000/blog → blog.html
http://localhost:3000/blog/postid → postid.html
ENDED UP
'use strict';
var fs = require('fs'),
    path = require('path'),
    childProcess = require('child_process'),
    phantomjs = require('phantomjs'),
    binPath = phantomjs.path,
    config = require('../config/config');

var env = (process.env.NODE_ENV === 'production') ? 'production' : null,
    currentPath = (env) ? config.proot + '/build/default/snapshots' : config.proot + '/default/snapshots';

function normalizeUrl(url) {
    if ((typeof url === 'undefined') || !url) {
        return '';
    }
    if (url.charAt(0) === '/') {
        url = url.substring(1);
    }
    return '/' + url.replace(/\/$/, "");
}

function normalizePage(route) {
    if (!route) {
        return 'index';
    }
    var chunks = route.substring(1).split('/');
    var len = chunks.length;
    if (len === 1) {
        return chunks[0];
    }
    chunks.shift();
    // I get the id
    return chunks[0];
}

module.exports = function (url) {
    var route = normalizeUrl(url);
    var page = normalizePage(route);
    var childArgs = [
        path.join(__dirname, './phantomjs-runner.js'),
        config.url + route
    ];
    childProcess.execFile(binPath, childArgs, function(err, stdout, stderr) {
        if (err) {
        }
        if (stderr) {
        }
        fs.writeFile(currentPath + '/' + page + '.html', stdout, function(err) {
            if (err) {
                console.log(err);
            } else {
                console.log("The file was saved!");
            }
        });
    });
};
So I worked out the parameters; I'm still waiting for a way to get the output ^^
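A usage sketch of the module exported above (assuming it is saved as snapshot.js; the route list is illustrative):

var snapshot = require('./snapshot');

// '' maps to index.html via normalizePage(); the others take the last chunk
['', '/blog', '/blog/postid'].forEach(function (route) {
    snapshot(route); // writes index.html, blog.html and postid.html
});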
