nodejs + documentdb warm up error - node.js

I've been trying to add DocumentDB support to my Node.js Express application, as explained here: https://azure.microsoft.com/en-us/documentation/articles/documentdb-nodejs-application/
If a user makes a request immediately after the application starts, she gets an error, presumably because my DocumentDB database isn't ready yet.
What I want is to make the user wait until database initialization is done. How can I achieve this, and what is the general approach in Node.js for handling initialization that takes place during application start-up?
var express = require('express');
....
var DocumentDBClient = require('documentdb').DocumentClient;
var config = require('./config');
var StoryDao = require('./models/storyDao');
var Home = require('./controllers/home');

var docDbClient = new DocumentDBClient(config.endPoint, {
    masterKey: config.authKey
});
var storyDao = new StoryDao(docDbClient, config.databaseId, config.storyCollectionId);
var home = new Home(storyDao);
storyDao.init();
....
app.listen(app.get('port'), function () {
    console.log('app started on http://localhost:' + app.get('port') + '; press Ctrl-C to terminate.');
});

Standing up a DocumentDB account is a one-time setup that should take less than five minutes. The easiest option is simply to wait for it to finish.
Otherwise, you could set up a mock DAO, which would also be useful from a unit-testing perspective :)
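For the more general question of gating requests on start-up initialization, one common pattern is to hold off on app.listen() until the DAO has finished initializing. A minimal sketch, assuming init() can accept a Node-style completion callback (that signature is an assumption, not something taken from the question or the tutorial):

// Assumption: storyDao.init() accepts a completion callback.
storyDao.init(function (err) {
    if (err) {
        console.error('Failed to initialize DocumentDB:', err);
        process.exit(1);
    }
    // Only start accepting requests once the database is ready.
    app.listen(app.get('port'), function () {
        console.log('app started on http://localhost:' + app.get('port'));
    });
});

Until init() completes, the server is simply not listening, so clients cannot hit a half-initialized database.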

Related

My node socket.io connection get automatically disconnect after a short period of inactivity

I have spent a long time on Google but haven't found a solution. I saw many related questions on Stack Overflow, but none of them solved my problem.
In brief: I am building a real-time chat room. It works fine as long as I keep sending messages from either side, but if I leave it idle for a short period (no messages sent from either side), the socket disconnects automatically even though the browser is still open, and messages sent from one room are no longer delivered to the other. Please help me.
var express = require('express'); // node code
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io').listen(server);
var redis = require('redis');

var redis_client = redis.createClient(6379, '192.168.43.49');
redis_client.auth('rajesh');
redis_client.select(1);

io.sockets.on('connection', function (socket) {
    /* used for transferring a message from one node to the other */
    socket.on('node_message', function (data) {
        // look up the recipient's socket ids and forward the message
        redis_client.smembers(data.user_hash, function (err, reply) {
            if (reply != null) {
                reply.forEach(function (value) {
                    // include the sender's user_hash so the receiving end can
                    // validate the other party (needed for the multi-page case,
                    // where the message must be shown in every panel)
                    var res = { 'user_hash': socket.user_hash, 'msg': data.msg };
                    io.to(value).emit('node_message', res);
                });
            }
        });
    });

    socket.on('disconnect', function () {
        console.log('lost connection', socket.user_hash);
    });
});
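For reference, socket.io's heartbeat behaviour is configurable when the server is attached, and an overly tight timeout is one common reason for idle disconnects. A minimal sketch of the relevant options (the values are arbitrary examples, and this is a general setting to check rather than a confirmed fix for the code above; the reason argument on disconnect is available in recent socket.io versions):

var io = require('socket.io').listen(server, {
    // how often the server sends a heartbeat ping (ms)
    pingInterval: 25000,
    // how long to wait for the client's pong before dropping the socket (ms)
    pingTimeout: 60000
});

io.sockets.on('connection', function (socket) {
    socket.on('disconnect', function (reason) {
        // a reason of 'ping timeout' would point at the heartbeat settings above
        console.log('disconnected:', reason);
    });
});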

Nodejs Clustering with Sticky-Session

const cluster = require('cluster');
const http = require('http');
const numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
    console.log(`Master ${process.pid} is running`);

    // Fork workers.
    for (let i = 0; i < numCPUs; i++) {
        cluster.fork();
    }

    cluster.on('exit', (worker, code, signal) => {
        console.log(`worker ${worker.process.pid} died`);
    });
} else {
    // Workers can share any TCP connection.
    // In this case it is an HTTP server.
    var sticky = require('sticky-session');
    var express = require('express');
    var app = express();

    app.get('/', function (req, res) {
        console.log('worker: ' + cluster.worker.id);
        res.send('Hello World!');
    });

    var server = http.createServer(app);
    sticky.listen(server, 3000);
    console.log(`Worker ${process.pid} started`);
}
I looked up the documentation for Node.js clustering and sticky-session, as well as another Stack Overflow answer regarding this:
var cluster = require('cluster');
var http = require('http');
var sticky = require('sticky-session');
var express = require('express');
var app = express();

app.get('/', function (req, res) {
    console.log('worker: ' + cluster.worker.id);
    res.send('Hello World!');
});

var server = http.createServer(app);
sticky.listen(server, 3000);
If the snippet above is run without forking it works fine, but in the clustered example above it never works: the workers are started but the server is never initialised.
I have read that there is an alternative called sticky-cluster. Can somebody give a proper, authoritative answer on this topic that will be useful for people looking for the same thing? The other main issue with this is the app.locals object, which is used to store variables for an app instance; with multiple server instances this breaks, because values will differ across instances, so this approach causes a big problem and the app breaks. When answering, please don't just copy-paste some code; give a detailed answer describing the approach, its benefits and its shortcomings.
I am not looking for an answer limited to the sticky-session Node.js module; I welcome any other approach that uses all processor cores while ensuring session continuity.
If it involves a RedisStore or a MongoDB store, that's fine. What I want to know is the standard approach for a clustered Node.js application with session continuity.
https://github.com/indutny/sticky-session
https://nodejs.org/api/cluster.html
https://stackoverflow.com/a/37769107/3127499
There is a small problem in your code.
The "sticky-session" module already uses the Node.js "cluster" module internally. You don't need to call fork() yourself, because sticky-session already does it for you. Let's see how:
var cluster = require('cluster'); // Only required if you want the worker id
var sticky = require('sticky-session');

var server = require('http').createServer(function (req, res) {
    res.end('worker: ' + cluster.worker.id);
});

sticky.listen(server, 3000);
Calling sticky.listen() already spawns the workers for you. See the listen() implementation below:
function listen(server, port, options) {
    if (!options)
        options = {};

    if (cluster.isMaster) {
        var workerCount = options.workers || os.cpus().length;

        var master = new Master(workerCount, options.env);
        master.listen(port);
        master.once('listening', function () {
            server.emit('listening');
        });

        return false;
    }

    return true;
}
The line var master = new Master(workerCount, options.env) is responsible for spawning the workers. See the Master() implementation below:
function Master(workerCount, env) {
    net.Server.call(this, {
        pauseOnConnect: true
    }, this.balance);

    this.env = env || {};

    this.seed = (Math.random() * 0xffffffff) | 0;
    this.workers = [];

    debug('master seed=%d', this.seed);

    this.once('listening', function () {
        debug('master listening on %j', this.address());
        for (var i = 0; i < workerCount; i++)
            // spawning workers
            this.spawnWorker();
    });
}
So when you call sticky.listen(server, port) you are, in effect, already calling cluster.fork(); hence you should not call fork() explicitly again.
Now your code should look like this:
var cluster = require('cluster'); // Only required if you want the worker id
var sticky = require('sticky-session');

var server = require('http').createServer(function (req, res) {
    res.end('worker: ' + cluster.worker.id);
});

// sticky.listen() will return false if called in the master process
if (!sticky.listen(server, 3000)) {
    // Master code
    server.once('listening', function () {
        console.log('server started on 3000 port');
    });
} else {
    // Worker code
}
One important thing to remember is that each spawned worker has its own event loop and memory, so resources are not shared between them.
You can use Redis, or other npm modules such as "memored", to share resources among the workers; a rough Redis-based sketch follows below.
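As an illustration of the Redis option for HTTP sessions, here is a minimal sketch using express-session with a Redis-backed store (it assumes the connect-redis v6-style constructor and a redis v3-style client on localhost; module versions and option values are assumptions, not taken from the question):

var express = require('express');
var session = require('express-session');
var RedisStore = require('connect-redis')(session); // connect-redis <= v6 API
var redis = require('redis');

var app = express();
var redisClient = redis.createClient(); // assumes Redis on localhost:6379

app.use(session({
    store: new RedisStore({ client: redisClient }),
    secret: 'keyboard cat', // use a real secret in production
    resave: false,
    saveUninitialized: false
}));

app.get('/', function (req, res) {
    // The counter lives in Redis, so every worker sees the same value.
    req.session.views = (req.session.views || 0) + 1;
    res.send('views: ' + req.session.views);
});

Because the session data lives in Redis rather than in any one worker's memory, it no longer matters which worker handles a given request.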
Hope this solves both of your issues.
I think you are confusing sticky sessions with a shared memory store.
Let me try to help:
The sticky-session module balances requests by the client's IP address, so a given client will always connect to the same worker, and socket.io will work as expected, but across multiple processes.
Implementing sticky sessions means you now have multiple nodes accepting connections. However, it does NOT guarantee that these nodes share the same memory, as each worker has its own event loop and internal memory state.
In other words, data held by one worker may not be available to the other workers, which explains the issue you pointed out:
...the other main issue with this is the app.locals object, which is used to store variables for an app instance; with multiple server instances this breaks, because values will differ across instances, so this approach causes a big problem and the app breaks...
Thus, to resolve this, you would need something like Redis so that the data can be shared across the nodes.
Hope this helps!
If I understand your question correctly, you are dealing with in-memory data storage or session storage. This is one of the known problems with session-based authentication in a multi-node setup or a cluster: suppose a call goes to Node A and creates sessionA, but the next call goes to Node B; Node B knows nothing about sessionA. People try to solve this with sticky sessions, but that is not enough. Good practice is to use an alternative approach such as JWT or OAuth2. I prefer JWT for service-to-service communication: a JWT stores nothing server-side and is stateless, so it works well with REST, which is also stateless. The specification is here: https://www.rfc-editor.org/rfc/rfc7519. If you need some sort of refresh token, then you do need storage, which can be anything like Redis, MongoDB or any SQL-based DB. For further reading about JWT in Node.js (a small signing/verification sketch follows the links below):
https://jwt.io/
https://jwt.io/introduction/
https://www.npmjs.com/package/jsonwebtoken
https://cloud.google.com/iot/docs/how-tos/credentials/jwts#iot-core-jwt-refresh-nodejs
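As a rough illustration of the stateless flow described above, here is a minimal signing and verification sketch using the jsonwebtoken package (the secret, payload and expiry are placeholders):

var jwt = require('jsonwebtoken');

var SECRET = 'replace-with-a-real-secret'; // placeholder

// Issued once at login; any worker can verify it later without shared state.
var token = jwt.sign({ userId: 42 }, SECRET, { expiresIn: '1h' });

// On each request, any worker verifies the token independently.
try {
    var payload = jwt.verify(token, SECRET);
    console.log('authenticated user:', payload.userId);
} catch (err) {
    console.error('invalid or expired token');
}

Since verification needs only the shared secret (or public key), no worker has to remember anything about previous requests.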

How can I read the mp3 files from the mongodb that I have stored using gridFS?

My code:
storeAudio.js
var mongoose = require('mongoose');
var fs = require('fs');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

// establish mongoDB connection
mongoose.Promise = global.Promise;
var conn = mongoose.createConnection('mongodb://localhost:27017/aHolyBoly');

conn.once('open', function () {
    var gfs = Grid(conn.db);
    // var db = new mongo.Db('aHolyBoly', new mongo.Server("127.0.0.1", 27017));
    // var gfs = Grid(db, mongo);

    var writeStream = gfs.createWriteStream({
        filename: 'song1.mp3'
    });

    fs.createReadStream('../list/hero.mp3').pipe(writeStream);

    writeStream.on('close', function (file) {
        console.log(file.filename + ' written to db');
    });
});
My mp3 file is written to the DB successfully.
Now my aim is to create routes with Express so that I can use them as an API, consumed as providers in my Angular 2 app.
server.js
var mongoose = require('mongoose');
var fs = require('fs');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

// establish mongoDB connection
mongoose.Promise = global.Promise;
var conn = mongoose.createConnection('mongodb://localhost:27017/aHolyBoly');

conn.once('open', function () {
    var gfs = Grid(conn.db);
    // var db = new mongo.Db('aHolyBoly', new mongo.Server("127.0.0.1", 27017));
    // var gfs = Grid(db, mongo);

    var writeStream = gfs.createWriteStream({
        filename: 'song1.mp3'
    });

    fs.createReadStream('../list/hero.mp3').pipe(writeStream);

    writeStream.on('close', function (file) {
        console.log(file.filename + ' written to db');
    });
});
Somewhere here in my code I am unable to read back the song that was written to the DB using GridFS.
Link to the gridFS API: gridFS API Link
I want to create an application using Ionic 2 that contains a search bar; when I enter a song name it should display the matching songs and let me play them.
I have been struggling with this for a week and have asked it three times on Stack Overflow, but I haven't been able to get an answer.
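For the reading side, gridfs-stream exposes createReadStream, which can be piped straight into an Express response. A minimal sketch (the route path, filename handling and connection reuse are illustrative assumptions, not taken from the question):

var express = require('express');
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

var app = express();
var conn = mongoose.createConnection('mongodb://localhost:27017/aHolyBoly');

conn.once('open', function () {
    var gfs = Grid(conn.db);

    // GET /songs/song1.mp3 streams the stored file back to the client
    app.get('/songs/:filename', function (req, res) {
        res.set('Content-Type', 'audio/mpeg');
        gfs.createReadStream({ filename: req.params.filename })
            .on('error', function () { res.sendStatus(404); })
            .pipe(res);
    });

    app.listen(3000);
});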
I suspect you are new to server-side web development, because your question somewhat misses the point of building a Node.js application.
When writing a server, the server cannot access files on the client's computer; the app acts as a thin client. Don't be confused by the fact that Node.js runs on your computer as well: it creates its own local server that behaves like a real remote machine, even though it lives on the same computer you are using. For that reason you shouldn't modify files outside your project directory; even if you can, it is a hack and not what Node.js is made for.
If you want a program specifically to modify files on your computer, you should look at a thick-client language such as C, C++ or Java. Those run only on your computer and have full access to your files.
If you are sticking with JavaScript, it also has the power to modify your files, but that is still not what it is intended for. You have to grant it special permissions, and it is still odd to give your browser access to all your personal files. Note that this JavaScript code runs on the client side, i.e. you don't need Node.js or Express; the client code can be written and run in a browser alone. Check out this article for more information.
But if you are still convinced that Node is the way to go, you have some options. First (though I don't think it is your answer): write an upload website that uploads the mp3 files to your server (still the same computer, but placing them in the project location). It would work and look like an actual website, which is what Node.js was made for. Finally, what you're asking for can sort of be achieved, but it's really hacky: you can try just referencing the file from the application path, like
var fs = require('fs'),
    path = require('path'),
    filePath = path.join(__dirname, '../outerfile.mp3'),
    filePath2 = path.join('/var/outerfile.mp3');
However, how this behaves is probably platform-dependent, and it is not likely to work at all.
My final advice: consider using another language for your project.

Nodejs memory usage

I think I have a problem with memory usage. I have a Node.js server:
var arcadeGames = require('./server/js/arcade');
var cardsGames = require('./server/js/cards');
(these require modules that export objects built from the .json data)
var http = require('http');
var express = require('express');
var app = express();
var server = http.createServer(app);
//var io = require('socket.io').listen(server);
//var fs = require('fs');

app.get('/specificCategory/:id', function (req, res, next) {
    switch (req.params.id) {
        case "Cards":
            console.log(cardsGames.titles);
            break;
        case "Action":
            console.log(actionGames.titles);
            break;
        default:
            console.log("undefined");
    }
    //var specificCaategory = require('./server/js/'+ req.params.id.toLowerCase());
    //var categoryTitlesAndUrlThumbs = spe
    //console.log(specificCaategory.titles);
})
(Both ways behave the same, the commented-out one and the one with the switch.)
The GET route is called from the browser by clicking a category (e.g. Cards, Action), which sends the request over HTTP from an AngularJS controller. The problem is that the first click on each category logs to the server console fine, but after that the server takes a long time to log the info (and what will happen in the browser if this is already so slow on the server?).
Have I done something that loads memory so heavily?
Add res.end(); after your switch case.
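In other words, the route handler never sends a response, so each request is left hanging, which likely explains why subsequent clicks appear so slow. A minimal sketch of the corrected handler (it still only logs, and is simply terminated with res.end()):

app.get('/specificCategory/:id', function (req, res, next) {
    switch (req.params.id) {
        case "Cards":
            console.log(cardsGames.titles);
            break;
        case "Action":
            console.log(actionGames.titles);
            break;
        default:
            console.log("undefined");
    }
    res.end(); // finish the response so the request doesn't stay pending
});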

Socket.io memory leak

Here is my full node.js code:
var express = require('express'),
    http = require("http"),
    app = express(),
    server = http.createServer(app),
    sio = require('socket.io'),
    io = null;

server.listen(3000, "localhost");
io = sio.listen(server);

io.sockets.on('connection', function (socket) {
    socket.emit('access', "connected");
});
When I check the node.exe process (I'm running on localhost on Windows), the memory usage increases every time a user connects, but it doesn't decrease when a user disconnects. Is something wrong with my setup?
It can also be the redisClient you use that keeps the reference.
Either try another Redis client, or try setting redisClient = null; and see if that changes anything.
It could also be something else in your code.
You can also try Node Inspector, which runs in the Chrome developer tools, where you can see the different objects that are held in memory.
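To check whether memory actually tracks the number of open sockets, one simple approach is to log process.memoryUsage() together with a connection counter. A rough sketch (assuming the same io instance as in the question; the interval and log format are arbitrary):

var connections = 0;

io.sockets.on('connection', function (socket) {
    connections++;
    socket.on('disconnect', function () {
        connections--;
    });
});

// Log heap usage alongside the number of open sockets every 10 seconds.
setInterval(function () {
    var heapMB = (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(1);
    console.log('sockets:', connections, 'heapUsed:', heapMB + ' MB');
}, 10000);

Note that V8 does not return memory to the OS immediately after a disconnect; heap usage typically only drops once garbage collection runs, so a flat process-level number right after a disconnect is not by itself proof of a leak.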
