Seed a Node.js Express Heroku app

var mongoose = require('mongoose');
var Smartphone = require('./models/smartphone'); // adjust to wherever the model is defined

mongoose.connect('mongodb://localhost:27017/smartphones');
mongoose.connection.on('error', console.error.bind(console, 'MongoDB connection error:'));

var smartphones = [
  new Smartphone({
    title: "V3",
    usp: "20MP Softlight Camera",
    image_path: "/image/phone_v3max.png",
    qty: 1,
    price: 200
  }),
  new Smartphone({
    title: "V5",
    usp: "Feel the Real Speed",
    image_path: "/image/phone_v5.png",
    qty: 1,
    price: 450
  })
];

// count completed saves so we only disconnect after every one has called back
var done = 0;
for (var i = 0; i < smartphones.length; i++) {
  smartphones[i].save(function(err, result) {
    if (err) {
      console.error(err);
    }
    done++;
    if (done === smartphones.length) {
      exit();
    }
  });
}

function exit() {
  mongoose.disconnect();
}
When I work locally on the Node.js app, I can run the seed file (smartphoneIndex-seeder.js) that is included in the project from the command line. But how should that be done once the app has been pushed to Heroku? How can I seed the data there? The app is running, but the pages that depend on the seed data unfortunately do not show up. Does anyone have an idea how I could load the seed data so the entire app runs properly?

It sounds to me like you want some sort of pre-boot process. You probably want to write some code that executes before you tell your application to start listening on a port. This code will look at the database, check whether any data exists, and if it doesn't, call some code (such as your smartphoneIndex-seeder function) to push the data into the database.
Once that decision is made, you can call your normal Express app.listen() code, as in the sketch below.
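Here is a minimal sketch of that pre-boot check, assuming the seeder is refactored to export a function that takes a completion callback, and that a Heroku config var such as MONGODB_URI holds the database URL (the model and file paths are assumptions based on the question):
var express = require('express');
var mongoose = require('mongoose');
var Smartphone = require('./models/smartphone'); // assumed model location
var seed = require('./smartphoneIndex-seeder');  // assumed to export function(done)

var app = express();
var port = process.env.PORT || 3000; // Heroku assigns the port via PORT

// on Heroku, read the connection string from a config var instead of localhost
mongoose.connect(process.env.MONGODB_URI || 'mongodb://localhost:27017/smartphones');

// seed only when the collection is empty, then start listening
Smartphone.count({}, function(err, count) {
  if (err) throw err;
  if (count === 0) {
    seed(function() {
      app.listen(port);
    });
  } else {
    app.listen(port);
  }
});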

Related

Why is AWS Lambda execution time long when using pg-promise?

I started using AWS Lambda to perform a very simple task: executing an SQL query to retrieve records from an RDS Postgres database and creating an SQS message based on the result.
Because Amazon only provides the aws-sdk module by default (on the node 4.3 runtime) and we need to execute this SQL query, we have to create a custom deployment package that includes pg-promise. Here is the code I'm using:
console.info('Loading the modules...');
var aws = require('aws-sdk');
var sqs = new aws.SQS();
var config = {
  db: {
    username: '[DB_USERNAME]',
    password: '[DB_PASSWORD]',
    host: '[DB_HOST]',
    port: '[DB_PORT]',
    database: '[DB_NAME]'
  }
};
var pgp = require('pg-promise')({});
// note the '@' separating the credentials from the host
var cn = `postgres://${config.db.username}:${config.db.password}@${config.db.host}:${config.db.port}/${config.db.database}`;
// module-level code runs once per container, so this connection object is
// re-used across invocations whenever Lambda re-uses the container
console.info('Connecting to the database...');
var db = pgp(cn);
console.log('loading the lambda function...');
console.log('loading the lambda function...');
exports.handler = function(event, context, callback) {
  var now = new Date();
  console.log('Current time: ' + now.toISOString());
  // Select users that need to be updated
  var query = [
    'SELECT *',
    'FROM "users"',
    'WHERE "users"."registrationDate"<=${now}',
    'AND "users"."status"=1',
  ].join(' ');
  console.info('Executing SQL query: ' + query);
  // db.any allows an empty result set; db.many would reject when no rows match,
  // which would make the ids.length check below unreachable
  db.any(query, { status: 2, now: now.toISOString() }).then(function(data) {
    var ids = [];
    data.forEach(function(user) {
      ids.push(user.id);
    });
    if (ids.length === 0) {
      callback(null, 'No user to update');
    } else {
      var sqsMessage = {
        MessageBody: JSON.stringify({ action: 'USERS_UPDATE', data: ids }), /* required */
        QueueUrl: '[SQS_USER_QUEUE]', /* required */
      };
      console.log('Sending SQS Message...', sqsMessage);
      sqs.sendMessage(sqsMessage, function(err, sqsResponse) {
        console.info('SQS message sent!');
        if (err) {
          callback(err);
        } else {
          callback(null, ids.length + ' users were affected. SQS Message created: ' + sqsResponse.MessageId);
        }
      });
    }
  }).catch(function(error) {
    callback(error);
  });
};
When testing my Lambda function and looking at CloudWatch Logs, the function itself took around 500 ms to run, but Lambda reports that it actually took 30502.48 ms.
So I'm guessing it's taking 30 seconds to unzip my 318 KB package and start executing it? That seems absurd to me, or am I missing something? I tried uploading the zip directly and also uploading my package to S3 to check whether it was faster, but I still get the same latency.
I noticed that the Python runtime can natively perform SQL requests without any custom packaging...
All our applications are written in Node, so I don't really want to move away from it, but I have a hard time understanding why Amazon does not provide basic npm modules for database interactions.
Any comments or help are welcome. At this point I'm not sure Lambda would be beneficial for us if it takes 30 seconds to run a script that is triggered every minute...
Anyone facing the same problem?
UPDATE: This is how you need to close the connection as soon as you don't need it anymore (thanks again to Vitaly for his help):
exports.handler = function(event, context, callback) {
  [...]
  db.any(query, { status: 2, now: now.toISOString() }).then(function(data) {
    pgp.end(); // <-- important: closes the connection pool right after the request
    [...]
The execution time should be measured based on the length of operations being executed, as opposed to how long it takes for the application to exit.
There are many libraries out there that make use of a connection pool in one form or another. Those typically terminate after a configurable period of inactivity.
In the case of pg-promise, which in turn uses node-postgres, that period of inactivity is determined by the parameter poolIdleTimeout, which defaults to 30 seconds. With pg-promise you can access it via pgp.pg.defaults.poolIdleTimeout.
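For example, a quick way to shorten that idle period (a minimal sketch; the value is in milliseconds and the property is forwarded to the underlying node-postgres defaults):
var pgp = require('pg-promise')({});
// let idle connections close after 3 seconds instead of the 30-second default
pgp.pg.defaults.poolIdleTimeout = 3000;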
If you want your process to exit after the last query has been executed, you need to shut down the connection pool, by calling pgp.end(). See chapter Library de-initialization for details.
It is also shown in most of the code examples, as those need to exit right after finishing.

MongoDB connections not releasing after query

I made a Node app using MongoDB and Socket.IO, and at some point inserts stop working: MongoDB no longer inserts documents after a certain number have been inserted (I don't know exactly how many). So I checked the connections with the mongodb shell:
db.serverStatus().connections
Before starting the node app, it says:
{ "current" : 1, "available" : 3275, "totalCreated" : NumberLong(639) }
After inserting some docs:
{ "current" : 51, "available" : 3225, "totalCreated" : NumberLong(708) }
After turning off the node app:
{ "current" : 1, "available" : 3275, "totalCreated" : NumberLong(708) }
This is the server-side code. (I'm using an external MongoDB module, so it may look a little different from the official MongoDB module for Node.js; the module is just a simple wrapper around MongoDB with a Promise-based API.)
const app = require('http').createServer();
const io = require('socket.io')(app);
const am2 = require('am2');
...
am2.connect('localhost', '...', { ... });
const sockets = {};
io.on('connection', (socket) => {
  // save the socket into the sockets object
  sockets[socket.id] = socket;
  // release the socket on disconnection
  socket.on('disconnect', () => {
    delete sockets[socket.id];
  });
  ...
  // pseudo doc-inserting event
  socket.on('/insert/some/docs', (data) => {
    am2.insert({ ... }).then(() => {
      socket.emit('/after/insert/some/docs', {});
    }).catch( ... );
  });
});
When a client emits the '/insert/some/docs' event, the server inserts a document into MongoDB. The first few tries work well, but after some insertions it stops working.
I think this happens because lots of connections are still alive after the insertion is done, but I don't know why. With an RDBMS like MySQL every connection must be closed after the operation is done, but in MongoDB that should not be necessary (as far as I know).
I don't know why this is happening, so I would really appreciate a hand.
I solved this by releasing the cursor after getting the data from MongoDB. Make sure to release it, or it holds on to connections from your pool and eventually your application stops working.
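The am2 wrapper isn't shown here, but with the official mongodb driver the same fix looks roughly like this (a sketch, not the asker's actual code):
const cursor = collection.find({});
cursor.toArray((err, docs) => {
  // close the cursor explicitly so its connection is returned to the pool
  cursor.close();
  if (err) { /* handle the error */ }
  // ... use docs ...
});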

server.close() doesn't work in a Vows teardown

I'm trying to write some Vows-based tests for my run-of-the-mill Express app.
Here's the test source:
var vows = require('vows');
var assert = require('assert');
var startApp = require('./lib/start-app.js');

var suite = vows.describe('tournaments');
suite.addBatch({
  "When we setup the app": {
    topic: function() {
      return startApp();
    },
    teardown: function(topic) {
      if (topic && topic.close) {
        topic.close();
      }
    },
    "it works": function(topic) {
      assert.isObject(topic);
    }
  }
});
suite.run();
And here's start-app.js:
var app = require('../../app.js');

function start() {
  var server = app.listen(56971, 'localhost');
  return server;
}

module.exports = start;
app.js exports a regular Express.js app, created with express().
The problem is that whenever I run the test, topic.close() doesn't work in the teardown function, and the test hangs forever after succeeding. I've tried searching the web and adding lots and lots of console.logs, all to no avail.
I'm on the Windows x64 build of Node.js 4.2.0, and I'm using assert#1.3.0 and vows#0.8.1.
Any idea how I can make my test stop hanging?
Here's what I did to solve the issue in a project I was contributing to: a final batch just to close the server.
suite.addBatch({
  'terminate server': {
    topic: function() {
      // a regular node require('http') server, re-used in several batches
      server.close(this.callback);
    },
    'should be listening': function() {
      /* This test is necessary to ensure the topic executes.
       * A topic without tests will not be executed. */
      assert.isTrue(true);
    }
  }
}).export(module);
Before adding this test, the suite would never finish executing. You can check the results at https://travis-ci.org/fmalk/node-static/builds/90381188

How to make sure that Node.js calls to MongoDB are really asynchronous

I am trying to write a Node.js application, and we need to deploy it in production.
We need to make sure that Node.js does not hang when there are long-running processes/operations, like queries or database server access.
So I am trying to make a call to Mongo or to the filesystem that takes a very long time to finish, so that I can verify that the Node.js server is free to serve other requests while that takes place.
Sadly, I am not able to produce an insert that takes Mongo a really long time to finish, or to make a synchronous call to the filesystem.
Can someone tell me how to do it?
Thanks
Tuco
The trick is to put a console.log after the block that makes the call and another console.log inside the callback; if the message after the call appears first in the console, the call really is asynchronous.
I'm using mongojs as the driver for Mongo:
collection.find({}, function(err, res) {
  console.log("done");
});
console.log("sending signal");
If it's asynchronous, the console shows:
sending signal
done
Now, for chained behavior, you can do something like this:
var dbChain = (function() {
  var chain = [], cursor = 0, busy = false;
  var chainin = {
    push: function(aFn) {
      if (!busy) {
        chainin.reset();
        aFn();
        busy = true;
      } else {
        chain.push(aFn);
      }
    },
    next: function() {
      // run the next queued call (starting with chain[0]), or reset once drained;
      // incrementing the cursor before this check would skip the first queued call
      if (chain[cursor]) {
        chain[cursor++]();
      } else {
        chainin.reset();
      }
    },
    reset: function() {
      chain = [];
      cursor = 0;
      busy = false;
    }
  };
  return chainin;
})();
Then every db call you make has to go through the chain:
dbChain.push( /* a function that makes the db call */ );
and every callback has to end with:
dbChain.next();
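Putting it together, a hypothetical usage with the same mongojs collection might look like this (the collection and documents are made up for illustration):
dbChain.push(function() {
  collection.insert({ name: 'first' }, function(err, res) {
    dbChain.next(); // kick off the next queued call, if any
  });
});
dbChain.push(function() {
  collection.insert({ name: 'second' }, function(err, res) {
    dbChain.next();
  });
});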

Node/Mongo application crashes on Amazon EC2 and I cannot access the server

I have a Node app with a Mongo server on an Amazon EC2 instance. It works great, but I just added a new API call, and every time I call it, the server freezes and I cannot access/SSH into it for several hours. While this is happening, my server goes down, which makes the app that relies on it unusable and my users angry...
This code works perfectly on my localhost, but as soon as I run it on my server, it freezes. My suspicion is that it may be crashing Mongo? I have no idea why this would happen...
If anyone has any ideas about what could be going wrong, please let me know.
Node is using Express. The send_error function performs a res.send({some error}). db.CommentModel returns mongoose.model('comment', Comment);
In app.js:
app.get('/puzzle/comment/:id', auth.restrict, puzzle.getComments);
In the file which defines getComments:
exports.getComments = function(req, res) {
  var userID = _u.stripNonAlphaNum(req.params.id);
  var CommentModel = db.CommentModel;
  CommentModel.find({ user: userID }, function(e, comments) {
    if (e) {
      err.send_error(err.DB_ERROR, res);
    } else if (!comments) {
      err.send_error(err.DB_ERROR, res);
    } else if (comments.length == 0) {
      res.send([]);
    } else {
      var commentIDs = [];
      for (var i = 0; i < comments.length; i++) {
        commentIDs.push({ _id: comments[i].puzzle });
      }
      var TargetModel = pApp.findPuzzleModel(_u.stripNonAlphaNum(req.apiKey));
      TargetModel.find({ removed: false, $or: commentIDs }, function(e, puzzles) {
        if (e) {
          err.send_error(err.DB_ERROR, res);
        } else if (!puzzles) {
          err.send_error(err.DB_ERROR, res);
        } else {
          res.send(puzzles);
        }
      });
    }
  });
};
It sounds like your query is causing something on your server (potentially Mongo) to consume a very large amount of CPU, as that is commonly what causes the kind of SSH inaccessibility you are seeing.
You should try reading over the logs of your Mongo instance and seeing if there are any long-running queries.
MongoDB provides an internal profiler for examining long-running commands. Try setting the profiling level to 1, running the command, and examining the log output.
More details on the profiler are available at http://www.mongodb.org/display/DOCS/Database+Profiler
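For example, in the mongo shell (the 100 ms slow-operation threshold here is just an illustrative choice):
// profile operations slower than 100 ms on the current database
db.setProfilingLevel(1, 100)
// then inspect the slowest recorded operations
db.system.profile.find().sort({ millis: -1 }).limit(5).pretty()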
