Using Promise in afterSave - node.js

I am trying to run the following code in Cloud Code:
//Code for sending push notifications after an update is created
Parse.Cloud.afterSave('AssignmentMod', function(request) {
    //Only send if the mod is new
    console.log("MOD SAVED. NOTIFICATIONS CHECK.")
    if (!request.object.existed()) {
        console.log("NEW MOD ADDED. NOTIFICATIONS START.")
        var mod = request.object
        //Get the users who have these classes
        var userType = Parse.Object.extend('User')
        var usersQuery = new Parse.Query(userType)
        usersQuery.equalTo('currentClasses', mod.get("parentClass"))
        //Get the users who removed the assignment
        var modsType = Parse.Object.extend('AssignmentMod')
        //If it was an assignment and removed at some point
        var mods1Query = new Parse.Query(modsType)
        mods1Query.notEqualTo("assignment", null)
        mods1Query.equalTo("assignment", mod.get("assignment"))
        mods1Query.equalTo("type", "remove")
        //If it was an assignment mod and was removed at some point
        var mods2Query = new Parse.Query(modsType)
        mods2Query.notEqualTo("assignmentMod", null)
        mods2Query.equalTo("assignmentMod", mod.id)
        mods2Query.equalTo("type", "remove")
        //Get the remove mods for this particular update
        var modsQuery = new Parse.Query.or(mods1Query, mods2Query)
        //Run the user and mods queries
        Parse.Promise.when(
            usersQuery.find(),
            modsQuery.find()
        ).then(function(users, removeMods) {
            console.log("QUERY 1 COMPLETE.")
            //Get all users that copied this remove mod
            var copyType = Parse.Object.extend('ModCopy')
            var copyQuery = new Parse.Query(copyType)
            copyQuery.containedIn("assignmentMod", removeMods)
            copyQuery.find().then(function(copies) {
                console.log("QUERY 2 COMPLETE.")
                var copyUsers = copies.map(function(copy) {
                    return copy.get("user")
                })
                //Get the devices of users that belong to the class, did not remove the assignment, and have an iOS device
                var deviceQuery = new Parse.Query(Parse.Installation)
                deviceQuery.equalTo("deviceType", "ios")
                deviceQuery.containedIn("user", users)
                deviceQuery.notContainedIn("user", copyUsers)
                //Send the push notification
                console.log("PUSHING NOTIFICATIONS")
                Parse.Push.send(
                    {
                        where: deviceQuery,
                        data: {
                            alert: "You have new updates."
                        }
                    },
                    { userMasterKey: true }
                ).then(
                    function() {
                        console.log("SUCCESSFULLY SENT NOTIFICATIONS.")
                    },
                    function(error) {
                        throw "Error sending push notifications:" + error.code + " : " + error.message
                        console.log("FAILED TO SEND NOTIFICATIONS")
                    }
                )
            },
            function(reason) {
                throw "Error: " + reason
                print("ERROR: " + reason)
            })
        },
        function(reason) {
            throw "Error: " + reason
            print("ERROR: " + reason)
        })
    }
})
I can see in my logs that "MOD SAVED. NOTIFICATIONS CHECK." and "NEW MOD ADDED. NOTIFICATIONS START." are both logged. However, I get no other logs after that, and no push notifications are sent. I should at least see the later logs, and I see none. Is there an issue with Parse Server using promises inside an afterSave or something? Why is my code seemingly halting execution when it hits the first promise?

Your afterSave() cloud code must end with a response.success() or response.error(error) call.
You should wait for all your promises to complete and then finish your code with something like:
promise.then(function() {
    console.log("END of afterSave()");
    response.success();
}, function(error) {
    console.error("error in afterSave(): " + error.message);
    response.error(JSON.stringify({code: error.code, message: error.message}));
});
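Applied to the question's hook, every branch then has to funnel into a single final .then() that terminates the request. A minimal sketch of that restructuring, assuming the pre-3.0 Parse Server signature where the hook receives a response object (as this answer does), and assuming the question's {userMasterKey: true} was meant to be the standard {useMasterKey: true}; untested:
Parse.Cloud.afterSave('AssignmentMod', function(request, response) {
    if (request.object.existed()) {
        return response.success(); // nothing to do on updates
    }
    var mod = request.object;
    // ... build usersQuery, mods1Query, mods2Query, and modsQuery exactly as in the question ...
    Parse.Promise.when(usersQuery.find(), modsQuery.find()).then(function(users, removeMods) {
        var copyQuery = new Parse.Query(Parse.Object.extend('ModCopy'));
        copyQuery.containedIn("assignmentMod", removeMods);
        return copyQuery.find().then(function(copies) {
            var copyUsers = copies.map(function(copy) { return copy.get("user"); });
            var deviceQuery = new Parse.Query(Parse.Installation);
            deviceQuery.equalTo("deviceType", "ios");
            deviceQuery.containedIn("user", users);
            deviceQuery.notContainedIn("user", copyUsers);
            // returning the promise keeps the chain alive until the push is sent
            return Parse.Push.send({
                where: deviceQuery,
                data: { alert: "You have new updates." }
            }, { useMasterKey: true });
        });
    }).then(function() {
        console.log("SUCCESSFULLY SENT NOTIFICATIONS.");
        response.success();
    }, function(error) {
        console.error("Error sending push notifications: " + error.code + " : " + error.message);
        response.error(JSON.stringify({ code: error.code, message: error.message }));
    });
});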

This turned out to be an issue with Heroku hosting. I have not tracked it down past that. If the same code is hosted on AWS or another hosting platform, these issues do not occur. There is likely some server setting in Heroku's default setup that needs to be changed. If you encounter this issue and have more of a solution beyond "switch off Heroku hosting", then submit an answer and I will accept it.

Related

res.send() is not sending current response, instead keeps last one

This is some of my code that I have in my index.js. It's waiting for the person to visit url.com/proxy, and then it loads up my proxy page, which is really just a form that sends back an email and a code. From my MongoDB database, I grab the user's order using the code, which contains some information I need (like the product and the message they're trying to get). For some reason, it seems like it's responding before it gets this information and then holds onto it for the next time the form is submitted.
The newline in my res.send(product + '\n' + message) isn't working either, but that's not a big deal right now.
But, for example, the first time I fill out the form I'll get a blank response. The second time, I'll get the response to whatever I filled in for the first form, and the third time I'll get the second response. I'm fairly new to web development and feel like I'm doing something obviously wrong but can't seem to figure it out. Any help would be appreciated, thank you.
app.get('/proxy', function(req, res) {
    res.sendFile(__dirname + "/views/proxy.html");
});

var message = "";
var product = "";

app.post('/getMessage', function(req, res) {
    returnMsg(req.body.user.code, req.body.user.email);
    //res.setHeader('Content-Type', 'text/plain');
    res.send(product + "\n" + message);
});

function returnMsg(code, email) {
    MongoClient.connect(url, function(err, db) {
        var cursor = db.collection('Orders').find({ "order_id": Number(code) })
        cursor.each(function(err, doc) {
            assert.equal(err, null);
            if (doc != null) {
                message = doc["message"];
                product = doc["product"];
            }
            else {
                console.log("wtf");
                // error code here
            }
        });
        console.log(email + " + " + message);
        var document = {
            "Email": email,
            "Message": message
        }
        db.collection("Users").insertOne(document);
        db.close();
    });
}
You need to do lots of reading about how asynchronous programming works in node.js. There are significant design problems with this code:
You are using module level variables instead of request-level variables.
You are not correctly handling asynchronous responses.
All of this makes a server that simply does not work correctly. You've found one of the problems already: your async response finishes AFTER you send your response, so you end up sending the previously saved response, not the current one. In addition, if multiple users are using your server, their responses will tromp on each other.
The core design principle here is that you need to learn how to program with asynchronous operations. Any function that uses an asynchronous response and wants to return that value back to the caller needs to either accept a callback and deliver the async value via the callback, or return a promise and deliver the value via a resolved promise. The caller then needs to use that callback or promise to fetch the async value when it is available, and only send the response then.
In addition, all data associated with a request needs to stay "inside" the request handle or the request object - not in any module level or global variables. That keeps the request from one user from interfering with the requests from another user.
To understand how to return a value from a function with an asynchronous operation in it, see How do I return the response from an asynchronous call?.
What ends up happening in your code is this sequence of events:
1. Incoming request for /getMessage.
2. You call returnMsg().
3. returnMsg() initiates a connection to the database and then returns.
4. Your request handler calls res.send() with whatever was previously in the message and product variables.
5. Sometime later, the database connect finishes and you call db.collection().find() and then iterate the cursor.
6. Some time later, the cursor iteration has the first result, which you put into your message and product variables (where those values sit until the next request comes in).
In working out how your code should actually work, there are some things about your logic that are unclear. You are assigning message and product inside of cursor.each(). Since cursor.each() is a loop that can run many iterations, which value of message and product do you actually want to use in the res.send()?
Assuming you want the last message and product value from your cursor.each() loop, you could do this:
app.post('/getMessage', function(req, res) {
    returnMsg(req.body.user.code, req.body.user.email, function(err, message, product) {
        if (err) {
            // send some meaningful error response
            res.status(500).end();
        } else {
            res.send(product + "\n" + message);
        }
    });
});

function returnMsg(code, email, callback) {
    let callbackCalled = false;
    MongoClient.connect(url, function(err, db) {
        if (err) {
            return callback(err);
        }
        var cursor = db.collection('Orders').find({
            "order_id": Number(code)
        });
        var message = "";
        var product = "";
        cursor.each(function(err, doc) {
            if (err) {
                if (!callbackCalled) {
                    callback(err);
                    callbackCalled = true;
                }
            } else {
                if (doc != null) {
                    message = doc["message"];
                    product = doc["product"];
                } else {
                    console.log("wtf");
                    // error code here
                }
            }
        });
        if (message) {
            console.log(email + " + " + message);
            var document = {
                "Email": email,
                "Message": message
            }
            db.collection("Users").insertOne(document);
        }
        db.close();
        if (!callbackCalled) {
            callback(null, message, product);
        }
    });
}
Personally, I would use promises and use the promise interface in your database rather than callbacks.
This code is still just conceptual because it has other issues you need to deal with such as:
Proper error handling is still largely unfinished.
You aren't actually waiting for things like the insertOne() to finish before proceeding.
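Following the suggestion above to use promises, here is one way the same route could look with the MongoDB driver's promise interface. This is only a sketch under assumptions: a 3.x-style driver where connect() resolves to a client, and the "last matching document wins" semantics chosen above; untested:
function returnMsg(code, email) {
    // connect() with no callback returns a promise (MongoDB driver 3.x+)
    return MongoClient.connect(url).then(function(client) {
        var db = client.db(); // database name comes from the connection URL
        return db.collection('Orders')
            .find({ "order_id": Number(code) })
            .toArray()
            .then(function(docs) {
                var last = docs[docs.length - 1] || {};
                var message = last.message || "";
                var product = last.product || "";
                // actually wait for the insert before resolving
                return db.collection("Users")
                    .insertOne({ "Email": email, "Message": message })
                    .then(function() {
                        return { message: message, product: product };
                    });
            })
            .then(function(result) {
                client.close();
                return result;
            }, function(err) {
                client.close();
                throw err;
            });
    });
}

app.post('/getMessage', function(req, res) {
    returnMsg(req.body.user.code, req.body.user.email).then(function(r) {
        res.send(r.product + "\n" + r.message);
    }, function(err) {
        res.status(500).end();
    });
});
Every value now travels through the promise chain instead of module-level variables, so concurrent requests can no longer see each other's data.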

Node js - error state of a callback in a loop

I am using Node.js to send push notifications to Android devices using GCM and want to delete the device id if I get an error message from Google. The code snippet is attached and runs in a loop (i is the number of device ids).
My issue is that if the call returns an error (err4), all subsequent iterations also see if (err4) as true, and all the ids after the first error are deleted. Why is err4 not getting reset?
for (var i = 0; i < noofdeviceIds; i++) {
    // some more code here to create the 'message'.
    (function(i) {
        gcm.send(message, function(err4, messageId) {
            if (err4) {
                console.log("\nError occured: Notification could not be sent with error: ", err4);
                if (err4 == 'NotRegistered' || err4 == 'InvalidRegistration') {
                    console.log("\nRemoving the entry from the DB.");
                    var removeEntryQuery = "DELETE FROM devicereg WHERE deviceregId = '" + deviceregIds[i] + "'";
                    req._dbConnection.query(removeEntryQuery, function(err5, row) {
                        if (row != 0) { console.log("\nDB returned: ", row); }
                    });
                }
            }
            else {
                console.log("\nMessage sent with message ID: ", messageId);
            }
        });
    })(i);
}

Browsers not updating when sockets get emitted

I'm trying to create a chat system. Here it is:
The top field takes the name of the user while the bottom textarea takes the message. Once the user presses enter, the middle textarea (which is disabled) updates itself with the new record. This is done using Node.js sockets. My problem is that if I open another instance of Google Chrome and type something, the other Google Chrome textarea does not update. I'm puzzled by this since my code should cover this case:
Here is the server.js code that handles the insertion. After it inserts it, it emits a socket with the last insertion so that the index.html can update itself.
io.sockets.on("connection", function(socket) {
    socket.on("send", function(data) {
        mongodb.connect("mongodb://127.0.0.1:27017/myDatabase", function(err, db) {
            if (err) throw err;
            var to_be_inserted = {name: data.name, content: data.content};
            db.collection("chat").insert(to_be_inserted, function(err, objects) {
                if (err) throw err;
                var cursor = db.collection("chat").find().sort({_id: -1}).limit(1);
                cursor.toArray(function(err, docs) {
                    console.log("abc");
                    socket.emit("data_to_be_printed", docs);
                });
            });
        })
    })
})
As you can see once the data is inserted a socket is emitted containing the last row of the db. The index.html should handle this socket by updating itself. Here is the code that handles it:
<script>
    var socket = io.connect("127.0.0.1:1337");
    socket.on("data_to_be_printed", function(cursor) {
        var completed = document.getElementById("chatArea").value;
        for (var i = 0; i < cursor.length; i++) {
            console.log(cursor[i].name + " wrote: " + cursor[i].content);
            var name = cursor[i].name;
            var content = cursor[i].content;
            var name_to_go = name.replace("/\r?\n|\r/g", "");
            var content_to_go = content.replace("/\r?\n|\r/g", "");
            completed += "\n" + name_to_go + ": " + content_to_go;
        }
        document.getElementById("chatArea").value = completed;
    });

    function keyfunction(e) {
        if ((e.keyCode == 13 || e.which == 13) && !e.shiftKey) {
            socketEmitDb();
        }
    }

    function socketEmitDb() {
        var name = document.getElementById("name").value;
        var content = document.getElementById("writtenThing").value;
        console.log("Name: " + name + " |||| content: " + content);
        document.getElementById("name").value = "";
        document.getElementById("writtenThing").value = "";
        if (name.length > 0 && content.length > 0) {
            socket.emit("send", {"name": name, "content": content});
        } else {
            alert("Make sure that the name and the message is not empty");
        }
    }
</script>
Shouldn't the socket be emitted to all open sockets?
In your server program, change socket.emit to io.sockets.emit.
socket.emit sends a message only to the specific socket (client) that connected; io.sockets.emit sends the message to all connected sockets (clients).
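Applied to the server code above, only the emit line needs to change (untested sketch):
db.collection("chat").insert(to_be_inserted, function(err, objects) {
    if (err) throw err;
    var cursor = db.collection("chat").find().sort({_id: -1}).limit(1);
    cursor.toArray(function(err, docs) {
        // broadcast to every connected client, not just the sender
        io.sockets.emit("data_to_be_printed", docs);
    });
});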

redis and watch + multi allows concurrent users

I'm doing a load test on user signup with the same email address for a web service, and the first 10 users that connect simultaneously always manage to register.
I'm using WATCH and MULTI, but that doesn't seem to make any difference.
I'm calling save() to save the user.
this.insert = function(callback) {
    this.preInsert();
    created = new Date();
    updated = new Date();
    // Also with these uncommented it still doesn't work
    // Common.client.watch("u:" + this.username);
    // Common.client.watch("em:" + this.email);
    console.log(ID + " email is locked " + this.email);
    Common.client.multi()
        .set("u:" + this.username, ID)
        .hmset("u:" + ID,
            {"username": this.username
            ,"password": this.password
            ,"email": this.email
            ,"payment_plan": payment_plan
            ,"created": created.getTime()
            ,"updated": updated.getTime()
            ,"avatar": this.avatar})
        .zadd("u:users", 0, ID)
        .sadd("u:emails", this.email)
        .set("u:" + ID + ":stats", 0)
        .set("em:" + this.email, ID)
        .exec();
    this.postInsert();
    if (callback != null)
        callback(null, this);
}
this.save = function(callback) {
    // new user
    if (ID == -1) {
        var u = this;
        Common.client.watch("u:" + this.username);
        Common.client.exists("u:" + this.username, function(error, exists) {
            // This username already exists
            if (exists == 1) {
                Common.client.unwatch();
                if (callback != null)
                    callback({code: 100, message: "This username already exists"});
            }
            else {
                Common.client.watch("em:" + u.email);
                Common.client.get("em:" + u.email, function(err, emailExists) {
                    if (emailExists != null) {
                        Common.client.unwatch();
                        if (callback != null)
                            callback({code: 101, message: "This email is already in use"});
                    }
                    else {
                        Common.client.incr("u:nextID", function(error, id) {
                            if (error) callback(error);
                            else {
                                ID = id;
                                u.insert(callback);
                            }
                        });
                    }
                });
            }
        });
    }
    // existing user
    else {
        var u = this;
        Common.client.get("em:" + this.email, function(err, emailExists) {
            if (emailExists != ID && emailExists) {
                if (callback != null) {
                    callback({code: 101, message: "This email is already in use " + ID + " " + emailExists});
                }
            }
            else {
                u.update(callback);
            }
        });
    }
}
The output almost always is:
1 email is locked test#test.com
2 email is locked test#test.com
3 email is locked test#test.com
4 email is locked test#test.com
5 email is locked test#test.com
6 email is locked test#test.com
7 email is locked test#test.com
8 email is locked test#test.com
9 email is locked test#test.com
10 email is locked test#test.com
Am I doing something wrong, or can redis not handle that much concurrency?
Also this is the definition of Common:
var Common = {
    client: redis.createClient(),
    ...
};
YES! After a night's rest, the solution of course came to me in the shower.
The problem was that I used a single redis connection for the whole app, and every request registered its WATCH on that same connection. Of course redis didn't signal that the keys were modified by a different client, because there was no other client.
I know this thread is 8 months old, but my thoughts may still help someone. There is a problem I still cannot understand; I even started my own thread dedicated to this issue, Redis WATCH MULTI EXEC by one client, where I refer to yours. I now use the "connection per transaction" method, meaning I create a new connection whenever I need to do transactions with WATCH-MULTI-EXEC. For other, atomic operations I use the connection created at app launch. I'm not sure this method is efficient, because creating a new connection means connecting and authorizing, which adds latency, but it works.
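A minimal sketch of that "connection per transaction" idea with the classic node_redis callback API. The function name checkAndReserveEmail is made up for illustration; the key layout follows the question's code; untested:
var redis = require("redis");

// Open a dedicated connection so the WATCH only reacts to writes from OTHER clients.
function checkAndReserveEmail(email, id, callback) {
    var client = redis.createClient(); // one connection per transaction
    client.watch("em:" + email, function(err) {
        if (err) { client.quit(); return callback(err); }
        client.get("em:" + email, function(err, existing) {
            if (err || existing !== null) {
                client.quit();
                return callback(err || {code: 101, message: "This email is already in use"});
            }
            client.multi()
                .set("em:" + email, id)
                .sadd("u:emails", email)
                .exec(function(err, replies) {
                    client.quit();
                    // replies === null means a WATCHed key changed: this signup lost the race
                    callback(err, replies !== null);
                });
        });
    });
}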

node.js redis async query

Hope someone can assist with a (simple) async question on node-redis. I'm trying to load a set from a hash in the redis db and then use that populated set further on. Here's the code snippet:
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash, function(e, o) {
    Object.keys(o).forEach(function(target) {
        // get the "name" from the hash
        redis_client.hget(o[target], "name", function(e, o) {
            if (e) {
                console.log("Error occurred getting key: " + e);
            }
            else {
                redis_client.sadd("newset", o);
            }
        });
    });
});
// the following line prints nothing - why ??
redis_client.smembers("newset", redis.print);
When I examine the contents of "newset" in redis it is populated as expected, but at runtime it shows up as empty. I'm sure it's an async issue. Any help much appreciated!
hgetall is an asynchronous call: when it receives a reply from the redis server, it will eventually invoke your callback. Within your script, though, it returns immediately. Since hgetall returns very fast, Node will immediately run the next statement, smembers. But at this point the sadd statements haven't run yet (even if your system is very fast, because there hasn't been a context switch yet).
What you need to do is make sure smembers isn't called before all the possible sadd calls have executed. redis_client provides the multi function to allow you to queue up all the sadd calls and run a callback when they're all done. I haven't tested this code, but you could try this:
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash, function(e, o) {
    var multi = redis_client.multi();
    var keys = Object.keys(o);
    var i = 0;
    keys.forEach(function (target) {
        // get the "name" from the hash
        redis_client.hget(o[target], "name", function(e, o) {
            i++;
            if (e) {
                console.log("Error occurred getting key: " + e);
            } else {
                multi.sadd("newset", o);
            }
            if (i == keys.length) {
                multi.exec(function (err, replies) {
                    console.log("MULTI got " + replies.length + " replies");
                    redis_client.smembers("newset", redis.print);
                });
            }
        });
    });
});
Some libraries have a built-in equivalent of forEach that allows you to specify a function to be called when the loop is all done. If not, you have to manually keep track of how many callbacks there have been and call smembers after the last one.
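For example, with an ES6 Promise wrapper around hget, Promise.all can play that "run when the loop is all done" role (a sketch on top of the node_redis callback API; untested). Because node_redis sends commands on one connection in order, the smembers queued after the sadd calls will see the fully populated set:
redis_client.hgetall(target_hash, function(e, o) {
    // wrap each hget callback in a promise
    var lookups = Object.keys(o).map(function(target) {
        return new Promise(function(resolve, reject) {
            redis_client.hget(o[target], "name", function(e, name) {
                if (e) reject(e); else resolve(name);
            });
        });
    });
    // runs once every hget has answered
    Promise.all(lookups).then(function(names) {
        names.forEach(function(name) { redis_client.sadd("newset", name); });
        redis_client.smembers("newset", redis.print);
    }, function(e) {
        console.log("Error occurred getting key: " + e);
    });
});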
You shouldn't use multi unless you actually need a transaction.
Just keep a counter of the pending callbacks and call smembers in the final callback:
var redis_client = redis.createClient(REDIS_PORT, REDIS_URL);
redis_client.hgetall(target_hash, function(e, o) {
    var keys = Object.keys(o);
    var i = 0;
    keys.forEach(function(target) {
        // get the "name" from the hash
        redis_client.hget(o[target], "name", function(e, o) {
            i++;
            if (e) {
                console.log("Error occurred getting key: " + e);
            }
            else {
                redis_client.sadd("newset", o);
            }
            // after the last hget callback, the set is fully populated
            if (i == keys.length) {
                redis_client.smembers("newset", redis.print);
            }
        });
    });
});
