Why does this request to a Node server hang? - node.js

I am very new to Express and Node. I was trying to upload an image using multiparty and the code given here.
I have put a check on file size. When I upload a file larger than the limit, the code lands in the "problem section". The problem is that the server hangs and responds only after the request times out. I have tried many solutions on Stack Overflow but nothing seems to work. It works if the file size is below the limit. I am very sure that the code reaches the problem section and that there is no problem with the upload logic, but it seems I have to do something more in the "problem section". Please tell me what I am missing.
I have replaced the code in the problem section with next(), res.send(), res.end(), next(err), and a bare return;, but it does not work. It hangs no matter what.
Following is the code:
router.post("/image", function(req, res, next) {
if(req.user) {
upload.uploadToS3(req, S3_BUCKET, S3_PROFILE_IMAGE_FOLDER, function(result) {
if(result.error != null && result.error === false) {
models.Customer.update({
orignalImage : result.fileLocation
},{
where : { mobileNumber : req.user.mobileNumber}
}).then(function(customer) {
if(customer) {
res.send({
url: result.fileLocation,
error : false
});
} else {
res.status(400);
res.send({error : true,
error_message : 'Image upload failed'});
}
});
} else {
//PROBLEM SECTION
res.status(404);
res.json({error : true, error_message : result.error_message});
}
});
} else {
res.status(403);
res.send({message: "access denied"});
}
});
(screenshot: response after timeout)
Please tell me if you need more details and I will add them.
var uploadToS3 = function(req, S3_BUCKET, folder, callback) {
    var s3Client = knox.createClient({
        secure: false,
        key: awsConfig.accessKeyId,
        secret: awsConfig.secretAccessKey,
        bucket: S3_BUCKET,
    });
    var headers = {
        'x-amz-acl': 'public-read',
    };
    var form = new multiparty.Form();
    var batch = new Batch();
    batch.push(function(cb) {
        form.on('part', function(part) {
            var validity = validateFile({ type: part.headers['content-type'], name: part.filename, length: part.byteCount });
            console.log(validity);
            if (validity.isValid) {
                cb(null, { filename: folder + "/" + generateFileName({ name: part.filename }), part: part });
            } else {
                cb({ error: true, error_message: validity.reason, part: part }, "advra kedavra");
            }
        });
    });
    batch.end(function(err, results) {
        if (err) {
            console.log(err);
            err.statusCode = 200;
            callback(err);
        } else {
            form.removeListener('close', onEnd);
            var result = results[0];
            var part = result.part;
            var destPath = result.filename;
            var counter = new ByteCounter();
            part.pipe(counter); // need this until knox upgrades to streams2
            headers['Content-Length'] = part.byteCount;
            s3Client.putStream(part, destPath, headers, function(err, s3Response) {
                result = {};
                if (err) {
                    console.log(err);
                    result.error = true;
                    result.error_message = "Problem in uploading!";
                } else {
                    console.log(s3Response.req.url);
                    result = { error: false, statusCode: s3Response.statusCode, message: "file upload successful.", fileLocation: s3Response.req.url };
                }
                callback(result);
            });
            part.on('end', function() {
                console.log("File upload complete", counter.bytes);
            });
        }
    });
    function onEnd() {
        console.log("no uploaded file");
        callback({ error: false, error_message: "no uploaded file." });
    }
    form.on('error', function(err) {
        console.log('Error parsing form: ' + err.stack);
    });
    form.on('close', onEnd);
    form.parse(req);
};

After a three-day search for the answer I found one: Express.js close response
The problem section should be the following:
res.status(400);
res.set("Connection", "close");
res.json({error:true, error_message : "<some - error message>"});
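The hang appears to happen because the client is still busy streaming the oversized request body over a keep-alive connection; until that body is consumed or the socket is torn down, the response never gets through, so the request just sits there until it times out. Setting Connection: close tears the socket down after the response. An alternative (a sketch, not the original code) is to drain the rejected part inside uploadToS3 so the request stream gets consumed and the response can flush immediately:

form.on('part', function(part) {
    var validity = validateFile({ type: part.headers['content-type'], name: part.filename, length: part.byteCount });
    if (!validity.isValid) {
        part.resume(); // discard the incoming bytes so the request stream drains
        return cb({ error: true, error_message: validity.reason, part: part });
    }
    cb(null, { filename: folder + "/" + generateFileName({ name: part.filename }), part: part });
});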

You should simply add .end() after setting the status, as in res.status(400).end();
See the official docs:
res.end([data] [, encoding])
Ends the response process. This method actually comes from Node core, specifically the response.end() method of http.ServerResponse.
Use to quickly end the response without any data. If you need to respond with data, instead use methods such as res.send() and res.json().
res.end();
res.status(404).end();

res.status(400);
res.set("Connection", "close");
res.json({error:true, error_message : "<some - error message>"});
I am not sure that solves your issue. The 'problem section' is in your callback, which only runs after the uploadToS3 function completes. The problem is probably with that function; you might have to refactor it to handle large file uploads.

Related

Unable to run a function synchronously in nodejs and express

I have used wikipedia-js for this project. This is my code for the summary.js file:
var wikipedia = require("wikipedia-js");
var something = "initial";

module.exports = {
    wikitext: function(topicname) {
        console.log("Inside wikitex funciton :" + topicname);
        var options = {
            query: topicname,
            format: "html",
            summaryOnly: false,
            lang: "en"
        };
        wikipedia.searchArticle(options, function(err, htmlWikiText) {
            console.log("Inside seararticlefunciton :");
            if (err) {
                console.log("An error occurred[query=%s, error=%s]", topicname, err);
                return;
            }
            console.log("Query successful[query=%s, html-formatted-wiki-text=%s]", topicname, htmlWikiText);
            something = htmlWikiText;
        });
        return something;
    },
};
I am using this module in the /wiki/:topicname route. The corresponding code in index.js is like this:
router.get('/wiki/:topicname', function(req, res, next) {
    var topicname = req.params.topicname;
    console.log(topicname);
    var first = summary.wikitext(topicname);
    res.send("Hello " + first);
});
The problem is that every time I visit /wiki/some-topic, the last return statement of summary.js executes before htmlWikiText is populated with content, so I always see "Hello initial" on the browser page, although after some time the content does get printed to the terminal by the console.log statement.
So how should I resolve this issue?
I'm not going to try to turn this code into a synchronous version; I'll just correct it to work asynchronously.
You need to pass a callback in to wikitext() and return the value through that callback. Here is the revised code for wikitext() and the route that calls it:
var wikipedia = require("wikipedia-js");

module.exports = {
    wikitext: function(topicname, callback) {
        console.log("Inside wikitex funciton :" + topicname);
        var options = {
            query: topicname,
            format: "html",
            summaryOnly: false,
            lang: "en"
        };
        wikipedia.searchArticle(options, function(err, htmlWikiText) {
            console.log("Inside seararticlefunciton :");
            if (err) {
                console.log("An error occurred[query=%s, error=%s]", topicname, err);
                return callback(err);
            }
            console.log("Query successful[query=%s, html-formatted-wiki-text=%s]", topicname, htmlWikiText);
            callback(null, htmlWikiText);
        });
    }
};
router.get('/wiki/:topicname', function(req, res, next) {
    var topicname = req.params.topicname;
    console.log(topicname);
    summary.wikitext(topicname, function(err, result) {
        if (err) {
            return res.send(err);
        }
        if (!result) {
            return res.send('No article found');
        }
        res.send("Hello " + result);
    });
});
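If you later want to consume this from promise-based code, the callback contract is easy to wrap. A minimal sketch, assuming a runtime with native Promises (wikitextAsync is a hypothetical helper, not part of the answer above):

// Hypothetical Promise wrapper around the callback-style wikitext()
function wikitextAsync(topicname) {
    return new Promise(function(resolve, reject) {
        summary.wikitext(topicname, function(err, result) {
            if (err) return reject(err);
            resolve(result);
        });
    });
}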

Using NodeJS and async.queue to download and save lots of images to local disk

OK, I have a NodeJS app and I'm trying to download lots of images from a web server (about 500 for now, but the number will increase). The problem I get is "Unhandled stream error in pipe Error: EMFILE", because it seems that too many files get opened at the same time.
So I'm trying to use async.queue to process the files in batches of 20, but I still get the error.
SomeModel.find({}, function(err, photos) {
    if (err) {
        console.log(err);
    } else {
        photos.forEach(function(photo) {
            var url = photo.PhotoURL;
            var image = url.replace('http://someurl.com/media.ashx?id=', '').replace('&otherstuffattheend', '.jpg');
            photo.target = image;
            var q = async.queue(function(task) {
                request
                    .get(task.PhotoURL)
                    .on('response', function(response) {
                        console.log(task.PhotoURL + ' : ' + response.statusCode, response.headers['content-type']);
                        console.log(task.target);
                    })
                    .on('error', function(err) {
                        console.log(err);
                    })
                    .pipe(fs.createWriteStream(task.target));
            }, 20);
            q.push(photo, function(err) {
                if (err) {
                    console.log(err);
                }
            });
            q.drain = function() {
                console.log('Done.')
            }
        });
    }
});
What am I doing wrong? Many thanks for your time and help.
The problem is that you're creating a new queue for each photo, and each queue receives just one photo. Instead, create the queue only once (outside the forEach()) and push the photo objects to it. You're also missing the callback in your task handler. For example:
var q = async.queue(function(task, cb) {
    request
        .get(task.PhotoURL)
        .on('response', function(response) {
            console.log(task.PhotoURL + ' : ' + response.statusCode, response.headers['content-type']);
            console.log(task.target);
            // the call to `cb` could instead be made on the file stream's `finish` event
            // if you want to wait until it all gets flushed to disk before consuming the
            // next task in the queue
            cb();
        })
        .on('error', function(err) {
            console.log(err);
            cb(err);
        })
        .pipe(fs.createWriteStream(task.target));
}, 20);

q.drain = function() {
    console.log('Done.');
};

photos.forEach(function(photo) {
    var url = photo.PhotoURL;
    var image = url.replace('http://someurl.com/media.ashx?id=', '').replace('&otherstuffattheend', '.jpg');
    photo.target = image;
    q.push(photo, function(err) {
        if (err) {
            console.log(err);
        }
    });
});
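If you do want each file fully flushed to disk before the queue moves on, as the comment in the task handler suggests, you can key the callback off the write stream's finish event instead. A minimal sketch of that variant; on a mid-transfer network error, pipe() does not end the destination, so 'finish' will not fire and only the error callback runs:

var q = async.queue(function(task, cb) {
    var ws = fs.createWriteStream(task.target);
    ws.on('finish', function() { cb(); });        // file is fully flushed to disk
    ws.on('error', function(err) { cb(err); });   // disk write failed
    request
        .get(task.PhotoURL)
        .on('error', function(err) { cb(err); })  // network error; 'finish' won't fire
        .pipe(ws);
}, 20);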

Node.JS Restify and post Events

Hello, I have a POST event that takes in a key value and maps it to a sproc. This part works very well. However, for some reason I cannot return the data response as JSON.
It says the return type is "octet-stream" even though I'm setting it to application/json.
I'm a little new to REST, so if I'm doing something incorrectly, please let me know.
server.post({ path: PATH + '/data.json', version: '1.0' }, genericData);

function genericData(req, res, next) {
    if (req.params.data != null) {
        var sp_code_name = config.get('data:' + req.params.data);
        var connection = new sql.Connection(conn_str, function(err) {
            // ... error checks
            if (err) {
                return console.log("Could not connect to sql: ", err);
                connection.close();
            }
        });
        var request = new sql.Request(connection);
        console.log(sp_code_name);
        request.execute(sp_code_name, function(err, recordset, returnValue) {
            if (err) {
                connection.close();
                return console.log("Is this a good query or the right table?: ", err);
            }
            //if (recordset && returnValue == 0) {
            res.setHeader('Access-Control-Allow-Origin', '*');
            res.setHeader('content-type', 'application/json');
            res.send(200, recordset[0]);
            console.log(recordset[0]);
            // return next();
            // }
            // return next();
            connection.close();
        });
    }
    res.setHeader('content-type', 'application/text');
    res.send(200, "Something is wrong with the stream.");
}
You need to wrap your error response in an else clause:
if (req.params.data != null) {
    ...
} else {
    res.setHeader('content-type', 'application/text');
    res.send(200, "Something is wrong with the stream.");
}
Without the else clause, the error response gets sent on every request, before your request.execute callback is even called. That sets your content-type to application/text instead of the application/json you're expecting.
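Equivalently, you can invert the check and return early, which keeps the handler flat. A sketch of that structure (not the original code):

function genericData(req, res, next) {
    if (req.params.data == null) {
        res.setHeader('content-type', 'application/text');
        res.send(200, "Something is wrong with the stream.");
        return next();
    }
    // ...the SQL work goes here; the JSON response is sent inside
    // the request.execute callback, exactly as in the question.
}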

sending data from Javascript to save in mongoDB through nodejs

I am trying to send an object (a blog post head and body) from JavaScript through a Node.js server and save it in MongoDB.
This is the sending code:
function saveState(event) {
    var url = '';
    var postTitle = headerField.innerHTML;
    var article = contentField.innerHTML;
    var post = {
        title: postTitle,
        article: article
    };
    var postID = document.querySelector('.save').getAttribute('id');
    if (postID != "new") {
        url += "?id=" + postID;
    }
    var request = new XMLHttpRequest();
    request.open("POST", "draft" + url, true);
    request.setRequestHeader("Content-Type", "application/json");
    request.send(post);
}
This is sent to this Node server handler:
app.post('/draft', routes.saveDraft);

exports.saveDraft = function(req, res) {
    var id = url.parse(req.url, true).query.id;
    var post = db.getPostByID(id);
    if (id) {
        console.log('post id' + id);
        db.savePost(id, req.body.head, req.body.article);
    } else {
        db.newPost(req.body.head, req.body.article);
    }
    res.render('editDraft.hbs', post); // send the selected post
};
and then on to one of these DB functions:
exports.newPost = function(postTitle, article) {
    new postCatalog({
        title: postTitle,
        _id: 1,
        author: 'temp',
        AuthorID: 2,
        date: '2/3/12',
        inShort: article.substring(0, 100),
        content: article,
        published: false
    }).save(function(err, login) {
        if (err) {
            return console.log('error');
        } else {
            console.log('Article saved');
        }
    });
}

exports.savePost = function(id, postTitle, article) {
    postCatalog.find({_id: id}).save(function(err, login) {
        if (err) {
            return console.log('error');
        } else {
            console.log('Draft saved');
        }
    });
}
Now, I just can't get this to work.
I am new to Node and I could really use your help!
Thanks.
EDITED:
The parameters being sent to the DB saving functions were not written properly.
But I'm still stuck in the same place, where the data is being sent but not saved correctly. I think there's something wrong with my getPostByID function but I can't figure it out:
exports.getPostByID = function(id) {
    var post = postCatalog.find({_id: id}, function(err, post) {
        if (err) {
            return handleError(err);
        } else {
            if (post > 0) {
                post = post[0];
            }
            return post;
        }
    });
    return post;
}
I am using Express (including bodyparser) and Mongoose; the view engine is hbs.
Thanks again.
You have to write it the asynchronous way, e.g. your getPostByID:
exports.getPostByID = function(id, callback) {
    postCatalog.find({_id: id}, function(err, post) {
        if (err) {
            callback(err);
        }
        else if (post && post.length > 0) {
            callback(null, post[0]);
        }
        else {
            callback(null, null); // no record found
        }
    });
};
And this is true for your whole code: it's totally different, and the way you tried it will never work under Node.js.
BTW, there is a mongo-driver method, findOne, which is better suited to this special case, but I didn't want to change your code too much.
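For reference, the same function written with findOne, which yields a single document (or null) instead of an array; a minimal sketch:

exports.getPostByID = function(id, callback) {
    postCatalog.findOne({_id: id}, function(err, post) {
        if (err) {
            return callback(err);
        }
        callback(null, post); // post is null when no record matches
    });
};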

Http Route called multiple times

When the following route is called in Express, it is actually executed 6 times: the console.log is printed 6 times, and my Mongoose logic is executed 6 times (it saves 6 times in the database).
I then get an HTTP 500 back from Cloud9 IDE, "Could not proxy request". I am really confused; I have no loops in my code, so how can this happen? The console.log("in else (2)") gets printed 6 times.
Edit: I have tried the mongooseLogic code with various parts commented out, and the issue was still there, so this doesn't look like a Mongoose issue.
Second edit: I changed the POST to a GET and hardcoded the body that would be sent, and the route was executed only once.
Third edit: I am also using everyauth for session/authentication with the Facebook OAuth.
app.post("/result/:userId/:elementId", function(req, res) {
var receivedJSON = req.body;
console.log("In route");
//Making sure the receive request is valid
if(typeof(receivedJSON.type) !== undefined) {
mongooseLogic.saveResults(req.params.elementId, receivedJSON, req.params.userId, function(message) {
if(message === "Success") {
res.json({ success: true, message: 'Result saved.'});
}
else {
res.json({ success: false, message: 'Error in saving results. Trace: ' + message});
}
});
}
else {
res.json({ success: false, message: 'Failed, Invalid object sent to server'});
}
});
The code in the mongooseLogic file:
var saveResults = function(elementRefId, receivedResult, userId, callback) {
    if (elementRefId.toString().length !== 24) {
        callback("Invalid objectId for elementId");
    } else {
        Result.findOne({ "id": userId, "serieResult.id": elementRefId }, function(err, result) {
            if (err) {
                callback("No Document found: " + err);
            } else if (result === null) {
                console.log("in null");
                var tempResult = {
                    id: elementRefId,
                    unit: receivedResult.unit,
                    value: receivedResult.value
                };
                Result.update({ "id": userId }, { $push: { "serieResult": tempResult } }, { upsert: true }, function(err) {
                    if (err) {
                        callback("Error in saving result (Distance): " + err);
                    } else {
                        callback("Success");
                    }
                });
            } else {
                Result.update({ "id": userId, "serieResult.id": elementRefId }, {
                    $set: {
                        "serieResult.$.unit": receivedResult.unit,
                        "serieResult.$.value": receivedResult.value
                    }
                }, { upsert: true }, function(err) {
                    if (err) {
                        callback("Cant update doc: " + err);
                    } else {
                        console.log("in else (2)");
                        callback("Success");
                    }
                });
            }
        });
    }
}
This was a problem with the Cloud9 proxy interfering. The problem was addressed thanks to this issue and has been resolved.
