Sending results of MongoDB find() using Express - node.js

I am new to Node.js and I am trying to connect to MongoDB. I have prepared a query and I can see the output on the server by traversing the cursor, but I want to send the output of find() in the response.
var cursor = db.collection('dbLocations').find(
  {$and: [
    {type: locationType},
    {createdAt : { "$gte": new Date(createdAt), "$lt": date1 }}
  ]}).skip(skip).limit(count);
Now I am getting an error like
Cannot set headers after they are sent to the client if I do a string concatenation and then JSON.stringify.
I tried pretty(), but it gives me errors like skip(...).limit(...).toString(...).pretty is not a function
or skip(...).limit(...).pretty is not a function.
I am totally unable to figure out how to convert the cursor, as I am not clear about the concept of callbacks and all the solutions use them. Is there a simple stringify, parse, or pretty kind of solution to this?
Below is my Express snippet for the GET route; it looks very messy right now. I want to send the find() output instead of the random placeholder object.
app.get('/api/first', function(request, response) {
  response.writeHead(200, {'Content-Type': 'application/json'});
  var locationType = request.body.type;
  var createdAt = request.body.createdAt;
  //var pageNumber = parseInt(request.body.pageNumber);
  console.log(locationType);
  console.log(createdAt);
  //console.log(pageNumber);
  var date1 = new Date(createdAt);
  date1.setDate(date1.getDate() + 1);
  var count = 2;
  var str = "";
  var skip;
  if (request.body.pageNumber)
    skip = parseInt((request.body.pageNumber - 1) * count);
  else
    skip = 0;
  MongoClient.connect(url, function(err, client) {
    if (err) throw err;
    console.log('Connected');
    var db = client.db('locationapi');
    var cursor = db.collection('dbLocations').find(
      {$and: [
        {type: locationType},
        {createdAt : {"$gte": new Date(createdAt), "$lt": date1}}
      ]}
    ).skip(skip).limit(count);
    cursor.each(function(err, doc) {
      if (err) throw err;
      if (doc !== null) {
        console.log(doc); str = str + doc;
      } else client.close();
    });
    client.close();
  });
  var myObj = {
    name: 'jgj',
    job: 'Ninja'
  }; // random stuff
  response.end(JSON.stringify(myObj));
});

db.collection('dbLocations').find({
  $and: [{
    type: locationType
  }, {
    createdAt: {
      "$gte": new Date(createdAt),
      "$lt": date1
    }
  }]
}, {
  skip: skip,
  limit: count
}).toArray(function (err, docs) {
  if (err) return res.status(500).send({error: err});
  res.send(docs);
});
https://mongodb.github.io/node-mongodb-native/3.3/api/Collection.html#find
https://mongodb.github.io/node-mongodb-native/3.3/api/Cursor.html#toArray
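For completeness, here is a minimal sketch of the whole route using that approach, assuming the 3.x driver and the same url, MongoClient, and 'locationapi' database from the question. The key point is that the response is sent only inside the toArray callback, which also avoids the "Cannot set headers after they are sent" error caused by the early writeHead/end pair:

// Sketch only, not the accepted answer verbatim.
app.get('/api/first', function (request, response) {
  var locationType = request.body.type;
  var createdAt = request.body.createdAt;
  var date1 = new Date(createdAt);
  date1.setDate(date1.getDate() + 1);
  var count = 2;
  var skip = request.body.pageNumber
    ? parseInt((request.body.pageNumber - 1) * count)
    : 0;

  MongoClient.connect(url, function (err, client) {
    if (err) return response.status(500).send({ error: err });
    var db = client.db('locationapi');
    db.collection('dbLocations')
      .find(
        { $and: [
          { type: locationType },
          { createdAt: { $gte: new Date(createdAt), $lt: date1 } }
        ]},
        { skip: skip, limit: count }
      )
      .toArray(function (err, docs) {
        client.close();
        if (err) return response.status(500).send({ error: err });
        response.json(docs); // sets Content-Type and ends the response
      });
  });
});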

Related

Remove object array items by comparing object array from mongodb

There is a document that has an array of objects inside it, like:
Objectid('')
fruits : [{_id:'2'},{_id:'3'},{_id:'4'}]
I want to delete the items fruits_id = [{_id:'3'},{_id:'4'}] from it with something like:
fruit_group.updateOne(
  {collection_id: collection_id},
  {$pullAll: {"fruits": fruits_id}}
)
So far I have found the logic below, which I think is not efficient.
routes.post('/removeFruits', async (request, response, next) => {
  var post_data = request.body;
  var collection_id = post_data.collection_id;
  var fruits_ids = JSON.parse(post_data.fruits_ids);
  var prev_fruits;
  await fruit_group.findOne({'collection_id': collection_id}, function (err, result) {
    if (err) {
      console.log("Some error occurred");
      response.json({'message': "Some error occurred", 'result': 'false'});
    }
    prev_fruits = result.fruits;
  });
  for (var i = 0; i < fruits_ids.length; i++) { // this will delete all occurring items from array
    var key = fruits_ids[i].user_id;
    prev_fruits.filter(x => x.user_id === key).forEach(x => prev_fruits.splice(prev_fruits.indexOf(x), 1));
  }
  await fruit_group.updateOne({'collection_id': collection_id}, {$set: {'fruits': prev_fruits}}, function (err, result) {
    if (err) {
      response.json({'message': "Some error occurred", 'result': 'false'});
    }
    response.json({'message': 'Deletion successfully', 'result': 'true'});
  });
});
Is there a better way to achieve the same result?
Assuming fruits_id = [{ _id: '3' }, { _id: '4' }], you could do something like this using $pull and $in:
await fruit_group.updateOne({'collection_id': collection_id}, { $pull: { fruits: { $in: fruits_id }}})
This follows the example of removing all items that equal a specified value.
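For illustration, a minimal sketch of the whole route with that single update, assuming the same fruit_group model and the collection_id / fruits_ids request fields from the question:

routes.post('/removeFruits', async (request, response) => {
  try {
    var collection_id = request.body.collection_id;
    // e.g. [{ _id: '3' }, { _id: '4' }]
    var fruits_ids = JSON.parse(request.body.fruits_ids);

    // one round trip: pull every array element that equals one of the given objects
    await fruit_group.updateOne(
      { collection_id: collection_id },
      { $pull: { fruits: { $in: fruits_ids } } }
    );

    response.json({ message: 'Deletion successful', result: 'true' });
  } catch (err) {
    response.json({ message: 'Some error occurred', result: 'false' });
  }
});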

how to implement updateMany in mongodb

One of the property values has an extra space at the end of the string, and in total there are 5000 objects from which the space needs to be removed and the document updated. How do I apply that condition in updateMany?
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/";
MongoClient.connect(url, (err, db) => {
  if (err) throw err;
  var dbo = db.db("new");
  dbo.collection("compo").find({}).toArray((error, result) => {
    if (error) throw error;
    let finalvalue = [];
    result.forEach(req => {
      let nameComp = req.ComponentName;
      if (nameComp.substr(nameComp.length - 1) == " " && nameComp.length > 2) {
        let Compon = req.ComponentName.substring(0, req.ComponentName.length - 1);
        finalvalue.push({
          "_id": req._id,
          "ComponentName": Compon
        });
      }
    })
    console.log(finalvalue);
    dbo.collection("components").updateMany({}, finalvalue);
    db.close();
  });
});
I have read through the link below, but I don't know where to apply the JavaScript condition: https://docs.mongodb.com/manual/reference/method/db.collection.updateOne/#db.collection.updateOne
Try this; it may help you.
dbo.collection("compo").find({}, { "nameComp": 1 }).forEach(function (doc) {
dbo.collection("compo").update(
{ "_id": doc._id },
{ "$set": { "nameComp": doc.nameComp.trim() } }
);
})
Please refer this link: https://www.tutorialspoint.com/how-to-remove-white-spaces-leading-and-trailing-from-string-value-in-mongodb
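If your server is MongoDB 4.2 or newer (an assumption), the cleanup can also be done entirely server-side with a single updateMany that uses an aggregation-pipeline update, so the documents never have to be read into Node first:

// Sketch only: pipeline-style updates require MongoDB 4.2+.
dbo.collection("compo").updateMany(
  { ComponentName: / $/ }, // only documents whose name ends in a space
  [ { $set: { ComponentName: { $trim: { input: "$ComponentName" } } } } ],
  function (err, result) {
    if (err) throw err;
    console.log(result.modifiedCount + " documents updated");
    db.close();
  }
);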

MongoDB mongoose subdocuments created twice

I am using a simple form that can be used to register an article on a website.
The back-end looks like this:
// Post new article
app.post("/articles", function(req, res){
  var newArticle = {};
  newArticle.title = req.body.title;
  newArticle.description = req.body.description;
  var date = req.body.date;
  var split = date.split("/");
  newArticle.date = split[1]+'/'+split[0]+'/'+split[2];
  newArticle.link = req.body.link;
  newArticle.body = req.body.body;
  var platforms = req.body.platforms;
  console.log(platforms);
  Article.create(newArticle, function(err, createdArticle){
    if(err){
      console.log(err.message);
    } else {
      var counter=0;
      platforms.forEach(function(platform){
        var platformed=mongoose.mongo.ObjectID(platform);
        Platform.findById(platformed, function(err, foundPlatform){
          if(err){
            console.log(err);
          } else {
            counter++;
            foundPlatform.articles.push(createdArticle);
            foundPlatform.save();
            createdArticle.platforms.push(foundPlatform);
            createdArticle.save();
            if(counter==platforms.length){
              res.redirect('articles/' + createdArticle._id);
            }
          }
        });
      });
    }
  });
});
The platforms field is passed to the back-end as an array of strings, one string per ObjectID. When platforms contains only one string, i.e. one platform to be linked, everything works fine. When platforms contains multiple strings, the created article has duplicates of each platform, or sometimes only duplicates of some platforms.
Any ideas?
UPDATE 1:
Article Schema:
var mongoose = require("mongoose");
var articleSchema = new mongoose.Schema({
title : String,
description : String,
link : String,
date : String,
body : String,
platforms : [
{
type: mongoose.Schema.Types.ObjectId,
ref: "Platform"
}
]
})
module.exports = mongoose.model("Article", articleSchema);
Platform Schema:
var mongoose = require("mongoose");
var platformSchema = new mongoose.Schema({
name : String,
category : String,
contacts : [
{
type: mongoose.Schema.Types.ObjectId,
ref: "Contact"
}
],
website : String,
country : String,
contactInformation : String,
businessModelNotes : String,
source : String,
generalNotes : String,
projects : [
{
type: mongoose.Schema.Types.ObjectId,
ref: "Project"
}
],
articles : [
{
type: mongoose.Schema.Types.ObjectId,
ref: "Article"
}
],
privacy : String,
comments : [
{
type: mongoose.Schema.Types.ObjectId,
ref: "Comment"
}
]
});
module.exports = mongoose.model("Platform", platformSchema);
The forEach loop in your attempt does not wait for the findById() callback to complete before starting the next iteration. You need to use one of the async library's methods (async.each, async.whilst, or async.until), which are equivalent to a for loop but wait until the async callback is invoked before moving on to the next iteration (in other words, a for loop that yields).
For example:
var platform_docs = [];
async.each(platforms, function(id, callback) {
  Platform.findById(id, function(err, platform) {
    if (platform)
      platform_docs.push(platform);
    callback(err);
  });
}, function(err) {
  // code to run on completion or err
  console.log(platform_docs);
});
For the whole operation, you could use the async.waterfall() method which allows each function to pass its results on to the next function.
The first function in the method creates the new article.
The second function uses the async.each() utility function to iterate over the platforms list, perform an asynchronous task for each id to update the platform using findByIdAndUpdate(), and when they're all done return the results of the update query in an object variable to the next function.
The final function will update the newly created article with the platform ids from the previous pipeline.
Something like the following example:
var newArticle = {},
    platforms = req.body.platforms,
    date = req.body.date,
    split = date.split("/");
newArticle.title = req.body.title;
newArticle.description = req.body.description;
newArticle.date = split[1]+'/'+split[0]+'/'+split[2];
newArticle.link = req.body.link;
newArticle.body = req.body.body;
console.log(platforms);

async.waterfall([
  // Create the article
  function(callback) {
    var article = new Article(newArticle);
    article.save(function(err, article){
      if (err) return callback(err);
      callback(null, article);
    });
  },
  // Query and update the platforms
  function(articleData, callback) {
    var platform_ids = [];
    async.each(platforms, function(id, callback) {
      Platform.findByIdAndUpdate(id,
        { "$push": { "articles": articleData._id } },
        { "new": true },
        function(err, platform) {
          if (platform)
            platform_ids.push(platform._id);
          callback(err);
        }
      );
    }, function(err) {
      // code to run on completion or err
      if (err) return callback(err);
      console.log(platform_ids);
      callback(null, {
        "article": articleData,
        "platform_ids": platform_ids
      });
    });
  },
  // Update the article
  function(obj, callback) {
    var article = obj.article;
    obj.platform_ids.forEach(function(id){ article.platforms.push(id); });
    article.save(function(err, article){
      if (err) return callback(err);
      callback(null, article);
    });
  }
], function(err, result) {
  /*
    This function gets called after the above tasks
    have called their "task callbacks"
  */
  if (err) return next(err);
  console.log(result);
  res.redirect('articles/' + result._id);
});
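For reference, a more compact sketch of the same flow using async/await, assuming a Mongoose version with promise support (5.x or later); it updates all platforms in one updateMany and avoids the per-iteration counter entirely:

// Sketch only; the date reformatting from the question is omitted for brevity.
app.post("/articles", async function (req, res, next) {
  try {
    var platforms = req.body.platforms; // array of ObjectId strings
    var article = await Article.create({
      title: req.body.title,
      description: req.body.description,
      link: req.body.link,
      body: req.body.body,
      date: req.body.date
    });
    // push the article id into every selected platform in one query
    await Platform.updateMany(
      { _id: { $in: platforms } },
      { $push: { articles: article._id } }
    );
    // store only the platform ids on the article
    article.platforms = platforms;
    await article.save();
    res.redirect('articles/' + article._id);
  } catch (err) {
    next(err);
  }
});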
Move your save function:
if(counter==platforms.length){
  createdArticle.save(function(err, savedObject){
    if(err || !savedObject) console.log(err || "not saved");
    else {
      res.redirect('articles/' + savedObject._id.toString());
    }
  });
}
============= EDIT
It's because you have to call article.save() only once, not on every loop iteration. In addition, you are using save() as if it were synchronous, but it's async.
I think you should use the update function directly:
} else {
  var counter=0;
  // map platform array ids to ObjectIDs
  var idarray = platforms.map(function(e){return mongoose.mongo.ObjectID(e);});
  // update all platforms with the article id
  Platform.update({_id:{$in: idarray}}, {$push:{articles: createdArticle}}, {multi:true, upsert:false}, function(err, raw){
    if(err)
    {
      // error case
      return res.status(403).json({});
    }
    // retrieve platforms
    Platform.find({_id:{$in: idarray}}, function(err, results){
      if(err || !results)
      {
        // error case
        return res.status(403).json({});
      }
      Article.update({_id: createdArticle._id.toString()}, {$push:{platforms:{$each: results}}}, {multi:false, upsert:false}, function(err, saved){
        if(err || !saved)
        {
          // error
          return res.status(403).json({});
        }
        res.redirect('articles/' + createdArticle._id.toString());
      });
    });
  });
But it's a bad idea to store full objects; why not store only the ids?
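A sketch of that suggestion, assuming the same createdArticle, platforms, and res variables as above: push only the ObjectIds on both sides of the relation, which also removes the need for the intermediate Platform.find:

var idarray = platforms.map(function (e) { return mongoose.mongo.ObjectID(e); });
// push the article id into every platform
Platform.update({ _id: { $in: idarray } }, { $push: { articles: createdArticle._id } }, { multi: true }, function (err) {
  if (err) return res.status(403).json({});
  // push only the platform ids into the article
  Article.update({ _id: createdArticle._id }, { $push: { platforms: { $each: idarray } } }, function (err) {
    if (err) return res.status(403).json({});
    res.redirect('articles/' + createdArticle._id.toString());
  });
});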

node js mongo db dependencies (doc not being found)

I have the following code:
var method = PushLoop.prototype;
var agent = require('./_header')
var request = require('request');
var User = require('../models/user_model.js');
var Message = require('../models/message_model.js');
var async = require('async')
function PushLoop() {};
method.startPushLoop = function() {
getUserList()
function getUserList() {
User.find({}, function(err, users) {
if (err) throw err;
if (users.length > 0) {
getUserMessages(users)
} else {
setTimeout(getUserList, 3000)
}
});
}
function getUserMessages(users) {
// console.log("getUserMessages")
async.eachSeries(users, function (user, callback) {
var params = {
email: user.email,
pwd: user.password,
token: user.device_token
}
messageRequest(params)
callback();
}, function (err) {
if (err) {
console.log(err)
setTimeout(getUserList, 3000)
}
});
}
function messageRequest(params) {
var url = "https://voip.ms/api/v1/rest.php?api_username="+ params.email +"&api_password="+ params.pwd +"&method=getSMS&type=1&limit=5"
request(url, function(err, response, body){
if (!err) {
var responseObject = JSON.parse(body);
var messages = responseObject.sms
if (responseObject["status"] == "success") {
async.eachSeries(messages, function(message, callback){
console.log(params.token)
saveMessage(message, params.token)
callback();
}, function(err) {
if (err) {
console.log(err)
}
// setTimeout(getUserList, 3000)
})
} else {
// setTimeout(getUserList, 3000)
}
} else {
console.log(err)
// setTimeout(getUserList, 3000)
}
});
setTimeout(getUserList, 3000)
}
function saveMessage(message, token) {
// { $and: [ { price: { $ne: 1.99 } }, { price: { $exists: true } }
// Message.find({ $and: [{ message_id: message.id}, {device_token: token}]}, function (err, doc){
Message.findOne({message_id: message.id}, function (err, doc){
if (!doc) {
console.log('emtpy today')
var m = new Message({
message_id: message.id,
did: message.did,
contact: message.contact,
message: message.message,
date: message.date,
created_at: new Date().toLocaleString(),
updated_at: new Date().toLocaleString(),
device_token: token
});
m.save(function(e) {
if (e) {
console.log(e)
} else {
agent.createMessage()
.device(token)
.alert(message.message)
.set('contact', message.contact)
.set('did', message.did)
.set('id', message.id)
.set('date', message.date)
.set('message', message.message)
.send();
}
});
}
}) //.limit(1);
}
};
module.exports = PushLoop;
This actually works perfectly fine in my development environment. However, in production (I'm using OpenShift) the Mongo documents get saved in an endless loop, so it looks like the if (!doc) condition always returns true and therefore the document gets created each time. Not sure if this could be a Mongoose issue; I also tried the "find" method instead of "findOne". My dev environment has Node 0.12.7 and OpenShift has 0.10.x, which could be the issue, and I'm still investigating, but if anybody can spot an error I cannot see in my logic/code, please let me know.
thanks!
I solved this issue by using a "series"-like pattern and the shift method on the users array. The Mongoose upsert findOneOrCreate is good; however, if a document is found it is returned, and if one isn't found and is therefore created, it is also returned, so I could not distinguish a newly inserted doc from a found doc. Instead I used the same findOne function, which returns null if no doc is found, and in that case I just create it and send the push notification. Still a bit ugly, and I know I could have used promises or the async lib; I might refactor in the future. This works for now.
function PushLoop() {};
var results = [];
method.go = function() {
var userArr = [];
startLoop()
function startLoop() {
User.find({},function(err, users) {
if (err) throw err;
users.forEach(function(u) {
userArr.push(u)
})
function async(arg, callback) {
var url = "https://voip.ms/api/v1/rest.php?api_username="+ arg.email +"&api_password="+ arg.password +"&method=getSMS&type=1&limit=5"
request.get(url, {timeout: 30000}, function(err, response, body){
if (!err) {
var responseObject = JSON.parse(body);
var messages = responseObject.sms
var status = responseObject.status
if (status === "success") {
messages.forEach(function(m) {
var message = new Message({
message_id: m.id,
did: m.did,
contact: m.contact,
message: m.message,
date: m.date,
created_at: new Date().toLocaleString(),
updated_at: new Date().toLocaleString(),
device_token: arg.device_token
});
var query = { $and : [{message_id: m.id}, {device_token: arg.device_token}] }
var query1 = { message_id: m.id }
Message.findOne(query).lean().exec(function (err, doc){
if (!doc || doc == null) {
message.save(function(e) {
console.log("message saved")
if (e) {
console.log("there is an error")
console.log(e)
} else {
console.log(message.device_token)
var messageStringCleaned = message.message.toString().replace(/\\/g,"");
var payload = {
"contact" : message.contact,
"did" : message.did,
"id" : message.message_id,
"date" : message.date,
"message" : messageStringCleaned
}
var note = new apns.Notification();
var myDevice = new apns.Device(message.device_token);
note.expiry = Math.floor(Date.now() / 1000) + 3600; // Expires 1 hour from now.
note.badge = 3;
note.alert = messageStringCleaned;
note.payload = payload;
apnsConnection.pushNotification(note, myDevice);
}
})
}
});
});
}
else {
console.log(err)
}
}
});
setTimeout(function() {
callback(arg + "testing 12");
}, 1000);
}
// Final task (same in all the examples)
function series(item) {
if(item) {
async( item, function(result) {
results.push(result);
return series(userArr.shift());
});
} else {
return final();
}
}
function final() {
console.log('Done');
startLoop();
}
series(userArr.shift())
});
}
}
module.exports = PushLoop;
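As an alternative to the findOne-then-save pattern, here is a sketch of an atomic upsert, assuming a reasonably recent Mongoose (5.x): $setOnInsert only writes the fields when the document is created, and lastErrorObject.updatedExisting in the raw result tells you whether it already existed, so the push notification is sent only for new messages:

Message.findOneAndUpdate(
  { message_id: message.id, device_token: token },
  { $setOnInsert: {
      did: message.did,
      contact: message.contact,
      message: message.message,
      date: message.date,
      created_at: new Date().toLocaleString(),
      updated_at: new Date().toLocaleString()
  } },
  { upsert: true, rawResult: true },
  function (err, raw) {
    if (err) return console.log(err);
    if (!raw.lastErrorObject.updatedExisting) {
      // newly inserted: send the notification here
      // (sendPush is a hypothetical helper wrapping the notification code above)
      sendPush(message, token);
    }
  }
);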

nodejs mongoose bulk update

I have a collection of documents and I need to add a new field to every document. If I run a query to get all the documents and then update every single one, Node.js stops, maybe because of a memory leak.
This is my code
var express = require('express');
var geocoderProvider = 'google';
var httpAdapter = 'http';
var People = require("./models/people").collection.initializeOrderedBulkOp();
var app = express();
var geocoder = require('node-geocoder').getGeocoder(geocoderProvider, httpAdapter, {});

app.get('/', function (req, res) {
  People.find({}, function (err, docs) {
    if (err) {
      res.send(err);
    } else {
      docs.forEach(function (doc) {
        geocoder.geocode({address: doc.address, country: 'Italy', zipcode: doc.cap}, function(error, value) {
          doc.loc.coordinates[0] = value[0].latitude;
          doc.loc.coordinates[1] = value[0].longitude;
          People.update({ _id: doc._id }, { $set: { loc: doc.loc }}, { multi: true }, function (error) {
            if (error) {
              console.error('ERROR!');
            }
          });
        });
      });
    }
  });
});

var server = app.listen(3000, function () {
  var host = server.address().address
  var port = server.address().port
  console.log('Example app listening at http://%s:%s', host, port)
});
Is there any way to bulk update with Mongoose?
Thanks in advance
More detailed info about the query and the update:
var bulk = People.collection.initializeOrderedBulkOp();
bulk.find(query).update(update);
bulk.execute(function (error) {
callback();
});
Here the query searches with an array of ids and the update needs a $set:
var bulk = People.collection.initializeOrderedBulkOp();
bulk.find({'_id': {$in: []}}).update({$set: {status: 'active'}});
bulk.execute(function (error) {
callback();
});
Here the query searches by a single id:
var bulk = People.collection.initializeOrderedBulkOp();
bulk.find({'_id': id}).update({$set: {status: 'inactive'}});
bulk.execute(function (error) {
callback();
});
You can drop down to the collection level and do a bulk update. This action will not be atomic - some of the writes can fail and others might succeed - but it will allow you to make these writes in a single round trip to your database.
It looks like this:
var bulk = People.collection.initializeUnorderedBulkOp();
bulk.find({<query>}).update({<update>});
bulk.find({<query2>}).update({<update2>});
...
bulk.execute(function(err) {
...
});
Check out the docs here: http://docs.mongodb.org/manual/core/bulk-write-operations/
This example should cover all the operation types that can be mixed together in a single call to Mongoose's bulkWrite() function:
Character.bulkWrite([
  {
    insertOne: {
      document: {
        name: 'Eddard Stark',
        title: 'Warden of the North'
      }
    }
  },
  {
    updateOne: {
      filter: { name: 'Eddard Stark' },
      // If you were using the MongoDB driver directly, you'd need to do
      // `update: { $set: { title: ... } }` but mongoose adds $set for
      // you.
      update: { title: 'Hand of the King' }
    }
  },
  {
    deleteOne: {
      filter: { name: 'Eddard Stark' }
    }
  }
]).then(res => {
  // Prints "1 1 1"
  console.log(res.insertedCount, res.modifiedCount, res.deletedCount);
});
Official Documentation: https://mongoosejs.com/docs/api.html#model_Model.bulkWrite
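Applied to the geocoding question above, a sketch of how bulkWrite could batch the writes, assuming People is the Mongoose model itself (not the bulk op from the question's require line) and updates is an array of { _id, loc } pairs already computed from the geocoder callbacks:

// one updateOne operation per person, sent in a single round trip
var ops = updates.map(function (u) {
  return {
    updateOne: {
      filter: { _id: u._id },
      update: { $set: { loc: u.loc } }
    }
  };
});
People.bulkWrite(ops).then(function (res) {
  console.log(res.modifiedCount + ' people updated');
});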
