Can anyone explain how to save nested array items into MongoDB with Mongoose and Node.js?
Here is the schema I am using:
var demoSchema = ({
    "r_id": Number,
    "r_label": String,
    "entity": [{
        "d_label": String,
        "d_type": String
    }]
})
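If that object isn't already wrapped in a Mongoose schema elsewhere, a complete, minimal definition would look roughly like this (a sketch only; the model name Demo is an assumption):
var mongoose = require('mongoose');

// Minimal sketch of the same structure as an actual Mongoose schema/model.
// The model name "Demo" is assumed for illustration.
var demoSchema = new mongoose.Schema({
    r_id: Number,
    r_label: String,
    entity: [{
        d_label: String,
        d_type: String
    }]
});

module.exports = mongoose.model('Demo', demoSchema);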
And here is the Node.js handler I am using to save the data into the db:
app.route("/mypages/rooms")
.post(function(req, res) {
var db = mongoOp.demo();
var response = {};
req.checkBody("r_id", "Enter a valid r_id address.").notEmpty();
req.checkBody("r_label", "Enter a valid label address.").notEmpty();
var errors = req.validationErrors();
if (errors) {
console.log(errors);
console.log(req.body);
res.status(500);
res.end('500 Server Error');
//res.render('addrooms',{flag:1});
return;
} else {
db.r_id = req.body.r_id;
db.r_label = req.body.r_label;
db.entity = req.body.entity;
db.save(function(err) {
if (err) {
findfromdb(req, res, 2); //own function for implementation purpose
} else {
findfromdb(req, res, 1);
}
});
//var middleVar = req.body.resources;
// console.log(middleVar[0].d_rgb);
}
});
Set entity as an array of objects:
db.entity = [{}];
app.route("/mypages/rooms")
.post(function(req, res) {
var db = mongoOp.demo();
var response = {};
req.checkBody("r_id", "Enter a valid r_id address.").notEmpty();
req.checkBody("r_label", "Enter a valid label address.").notEmpty();
var errors = req.validationErrors();
if (errors) {
console.log(errors);
console.log(req.body);
res.status(500);
res.end('500 Server Error');
//res.render('addrooms',{flag:1});
return;
} else {
db.r_id = req.body.r_id;
db.r_label = req.body.r_label;
db.entity = [{
"d_label": req.body.label_type,
"d_type": req.body.d_type
}];
db.save(function(err) {
if (err) {
findfromdb(req, res, 2); //own function for implementation purpose
} else {
findfromdb(req, res, 1);
}
});
//var middleVar = req.body.resources;
// console.log(middleVar[0].d_rgb);
}
});
The operation below adds the { d_label, d_type } element to the entity array only if it does not already exist in the array; if it already exists, it won't be added again.
https://docs.mongodb.com/manual/reference/operator/update/addToSet/
Model.update(
    query, // { _id: 1 }
    {
        $addToSet: {
            "entity": {
                "d_label": req.body.label_type,
                "d_type": req.body.d_type
            }
        }
    }
)
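If you want to append the element every time, regardless of whether an identical one already exists, $push works the same way. A minimal sketch, assuming the document is looked up by r_id:
// Sketch: always append to entity, even if an identical element exists.
Model.update(
    { r_id: req.body.r_id },   // assumed lookup key
    {
        $push: {
            "entity": {
                "d_label": req.body.label_type,
                "d_type": req.body.d_type
            }
        }
    },
    function(err, raw) {
        if (err) {
            console.log(err);
        }
    }
);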
Have a look at this answer:
Pushing item to Mongodb collection array
I am trying to query an autogenerated Amplify API using Postman. I'm banging my head against the wall on something that should be simple. Can someone explain why this query URL doesn't return a JSON object? The data exists in DynamoDB, but Postman returns an empty array (and a 200 status).
Postman (this is what I expected to work):
https://xxxxx.execute-api.us-east-1.amazonaws.com/staging/api/getShipContainer?location=fl
UPDATE: after staring at the code for longer, I see that req.params[partitionKeyName] is somehow evaluating to getShipContainer, which would explain my issue. But how do I fix this, and why did it happen?
condition[partitionKeyName]['AttributeValueList'] = [ convertUrlType(req.params[partitionKeyName], partitionKeyType) ];
This syntax works (it returns the DynamoDB object) but is very clearly incorrect (location is a DynamoDB column, and fl is the filter param): https://xxxxxx.execute-api.us-east-1.amazonaws.com/staging/api/fl?location
Query method:
const userIdPresent = false; // TODO: update in case is required to use that definition
const partitionKeyName = "location";
const partitionKeyType = "S";
const sortKeyName = "containerId";
const sortKeyType = "S";
const hasSortKey = sortKeyName !== "";
const path = "/api";
const UNAUTH = 'UNAUTH';
const hashKeyPath = '/:' + partitionKeyName;
const sortKeyPath = hasSortKey ? '/:' + sortKeyName : '';
// declare a new express app
var app = express()
app.use(bodyParser.json())
app.use(awsServerlessExpressMiddleware.eventContext())
//Enable CORS for all methods
app.use(function(req, res, next) {
    res.header("Access-Control-Allow-Origin", "*")
    res.header("Access-Control-Allow-Headers", "*")
    next()
});
// convert url string param to expected Type
const convertUrlType = (param, type) => {
    switch (type) {
        case "N":
            return Number.parseInt(param);
        default:
            return param;
    }
}
/********************************
* HTTP Get method for list objects *
********************************/
//api/:location
app.get(path + hashKeyPath, function(req, res) {
    var condition = {}
    condition[partitionKeyName] = {
        ComparisonOperator: 'EQ'
    }
    if (userIdPresent && req.apiGateway) {
        condition[partitionKeyName]['AttributeValueList'] = [req.apiGateway.event.requestContext.identity.cognitoIdentityId || UNAUTH];
    } else {
        try {
            condition[partitionKeyName]['AttributeValueList'] = [ convertUrlType(req.params[partitionKeyName], partitionKeyType) ];
        } catch(err) {
            res.statusCode = 500;
            res.json({error: 'Wrong column type ' + err});
        }
    }
    let queryParams = {
        TableName: tableName,
        KeyConditions: condition
    }
    console.log(`req gg cond::`, JSON.stringify(condition), `params`, Object.entries(req.params).map(([i, k]) => i + ' ' + k).join(','))
    dynamodb.query(queryParams, (err, data) => {
        if (err) {
            res.statusCode = 500;
            res.json({error: 'Could not load items: ' + err});
        } else {
            res.json(data.Items);
        }
    });
});
Results of the console.log I put in to debug:
req gg cond::
{
    "location": {
        "ComparisonOperator": "EQ",
        "AttributeValueList": [
            "getShipContainer"
        ]
    }
}
params location getShipContainer
Shouldn't the expected query be using location and ignoring "getShipContainer" completely? I'm very confused because the code was auto-generated. getShipContainer is the name of the Lambda function being called.
I also tested this in the API Gateway test console with the same result.
Here is a quick screenshot of my DynamoDB table as well:
The issue is mentioned in this GitHub issue. Because the route is defined as path + hashKeyPath ("/api" + "/:location"), the path segment after /api/ is captured as req.params.location, which is why getShipContainer ends up in the key condition. Change your handler function to this, and https://xxxxx.execute-api.us-east-1.amazonaws.com/staging/api/location will return the list of items:
app.get(path + hashKeyPath, function (req, res) {
    let scanParams = {
        TableName: tableName,
    };
    dynamodb.scan(scanParams, (err, data) => {
        if (err) {
            res.statusCode = 500;
            res.json({ error: "Could not load items: " + err });
        } else {
            res.json(data.Items);
        }
    });
});
And here's the code for filtering if the parameter is specified in the URL.
Location is one of the DynamoDB reserved words, so I've used an expression attribute name (#loc) to map it.
https://xxxxx.execute-api.us-east-1.amazonaws.com/staging/api/location?location=fl
app.get(path + hashKeyPath, function (req, res) {
    var filterParams = {};
    const location = req.query[partitionKeyName] || "";
    if (location) {
        filterParams = {
            FilterExpression: "#loc = :loc",
            ExpressionAttributeNames: {
                "#loc": "location",
            },
            ExpressionAttributeValues: {
                ":loc": location,
            },
        };
    }
    let scanParams = {
        TableName: tableName,
        ...filterParams,
    };
    dynamodb.scan(scanParams, (err, data) => {
        if (err) {
            res.statusCode = 500;
            res.json({ error: "Could not load items: " + err });
        } else {
            res.json(data.Items);
        }
    });
});
Usually I use a DynamoDB Global Secondary Index (GSI) to query items like this, but it depends on the use case. For example, I've used one to query a list of items by site ID.
The table would have attributes something like this:
itemID
itemName
siteID
router.get("/", function (req, res) {
const sortKeyName = "siteID";
let queryParams = {
TableName: tableName,
IndexName: "siteIDGSI",
KeyConditionExpression: "siteID = :site_id",
ExpressionAttributeValues: {
":site_id": req.params[sortKeyName],
},
};
dynamodb.query(queryParams, (err, data) => {
if (err) {
res.statusCode = 500;
res.json({ error: "Could not load items: " + err });
} else {
res.json({
statusCode: 200,
message: "List of Items in " + req.params[sortKeyName],
items: data.Items,
});
}
});
});
So you can create a GSI for containerId.
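A query against that index would then look roughly like the siteID example above. This is a sketch only; the index name containerIdGSI and the /container/:containerId route are assumptions:
// Sketch: query by containerId through an assumed GSI named "containerIdGSI".
router.get("/container/:containerId", function (req, res) {
    let queryParams = {
        TableName: tableName,
        IndexName: "containerIdGSI", // assumed index name
        KeyConditionExpression: "containerId = :cid",
        ExpressionAttributeValues: {
            ":cid": req.params.containerId,
        },
    };
    dynamodb.query(queryParams, (err, data) => {
        if (err) {
            res.statusCode = 500;
            res.json({ error: "Could not load items: " + err });
        } else {
            res.json(data.Items);
        }
    });
});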
I have searched for a solution to this error and found some answers, but none of them helped me fix it: Error: NJS-012: encountered invalid bind data type in parameter 2. Maybe the same error can occur in different scenarios.
Stored procedure definition
create or replace PROCEDURE SP_MEAL_GETMEALTYPES
(
    p_DataSource OUT Sys_RefCursor
)
AS
BEGIN
    OPEN p_DataSource FOR
        select mealtypeid, description from mealtypes;
END;
File name: menusStoredProc.js
"use strict";
var dbParams = require('../../oracle/dbParams');
function storedProcs() {
this.SP_USER_GETMENUS = {
name: 'sp_meal_getmealtypes',
params: {
dataSource: {val: null, type: dbParams.CURSOR, dir: dbParams.BIND_OUT}
},
resultSetColumns: ['mealTypeId','description']
}
}
module.exports = storedProcs;
File name: menus.js
var express = require('express');
var MenusStoreProc = require('../storedProcedures/menusStoredProc');
var oraDbAssist = require('../../oracle/oracleDbAssist');
var router = express.Router();

router.get('/getmenus', (req, res, next) => {
    var sp = new MenusStoreProc().SP_USER_GETMENUS;
    oraDbAssist.getConnection(function (err, conn) {
        if (err)
            return console.log('Connecting to db failed - ' + err);
        oraDbAssist.executeSqlWithConn(sp, false, conn, function (err, menus) {
            if (err)
                return console.log('Executing ' + sp.name + ' failed - ' + err);
            res.status(200).json(JSON.stringify(menus));
        });
    });
});

module.exports = router;
Function definition added - executeSqlWithConn
function executeSqlWithConn(sp, autoCommit, connection, next) {
    var sql = createProcedureSqlString(sp.name, sp.params);
    var params = buildParams(sp.params);
    connection.execute(sql, params, {autoCommit: autoCommit}, function(err, result) {
        if (err) {
            next(err, null);
            return;
        }
        var allRows = [];
        var numRows = 50; // number of rows to return from each call to getRows()
        for (var attributeName in result.outBinds) {
            if (result.outBinds[attributeName] && result.outBinds[attributeName].metaData) { // db response is a result set
                function fetchRowsFromResultSet(pResultSet, pNumRows) {
                    pResultSet.getRows(pNumRows, function(readErr, rows) {
                        if (err) {
                            pResultSet.close(function (err) { // always close the result set
                                next(readErr);
                            });
                            return;
                        }
                        allRows.push(rows);
                        if (rows.length === pNumRows) {
                            fetchRowsFromResultSet(result.outBinds[attributeName], numRows);
                            return;
                        }
                        var allRowsResult = Array.prototype.concat.apply([], allRows);
                        generateJsonFromDbResultSet(pResultSet.metaData, allRowsResult, sp, function(resultSet) {
                            pResultSet.close(function (err) { // always close the result set
                                next(null, resultSet);
                            });
                        });
                    });
                }
                fetchRowsFromResultSet(result.outBinds[attributeName], numRows);
                return;
            }
        }
        next(null, result.outBinds);
    });
}
Function definition added - buildParams
function buildParams(params) {
    for (var attributeName in params) {
        params[attributeName].val = typeof params[attributeName].val === 'undefined' ? null : params[attributeName].val;
        if (params[attributeName].type.is(dbParams.DATE))
            params[attributeName].val = params[attributeName].val ? new Date(params[attributeName].val) : null;
        params[attributeName].type = params[attributeName].type.value;
        params[attributeName].dir = params[attributeName].dir.value;
    }
    return params;
}
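For reference, a direct sanity check of the procedure with plain node-oracledb (bypassing the wrapper) would look roughly like this. This is a sketch only, and it assumes dbParams.CURSOR and dbParams.BIND_OUT ultimately map onto the oracledb constants:
var oracledb = require('oracledb');

// Sketch: call the procedure directly and read the ref cursor out-bind.
connection.execute(
    'BEGIN sp_meal_getmealtypes(:dataSource); END;',
    {
        dataSource: { type: oracledb.CURSOR, dir: oracledb.BIND_OUT }
    },
    function (err, result) {
        if (err) return console.log(err);
        var cursor = result.outBinds.dataSource;
        cursor.getRows(50, function (err, rows) {
            console.log(rows); // rows from "select mealtypeid, description from mealtypes"
            cursor.close(function () {}); // always close the result set
        });
    }
);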
Any help, dear members?
I am trying to add server-side pagination to a Node.js, Express and MongoDB API. The API uses Mongoose to handle the database. I am lost on how to customize the response from the controller.
Model:
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

const clientSchema = Schema({
    code: {
        type: String,
        required: [true, 'Code no puede estar vacio']
    },
    name: {
        type: String,
        required: [true, 'Name no puede estar vacio']
    }
}, {
    timestamps: true
});

const Client = module.exports = mongoose.model('clients', clientSchema);
Controller to get all the clients:
const mongoose = require("mongoose");
const Client = require('../models/client');
const clientController = {};
clientController.index = (limit, callback) => {
Client.find(callback).limit(limit);
};
module.exports = clientController;
Route to get the clients:
app.get('/api/clients', (req, res) => {
    Client.index(limit, (err, client) => {
        if (err) {
            res.status(500).json({
                msg: "Error en aplicacion",
                err
            });
        }
        res.status(200).json(client);
    });
});
How can I customize the result in the controller to something like this:
[
    {
        "totalRecords": "99999999999",
        "offset": "888888",
        "page": "4",
        "nextPage": "5",
        "result": {...}
    }
]
I already have a function to calculate the pagination, but I don't know how to add the pagination information to the result from the controller.
Before, I was adding the pagination data in the route, but I want to handle the pagination logic in the controller.
Or is it better to handle the pagination in the route?
Thanks in advance
You can create a static method on the Mongoose model, for example paginate.
Add this before declaring the Mongoose model:
clientSchema.statics.paginate = function(pageNo, callback) {
    var limit = 10;
    var skip = (pageNo - 1) * limit; // assuming pageNo starts at 1
    var self = this;

    // count documents first (countDocuments in newer Mongoose), then fetch the requested page
    self.count({}, function(err, totalCount) {
        if (err || totalCount === 0) {
            return callback('No Document in Database..', null);
        }
        // get paginated documents
        self.find().skip(skip).limit(limit).exec(function(err, docs) {
            if (err) {
                return callback('Error Occured', null);
            } else if (!docs) {
                return callback('Docs Not Found', null);
            } else {
                var result = {
                    "totalRecords": totalCount,
                    "page": pageNo,
                    "nextPage": pageNo + 1,
                    "result": docs
                };
                return callback(null, result);
            }
        });
    });
};
const Client = module.exports = mongoose.model('clients',clientSchema);
Then, in the controller/route, change it to:
app.get('/api/clients', (req, res) => {
    // You could put the page number in the request query or request params
    Client.paginate(req.body.pageNo, function(err, response) {
        if (err) {
            return res.status(500).json({
                message: "Error en aplicacion",
                error: err
            });
        }
        return res.status(200).json(response);
    });
});
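For a GET endpoint it is usually more natural to read the page number from the query string than from the body; a minimal sketch (the pageNo parameter name is assumed):
// Sketch: take the page number from the query string, defaulting to page 1.
app.get('/api/clients', (req, res) => {
    var pageNo = parseInt(req.query.pageNo, 10) || 1;
    Client.paginate(pageNo, function(err, response) {
        if (err) {
            return res.status(500).json({ message: "Error en aplicacion", error: err });
        }
        return res.status(200).json(response);
    });
});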
I have been learning about Q promises and tried to build some mock APIs to implement them. While doing so, I came across the following error:
Enterprise.forEach is not a function
My API code is as follows:
var mongoose = require('mongoose');
var Enterprise = mongoose.model('Enterprise_gpy');
var q = require('q');

var displayEnterprise = function(req, res) {
    function displayEnterpriseName() {
        var deferred = q.defer();
        Enterprise.forEach(function(err, doc) {
            if (err) {
                console.log('Error Finding Files');
                deferred.reject(err);
            } else {
                var name = Enterprise.enterprise_name;
                deferred.resolve({
                    name: name
                });
            }
            return deferred.promise;
        });
    }
    function displayEnterpriseEmail() {
        var deferred = q.defer();
        Enterprise.forEach(function(err, doc) {
            if (err) {
                console.log('Error Finding Files');
                deferred.reject(err);
            } else {
                var email = Enterprise.enterprise_email;
                deferred.resolve({
                    email: email
                });
            }
            return deferred.promise;
        });
    }
    q.all([
        displayEnterpriseName(),
        displayEnterpriseEmail()
    ])
    .then(function(success) {
        console.log(500, success);
    })
    .fail(function(err) {
        console.log(200, err);
    });
}

module.exports = {
    displayEnterprise: displayEnterprise
}
In your code, Enterprise is a Mongoose model, so when you try to loop over it with forEach you get
Enterprise.forEach is not a function
You can use forEach on the documents returned by Enterprise.find(), so use
Enterprise.find({}, function(err, docs) {
    if (err) {
        console.log('Error Finding Files');
        deferred.reject(err);
    } else {
        var names = [];
        docs.forEach(function(doc) {
            var name = doc.enterprise_name;
            names.push(name); // pushed into the names array
            //.....
        });
        deferred.resolve({
            names: names
        }); // resolve with all names
    }
});
instead of
Enterprise.find().forEach
You should also use
var name = doc.enterprise_name; instead of var name = Enterprise.enterprise_name;
and
var email = doc.enterprise_email; instead of var email = Enterprise.enterprise_email;
forEach only works on arrays, and you're using it on a Mongoose model.
Try this instead:
Enterprise.find().exec(function(err, docs) {
    docs.forEach(function(doc) {
        // do something with each document
    });
    // do something outside the loop
});
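Tying the fix back to the Q pattern from the question, displayEnterpriseName could resolve once with all the names. A minimal sketch, assuming the same Enterprise model and q import:
// Sketch: resolve the deferred once with every enterprise name found.
function displayEnterpriseName() {
    var deferred = q.defer();
    Enterprise.find({}, function(err, docs) {
        if (err) {
            console.log('Error Finding Files');
            deferred.reject(err);
        } else {
            var names = docs.map(function(doc) {
                return doc.enterprise_name;
            });
            deferred.resolve({ names: names });
        }
    });
    return deferred.promise; // return outside the callback so q.all() receives a promise
}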
I have the following code:
var method = PushLoop.prototype;
var agent = require('./_header');
var request = require('request');
var User = require('../models/user_model.js');
var Message = require('../models/message_model.js');
var async = require('async');

function PushLoop() {};

method.startPushLoop = function() {
    getUserList();

    function getUserList() {
        User.find({}, function(err, users) {
            if (err) throw err;
            if (users.length > 0) {
                getUserMessages(users);
            } else {
                setTimeout(getUserList, 3000);
            }
        });
    }

    function getUserMessages(users) {
        // console.log("getUserMessages")
        async.eachSeries(users, function (user, callback) {
            var params = {
                email: user.email,
                pwd: user.password,
                token: user.device_token
            };
            messageRequest(params);
            callback();
        }, function (err) {
            if (err) {
                console.log(err);
                setTimeout(getUserList, 3000);
            }
        });
    }

    function messageRequest(params) {
        var url = "https://voip.ms/api/v1/rest.php?api_username=" + params.email + "&api_password=" + params.pwd + "&method=getSMS&type=1&limit=5";
        request(url, function(err, response, body) {
            if (!err) {
                var responseObject = JSON.parse(body);
                var messages = responseObject.sms;
                if (responseObject["status"] == "success") {
                    async.eachSeries(messages, function(message, callback) {
                        console.log(params.token);
                        saveMessage(message, params.token);
                        callback();
                    }, function(err) {
                        if (err) {
                            console.log(err);
                        }
                        // setTimeout(getUserList, 3000)
                    });
                } else {
                    // setTimeout(getUserList, 3000)
                }
            } else {
                console.log(err);
                // setTimeout(getUserList, 3000)
            }
        });
        setTimeout(getUserList, 3000);
    }

    function saveMessage(message, token) {
        // { $and: [ { price: { $ne: 1.99 } }, { price: { $exists: true } }
        // Message.find({ $and: [{ message_id: message.id}, {device_token: token}]}, function (err, doc){
        Message.findOne({message_id: message.id}, function (err, doc) {
            if (!doc) {
                console.log('emtpy today');
                var m = new Message({
                    message_id: message.id,
                    did: message.did,
                    contact: message.contact,
                    message: message.message,
                    date: message.date,
                    created_at: new Date().toLocaleString(),
                    updated_at: new Date().toLocaleString(),
                    device_token: token
                });
                m.save(function(e) {
                    if (e) {
                        console.log(e);
                    } else {
                        agent.createMessage()
                            .device(token)
                            .alert(message.message)
                            .set('contact', message.contact)
                            .set('did', message.did)
                            .set('id', message.id)
                            .set('date', message.date)
                            .set('message', message.message)
                            .send();
                    }
                });
            }
        }); //.limit(1);
    }
};

module.exports = PushLoop;
This actually works perfectly fine in my development environment. However, in production (I'm using OpenShift) the Mongo documents get saved in an endless loop, so it looks like the if (!doc) condition always returns true and the document gets created each time. I'm not sure if this could be a Mongoose issue; I also tried the find method instead of findOne. My dev environment has Node 0.12.7 and OpenShift has 0.10.x, so that could be the issue, and I'm still investigating. But if anybody can spot an error in my logic/code that I cannot see, please let me know.
Thanks!
I solved this issue by using a "series"-like pattern with the shift method on the users array. The Mongoose upsert findOneOrCreate is good, but if a document is found it is returned, and if one isn't found and is therefore created, it is also returned, so I could not distinguish a newly inserted doc from a found doc. Instead I used the same findOne function, which returns null if no doc is found; in that case I just create it and send the push notification. It's still a bit ugly, and I know I could have used promises or the async lib (see the sketch after the code); I might refactor in the future. This works for now.
function PushLoop() {};

var results = [];

method.go = function() {
    var userArr = [];
    startLoop();

    function startLoop() {
        User.find({}, function(err, users) {
            if (err) throw err;
            users.forEach(function(u) {
                userArr.push(u);
            });

            function async(arg, callback) {
                var url = "https://voip.ms/api/v1/rest.php?api_username=" + arg.email + "&api_password=" + arg.password + "&method=getSMS&type=1&limit=5";
                request.get(url, {timeout: 30000}, function(err, response, body) {
                    if (!err) {
                        var responseObject = JSON.parse(body);
                        var messages = responseObject.sms;
                        var status = responseObject.status;
                        if (status === "success") {
                            messages.forEach(function(m) {
                                var message = new Message({
                                    message_id: m.id,
                                    did: m.did,
                                    contact: m.contact,
                                    message: m.message,
                                    date: m.date,
                                    created_at: new Date().toLocaleString(),
                                    updated_at: new Date().toLocaleString(),
                                    device_token: arg.device_token
                                });
                                var query = { $and: [{message_id: m.id}, {device_token: arg.device_token}] };
                                var query1 = { message_id: m.id };
                                Message.findOne(query).lean().exec(function (err, doc) {
                                    if (!doc || doc == null) {
                                        message.save(function(e) {
                                            console.log("message saved");
                                            if (e) {
                                                console.log("there is an error");
                                                console.log(e);
                                            } else {
                                                console.log(message.device_token);
                                                var messageStringCleaned = message.message.toString().replace(/\\/g, "");
                                                var payload = {
                                                    "contact": message.contact,
                                                    "did": message.did,
                                                    "id": message.message_id,
                                                    "date": message.date,
                                                    "message": messageStringCleaned
                                                };
                                                var note = new apns.Notification();
                                                var myDevice = new apns.Device(message.device_token);
                                                note.expiry = Math.floor(Date.now() / 1000) + 3600; // Expires 1 hour from now.
                                                note.badge = 3;
                                                note.alert = messageStringCleaned;
                                                note.payload = payload;
                                                apnsConnection.pushNotification(note, myDevice);
                                            }
                                        });
                                    }
                                });
                            });
                        } else {
                            console.log(err);
                        }
                    }
                });
                setTimeout(function() {
                    callback(arg + "testing 12");
                }, 1000);
            }

            // Final task (same in all the examples)
            function series(item) {
                if (item) {
                    async(item, function(result) {
                        results.push(result);
                        return series(userArr.shift());
                    });
                } else {
                    return final();
                }
            }

            function final() {
                console.log('Done');
                startLoop();
            }

            series(userArr.shift());
        });
    }
};

module.exports = PushLoop;
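As the possible refactor mentioned above, the same one-user-at-a-time flow can be expressed with async.eachSeries instead of the hand-rolled series()/shift() pattern. This is a sketch only, reusing the async library and models from the question; processUser is an assumed helper that fetches the SMS list, saves new messages, and sends the push:
// Sketch: sequential per-user processing with async.eachSeries.
function pushLoopOnce() {
    User.find({}, function(err, users) {
        if (err) throw err;
        async.eachSeries(users, function(user, next) {
            processUser(user, next); // assumed helper: fetch SMS, save new messages, push notify
        }, function(err) {
            if (err) console.log(err);
            // schedule the next pass once every user has been processed
            setTimeout(pushLoopOnce, 3000);
        });
    });
}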