Remove object array items by comparing object array from mongodb - node.js

There is a document that has an array of objects inside, like:
ObjectId('')
fruits: [{_id: '2'}, {_id: '3'}, {_id: '4'}]
I want to delete the items in fruits_id = [{_id: '3'}, {_id: '4'}] from that array:
fruit_group.updateOne(
    {collection_id: collection_id},
    {$pullAll: {"fruits": fruits_id}}
)
So far I found the logic below, which I think is not efficient:
routes.post('/removeFruits', async (request, response, next) => {
    var post_data = request.body;
    var collection_id = post_data.collection_id;
    var fruits_ids = JSON.parse(post_data.fruits_ids);
    var prev_fruits;
    await fruit_group.findOne({'collection_id': collection_id}, function (err, result) {
        if (err) {
            console.log("Some error occurred");
            response.json({'message': "Some error occurred", 'result': 'false'});
        }
        prev_fruits = result.fruits;
    });
    for (var i = 0; i < fruits_ids.length; i++) { // this will delete all occurrences of the item from the array
        var key = fruits_ids[i]._id;
        prev_fruits.filter(x => x._id === key).forEach(x => prev_fruits.splice(prev_fruits.indexOf(x), 1));
    }
    await fruit_group.updateOne({'collection_id': collection_id}, {$set: {'fruits': prev_fruits}}, function (err, result) {
        if (err) {
            response.json({'message': "Some error occurred", 'result': 'false'});
        }
        response.json({'message': 'Deletion successful', 'result': 'true'});
    });
});
Is there a better way to achieve the same result?

Assuming fruits_id = [{ _id: '3' }, { _id: '4' }], you could do something like this using $pull and $in:
await fruit_group.updateOne({'collection_id': collection_id}, { $pull: { fruits: { $in: fruits_id }}})
This follows the example of removing all items that equal a specified value.
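Putting that into the route from the question, the whole handler could collapse to a single update call. A minimal sketch, assuming as above that fruits_id is an array of plain {_id: ...} objects that exactly equal the stored array elements:
routes.post('/removeFruits', async (request, response) => {
    try {
        const collection_id = request.body.collection_id;
        const fruits_id = JSON.parse(request.body.fruits_ids);
        // $pull removes every array element that equals one of the listed objects
        await fruit_group.updateOne(
            { collection_id: collection_id },
            { $pull: { fruits: { $in: fruits_id } } }
        );
        response.json({ message: 'Deletion successful', result: 'true' });
    } catch (err) {
        response.json({ message: 'Some error occurred', result: 'false' });
    }
});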

Related

Getting an {"message":"Invalid update pipeline operator: \"_id\""} error

I am trying to update two rows in my players table based on the id. I am trying to use the updateMany method, where the id can be found in an array of ids, but I am getting the {"message":"Invalid update pipeline operator: \"_id\""} error. I checked the array to make sure it contains valid ids. Here is my code:
const winningTeam = asyncHandler(async (req, res) => {
    req.body.forEach((element) => {
        element.wins += 1;
        element.lastPlayed = Date.now();
        element.percentage = (element.wins / (element.wins + element.wins)) * 1000;
    });
    let usersId = [];
    usersId.push(req.body[0]._id);
    if (req.body.length === 2) {
        usersId.push(req.body[1]._id);
    }
    const player = await Player.updateMany({ _id: { $in: usersId } }, req.body);
    if (player) {
        res.status(200).json(player);
    } else {
        res.status(400);
        throw new Error("Invalid Data");
    }
});
You should use the $set operator for the update parameter. Because req.body is an array, passing it directly makes MongoDB interpret it as an aggregation pipeline, and every pipeline stage must start with an operator such as $set, which is why you get the "Invalid update pipeline operator: \"_id\"" error. I'm not sure about the structure of your req.body, but it should be something like this:
Player.updateMany({ _id: { $in: usersId } }, {$set: req.body});
instead of this:
Player.updateMany({ _id: { $in: usersId } }, req.body);
Take a look at the docs for updateMany.
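Note also that updateMany applies one update document to every matched record, so per-player values cannot be sent in a single call. A minimal sketch of a per-player alternative (the loop and the $set field names are assumptions based on the question, not part of the original answer):
// Assumes req.body is an array of player objects that already carry the new values.
for (const element of req.body) {
    await Player.updateOne(
        { _id: element._id },
        {
            $set: {
                wins: element.wins,
                lastPlayed: element.lastPlayed,
                percentage: element.percentage
            }
        }
    );
}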

How to get all data matching each record from MongoDB using Mongoose and Node.js

I tried to get the value of product_name from my MongoDB using Mongoose, but I do not know how to do it.
My DB Data collection:
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe805"),
    product_name: "Test1"
},
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe806"),
    product_name: "Test2"
},
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe807"),
    product_name: "Test3"
},
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe808"),
    product_name: "Test4"
},
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe809"),
    product_name: "Test5"
}
data.controller.js:
module.exports.getData = (req, res, next) => {
    var tableCate = mongoose.model("Product");
    tableCate.find({ product_name }, function (err, docs) {
        if (err) {
            console.log(err);
            return;
        } else {
            console.log(docs); // output should be Test1,Test2,Test3,Test4,Test5
        }
    });
};
db.collection.find returns a Cursor, which is a pointer to the result set of a query. To access the result you can use db.collection.find({}).toArray() to get an array of documents, or iterate the cursor with
.forEach(function (item) {
    // print or do whatever you want with each item
})
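With Mongoose specifically, the find callback already receives an array of documents rather than a driver cursor, so a minimal sketch of the controller (assuming the model is registered as "Product" and a Mongoose version that still accepts callbacks) could project just product_name and map the values out:
module.exports.getData = (req, res, next) => {
    var Product = mongoose.model("Product");
    // Empty filter selects every document; the projection keeps only product_name.
    Product.find({}, "product_name", function (err, docs) {
        if (err) {
            return next(err);
        }
        var names = docs.map(function (doc) {
            return doc.product_name;
        });
        console.log(names); // [ 'Test1', 'Test2', 'Test3', 'Test4', 'Test5' ]
        res.json(names);
    });
};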

MongoDB Result can console.log() but not pushed to array NodeJS

I have a list of users I would like to look up in my MongoDB database and once I look them up, I want to store them in an array so I can do things with the data. I'm using the official mongodb package in NodeJS. Here's the code
var chatsData = []
for (let userID of chatIDs) {
    db.collection('users').find({ '_id': ObjectId(userID) }).toArray((err, result) => {
        if (err) throw err
        chatsData.push(result)
        console.log(result)
    })
}
console.log('vvv Final data in array: vvv')
console.log(chatsData)
When I run this code, I get this:
vvv Final data in array: vvv
[]
[
{
_id: 5eae4c90ad1dd6304c69a75a,
usnm: 'gohjunhao',
eml: 'junhao#gmail.com',
phnm: '00000000',
pswd: '$2a$10$IUaxiweNrUUwxZP6XEQfFeTTnbta13/kv6DdebwJ0WT/bM.3fc5ay',
register_date: 2020-05-03T04:46:08.054Z,
__v: 0
}
]
[
{
_id: 5ead401f8059852114bf9867,
usnm: 'gfox.2020',
eml: 'carrie#gmail.com',
phnm: '11111111',
pswd: '$2a$10$UYaEraoI4Kj0dI.nt5Hbr.LgDL1TNtDOsz7tcxETJW7HRtmgWo.UK',
register_date: 2020-05-02T09:40:47.684Z,
__v: 0
}
]
How do I get a proper array of data so it can be used later? Is what I'm doing wrong? Do I need to use a .then() or async/await?
Here's the full code
MongoClient.connect(url, { useUnifiedTopology: true }).then(async chooseDB => {
    db = chooseDB.db('nodejs')
    // Get a list of all tables
    db.listCollections().toArray(async (err, result) => {
        /***** YOU DON'T NEED TO UNDERSTAND THIS PART OF THE CODE ******/
        if (err) throw err
        var chatList = []
        var chatIDs = []
        for (let i = 0; i < result.length; i++) {
            const table = result[i]
            if (table.name.indexOf(data) > 1) {
                // add tables with personal id to chatList
                chatList.push(table.name)
                // add id's of other chats to our table
                chatIDs.push(table.name.replace('dm', '').replace('~', '').replace(data, ''))
            }
        }
        /***** IT'S JUST HOW I GET MY CHAT ID'S *****/
        // Get data on users
        var chatsData = []
        for (let userID of chatIDs) {
            try {
                let temp = await db.collection('users').find({ '_id': toMongoObjectId(userID) }).toArray()
                chatsData.push(temp)
            } catch (error) {
                console.log(error)
            }
        }
        console.log('vvv Final data in array: vvv')
        console.log(chatsData)
        toClient.userData = chatsData
        toClient.users = chatList
        socket.emit('res_chatList', toClient)
    })
})
This can be solved using async/await: mark the callback of the then block as async, like
mongoclient.connect().then(async (cli) => {
    db = cli.db(dbName);
    ...
})
Your logic to fetch the data will then be:
var chatsData = []
for (let userID of chatIDs) {
    try {
        let temp = await db.collection('users').find({ '_id': ObjectId(userID) }).toArray();
        chatsData.push(temp);
    } catch (error) {
        console.log(error);
    }
}
console.log(chatsData);
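If the lookups do not have to run one after another, a parallel variant is also possible (a sketch, not part of the original answer; it assumes the same db connection, ObjectId from the mongodb package, and that it runs inside the same async callback):
const chatsData = await Promise.all(
    chatIDs.map(userID =>
        // find().toArray() returns a promise when no callback is passed
        db.collection('users').find({ _id: ObjectId(userID) }).toArray()
    )
);
console.log(chatsData);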

How to implement async in a for loop?

I have a collection called 'alldetails' which has the details of some other collections:
{
    "name" : "Test1",
    "table_name" : "collection1",
    "column_name" : "column1"
},
{
    "name" : "Test2",
    "table_name" : "collection2",
    "column_name" : "column2"
},
{
    "name" : "Test3",
    "table_name" : "collection3",
    "column_name" : "column3"
}
I have collection1, collection2 and collection3, which have column1, column2 and column3 respectively.
I have to fetch all the names from 'alldetails', and for each one get the min and max value from the corresponding collection based on its column name.
So I want output like below:
{
    name: ["Test1", "Test2", "Test3"],
    date: [
        {min_date: "2018-12-01", max_date: "2018-12-31", name: "Test1"},
        {min_date: "2018-12-01", max_date: "2018-12-31", name: "Test2"},
        {min_date: "2018-12-01", max_date: "2018-12-31", name: "Test3"}
    ]
}
I tried the code below, but because of its non-blocking nature it is not waiting for the responses:
alldetails.find({}, { _id: 0 }).then(async function(result) {
    let result_data = {};
    let resolvedFinalArray = {};
    let array = [];
    result_data["name"] = [];
    result_data["date"] = [];
    resolvedFinalArray = await Promise.all(result.map(async value => {
        result_data["name"].push(value.name)
        getResult(value.table_name, value.column_name, function(response) {
            result_data["date"].push({min_date: response.minvalue, max_date: response.maxvalue, name: value.name})
        });
    }));
    setTimeout(function() {
        console.log(resolvedFinalArray);
    }, 3000);
});
Please suggest a solution.
If you want to wait for getResult then you need to return a Promise from the result.map callback.
Also, you are not pushing anything to resolvedFinalArray, so there is no point in console.log(resolvedFinalArray).
alldetails.find({}, {_id: 0}).then(async (result) => {
    let result_data = {};
    result_data["name"] = [];
    result_data["date"] = [];
    await Promise.all(result.map(value => {
        // create a Promise that resolves when the getResult callback is fired
        return new Promise((resolve) => {
            getResult(value.table_name, value.column_name, (response) => {
                result_data["name"].push(value.name);
                result_data["date"].push({
                    min_date: response.minvalue,
                    max_date: response.maxvalue,
                    name: value.name
                });
                resolve();
            });
        });
    }));
    console.log(result_data);
});
or using a for loop:
alldetails.find({}, {_id: 0}).then(async (result) => {
    let result_data = {};
    result_data["name"] = [];
    result_data["date"] = [];
    for (let i = 0; i < result.length; i++) {
        const value = result[i];
        await new Promise((resolve) => {
            getResult(value.table_name, value.column_name, (response) => {
                result_data["name"].push(value.name);
                result_data["date"].push({
                    min_date: response.minvalue,
                    max_date: response.maxvalue,
                    name: value.name
                });
                resolve();
            });
        });
    }
    console.log(result_data);
});
Use async.eachOfLimit if you want to apply an async function to every element of an array:
var async = require("async");
var array = [{_id: "...."}, {...}, {...}];
async.eachOfLimit(array, 1, function(element, index, cb) {
    myAsyncFunctionWithMyElement(element, function(err) {
        return cb(err);
    });
}, function(err) {
    // final callback
});
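Applied to the question's data, that could look something like this (a sketch; it assumes getResult keeps the single-argument callback from the question):
var async = require("async");

alldetails.find({}, { _id: 0 }).then(function (result) {
    var result_data = { name: [], date: [] };
    // limit = 1 processes one document at a time, waiting for getResult before moving on
    async.eachOfLimit(result, 1, function (value, index, cb) {
        getResult(value.table_name, value.column_name, function (response) {
            result_data.name.push(value.name);
            result_data.date.push({
                min_date: response.minvalue,
                max_date: response.maxvalue,
                name: value.name
            });
            cb();
        });
    }, function (err) {
        console.log(result_data);
    });
});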
The array forEach method won't wait for an async function (unless you do deeply evil things like redefining the prototype). This question has a nice insight into the internals.
If you don't want to rely on external libraries, an easy (and my favourite) approach is something like:
for (let i = 0; i < <your array>.length; i++) {
    await Promise.all( <your logic> );
}
Just adapt it to your need! :)
You might want to use the for await...of loop. See this blog post for details.
This, IMHO, is the most modern way to do it, and it doesn't require you to load any external dependencies, since it is built into the language itself. It's basically very similar to the classic for...of loop.
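The answer links to a post rather than showing code, but a minimal sketch of the idea (inside an async function, and assuming the callback-based getResult from the question is wrapped in a hypothetical promise-returning helper) might look like:
// Hypothetical helper that wraps the question's callback-based getResult in a Promise.
const getResultAsync = value =>
    new Promise(resolve =>
        getResult(value.table_name, value.column_name, response =>
            resolve({ value, response })));

const result = await alldetails.find({}, { _id: 0 });
const result_data = { name: [], date: [] };

// map() starts every lookup; for await...of then consumes the results in order.
for await (const { value, response } of result.map(getResultAsync)) {
    result_data.name.push(value.name);
    result_data.date.push({
        min_date: response.minvalue,
        max_date: response.maxvalue,
        name: value.name
    });
}
console.log(result_data);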
This should work if lexical scope is taken into consideration. async.each is also a good option; it would reduce the if/else blocks and manage the promises for you.
alldetails.find({}, { _id: 0 })
    .exec((err, result) => {
        if (!err) {
            let resolvedFinalArray = [];
            result.map((value) => {
                resolvedFinalArray.push({
                    name: value.name,
                    date: []
                });
                getResult(value.table_name, value.column_name, (err, response) => {
                    if (!err) {
                        // find the entry that was pushed for this value by its name
                        resolvedFinalArray[resolvedFinalArray.findIndex(item => item.name === value.name)]['date'].push({
                            min_date: response.minvalue,
                            max_date: response.maxvalue,
                            name: value.name
                        });
                    } else {
                        // Send your error message.
                        // res.status(500).send(err);
                    }
                });
            });
            console.log(resolvedFinalArray);
            // res.send(resolvedFinalArray);
        } else {
            // Send your error message.
            // res.status(500).send(err);
        }
    });

How can I have an auto-increment field in nedb?

I want an auto-increment field exactly like in relational or object databases, so I need an integer _id field whose value is set automatically to one more than the last record's _id, like this:
data:
{_id: 1, name: "foo"}
{_id: 2, name: "bar"}
remove the last record:
{_id: 1, name: "foo"}
add a new record:
{_id: 1, name: "foo"}
{_id: 3, name: "newbar"}
I added a function to my datastore that calculates the maximum _id, adds 1 (max(_id) + 1) and sets that as the field value, but there is a problem here:
When you use an auto-increment field in a relational database, a deleted record's number stays reserved and newly inserted records keep incrementing, but with my approach a new record gets the _id of the removed record.
My code is:
var Datastore = require('nedb'),
    localDb = new Datastore({
        filename: __dirname + '/dbFilePath.db',
        autoload: true
    });

localDb.getMax = function (onFind) {
    // sort by _id descending and take the first document to get the current maximum
    localDb.find({}).sort({_id: -1}).limit(1).exec(function (err, docs) {
        onFind && onFind(err, docs.length ? docs[0]._id : 0);
    });
    return localDb;
};

localDb.insertAutoId = function (data, onAdd) {
    localDb.getMax(function (err, maxValue) {
        var newIndex = maxValue + 1;
        if (!data["_id"])
            data["_id"] = newIndex;
        localDb.insert(data, function (err, newDoc) {
            onAdd && onAdd(err, newDoc);
        });
    });
    return localDb;
};
An improved answer for nedb would be:
db.getAutoincrementId = function (cb) {
    this.update(
        { _id: '__autoid__' },
        { $inc: { seq: 1 } },
        { upsert: true, returnUpdatedDocs: true },
        function (err, affected, autoid) {
            cb && cb(err, autoid.seq);
        }
    );
    return this;
};
Which is equivalent to the mongodb way:
db.getAutoincrementId = function (cb) {
    this.findAndModify({
            query: { _id: '__autoid__' },
            update: { $inc: { seq: 1 } },
            new: true
        },
        function (err, autoid) {
            cb && cb(err, autoid.seq);
        }
    );
    return this;
};
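A short usage sketch for either version (the inserted document is only an example):
db.getAutoincrementId(function (err, id) {
    if (err) return console.error(err);
    // use the freshly reserved sequence number as the new record's _id
    db.insert({ _id: id, name: 'newbar' }, function (err, newDoc) {
        console.log(newDoc);
    });
});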
You can store the last value of the index in the database. Something like this:
var Datastore = require('nedb');
var db = new Datastore({
    filename: __dirname + '/dbFilePath.db',
    autoload: true
});

// Initialize the initial index value
// (if it already exists in the database, it is not overwritten)
db.insert({_id: '__autoid__', value: -1});

db.getAutoId = function (onFind) {
    db.findOne({ _id: '__autoid__' }, function (err, doc) {
        if (err) {
            onFind && onFind(err)
        } else {
            // Update and return the index value
            db.update({ _id: '__autoid__' }, { $set: {value: ++doc.value} }, {},
                function (err, count) {
                    onFind && onFind(err, doc.value);
                });
        }
    });
    return db;
}
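Usage would then be along these lines (a sketch; note the counter starts at -1, so the first id handed out is 0):
db.getAutoId(function (err, id) {
    if (err) return console.error(err);
    db.insert({ _id: id, name: 'foo' }, function (err, newDoc) {
        console.log(newDoc); // e.g. { _id: 0, name: 'foo' }
    });
});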
I do not know if this will still be useful to you. I use a separate datastore to store the next ids, inspired by the MySQL system, which always reserves the next id.
So I created a function that checks whether an id record exists for a given datastore name; if it does not, it inserts one and returns 1, and if it does exist, it returns the stored value and advances the sequence.
This gave me full control over my ids.
The schema would be:
{
    name: nameDb,
    nextId: itemID
}
If you want, you can create functions for updating documents, versioning, etc.
Example:
db.autoincrement = new Datastore({filename: 'data/autoincrement.db', autoload: true});

function getUniqueId(nameDb, cb) {
    db.autoincrement.findOne({name: nameDb}, function (err, doc) {
        if (err) {
            throw err;
        } else {
            if (doc) {
                const itemID = doc.nextId + 1;
                db.autoincrement.update({name: nameDb}, {
                    name: nameDb,
                    nextId: itemID
                }, {}, function (err, numReplaced) {
                    db.autoincrement.persistence.compactDatafile();
                    if (err) {
                        throw err;
                    } else {
                        // console.log(numReplaced);
                    }
                    cb(doc.nextId);
                });
            } else {
                const data = {
                    name: nameDb,
                    nextId: 2
                };
                db.autoincrement.insert(data, function (err, newDoc) {
                    if (err) {
                        throw err;
                    } else {
                        // console.log(newDoc);
                    }
                    cb(1);
                });
            }
        }
    });
}
Insert new document example (assuming the document to insert comes from req.body):
function insert(req, cb) {
    // assumption: the new document is passed in on the request body
    const data = req.body;
    getUniqueId("testdb", function (uniqueId) {
        data.itemId = uniqueId;
        db.testdb.insert(data, function (err, newDoc) {
            if (err) {
                cb({error: '1', message: 'error#2'});
                throw err;
            }
            cb({error: '0', message: 'Item add'});
        });
    });
}
