Inserting multiple documents into MongoDB with Mongoskin - node.js

I'm trying to insert several documents through a loop, but I'm running into a problem with the uniqueness of the ObjectIds.
I have this function:
// Fetch all client documents.
db.collection('clients').find({}, {cost: 1}).toArray(function(err, dbDocs) {
    if (err) throw err;

    // Set up a general doc.
    var currentTime = new Date();
    var doc = {
        year: currentTime.getFullYear(),
        quarter: Math.floor(currentTime.getMonth() / 3) + 1,
        paid: false
    };

    // For each client document, insert a document to invoices collection.
    for (var i = 0, j = dbDocs.length; i < j; i += 1) {
        doc.quarterCost = (dbDocs[i].cost.monthly * 3);
        doc.client_id = dbDocs[i]._id;
        db.collection('invoices').insert(doc, function(err, result) {
            if (err) throw err;
            if (result) console.log('Invoice created');
        });
    }
});
I get a "mongoError: E110000 Duplicate key error index: ... " after the first document is created and inserted.
Question: Why are this loop trying to insert every document with the same ObjectID, and therefore generation an error? How do i rewrite this to make sure the ObjectId is random every time?

On the first insert, doc is modified in place: the driver adds an _id field to it. Drivers don't add a new _id value to a document that already has one, so every later insert reuses the same _id and fails with a duplicate key error. You need to reset the _id, or build a fresh doc, at the start of each loop iteration.
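A minimal sketch of that fix, using the same collections as above: the doc object is created inside the loop, so the driver assigns a fresh _id on every insert.

db.collection('clients').find({}, {cost: 1}).toArray(function(err, dbDocs) {
    if (err) throw err;
    var currentTime = new Date();
    for (var i = 0, j = dbDocs.length; i < j; i += 1) {
        // A new object per iteration, so the driver generates a new _id each time.
        var doc = {
            year: currentTime.getFullYear(),
            quarter: Math.floor(currentTime.getMonth() / 3) + 1,
            paid: false,
            quarterCost: dbDocs[i].cost.monthly * 3,
            client_id: dbDocs[i]._id
        };
        db.collection('invoices').insert(doc, function(err, result) {
            if (err) throw err;
            if (result) console.log('Invoice created');
        });
    }
});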


How to use promise, .then.. properly?

First of all, I'm sorry that I couldn't come up with a better title for this post. Also, I'm a beginner in Node.js. I have a problem I'm hoping to find an answer to. Please help me, and thank you so much.
I am trying to create a collection, insert data into it, and retrieve that same data in the same piece of code. The call dbo.collection(nameofCollection).insertMany(data) inserts the data into the collection if it already exists; if it doesn't exist, it creates the collection and then inserts.
This is the code that I've written for this task :
var excel = require('excel4node');
const mongoose = require('mongoose');
const mongodb = require('mongodb')
const mdb = mongodb.MongoClient;

mdb.connect("mongodb://localhost:27017/", async function(err, db) {
    if (err) throw err
    const nameCollection = "tempCollection"
    const dbo = db.db("reports")
    const data = [
        {
            empID : "001ev",
            empName : "xyz",
            salary : 20000
        },
        {
            empID : "00234",
            empName : "abc",
            salary : 10000
        },
        {
            empID : "11345",
            empName : "pqr",
            salary : 15000
        }
    ];
    dbo.collection(nameCollection).insertMany(data, function(err, res) {
        if (err) throw err;
        console.log("Number of documents inserted: " + res.insertedCount);
        console.log("first")
    })
    const records = await dbo.collection(nameCollection).find({}, { projection: { _id: 0} }).toArray()
    console.log(records)
    console.log("second")
})
Now, the issue I'm facing with this code: when there is no collection in the database, it creates one and inserts the data, and up to that point it works fine. But the find query executes first and then the insert executes. If the collection already exists, everything works as expected: the data is inserted and then the find query runs.
To put it shortly: if there is no collection named nameCollection, the find query is executed first and returns []. Only then is the collection created and the data inserted. Why is this happening, when in my code I create the collection first, insert the data, and then retrieve it?
This is because of the asynchronous runtime: insertMany is given a callback instead of being awaited, so there is no guarantee that the find call will execute last.
Try replacing the insert and find calls with this:
try {
    const res = await dbo.collection(nameCollection).insertMany(data);
    console.log("Number of documents inserted: " + res.insertedCount);
    console.log("first");
    const records = await dbo.collection(nameCollection).find({}, { projection: { _id: 0 } }).toArray();
    console.log(records);
    console.log("second");
} catch (error) {
    throw error; // note: rethrow the caught `error`, not `err`
}
In this case you can also omit the try/catch block entirely, since rethrowing the error has no effect on error handling here.
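For reference, a minimal sketch of the whole flow with the fix applied, using the same connection string and collection names as above (error handling kept deliberately simple):

const mongodb = require('mongodb');
const mdb = mongodb.MongoClient;

mdb.connect("mongodb://localhost:27017/", async function(err, db) {
    if (err) throw err;
    const nameCollection = "tempCollection";
    const dbo = db.db("reports");
    const data = [
        { empID: "001ev", empName: "xyz", salary: 20000 },
        { empID: "00234", empName: "abc", salary: 10000 },
        { empID: "11345", empName: "pqr", salary: 15000 }
    ];
    // Await the insert so the find below only runs after the documents exist.
    const res = await dbo.collection(nameCollection).insertMany(data);
    console.log("Number of documents inserted: " + res.insertedCount);
    const records = await dbo.collection(nameCollection).find({}, { projection: { _id: 0 } }).toArray();
    console.log(records);
    db.close();
});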

Update Entire Collection In MongoDB Node Js

I have a program that receives a new file from an FTP server at some specific interval, and I insert the new file's data into my MongoDB database. The fields stay the same; only the data changes in each new file. The problem is that every time I insert a whole new set of documents, so the collection keeps growing: for example, the first time there are 20 records, the second time 40, then 60, and so on. What I want is to check which fields' data changed in the new file before inserting it, and update only those fields in the database instead of inserting a whole new document. Does Mongoose or MongoDB provide a solution for this, i.e. if I pass data as a parameter, it should compare my existing collection with the new data and update only the changed fields? Please help me, I'm stuck, thanks :). I'm using Node.js.
var c = new Client();
var connectionProperties = {
    host: 'ABC',
    user: 'ABC',
    port: 'ABC',
    password: 'ABC',
};
c.connect(connectionProperties);

c.on('ready', function () {
    c.get('path-to-excel-file', function (err, stream) {
        if (err) throw err;
        stream.once('close', function () {
            const workBook = XLSX.readFile('Convertedfile.xlsx');
            XLSX.writeFile(workBook, 'Convertedfile', { bookType: "csv" });
            csv()
                .fromFile("Convertedfile")
                .then((jsonObj) => {
                    Model.collection.insert(jsonObj, function (err, docs) {
                        if (err) {
                            return console.error(err);
                        } else {
                            console.log("All documents inserted");
                        }
                    });
                })
            c.end()
        });
        stream.pipe(fs.createWriteStream('ConvertedFile.xlsx'))
    })
})
It looks like you need an upsert: update a document if it already exists, otherwise insert/create it.
This can be done one document at a time or in bulk, but either way it needs a query to find the matching document(s) first.
Since you haven't provided any sample data I can't write a snippet tailored to your schema, but here are links to get you started: for bulk operations, Bulk.find.upsert, and for a single document this thread is good: how-do-i-update-upsert-a-document-in-mongoose
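For the single-document case with Mongoose, a minimal sketch looks like this. The Employee model, the record variable, and the empID key are hypothetical placeholders; use whichever field uniquely identifies a row in your file.

// Hypothetical model and key field: upsert one record per unique empID.
Employee.updateOne(
    { empID: record.empID },   // match on the field that acts as the primary key in your file
    { $set: record },          // overwrite only the fields present in the new file
    { upsert: true },          // create the document if no match exists
    function (err, result) {
        if (err) return console.error(err);
        console.log('Upserted record for', record.empID);
    }
);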
Update: here is the mongodb bulk upsert in action:
const mongo = require('mongodb');
const MongoClient = mongo.MongoClient;
const client = new MongoClient('mongodb://127.0.0.1:27017', { useUnifiedTopology: true });

client.connect(async err => {
    if (err) {
        console.log('DB Connection Error ', err);
    } else {
        const db = client.db('node-cheat-db');
        // lets assume you've read all file contents and data is now ready for db operation
        let records = [
            {first: 'john', last: 'doe', email: 'johen#doe.com'},
            {first: 'jack', last: 'doe', email: 'jack#doe.com'},
            {first: 'jill', last: 'doe_updated', email: 'jill#doe.com'}
        ];
        // prepare bulk upsert so that new records are created and existing are updated
        let bulk = db.collection('users').initializeOrderedBulkOp();
        for (var i = 0; i < records.length; i++) {
            bulk.find({
                "email": records[i].email // at least 1 key should be treated as PK; in this example email is PK
            }).upsert().replaceOne(records[i]);
        }
        bulk.execute(function (err, updateResult) {
            if (updateResult.result.ok != 1) {
                console.log('Bulk Upsert Error');
            } else {
                console.log(`Inserted: ${updateResult.result.nUpserted} and Updated: ${updateResult.result.nModified}`);
            }
        });
    }
});
sample output looks like:
Inserted: 0 and Updated: 3
Further details:
Clone node-cheat bulk-update, run npm install mongodb, then node bulk-update.js.

mongoose Update with $in is not working properly

I have the following mongoose update in my JS:
var ids = [1, 3];
restid = 1;
db.model('rests').update({ _id: restid, 'menu.id': {$in: ids} }, {$inc: {'menu.$.soldCounter': 1}}, {multi: true}, function(err) {
    if (err)
        console.log("Error while updating sold counters: " + err.message);
});
For some reason the update is applied to the rest document with _id=1, but only to menu.1.soldCounter and not to menu.3.soldCounter.
I have tested it several times and the conclusion is that only the first id from the ids array gets updated.
Can someone spot the problem?
It's because the $ operator acts as a placeholder for the first matching element only, as stated in the docs: https://docs.mongodb.org/v2.6/reference/operator/update/positional/
Currently there is no way to update all matching subdocument fields in a single operator. Here is the discussion of the request for this feature: https://jira.mongodb.org/browse/SERVER-1243
Actually, there is a way to do that:
var indexes = [1, 2, 3];
var update = { $inc: {} };
for (var i = 0; i < indexes.length; ++i) {
    update.$inc[`menu.${indexes[i]}.soldCounter`] = 1;
}

db.model('rests').update({ _id: rest._id }, update, function(err) {
    if (err)
        console.log("Error while updating sold counters: " + err.message);
});
You need to know the indexes you want to update in advance; then it takes just one call to the DB.
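For illustration, with indexes = [1, 2, 3] the loop above builds a single update document equivalent to the following (array positions, not menu ids, are assumed here):

// Built update document: one $inc entry per array position to bump.
var update = {
    $inc: {
        'menu.1.soldCounter': 1,
        'menu.2.soldCounter': 1,
        'menu.3.soldCounter': 1
    }
};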

MongoDB: atomic call on 2 collections (find + insert)

How can I atomically get the latest "rounds" record's ObjectId and use it when inserting into the "deposits" collection?
This answer says it can't be done: Is there any way to atomically update two collections in MongoDB?
Is this still true?
In process A, I want to atomically FIND the latest round id (rid) and INSERT it into deposits. The race condition: after A finds rid, another process B might insert into rounds, so A now holds an rid that is not the latest but one behind. How can A find rid in rounds and insert that rid into deposits (act on these two collections) atomically?
// GET ROUND ID (RID) OF LATEST
var rid;
db.rounds.find().limit(1).sort({$natural: -1}, function(err, latestInsertedRound) {
    rid = latestInsertedRound[0]._id;
    print(rid, 'rid'); // if another process B inserts now, this rid is 1 behind

    // INSERT INTO DEPOSITS
    db.deposits.insert({uid: uid, iid: iid, rid: rid}, function(err, insertedDeposit) {
        print(insertedDeposit, 'insertedDeposit');
    });
});
Inserting a document in MongoDB takes a callback, and the callback's second parameter is the result of the insert, which includes the inserted documents.
I tried printing that second parameter with console.log. It looks like this:
{ result: { ok: 1, n: 1 },
  ops:
   [ { username: 'user1',
       password: 'password1',
       _id: 562099bae1872f58b3a22aed } ],
  insertedCount: 1,
  insertedIds: [ 562099bae1872f58b3a22aed ]
}
insertedIds is the array that holds the _ids of the inserted document or documents.
So you can insert your object into the second collection inside the callback of the insert into the first collection. A little confusing, so in simple terms: insert the document into the first collection; in its callback, insert the document into the second collection.
MongoClient.connect(MONGOLAB_URI, function (err, db) {
    if (err)
        console.log("We have some error : " + err);
    else {
        db.createCollection('rounds', function (err, rounds) {
            if (err) {
                console.log('Error while creating rounds collection');
                throw err;
            }
            else {
                rounds.insert({ 'username': 'user1', 'password': 'password1' }, function (err, docsInserted) {
                    console.log('Last document inserted id :', docsInserted.insertedIds[0]);
                    // inserting the document in the function callback
                    db.createCollection('deposits', function (err, deposits) {
                        if (err) {
                            console.log('Error while creating deposits collection');
                            throw err;
                        }
                        else {
                            // change array index according to your need
                            // you may be inserting multiple objects simultaneously
                            deposits.insert({ 'last_inserted_object': docsInserted.insertedIds[0] });
                            console.log('inserted into deposits collection');
                        }
                    });
                });
            }
        });
    }
});
It seems it's not possible to operate atomically on 2 collections in MongoDB, as explained in this answer:
Is there any way to atomically update two collections in MongoDB?
I leave the question up because it has a slightly different focus (not 2 updates, but find+insert).

Pass variables to mongodb query in nodejs

Can I pass a variable to a query, like this?
db.collection('coll').find(query).toArray(function(err, docs) {
    if (err) throw err;
    docs.forEach(function(doc) {
        var Id = parseInt(doc._id); // a returned _id from the previous query
        Id--;
        query = {_id : parseInt(doc._id) }; // returns null
        // OR query = {_id : Id }; // error
        db.collection('coll').find(query).toArray(function(err, beforeDocs) {
            //if (err) throw err;
            console.dir(beforeDocs);
        });
    });
});
In fact, what I am trying to do is query by id to get the record that comes before the results returned by the first query.
Any ideas??
Is it at all possible to query MongoDB with variables??
var query = { _id : var i, name : var name , etc...}
There is no way to search for the document that shows up before a match in a mongodb query.
I believe your best bet is something like this:
var prevDoc = null; // to store the previous document
var cursor = db.collection('collection').find({}); // get all docs
cursor.sort('_id', 1); // it only makes sense to grab previous if they're sorted
cursor.each(function(err, doc) {
    if (err) throw err;
    if (doc == null) { // if we reached the end of our cursor...
        db.close(); // close the db connection
    } else {
        if (doc.foo === "bar") { // here you put your match criteria
            console.dir(prevDoc); // here you print your previous document
        }
    }
    prevDoc = doc; // store current doc so that it's available for next iteration
});
To answer your direct question ("Is it at all possible to query mongodb with variables?"): yes, you can build a query from variables that already have values at the time the query object is created. What you cannot do is pass new, uninstantiated variables for MongoDB to fill in or manipulate.
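For example, a query object built from ordinary JavaScript variables; the variable names and values here are illustrative:

var id = 42;
var name = 'john';

// The values are baked into the query object at the moment it is created.
var query = { _id: id, name: name };

db.collection('coll').find(query).toArray(function(err, docs) {
    if (err) throw err;
    console.dir(docs);
});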
