MongoDB RangeError: attempt to write outside buffer bounds - node.js

I am not receiving this error all the time, only for specific arrays. I am trying to insert a JSON object into a MongoDB collection using the Node.js MongoDB native driver. The JSON object has a couple of string attributes and one big string-array attribute; the array can contain thousands of string items.
My JSON looks like this:
{
    FileName: "504-2345.txt",
    SIMs: ["8931440400012", "893144040001", "4000130360507", .........]
}
Any idea when MongoDB throws RangeError: attempt to write outside buffer bounds? Please suggest.
The method below inserts the data into MongoDB:
Insert: function (data, type, callback) {
    MongoClient.connect(url, function (err, db) {
        // logger.log("info", "Before inserting documents into " + type + " documents =>" + data.length);
        if (err) {
            logger.log("error", err);
            return callback(err);
        }
        var collection = db.collection(type);
        // Insert the document
        collection.insertOne(data, function (err, result) {
            if (err) {
                logger.log("error", "Error for data while inserting, error = " + err);
                db.close();
                return callback(err);
            }
            db.close();
            if (result.ops) {
                callback(err, result.ops[0]);
            }
        });
    });
},

The document should be less than 16 MB (the BSON document size limit); otherwise you get exceptions like this one. You can refer to this link for more details. For data larger than that you can use GridFS.
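If the SIMs array is what pushes a single document past that limit, one workaround is to split it across several smaller documents before inserting. A minimal sketch (splitIntoDocuments, the Part field, and the chunk size are illustrative assumptions, not part of the original code):
var CHUNK_SIZE = 100000; // illustrative: SIM strings per document, chosen to stay well under 16 MB

function splitIntoDocuments(fileName, sims) {
    var docs = [];
    for (var i = 0; i < sims.length; i += CHUNK_SIZE) {
        docs.push({
            FileName: fileName,
            Part: docs.length, // hypothetical field for reassembling the chunks later
            SIMs: sims.slice(i, i + CHUNK_SIZE)
        });
    }
    return docs;
}

// usage: insert the chunks with insertMany instead of insertOne
// collection.insertMany(splitIntoDocuments(data.FileName, data.SIMs), callback);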

Related

CouchDB bulk copy operation doesn't send back callback

I'm using the nano library (I also tried this with the Cloudant library) for Node.js.
I have this sample code:
var db; // module-level handle, reused across calls

exports.insert = function (db_name, data, callback) {
    if (!db) {
        db = nano.use(db_name);
    }
    console.log(`trying to bulk copy to CouchDB`);
    db.bulk(data, function (err, body) {
        if (!err) {
            console.log(`data in db ${db_name} inserted successfully`);
            return callback(null, body);
        }
        console.log(`err occurred ${err}`);
        return callback(err);
    });
};
The "data" variable is an object that contain the 'docs' property and
'docs' contain array.
I'ts always do the bulk work and put the docs array into my cloudnat
db , but
Often doesn't return any callback (err / success) to my node js
process
and it stuck...
Any suggestion?

'undefined is not a function' when using unordered bulk upsert

I am using a bulk upsert to update/add several documents at once to my database:
var bulk = collection.initializeUnorderedBulkOp();
docs.forEach(function(doc, index, array) {
    bulk.find({'docId': doc.docId}).upsert().updateOne(doc);
});
bulk.execute();
On bulk.execute this is returning the following error:
/myPath/node_modules/mongodb/node_modules/mongodb-core/lib/topologies/server.js:771
catch(err) { process.nextTick(function() { throw err}); }
^
TypeError: undefined is not a function
at /myPath/node_modules/mongodb/lib/bulk/unordered.js:470:5
I have looked at the code in the mongodb module and it seems to be failing on the callback here:
// Execute batches
return executeBatches(this, function(err, result) {
callback(err, result);
});
The data is being written to the database exactly as expected, but this error is still being thrown, and I can't work out what I could be doing to cause it.
I have ruled out problems with my data by using trivial objects, and by using bulk inserts instead of upserts as they are simpler, but the results are the same.
You need to invoke bulk.execute with a callback.
bulk.execute(function(err, results) {
    if (err)
        console.error(err);
    else
        console.log(results);
});
Or the like; that should work.
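As a side note: in more recent versions of the native driver, bulk.execute() also returns a promise when no callback is passed, so the error would surface as a rejection instead of this TypeError. A minimal sketch, assuming a driver version with promise support:
bulk.execute()
    .then(function(results) { console.log(results); })
    .catch(function(err) { console.error(err); });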

node js mongo find last inserted

I am having a hard time trying to find the last inserted element in Mongo. I am using example code I found, trying to run the query and display the item, but I am getting an error. I understand I am supposed to do something like this.
db.collectionName.findOne({}, {sort:{$natural:-1}})
But this is what I have so far and it's not working.
var MongoClient = require('mongodb').MongoClient;
MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) {
    if (err) { return console.dir(err); }
    var collection = db.collection('test');
    var doc1 = {'hello': 'doc1'};
    var doc2 = {'hello': 'doc2'};
    var lotsOfDocs = [{'hello': 'doc3'}, {'hello': 'doc4'}];
    collection.insert(doc1);
    collection.insert(doc2, {w: 1}, function(err, result) {});
    collection.insert(lotsOfDocs, {w: 1}, function(err, result) {});
    collection.find({}).toArray(function(err, docs) {
        console.log(docs[0]);
    });
    db.close();
});
This is the error.
nodejs/node_modules/mongodb/lib/mongodb/connection/base.js:246
throw message;
^
TypeError: Cannot read property '0' of null
I checked to make sure the database is not empty so I am not sure why it's returning null.
I've found a possible solution (here) to your problem. It might be that the database connection closes before the operations you have issued finish.
You can fix it by moving the db.close() call inside the find callback:
collection.find({}).toArray(function(err, docs) {
    console.log(docs[0]);
    db.close();
});
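To actually get the last inserted document (rather than whichever one find returns first), you can also sort on _id descending and take a single result, since ObjectIds embed a creation timestamp. A minimal sketch, assuming the same test collection:
collection.find({}).sort({_id: -1}).limit(1).toArray(function(err, docs) {
    if (err) { return console.dir(err); }
    console.log(docs[0]); // the most recently inserted document
    db.close();
});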

Mongoose (mongodb) batch insert?

Does Mongoose v3.6+ support batch inserts now? I've searched for a few minutes but anything matching this query is a couple of years old and the answer was an unequivocal no.
Edit:
For future reference, the answer is to use Model.create(). create() accepts an array as its first argument, so you can pass your documents to be inserted as an array.
See Model.create() documentation
Model.create() vs Model.collection.insert(): a faster approach
Model.create() is a bad way to do inserts if you are dealing with a very large bulk. It will be very slow. In that case you should use Model.collection.insert, which performs much better. Depending on the size of the bulk, Model.create() may even crash! I tried with a million documents with no luck; using Model.collection.insert it took just a few seconds.
Model.collection.insert(docs, options, callback)
docs is the array of documents to be inserted;
options is an optional configuration object - see the docs
callback(err, docs) will be called after all documents get saved or an error occurs. On success, docs is the array of persisted documents.
As Mongoose's author points out here, this method will bypass any validation procedures and access the Mongo driver directly. It's a trade-off you have to make since you're handling a large amount of data, otherwise you wouldn't be able to insert it to your database at all (remember we're talking hundreds of thousands of documents here).
A simple example:
var Potato = mongoose.model('Potato', PotatoSchema);
var potatoBag = [/* a humongous amount of potato objects */];
Potato.collection.insert(potatoBag, onInsert);
function onInsert(err, docs) {
    if (err) {
        // TODO: handle error
    } else {
        console.info('%d potatoes were successfully stored.', docs.length);
    }
}
Update 2019-06-22: although insert() can still be used just fine, it's been deprecated in favor of insertMany(). The parameters are exactly the same, so you can just use it as a drop-in replacement and everything should work just fine (well, the return value is a bit different, but you're probably not using it anyway).
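A minimal sketch of the drop-in replacement for the example above (same parameters, assuming a driver version where insertMany is available):
Potato.collection.insertMany(potatoBag, onInsert);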
Reference
Mongo documentation
Aaron Heckman on Google Groups discussing bulk inserts
Mongoose 4.4.0 now supports bulk insert
Mongoose 4.4.0 introduces true bulk insert with the model method .insertMany(). It is way faster than looping over .create() or providing it with an array.
Usage:
var rawDocuments = [/* ... */];
Book.insertMany(rawDocuments)
    .then(function(mongooseDocuments) {
        /* ... */
    })
    .catch(function(err) {
        /* Error handling */
    });
Or
Book.insertMany(rawDocuments, function (err, mongooseDocuments) { /* Your callback function... */ });
You can track it on:
https://github.com/Automattic/mongoose/issues/723
https://github.com/Automattic/mongoose/blob/1887e72694829b62f4e3547283783cebbe66b46b/lib/model.js#L1774
Indeed, you can use the "create" method of Mongoose; it can take an array of documents. See this example:
Candy.create({ candy: 'jelly bean' }, { candy: 'snickers' }, function (err, jellybean, snickers) {
});
The callback function receives the inserted documents.
You do not always know how many items have to be inserted (fixed argument length like above), so inside the callback you can loop through them:
var insertedDocs = [];
for (var i = 1; i < arguments.length; ++i) {
    insertedDocs.push(arguments[i]);
}
Update: A better solution
A better solution would be to use Candy.collection.insert() instead of Candy.create() - used in the example above - because it's faster (create() calls Model.save() on each item, so it's slower).
See the Mongo documentation for more information:
http://docs.mongodb.org/manual/reference/method/db.collection.insert/
(thanks to arcseldon for pointing this out)
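A minimal sketch of that variant, mirroring the Potato example above (the exact shape of the callback result depends on the driver version):
Candy.collection.insert(
    [{ candy: 'jelly bean' }, { candy: 'snickers' }],
    function (err, docs) {
        if (err) {
            // TODO: handle error
        } else {
            console.info('%d candies were successfully stored.', docs.length);
        }
    }
);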
Here are both ways of saving data, with insertMany and save:
1) Mongoose: save an array of documents with insertMany in bulk
/* write mongoose schema model and export this */
var Potato = mongoose.model('Potato', PotatoSchema);
/* write this api in routes directory */
router.post('/addDocuments', function (req, res) {
    const data = [/* array of objects to save in db */];
    Potato.insertMany(data)
        .then((result) => {
            console.log("result ", result);
            res.status(200).json({'success': 'new documents added!', 'data': result});
        })
        .catch(err => {
            console.error("error ", err);
            res.status(400).json({err});
        });
});
2) Mongoose: save an array of documents with .save()
These documents are saved in parallel.
/* write mongoose schema model and export this */
var Potato = mongoose.model('Potato', PotatoSchema);
/* write this api in routes directory */
router.post('/addDocuments', function (req, res) {
    const saveData = [];
    const data = [/* array of objects to save in db */];
    data.map((item) => {
        console.log(item);
        var potato = new Potato(item);
        potato.save()
            .then((result) => {
                console.log(result);
                saveData.push(result);
                if (saveData.length === data.length) {
                    res.status(200).json({'success': 'new documents added!', 'data': saveData});
                }
            })
            .catch((err) => {
                console.error(err);
                res.status(500).json({err});
            });
    });
});
You can perform a bulk insert using Mongoose, as the highest-scored answer shows. But the example cannot work as written; it should be:
/* a humongous amount of potatoes */
var potatoBag = [{name: 'potato1'}, {name: 'potato2'}];
var Potato = mongoose.model('Potato', PotatoSchema);
Potato.collection.insert(potatoBag, onInsert);
function onInsert(err, docs) {
    if (err) {
        // TODO: handle error
    } else {
        console.info('%d potatoes were successfully stored.', docs.length);
    }
}
Don't use schema instances for the bulk insert; you should use plain map objects.
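If you already have Mongoose documents, a minimal sketch of converting them to plain objects first (toObject() is the standard Mongoose document method; plainPotatoes is an illustrative name):
var plainPotatoes = potatoBag.map(function (p) {
    // convert a Mongoose document to a plain JavaScript object;
    // plain objects pass through unchanged
    return (p.toObject) ? p.toObject() : p;
});
Potato.collection.insert(plainPotatoes, onInsert);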
It seems that with Mongoose there is a limit of 1000 documents when using
Potato.collection.insert(potatoBag, onInsert);
You can use:
var bulk = Model.collection.initializeOrderedBulkOp();
async.each(users, function (user, callback) {
    bulk.insert(user);
    callback();
}, function (err) {
    var bulkStart = Date.now();
    bulk.execute(function(err, res) {
        if (err) console.log(" gameResult.js > err ", err);
        console.log(" gameResult.js > BULK TIME ", Date.now() - bulkStart);
        console.log(" gameResult.js > BULK INSERT ", res.nInserted);
    });
});
But this is almost twice as fast when testing with 10000 documents:
function fastInsert(arrOfResults) {
    var startTime = Date.now();
    var count = 0;
    var c = Math.round(arrOfResults.length / 990);
    var fakeArr = [];
    fakeArr.length = c;
    var docsSaved = 0;
    async.each(fakeArr, function (item, callback) {
        var sliced = arrOfResults.slice(count, count + 999);
        count = count + 999;
        if (sliced.length != 0) {
            GameResultModel.collection.insert(sliced, function (err, docs) {
                docsSaved += docs.ops.length;
                callback();
            });
        } else {
            callback();
        }
    }, function (err) {
        console.log(" gameResult.js > BULK INSERT AMOUNT: ", arrOfResults.length, "docsSaved ", docsSaved, " DIFF TIME:", Date.now() - startTime);
    });
}
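Note that newer Mongoose/driver versions handle this chunking for you: insertMany() splits large arrays into batches internally, so the manual slicing above is mostly a workaround for older versions. A minimal sketch, assuming the same GameResultModel:
GameResultModel.insertMany(arrOfResults, { ordered: false }, function (err, docs) {
    if (err) return console.error(err);
    console.log("inserted", docs.length, "documents");
});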
You can perform a bulk insert in the mongoDB shell by inserting an array of values:
db.collection.insert([{values},{values},{values},{values}]);
Sharing working and relevant code from our project:
// documentsArray is the list of sampleCollection objects
sampleCollection.insertMany(documentsArray)
    .then((res) => {
        console.log("insert sampleCollection result ", res);
    })
    .catch(err => {
        console.log("bulk insert sampleCollection error ", err);
    });

Retrieving all records asynchronously then performing an action

In a Node.js server, using the MongoDB native driver, I want to retrieve records from a cursor and then output them as JSON. I have this (simplified):
var ans = {ids: []};
cursor.each(function(err, doc) {
    if (doc) {
        ans.ids.push(doc.tag);
    }
});
cursor.count(function(err, result) {
    ans.count = result;
    res.send(JSON.stringify(ans));
});
and the result is something like {ids: [], count: 3}. In other words, the query appears to run without returning any records. I assume this is because the data has already been sent before the cursor.each callbacks have run. How do I restructure this to make sure the sending happens after the iterating?
I have found the answer. The example for cursor.each says "If the item is null then the cursor is exhausted/empty and closed", so: (error handling omitted)
var ans = {ids: []};
cursor.each(function(err, doc) {
    if (doc) {
        ans.ids.push(doc.tag);
    } else {
        cursor.count(function(err, result) {
            ans.count = result;
            res.send(JSON.stringify(ans));
        });
    }
});
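An alternative sketch, using toArray() to collect all documents in one callback instead of iterating (this buffers the whole result set in memory, which is fine for small collections):
cursor.toArray(function(err, docs) {
    if (err) { return res.send(JSON.stringify({error: String(err)})); }
    var ans = {
        ids: docs.map(function(d) { return d.tag; }), // same tag field as above
        count: docs.length
    };
    res.send(JSON.stringify(ans));
});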
