I can get a positive count of the elements in the collection ...
var collection;
collection = db.collection("allCodes");
collection.count(function(err, count) {
if (err) {
throw err;
}
console.log("There are " + count + " records.");
});
... with output:
Connected to Database ok
There are 354 records.
... but cannot get the elements in this collection:
collection.find().each(function(err, doc) {
if (err) {
throw err;
}
console.log("each doc");
console.log(doc);
});
... it prints nothing. I am new to MongoDB, so what am I doing wrong? I want to print all the elements in the allCodes collection.
Update: here is all the code; it inserts data, then gets the count, and then tries to fetch the data itself, but nothing comes out.
var MongoClient, collection;
MongoClient = require("mongodb").MongoClient;
var objectToInsert = [{
'a': 1
}, {
'a': 2
}, {
'b': 3
}]
MongoClient.connect("mongodb://127.0.0.1:27017/test", function(err, db) {
console.log("Connected to Database");
collection = db.collection("test2");
// clear collection -------------------------------
collection.remove(function(err, result) {
// insert ------------------------------------
collection.insert(objectToInsert, function(docs) {
// count - ok -----------------------------------
collection.count(function(err, count) {
console.log("Count: " + count);
// find - fail - no objects printed -----------------------
collection.find().toArray(function(err, docs) {
console.log("Printing docs from Array");
docs.forEach(function(doc) {
console.log("Doc from array");
console.log(doc);
});
});
db.close();
});
});
});
});
It produces this output:
Connected to Database
Count: 3
So why do I get only the count? Where is my data?
You're closing your connection to the database before the find gets a chance to complete.
Move the db.close() call inside the callback of toArray like this:
collection.find().toArray(function(err, docs) {
console.log("Printing docs from Array");
docs.forEach(function(doc) {
console.log("Doc from array");
console.log(doc);
});
db.close();
});
Disclaimer: I am not familiar with node.js at all.
From the example in the documentation, it seems that you are required to create a cursor object first and only afterwards iterate over the results. I'm not sure what considerations there are for chaining commands.
var cursor = collection.find();
// Execute the each command, triggers for each document
cursor.each( function( err, item ) {
console.log( "each doc" );
});
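One detail worth noting with this pattern (a small sketch, assuming db is the connection opened by MongoClient.connect): the classic driver's each() invokes the callback one final time with a null item once the cursor is exhausted, so a null check is the usual idiom and a convenient place to close the connection.
var cursor = collection.find();
cursor.each(function (err, item) {
    if (err) {
        throw err;
    }
    if (item !== null) {
        console.log("each doc");
        console.log(item);
    } else {
        // a null item signals that the cursor is exhausted
        db.close();
    }
});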
Related
This probably calls for a promise implementation, but I would like to check with experts before doing so.
Need to do:
Read the entire file line by line into MongoDB collection A.
Upon completion of step 1, insert/update/delete documents in collection B based on the state in collection A. If a document is not present in A, delete it from B.
Problem: even before step 1 above completes, step 2 starts executing and begins deleting records from B.
Tried so far: async.series does not work. Below is my code.
MongoClient.connect(config.mongodb.uri, function (err, db) {
if (err) {
logger.error('Unable to connect to the mongoDB server. Error:', err);
reject(err);
} else {
let startTime = new Date();
async.series([
function(callback) {
console.log('First in series');
db.collection('eligibilityStage').drop({}, function (err, oldObject) {
debugger;
var lr = new LineByLineReader(config.eligibiltyFile.fileRemoteLocation + '/' + latestEligibilityfileName);
lr.on('error', function (err) {
console.log(err);
});
var lineCount;
lr.on('line', function (line) { //** --> Jumps from here to second function in series, line#43**
if (line.length == config.eligibiltyFile.detailRecordlineWidth) {
var document = require('fixy').parse({
map: mapData, options: {
skiplines: null, fullwidth: config.eligibiltyFile.detailRecordlineWidth
}
}, line);
db.collection('eligibilityStage').insertOne(document[0], function (err, records) {
lineCount++;
if (err) {
console.log(err);
}
});
}
});
lr.on('end', function () {
console.log('File is closed, read lines:'+lineCount);
console.log('File is closed, rowcount:'+db.eigibilityStage.Count());
});
callback(null, 'loadStage');
});
},
function(callback) {
// Deletes
console.log('Series 2 function, read lines:'+lineCount);
console.log('Series 2 function, rowcount:'+db.eigibilityStage.Count());
callback(null, 'processStage');
}
],
function(err, results){
});
}
})
Am I doing it wrong? Or is this a standard problem to be solved using promises?
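For what it's worth, a minimal sketch of the timing issue, using only the names from the code above: callback(null, 'loadStage') currently runs right after the line reader is wired up, so async.series starts the second task before the 'end' event ever fires. Deferring that callback to the reader's 'end' handler keeps the second task from starting early (note that the insertOne callbacks may still be in flight at that point, so you may also want to track them):
lr.on('end', function () {
    console.log('File is closed, read lines: ' + lineCount);
    // Only now tell async.series that the first task is finished,
    // so the second function in the series runs after the file is read.
    callback(null, 'loadStage');
});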
I am using Node.js and MongoDB, and I am new to Node.js. I need to know how to get data from one collection, append some additional data to it, and insert it into another collection.
db.collection('collection1').find({ "Id" : 12345 }).toArray(function(err, result){
db.collection('collection2', function(err, collection){
collection.insert({
//some data
})
})
})
When I try this code it doesn't work; it gives me the error that insert is not defined.
thanks,
John.
db.collection('collection1').find({ "Id" : 12345 }).toArray(function(err, result){
//do the modification here
db.collection('collection2').insert(modifiedResult, function(err, result){
if(err) {
//log error
}else{
//log result
}
})
})
One more thing: if the result array length is more than one and you want to insert the results separately, then use a promise:
db.collection('collection1').find({ "Id" : 12345 }).toArray(function(err, result){
//do the modification here
Promise.all(modifiedResult.map((eachModifiedResult)=>{
return db.collection('collection2').insert(eachModifiedResult);
})).then((result)=>{
//result of the insert
}).catch((err)=>{
//err if any happen
});
})
But if you have a very large number of docs, then do it as Neil said: read the collection one document at a time using a cursor, modify each one, and insert it into the other collection.
You can use a callback-control library like async or a promise library like Q. Here is an example using async; a promise-based sketch follows it.
var collectionData = null;
var modifiedResult = null;
// here I am using the async library to avoid callback hell
async.series([
// for get data from collection 1.
function(cb) {
var criteria = {
"Id": 12345
}
db.collection('collection1').find(criteria).toArray(function(dbErr, dbResult) {
if (dbErr) {
cb(dbErr)
} else {
collectionData = dbResult;
cb()
}
})
},
// Append Data in collectionData
function(cb) {
// do your work here to append data to collectionData
modifiedResult = extendedData; // this is just an example you need to work on it
cb();
},
// Update collection 2 here
function(cb) {
db.collection('collection2').insert(modifiedResult, function(err, result) {
if (err) {
cb(err)
} else {
cb(null, result)
}
});
}
]);
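As mentioned above, the same three steps can also be written with promises. This is only a rough sketch: it assumes a driver version whose find().toArray() and insert() return promises when no callback is passed, and modifyData is a hypothetical placeholder for whatever transformation you apply.
db.collection('collection1').find({ "Id": 12345 }).toArray()
    .then(function (collectionData) {
        // append/modify the data here; modifyData is a hypothetical helper
        var modifiedResult = modifyData(collectionData);
        return db.collection('collection2').insert(modifiedResult);
    })
    .then(function (insertResult) {
        // inserts finished
    })
    .catch(function (err) {
        // handle an error from either step
    });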
I'm finding documents by _id in a loop and updating a boolean in each document:
db.items.findById(key, function(error, item) {
item.flags.cake = false;
item.update(function(error, zzz) {
if(error) return next(error);
console.log('output ',zzz);
});
});
But the documents will not update. The mongoose schema for item:
flags: {
cake:Boolean
}
Use the save() method instead, which takes a callback that receives three parameters you can use:
1) err if an error occurred
2) item which is the saved item
3) numAffected will be 1 when the document was successfully persisted to MongoDB, otherwise 0.
Items.findById(key, function(error, item) {
item.flags.cake = false;
item.save(function (err, item, numAffected) {
if (err) console.log(err)
console.log('output ', item);
});
});
As an extra measure of flow control, save will return a Promise.
item.save().then(function(item) {
console.log('output ', item);
});
I'm playing with node.js and node-mysql and I'd like to understand the following:
connection.connect(function(err) { if (err) throw err; });
console.log(" beginn insert "+ Date() );
for (var i=0;i<200;i++){
connection.query('INSERT INTO animals SET ?', {name: "tiger!"});
}
connection.end();
console.log(" end insert " + Date() );
The output is always ' beginn insert ' immediately followed by ' end insert ', and only then does the database start working. How is it possible that these node-mysql statements seem to be non-blocking?
Thanks, Felix
Node itself is non-blocking for I/O, so queries such as these do not block. Instead, you need to pass a callback function that runs when the query completes:
var queries = 0;
connection.query("INSERT INTO animals SET ?", {name: "tiger!"},
function (err) {
// handle error?
queries++;
if (queries == 200) {
// Last query has finished running
}
});
Use Async; something like the following:
console.log(" beginning insert "+ Date() );
// create your queries as an array of objects
queries = [];
for (var i=0;i<200;i++){
queries.push({name: "tiger!"});
};
async.map(queries, myQuery, function(err, data){
// Here all of your queries are done
connection.end();
console.log(" end insert " + Date() );
});
function myQuery(name, callback){
connection.query('INSERT INTO animals SET ?', [name], function(err, result) {
if(err){
console.error(err);
callback(err, null);
}else{
callback(null, result);
}
});
}
How can I update ALL documents in a collection where an attribute's value needs to be different (a unique number) for each document?
Below is my current code. This actually seems to run (I don't get an error), but the values in the db are not being updated.
Model.find({}, function (err, docs) {
if (err) {
console.log(err);
};
if(docs && docs.length > 0){
for(var i=0; i<docs.length; i++){
//SET NEW VALUE FOR EACH DOC - VALUE MUST BE UNIQUE TO EACH DOC
docs[i].code = generateRandomCode();
}
// PASS IN ARRAY OF UPDATED DOCS TO BE SAVED
Model.update(docs, function (err, docs) {
if (err) {
console.log(err);
}
if(!err){
req.updatedSuccessfully = true;
}
return next();
});
}
else{
return next();
}
});
Before this I was trying to do something like this:
Model.update({}, { code: generateRandomCode() }, { multi: true }, function (err, numberAffected, raw) {
if (err) return handleError(err);
console.log('The number of updated documents was %d', numberAffected);
console.log('The raw response from Mongo was ', raw);
});
The problem with this is that generateRandomCode() is only called once, but I need to create a different code for each document. So neither of these examples works.
Instead of trying model.update(), can you try to simply save the documents?
See the answer to this question: Update model with Mongoose, Express, NodeJS
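A minimal sketch of that approach, using only the names from the question (Model, generateRandomCode(), req, next): fetch the documents, give each one its own code, and save them individually, calling next() once every save has finished.
Model.find({}, function (err, docs) {
    if (err) {
        console.log(err);
        return next();
    }
    var pending = docs.length;
    if (pending === 0) {
        return next();
    }
    docs.forEach(function (doc) {
        // generateRandomCode() is called once per document,
        // so every document gets a different code
        doc.code = generateRandomCode();
        doc.save(function (saveErr) {
            if (saveErr) {
                console.log(saveErr);
            }
            pending--;
            if (pending === 0) {
                req.updatedSuccessfully = true;
                return next();
            }
        });
    });
});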