In a node.js server, using the mongodb native driver, I want to retrieve records from a cursor and then output them as JSON. I have this (simplified)
var ans = {ids: []};
cursor.each(function(err, doc) {
  if (doc) {
    ans.ids.push(doc.tag);
  }
});
cursor.count(function(err, result) {
  ans.count = result;
  res.send(JSON.stringify(ans));
});
and the result is something like {ids:[], count: 3}. In other words the query appears to run without returning any records. I assume that this is because the data's already been sent before the cursor.each callbacks have run. How do I re-structure this to make sure the sending happens after the iterating?
I have found the answer. The example for cursor.each says "If the item is null then the cursor is exhausted/empty and closed", so: (error handling omitted)
var ans = {ids: []};
cursor.each(function(err, doc) {
  if (doc) {
    ans.ids.push(doc.tag);
  }
  else {
    cursor.count(function(err, result) {
      ans.count = result;
      res.send(JSON.stringify(ans));
    });
  }
});
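As a side note, if the result set comfortably fits in memory, a shorter variant (a sketch, not part of the original answer) is to let the driver collect the documents with cursor.toArray():

// Sketch: collect everything at once, then build the response.
// Assumes the result set is small enough to hold in memory.
cursor.toArray(function(err, docs) {
  // handle `err` as appropriate
  var ans = {
    ids: docs.map(function(doc) { return doc.tag; }),
    count: docs.length
  };
  res.send(JSON.stringify(ans));
});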
Seems like a super basic task, but I just cannot get this to work (not very experienced with mongo or nodeJS).
I have an array of records. I need to check the DB to see if any records with a matching name already exist and if they do grab that record so I can update it.
Right now I am trying this
function hit_the_db(db, record_name, site_id) {
  return new Promise((resolve, reject) => {
    var record = db.collection('' + site_id + '_campaigns').find({name: record_name}).toArray(function(err, result) {
      if (err) {
        console.log('...error => ' + err.message);
        reject(err);
      } else {
        console.log('...promise resolved...');
        resolve(result);
      }
    });
    console.log('...second layer of select successful, returning data for ' + record.length + ' records...');
    return record;
  });
}
This query works in another part of the app so I tried to just copy it over, but I am not getting any records returned even though I know there should be with the data I am sending over.
site_id is just a string that would look like ksdlfnsdlfu893hdsvSFJSDgfsdk. The record_name is also just a string that could really be anything but it is previously filtered so no spaces or special characters, most are something along these lines this-is-the-name.
With the names coming through, there should be at least one matching record for each, but I am getting nothing returned. I just cannot wrap my head around using mongo for these basic tasks; if anyone can help, it would be greatly appreciated.
I am just using nodeJS and connecting to mongoDB, there is no express or mongoose or anything like that.
The problem here is that you are mixing callbacks and promises for your async code handling. When you call:
var record = db.collection('' + site_id + '_campaigns').find({name: record_name}).toArray(function(err, result) {
You are passing in a callback function, which will receive the resulting array of mongo records in a parameter called result, but you are also assigning the immediate return value of toArray() to a variable called record, which will not contain the query results.
Here is a cleaned up version of your function.
function hit_the_db(db, site_id, record_name, callback) {
  // Find all records matching 'record_name'
  db.collection(site_id + 'test_campaigns').find({ name: record_name }).toArray(function(err, results) {
    // matching records are now stored in 'results'
    if (err) {
      console.log('err:', err);
    }
    return callback(err, results);
  });
}
Here is optional code for testing the above function.
// This is called to generate test data
function insert_test_records_callback(db, site_id, record_name, insert_count, callback) {
  const testRecords = [];
  for (let i = 0; i < insert_count; ++i) {
    testRecords.push({name: record_name, val: i});
  }
  db.collection(site_id + 'test_campaigns').insertMany(testRecords, function(err, result) {
    return callback(err);
  });
}

// This cleans up by deleting all test records.
function delete_test_records_callback(db, site_id, record_name, callback) {
  db.collection(site_id + 'test_campaigns').deleteMany({name: record_name}, function(err, result) {
    return callback(err);
  });
}

// Test function to insert, query, clean up test records.
function test_callback(db) {
  const site_id = 'ksdlfnsdlfu893hdsvSFJSDgfsdk';
  const test_record_name = 'test_record_callback';
  // First call the insert function
  insert_test_records_callback(db, site_id, test_record_name, 3, function(err) {
    // Once execution reaches here, insertion has completed.
    if (err) {
      console.log(err);
      return;
    }
    // Do the query function
    hit_the_db(db, site_id, test_record_name, function(err, records) {
      // The query function has now completed
      console.log('hit_the_db - err:', err);
      console.log('hit_the_db - records:', records);
      delete_test_records_callback(db, site_id, test_record_name, function(err, records) {
        console.log('cleaned up test records.');
      });
    });
  });
}
Output:
hit_the_db - err: null
hit_the_db - records: [ { _id: 5efe09084d078f4b7952dea8,
    name: 'test_record_callback',
    val: 0 },
  { _id: 5efe09084d078f4b7952dea9,
    name: 'test_record_callback',
    val: 1 },
  { _id: 5efe09084d078f4b7952deaa,
    name: 'test_record_callback',
    val: 2 } ]
cleaned up test records.
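If you would rather keep the Promise shape from your original attempt, here is a minimal sketch (untested) that resolves inside the toArray callback instead of returning from it:

// Sketch: promise-based variant of hit_the_db.
// The key point is to resolve/reject inside the toArray callback.
function hit_the_db_promise(db, site_id, record_name) {
  return new Promise((resolve, reject) => {
    db.collection(site_id + '_campaigns')
      .find({ name: record_name })
      .toArray(function(err, results) {
        if (err) return reject(err); // surface the error to the caller
        resolve(results);            // matching records
      });
  });
}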
const collect = [];
req.body.product.forEach(function(entry) {
  mongoClient.connect(databaseServerUrl, function(err, db) {
    let testCollection = db.collection('Tests');
    testCollection.find({Product: entry}).toArray((err, docs) => {
      let waiting = docs.length;
      docs.forEach(function (doc) {
        collect.push(doc);
        finish();
      });
      function finish() {
        waiting--;
        if (waiting === 0) {
          res.send(collect);
        }
      }
    });
    db.close();
  });
});
This is only getting back the first set. If I have two entries in my req.body.product array, for example, I only get back the first set, but I need to get back everything, not just the results of one query.
Rather than performing two queries and combining the results into one array, I suggest performing a single query that gets all of the results, which would look something like this:
mongoClient.connect(databaseServerUrl, function(err, db) {
  const query = { $or: req.body.product.map(Product => ({ Product })) };
  db.collection('Tests').find(query).toArray((err, docs) => {
    // ...handle `err` here...
    res.send(docs);
    db.close();
  });
});
Note that I haven't tested this since I don't have a MongoDB database in front of me.
Your mongoClient.connect() is asynchronous, but your loop just executes without waiting for the callback.
Try an async forEach loop: enter link description here
This should solve your problem.
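For example, a rough sketch (assuming the connection is opened once and `collect`/`res` come from the surrounding handler, as in the question):

mongoClient.connect(databaseServerUrl, async function (err, db) {
  // handle `err` here as needed
  const collect = [];
  const testCollection = db.collection('Tests');
  // Wait for each product's query to finish before starting the next one.
  for (const entry of req.body.product) {
    const docs = await testCollection.find({ Product: entry }).toArray();
    collect.push(...docs);
  }
  db.close();
  res.send(collect);
});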
First off, don't worry, it's a tiny data set - I realise it wouldn't be wise to dump an entire production DB to a single screen via an API... I just need to get a JSON dump of entire (small) DB to return via an API endpoint in a Node.js application.
My application does successfully return single records with this code:
MongoClient.connect("mongodb://localhost:27017/search", function (err, db) {
  if (err) throw err;
  db.collection('results', function(err, collection) {
    // search for match that "begins with" searchterm
    collection.findOne({'string': new RegExp('^' + searchterm, 'i')}, function(err, items){
      // get result
      var result;
      if (items == null || items.result == null){
        result = "";
      }
      else {
        result = items.result;
      }
      // return result
      res.send(result);
    });
  });
});
So I know Node is talking to Mongo successfully, but how can I tweak this query/code to basically return what you get when you execute the following on the MongoDB command line:
$ db.results.find()
This is a snippet:
model.find({}).exec(function (err, result) {
  if (err) { console.error(err); return; }
  else return result;
});
First use your predefined model and call find. The logic is to pass an empty object {} as the query, which is essentially a "select all" on this model.
Make sense?
Exactly as you've described it.
collection.find({}).exec((err, result) => {
  if (err) {
    console.log(err);
    return;
  }
  if (result.length > 0) {
    // We check that the length is > 0 because using .find() will always
    // return an array, even an empty one. So just checking if it exists
    // will yield a false positive
    res.send(result);
    // Could also just use `return result;`
  }
});
Thanks guys, I appreciate your answers pointing me in the right direction, in terms of using {} as the query. Here is the code that eventually worked for me:
db.collection('results', function(err, collection) {
  collection.find({}).toArray(function(err, docs) {
    res.send(docs);
  });
});
The crucial element being the toArray(...) part.
I'm trying to read all records in a sqlite3 table and return them via callback. But it seems that despite using serialize these calls are still ASYNC. Here is my code:
var readRecordsFromMediaTable = function(callback){
  var db = new sqlite3.Database(file, sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE);
  var allRecords = [];
  db.serialize(function() {
    db.each("SELECT * FROM MediaTable", function(err, row) {
      myLib.generateLog(levelDebug, util.inspect(row));
      allRecords.push(row);
    }
    callback(allRecords);
    db.close();
  });
}
When the callback gets fired the array prints '[]'.
Is there another call that I can make (instead of db.each) that will give me all rows in one shot. I have no need for iterating through each row here.
If there isn't, how do I read all records and only then call the callback with results?
I was able to find answer to this question. Here it is for anyone who is looking:
var sqlite3 = require("sqlite3").verbose();

var readRecordsFromMediaTable = function(callback){
  var db = new sqlite3.Database(file, sqlite3.OPEN_READONLY);
  db.serialize(function() {
    db.all("SELECT * FROM MediaTable", function(err, allRows) {
      if (err != null) {
        console.log(err);
        callback(err);
        return;
      }
      console.log(util.inspect(allRows));
      callback(allRows);
      db.close();
    });
  });
}
A promise-based method:
var readRecordsFromMediaTable = function(){
  return new Promise(function (resolve, reject) {
    var responseObj;
    db.all("SELECT * FROM MediaTable", null, function cb(err, rows) {
      if (err) {
        responseObj = {
          'error': err
        };
        reject(responseObj);
      } else {
        responseObj = {
          statement: this,
          rows: rows
        };
        resolve(responseObj);
      }
      db.close();
    });
  });
}
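A possible way to consume the promise-based version above (just a sketch):

readRecordsFromMediaTable()
  .then(function (response) {
    // `response.rows` holds the result set, `response.statement` the sqlite3 statement
    console.log('got', response.rows.length, 'rows');
  })
  .catch(function (response) {
    console.error('query failed:', response.error);
  });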
The accepted answer using db.all with a callback is correct since db.each wasn't actually needed. However, if db.each was needed, the solution is provided in the node-sqlite3 API documentation, https://github.com/mapbox/node-sqlite3/wiki/API#databaseeachsql-param--callback-complete:
Database#each(sql, [param, ...], [callback], [complete])
...
After all row callbacks were called, the completion callback will be called if present. The first argument is an error object, and the second argument is the number of retrieved rows
So, where you end the first callback, instead of just } put }, function() {...}. Something like this:
var readRecordsFromMediaTable = function(callback){
  var db = new sqlite3.Database(file, sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE);
  var allRecords = [];
  db.serialize(function() {
    db.each("SELECT * FROM MediaTable", function(err, row) {
      myLib.generateLog(levelDebug, util.inspect(row));
      allRecords.push(row);
    }, function(err, count) {
      callback(allRecords);
      db.close();
    });
  });
}
I know I'm kinda late, but since you're here, please consider this:
Note that it first retrieves all result rows and stores them in memory. For queries that have potentially large result sets, use the Database#each function to retrieve all rows or Database#prepare followed by multiple Statement#get calls to retrieve a previously unknown amount of rows.
As described in the node-sqlite3 docs, you should use .each() if you're after a very large or unknown number of rows, since .all() will store the entire result set in memory before returning it.
That being said, take a look at Colin Keenan's answer.
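For completeness, here is a minimal sketch (assuming an open `db` handle and the MediaTable from the question) that combines the memory-friendly db.each iteration with a Promise, using the completion callback to know when the last row has been handled:

function processMediaRows(db, onRow) {
  return new Promise((resolve, reject) => {
    db.each(
      "SELECT * FROM MediaTable",
      (err, row) => {
        if (err) return reject(err);
        onRow(row); // handle each row as it arrives; nothing is accumulated here
      },
      (err, count) => {
        if (err) return reject(err);
        resolve(count); // total number of rows processed
      }
    );
  });
}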
I tackled this differently: since these calls are asynchronous, you need to wait until they complete before returning their data. I did it with a setInterval(), kind of like throwing pizza dough up into the air and waiting for it to come back down.
var reply = '';

db.all(query, [], function(err, rows){
  if (err != null) {
    reply = err;
  } else {
    reply = rows;
  }
});

var callbacker = setInterval(function(){
  // check that our reply has been modified yet
  if (reply !== '') {
    // clear the interval
    clearInterval(callbacker);
    // do work
  }
}, 10); // every ten milliseconds
Old question, but I came across the same issue and solved it with a different approach. The Promise option works, though it is a little too verbose for my taste in the case of a db.all(...) call.
Instead, I am using Node's event mechanism:
var EventEmitter = require('events')
var eventHandler = new EventEmitter()
In your Sqlite function:
function queryWhatever(eventHandler) {
  db.serialize(() => {
    db.all('SELECT * FROM myTable', (err, rows) => {
      // At this point, the query is completed
      // You can emit a signal
      eventHandler.emit('done', 'The query is completed')
    })
  })
}
Then give your callback function to the eventHandler, which "reacts" to the 'done' event:
eventHandler.on('done', () => {
  // Do something
})
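If the listener needs the actual rows rather than just a notification, a small variation (still a sketch) is to emit the data along with the event:

// Inside the db.all callback, pass the rows along with the event:
eventHandler.emit('done', rows)

// ...and receive them in the listener:
eventHandler.on('done', (rows) => {
  console.log('got', rows.length, 'rows')
})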
I'm building out an api using Node, MongoDB and Mongoose. One thing that is bugging me is that you can't seem to set multiple fields at once:
app.put('/record/:id', function(req, res) {
  Record.findById(req.params.id, function(err, doc) {
    if (!err) {
      doc.update(req.params);
      doc.save();
      ...
However, it seems that you have to work out the update query and run it on the Model object rather than on the document object. Unless you want to assign individual properties and run save() at the end.
Is there any way of accomplishing this without having to write a Mongo query?
jsaak's answer is good but doesn't work for nested objects. I elaborated on his answer by searching and setting nested objects.
I added these functions to a utility.js file
var _ = require('underscore');

exports.updateDocument = function(doc, SchemaTarget, data) {
  for (var field in SchemaTarget.schema.paths) {
    if ((field !== '_id') && (field !== '__v')) {
      var newValue = getObjValue(field, data);
      console.log('data[' + field + '] = ' + newValue);
      if (newValue !== undefined) {
        setObjValue(field, doc, newValue);
      }
    }
  }
  return doc;
};

function getObjValue(field, data) {
  return _.reduce(field.split("."), function(obj, f) {
    if (obj) return obj[f];
  }, data);
}

function setObjValue(field, data, value) {
  var fieldArr = field.split('.');
  return _.reduce(fieldArr, function(o, f, i) {
    if (i == fieldArr.length - 1) {
      o[f] = value;
    } else {
      if (!o[f]) o[f] = {};
    }
    return o[f];
  }, data);
}
implement as:
var utils = require('./utility');

app.put('/record/:id', function(req, res) {
  Record.findById(req.params.id, function(err, doc) {
    if (!err) {
      utils.updateDocument(doc, Record, req.params);
      doc.save();
      ...
Maybe this has changed since this question was first asked, but you can update multiple paths in Mongoose with the set method like:
// object
doc.set({
  path : value,
  path2 : {
    path : value
  }
});
doc.save();
References
http://mongoosejs.com/docs/api.html#document_Document-set
Direct updating is not recommended according to this document:
http://mongoosejs.com/docs/2.7.x/docs/updating-documents.html
I solved it like this:
Book.findOne({isbn: req.params.isbn}, function (err, book){
  if (err) {
    res.send(422, 'update failed');
  } else {
    // update fields
    for (var field in Book.schema.paths) {
      if ((field !== '_id') && (field !== '__v')) {
        if (req.body[field] !== undefined) {
          book[field] = req.body[field];
        }
      }
    }
    book.save();
  }
});
If you want to update the entire document, you can delete the document based on its id and store the entire object again.
That object must contain data for each and every field of the mongo document.
Here is an example.
mongoDBCollectionObject.findOneAndRemove({ // -- it will delete the entire document
  _id: req.body.fieldsdata._id // here fieldsdata is an exact copy, with modifications, of the previous data
}, function(err, data) {
  var newFieldsData = new mongoDBCollectionObject(fieldsdata); // -- fieldsdata: updated data
  newFieldsData.save(function(err, data) { // save document to that collection with updated data
    if (err) {
      console.log(err);
    } else
      res.json({
        success: true
      });
  });
})
To clarify the question, it looks like you are taking the Request parameters and using those to find and update the given document.
Is there any way of accomplishing this without having to write a Mongo query?
The obvious answer is to update the Model object with the value from the Request. Which is what you suggest...
Unless you want to assign individual properties and run save() at the end.
But it seems like you don't want to do this? It sounds like you want to update the Model object directly from the Request object?
You can do this if you really want. You just loop through req.params and set the doc values where appropriate.
for (var i in req.params) {
  if (req.params[i] != doc[i]) {
    doc[i] = req.params[i];
  }
}
It should be as simple as this. However, you only want to do this if you have a whole bunch of validation code on the Model objects. The whole point of the Model is that you don't want random data getting into the DB. The loop above will generically "set" the correct values, but you'll definitely need to include code for authentication, authorization and validation around that simple for loop, as sketched below.
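For instance, a rough sketch of such a guard (the whitelist below is hypothetical, not part of the original answer):

// Hypothetical whitelist of fields a client may change via this endpoint.
var updatableFields = ['title', 'description', 'tags'];

for (var i in req.params) {
  // skip anything not explicitly allowed (this also keeps _id and __v untouched)
  if (updatableFields.indexOf(i) === -1) continue;
  if (req.params[i] != doc[i]) {
    doc[i] = req.params[i];
  }
}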
Try updating the collection without the find, like this:
Record.update({_id: req.params.id}, {$set: { field: request.field }}, {upsert: true}, function(err) {...})
The upsert option creates the document if it does not exist.
In case you have a new object and want to update the whole object in the database, you can update multiple fields at once like this:
find the object
get all schema paths (fields)
save the new object.
SomeModel.findOne({ 'id': 'yourid' }, function (err, oldObject) {
  if (err) return handleError(err);
  // get all schema paths (fields)
  SomeModel.schema.eachPath(function(path) {
    // leave _id and __v alone
    if (path != '_id' && path != '__v') {
      // update the data from the new object
      oldObject[path] = newObject[path];
    }
  })
  oldObject.save(function(err) {
    if (err)
      console.log(err)
  });
})
A neat and clean approach would be to use async/await with findOneAndRemove followed by create. Here is the sample code:
try {
  let resp = await this.findOneAndRemove({ _id: req.body._id });
  let entry = await this.create(req.body);
} catch (err) {
}
Don't forget to mark the enclosing function as async.
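For example, a hedged sketch (the model name and response handling are assumptions, not from the answer):

async function replaceRecord(req, res) {
  try {
    await SomeModel.findOneAndRemove({ _id: req.body._id }); // remove the old document
    const entry = await SomeModel.create(req.body);          // insert the replacement
    res.json(entry);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
}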