Find data from MongoDB database and display it in a new HTML page - node.js

Through an AJAX request I get the data from the client and save it in a MongoDB database (Mongoose) with a save query. Now I want to know how to find that data and display it on a new page.
I have received the data from the client and saved it in the database. Now I want the callback to find the data in the database and display it on a new page using response.redirect. Please guide me.
$("#saveChanges5").live('click',function(){
var sendingObj = {
regOperation: $("#area_operation").val()
,regFieldAct: $("#field_activity").val()
,regOther: $("#other_details").val()
};
$.ajax({
url:'/WorkDetails'
,type:'post'
,data: sendingObj
,success:function(){
alert('Successfully saved')
},
error: function(){
alert("Saving Failed")
}
})
});
app.post("/WorkDetails",function(req ,res){
console.log(req.body);
saveWorkDetails(req.body.regOperation ,req.body.regFieldAct ,req.body.regOther ,function(){
res.send("");//here i want to add new code
});
});
function saveWorkDetails(area_operation ,field_activity ,other_details , callback){
console.log("save CALLED");
var receivedObj = new WorkDetailInfo({
area_operation:area_operation ,
field_activity:field_activity ,
other_details:other_details
});
console.log(receivedObj);
receivedObj.save(function(err){
console.log("inside Save ");
if(err){
//res.send(err);
console.log(err);
}
else{
callback();
}
});
}

You can make use of the narwhal-mongodb APIs.
Following is an example usage:
var MongoDB = require("mongodb");
var db = new MongoDB.Mongo().getDB("mydb");
var colls = db.getCollectionNames();
colls.forEach(function(el) { print(el); });
var coll = db.getCollection("testCollection");
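
For the specific thing the question asks (find the saved documents with Mongoose and show them on a new page), a minimal sketch is shown below. It assumes the WorkDetailInfo model from the question and an already-configured view engine; the GET route and the view name workDetails are assumptions, not part of the original code.
app.get("/WorkDetails", function (req, res) {
    // Sketch only: fetch every saved document and render it in a template.
    WorkDetailInfo.find({}, function (err, docs) {
        if (err) return res.status(500).send(err);
        res.render("workDetails", { details: docs }); // "workDetails" is a hypothetical view
    });
});
Note that because the save is triggered via $.ajax, calling res.redirect in the POST handler only redirects the AJAX request itself, not the browser; a simpler option is to keep res.send and navigate from the AJAX success callback with window.location = '/WorkDetails'.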

Related

How do I read and save all cloudant documents in a list

So I'm trying to pull all the data out of a Cloudant database using Node.js, and I'm having some issues figuring out how to get all my documents. Basically, I have a whole React webpage built, but I'm new to Node.js and Cloudant. Using just the node run command I can connect to my database and pull/log the database information, but I'm having trouble getting to the documents and then putting all their data in a list so I can create a table view of the data. Any suggestions would be amazing, and I obviously removed my credentials for security purposes.
var cloudant = new Cloudant({ url: 'xxxx', plugins: { iamauth: { iamApiKey: 'xxxx' } } });

cloudant.db.list(function(err, body) {
    body.forEach(function(db) {
        console.log(db);
    });
});
var readDocument = function(callback) {
    console.log("Reading document 'slay-data'");
    cloudant.db.get('slay-data', function(err, data) {
        console.log('Error:', err);
        console.log('Data:', data);
        // keep a copy of the doc so you know its revision token
        doc = [data];
        callback(err, data);
        //console.log(doc);
    });
    cloudant.db.list(function(err, data) {
        console.log(err, data);
    });
};

var docu = readDocument();
console.log(docu);
If you want all the data out of a Cloudant database you can do this (I've switched to async/await Node, as it's easier to read):
const cloudant = new Cloudant({ url: 'xxxx', plugins: { iamauth: { iamApiKey: 'xxxx' } } });
const db = cloudant.db.use('mydb')

const readAllData = async function () {
  // extract all the data, including the document bodies
  const response = await db.list({ include_docs: true })
  // response.rows is an array with a 'doc' attribute for each document
  const docs = response.rows.map((r) => { return r.doc })
  return docs
}

readAllData()
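
readAllData is an async function, so the caller has to wait for the returned Promise before using the result, for example:
// consume the Promise returned by readAllData()
readAllData()
  .then((docs) => {
    console.log(`Fetched ${docs.length} documents`)
    // e.g. hand docs to the React front-end to build the table view
  })
  .catch(console.error)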

Save updated models to MongoDB

I have the following code to fetch some data from the DB (Mongo).
function getAllUsers() {
    var UsersPromise = Q.defer();
    UserSchema.find({}, function(err, data) {
        if (err) {
            UsersPromise.reject(err);
        } else {
            UsersPromise.resolve(data);
        }
    });
    return UsersPromise.promise;
}
Then I modify each of these models. I add certain fields to the model depending on the type of user. (This is working correctly).
function buildUsers(users) {
    // my code iterates over users and adds
    // properties as required.
    // Working fine.
    return users; // updated users.
}
Now I want to save these updated models back to mongo and this is where it's making me pull my hair.
function saveUsers(users) {
    // here, the users are received correctly. But the following line to save the users fails.
    var SaveUsersPromise = Q.defer();
    UserSchema.save(users, function(err, data) {
        if (err) {
            SaveUsersPromise.reject(err);
        } else {
            SaveUsersPromise.resolve(data);
        }
    });
    return SaveUsersPromise.promise;
}
Lastly I call these functions like:
DB.connect()
    .then(getAllUsers)
    .then(buildUsers)
    .then(saveUsers)
    .catch(errorHandler);
Everything works correctly until I call UserSchema.save. What could be the problem?
PS: I am using mongoose.
TIA.
UserSchema.save accepts a single instance; you have to loop through users and save each one. Mongoose doesn't have bulk inserts implemented yet (see issue #723).
Here's a simple implementation using async.eachSeries:
function saveUsers(users) {
    var async = require('async'); // <== npm install async --save
    var SaveUsersPromise = Q.defer();
    async.eachSeries(users, function(user, done) {
        UserSchema.save(user, done);
        // or
        user.save(done); // if user is a Mongoose document object
    }, function(err) {
        if (err) {
            SaveUsersPromise.reject(err);
        } else {
            SaveUsersPromise.resolve();
        }
    });
    return SaveUsersPromise.promise;
}
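
If each element of users is already a Mongoose document (as the user.save(done) variant above assumes), an alternative sketch that stays within Q and avoids the extra async dependency is to save the documents in parallel and combine the promises:
function saveUsers(users) {
    // Q.ninvoke wraps the Node-style user.save(callback) in a promise;
    // Q.all resolves once every save has finished (or rejects on the first error).
    return Q.all(users.map(function (user) {
        return Q.ninvoke(user, 'save');
    }));
}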

Querying MongoDB documents in real time

I'm building a web application that will work with big data.
I will mine Twitter data using Apache Storm and subsequently save it in a MongoDB database.
At the same time, this data has to be fetched via Node.js in real time and sent via socket.io to my front-end.
Is there a way to query MongoDB via Node.js in real time?
Thanks.
I am working on a project with MongoDB, and I used the mongodb npm module to query the database in real time.
First I get a list of the collections in my database:
// My server controller page
var MongoClient = require('mongodb').MongoClient,
    assert = require('assert');

exports.getCollections = function(req, res) {
    mongoose.connection.db.collectionNames(function(err, names) {
        if (err) {
            console.log(err);
        } else {
            res.status(200).send({ collections: names });
        }
    });
};
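
As a side note, collectionNames was removed in later versions of the underlying driver; if you are on a newer driver, the equivalent call is listCollections (a sketch under that assumption):
exports.getCollections = function(req, res) {
    // listCollections() returns a cursor of collection descriptors ({ name: ... })
    mongoose.connection.db.listCollections().toArray(function(err, names) {
        if (err) {
            console.log(err);
        } else {
            res.status(200).send({ collections: names });
        }
    });
};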
On the front end, I do an Angular ng-repeat to list my collections, then when I click on the collection name, I run the following code:
MongoClient.connect(url, function (err, db) {
    assert.equal(null, err);
    var collection = db.collection(req.body.collName);
    collection.find({}).limit(req.body.limit).toArray(function (err, docs) {
        if (err) {
            console.log(err);
        } else {
            res.status(200).send({ r: docs, count: docs.length });
            db.close();
        }
    });
});
Here is my client-side Angular code:
// get the collection list upon page load
$http.get('collections')
    .success(function(c) {
        $scope.collList = c.collections;
    })
    .error(function(err) {
        $scope.error = err;
    });

// function run when a collection is selected
$scope.doFind = function(coll) {
    $scope.collViewing = coll;
    $http.post('/basicfind', { collName: coll, limit: $scope.limiter })
        .success(function(data) {
            $scope.results = data.r;
            $scope.countOfResults = data.count;
        })
        .error(function(err) {
            $scope.error = err.message;
        });
};
Hope that helps; let me know if you need me to share any more code.
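
For genuinely push-style updates (rather than re-querying on every click), another option worth sketching is MongoDB change streams combined with socket.io. This is only a sketch and assumes MongoDB 3.6+ running as a replica set, the official mongodb driver, and an existing socket.io server instance io; the database and collection names here are hypothetical.
const { MongoClient } = require('mongodb');

async function streamTweets(io) {
    const client = await MongoClient.connect(url);           // same url as in the code above
    const tweets = client.db('mydb').collection('tweets');   // hypothetical names

    // Emit every newly inserted document to all connected front-end clients.
    tweets.watch([{ $match: { operationType: 'insert' } }])
        .on('change', function (change) {
            io.emit('tweet', change.fullDocument);
        });
}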

Store values from the SpookyJS environment into MongoDB

I am trying to scrape data from a site with SpookyJS and store it in MongoDB. I am able to get data from the website, but I am not able to save the scraped data from the SpookyJS environment to MongoDB. To save the scraped data, I passed my database model instance to SpookyJS. I referred to the link below:
https://github.com/SpookyJS/SpookyJS/wiki/Introduction
Below is my code, where I extract the data into the prod_link_info variable and pass its values to MongoDB:
var product_model = require('./product').product_model;

// get results
spooky.then([{ product_model: product_model }, function() {
    this.waitForSelector('li[id^="product_"]', function() {
        // Get info on all elements matching this CSS selector
        var prod_link_info = this.evaluate(function() {
            var nodes = document.querySelectorAll('li[id^="product_"]');
            return [].map.call(nodes, function(node) { // Alternatively: return Array.prototype.map.call(...)
                return node.querySelector('a').getAttribute('href') + "\n";
            });
        });
        // insert values in mongodb
        for (var i = 0; i < prod_link_info.length; i++) {
            product_model.create({
                prod_link_info: prod_link_info[i],
            }, function(err, product) {
                if (err) console.log(err);
                else console.log(product);
            });
        }
    });
}]);
Below is the code for the database schema and model used above.
var mongoose = require('mongoose');
var Schema = mongoose.Schema;

// create a schema
var productSchema = new Schema({
    prod_link_info: String,
});

var product_model = mongoose.model('product_model', productSchema);

module.exports = {
    product_model: product_model
};
But when I run the above code it gives me the following error: ReferenceError: Can't find variable: product_model.
I want to store the data extracted by SpookyJS in MongoDB. Please suggest where I am going wrong.
When you pass a hash of variables to Spooky, it is converted to a string using JSON.stringify and then converted back to an object using JSON.parse in the Casper environment (please refer to the docs), so it is impossible to pass a Mongoose model to the Casper environment (moreover, there is no actual reason to do so).
To solve the problem, you should pass the data out of the Spooky (Casper) environment. As far as I know, the only way to do this is to emit the data and then handle it using spooky.on. Your example should look like:
var product_model = require('./product').product_model;

// get results
spooky.then([{}, function() {
    this.waitForSelector('li[id^="product_"]', function() {
        // Get info on all elements matching this CSS selector
        var prod_link_info = this.evaluate(function() {
            var nodes = document.querySelectorAll('li[id^="product_"]');
            return [].map.call(nodes, function(node) { // Alternatively: return Array.prototype.map.call(...)
                return node.querySelector('a').getAttribute('href') + "\n";
            });
        });
        this.emit('data.ready', prod_link_info);
    });
}]);

spooky.on('data.ready', function(prod_link_info) {
    // insert values in mongodb
    for (var i = 0; i < prod_link_info.length; i++) {
        product_model.create({
            prod_link_info: prod_link_info[i],
        }, function(err, product) {
            if (err) console.log(err);
            else console.log(product);
        });
    }
});
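
Since the spooky.on handler runs back in the Node process, product_model is in scope there. As a small variation (a sketch, not part of the original answer), Mongoose's Model.create also accepts an array, so the whole list can be inserted in a single call:
spooky.on('data.ready', function (prod_link_info) {
    // build one document per scraped link and insert them all at once
    var docs = prod_link_info.map(function (link) {
        return { prod_link_info: link };
    });
    product_model.create(docs, function (err, products) {
        if (err) console.log(err);
        else console.log(products);
    });
});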

NodeJS MongoDB: Multiple save requests not working

I am using node-mongodb-native in my application. I send multiple POST requests to the Node.js server to save/update each document, but only one document gets updated and all the other documents do not change. The data received on the server is correct.
save: function(req, res) {
    data = req.body;
    if (!data._id) {
        data._id = new ObjectID();
    } else {
        data._id = ObjectID(data._id);
    }
    mColl(req.params.collname, function(collection, db) {
        collection.save(data, { safe: true }, function(err, result) {
            if (err) {
                res.send(err);
            } else {
                res.send(result);
            }
        });
    });
}
I am also not getting a response for the requests.
For starters, don't do this:
data = req.body;
When a new request comes in, you're overwriting the (global!) data variable, and all kinds of undefined stuff can happen. So always declare a new variable:
var data = req.body;
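
With that single change applied, a sketch of the full handler (otherwise identical to the code in the question) looks like this:
save: function(req, res) {
    var data = req.body;                 // local to this request, not shared between requests
    if (!data._id) {
        data._id = new ObjectID();       // new document: generate an id
    } else {
        data._id = ObjectID(data._id);   // existing document: convert the string id
    }
    mColl(req.params.collname, function(collection, db) {
        collection.save(data, { safe: true }, function(err, result) {
            if (err) {
                res.send(err);
            } else {
                res.send(result);
            }
        });
    });
}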
