I am using the Twitter API and MongoDB in my code. The database reflects the correct output, but the script does not terminate. I guess the problem is with the db.server.find({id: myid}, cb); statement in the code below, but I don't know how to work it out.
var Twit = require('../lib/twitter'),
    conf = require('../config1');

var myid;
var twit = new Twit(conf);

var databaseUrl = "mydb2"; // "username:password@example.com/mydb"
var collections = ["server", "followers"];
var db = require("mongojs").connect(databaseUrl, collections);

twit.get('account/verify_credentials', function (err, reply) {
    myid = reply.id;

    function addToServer(myid, cb) {
        db.server.find({
            id: myid
        }, cb);
    }

    addToServer(myid, function (err, resp) {
        if (err) {
            console.log("err");
        } else if (resp.length > 0) {
            console.log("My Id present in server present");
        } else {
            console.log("New to the app.So updating server ");
            db.server.insert({
                id: myid
            });
            db.followers.insert({
                id: myid,
                following: []
            });
        }
    });
});
P.S.: This is part of my code. I have also tried the process.exit(0) function, but it didn't help.
I think your issue is related to this: https://github.com/mafintosh/mongojs/issues/15.
Here's a gist. If I call db.close() the program exits, and if I don't, it doesn't. So process.on('exit') must not be the right place to call it.
The issue is that you have a persistent TCP connection open to the DB, and as long as that connection is open, the script won't shut down.
Is this a run-once script, or do you need to keep this thing running?
EDIT:
Since the script only needs to run once, I'd use callbacks on your 2 database queries and close the database down in the last callback.
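A minimal sketch of what that could look like, assuming the same mongojs setup as above (the insert callbacks are the addition; mongojs accepts a callback as the last argument, and db.close() lets the process exit):
addToServer(myid, function (err, resp) {
    if (err) {
        console.log(err);
        return db.close();
    }
    if (resp.length > 0) {
        console.log("My Id present in server");
        return db.close();
    }
    console.log("New to the app. So updating server");
    db.server.insert({ id: myid }, function (err) {
        db.followers.insert({ id: myid, following: [] }, function (err) {
            db.close(); // last callback: safe to close now, so the script terminates
        });
    });
});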
I tried a Node.js CRUD operation using MongoDB, with the results also stored in a Redis cache. The first time I run the GET method it gets the data from the DB, and the second time I run it, it gets the data from the cache. But after I delete the data in the table and run the GET method another time, it is not showing the data; it shows empty data, even though the data is still stored in the Redis cache. How can I solve this issue?
cache.js
// var asyncRedis = require("async-redis")
// var myCache = asyncRedis.createClient()
var redis = require('redis');
const client = redis.createClient()
client.on('connect', function () {
    console.log('Redis client connected');
});

client.on('error', function (err) {
    console.log('Something went wrong ' + err);
});

var value;
var todayEnd = new Date().setHours(23, 59, 59, 999);

function Get_Value() {
    client.get('products', function (err, results) {
        value = JSON.parse(results);
    });
    return value;
}

function Set_Value(products) {
    client.set('products', JSON.stringify(products));
    client.expireat('products', parseInt(todayEnd / 1000));
}
exports.get_value = Get_Value;
exports.set_value = Set_Value;
routes.js
data = cache.get_value()
console.log(data)
if (data) {
    console.log("GET")
    res.send(data)
} else {
    console.log("SET")
    const r = await db.collection('Ecommerce').find().toArray();
    res.send(r)
    data = cache.set_value(r)
}
hari,
Your Get_Value looks a little odd to me. The Redis GET is executed asynchronously, so when you place the return statement outside the callback, the function returns right away, while value is still undefined.
The easiest way to solve this would be to call Get_Value with a callback to continue when redis GET returns.
function Get_Value(callback) {
    client.get('products', function (err, results) {
        let value = JSON.parse(results);
        callback(value);
    });
}
You would use it like this:
Get_Value(function (value) {
    console.log("products: " + value);
});
Another option would be to use the Promise API with Node Redis (see the docs here: https://github.com/NodeRedis/node_redis).
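For example, a minimal sketch using Node's util.promisify on the callback-style client from above (illustrative, not the only way to do it):
const { promisify } = require('util');
const getAsync = promisify(client.get).bind(client);

async function Get_Value() {
    const results = await getAsync('products');
    return JSON.parse(results); // resolves with the parsed products, or null if nothing is cached
}
// usage in the route (assuming it is an async handler): const data = await cache.get_value();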
Does this help?
In my original function I need to make two requests to two different databases within the same Cloudant login.
var cloudant = require('cloudant')('https://cloudant_url');
var userdb = cloudant.db.use('user');
var addrdb = cloudant.db.use('address');

function onChange(username) {
    userdb.get(username, function (err, resp) {
        var user_id = resp.id;
        addrdb.get(user_id, function (err1, resp1) {
            var addr = resp1.address;
        });
    });
}
var nockVar = function () {
    nock(testCloudantDBURL)
        .get('/user/jack')
        .reply(200, {'id': 123});
    nock(testCloudantDBURL)
        .get('/address/123')
        .reply(200, {'address': '123'});
};
describe('Test Cloudant Listener code', function () {
    nockVar();
    it('test get scenario', function () {
        onChange('jack');
    });
});
With this, only the first call works and I can get the id: 123. The second call on the address DB is not getting intercepted.
With nock I'm able to intercept only the first call; the second call is not happening. Any pointers?
This happens because your code is executed asynchronously and your test doesn't wait for the userdb.get and addrdb.get to finish. Easiest (not best) way to handle this is to add a done callback to your test scenario and call it as soon as your onChange function is finished. Roughly something like:
function onChange(username, done) {
    userdb.get(username, function (err, resp) {
        var user_id = resp.id;
        addrdb.get(user_id, function (err1, resp1) {
            var addr = resp1.address;
            done();
        });
    });
}
it('test get scenario', function (done) {
    onChange('jack', done);
});
You might also consider working with Promise-based code.
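For illustration, a minimal Promise-based sketch, assuming the Cloudant client returns Promises when no callback is passed (newer @cloudant/cloudant releases do) and that Mocha is the test runner, so returning the Promise makes the test wait for both intercepted calls:
function onChange(username) {
    return userdb.get(username).then(function (resp) {
        return addrdb.get(resp.id);
    }).then(function (resp1) {
        return resp1.address;
    });
}

it('test get scenario', function () {
    return onChange('jack'); // Mocha waits until both nock-intercepted requests resolve
});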
I wish to update a Mongo collection in some code that looks like this:
var Q = Npm.require('q');
var db = new Mongo.Collection('mydb');

function doSomething() {
    var d = Q.defer();
    setTimeout(function () {
        d.resolve();
    }, 1000);
    return d.promise;
}

doSomething().then(function () {
    console.log('before find');
    var records = db.find({}).fetch(); // blocking operation never completes
    console.log('after find');
    console.log(records); // should be []
});
When running Meteor with the above code, it gets as far as logging "before find", but then execution halts waiting for db.find to complete. It never completes.
Are there any solutions or workarounds for this?
Update: it seems to be the .fetch() that causes the issue. I need this part though; I want to manipulate the data I am receiving from Mongo.
Instead of using fetch, add a callback function.
It will allow you to manipulate the data after it's retrieved:
var records = db.find({}, function (error, data) {
    // Do something with your data here
});
Edit: with the callback above, a cursor is returned. If you want to return an array with the results, use the following:
var records = db.find({}).toArray(function (error, data) {
    // Do something with your data here
});
I am trying to keep a session open with the Bloomberg Public API, relaying calls from my own service's API to it to fetch data. I am running the Node.js/Express server locally right now. I have an API route that works fine the first time: I send the GET and quickly get the response back. If I then send another GET to the same route, I can see the data that the Bloomberg API returns in my server console, but it seems that the server gets stuck at the res.send(...) and I have no idea why. I've tried numerous things like moving code blocks around and forcefully destroying variables, but to no avail. Do you see anything obvious that would/might work?
'use strict';

var _ = require('lodash');
var util = require('util'); // needed for util.inspect below
var Blpapi = require('./blpapi.model');

var count = 0;
var blpapi = require('blpapi');

// Add 'authenticationOptions' key to session options if necessary.
var session = new blpapi.Session({ serverHost: '10.8.8.1', serverPort: 8194 });
var service_refdata = 1; // Unique identifier for refdata service

session.start();

session.on('SessionStarted', function (m) {
    console.log(m);
    session.openService('//blp/refdata', service_refdata);
});

session.on('ServiceOpened', function (m) {
    console.log(m);
});

session.on('SessionStartupFailure', function (m) {
    console.log('SessionStartupFailure', util.inspect(m));
    session.stop();
    session.destroy();
});

session.on('SessionTerminated', function (m) {
    console.log('Session Terminated');
    session.stop();
    session.destroy();
});

exports.getStock = function (req, res) {
    var stock = req.url.substring(8, req.url.length);
    stock = stock.replace(/_/g, ' ');

    session.on('HistoricalDataResponse', function (m) {
        console.log(m);
        if (m.eventType === 'RESPONSE' && m.correlations[0].value === 101) {
            console.log('send');
            res.send(m.data.securityData);
        } else {
            res.send(500);
        }
    });

    newRequest(stock);
};

function newRequest(sec) {
    if (typeof sec !== 'string') return;

    session.request('//blp/refdata', 'HistoricalDataRequest',
        { securities: [sec],
          fields: ['PX_LAST', 'OPEN'],
          startDate: "20140101",
          endDate: "20140301",
          periodicitySelection: "DAILY" }, 101);
}

function handleError(res, err) {
    return res.send(500, err);
}
Edit1: If I change the res.send(m.data.securityData); to res.send(201);, the requests come back fine, so I'm figuring it has to do with that object.
I figured it out. It's because I was registering the session.on('HistoricalDataResponse', ...) listener inside my route controller. Moving it out and adding a bit of logic around it solved the problem.
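For anyone hitting the same thing, a rough sketch of that kind of refactor (the pending map and the correlation-id counter here are illustrative assumptions, not the original code): register the HistoricalDataResponse listener once, and have the route controller only file the request and remember which response object to answer.
var pending = {};            // correlation id -> Express response object
var nextCorrelationId = 101; // illustrative counter

// Registered once, outside the route controller, so listeners don't pile up per request
session.on('HistoricalDataResponse', function (m) {
    var res = pending[m.correlations[0].value];
    if (!res) return;
    delete pending[m.correlations[0].value];
    if (m.eventType === 'RESPONSE') {
        res.send(m.data.securityData);
    } else {
        res.send(500);
    }
});

exports.getStock = function (req, res) {
    var stock = req.url.substring(8).replace(/_/g, ' ');
    var cid = nextCorrelationId++;
    pending[cid] = res; // remember which response belongs to this request
    session.request('//blp/refdata', 'HistoricalDataRequest',
        { securities: [stock],
          fields: ['PX_LAST', 'OPEN'],
          startDate: "20140101",
          endDate: "20140301",
          periodicitySelection: "DAILY" }, cid);
};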
I am trying to develop an API with Node.js which accepts an object containing multiple queries to MongoDB and responds with an object containing the different results (as JSON).
I use Express and my code is:
var nb_query = 0;
var results;

//api
app.get("/api/:p", api);

function api(req, res) {
    var jsonq = decodeURIComponent(req.params.p);
    //console.log(jsonq);
    var queries = JSON.parse(jsonq);
    nb_query = Object.keys(queries).length;
    results = {};
    for (var nq in queries) { // for each query
        do_find_query(nq, queries[nq], function () {
            //todo : managing head
            res.end(JSON.stringify(results));
        });
    }
} // end of api function

function do_find_query(name_query, query, callback) {
    var collection = fdb.collection(query.collection);
    collection.find(query.find, query.fields, query.options).toArray(function (err, docs) {
        if (err) throw err;
        results[name_query] = docs;
        nb_query--;
        if (nb_query == 0)
            callback();
    });
}
As you can see, I use global vars to store the results and the nb_query counter, and I wonder whether that is a problem or not (right now it isn't, since I'm alone on the server, but what about when we are thousands or billions? :-) ).
As I understand Node, there is only one thread, and Node will run a started job to completion unless it hits an I/O call. In that case, it queues the I/O together with its callback and starts handling a new request.
If this is correct, Node could be answering two or more different calls to my API (which need Mongo calls) at the same time, and they would store different values in the global vars, which are shared (there's only one thread).
If this is right, I would also like to know the best way to change it.
My idea is to declare results and nb_query inside the api function and pass them to do_find_query, but nb_query isn't an object, so it would not be updated correctly.
I know I can put nb_query inside an object to pass it 'by reference', but I want to know first whether that is necessary, and whether it is a good way or there is a better one.
Thanks for your help !
Doom.
------------------------------------------------------------------------------
EDIT:
I have changed my code and it seems to work without global vars and without the async library (which, to me, is using a hammer to swat a fly):
//api
app.get("/api/:p", api);

function api(req, res) {
    var jsonq = decodeURIComponent(req.params.p);
    //console.log(jsonq);
    var queries = JSON.parse(jsonq);
    var query_names = Object.keys(queries);
    var results = {};
    var query_left = query_names.length;

    query_names.map(function (query_name) {
        var query = queries[query_name];
        var collection = fdb.collection(query.collection);
        collection.find(query.find, query.fields, query.options).toArray(function (err, docs) {
            if (err) throw err; //todo : handle errors in a better way
            results[query_name] = docs;
            if (--query_left == 0)
                res.json(results);
        });
    });
}
But I still do not know whether doing this is necessary or not. (I think it is, but I am new to Node, so...)
Thanks to mscdex, as his answer made me aware of res.json() and helped me understand variable scope.
Instead of using globals, try this (uses the async module):
var async = require('async');
// ...
app.get('/api/:p', api);

function api(req, res) {
    var jsonq = decodeURIComponent(req.params.p),
        queries = JSON.parse(jsonq),
        keys = Object.keys(queries),
        queriesLeft = keys.length,
        results = {};

    async.each(keys, function (name, cb) {
        var query = queries[name],
            collection = fdb.collection(query.collection);
        collection.find(query.find, query.fields, query.options)
            .toArray(function (err, docs) {
                if (err) return cb(err);
                results[name] = docs;
                cb();
            });
    }, function (err) {
        if (err) throw err; // TODO: handle better
        res.json(results);
    });
} // end of api function