I defined a simple gulp task that can be run in the terminal with gulp load. Its purpose is to get the number of users that do not have the location property set.
However, after the number is returned successfully, the process hangs in the terminal and I need to Ctrl-C to stop it. Note: this is not an async call and uses Mongoose to access the DB.
gulp.task('load', function () {
    dbCall.getUserNumberWithoutLocation();
});

var getUserNumberWithoutLocation = function() {
    var query = User.find({ 'location': null });
    query.exec(function(err, users) {
        for (var i = 0; i < users.length; i++) {
            console.log(users[i].location);
        }
        console.log(users.length);
    });
};
Although the prompt still does not return after the gulp task is done (which is probably an issue on my local machine), I put my code here. The callback function is passed into the inner getUserNumberWithoutLocation function, where it is called once User.find has completed.
gulp.task('load', ['style'], function (callback) {
    dbCall.getUserNumberWithoutLocation(callback);
});

exports.getUserNumberWithoutLocation = function(callback) {
    var query = User.find({ 'location': null });
    query.exec(function(err, users) {
        if (err) {
            return callback(err);
        }
        console.log(users.length);
        callback();
    });
};
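If the prompt still hangs even after the callback fires, it is usually because the open Mongoose connection keeps the Node process alive. A minimal sketch (assuming the default mongoose connection is in scope) that closes the connection before signalling gulp:

exports.getUserNumberWithoutLocation = function(callback) {
    User.find({ 'location': null }).exec(function(err, users) {
        if (err) {
            return callback(err);
        }
        console.log(users.length);
        // close the connection so the node process can exit cleanly
        mongoose.disconnect(function() {
            callback();
        });
    });
};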
I think the rendering takes place before the search for the string in the files. I have tried different methods but can't seem to get this working; any help will be appreciated. I'm a noob at Node.js. I'm trying to get the id of the user, run the query to get all the data, then see if he is in any of the given lists, and finally render the page.
const j = [];
let name = '';
const filename = [];
var ext = '';

module.exports = function(app, express) {
    app.use(bodyParser.urlencoded({ extended: false }));
    app.use(bodyParser.json());

    app.post('/cusdetails', isLoggedIn, function (req, res) {
        var cusid = req.body.cusid;
        var insertQuerys = "SELECT * FROM customer WHERE cusid=? ORDER BY rowid DESC LIMIT 1";
        connection.query(insertQuerys, [cusid],
            function(err, rows) {
                rows.forEach((row) => {
                    name = row.fncus;
                });
                fs.readdir('./views/iplist', function(err, files) {
                    if (err)
                        throw err;
                    for (var index in files) {
                        j.push(files[index]);
                    }
                    j.forEach(function(value) {
                        var k = require('path').resolve(__dirname, '../views/iplist/', value);
                        fs.exists(k, function(fileok) {
                            if (fileok) {
                                fs.readFile(k, function(err, content) {
                                    if (err) throw err;
                                    if (content.indexOf(name) > -1) {
                                        ext = path.extname(k);
                                        filename.push(path.basename(k, ext));
                                    }
                                });
                            }
                            else {
                                console.log(" FileNotExist ");
                            }
                        });
                    });
                });
                // these two lines run before the asynchronous readdir/readFile work
                // above has finished, so filename is still empty when the page renders
                console.log(filename);
                res.render('cusdetails.ejs', { rows: rows, user: req.user, aml: filename });
            });
    });
};
You can create a simple Promise wrapper and then use it inside an async/await function to pause execution until it resolves.
// use the mysql2 package as it provides a promise API, less work than writing promise wrappers
const mysql = require('mysql2/promise');

// create the connection to the database
// note: with mysql2/promise this returns a promise, which we await in the route below
const connectionPromise = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    database: 'test'
});
// sample wrapper
function some(k) {
    // more advisable to have local variables; why do you need this to be an array?
    var filename = [];
    return new Promise((resolve, reject) => {
        // doing this is also not recommended, check the nodejs documentation on fs.exists for more info
        fs.exists(k, function(fileok) {
            if (fileok) {
                fs.readFile(k, function(err, content) {
                    if (err) return reject(err);
                    if (content.indexOf(name) > -1) {
                        ext = path.extname(k);
                        filename.push(path.basename(k, ext));
                    }
                    // resolve unconditionally, otherwise the promise never settles
                    // when the name is not found in the file
                    resolve(filename);
                });
            }
            else {
                // reject(new Error("FileNotExist"))
                console.log(" FileNotExist ");
            }
        });
    });
}
// note the use of async
app.post('/cusdetails', isLoggedIn, async function (req, res) {
    var cusid = req.body.cusid;
    var insertQuerys = "SELECT * FROM customer WHERE cusid=? ORDER BY rowid DESC LIMIT 1";
    // the connection promise from above resolves to the actual connection
    const connection = await connectionPromise;
    // using await to pause execution; waits till the query is finished
    const [rows] = await connection.query(insertQuerys, [cusid]);
    rows.forEach((row) => {
        name = row.fncus;
    });
    // then you can
    var result = await some(k)
    ...
Note, however, that this way you lose the advantage of concurrent execution, as it is kind of blocking. If the result of one call is not used in the other, you can start both in parallel and await the results later to achieve sequencing, like:
const rowsPromise = connection.query(insertQuerys, [cusid]);
const resultPromise = some(k);
const [rows] = await rowsPromise; // do something
const result = await resultPromise; // do something
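An equivalent, arguably tidier pattern for the same parallel wait is Promise.all; a small sketch reusing the connection and some() from above:

// run both operations concurrently and wait for both to settle
const [queryResult, fileResult] = await Promise.all([
    connection.query(insertQuerys, [cusid]),
    some(k)
]);
const [rows] = queryResult; // mysql2 resolves queries to [rows, fields]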
JavaScript is asynchronous. This means that if you have a function with a callback (i.e. your query), the callback will be called asynchronously, at an unknown time, while the rest of the code executes.
You need to look up some tutorials on how to deal with callbacks to get a proper understanding of it. Another method is using async/await and/or promises.
Basically, if you take the following code:
console.log("this will print first");
setTimeout(function () {
    console.log("this will print last");
}, 1000);
console.log("this will print second");
If you run the code above, the top level is executed synchronously: it first calls console.log, then it executes setTimeout, which itself returns synchronously. It schedules a timer, then says "I'm ready", and the code continues to the other console.log. After 1 second (1000 milliseconds), the callback passed to setTimeout is executed, and only then is that console.log called. You cannot make the rest of the code wait this way; you need to restructure your code or read up on promises.
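To make that concrete, here is a minimal sketch of the promise-based restructuring mentioned above: wrapping the timeout in a Promise lets an async function actually wait for it.

// wrap setTimeout so it can be awaited
function delay(ms) {
    return new Promise(function (resolve) {
        setTimeout(resolve, ms);
    });
}

async function run() {
    console.log("this will print first");
    await delay(1000); // execution of run() pauses here
    console.log("this will now print second");
}
run();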
I am having trouble writing tests for my objects using Mocha. The code appears to work in practice, but during my tests, database updates happen later than expected. During test setup I insert a few records into a collection. In the teardown I clear the collection. When I search for a known record I get zero results unless I recursively invoke the callback function (as shown in the code below).
Everything is written asynchronously. The setup function returns all the records. Somehow it seems that the data is not refreshed quickly enough. Can anyone advise whether I am approaching this correctly with the recursive loop?
var myclass = require('myclass');
var mongoose = require('mongoose');
var should = require('should');

mongoose.connect('mongodb://localhost/myDbTests');
mongoose.connection.on('error', console.error.bind(console, 'connection error:'));

describe('Test my collection.', function () {
    beforeEach('load dummy data into the database', function (done) {
        myclass.load_data(dummyData, function (count) {
            count.should.be.greaterThan(5);
            done();
        });
    });

    afterEach('clear the database', function (done) {
        myclass.model.remove({}, function() {
            done();
        });
    });

    it('check that a known record exists in the database', function (done) {
        var keep_checking = function (td) {
            if (!td) {
                myclass.get_record('MYRECORD', keep_checking);
            } else {
                td.should.have.property('category', 'someCategory');
                done();
            }
        };
        keep_checking(0);
    });
});
My load_data is:
var _load_data = function (data, callback) {
    data.forEach(function (d) {
        var rec = new _model(d);
        rec.save(function(err, res) {
            if (err) return console.error(err);
        });
    });
    // note: this fires before the saves above have completed
    callback(data.length);
};
You should wait until the database connection is open to run your tests.
I achieve that in my tests with the before hook, which runs before any test (and before beforeEach as well):
before(function(done) {
    mongoose.connection.once('open', done);
});
That will prevent anything from being run before the database connection is open.
I was not loading the data correctly. Mongoose does not allow multiple-record insertion, so I had used a synchronous .forEach loop to save each object. A better way to do it, and hence my solution, is the following:
var _load_data = function (data, callback) {
    var total = data.length,
        count = 0;

    // save records one at a time; only recurse once the previous save has finished
    function saveAll() {
        var doc = data[count];
        var rec = new _model(doc);
        rec.save(function(err, res) {
            if (err) {
                throw err;
            }
            count += 1;
            if (count !== total) {
                saveAll();
            } else {
                callback(count);
            }
        });
    }
    saveAll();
};
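As an aside, current Mongoose versions do support multi-record insertion via Model.insertMany, which would collapse the loop above into a single call; a minimal sketch assuming the same _model and callback contract:

var _load_data = function (data, callback) {
    // insertMany writes the whole array in one operation
    _model.insertMany(data, function (err, docs) {
        if (err) throw err;
        callback(docs.length);
    });
};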
How do I set a variable to a query? I am trying to use functions and callbacks in Node.js to work through async code, but I am not sure how to make a query equal to a variable. What I am trying to do in this code is take a friend collection that belongs to a user, return the friends result (which I don't think I am doing correctly in the query in insertAll), and then find the user's info for each result of the query. Then I want to return the results as a render. I am not sure how to call render with this either...
Here is my code:
exports.contactList = function(req, res) {
    var insertFriend = function(data, callback) {
        var friend = User.findById({_id: user.friendStatus.fuId}, function() {
            callback(null, data);
        }, friend);
    };

    var insertAll = function(coll, callback) {
        var queue = coll.slice(0),
            friendX;
        (function iterate(){
            if (queue.length === 0) {
                callback();
                return;
            }
            friendX = queue.splice(0, 1)[0];
            insertFriend(friendX, function(err, friendX) {
                if (err) { throw err; }
                console.log(friendX + ' inserted');
                process.nextTick(iterate);
            });
        })();
    };

    insertAll([Friend.findOne({userId: req.signedCookies.userid})], function(){
    });
};
A Query object is returned if you do not pass a callback.
From http://mongoosejs.com/docs/queries.html:
When a callback function:
- is passed, the operation will be executed immediately with the results passed to the callback.
- is not passed, an instance of Query is returned, which provides a special QueryBuilder interface for you.
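To connect that to your code, here is a small sketch of the two calling styles (the model and filter are just placeholders taken from your snippet):

// with a callback: the query executes immediately
Friend.findOne({ userId: req.signedCookies.userid }, function (err, friend) {
    // friend holds the document (or null) here
});

// without a callback: you get a Query object back, and nothing runs yet
var query = Friend.findOne({ userId: req.signedCookies.userid });
query.exec(function (err, friend) {
    // the query only executes once exec is called
});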
I have an HTTP GET request and I want to parse the response and save it to my database.
If I call crawl(i) independently I get good results. But I have to call crawl() from 1 to 2000.
I get good results, but some responses seem to get lost and some responses are duplicates. I don't think I understand how to call thousands of asynchronous functions. I am using the async module's queue function, but so far I am still missing some data and still have some duplicates. What am I doing wrong here? Thanks for your help.
What I am crawling:
My node functions:
function getOptions(i) {
    return {
        host: 'magicseaweed.com',
        path: '/syndicate/rss/index.php?id=' + i + '&unit=uk',
        method: 'GET'
    };
}

function crawl(i) {
    var req = http.request(getOptions(i), function(res) {
        // note: 'data' can fire several times with partial chunks,
        // so parseLocation may receive an incomplete body here
        res.on('data', function (body) {
            parseLocation(body);
        });
    });
    req.end();
}
function parseLocation(body) {
    parser.parseString(body, function(err, result) {
        if (result && typeof result.rss != 'undefined') {
            var locationTitle = result.rss.channel[0].title;
            var locationString = result.rss.channel[0].item[0].link[0];
            var location = new Location({
                id: locationString.split('/')[2],
                name: locationTitle
            });
            location.save();
        }
    });
}
var N = 2; // # of simultaneous tasks
var q = async.queue(function (task, callback) {
    crawl(task.url);
    callback();
}, N);

q.drain = function() {
    console.log('Crawling done.');
};

for (var i = 0; i < 100; i++) {
    q.push({url: 'http://magicseaweed.com/syndicate/rss/index.php?id=' + i + '&unit=uk'});
}
[EDIT] Well, after a lot of testing it seems that the service I am crawling cannot handle so many requests that fast, because when I do the requests sequentially, I get all the good responses.
Is there a way to SLOW DOWN the async queue method?
You should have a look at this great module, async, which simplifies tasks like this. You can use queue; a simple example:
var N = 5; // # of simultaneous tasks
var q = async.queue(function (task, callback) {
    somehttprequestfunction(task.url, function(){
        callback();
    });
}, N);

q.drain = function() {
    console.log('all items have been processed');
};

for (var i = 0; i < 2000; i++) {
    q.push({url: "http://somewebsite.com/" + i + "/feed/"});
}
It keeps a window of ongoing actions, and a slot becomes available for a future task only when you invoke the callback function. The difference is that your code currently opens 2000 connections immediately, and obviously the failure rate is high. Limiting it to a reasonable value (5, 10, or 20, depending on the site and connection) will result in a better success rate. If a request fails, you can always retry it, or push the task to another async queue for another trial. The key point is to invoke callback() in the queue function, so that a slot becomes available when a task is done.
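Re-pushing failed tasks onto the same queue is straightforward; a small sketch, assuming crawl is reworked to take an error-first callback as the next answer suggests:

var q = async.queue(function (task, callback) {
    crawl(task.url, function (err) {
        if (err) {
            // put the failed task back on the queue for another attempt
            q.push(task);
        }
        callback();
    });
}, 5);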
var q = async.queue(function (task, callback) {
    crawl(task.url);
    callback();
}, N);
You're executing the next task immediately after starting the previous one; this way, the queue is meaningless. You should modify your code like this:
// first, modify your 'crawl' function to take a callback argument, and call this callback after the job is done.
// then
var q = async.queue(function (task, next /* naming this argument 'next' is more meaningful */) {
    crawl(task.url, function () {
        // after this one is done, start the next one.
        next();
    });
    // or, more simply: crawl(task.url, next);
}, N);
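For completeness, a sketch of how crawl could be reworked to take that callback (this rework is an assumption, not part of the original answer); buffering until 'end' also avoids parsing partial chunks:

function crawl(i, done) {
    var req = http.request(getOptions(i), function (res) {
        var body = '';
        res.on('data', function (chunk) {
            body += chunk; // accumulate partial chunks
        });
        res.on('end', function () {
            parseLocation(body); // parse only the complete response
            done();
        });
    });
    req.end();
}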
Another option, if you want vanilla JS without fancy libraries:
var incrementer = 0;
var resultsArray = [];
var myInterval = setInterval(function() {
    incrementer++;
    if (incrementer == 100) {
        clearInterval(myInterval);
        // when done, parse results array
    }
    // make request here
    // push request result to array here
}, 500);
It invokes the function every half second; an easy way to force sequencing and exit after x requests.
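Filled in with a plain http.get against the feed URL from the question (purely illustrative), that skeleton might look like:

var http = require('http');
var incrementer = 0;
var resultsArray = [];
var myInterval = setInterval(function () {
    incrementer++;
    if (incrementer === 100) {
        clearInterval(myInterval);
        // when done, parse resultsArray
        return;
    }
    http.get('http://magicseaweed.com/syndicate/rss/index.php?id=' + incrementer + '&unit=uk', function (res) {
        var body = '';
        res.on('data', function (chunk) { body += chunk; });
        res.on('end', function () { resultsArray.push(body); });
    });
}, 500);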
I know I am a little late to the question; however, here is a solution I wrote to slow down the number of requests when testing an API endpoint, using Node 4 or Node 5:
var fs = require('fs');
var supertest = require('supertest');
var request = supertest("http://sometesturl.com/api/test/v1/");
var Helper = require('./check.helper');
var basicAuth = Helper.basicAuth;
var options = Helper.options;

fs.readFile('test.txt', function(err, data){
    var parsedItems = JSON.parse(data);
    var urlparts = [];
    // create a queue
    for (var year = 1975; year <= 2016; year++) {
        for (var make in parsedItems[year]){
            console.log(year, make, '/models/' + year + '/' + make);
            urlparts.push({urlpart: '/models/' + year + '/' + make, year: year, make: make});
        }
    }
    // start dequeue
    waitDequeue();

    // This function calls itself after the makeRequest promise completes
    function waitDequeue(){
        var item = urlparts.pop();
        if (item){
            makeRequest(item)
                .then(function(){
                    // wait this long before the next dequeue
                    setTimeout(function() {
                        waitDequeue();
                    }, 3000);
                });
        } else {
            write(parsedItems);
        }
    }

    // make a request, mutate parsedItems, then resolve
    function makeRequest(item){
        return new Promise((resolve, reject) => {
            request
                .get(item.urlpart)
                .set(options.auth[0], options.auth[1])
                .set(options.type[0], options.type[1])
                .end(function(err, res) {
                    if (err) return reject(err);
                    console.log(res.body);
                    res.body.forEach(function(model){
                        parsedItems[item.year][item.make][model] = {};
                    });
                    resolve();
                });
        });
    }

    // write the results back to the file
    function write(parsedItems){
        fs.writeFile('test.txt', JSON.stringify(parsedItems, null, 4), function(err){
            if (err) console.log(err);
        });
    }
});
A little late, but I have found this works!
Using async you can slow down the queue by using whilst inside the task handler, e.g.:
var q = async.priorityQueue(function(task, callback) {
    // your processing code here for each task
    // when ready to complete the task, delay it by calling:
    var count = 0;
    async.whilst( // wait 10 seconds (10 x 1000 ms)
        function() {
            return count < 10;
        },
        function(callback) {
            count++;
            setTimeout(function() {
                callback(null, count);
            }, 1000);
        },
        function (err, n) {
            // n seconds have passed
            callback(); // callback to the q handler
        }
    ); // whilst
}, 5);
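If a fixed pause per task is all you need, the whilst loop can be replaced by a single setTimeout around the queue callback; a minimal sketch of the same throttling idea:

var q = async.queue(function (task, callback) {
    // process the task here, then hold the worker slot for one second
    setTimeout(callback, 1000);
}, 5);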
Check the algorithm below...
users = getAllUsers();
for (i = 0; i < users.length; i++)
{
    contacts = getContactsOfUser(users[i].userId);
    contactsLength = contacts.length;
    for (j = 0; j < contactsLength; j++)
    {
        phones = getPhonesOfContacts(contacts[j].contactId);
        contacts[j].phones = phones;
    }
    users[i].contacts = contacts;
}
return users;
I want to develop the same logic using Node.js.
I have tried using async with the forEach, concat, and forEachSeries functions, but they all fail at the second level.
While the pointer is getting the contacts of one user, the value of i increases and the process starts for the next user. It does not wait for the process of getting contacts and phones to complete for one user before starting the next one. That is what I want to achieve.
Actually, I want to build the users object with the proper structure. As it is, all the sequences get ruined. Can anyone give me a general idea how to achieve such a series process? I am open to changing my algorithm as well.
In Node.js you need to do it the asynchronous way. Your code should look something like:
var processUsers = function(callback) {
    getAllUsers(function(err, users) {
        async.forEach(users, function(user, callback) {
            getContactsOfUser(user.userId, function(err, contacts) {
                async.forEach(contacts, function(contact, callback) {
                    getPhonesOfContacts(contact.contactId, function(err, phones) {
                        contact.phones = phones;
                        callback();
                    });
                }, function(err) {
                    // All contacts are processed
                    user.contacts = contacts;
                    callback();
                });
            });
        }, function(err) {
            // All users are processed
            // Here is the finished result
            callback(undefined, users);
        });
    });
};

processUsers(function(err, users) {
    // users here
});
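If the users must also be processed strictly in order, async.eachSeries is a drop-in replacement for async.forEach in the sketch above; here is just the outer loop, with processOneUser as a hypothetical helper wrapping the contact/phone logic:

async.eachSeries(users, function(user, callback) {
    // handle one user completely before the next one starts
    processOneUser(user, callback); // hypothetical helper
}, function(err) {
    // all users processed, in order
    callback(undefined, users);
});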
You could try this method, without using async:
function getAllUserContacts(users, callback){
    var index = 0;
    var results = [];
    var getUserContacts = function(){
        getContactsOfUser(users[index].userId, function(contacts){
            var index2 = 0;
            var getContactsPhones = function(){
                getPhonesOfContacts(contacts[index2].contactId, function(phones){
                    contacts[index2].phones = phones;
                    if (index2 === (contacts.length - 1)){
                        users[index].contacts = contacts;
                        if (index === (users.length - 1)){
                            callback(users);
                        } else {
                            index++;
                            getUserContacts();
                        }
                    } else {
                        index2++;
                        getContactsPhones();
                    }
                });
            };
            getContactsPhones();
        });
    };
    getUserContacts();
}

// calling the function
getAllUsers(function(users){
    getAllUserContacts(users, function(usersWithContacts){
        console.log(usersWithContacts);
    });
});
// Asynchronous nested loop
async.eachSeries(allContact, function(item, cb){
    async.eachSeries(item, function(secondItem, secondCb){
        console.log(secondItem);
        return secondCb();
    }, function(){
        // inner series done, continue with the next outer item
        return cb();
    });
}, function(){
    console.log('after all process message');
});