Page Twitter's REST API with Node - node.js

I am attempting to get a list of tweets from Twitter with a specific hashtag using Node.js. Twitter has made it so that you can only get a maximum of 15 tweets per request, so I need to make multiple requests if I want a sizable list of tweets. To let Twitter know that you want the next list of tweets, you need to provide a "max_id" variable, which should hold the minimum id of the list of tweets that you got back from the previous request. This process is documented here.
Here is my attempt at doing this:
// Hashtag to search for (without the leading '#').
var hashtag = "thisisahashtag"; // hashtag that we are looking for
// Pagination cursor: Twitter's max_id parameter for the next page of results.
var max_id = '';
do {
// twitter.search is ASYNCHRONOUS: it returns immediately and invokes the
// callback later, so nothing below this call has happened yet when the
// do/while condition is evaluated.
twitter.search({
q: "#" + hashtag,
result_type: "recent",
max_id: max_id
},
session.accessToken,
session.accessTokenSecret,
function(error, data, response) { // callback
// get the ids of all the tweets from one response and do comparison for smallest one
for(var i = 0; i < data.statuses.length; i++) {
var id = data.statuses[i].id;
if(max_id == '' || id < parseInt(max_id)) {
max_id = id;
}
}
// do something with the data...
}
)
// BUG (the point of the question): the loop spins without waiting for the
// callback, so max_id is never updated in time — and it is set to a tweet
// id, never to '0', so this condition could not terminate the loop anyway.
} while(max_id != '0');
I am using the node-twitter-api module to make the requests. This won't work because the outer loop will keep firing off without waiting for the query. Is there a better way to do this?

Twitter lets you request up to 100 tweets at a time. I added a count parameter for this. You should initiate subsequent requests from your callback. That way you will serialize your requests.
// Fetch recent tweets for `hashtag` one page (up to 100 tweets) at a time,
// issuing the next request from the completion callback so that requests
// are serialized rather than fired in a tight loop.
function twitterSearch() {
  twitter.search({
    q: "#" + hashtag,
    result_type: "recent",
    max_id: max_id,
    count: 100 // MAX RETURNED ITEMS IS 100
  },
  session.accessToken,
  session.accessTokenSecret,
  function(error, data, response) {
    // Stop on request failure or when Twitter returns no further tweets.
    // (Previously the recursion stopped only when max_id became '0' — a
    // value it can never take, since it is always set to a real tweet id —
    // so the search re-fetched the oldest page forever.)
    if (error || !data || !data.statuses || data.statuses.length === 0) {
      return;
    }
    // Find the smallest (oldest) id on this page.
    var minId = data.statuses[0].id;
    for (var i = 1; i < data.statuses.length; i++) {
      if (data.statuses[i].id < minId) {
        minId = data.statuses[i].id;
      }
    }
    // Subtract 1 so the oldest tweet of this page is not returned again on
    // the next request (max_id is an inclusive upper bound).
    // NOTE(review): tweet ids exceed Number.MAX_SAFE_INTEGER; for exact
    // paging use id_str with string/BigInt arithmetic instead.
    max_id = minId - 1;
    // do something with data.statuses ...
    // GET MORE TWEETS
    twitterSearch();
  });
}

Related

Test random URLs from spreadsheet using alasql

I have a large number of URLs within a xlsx file. What I'd like to do is randomly select some of these URLs, load them, then check that they return a status code of 200.
So I'm using the npm alasql package to do this.
At the moment, the following code successfully loads the first 5 URLs in the spreadsheet, checks that they 200, then finishes the test.
var alasql = require('alasql');
var axios = require('axios');
module.exports = {
'#tags': ['smokeTest'],
'Site map XML pages load': async (browser) => {
const result = await alasql.promise('select URL from xlsx("./testUrls.xlsx",{sheetid:"RandomUrls"})');
var xcelData = result.map(item => {
return item.URL;
});
async function siteMapUrlsTestArr(item) {
var response = await axios.get(browser.launch_url + item);
browser.verify.equal(response.status, 200);
console.log('Sitemap test URL =', (browser.launch_url + item));
}
for (let index = 0; index < xcelData.length; index++) {
if (index < 5) {
const xmlTestUrl = xcelData[index];
await siteMapUrlsTestArr(xmlTestUrl);
} else {}
}
},
'Closing the browser': function (browser) {
browser.browserEnd();
},
};
However, what I'd like to do is randomly select 5 URLs from the (large) list of URLs in the spreadsheet, rather than the first 5 URLs.
I appreciate that this will (probably) involve using Math.floor(Math.random() * ...), but I can't seem to get it to work no matter where I place it.
Any help would be greatly appreciated. Thanks.
Your logic is flawed. Here's how.
You want to select 5 random URLs from a list and then, perform the operation on the items but what you're doing is you're getting all the items and running the operation using a loop on first five.
To correct it:
// Test exactly five DISTINCT randomly chosen URLs (or every URL, if the
// sheet has fewer than five). Tracking chosen indices in a Set prevents
// Math.random() from picking — and testing — the same URL twice.
const chosenIndices = new Set();
while (chosenIndices.size < Math.min(5, xcelData.length)) {
//Selecting a random index from the list using Math.random();
const randomIndex = Math.floor(Math.random() * xcelData.length);
if (chosenIndices.has(randomIndex)) continue; // already tested — redraw
chosenIndices.add(randomIndex);
//Performing the HTTP response operation on the randomly selected URL.
await siteMapUrlsTestArr(xcelData[randomIndex]);
}
The aforementioned solution will select a random item in each loop and perform the HTTP response operation on it. The items will be randomly selected using Math.random().

looping through an array of objects using node js and mongodb

I am new to using MongoDB and Node and am trying to loop through an array of objects from my database and display the objects using res.json. In my console.log it displays all the objects, but in Postman using res.json it displays only one object. Please help.
MY CODE
// Snippet 1: logging — runs to completion and prints every course object.
const course = res.GetUserDT.coursesHandled;
for (let index = 0; index < course.length; index++) {
console.log(course[index]);
}
// Snippet 2 (the bug): res.json() sends AND ends the HTTP response, so only
// the object from the first iteration ever reaches the client; later
// iterations write to an already-finished response.
const course = res.GetUserDT.coursesHandled;
for (let index = 0; index < course.length; index++) {
res.json(course[index]);
}
my console output
{ courseCode: '2103' }
{ courseCode: '2012' }
{ courseCode: '2062' }
my postman output
{ courseCode: '2103' }
Hi and welcome to Stackoverflow.
The problem here is that res.json() sends an immediate response to the requester - meaning a response has already been sent within the first iteration of your for-loop.
I am also wondering why you need that loop at all - you are not doing anything within it. So why don't you just send the array immediately, like so:
res.json(res.GetUserDT.coursesHandled);
You can only send res.json once.
To send all of them together you can create an array, push all the objects in the array and send it back as a response.
// Gather every course object into a single array so one response can
// carry them all.
const arrayToReturn = [];
for (const courseItem of course) {
  arrayToReturn.push(courseItem);
}
res.json(arrayToReturn);
Update
#David's answer is the most accurate solution i.e just directly send the array as a response instead of looping
res.json(res.GetUserDT.coursesHandled);
Assuming that is express, res.json() will send the data and end the response.
try something like:
// Accumulate all the courses, then send the whole list as one JSON response.
const responseArray = [];
course.forEach((item) => responseArray.push(item));
res.json(responseArray);

Ionic 4+ paginated rest api search filter with infinite scroll

I'm using ionic 5.4.5. RestApi was created with node and express js.
When I want to search, the app only looks for data on the first page. I want the app to search across all of the data. How can I do this?
Our api paginated.
app.get("/units", (req, res) => {
let page = parseInt(req.query.page)
let limit = parseInt(req.query.limit)
if(!page && !limit){
page = 1;
limit = 5;
}
let startIndex = (page -1) * limit
let endIndex = page * limit
let results = {}
if(endIndex < data.length){
results.next = {
page : page + 1,
limit : limit
}
}
if(startIndex > 0){
results.previous = {
page : page - 1,
limit : limit
}
}
results.results = data.slice(startIndex,endIndex)
res.json(results);
});
app.get("/units/:id", (req, res) => {
const itemId = req.params.id;
const item = data.find(_item => _item.id == itemId);
if (item) {
res.json(item);
} else {
res.json({ message: `item ${itemId} doesn't exist`})
}
});
home.page.ts
// Search-box input handler (Ionic searchbar change event).
getItems(ev) {
let val = ev.target.value;
// Empty query: reload the unfiltered first page from the API.
if (val == '') {
this.loadUnit();
return;
}
if (val && val.trim() != '') {
// NOTE(review): this filters only the units already loaded into
// `this.unit` (i.e. the current page), which is why the search never
// matches items from later pages of the paginated API. Searching the full
// data set requires a server-side filter endpoint.
this.unit = this.unit.filter((un,name) => {
return (un.name.toLocaleLowerCase().indexOf(val.toLocaleLowerCase()) > -1);
})
}
}
I also use a service to get data from the API.
Either return all data from the API and perform filtering and pagination on the app. Or perform filtering and pagination (in that order) in the API.
The last option is probably better if there are a lot of data rows, because:
Sending all rows to the app will use a lot of data and take a lot of time, especially over a WiFi or mobile data connection.
Processing on the server is usually faster than processing on the client, especially if it is a mobile device.
You might also want to return the total number of rows from the API.

Node.js dialling back API requests

I have a list of 125,000 + Id numbers.
I am making a request to an api to get more information for each one.
But my problem is that the api will stop giving me a response if I request more then 6 per second.
I need a way to control the speed of the requests.
Just use a function called by setInterval to do the actual API querying ?
Simple example:
var ids = [ /* big array here */ ];
// Query the API for every id in `ids`, at most 6 requests per second
// (one every 1000/6 ms), then invoke `callback` with the results in the
// same order as the input ids.
function queryAllIds(ids, callback) {
  var processedIds = [];
  var lastProcessedId = 0;
  var processedIdCount = 0;
  var intervalId;
  // Nothing to do: still invoke the callback asynchronously for consistency.
  if (ids.length === 0) {
    process.nextTick(callback, processedIds);
    return;
  }
  function queryApi() {
    // All ids dispatched — stop the interval timer.
    // (In the original this check sat OUTSIDE queryApi, so it ran exactly
    // once — before setInterval — and the timer was never cleared.)
    if (lastProcessedId >= ids.length) {
      clearInterval(intervalId);
      return;
    }
    var idToProcess = lastProcessedId++;
    doActualQuery(ids[idToProcess], function(result) {
      // Store by index so results keep the input order even if responses
      // arrive out of order.
      processedIds[idToProcess] = result;
      processedIdCount++;
      if (processedIdCount === ids.length) {
        clearInterval(intervalId);
        // `process.nextTick` — the original called a bare, undefined
        // `nextTick`, which would have thrown.
        process.nextTick(callback, processedIds);
      }
    });
  }
  intervalId = setInterval(queryApi, 1000/6);
}
queryAllIds(ids, function (processedIds) {
  // Do stuff here
});
We ended up using rate limiter which provided the rate limiting we needed right out of the box. https://www.npmjs.com/package/limiter

http call in backbone promise

Hi, I have a Backbone web app using jQuery, with Node.js/MongoDB as the server-side framework. I'm having problems making an HTTP GET call inside a foreach loop, with the results of the GET call being iteratively added to each row of the loop.
// Question code: build a CSV of registrants, fetching each registrant's
// stats with a nested GET. BUG (the point of the question): the inner
// $.get promises are never collected/awaited, so .done() fires as soon as
// the OUTER request's then-callback returns — before any inner call has
// completed.
var eventid = this.model.get("_id");
var inPromise = $.get("/registrants/list?eventid="+eventid,null,null,"json").then(
function (result){
var temp;
var finalVal = '';
var tempfinalVal = "";
var loop = 0
// NOTE(review): implicit global — `percentage` is assigned without var/let.
percentage = 0;
$.each(result.registrants,function(index,registrant){
temp = JSON.parse(registrant.fields);
for (var key in temp) {
if(key =="Email"){
if(temp[key] != ""){
// This inner promise is created but never tracked, so nothing waits on it.
$.get("/stats/registrant?userid="+temp[key]+"&eventid="+eventid,null,null,"json").then(function(result2){
percentage = (result2.Stats.type ===undefined || result2.Stats.type ==null) ? "0": result2.Stats.type;
finalVal +=percentage+"\n";
}).fail(function(){
percentage = "0";
});
}
}else if(key =="eventid"){
loop++;
finalVal = finalVal.slice(0, - 1);
finalVal +='\n';
}
finalVal +=temp[key] + ',';
}
});
//promises.push(inPromise);
// The then-callback returns undefined, so the .done() below receives
// undefined as `finalVal` — it does NOT see the local variable above.
}
).done(function(finalVal){
$("#webcast-download-registrants-tn").attr("href",'data:text/csv;charset=utf-8;filename=registration.csv",'+encodeURIComponent(finalVal));
console.log("DONE");
}).fail(function(){
console.log("fail");
});
// promise.done(function () {
// console.log(" PROMISE DONE");
// });
So I loop through a collection, and the last item of the document gets content from another HTTP call; when all is done it should create a CSV file. The problem is that the "DONE" text echoes first, and then the "CALL" text is displayed.
Rick, your problem is not the simplest due to :
the need for nested asynchronous gets
the need to build each CSV data row partly synchronously, partly asynchronously.
the need for a mechanism to handle the fulfilment of multiple promises generated in the inner loop.
From what you've tried, I guess you already know that much.
One important thing to note is that you can't rely on for (var key in temp) to deliver properties in any particular order. Only arrays have order.
You might try something like this :
var url = "/stats/registrant",
data = { 'eventid': this.model.get('_id') },
rowTerminator = "\n",
fieldNames = ['firstname','lastname','email','company','score'];
function getScore(email) {
return $.get(url, $.extend({}, data, {'userid':email}), null, "json").then(function(res) {
return res.Stats ? res.Stats.type || 0 : 0;
}, function() {
//ajax failure - assume score == 0
return $.when(0);
});
}
$.get("/registrants/list", data, null, "json").then(function(result) {
var promises = [];//An array in which to accumulate promises of CSV rows
promises.push($.when(fieldNames)); //promise of CSV header row
if(result.registrants) {
$.each(result.registrants, function(index, registrant) {
if(registrant.fields) {
// Synchronously initialize row with firstname, lastname, email and company
// (omitting score for now).
var row = fieldNames.slice(0,-1).map(function(fieldName, i) {
return registrant.fields[fieldName] || '';
});
//`row` remains available to inner functions due to closure
var promise;
if(registrant.fields.Email) {
// Fetch the registrant's score ...
promise = getScore(registrant.fields.Email).then(function(score) {
//... and asynchronously push the score onto row
row.push(score);
return row;
});
} else {
//or synchronously push zero onto row ...
row.push(0);
//... and create a resolved promise
promise = $.when(row);
}
promises.push(promise);//Accumulate promises of CSV data rows (still in array form), in the correct order.
}
});
}
return $.when.apply(null, promises).then(function() {
//Join all the pieces, in nested arrays, together into one long string.
return [].slice.apply(arguments).map(function(row) {
return row.join(); //default glue is ','
}).join(rowTerminator);
});
}).done(function(str) {
$("#webcast-download-registrants-tn").attr("href",'data:text/csv;charset=utf-8;filename=registration.csv",'+encodeURIComponent(str));
console.log("DONE");
}).fail(function() {
console.log("fail");
});
partially tested
See comments in code for explanation and please ask if there's anything you don't follow.

Resources