I'm trying to call a REST API in a "for" loop, but the results aren't what I'm expecting.
I've attempted to wrap everything in a promise, but the order of operations is still off: the calls run asynchronously instead of one after another.
var https = require('https');
var zlib = require("zlib");
var axios = require('axios');
const cheerio = require('cheerio');
var page = 1;
var hasMore = "true";
function delay() {
return new Promise(resolve => setTimeout(resolve, 300));
}
async function getLocation(page) {
// notice that we can await a function
// that returns a promise
await delay();
var apiUrl = 'https://my.api.com/search/advanced?page=' + page +
'&pagesize=5';
https.get(apiUrl, function(response) {
console.log("headers: ", response.headers);
console.log(response.statusCode)
if (response.statusCode == 200) {
var gunzip = zlib.createGunzip();
var jsonString = '';
response.pipe(gunzip);
gunzip.on('data', function(chunk) {
jsonString += chunk;
});
gunzip.on('end', function() {
obj = JSON.parse(jsonString);
var url = obj.items[0].owner.link;
axios(url)
.then(response => {
const html = response.data;
const $ = cheerio.load(html);
//OUTPUT LOCATION
console.log($('h3.location').text().trim());
})
.catch(console.error);
});
gunzip.on('error', function(e) {
console.log(e);
});
} else {
console.log("Error");
}
});
}
async function startGetLocation() {
var page = 1;
var hasMore = "true";
do {
//OUTPUT PAGE NUMBER
console.log(page.toString());
await getLocation(page);
page = page + 1;
} while (page < 3);
}
startGetLocation();
Based on the sample code, I would have expected it to output:
1
New York
2
However, it's outputting:
1
2
New York
The problem is that the callback you pass to https.get() is executed asynchronously, and getLocation does not wait for that part to finish.
So you can wrap the https.get() call and the unzipping part in a promise, wait for that promise to resolve, and only then do the axios part.
async function getLocation(page) {
await delay();
var apiUrl = 'https://my.api.com/search/advanced?page=' + page +
'&pagesize=5';
const fetchAndUnzipPromise = new Promise((resolve, reject) => {
https.get(apiUrl, function (response) {
console.log("headers: ", response.headers);
console.log(response.statusCode)
if (response.statusCode == 200) {
var gunzip = zlib.createGunzip();
var jsonString = '';
response.pipe(gunzip);
gunzip.on('data', function (chunk) {
jsonString += chunk;
});
gunzip.on('end', function () {
const obj = JSON.parse(jsonString);
var url = obj.items[0].owner.link;
resolve(url);
});
gunzip.on('error', function (e) {
reject(e);
});
} else {
reject(new Error("Statuscode not as exepcted"));
}
});
});
return fetchAndUnzipPromise.then(url => {
return axios(url)
.then(response => {
const html = response.data;
const $ = cheerio.load(html);
//OUTPUT LOCATION
console.log($('h3.location').text().trim());
})
.catch(console.error);
})
}
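Since getLocation is already an async function, the same thing can be written a bit flatter by awaiting the promise instead of chaining .then(). A sketch of the same idea (the only difference is that the try/catch now also covers the fetch-and-unzip step):
// Sketch: same getLocation, awaiting the promise instead of chaining .then()
async function getLocation(page) {
  await delay();
  // ... build fetchAndUnzipPromise exactly as above ...
  try {
    const url = await fetchAndUnzipPromise; // owner link extracted from the gunzipped JSON
    const response = await axios(url);
    const $ = cheerio.load(response.data);
    console.log($('h3.location').text().trim()); // OUTPUT LOCATION
  } catch (err) {
    console.error(err);
  }
}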
Related
I'm trying to return the result of an http request from a function, and I thought request-promise was supposed to return the data after an await. I obviously don't have this right: I get "undefined" returned from the function.
var s = getUrl();
console.log('result: ' + s)
function getUrl() {
var rp = require('request-promise');
var url = 'http://myserver.com?param=xxx';
rp(url)
.then(function (data) {
return data;
})
.catch(function (err) {
console.log(err);
});
}
How do I await the call so getUrl returns the data?
EDIT: After Kevin's comment, I tried to put this into a module and call it, but it's returning [object Promise].
function getUrl(url, params) {
var rp = require('request-promise');
return rp(url + '?' + params)
.then(function (data) {
return data;
})
.catch(function (err) {
console.log(err);
});
}
async function getUpdate(o) {
var url = 'http://myserver.com';
var params = 'param=xxx';
var s = await getUrl(url, params);
return s;
}
exports.askVelo = (o) => {
var sRtn = '';
console.log(o.code);
switch (o.code) {
case 'g':
sRtn = getUpdate(o);
break;
}
console.log('heres rtn: ' + sRtn); // sRtn is [object Promise]
return sRtn;
}
getUpdate is now just returning [object Promise]... Why is that not working now?
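For what it's worth, the likely reason is that askVelo never waits for the promise returned by getUpdate, so sRtn is still a pending promise when it is logged and returned. A minimal sketch of awaiting it, assuming whatever calls askVelo can itself handle the promise it then returns:
// Sketch: await getUpdate inside askVelo (askVelo itself then returns a promise)
exports.askVelo = async (o) => {
  var sRtn = '';
  console.log(o.code);
  switch (o.code) {
    case 'g':
      sRtn = await getUpdate(o); // resolved data instead of [object Promise]
      break;
  }
  console.log('heres rtn: ' + sRtn);
  return sRtn;
};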
I really needed to get the data in a synchronous fashion, so I finally gave up and used sync-request and skipped all the await/promise stuff.
var sr = require('sync-request');
var url = 'http://myserver';
var qstring = 'param=xxx';
var res = sr('GET', url + '?' + qstring);
var str = res.body.toString();
I'm a beginner with Node.js, and async behavior confuses me.
I want my code to run sequentially, or it breaks.
I have a for loop, and it simply doesn't work...
Here is all the code:
const util = require('util');
const request = require('request');
const cheerio = require('cheerio');
var host = "http://www.nicotv.me";
var url = "http://www.nicotv.me/video/play/57838-1-%s.html";
var len = 99;
var tab = /-(\d)-/.exec(url);
tab = tab[1] // '1' not '-1-'
function getLen(url) {
//you can ignore this function, it gives len=2
request(url, function (err, response, html) {
if (err) {
console.log('url:', url);
console.log('error:', err);
console.log('statusCode:', response && response.statusCode);
}
else{
var $ = cheerio.load(html);
var cls = '.ff-playurl-dropdown-%s';
$(util.format(cls, tab)).filter(function (){
var data = $(this);
len = data.html().match(/<a href=/g).length;
console.log("episode:", len);
});
getLink(len, function(){
});
}
});
}
getLen(util.format(url, 1)); //len = 2
var getLink = function(lengths, callback){
for (let i = 1; i <= lengths; i++) {
var tmp = util.format(url, i);
try {
request(tmp, function (err, res, html){
console.log('url:', tmp);
if(err){
console.log("error:", err);
console.log("statusCode:", res && res.statusCode);
}else{
var reg = /src="(\/player.php?.{1,})"/;
var result = reg.exec(html);
console.log(result[1]);
}
});
callback();
} catch (error) {
console.log(error);
break;
}
}
}
Here is my output:
episode: 2
url: http://www.nicotv.me/video/play/57838-1-2.html
/player.php?u=aHR0cDovL3R5angyLmtpbmdzbnVnLmNuLzM2MHl1bi0xNS5waHA/dmlkPTE1NzkxMzU2MzEyNDAwNTQ5&p=360biaofan&c=0&j=aHR0cDovL2ppZXhpLmtpbmdzbnVnLmNuLzM2MGJpYW9mYW4ucGhwP3VybD0=&x=10&y=&z=
url: http://www.nicotv.me/video/play/57838-1-2.html
/player.php?u=aHR0cDovL3R5angyLmtpbmdzbnVnLmNuLzM2MHl1bi0xNS5waHA/dmlkPTE1Nzg1MDQyMDYyNDAwNTgx&p=360biaofan&c=0&j=aHR0cDovL2ppZXhpLmtpbmdzbnVnLmNuLzM2MGJpYW9mYW4ucGhwP3VybD0=&x=10&y=&z=aHR0cDovL3R5angyLmtpbmdzbnVnLmNuLzM2MHl1bi0xNS5waHA/dmlkPTE1NzkxMzU2MzEyNDAwNTQ5
The first problem is that these two /player*** links are from 57838-1-1.html, and one of them is not complete.
The second problem is that the url output shows 57838-1-2.html twice.
Thanks for your kind help.
I had the same problem yesterday and solved it by:
Using request-promise
Replacing the loop method arrTitles.Each with for (const jt of arrTitles)
Here is a sample (a sketch applying the sequential-loop idea to the question's urls follows it):
const request = require('request-promise');
const cheerio = require('cheerio');
var getUrlData =
async function (url) {
console.log(url);
try {
return await request.get(url);
}
catch (err) {
console.error(`${err}: ${url}`);
}
return;
};
var run =
async function (pageUrl) {
var arrData =
await getUrlData(pageUrl)
.then(response => readTable(response));
console.log(arrData);
};
var readTable =
function (document) {
var $;
let arrData = [];
try {
$ = cheerio.load(document);
$('table tr')
.each(
function (trN) {
$(this)
.children('td')
.each(
function (tdN) {
arrData.push($(this).text().trim());
}
)
});
}
catch { }
return arrData;
};
run('http://www.nicotv.me/video/play/57838-1-1.html'); // the page url to scrape (from the question)
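Applied to the question's loop, the same pattern (request-promise plus await inside the loop) runs the requests one after another, so tmp still holds the right url when each response is handled. A rough sketch using the url template and len from the question:
// Sketch: sequential replacement for getLink, using request-promise and await
const util = require('util');
const rp = require('request-promise');

async function getLinkSequential(lengths) {
  for (let i = 1; i <= lengths; i++) {
    const tmp = util.format(url, i); // url is the template from the question
    try {
      const html = await rp(tmp); // wait for this page before requesting the next
      console.log('url:', tmp);
      const result = /src="(\/player\.php\?.{1,})"/.exec(html);
      if (result) console.log(result[1]);
    } catch (error) {
      console.log('error:', error);
      break;
    }
  }
}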
I want to download a series of ts files, but every time I run this code it gets stuck downloading one of the files, without any error message.
However, if I put the same url into the browser's address bar, or use a tool like wget, the file downloads fine.
const puppeteer = require('puppeteer');
var https = require('https');
var fs = require('fs');
async function doRequest(url) {
return new Promise(function (resolve) {
var req = https.get(url, function (response) {
resolve(response);
});
req.end('error', function(e) {
sleep(3000).then(() => {
console.log('re try to get ts ...'+e);
doRequest(url); })
});
req.on('error', function(e) {
sleep(3000).then(() => {
console.log('re try to get ts ...'+e);
doRequest(url); })
});
});
}
(async() => {
for(let i = 21; i<100; i++)
{
var ts_url = 'https://hls2.videos.sproutvideo.com/1e7c8a8ff0518509452c7eb2e75a2a1f/e84883ff69cb66752bd6783cdbaa35fb/video/720_000'+i+'.ts?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiaHR0cHM6Ly9obHMyLnZpZGVvcy5zcHJvdXR2aWRlby5jb20vMWU3YzhhOGZmMDUxODUwOTQ1MmM3ZWIyZTc1YTJhMWYvZTg0ODgzZmY2OWNiNjY3NTJiZDY3ODNjZGJhYTM1ZmIvKi50cz9zZXNzaW9uSUQ9MDZkN2ZhOWMtMDgxOS00Y2YwLTk0M2QtNzA2MGQzOGY2N2RkIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNTQ2NzY3NjY4fX19XX0_&Signature=DTCQWVIdInCe2YIf-fxD4RDEHOXGUDK2pUwxV0cKPi0m~WwlLYIEFSmQkAbK-oV-uLU93E1O2TGizvrMDp6voFVnm-jLaOur1JRlJDBCP7T8KEYrkkU3Y3grZAKHmi0gQiVpVIKRgo7gnDKwMZ1NjosQPbaf1XDMpuHxAyKfPGgIRLpSEp4BZ1dqcfzs-YyYQzNaK-a3tYONmpyID3bZnF8sn2pMZonArCz24BQL0wEfXeS3HqxwVv85z641kKxQBGd~8lG88qUTpJCvqWmIZhikzWjGQPY~6ezgJMKhjJQIoPMVGZehT~NcAzPwXo84kd5ksaOdbh4paHsUe1096A__&Key-Pair-Id=APKAIB5DGCGAQJ4GGIUQ&sessionID=06d7fa9c-0819-4cf0-943d-7060d38f67dd';
await console.log('downloading '+i);
var file_ts_path = fs.createWriteStream(i+'.ts');
let responseRes = await doRequest(ts_url);
await responseRes.pipe(file_ts_path);
}
})();
You need to wait for the download to finish, write the data to a file stream, and only resolve the promise once the stream has finished writing.
https://nodejs.org/api/https.html#https_https_get_url_options_callback
https://nodejs.org/api/fs.html#fs_fs_createwritestream_path_options
var https = require('https');
var fs = require('fs');
function doRequest(url, dest) {
return new Promise(function (resolve, reject) {
https.get(url, res => {
let chunks = [];
res.on('data', (buffer) => {
chunks.push(buffer); // collect the downloaded data as raw buffers (ts files are binary)
});
res.on('end', () => {
var stream = fs.createWriteStream(dest);
stream.on('finish', () => {
resolve('File Saved !');
});
stream.end(Buffer.concat(chunks)); // write the body and close the stream so 'finish' fires
});
}).on('error', function (e) {
reject(new Error('re try to get ts ... ' + e));
});
});
}
for (let i = 21; i < 100; i++) {
var ts_url =
'https://hls2.videos.sproutvideo.com/1e7c8a8ff0518509452c7eb2e75a2a1f/e84883ff69cb66752bd6783cdbaa35fb/video/720_000' +
i +
'.ts?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiaHR0cHM6Ly9obHMyLnZpZGVvcy5zcHJvdXR2aWRlby5jb20vMWU3YzhhOGZmMDUxODUwOTQ1MmM3ZWIyZTc1YTJhMWYvZTg0ODgzZmY2OWNiNjY3NTJiZDY3ODNjZGJhYTM1ZmIvKi50cz9zZXNzaW9uSUQ9MDZkN2ZhOWMtMDgxOS00Y2YwLTk0M2QtNzA2MGQzOGY2N2RkIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNTQ2NzY3NjY4fX19XX0_&Signature=DTCQWVIdInCe2YIf-fxD4RDEHOXGUDK2pUwxV0cKPi0m~WwlLYIEFSmQkAbK-oV-uLU93E1O2TGizvrMDp6voFVnm-jLaOur1JRlJDBCP7T8KEYrkkU3Y3grZAKHmi0gQiVpVIKRgo7gnDKwMZ1NjosQPbaf1XDMpuHxAyKfPGgIRLpSEp4BZ1dqcfzs-YyYQzNaK-a3tYONmpyID3bZnF8sn2pMZonArCz24BQL0wEfXeS3HqxwVv85z641kKxQBGd~8lG88qUTpJCvqWmIZhikzWjGQPY~6ezgJMKhjJQIoPMVGZehT~NcAzPwXo84kd5ksaOdbh4paHsUe1096A__&Key-Pair-Id=APKAIB5DGCGAQJ4GGIUQ&sessionID=06d7fa9c-0819-4cf0-943d-7060d38f67dd';
doRequest(ts_url, i + '.ts')
.then(console.log)
.catch(console.log);
}
I am working on automating an end-to-end scenario using Mocha.
I have a url endpoint that needs to be polled until a certain value shows up in the response. Is there any way to do that?
Example with request and callback approach:
const request = require('request');
describe('example', () => {
it('polling', function (done) {
this.timeout(5000);
let attemptsLeft = 10;
const expectedValue = '42';
const delayBetweenRequest = 100;
function check() {
request('http://www.google.com', (error, response, body) => {
if (body === expectedValue) return done();
attemptsLeft -= 1;
if (!attemptsLeft) return done(new Error('All attempts used'));
setTimeout(check, delayBetweenRequest);
});
}
check();
});
});
Example with got and async/await approach:
const utils = require('util');
const got = require('got');
const wait = utils.promisify(setTimeout);
describe('example', () => {
it('polling', async function () { // an async test must not also take a done callback
this.timeout(5000);
const expectedValue = '42';
const delayBetweenRequest = 100;
for (let attemptsLeft = 10; attemptsLeft; attemptsLeft -= 1) {
const resp = await got.get('http://www.google.com');
if (resp.body === expectedValue) return; // resolving the async function passes the test
await wait(delayBetweenRequest);
}
throw new Error('All attempts used'); // a rejected promise fails the test
});
});
This is how I was able to do it with WebdriverIO and Mocha:
describe("wait for value in content of page", () => {
it("should be able to wait to value in url", () => {
var max_seconds_to_wait = 10;
var seconds_counter = 0;
var should_continue = true;
while (should_continue) {
browser.url('http://your.url.com');
var response = JSON.parse(browser.getText("body"));
console.log(response)
if (response == 'something') {
should_continue = false;
}
browser.pause(1000);
seconds_counter++;
if (seconds_counter > max_seconds_to_wait) {
throw new Error('Waiting for json from url timeout error');
}
}
});
});
I have been able to use bluebird to run an array of http get requests in parallel within a function that returns the responses of all the requests after they have finished. However, I would like to chain the requests to run one after another in sequence, without affecting the asynchronous behavior of the promises in my current function shown below.
var Promise = require('bluebird');
function buildCartWithItems(cartID,requests, cookie) {
var promises = [];
for (var idx in requests) {
var request = requests[idx]; // requests is an array containing the json records of each request parameter
request["id"] = cartID;
promises.push(new Promise(function(resolve, reject) {
var options = {
host: 'example.com',
path: "/example/addToCart?"+param(request),
headers: {'Cookie': cookie, 'Connection': 'keep-alive'}
};
https.get(options, function (response) {
var body = '';
response.on('data', function(d) {
body += d;
});
response.on('end', function() {
console.log("jsonservicesPostRequest response: " + body);
var parsed = JSON.parse(body);
if (parsed["STATUS"] == "success") {
resolve(parsed.RESULT.itemGroup.id);
} else
resolve("error");
});
response.on('error', function(exception) {
console.log("auth error: " + exception);
resolve(exception);
});
});
}));
}
return Promise.all(promises);
}
Use:
var cart = 12345; // cart ID
var itemsParams = [];
for (var idx in products) {
var prod = products[idx]; // each product record to add to the cart
var parms = {
'guid' : prod["_id"]["GUID"],
'count': prod["deficit"],
};
itemsParams.push(parms);
}
buildCartWithItems(cart,itemsParams,newcookie).then(function(results) {
// results -> array of all of the cart.id's
console.log("Build Cart Results: " + JSON.stringify(results));
});
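One way to chain the requests while keeping the same return shape is bluebird's Promise.mapSeries, which waits for each mapped promise to resolve before starting the next one and still resolves to an array of all results. A rough sketch (buildCartWithItemsInSequence is just an illustrative name; the addToCart request and the param() helper are taken over from the original code):
var Promise = require('bluebird');
var https = require('https');

function buildCartWithItemsInSequence(cartID, requests, cookie) {
  // Promise.mapSeries runs the mapper for one item at a time, in order,
  // and resolves to an array of all results, just like Promise.all
  return Promise.mapSeries(requests, function (request) {
    request["id"] = cartID;
    return new Promise(function (resolve, reject) {
      var options = {
        host: 'example.com',
        path: "/example/addToCart?" + param(request),
        headers: {'Cookie': cookie, 'Connection': 'keep-alive'}
      };
      https.get(options, function (response) {
        var body = '';
        response.on('data', function (d) { body += d; });
        response.on('end', function () {
          var parsed = JSON.parse(body);
          if (parsed["STATUS"] == "success") {
            resolve(parsed.RESULT.itemGroup.id);
          } else {
            resolve("error");
          }
        });
        response.on('error', reject);
      });
    });
  });
}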