Requests for multiple pages with puppeteer - node.js

I am trying to get information from many sites (links from an array) that have dynamic content (emails and company names) with Puppeteer. I use a for loop to iterate over the array of links, call page.goto for each site, wait until the site is loaded, wait several seconds for the dynamic content, and then make the requests. But only the first and the last requests complete (their promises resolve); the other promises don't return the dynamic content. What should I do to fix that? Thanks
let puppeteer = require('puppeteer');

(async () => {
  const browser = await puppeteer.launch();
  let page = await browser.newPage();
  const url = 'https://abcdsite.com/';
  let arrayNames = ['first', 'second', 'third', 'abcd'];
  for (let i = 0; i < await arrayNames.length;) {
    let nameUrl = await arrayNames[i];
    if (i < 4) {
      let temp1;
      console.log(`begin for ${nameUrl}`);
      await page.goto(`${url}${nameUrl}`, { waitUntil: 'load' })
        .then(() => {
          return new Promise(res => {
            // wait for the dynamic content to load
            setTimeout(() => {
              temp1 = page.evaluate(() => {
                return new Promise(resolve => { // <-- return the data to node.js from the browser
                  let name = document.querySelector('h1').innerHTML;
                  let email = document.getElementsByClassName('sidebar-views-contacts h-card vcard')[0]
                    .children[2].children[0].children[0].innerHTML;
                  resolve(email);
                });
              });
              res(temp1);
            }, 7000);
          })
        })
        .then((res) => {
          i++;
          console.log(`https://abcdsite.com/${nameUrl}`, temp1);
        });
    }
    else {
      break
    }
  }
})();

I think this will help you:
1) make an async function that requests and parses your data;
2) create an array of parallel tasks.
let puppeteer = require('puppeteer');

async function makeRequest(page, url, nameUrl) {
  await page.goto(`${url}${nameUrl}`, { waitUntil: 'load' });
  // wait several seconds for the dynamic content to load
  await new Promise(resolve => setTimeout(resolve, 7000));
  const userEmail = await page.evaluate(() => {
    let name = document.querySelector('h1').innerHTML;
    let email = document.getElementsByClassName('sidebar-views-contacts h-card vcard')[0]
      .children[2].children[0].children[0].innerHTML;
    return email;
  });
  return userEmail;
}
(async () => {
  const browser = await puppeteer.launch();
  let page = await browser.newPage();
  const url = 'https://abcdsite.com/';
  let arrayNames = ['first', 'second', 'third', 'abcd'];
  let tasks = [];
  for (let i = 0; i < arrayNames.length; i++) {
    tasks.push(makeRequest(page, url, arrayNames[i]));
  }
  Promise.all(tasks)
    .then((res) => {
      for (let i = 0; i < arrayNames.length; i++) {
        console.log(`https://abcdsite.com/${arrayNames[i]}`, res[i]);
      }
    });
})();
Series solution
For more information read this.
for (let i = 0; i < arrayNames.length; i++) {
  let temp = await makeRequest(page, url, arrayNames[i]);
  console.log(`https://abcdsite.com/${arrayNames[i]}`, temp);
}

Puppeteer's page.goto function accepts options you can use to make sure the page is fully loaded. See the documentation here.
In addition, you can use the page.waitFor method to wait for a few seconds. See the documentation here.
Here is a simple example that I think may work for you:
const puppeteer = require('puppeteer')

const url = 'https://stackoverflow.com/'
const arrayNames = ['tags', 'users', 'jobs', 'questions'];

(async () => {
  const browser = await puppeteer.launch()
  const page = await browser.newPage()
  const data = {}
  for (const nameUrl of arrayNames) {
    const fullUrl = `${url}${nameUrl}`
    console.log(`begin for ${fullUrl}`)
    await page.goto(fullUrl, { waitUntil: 'networkidle0' }) // check the networkidle0 option and others here: https://pptr.dev/#?product=Puppeteer&version=v2.1.1&show=api-pagegotourl-options
    await page.waitFor(2000) // wait 2 seconds to allow a full load. Optional
    const pageData = await page.evaluate(() => {
      const name = document.querySelector('h1').innerText
      const pageTitle = document.querySelector('title').innerText
      // get whatever data you need from the page
      return { name: name, title: pageTitle }
    })
    console.log('\t Data from page: ', pageData)
    data[fullUrl] = pageData
  }
  console.log(data)
})()
This does not run all the sites in parallel, but you can play around with the example.
Instead of awaiting the await page.evaluate part, you could collect all the promises in an array and then use await Promise.all([listOfPromises]), as sketched below.
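A minimal sketch of that parallel variant, assuming one page per URL (opened with browser.newPage()) so the navigations do not interfere with each other, and reusing the same stackoverflow.com URLs as above:
const puppeteer = require('puppeteer')

const url = 'https://stackoverflow.com/'
const arrayNames = ['tags', 'users', 'jobs', 'questions'];

(async () => {
  const browser = await puppeteer.launch()
  // one task (and one page) per URL, so the navigations and evaluations can overlap
  const listOfPromises = arrayNames.map(async (nameUrl) => {
    const page = await browser.newPage()
    await page.goto(`${url}${nameUrl}`, { waitUntil: 'networkidle0' })
    const pageData = await page.evaluate(() => ({
      name: document.querySelector('h1').innerText,
      title: document.querySelector('title').innerText
    }))
    await page.close()
    return { url: `${url}${nameUrl}`, ...pageData }
  })
  const results = await Promise.all(listOfPromises)
  console.log(results)
  await browser.close()
})()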

Related

How to crawl the Web with Node.js

I expect the product info to be printed as it is displayed. However, the current code shows all loaded items even if they are not displayed yet.
How do I modify my code? Thank you.
// const request = require("request");
const cheerio = require("cheerio");
const puppeteer = require('puppeteer');
(async () => {
const browser = await puppeteer.launch({
headless: false // 無外殼的 Chrome,有更佳的效能
});
const page = await browser.newPage();
await page.goto('https://www.balenciaga.com/en-us/women/shoes/sneakers');
await getData(page)
await scrollItem(page)
})();
const scrollItem = async (page) => {
pageHeight = await page.evaluate('document.body.scrollHeight')
await page.evaluate('window.scrollTo(0, document.body.scrollHeight)',
await page.waitForFunction(`document.body.scrollHeight > ${pageHeight}`),
await getData(page)
)
}
const getData = async (page) => {
let body = await page.content()
let $ = await cheerio.load(body)
const data = []
const list = $(".l-productgrid__item .c-product__infos");
for (let i = 0; i < list.length; i++) {
const title = list.eq(i).find('.c-product__infos h2').text();
const price = list.eq(i).find('.c-product__infos p').text().trim();
data.push({ title, price });
}
data.forEach((res, i) => {
console.log(`${i+1} 名稱: ${res.title}, 價錢: ${res.price}`)
})
await scrollItem(page)
}
Working code:
// define a function which accepts the body and cheerio as args
function extract(input, cheerio) {
  // return an array of objects with the extracted values
  let $ = cheerio.load(input);
  return $('.l-productgrid__item .c-product__infos').map(function () {
    return {
      header: $('h2', this).text().trim(),
      price: $('p', this).text().trim()
    }
  }).toArray()
}
proof of work (screenshot)
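For reference, a sketch of how this extract function might be wired into the question's getData flow (assuming the same page and rendering behaviour as above):
const cheerio = require("cheerio");

const getData = async (page) => {
  const body = await page.content();    // full HTML after the items have rendered
  const items = extract(body, cheerio); // [{ header, price }, ...]
  items.forEach((item, i) => {
    console.log(`${i + 1} name: ${item.header}, price: ${item.price}`);
  });
  return items;
};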

Scraper with Puppeteer login returns just one element of the array

This code is supposed to loop through the urls that get scraped from the scrapeProductPage function. But before looping, it needs to log in so that it can obtain the prices. The prices are only displayed to logged in users. Instead of looping through the urls it just returns the scraped data from one page. The error I get is "MaxListenersExceededWarning: Possible EventEmitter memory leak detected".
const request = require("request-promise");
const cheerio = require("cheerio");
const ObjectsToCsv = require("objects-to-csv");
const puppeteer = require('puppeteer');
const url = "https://www.example.com";
const scrapeResults = [];
async function scrapeProductPage() {
try {
const htmlResult = await request.get(url);
const $ = await cheerio.load(htmlResult);
$("td.productListing-data > a[style='position:relative;float:left;']").each((index, element) => {
let url = $(element).attr("href");
url = "https\://www.example.com/" + url;
const scrapeResult = { url };
scrapeResults.push(scrapeResult);
});
return scrapeResults;
} catch (err) {
console.error(err);
}
}
async function scrapeDescription(productsWithImages) {
process.setMaxListeners(0);
const browser = await puppeteer.launch({
headless: false
});
const page = await browser.newPage();
await page.goto('https://www.example.com/login');
await page.waitFor(500);
await page.waitFor('input[name="email_address"]');
await page.type('input[name="email_address"]', 'example#gmail.com');
await page.type('input[name="password"]', '123test');
await page.click('#btnLogin');
return await Promise.all(
productsWithImages.map(async job => {
try {
await page.goto(job.url, { waitUntil: "load" });
const content = await page.content();
const $ = await cheerio.load(content);
job.main_img = $('img#main_img').attr('src');
job.name = $('h2').text();
job.price = $("td.products_info_price").text();
return job;
} catch (error) {
console.error(error);
}
})
);
}
async function saveDataToCsv(data) {
const csv = new ObjectsToCsv(data);
console.log(csv);
}
async function scrapeWona() {
const productsWithImages = await scrapeProductPage();
const wonaFullData = await scrapeDescription(productsWithImages);
await saveDataToCsv(productsWithImages);
}
scrapeWona();
The reason you're getting the warning is process.setMaxListeners(0). It indicates you have a memory leak somewhere in the code.
You can also take a look at the documentation here: https://nodejs.org/docs/latest/api/events.html#events_emitter_setmaxlisteners_n
Take a look at the answer from here as well: node.js - request - How to "emitter.setMaxListeners()"?
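As a minimal illustration of the API those docs describe (not a fix for the scraper itself, just the setMaxListeners behaviour):
const EventEmitter = require('events');

const emitter = new EventEmitter();
emitter.setMaxListeners(20);            // raise the limit for this one emitter
console.log(emitter.getMaxListeners()); // 20

// process is itself an EventEmitter; process.setMaxListeners(0) removes the limit,
// which silences the warning rather than addressing the underlying leak.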

Puppeteer getting element from elementHandle causing protocol error

I'm trying to scrape a certain facebook page for its posts written by a certain user and starting with a certain word.
const puppeteer = require('puppeteer');

async function findPosts(page) {
  const USERNAME = 'test123';
  const posts = await page.$$('.userContentWrapper');
  return posts.filter(async post => {
    try {
      let usernameElement = await post.$('.fwb');
      let username = await page.evaluate(element => element.textContent, usernameElement);
      if (username === USERNAME) {
        let postElement = await post.$('[data-testid="post_message"] p');
        let postContent = page.evaluate(element => element.textContent, postElement);
        return /\[test \d+\]/.test(postContent);
      }
      return false;
    } catch (e) {
      console.log(e);
      return false;
    }
  });
}

(async () => {
  const browser = await puppeteer.launch({
    headless: false
  });
  const page = await browser.newPage();
  await page.goto('https://www.facebook.com/groups/groupid/');
  const pageTitle = await page.title();
  console.log(pageTitle);
  const posts = await findPosts(page);
  console.log(posts);
  await browser.close();
})();
I'm getting Error: Protocol error (Runtime.callFunctionOn): Target closed. when I'm trying to get the usernameElement, at this line:
let usernameElement = await post.$('.fwb');
Not sure what's going wrong here, any suggestions?
The problem is that the filter function does not work with Promises. So the return posts.filter(...) will immediately return and after that the browser is closed. Therefore, when you try to run the $ function on the page, the page does not exist anymore and you get the Target closed error.
To make it work with async/await syntax, you could use a simple loop instead:
async function findPosts(page) {
  const USERNAME = 'test123';
  const posts = await page.$$('.userContentWrapper');
  const postsToReturn = [];
  for (let post of posts) {
    /* ... if else logic */
    postsToReturn.push(post); // instead of return true
  }
  return postsToReturn;
}
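For completeness, a sketch of that loop with the question's if/else logic filled in (same selectors and USERNAME as above) could look like this:
async function findPosts(page) {
  const USERNAME = 'test123';
  const posts = await page.$$('.userContentWrapper');
  const postsToReturn = [];
  for (let post of posts) {
    try {
      let usernameElement = await post.$('.fwb');
      let username = await page.evaluate(element => element.textContent, usernameElement);
      if (username === USERNAME) {
        let postElement = await post.$('[data-testid="post_message"] p');
        let postContent = await page.evaluate(element => element.textContent, postElement);
        if (/\[test \d+\]/.test(postContent)) {
          postsToReturn.push(post); // instead of return true
        }
      }
    } catch (e) {
      console.log(e);
    }
  }
  return postsToReturn;
}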

Multiple .click() events in Puppeteer

I need to go to a website and get its data. I must make a first click() to get to the next option, on which I have to make another click(), then another, and then another.
I would like to build JSON from their database. You can check it here:
https://www.akumulator.pl/dobierz-akumulator
I have already tried some tutorials on Puppeteer and the official documentation.
const puppeteer = require('puppeteer');

let scrape = async () => {
  const browser = await puppeteer.launch({ headless: false });
  const page = await browser.newPage();
  const models = [];
  const fuels = [];
  const engines = [];
  const productions = [];
  await page.goto('https://www.akumulator.pl/dobierz-akumulator');
  const result = await page.evaluate(() => {
    let data = [];
    let elements = document.getElementById('edit-brand').children;
    [].forEach.call(elements, element => {
      data.push(element.innerText);
    })
    return data;
  });
  browser.close();
  return result
}

scrape().then((value) => {
  console.log(value); // Success!
});
You can use something like this in a loop:
await Promise.all([
  page.click(selectorToClick),
  page.waitForSelector(selectorToBeCreatedOrUnblocked)
]);
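A rough sketch of how that loop could look for this page; the steps array and its selectors are placeholders for illustration, not the real selectors on akumulator.pl:
const puppeteer = require('puppeteer');

(async () => {
  const browser = await puppeteer.launch({ headless: false });
  const page = await browser.newPage();
  await page.goto('https://www.akumulator.pl/dobierz-akumulator');

  // hypothetical pairs: what to click, and what should appear/unblock afterwards
  const steps = [
    { click: '#first-option',  waitFor: '#second-menu' },
    { click: '#second-option', waitFor: '#third-menu' }
  ];

  for (const step of steps) {
    await Promise.all([
      page.click(step.click),
      page.waitForSelector(step.waitFor)
    ]);
  }

  await browser.close();
})();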

How to return value from async/await function?

I am using Puppeteer to collect data from 2 different webpages into arrays for later comparison. However, the program does not wait for the returned array before carrying on.
async function go() {
  try {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto('www.webpage.com');
    const tds = await page.$$('td');
    const data = [];
    for (let i = 0; i < tds.length; i++) {
      const td = tds[i];
      const tdcontent = await page.evaluate(td => td.innerText, td);
      if (tdcontent.length > 5) {
        data[i] = { "content": tdcontent };
      }
    }
    return data;
  } catch (e) {
    console.log(e);
  }
};

(async function main() {
  const returnedData = await go();
  console.log(returnedData.length);
})();
The returned data.length is 0. I am new to Node.js and async programming. I think it is because .length is logged before the data is returned?
How do I return the data in a way that lets me manipulate it and complete my comparisons?
I try not to use page.$$ in such cases. Instead I use document.querySelectorAll, map through the elements, and extract the text.
Here is the modified code:
const getTdData = async () => {
  try {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto("http://example.com");
    return page.evaluate(() => {
      // get all td elements
      const tdList = [...document.querySelectorAll("td")];
      return tdList.map(element => ({ content: element.textContent }));
    });
  } catch (e) {
    console.log(e);
  }
};

(async function main() {
  const returnedData = await getTdData();
  console.log(returnedData.length);
})();
First of all, you are missing an apostrophe in your page.$$() function. You should change this to:
const tds = await page.$$('td');
Next, you are trying to pass a non-existent variable to page.evaluate(). You can fix this by passing tds[i] instead of td:
const tdcontent = await page.evaluate(td => td.innerText, tds[i]);
Your final result should look something like this:
const go = async () => {
  try {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto('www.webpage.com');
    const tds = await page.$$('td');
    const data = [];
    for (let i = 0; i < tds.length; i++) {
      const tdcontent = await page.evaluate(td => td.innerText, tds[i]);
      if (tdcontent.length > 5) {
        data[i] = {
          content: tdcontent,
        };
      }
    }
    return data;
  } catch (error) {
    console.log(error);
  }
};

(async function main() {
  const returnedData = await go();
  console.log(returnedData.length);
})();
If you are still experiencing issues, you may want to wait until the page has loaded completely using page.goto( ... , { waitUntil: 'networkidle0' }), or wait until the element in question has been added to the DOM using page.waitForSelector():
await page.goto('www.webpage.com', {
  waitUntil: 'networkidle0',
});
// ...
await page.waitForSelector('td');
