I ran the following and it appears to gather a large number of links; however, on actual inspection of the site, collectLinks1 gives me all the valid links, but collectLinks2 gives me 59 repetitions of http://pieroxy.net/blog/2014/11/18/[
I'm new to Puppeteer and I can't figure out why I don't get the links with collectLinks2.
const { parse, resolve } = require('url');
const trim = require('lodash/trim');
const startsWith = require('lodash/startsWith');
const includes = require('lodash/includes');
// https://github.com/GoogleChrome/puppeteer
const puppeteer = require('puppeteer');
// https://github.com/gwuhaolin/chrome-finder
const findChrome = require('chrome-finder');
function resolveUrl(url, baseUrl) {
  url = trim(url);
  if (!url) return null;
  if (startsWith(url, '#')) return null;
  const { protocol } = parse(url);
  if (includes(['http:', 'https:'], protocol)) {
    return url.split('#')[0];
  }
  if (!protocol) {
    return resolve(baseUrl, url).split('#')[0];
  }
  return null;
}
async function collectLinks1(htmlPage) {
  const baseUrl = htmlPage.url();
  const links = [];
  const assetUrls = await htmlPage.$$eval('a[href]', assetLinks => assetLinks.map(link => link.href));
  assetUrls.forEach(link => {
    const _link = resolveUrl(link, baseUrl);
    if (_link) links.push(_link);
  });
  return links;
}
async function collectLinks2(htmlPage) {
  const baseUrl = htmlPage.url();
  const links = [];
  await htmlPage.exposeFunction('pushToLinks', link => {
    const _link = resolveUrl(link, baseUrl);
    if (_link) links.push(_link);
  });
  await htmlPage.evaluate(() => {
    function findLinks(document) {
      document.querySelectorAll('a[href]')
        .forEach(link => {
          window.pushToLinks(link.href);
        });
    }
    findLinks(window.document);
  });
  return links;
}
const crawl = async url => {
  try {
    console.log(`Crawling ${url}`);
    const browser = await puppeteer.launch({
      headless: false,
      executablePath: findChrome(),
    });
    const page = await browser.newPage();
    await page.goto(url);
    // OK
    const links1 = await collectLinks1(page);
    links1.forEach(link => { console.log(link); });
    // KO
    const links2 = await collectLinks2(page);
    links2.forEach(link => { console.log(link); });
    await browser.close();
  } catch (err) {
    console.log(err);
  }
};

crawl('http://pieroxy.net/blog/2014/11/18/user_agent_detection_in_java.html');
You need to await the function defined via page.exposeFunction, as it returns a Promise. Because you are only calling the function but not awaiting its result, your page.evaluate call resolves before your script has finished executing.
Solution
Instead of the forEach, you should use a loop to iterate over all the items and await each call to the exposed function, so the links are communicated back to the Node.js script one after another.
async function collectLinks2(htmlPage) {
  // ...

  await htmlPage.evaluate(async () => {
    async function findLinks(document) {
      for (const link of document.querySelectorAll('a[href]')) {
        await window.pushToLinks(link.href);
      }
    }
    await findLinks(window.document);
  });

  return links;
}
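If the one-call-per-link round trip ever becomes a concern, a minimal alternative sketch (reusing the resolveUrl helper above; collectLinks3 is just an illustrative name) is to gather every href inside a single evaluate call and resolve the URLs back in Node, much like collectLinks1 does with $$eval:

// Sketch only: collect all hrefs in one evaluate call, then resolve them
// in Node with the resolveUrl helper defined earlier.
async function collectLinks3(htmlPage) {
  const baseUrl = htmlPage.url();
  const hrefs = await htmlPage.evaluate(() =>
    Array.from(document.querySelectorAll('a[href]'), a => a.href)
  );
  return hrefs.map(href => resolveUrl(href, baseUrl)).filter(Boolean);
}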
Related
The script by itself works great (entering the URL manually, writing a JSON file using the fs module, running node script_name.js), but within an Express GET request it returns undefined.
So I've built a simple frontend to let the user enter the subreddit name to be scraped.
And here's where the problem is:
Express controller
const run = require("../run");

requestPosts: async (req, res) => {
  try {
    const { subreddit } = req.body;
    const response = await run(subreddit);
    //console.log(response);
    res.json(response);
  } catch (error) {
    console.error(error);
  }
},
Cheerio functions
const axios = require("axios");
const { load } = require("cheerio");

let posts = [];

async function getImage(postLink) {
  const { data } = await axios(postLink);
  const $ = load(data);
  return $("a.post-link").attr("href");
}

async function run(url) {
  try {
    console.log(url);
    const { data } = await axios(url);
    const $ = load(data);
    $(".thing.linkflair.link").map(async (i, e) => {
      const title = $(e)
        .find(".entry.unvoted .top-matter .title .title")
        .text();
      const user = $(e)
        .find(".entry.unvoted .top-matter .tagline .author")
        .text();
      const profileLink = `https://old.reddit.com/user/${user}`;
      const postLink = `https://old.reddit.com/${$(e).find("a").attr("href")}`;
      // const thumbail = $(e).find("a img").attr("src");
      const image = await getImage(postLink);
      posts.push({
        id: i + 1,
        title,
        postLink,
        image,
        user: { user, profileLink },
      });
    });
    const nextPage = $(".next-button a").attr("href");
    if (nextPage) {
      await run(nextPage);
    } else {
      return posts;
    }
  } catch (error) {
    console.error(error);
  }
}

module.exports = run;
I've tried working with new Promise((resolve, reject) => {}).
I think it's returning undefined because maybe the code isn't synchronized.
(I don't know if that makes sense, I've just started programming.)
.map() is not promise-aware and does not wait for your promises to finish. So, $(".thing.linkflair.link").map() finishes long before any of the asynchronous functions inside its callback do. Thus you try to return posts BEFORE it has been populated.
Passing an async callback to .map() produces a collection of promises. You can use Promise.all() on those promises to know when they are all done, and once you're doing that, you may as well just return each post object rather than pushing into a higher-scoped shared array, which makes the code more self-contained.
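As a minimal standalone illustration (not the poster's code) of that behaviour: an async callback always returns a Promise, so mapping with one yields promises rather than values, and Promise.all is what turns them back into values.

// Standalone sketch: mapping with an async callback produces promises;
// Promise.all resolves them back into plain values.
(async () => {
  const double = async n => n * 2;
  const promises = [1, 2, 3].map(double);
  console.log(promises);                    // [ Promise, Promise, Promise ]
  console.log(await Promise.all(promises)); // [ 2, 4, 6 ]
})();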
I would suggest this code:
async function run(url) {
  try {
    console.log(url);
    const { data } = await axios(url);
    const $ = load(data);
    // .get() turns the cheerio collection of promises into a plain array
    // so Promise.all() can consume it
    const posts = await Promise.all($(".thing.linkflair.link").map(async (i, e) => {
      const title = $(e)
        .find(".entry.unvoted .top-matter .title .title")
        .text();
      const user = $(e)
        .find(".entry.unvoted .top-matter .tagline .author")
        .text();
      const profileLink = `https://old.reddit.com/user/${user}`;
      const postLink = `https://old.reddit.com/${$(e).find("a").attr("href")}`;
      // const thumbail = $(e).find("a img").attr("src");
      const image = await getImage(postLink);
      // return a post object
      return {
        id: i + 1,
        title,
        postLink,
        image,
        user: { user, profileLink },
      };
    }).get());
    const nextPage = $(".next-button a").attr("href");
    if (nextPage) {
      const newPosts = await run(nextPage);
      // add these posts to the ones we already have
      posts.push(...newPosts);
    }
    return posts;
  } catch (error) {
    console.error(error);
  }
}
This code is supposed to loop through the URLs that get scraped by the scrapeProductPage function. But before looping, it needs to log in so that it can obtain the prices, which are only displayed to logged-in users. Instead of looping through the URLs, it just returns the scraped data from one page. The error I get is "MaxListenersExceededWarning: Possible EventEmitter memory leak detected".
const request = require("request-promise");
const cheerio = require("cheerio");
const ObjectsToCsv = require("objects-to-csv");
const puppeteer = require('puppeteer');

const url = "https://www.example.com";
const scrapeResults = [];

async function scrapeProductPage() {
  try {
    const htmlResult = await request.get(url);
    const $ = await cheerio.load(htmlResult);
    $("td.productListing-data > a[style='position:relative;float:left;']").each((index, element) => {
      let url = $(element).attr("href");
      url = "https://www.example.com/" + url;
      const scrapeResult = { url };
      scrapeResults.push(scrapeResult);
    });
    return scrapeResults;
  } catch (err) {
    console.error(err);
  }
}

async function scrapeDescription(productsWithImages) {
  process.setMaxListeners(0);
  const browser = await puppeteer.launch({
    headless: false
  });
  const page = await browser.newPage();

  await page.goto('https://www.example.com/login');
  await page.waitFor(500);
  await page.waitFor('input[name="email_address"]');
  await page.type('input[name="email_address"]', 'example@gmail.com');
  await page.type('input[name="password"]', '123test');
  await page.click('#btnLogin');

  return await Promise.all(
    productsWithImages.map(async job => {
      try {
        await page.goto(job.url, { waitUntil: "load" });
        const content = await page.content();
        const $ = await cheerio.load(content);
        job.main_img = $('img#main_img').attr('src');
        job.name = $('h2').text();
        job.price = $("td.products_info_price").text();
        return job;
      } catch (error) {
        console.error(error);
      }
    })
  );
}

async function saveDataToCsv(data) {
  const csv = new ObjectsToCsv(data);
  console.log(csv);
}

async function scrapeWona() {
  const productsWithImages = await scrapeProductPage();
  const wonaFullData = await scrapeDescription(productsWithImages);
  await saveDataToCsv(productsWithImages);
}

scrapeWona();
The reason you're getting the warning is because of process.setMaxListeners(0). It indicates you have a memory leak somewhere in the code.
You can take a look at the documentation here also: https://nodejs.org/docs/latest/api/events.html#events_emitter_setmaxlisteners_n
Take a look at the answer from here: node.js - request - How to "emitter.setMaxListeners()"?
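For reference, here is a minimal sketch of what the linked docs describe: raising the listener limit on a specific emitter instance rather than removing the process-wide cap (the emitter and the limit of 20 are purely illustrative).

// Sketch based on the Node.js events docs: raise the limit on one emitter
// instead of calling process.setMaxListeners(0) globally.
const EventEmitter = require('events');

const emitter = new EventEmitter();
emitter.setMaxListeners(20); // the default limit is 10 listeners per event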
I'm trying to scrape a certain Facebook page for posts written by a certain user and starting with a certain word.
const puppeteer = require('puppeteer');

async function findPosts(page) {
  const USERNAME = 'test123';
  const posts = await page.$$('.userContentWrapper');
  return posts.filter(async post => {
    try {
      let usernameElement = await post.$('.fwb');
      let username = await page.evaluate(element => element.textContent, usernameElement);
      if (username === USERNAME) {
        let postElement = await post.$('[data-testid="post_message"] p');
        let postContent = page.evaluate(element => element.textContent, postElement);
        return /\[test \d+\]/.test(postContent);
      }
      return false;
    } catch (e) {
      console.log(e);
      return false;
    }
  });
}

(async () => {
  const browser = await puppeteer.launch({
    headless: false
  });
  const page = await browser.newPage();
  await page.goto('https://www.facebook.com/groups/groupid/');
  const pageTitle = await page.title();
  console.log(pageTitle);
  const posts = await findPosts(page);
  console.log(posts);
  await browser.close();
})();
I'm getting Error: Protocol error (Runtime.callFunctionOn): Target closed. when I'm trying to get the usernameElement at this line:

let usernameElement = await post.$('.fwb');

Not sure what's going wrong here, any suggestions?
The problem is that the filter function does not work with Promises. So the return posts.filter(...) will immediately return and after that the browser is closed. Therefore, when you try to run the $ function on the page, the page does not exist anymore and you get the Target closed error.
To make it work with async/await syntax, you could use a simple loop instead:
async function findPosts(page) {
  const USERNAME = 'test123';
  const posts = await page.$$('.userContentWrapper');
  const postsToReturn = [];
  for (let post of posts) {
    /* ... if else logic */
    postsToReturn.push(post); // instead of return true
  }
  return postsToReturn;
}
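If you prefer to keep a filter-style shape, a generic sketch (deliberately not tied to the Facebook selectors above; keepItem is a hypothetical async check) is to resolve the boolean checks with Promise.all first and then filter synchronously on the results:

// Standalone sketch of async filtering: compute the keep/discard booleans
// up front with Promise.all, then filter on them synchronously.
(async () => {
  const items = [1, 2, 3, 4];
  const keepItem = async n => n % 2 === 0; // stand-in for an async check
  const keep = await Promise.all(items.map(keepItem));
  console.log(items.filter((_, i) => keep[i])); // [ 2, 4 ]
})();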
Hi guys, I want to log in to a website and, once authenticated, loop through a given set of URLs and scrape data. What I intend to do is described by this example; however, I get an unhandled promise rejection.
const puppeteer = require("puppeteer");

list = [
  "https://www.facebook.com/",
  "https://www.google.com/",
  "https://www.zocdoc.com/"
];

const getTitle = async (p, url) => {
  try {
    await p.goto(url);
    const title = await p.title();
    console.log(title);
  } catch (e) {
    console.log(e)
  }
  return title
};

(async () => {
  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  console.log(this)
  for (var url of list) {
    getTitle(page, url)
  }
  await browser.close();
})();
There are multiple issues in this example.
You should await the call to the getTitle function; you're awaiting inside the function, but you have to await the call to the function too.
You should surround the call to getTitle with a try/catch block and check inside the function whether there is a title to return (e.g. the title for Google is null).
const puppeteer = require("puppeteer");

list = [
  "https://www.facebook.com/",
  "https://www.google.com/",
  "https://www.zocdoc.com/"
];

const getTitle = async (p, url) => {
  try {
    await p.goto(url);
    const title = await p.title();
    if (title) {
      return title
    }
  } catch (e) {
    // log before rethrowing, otherwise the log line would be unreachable
    console.log(e)
    throw e
  }
};

(async () => {
  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  console.log(this)
  for (var url of list) {
    try {
      console.log(await getTitle(page, url))
    } catch (e) {
      console.log('No title')
    }
  }
  await browser.close();
})();
Using Puppeteer to collect data from 2 different webpages into arrays for later comparison. However, the program does not wait for the returned array before carrying on.
async function go() {
  try {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto('www.webpage.com');
    const tds = await page.$$('td');
    const data = [];
    for (let i = 0; i < tds.length; i++) {
      const td = tds[i];
      const tdcontent = await page.evaluate(td => td.innerText, td);
      if (tdcontent.length > 5) {
        data[i] = {"content": tdcontent};
      }
    }
    return data;
  } catch (e) {
    console.log(e);
  }
};

(async function main() {
  const returnedData = await go();
  console.log(returnedData.length);
})();
The returned data.length is 0. I'm new to Node.js and async programming; I think it is because the .length is logged before the data is returned?
How do I return the data in a way where I can manipulate it and complete my comparisons?
I try not to use page.$$ in such cases. Instead, I use document.querySelectorAll and map through the elements to extract the text.
Here is the modified code:
const getTdData = async () => {
  try {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto("http://example.com");

    return page.evaluate(() => {
      // get all td elements
      const tdList = [...document.querySelectorAll("td")];
      return tdList.map(element => ({ content: element.textContent }));
    });
  } catch (e) {
    console.log(e);
  }
};

(async function main() {
  const returnedData = await getTdData();
  console.log(returnedData.length);
})();
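A closely related option, if you prefer to stay inside Puppeteer's API, is page.$$eval, which runs a callback in the page against all matched elements in one call. A short sketch under that assumption (getTdDataWithEval is an illustrative name and expects an already-created page):

// Sketch: page.$$eval runs the callback inside the page against every
// matched <td> and returns plain, serializable data to Node.
const getTdDataWithEval = async page =>
  page.$$eval('td', tds => tds.map(td => ({ content: td.textContent })));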
First of all, you are missing an apostrophe in your page.$$() function. You should change this to:
const tds = await page.$$('td');
Next, you are trying to pass a non-existent variable to page.evaluate(). You can fix this by passing tds[i] instead of td:
const tdcontent = await page.evaluate(td => td.innerText, tds[i]);
Your final result should look something like this:
const go = async () => {
  try {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto('www.webpage.com');
    const tds = await page.$$('td');
    const data = [];
    for (let i = 0; i < tds.length; i++) {
      const tdcontent = await page.evaluate(td => td.innerText, tds[i]);
      if (tdcontent.length > 5) {
        data[i] = {
          content: tdcontent,
        };
      }
    }
    return data;
  } catch (error) {
    console.log(error);
  }
};

(async function main() {
  const returnedData = await go();
  console.log(returnedData.length);
})();
If you are still experiencing issues, you may want to wait until the page has loaded completely using page.goto( ... , { waitUntil: 'networkidle0' }), or wait until the element in question has been added to the DOM using page.waitForSelector():
await page.goto('www.webpage.com', {
  waitUntil: 'networkidle0',
});

// ...

await page.waitForSelector('td');