chrome.runtime.onInstalled event fired unexpectedly in Puppeteer - google-chrome-extension

I have installed the extension below:
https://chrome.google.com/webstore/detail/microsoft-editor-spelling/gpaiobkfhnonedkhhfjpmhdalgeoebfa
I noticed that the chrome.runtime.onInstalled event always fires when you run the code below.
const puppeteer = require('puppeteer-core');

(async function () {
  const browser = await puppeteer.launch({
    userDataDir: 'C:\\Users\\xxxxxx\\AppData\\Local\\Google\\Chrome\\User Data',
    executablePath: 'C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe',
    headless: false,
    ignoreDefaultArgs: [],
    args: [
      '--disable-extensions-except=C:\\Users\\xxxxxx\\AppData\\Local\\Google\\Chrome\\User Data\\Default\\Extensions\\gpaiobkfhnonedkhhfjpmhdalgeoebfa\\1.2211.1007_0',
    ],
  });
  const pages = await browser.pages();
  const page = pages[0];
  await page.goto('https://www.bing.com/');
  globalThis.setTimeout(async function () {
    await browser.close();
  }, 2 * 60 * 1000);
})();
Additionally, you can make an extension yourself and reproduce the same issue:
chrome.runtime.onInstalled.addListener((details) => {
  // the listener receives a details object; details.reason is 'install', 'update', or 'chrome_update'
  const t = Date.now();
  console.log('chrome.runtime.onInstalled', 'reason', details.reason, t);
  chrome.tabs.create({ url: `https://www.google.com/?${t}` });
});
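For what it's worth, if the goal is simply to stop first-run logic from repeating on every launch, one hedged workaround (it does not prevent the event itself, and it assumes a Manifest V3 extension with the storage permission on a recent Chrome, where chrome.storage returns promises) is to persist a flag in chrome.storage.local and check it inside the listener; the firstRunDone flag name below is only an illustration:
chrome.runtime.onInstalled.addListener(async (details) => {
  // remember whether first-run work has already been done
  const { firstRunDone } = await chrome.storage.local.get('firstRunDone');
  if (details.reason === 'install' && !firstRunDone) {
    await chrome.storage.local.set({ firstRunDone: true });
    chrome.tabs.create({ url: 'https://www.google.com/' });
  }
});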

Related

puppeteer - Error: Protocol error (Network.getResponseBody): No resource with given identifier found

I'm trying to get the response body from a website with this code, using Puppeteer:
#!/usr/bin/env node
require('dotenv').config();
const puppeteer = require('puppeteer');
const readline = require('readline').createInterface({
  input: process.stdin,
  output: process.stdout
});
const path = require('path');
const fs = require('fs');
//
console.log('Starting Puppeteer...');
let responseBody = [];
(async () => {
  const browser = await puppeteer.launch({
    headless: false,
    executablePath: '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
  });
  const page = await browser.newPage();
  await page.setRequestInterception(true);
  page.on('request', (request) => {
    request.continue();
  });
  //
  page.on('requestfinished', async (request) => {
    const response = await request.response();
    const url = response.url();
    // store chunk urls
    if (url.startsWith('https://audio-akp-quic-control-examplecdn-com.akamaized.net/audio/')) {
      console.log(await response.buffer());
      //responseBody.push(response.buffer());
    }
  });
  //
  await page.goto('https://accounts.examplecdn.com/login', {
    waitUntil: ['load', 'networkidle2']
  });
  const emailField = await page.waitForSelector('#login-username', { timeout: 3000 });
  await emailField.type(process.env.EMAIL, { delay: 100 });
  const passwordField = await page.waitForSelector('#login-password', { timeout: 3000 });
  await passwordField.type(process.env.PASSWORD, { delay: 100 });
  const submitButton = await page.waitForSelector('#login-button', { timeout: 3000 });
  await submitButton.click();
  //
  const navigation = await page.waitForNavigation({ waitUntil: ['load', 'networkidle2'] });
  //if( navigation.url().endsWith('status') ){
  await page.goto('https://example.cdn.com/search', {
    waitUntil: ['load', 'networkidle2']
  }).then(async (response) => {
    //console.log(response);
    const cookieButton = await page.$('#onetrust-accept-btn-handler');
    await cookieButton.click();
    const searchField = await page.$('[data-testid="search-input"]');
    await readline.question('What track do you want to search for?', (input) => {
      console.log('answer:', input);
      searchField.type(input).then(async () => {
        await page.waitForXPath('//*[@id="searchPage"]/div/div/section[1]/div[2]/div/div/div/div[4]').then(async (element) => {
          element.focus().then(async () => {
            // //*[@id="searchPage"]/div/div/section[1]/div[2]/div/div/div/div[3]/button
            const playButton = await page.waitForXPath('//*[@id="searchPage"]/div/div/section[1]/div[2]/div/div/div/div[3]/button');
            await playButton.click();
          });
        });
      });
    });
  });
  //}
})();
I'm having a problem with it: the following error is logged and the script terminates.
/Users/dev/Desktop/test/node_modules/puppeteer/lib/cjs/puppeteer/common/Connection.js:208
this._callbacks.set(id, { resolve, reject, error: new Error(), method });
^
Error: Protocol error (Network.getResponseBody): No resource with given identifier found
at /Users/dev/Desktop/test/node_modules/puppeteer/lib/cjs/puppeteer/common/Connection.js:208:63
at new Promise (<anonymous>)
at CDPSession.send (/Users/dev/Desktop/test/node_modules/puppeteer/lib/cjs/puppeteer/common/Connection.js:207:16)
at /Users/dev/Desktop/test/node_modules/puppeteer/lib/cjs/puppeteer/common/HTTPResponse.js:99:53
at runMicrotasks (<anonymous>)
at processTicksAndRejections (node:internal/process/task_queues:93:5)
at async /Users/dev/Desktop/test/index.js:40:25
What I need to do is collect the response body of every request to a certain URL, and then use ffmpeg to convert the pieces back into a full-length track. How can I solve this problem? Is it possible to get the response body of each request and then join them all together?
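For reference, one hedged approach (a sketch, not a confirmed fix): the "No resource with given identifier found" error typically means the browser has already discarded the response body (for example after a redirect or a navigation), so response.buffer() has to be guarded. The chunks array and output filename below are illustrative, and the snippet is meant to plug into the script above, reusing its page and the fs module:
const chunks = [];
page.on('response', async (response) => {
  if (!response.url().startsWith('https://audio-akp-quic-control-examplecdn-com.akamaized.net/audio/')) return;
  try {
    chunks.push(await response.buffer()); // may throw if the body was already released
  } catch (err) {
    console.warn('could not read body for', response.url(), err.message);
  }
});

// ... later, once all chunks have arrived:
// fs.writeFileSync('track.raw', Buffer.concat(chunks));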

Puppeteer - How to wait for an img after changing innerHTML?

I would like to add an image before taking a screenshot with Puppeteer.
The following code works, but instead of waiting a fixed time like this, I would like to wait until the img is actually there:
element.innerHTML = "<img id=\"logo_website\" src=\"http://random.com/logo.jpg\">";
await page.waitFor(2000)
I tried the following waitFor, but it doesn't work:
await page.waitFor("#logo_website")
You can try page.waitForResponse() in this way:
'use strict';

const puppeteer = require('puppeteer');

(async function main() {
  try {
    const browser = await puppeteer.launch();
    const [page] = await browser.pages();
    await page.goto('https://example.org/');
    await Promise.all([
      page.waitForResponse('https://www.iana.org/_img/2013.1/iana-logo-header.svg'),
      page.evaluate(() => {
        document.body.innerHTML = '<img id="logo_website" src="https://www.iana.org/_img/2013.1/iana-logo-header.svg">';
      }),
    ]);
    await page.screenshot({ path: 'scr.png' });
    await browser.close();
  } catch (err) {
    console.error(err);
  }
})();
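A side note beyond the original answer: page.waitForResponse() only guarantees the bytes have arrived. If you want to be sure the img element itself has finished loading before the screenshot, a hedged alternative is page.waitForFunction() polling the element's complete and naturalWidth properties:
// minimal sketch; assumes the <img id="logo_website"> has already been inserted into the DOM
await page.waitForFunction(() => {
  const img = document.querySelector('#logo_website');
  return img !== null && img.complete && img.naturalWidth > 0;
});
await page.screenshot({ path: 'scr.png' });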

Can we scrape data from a website which requires authentication in Node.js?

I am trying to do web scraping in Node.js for a website that requires authentication. Is there any way to perform this in Node.js?
You can try puppeteer:
'use strict';

const puppeteer = require('puppeteer');

(async function main() {
  try {
    const browser = await puppeteer.launch({ headless: false });
    const [page] = await browser.pages();
    await page.goto('https://httpbin.org/forms/post');
    await page.type('input[name="custname"]', 'user');
    await page.type('input[name="custemail"]', 'user@example.com');
    await Promise.all([
      page.click('button'),
      page.waitForNavigation(),
    ]);
    await page.waitForSelector('pre');
    const data = await page.evaluate(() => {
      return document.querySelector('pre').innerText;
    });
    console.log(JSON.parse(data).form.custemail);
    await browser.close();
  } catch (err) {
    console.error(err);
  }
})();
===============================
For the site from the comment:
'use strict';

const puppeteer = require('puppeteer');

(async function main() {
  try {
    const browser = await puppeteer.launch({ headless: false });
    const [page] = await browser.pages();
    page.setDefaultTimeout(0);
    await page.goto('https://www.trxade.com/market/login');
    await page.waitForSelector('input[name="deaNumber"]');
    await page.type('input[name="deaNumber"]', '...');
    await page.type('input[name="password"]', '...');
    await Promise.all([
      page.click('input[name="form_login_proceed"]'),
      page.waitForNavigation(),
    ]);
    // await browser.close();
  } catch (err) {
    console.error(err);
  }
})();
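A follow-up beyond the original answer: if you would rather not log in on every run, one hedged option is to save the session cookies after a successful login and restore them next time; the cookies.json filename is just an illustration:
const fs = require('fs');

// after a successful login:
const cookies = await page.cookies();
fs.writeFileSync('cookies.json', JSON.stringify(cookies, null, 2));

// on a later run, before visiting a protected page:
const saved = JSON.parse(fs.readFileSync('cookies.json', 'utf8'));
await page.setCookie(...saved);
// then navigate to the protected page as before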

Is there a way to get puppeteer's waitUntil "networkidle" to only consider XHR (ajax) requests?

I am using Puppeteer to evaluate the JavaScript-based HTML of web pages in my test app.
This is the code I am using to make sure all the data is loaded:
await page.setRequestInterception(true);

page.on("request", (request) => {
  if (request.resourceType() === "image" || request.resourceType() === "font" || request.resourceType() === "media") {
    console.log("Request intercepted! ", request.url(), request.resourceType());
    request.abort();
  } else {
    request.continue();
  }
});

try {
  await page.goto(url, { waitUntil: ['networkidle0', 'load'], timeout: requestCounterMaxWaitMs });
} catch (e) {
}
Is this the best way to wait for AJAX requests to complete?
It feels right, but I'm not sure whether I should use networkidle0, networkidle2, etc.
You can use pending-xhr-puppeteer, a lib that exposes a promise which resolves once all pending XHR requests have finished.
Use it like this:
const puppeteer = require('puppeteer');
const { PendingXHR } = require('pending-xhr-puppeteer');

(async () => {
  const browser = await puppeteer.launch({
    headless: true,
  });
  const page = await browser.newPage();
  const pendingXHR = new PendingXHR(page);
  await page.goto(`http://page-with-xhr`);
  // Here the XHR requests are not finished yet
  await pendingXHR.waitForAllXhrFinished();
  // Here all XHR requests are finished
})();
DISCLAIMER: I am the maintainer of pending-xhr-puppeteer.
XHRs by their nature can appear later in the app's lifetime. networkidle0 will not help you if the app sends an XHR after, for example, one second and you want to wait for it. I think if you want to do this "properly" you should know which requests you are waiting for and await them explicitly.
Here is an example with XHRs that occur later in the app, which waits for all of them:
const puppeteer = require('puppeteer');

const html = `
<html>
<body>
  <script>
    setTimeout(() => {
      fetch('https://swapi.co/api/people/1/');
    }, 1000);
    setTimeout(() => {
      fetch('https://www.metaweather.com/api/location/search/?query=san');
    }, 2000);
    setTimeout(() => {
      fetch('https://api.fda.gov/drug/event.json?limit=1');
    }, 3000);
  </script>
</body>
</html>`;

// you can listen for only a subset of the requests;
// in this example I'm waiting for all of them
const requests = [
  'https://swapi.co/api/people/1/',
  'https://www.metaweather.com/api/location/search/?query=san',
  'https://api.fda.gov/drug/event.json?limit=1'
];

const waitForRequests = (page, names) => {
  const requestsList = [...names];
  return new Promise(resolve =>
    page.on('request', request => {
      // fetch() calls are reported as 'fetch', XMLHttpRequest as 'xhr'
      if (request.resourceType() === 'xhr' || request.resourceType() === 'fetch') {
        // check if the request is in the observed list
        const index = requestsList.indexOf(request.url());
        if (index > -1) {
          requestsList.splice(index, 1);
        }
        // if all requests are fulfilled
        if (!requestsList.length) {
          resolve();
        }
      }
      request.continue();
    })
  );
};

(async () => {
  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  await page.setRequestInterception(true);
  // register the page.on('request') observer
  const observedRequests = waitForRequests(page, requests);
  // goto is deliberately not awaited, because we only want to wait
  // for the observed XHR (ajax) requests, not for navigation
  page.goto(`data:text/html,${html}`);
  console.log('before xhr');
  // wait for all observed requests
  await observedRequests;
  console.log('after all xhr');
  await browser.close();
})();
I agree with the sentiment in the answer above that waiting for all network activity to cease ("all the data is loaded") is a rather ambiguous concept that is entirely dependent on the behavior of the website you're scraping.
Options for detecting responses include waiting a fixed duration, a fixed duration after network traffic idles, for a specific response (or set of responses), for an element to appear on the page, for a predicate to return true, etc, all of which Puppeteer supports.
With this in mind, the most typical scenario is that you're waiting for some particular response or set of responses from known (or partially-known, using some pattern or prefix) resource URL(s) that will deliver a payload you want to read and/or trigger a DOM interaction you need to detect. Puppeteer offers page.waitForResponse for doing just this.
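For orientation, here is a rough, hedged mapping of those options onto current Puppeteer calls (page.waitForNetworkIdle is a newer API and may not exist in older versions; window.dataLoaded is a hypothetical page flag):
// fixed duration
await new Promise(resolve => setTimeout(resolve, 2000));
// network traffic idle
await page.waitForNetworkIdle({ idleTime: 500 });
// a specific response
await page.waitForResponse(response => response.url().includes('/api/'));
// an element appearing on the page
await page.waitForSelector('#result');
// an arbitrary predicate evaluated in the page
await page.waitForFunction(() => window.dataLoaded === true);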
Here's an example, building on an existing answer (and showing how to retrieve the data from the responses while we're at it):
const puppeteer = require("puppeteer");
const html = `
<html>
<body>
<script>
setTimeout(() => {
fetch("http://jsonplaceholder.typicode.com/users/1");
}, 1000);
setTimeout(() => {
fetch("http://jsonplaceholder.typicode.com/users/2");
}, 2000);
setTimeout(() => {
fetch("http://jsonplaceholder.typicode.com/users/3");
}, 3000);
setTimeout(() => {
// fetch something irrelevant to us
fetch("http://jsonplaceholder.typicode.com/users/4");
}, 0);
</script>
</body>
</html>`;
(async () => {
const browser = await puppeteer.launch();
const [page] = await browser.pages();
await page.setContent(html);
const expectedUrls = [
"http://jsonplaceholder.typicode.com/users/1",
"http://jsonplaceholder.typicode.com/users/2",
"http://jsonplaceholder.typicode.com/users/3",
];
try {
const responses = await Promise.all(expectedUrls.map(url =>
page.waitForResponse(
response => response.url() === url,
{timeout: 5000}
)
));
const data = await Promise.all(
responses.map(response => response.json())
);
console.log(data);
}
catch (err) {
console.error(err);
}
await browser.close();
})()

Use the same browser instance?

Hi, I am trying to make a screenshot service:
const puppeteer = require('puppeteer');

var resWidth = 1366;
var resHeight = 1000;
var browser;

(async () => {
  browser = await puppeteer.launch({ignoreHTTPSErrors: true});
});
and when I receive a job I try to do this:
data.forEach(function(d){
  try {
    console.log(d["url"]);
    (async () => {
      var page = await browser.newPage();
      await page.setViewport({width: resWidth, height: resHeight});
      await page.goto(d["url"], {timeout: 90000, waitUntil: 'networkidle'});
      await page.screenshot({path: './picdata/' + d['id'] + '.png', fullPage: true});
      await page.close();
    })();
  } catch(e) {}
});
but I can't... here is the error:
(node:40596) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 7): TypeError: Cannot read property 'newPage' of undefined
I don't want to open a new browser for each screenshot; launching a browser takes time and requires more memory.
What should I do?
The problem:
(async () => {
  browser = await puppeteer.launch({ignoreHTTPSErrors: true});
});
This code never gets executed: the async arrow function is only defined, never invoked (note the missing () at the end), so browser is still undefined when you call browser.newPage().
Even after fixing that, the code as written won't work for your scenario, because the launch and the screenshots are separate async tasks and you need to wait for the browser to be ready before using it.
My attempt with your example:
'use strict';

const puppeteer = require('puppeteer');

const resWidth = 1366;
const resHeight = 1000;
let browser;

async function launchBrowser() {
  browser = await puppeteer.launch({ headless: true }); // "headless: true" ensures no browser window is ever opened
}

launchBrowser().then(async () => { // wait until the browser has launched
  // a plain data.forEach(async ...) would fire all iterations at once and let the
  // final browser.close() run too early, so iterate sequentially instead
  for (const d of data) {
    try {
      const page = await browser.newPage();
      await page.setViewport({ width: resWidth, height: resHeight });
      await page.goto(d['url'], { timeout: 90000, waitUntil: 'networkidle2' }); // current Puppeteer expects 'networkidle0' or 'networkidle2'
      await page.screenshot({ path: './picdata/' + d['id'] + '.png', fullPage: true });
      await page.close();
    } catch (e) {
      console.log(e);
    }
  }
  await browser.close(); // close the browser once all screenshots are done
});
Pro tip: start using let and const instead of var.
