I'm trying to access a website that only lets me through if I change the value of some cookies. However, whenever I try to change the value of the __Secure-3PSIDCC cookie, it tells me that it is an invalid cookie field.
DevTools listening on ws://127.0.0.1:33837/devtools/browser/e3f433a2-8a43-481b-9a2f-b6aa4a923228
MESA-INTEL: warning: Performance support disabled, consider sysctl dev.i915.perf_stream_paranoid=0
libva error: vaGetDriverNameByIndex() failed with unknown libva error, driver_name = (null)
[27423:27516:0915/232908.850008:ERROR:nss_util.cc(349)] After loading Root Certs, loaded==false: NSS error code: -8018
/home/bonk/Kisa/testing/node_modules/puppeteer/lib/cjs/puppeteer/common/Connection.js:298
error: new Errors_js_1.ProtocolError(),
^
ProtocolError: Protocol error (Network.setCookies): Invalid cookie fields
at /home/bonk/Kisa/testing/node_modules/puppeteer/lib/cjs/puppeteer/common/Connection.js:298:24
at new Promise (<anonymous>)
at CDPSession.send (/home/bonk/Kisa/testing/node_modules/puppeteer/lib/cjs/puppeteer/common/Connection.js:294:16)
at Page.setCookie (/home/bonk/Kisa/testing/node_modules/puppeteer/lib/cjs/puppeteer/common/Page.js:869:67)
at processTicksAndRejections (node:internal/process/task_queues:96:5)
at async f (/home/bonk/Kisa/testing/test.js:75:5) {
originalMessage: 'Invalid cookie fields'
Here is my code:
// Launches Chromium, injects a "__Secure-" prefixed cookie, then clicks through the page.
//
// Root cause of the "Invalid cookie fields" ProtocolError: Chromium enforces the
// cookie-name prefix rules over the DevTools protocol, so a cookie named
// "__Secure-*" is rejected unless its `secure` attribute is set. Firefox does not
// enforce the prefix the same way, which is why the same code worked there.
const f = (async () => {
  const browser = await puppeteer.launch({
    dumpio: true,
    headless: false,
  });
  const page = await browser.newPage();

  // setUserAgent returns a promise — await it so the UA is in place before navigation.
  await page.setUserAgent(
    "Mozilla/5.0 (X11; Linux x86_64; rv:91.0) Gecko/20100101 Firefox/91.0"
  );

  const cookies = [
    {
      name: '__Secure-3PSIDCC',
      value: 'hello',
      domain: '.google.com',
      // Required: the __Secure- prefix mandates the Secure attribute.
      secure: true,
    },
  ];
  await page.setCookie(...cookies);
  await page.goto("https://google.com");

  // Awaiting a bare setTimeout does NOT pause an async function (it awaits the
  // timer id, not the callback). Wrap the timer in a Promise instead.
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  await sleep(10000);
  await page.mouse.click(782, 354);
  await sleep(10);
  await page.bringToFront();
  await page.mouse.click(782, 354);
});
f();
Changing the value of the __Secure-3PSIDCC cookie works in Firefox; however, Chromium throws the error above when I try to change the cookie value there.
Related
I'm trying to run a test on a website using Puppeteer, and I want to use the 'expect' assertion from Chai, but I keep getting this error saying that my test failed.
const puppeteer = require('puppeteer')
const expect = require('chai').expect

// Smoke test: launch a browser, open example.com, and assert on its content.
describe('My First Puppeteer Test ', () => {
  it('should launch the browser', async function () {
    const browser = await puppeteer.launch({
      headless: false,
      slowMo: 10,
      devtools: false,
      defaultViewport: null,
      args: ['--start-maximized']
    })
    try {
      const page = await browser.newPage()
      await page.goto('http://example.com')
      const title = await page.title()
      const url = page.url()
      const text = await page.$eval('h1', Element => Element.textContent)
      const count = await page.$$eval('p', Element => Element.length)

      // NOTE: the second argument to `.a(type, msg)` is a custom failure
      // message, not an expected value — these do not compare against
      // 'Example Domain' / 'example domain'.
      expect(title).to.be.a('string', 'Example Domain')
      expect(url).to.include('example.com')
      // Bug fix: `to.browser.a` is not a Chai property and throws
      // "Error: Invalid Chai property: browser". The correct chain is `to.be.a`.
      expect(text).to.be.a('string', 'example domain')
      expect(count).to.equal(2)
    } finally {
      // Close the browser even when an assertion fails, so failed runs
      // don't leak a Chromium process.
      await browser.close()
    }
  })
})
What's wrong with it? Do I need to add something in order for this to work?
This is the message I get from the terminal:
My First Puppeteer Test
1) should launch the browser
0 passing (1s)
1 failing
1) My First Puppeteer Test
should launch the browser:
Error: Invalid Chai property: browser
at Object.proxyGetter [as get] (node_modules\chai\lib\chai\utils\proxify.js:78:17)
at Context.<anonymous> (tests\example.test.js:25:24)
at runNextTicks (node:internal/process/task_queues:61:5)
at listOnTimeout (node:internal/timers:528:9)
at processTimers (node:internal/timers:502:7)
I'm trying to add an external JavaScript file to Puppeteer on Lambda.
// Runs inside an AWS Lambda handler: chrome-aws-lambda bundles a Chromium
// build that works in the Lambda sandbox.
const chromium = require('chrome-aws-lambda');
// NOTE(review): top-level `await` — assumes this snippet lives inside an async
// handler; the "...." below is the author's placeholder for the elided launch
// options (executablePath, args, headless, ...).
const browser = await chromium.puppeteer.launch({
....
});
const webPage = await browser.newPage();
// timeout: 0 disables the navigation timeout entirely.
await webPage.goto(url, { waitUntil: 'networkidle2', timeout: 0 });
// Fails with "Loading script from ... failed" — presumably the Lambda has no
// outbound access to the CDN or the page's CSP blocks the injected tag; fetching
// jQuery yourself and passing { content } instead of { url } avoids both — TODO confirm.
await webPage.addScriptTag({
url: `https://code.jquery.com/jquery-3.6.0.min.js`,
});
But I am unable to load the script:
ERROR Error: Loading script from https://code.jquery.com/jquery-3.6.0.min.js failed
at DOMWorld.addScriptTag (/var/task/node_modules/puppeteer-core/lib/cjs/puppeteer/common/DOMWorld.js:171:23)
at processTicksAndRejections (internal/process/task_queues.js:95:5)
at async I (/var/task/src/functions/preparePages/handler.js:20:3689)
at async Runtime.E [as handler] (/var/task/src/functions/preparePages/handler.js:20:5400)
Error: Protocol error (Page.navigate): Cannot navigate to invalid URL
at Promise (/home/microservice/node_modules/puppeteer/lib/Connection.js:183:56)
at new Promise ()
at CDPSession.send (/home/microservice/node_modules/puppeteer/lib/Connection.js:182:12)
at navigate (/home/microservice/node_modules/puppeteer/lib/FrameManager.js:119:39)
at FrameManager.navigateFrame (/home/microservice/node_modules/puppeteer/lib/FrameManager.js:96:7)
at Frame.goto (/home/microservice/node_modules/puppeteer/lib/FrameManager.js:407:37)
at Frame. (/home/microservice/node_modules/puppeteer/lib/helper.js:111:23)
at Page.goto (/home/microservice/node_modules/puppeteer/lib/Page.js:629:49)
at Page. (/home/microservice/node_modules/puppeteer/lib/helper.js:111:23)
at module.exports (/home/microservice/node_modules/htmltopdf-puppeteer/index.js:19:16)\n at process._tickCallback (internal/process/next_tick.js:68:7)
Use an http:// URL to access a page over the network, or a file:/// URL to access a local file.
// Chromium only navigates to fully-qualified URLs: use an http(s):// URL for
// network resources...
const browser = await puppeteer.launch({ headless: true, args: ['--no-sandbox'] })
const page = await browser.newPage();
await page.goto("http://google.com");
or
// ...or a file:/// URL for a local file — a bare filesystem path such as
// "C:/ec2-user/report.html" triggers "Cannot navigate to invalid URL".
await page.goto("file:///C:/ec2-user/report.html");
I have a Firebase function to create a PDF file. Lately, it times out due to a "Chrome revision"? Neither do I understand the error message, nor do I understand what is wrong. The function works, when I deploy it locally under MacOS.
TimeoutError: Timed out after 30000 ms while trying to connect to the browser! Only Chrome at revision r818858 is guaranteed to work.
at Timeout.onTimeout (/workspace/node_modules/puppeteer/lib/cjs/puppeteer/node/BrowserRunner.js:204:20)
at listOnTimeout (internal/timers.js:549:17)
at processTimers (internal/timers.js:492:7)
The function:
const puppeteer = require('puppeteer');
/**
 * Renders an HTML string to a PDF buffer with headless Chromium.
 *
 * @param {string} html - Full HTML document to render.
 * @param {string} outputPath - Currently unused (the `path` option is commented
 *   out); kept for interface compatibility with callers.
 * @returns {Promise<Buffer|undefined>} The PDF bytes, or undefined if rendering
 *   failed (errors are logged, not rethrown — best-effort by design).
 */
const createPDF = async (html, outputPath) => {
  let pdf;
  let browser;
  try {
    browser = await puppeteer.launch({
      args: ['--no-sandbox']
    });
    const page = await browser.newPage();
    await page.emulateMediaType('screen');
    await page.setContent(html, {
      waitUntil: 'networkidle0'
    });
    pdf = await page.pdf({
      // path: outputPath,
      format: 'A4',
      printBackground: true,
      margin: {
        top: "50px",
        bottom: "50px"
      }
    });
  } catch (e) {
    console.error(e);
  } finally {
    // Always close the browser: previously a failure after launch() skipped
    // browser.close() and leaked the Chromium process, which can exhaust the
    // function instance and contribute to subsequent timeouts.
    if (browser) {
      await browser.close();
    }
  }
  return pdf;
};
TimeoutError: Timed out after 30000 ms while trying to connect to the browser!
The aforementioned error is coming from the fact that as mentioned in the documentation:
When you install Puppeteer, it downloads a recent version of Chromium
Everytime you're executing Puppeteer you're running a Chromium in the backend to which Puppeteer will try to connect, hence when it can't connect to the browser this errors raises.
After doing multiple tests I was able to execute the Cloud Function by adding the `headless` parameter to the launch options. Since the documentation says it defaults to true, I don't quite understand why setting it manually allows the Cloud Function to finish correctly.
At the beginning I was trying with the timeout set to 0 to disable the error due to timeout, however it seems that it's not required, since by only adding headless it finished correctly, but if you find the same problem with the timeouts you can add it.
At the end my code looks like this:
/**
 * Renders an HTML string to a PDF buffer with headless Chromium.
 * `headless: true` and `timeout: 0` are set explicitly — adding them was what
 * made the Cloud Function finish without the connection timeout.
 *
 * @param {string} html - Full HTML document to render.
 * @param {string} outputPath - Currently unused (the `path` option is commented
 *   out); kept for interface compatibility with callers.
 * @returns {Promise<Buffer|undefined>} The PDF bytes, or undefined if rendering
 *   failed (errors are logged, not rethrown — best-effort by design).
 */
const createPDF = async (html, outputPath) => {
  let pdf;
  let browser;
  try {
    browser = await puppeteer.launch({
      args: ['--no-sandbox'],
      headless: true,
      timeout: 0
    });
    const page = await browser.newPage();
    await page.emulateMediaType('screen');
    await page.setContent(html, {
      waitUntil: 'networkidle0'
    });
    pdf = await page.pdf({
      // path: outputPath,
      format: 'A4',
      printBackground: true,
      margin: {
        top: "50px",
        bottom: "50px"
      }
    });
    console.log("Download finished"); //Added this to debug that it finishes correctly
  } catch (e) {
    console.error(e);
  } finally {
    // Close the browser even on failure: a leaked Chromium process can keep the
    // Cloud Function instance busy and trigger further timeouts.
    if (browser) {
      await browser.close();
    }
  }
  return pdf;
};
Running puppeteer with Proxy authentication failing.
Puppeteer version: 1.8
Platform / OS version:MacOS 10.13.6
Node.js version:v10.9.0
const puppeteer = require('puppeteer');
// Reproduction of the failure: navigating through an authenticated proxy dies
// with net::ERR_TUNNEL_CONNECTION_FAILED.
(async () => {
const browser = await puppeteer.launch({
headless:false,
ignoreHTTPSErrors:true,
devtools:true,
timeout:3000,
// NOTE(review): the proxy is given as bare "host:port" with no scheme; per the
// answer below, prefixing the scheme inside the argument
// (e.g. --proxy-server=https=host:port) is what made the tunnel work.
args: ['--no-sandbox','--proxy-server=xxx:xxx']
});
const user='xxx';
const password='xxx';
const page = await browser.newPage();
// Manually setting a Proxy-Authorization header was tried and abandoned;
// page.authenticate() is the supported way to answer the proxy's auth challenge.
// await page.setExtraHTTPHeaders({
// 'Proxy-Authorization': 'Basic ' + Buffer.from(`${user}:${password}`).toString('base64'),
// });
await page.authenticate({username:user, password:password});
await page.goto('https://www.apple.com/');
let title = await page.title();
console.log("title:" + title);
await browser.close();
})();
got error:
(node:5858) UnhandledPromiseRejectionWarning:
Error: net::ERR_TUNNEL_CONNECTION_FAILED at https://www.apple.com/
at navigate (/xxx/node_modules/_puppeteer#1.8.0#puppeteer/lib/Page.js:622:37)
at process._tickCallback (internal/process/next_tick.js:68:7)
(node:5858) UnhandledPromiseRejectionWarning:
Unhandled promise rejection. This error originated either by throwing
inside of an async function without a catch block,
or by rejecting a promise which was not handled with .catch().
(rejection id: 1)
// Working variant: the proxy scheme is written inside the --proxy-server
// argument ("https=host:port"), which lets Chromium tunnel HTTPS traffic.
const puppeteer = require('puppeteer');

(async () => {
  const launchOptions = {
    headless: false,
    ignoreHTTPSErrors: true,
    devtools: true,
    timeout: 3000,
    args: ['--no-sandbox', '--proxy-server=https=xxx:xxx'],
  };
  const browser = await puppeteer.launch(launchOptions);

  const credentials = { username: 'xxx', password: 'xxx' };

  const page = await browser.newPage();
  // Answer the proxy's Basic-auth challenge with the DevTools auth handler.
  await page.authenticate(credentials);
  await page.goto('https://www.apple.com/');

  const title = await page.title();
  console.log("title:" + title);

  await browser.close();
})();
For me it worked by adding `https=` inside the proxy-server argument.
Within your args to launch puppeteer,
You can also try this format:
--proxy-server=http=xxx:xxxx
OR
Have you tried setting proxy like this '--proxy-server=https=MyWorkingProxyIP:Port' according to docs https://www.chromium.org/developers/design-documents/network-settings ?