I am trying to connect to a .onion site using Python. I have Tor running on port 9050, and I am getting the following error:
Traceback (most recent call last):
File "/Users/jane/code/test/test.py", line 15, in main
res = await fetch(session, id)
File "/Users/jane/code/test/test.py", line 9, in fetch
async with session.get(url) as res:
File "/usr/local/lib/python3.7/site-packages/aiohttp/client.py", line 1005, in __aenter__
self._resp = await self._coro
File "/usr/local/lib/python3.7/site-packages/aiohttp/client.py", line 476, in _request
timeout=real_timeout
File "/usr/local/lib/python3.7/site-packages/aiohttp/connector.py", line 522, in connect
proto = await self._create_connection(req, traces, timeout)
File "/usr/local/lib/python3.7/site-packages/aiohttp/connector.py", line 854, in _create_connection
req, traces, timeout)
File "/usr/local/lib/python3.7/site-packages/aiohttp/connector.py", line 959, in _create_direct_connection
raise ClientConnectorError(req.connection_key, exc) from exc
aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host intelex7ny6coqno.onion:80 ssl:None [nodename nor servname provided, or not known]
The code:
import asyncio
import aiohttp
from aiohttp_socks import SocksConnector

async def fetch(session, id):
    print('Starting {}'.format(id))
    url = 'http://intelex7ny6coqno.onion/topic/{}'.format(id)
    async with session.get(url) as res:
        return res.text

async def main(id):
    connector = SocksConnector.from_url('socks5://localhost:9050')
    async with aiohttp.ClientSession(connector=connector) as session:
        res = await fetch(session, id)
        print(res)

if __name__ == '__main__':
    ids = ['10', '11', '12']
    loop = asyncio.get_event_loop()
    future = [asyncio.ensure_future(main(id)) for id in ids]
    loop.run_until_complete(asyncio.wait(future))
This code works fine:
import requests
session = requests.session()
session.proxies['http'] = 'socks5h://localhost:9050'
session.proxies['https'] = 'socks5h://localhost:9050'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
}
res = session.get(url, headers=headers)
print(res)
Why am I getting Cannot connect to host intelex7ny6coqno.onion:80 ssl:None [nodename nor servname provided, or not known]?
What am I missing here?
By default the connector appears to resolve hostnames with the local DNS resolver, which cannot resolve .onion addresses. When you use requests with the socks5h:// scheme, DNS resolution happens over SOCKS (i.e. through Tor), which is why that version works.
Adding rdns=True, which asks the proxy to do the resolution remotely, appears to work for .onion addresses:
connector = SocksConnector.from_url('socks5://localhost:9050', rdns=True)
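A minimal sketch of the corrected client, assuming the aiohttp_socks version in use still exposes SocksConnector (newer releases call it ProxyConnector) and keeping the question's URL and IDs:
import asyncio

import aiohttp
from aiohttp_socks import SocksConnector  # ProxyConnector in newer aiohttp_socks releases


async def fetch(session, id):
    url = 'http://intelex7ny6coqno.onion/topic/{}'.format(id)
    async with session.get(url) as res:
        # res.text is a coroutine method in aiohttp, so call and await it
        return await res.text()


async def main(id):
    # rdns=True hands the hostname to the SOCKS proxy, so Tor resolves the .onion address
    connector = SocksConnector.from_url('socks5://localhost:9050', rdns=True)
    async with aiohttp.ClientSession(connector=connector) as session:
        print(await fetch(session, id))


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    tasks = [asyncio.ensure_future(main(id)) for id in ['10', '11', '12']]
    loop.run_until_complete(asyncio.wait(tasks))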
Related
I have stumbled upon an error that I cannot resolve. When I run my Python code, this error occurs, and only when I am making an API call using the requests package.
Code calling the API:
def getAccs(id):
    accountid = ''
    url = "{}/{}".format(acc_api, id)
    req = requests.get(url, headers=head)
    result = json.loads(req.text)
    if result['id'] is None:
        # Fetches accountid from another API call after updating
        accountid = updateCorp(result['name'], id)
    else:
        accountid = result['id']
    return accountid

if __name__ == "__main__":
    ### Get data from appSettings.json
    with open('appSettingsStg.json') as app:
        data = json.load(app)
    acc_api = data['Urls']['Accounts']
    # Header
    head = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36",
        "Content-Type": "application/json"
    }
Error:
Traceback (most recent call last):
File "insert.py", line 313, in <module>
acc.append(res)
File "insert.py", line 102, in getAccs
req = requests.get(url, headers=head)
File "/home/dev/.local/lib/python3.7/site-packages/requests/api.py", line 76, in get
return request('get', url, params=params, **kwargs)
File "/home/dev/.local/lib/python3.7/site-packages/requests/api.py", line 61, in request
return session.request(method=method, url=url, **kwargs)
File "/home/dev/.local/lib/python3.7/site-packages/requests/sessions.py", line 530, in request
resp = self.send(prep, **send_kwargs)
File "/home/dev/.local/lib/python3.7/site-packages/requests/sessions.py", line 685, in send
r.content
File "/home/dev/.local/lib/python3.7/site-packages/requests/models.py", line 829, in content
self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
File "/home/dev/.local/lib/python3.7/site-packages/requests/models.py", line 751, in generate
for chunk in self.raw.stream(chunk_size, decode_content=True):
File "/home/dev/.local/lib/python3.7/site-packages/urllib3/response.py", line 571, in stream
for line in self.read_chunked(amt, decode_content=decode_content):
File "/home/dev/.local/lib/python3.7/site-packages/urllib3/response.py", line 738, in read_chunked
self._init_decoder()
File "/home/dev/.local/lib/python3.7/site-packages/urllib3/response.py", line 376, in _init_decoder
self._decoder = _get_decoder(content_encoding)
File "/home/dev/.local/lib/python3.7/site-packages/urllib3/response.py", line 147, in _get_decoder
return GzipDecoder()
File "/home/dev/.local/lib/python3.7/site-packages/urllib3/response.py", line 74, in __init__
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
ValueError: Invalid initialization option
Some things I have tried:
Importing zlib and urllib3 (as mentioned in this post)
Upgrading the requests package (v2.24.0 as of 25/08/2020)
Reinstalling requests and urllib3 (didn't work, but I thought it was at least worth a try)
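The failing call in the traceback is urllib3 constructing its gzip decoder, so a minimal check that isolates it (a diagnostic sketch, not a fix) is to run the same zlib constructor directly; if this also raises ValueError, the zlib build in that Python installation is at fault rather than requests or urllib3:
import zlib

# The same call urllib3's GzipDecoder makes in __init__;
# 16 + MAX_WBITS asks zlib for a gzip-wrapped stream.
decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
print('gzip decoder created:', decoder)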
Any advice is much appreciated!
I want to write a simple script that checks whether a website is up. If it is not, I want to catch the HTTP error code using the aiohttp module for Python. In the example below I pass in a fake website, 'http://www.googlesr2332.com'. Rather than returning the HTTP error, I am getting the following:
Traceback (most recent call last):
File "/home/runner/.local/share/virtualenvs/python3/lib/python3.7/site-packages/aiohttp/connector.py", line 967, in _create_direct_connection
traces=traces), loop=self._loop)
File "/home/runner/.local/share/virtualenvs/python3/lib/python3.7/site-packages/aiohttp/connector.py", line 830, in _resolve_host
self._resolver.resolve(host, port, family=self._family)
File "/home/runner/.local/share/virtualenvs/python3/lib/python3.7/site-packages/aiohttp/resolver.py", line 30, in resolve
host, port, type=socket.SOCK_STREAM, family=family)
File "/usr/local/lib/python3.7/asyncio/base_events.py", line 784, in getaddrinfo
None, getaddr_func, host, port, family, type, proto, flags)
File "/usr/local/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/usr/local/lib/python3.7/socket.py", line 748, in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: [Errno -2] Name or service not known
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "main.py", line 19, in <module>
loop.run_until_complete(main())
File "/usr/local/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "main.py", line 8, in main
async with session.get(site) as response:
File "/home/runner/.local/share/virtualenvs/python3/lib/python3.7/site-packages/aiohttp/client.py", line 1012, in __aenter__
self._resp = await self._coro
File "/home/runner/.local/share/virtualenvs/python3/lib/python3.7/site-packages/aiohttp/client.py", line 483, in _request
timeout=real_timeout
File "/home/runner/.local/share/virtualenvs/python3/lib/python3.7/site-packages/aiohttp/connector.py", line 523, in connect
proto = await self._create_connection(req, traces, timeout)
File "/home/runner/.local/share/virtualenvs/python3/lib/python3.7/site-packages/aiohttp/connector.py", line 859, in _create_connection
req, traces, timeout)
File "/home/runner/.local/share/virtualenvs/python3/lib/python3.7/site-packages/aiohttp/connector.py", line 971, in _create_direct_connection
raise ClientConnectorError(req.connection_key, exc) from exc
aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host www.googlesr2332.com:80 ssl:default [Name or service not known]
Here is the sample code I am running:
import aiohttp
import asyncio

sites = ['http://www.google.com', 'http://python.org', 'http://www.facebook.com', 'http://www.googlesr2332.com']

async def main():
    async with aiohttp.ClientSession() as session:
        for site in sites:
            async with session.get(site) as response:
                if response.status == 200:
                    print("Status:", response.status)
                    print("Content-type:", response.headers['content-type'])
                    html = await response.text()
                    print("Body:", html[:15], "...")
                else:
                    print(response.status)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
You have code for the case where a response comes back, but no code to handle the case where the connection itself fails.
import aiohttp
import asyncio

sites = ['http://www.google.com', 'http://python.org', 'http://www.facebook.com', 'http://www.googlesr2332.com']

async def main():
    async with aiohttp.ClientSession() as session:
        for site in sites:
            try:
                async with session.get(site) as response:
                    if response.status == 200:
                        print("Status:", response.status)
                        print("Content-type:", response.headers['content-type'])
                        html = await response.text()
                        print("Body:", html[:15], "...")
                    else:
                        print(response.status)
            except aiohttp.ClientConnectorError as e:
                print('Connection Error', str(e))

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
When you make a request to a website, you expect a response from it, but if the request can't reach the server there is no response at all. You don't have any error handling, so you get an unhandled exception when you try to reach a website that doesn't exist. The error message is pretty much self-explanatory: Cannot connect to host www.googlesr2332.com:80 ssl:default [Name or service not known]. Consider wrapping your request-sending code in try/except.
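If the goal is to report an outcome for every site instead of stopping at the first failure, a small sketch in the same spirit (catching the broader aiohttp.ClientError base class here is my choice, not something the question requires) could look like this:
import asyncio
import aiohttp

sites = ['http://www.google.com', 'http://python.org', 'http://www.googlesr2332.com']

async def check(session, site):
    # Returns the HTTP status, or the connection error, without raising.
    try:
        async with session.get(site) as response:
            return site, response.status
    except aiohttp.ClientError as e:
        return site, e

async def main():
    async with aiohttp.ClientSession() as session:
        results = await asyncio.gather(*(check(session, s) for s in sites))
    for site, outcome in results:
        print(site, '->', outcome)

asyncio.get_event_loop().run_until_complete(main())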
Hello, in my code the requests break. I have tried several times, but after 1-2 hours the bot's status no longer changes.
I am hosting those files on an FTP server.
import discord
from discord.ext.commands import Bot
from discord.ext import commands
import asyncio
import time
import random
from discord import Game
import math, time
import requests

Client = discord.client
client = commands.Bot(command_prefix = '!')
Clientdiscord = discord.Client()

@client.event
async def on_ready():
    client.loop.create_task(scheduler())
    print('project ready')

async def scheduler():
    while True:
        # sleep until the next whole second
        now = time.time()
        await asyncio.sleep(math.ceil(now) - now)
        await client.change_presence(game=discord.Game(name='steady', type=3))
        await asyncio.sleep(5)
        await client.change_presence(game=discord.Game(name='ready', type=3))
        await asyncio.sleep(5)
        await client.change_presence(game=discord.Game(name='go', type=3))
        await asyncio.sleep(5)
        await client.change_presence(game=discord.Game(name='Total Used : '+requests.get('http://username.mydomain/project/total_visit/count.txt').text, type=3))
        await asyncio.sleep(10)
        await client.change_presence(game=discord.Game(name='Online : '+requests.get('http://username.mydomain/project/total_online/Live.php').text, type=3))
        await asyncio.sleep(10)
        await client.change_presence(game=discord.Game(name='Total Users : '+requests.get('http://username.mydomain/project/total_users/total.php').text, type=3))
        await asyncio.sleep(10)
        await client.change_presence(game=discord.Game(name='return again', type=3))
        # I tried to kill all requests so it starts clean again.
        client.logout()
        client.close()
        await asyncio.sleep(20)

client.run("client_token")
Task exception was never retrieved
future: exception=ConnectionError(MaxRetryError("HTTPConnectionPool(host='username.mydomain', port=80): Max retries exceeded with url: /project/total_visit/count.txt (Caused by NewConnectionError(': Failed to establish a new connection: [WinError 10061] No connection could be made because the target machine actively refused it',))",),)>
Traceback (most recent call last):
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\connection.py", line 159, in _new_conn
(self._dns_host, self.port), self.timeout, **extra_kw)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\util\connection.py", line 80, in create_connection
raise err
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\util\connection.py", line 70, in create_connection
sock.connect(sa)
ConnectionRefusedError: [WinError 10061] No connection could be made because the target machine actively refused it
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\connectionpool.py", line 600, in urlopen
chunked=chunked)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\connectionpool.py", line 354, in _make_request
conn.request(method, url, **httplib_request_kw)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\http\client.py", line 1239, in request
self._send_request(method, url, body, headers, encode_chunked)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\http\client.py", line 1285, in _send_request
self.endheaders(body, encode_chunked=encode_chunked)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\http\client.py", line 1234, in endheaders
self._send_output(message_body, encode_chunked=encode_chunked)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\http\client.py", line 1026, in _send_output
self.send(msg)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\http\client.py", line 964, in send
self.connect()
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\connection.py", line 181, in connect
conn = self._new_conn()
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\connection.py", line 168, in _new_conn
self, "Failed to establish a new connection: %s" % e)
urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [WinError 10061] No connection could be made because the target machine actively refused it
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests\adapters.py", line 449, in send
timeout=timeout
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\connectionpool.py", line 638, in urlopen
_stacktrace=sys.exc_info()[2])
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\util\retry.py", line 398, in increment
raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='username.mydomain', port=80): Max retries exceeded with url: /project/total_visit/count.txt (Caused by NewConnectionError(': Failed to establish a new connection: [WinError 10061] No connection could be made because the target machine actively refused it',))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\Administrator\Desktop\bot.py", line 33, in scheduler
await client.change_presence(game=discord.Game(name='Total Used : '+requests.get('http://username.mydomain/project/total_visit/count.txt').text, type=3))
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests\api.py", line 75, in get
return request('get', url, params=params, **kwargs)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests\api.py", line 60, in request
return session.request(method=method, url=url, **kwargs)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests\sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests\sessions.py", line 646, in send
r = adapter.send(request, **kwargs)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests\adapters.py", line 516, in send
raise ConnectionError(e, request=request)
requests.exceptions.ConnectionError: HTTPConnectionPool(host='username.mydomain', port=80): Max retries exceeded with url: /project/total_visit/count.txt (Caused by NewConnectionError(': Failed to establish a new connection: [WinError 10061] No connection could be made because the target machine actively refused it',))
You are definitely going to get errors; you are making a lot of requests.get calls.
To solve it, try closing the requests and restarting the bot with a background command.
I'm not an expert, but you can try my steps.
1) Add the full urllib3 imports:
import discord
from discord.ext.commands import Bot
from discord.ext import commands
import asyncio
import time
import random
from discord import Game
import math, time
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
import urllib3
import urllib3.contrib.pyopenssl
import certifi
2) Add a background-worker task (it will re-run your requests.get calls every cycle):
Client = discord.client
client = commands.Bot(command_prefix = '!')
Clientdiscord = discord.Client()
urllib3.contrib.pyopenssl.inject_into_urllib3()

@client.event
async def on_ready():
    print('Project Runing..!')

async def task():
    await client.wait_until_ready()
    while True:
        await client.change_presence(game=discord.Game(name='Loading..!', type=3))
        print('Get_Url-Refresh')
        await asyncio.sleep(1)
        print('Total_Used')
        one = requests.get('http://username.mydomain/project/total_visit/count.txt', timeout=5)
        print('Total_Online')
        two = requests.get('http://username.mydomain/project/total_online/Live.php', timeout=5)
        print('Total_Users')
        three = requests.get('http://username.mydomain/project/total_users/total.php', timeout=5)
        await asyncio.sleep(1)
        print('Refresh Request')
        await asyncio.sleep(10)
        print('Running')
        await client.change_presence(game=discord.Game(name='steady', type=3))
        await asyncio.sleep(5)
        await client.change_presence(game=discord.Game(name='ready', type=3))
        await asyncio.sleep(5)
        await client.change_presence(game=discord.Game(name='go', type=3))
        await asyncio.sleep(5)
        print('Total Used..!')
        await client.change_presence(game=discord.Game(name='Total Used : ' + one.text, type=3))
        await asyncio.sleep(5)
        print('Online..!')
        await client.change_presence(game=discord.Game(name='Online : ' + two.text, type=1))
        await asyncio.sleep(5)
        print('Total Users..!')
        await client.change_presence(game=discord.Game(name='Total Users : ' + three.text, type=3))
        await asyncio.sleep(5)
        print('Refresh..!')

def handle_exit():
    print("Handling")
    client.loop.run_until_complete(client.logout())
    for t in asyncio.Task.all_tasks(loop=client.loop):
        if t.done():
            t.exception()
            continue
        t.cancel()
        try:
            client.loop.run_until_complete(asyncio.wait_for(t, 5, loop=client.loop))
            t.exception()
        except asyncio.InvalidStateError:
            pass
        except asyncio.TimeoutError:
            pass
        except asyncio.CancelledError:
            pass

while True:
    @client.event
    async def on_message(m):
        if m.content == 'die':
            print("Terminating")
            raise SystemExit

    client.loop.create_task(task())
    try:
        client.loop.run_until_complete(client.start('You_Token_Here'))
    except SystemExit:
        handle_exit()
    except KeyboardInterrupt:
        handle_exit()
        client.loop.close()
        print("Program ended")
        break
    print("Bot restarting")
    client = discord.Client(loop=client.loop)
Credits to Dev I.A (Gamer's Control).
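A lighter-weight alternative, sketched here on the assumption that the host is only intermittently unreachable, is to guard each requests.get call so a failed fetch puts a placeholder in the status instead of killing the scheduler task (the "Task exception was never retrieved" message shows that is what happened); the safe_fetch helper name is mine:
import requests

def safe_fetch(url):
    # Returns the response body, or a placeholder if the host refuses the connection.
    try:
        return requests.get(url, timeout=5).text
    except requests.exceptions.RequestException as e:
        print('fetch failed:', e)
        return 'n/a'

# inside scheduler():
#     await client.change_presence(game=discord.Game(
#         name='Total Used : ' + safe_fetch('http://username.mydomain/project/total_visit/count.txt'),
#         type=3))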
I am running aiohttp as my server. When a request comes in, I try to spawn a process to handle it. But I get the below error:
Traceback (most recent call last):
File "asyncppx.py", line 33, in <module>
app.add_routes([web.get('/', asyncio.ensure_future(runMcows(n)))])
File "/Users/i3ye/Programming/vsc/async/env/lib/python3.6/site-packages/aiohttp/web_app.py", line 231, in add_routes
self.router.add_routes(routes)
File "/Users/i3ye/Programming/vsc/async/env/lib/python3.6/site-packages/aiohttp/web_urldispatcher.py", line 966, in add_routes
route_obj.register(self)
File "/Users/i3ye/Programming/vsc/async/env/lib/python3.6/site-packages/aiohttp/web_routedef.py", line 38, in register
reg(self.path, self.handler, **self.kwargs)
File "/Users/i3ye/Programming/vsc/async/env/lib/python3.6/site-packages/aiohttp/web_urldispatcher.py", line 922, in add_get
resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
File "/Users/i3ye/Programming/vsc/async/env/lib/python3.6/site-packages/aiohttp/web_urldispatcher.py", line 269, in add_route
expect_handler=expect_handler)
File "/Users/i3ye/Programming/vsc/async/env/lib/python3.6/site-packages/aiohttp/web_urldispatcher.py", line 682, in __init__
resource=resource)
File "/Users/i3ye/Programming/vsc/async/env/lib/python3.6/site-packages/aiohttp/web_urldispatcher.py", line 103, in __init__
assert callable(handler), handler
AssertionError: <Task ...>
Task was destroyed but it is pending! task: <Task ...>
sys:1: RuntimeWarning: coroutine 'runMcows' was never awaited
The code is below, any suggestions?
from aiohttp import web
import aiohttp
import asyncio

loop = asyncio.get_event_loop()
#tasks = []
n = 0

def mcowA(n):
    print(n, " : A")
    return

async def fetch(session, url):
    async with session.get(url) as response:
        return await response.text()

def mcowB(n):
    print(n, " : B")
    return

async def runMcows(n):
    mcowA(n)
    async with aiohttp.ClientSession() as session:
        html = await fetch(session, 'http://localhost:8081')
        mcowB(n)
        return html

try:
    app = web.Application()
    app.add_routes([web.get('/', asyncio.ensure_future(runMcows(n)))])
    loop.run_forever()
    web.run_app(app)
finally:
    loop.close()
If you look at the server example here, your code should be like this in the main execution:
app = web.Application()
app.add_routes([web.get('/', runMcows)])
web.run_app(app)
To app.add_routes you need to pass the coroutine function runMcows itself, not a Task, and the handler can only take one argument: the request itself.
async def runMcows(request):
    mcowA(n)
    async with aiohttp.ClientSession() as session:
        html = await fetch(session, 'http://localhost:8081')
        mcowB(n)
        return web.Response(text=html)  # Change this response type based on what you need.
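For completeness, a self-contained sketch of that arrangement, with the ClientSession fetch to localhost:8081 set aside and app storage used as one possible way to hand n to the handler (both of those choices are mine, not part of the original code):
from aiohttp import web

def mcowA(n):
    print(n, " : A")

def mcowB(n):
    print(n, " : B")

async def runMcows(request):
    # The handler receives only the request; anything else must come from
    # module-level state or from request.app.
    n = request.app['n']
    mcowA(n)
    mcowB(n)
    return web.Response(text='handled request {}'.format(n))

app = web.Application()
app['n'] = 0  # application storage makes n available inside the handler
app.add_routes([web.get('/', runMcows)])
web.run_app(app)  # serves on http://localhost:8080 by default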
I'm trying to get a website's page source after logging in, but I'm having trouble logging in to reach it. The url is the page I see after logging in; i.e. if I log in on Chrome, I can use url to go straight to where I need to get the source data.
I keep getting multiple errors, primarily handshake errors: "sslv3 alert handshake failure", "bad handshake", and "urllib3.exceptions.MaxRetryError". I think the primary error is:
Traceback (most recent call last):
File "C:\Users\bwayne\AppData\Local\Programs\Python\Python36-32\lib\site-packages\urllib3\contrib\pyopenssl.py", line 441, in wrap_socket
cnx.do_handshake()
File "C:\Users\bwayne\AppData\Local\Programs\Python\Python36-32\lib\site-packages\OpenSSL\SSL.py", line 1716, in do_handshake
self._raise_ssl_error(self._ssl, result)
File "C:\Users\bwayne\AppData\Local\Programs\Python\Python36-32\lib\site-packages\OpenSSL\SSL.py", line 1456, in _raise_ssl_error
_raise_current_error()
File "C:\Users\bwayne\AppData\Local\Programs\Python\Python36-32\lib\site-packages\OpenSSL_util.py", line 54, in exception_from_error_queue
raise exception_type(errors)
OpenSSL.SSL.Error: [('SSL routines', 'ssl3_read_bytes', 'sslv3 alert handshake failure')]
During handling of the above exception, another exception occurred:
My code:
import requests, sys
import ssl
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager

ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
ctx.options |= ssl.OP_NO_SSLv2
ctx.options |= ssl.OP_NO_SSLv3
ctx.options |= ssl.OP_NO_TLSv1
ctx.options |= ssl.OP_NO_TLSv1_1

class Ssl3HttpAdapter(HTTPAdapter):
    def init_poolmanager(self, connections, maxsize, block=False):
        self.poolmanager = PoolManager(num_pools=connections,
                                       maxsize=maxsize,
                                       block=block,
                                       ssl_version=ssl.PROTOCOL_TLSv1)

url = "www.thewebsite.com"

def do_requests(url):
    payload = {'Username': 'myName', 'Password': 'myPass'}
    headers = {'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Mobile Safari/537.36'}
    with requests.Session() as s:
        s.mount(url, Ssl3HttpAdapter())
        p = s.post(url, headers=headers, data=payload, verify=False)

def main(url):
    do_requests(url)

main(url)
How can I log in? I've double- and triple-checked that the HTML form field names are correct.
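One thing that stands out in the code above, offered as an observation rather than a confirmed fix: the ctx you build is never attached to the adapter, and the PoolManager is created with ssl_version=ssl.PROTOCOL_TLSv1, which that context explicitly disables. A sketch of an adapter that actually passes the context through (the Tls12HttpAdapter name is mine, and it assumes the server wants TLS 1.2), with an https:// scheme on the url so s.mount can match it:
import ssl
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager

ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3

class Tls12HttpAdapter(HTTPAdapter):
    """Adapter that hands the custom SSLContext to urllib3."""
    def init_poolmanager(self, connections, maxsize, block=False):
        self.poolmanager = PoolManager(num_pools=connections,
                                       maxsize=maxsize,
                                       block=block,
                                       ssl_context=ctx)

url = "https://www.thewebsite.com"  # scheme is required for mounting and for TLS
with requests.Session() as s:
    s.mount("https://", Tls12HttpAdapter())
    payload = {'Username': 'myName', 'Password': 'myPass'}
    resp = s.post(url, data=payload)
    print(resp.status_code)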