Python 3.5 Async & await Ping

I'm trying to make an asynchronous ping process using subprocess.Popen, and I'm trying to understand how to implement it in this case:
import asyncio
import subprocess

aList = []

async def sn(frm, to):
    i = 0
    for i in list(range(frm, to)):
        aList.append(i)
    cmd = "ping -n 1 " + '10.0.0.'
    coroutines = [subprocess.Popen(cmd + str(i), stdout=subprocess.PIPE) for i in aList]
    results = await asyncio.gather(*coroutines)
    print(results)

loop = asyncio.get_event_loop()
loop.run_until_complete(sn(frm, to))
loop.close()

You can find simpler code for pinging a host without async/await, but if necessary you can try the following working example to ping with async/await:
import platform
import subprocess

import aiohttp
import asyncio

async def getPingedHost(host, netTimeout=3):
    """ Description: Function to ping a host and get a string of the outcome, or False
        Import: from shared.getPingedHost import getPingedHost
        Testing: python -m shared.getPingedHost
    """
    args = ['ping']
    platformOs = platform.system().lower()
    if platformOs == 'windows':
        args.extend(['-n', '1'])
        args.extend(['-w', str(netTimeout * 1000)])
    elif platformOs in ('linux', 'darwin'):
        args.extend(['-c', '1'])
        args.extend(['-W', str(netTimeout)])
    else:
        raise NotImplementedError('Unsupported OS: {}'.format(platformOs))
    args.append(host)
    output = ''
    try:
        # Capture the output on every platform, otherwise outputList stays
        # empty on linux/darwin and outputList[2] below raises IndexError
        output = subprocess.run(args, check=True, universal_newlines=True,
                                stdout=subprocess.PIPE,    # Capture standard out
                                stderr=subprocess.STDOUT,  # Capture standard error
                                ).stdout
        outputList = str(output).split('\n')
        if platformOs == 'windows':
            if output and 'TTL' not in output:
                output = False
        else:
            output = outputList[2]
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
        output = False
    return output

async def main():
    async with aiohttp.ClientSession() as client:
        output = await getPingedHost('google.com')
        print(output)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
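Note that subprocess.run() is a blocking call, so the example above doesn't actually overlap pings; it just runs inside a coroutine. A minimal sketch of a genuinely non-blocking ping sweep using asyncio's own subprocess support (my own example, not from the original question; it assumes a Unix-style ping -c, use -n on Windows):

import asyncio

async def ping(host):
    # Spawn ping without blocking the event loop
    proc = await asyncio.create_subprocess_exec(
        'ping', '-c', '1', host,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT)
    await proc.communicate()           # wait for it to finish
    return host, proc.returncode == 0  # returncode 0 means a reply came back

async def sweep(frm, to):
    tasks = [ping('10.0.0.{}'.format(i)) for i in range(frm, to)]
    for host, alive in await asyncio.gather(*tasks):
        print(host, 'online' if alive else 'offline')

loop = asyncio.get_event_loop()
loop.run_until_complete(sweep(1, 10))
loop.close()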

import subprocess
import threading

class rscan(object):
    state = {'online': [], 'offline': []}  # Dictionary with lists
    ips = []  # Should be filled by function after taking range
    # Amount of pings at the time
    thread_count = 8
    # Lock object to prevent race conditions
    lock = threading.Lock()

    # Using Windows ping command
    def ping(self, ip):
        answer = subprocess.call(['ping', '-n', '1', ip], stdout=open('1.txt', 'w'))
        return answer == 0 and ip

    def pop_queue(self):
        ip = None
        self.lock.acquire()  # lock !!!
        if self.ips:
            ip = self.ips.pop()
        self.lock.release()
        return ip

    def noqueue(self):
        while True:
            ip = self.pop_queue()
            if not ip:
                return None
            result = 'online' if self.ping(ip) else 'offline'
            self.state[result].append(ip)  ### check again

    def start(self):
        threads = []
        for i in range(self.thread_count):
            t = threading.Thread(target=self.noqueue)
            t.start()
            threads.append(t)
        # Wait for all threads
        [t.join() for t in threads]
        return self.state

    def rng(self, frm, to, ip3):
        self.frm = frm
        self.to = to
        self.ip3 = ip3
        for i in range(frm, to):
            ip = ip3 + str(i)
            self.ips.append(ip)

if __name__ == '__main__':
    scant = rscan()
    scant.thread_count = 8
I edited a class I found a bit; it also uses threads instead of async & await.
Credit: http://blog.boa.nu/2012/10/python-threading-example-creating-pingerpy.html
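For completeness, a usage sketch (my own assumption about how the class is meant to be driven; the address prefix and range are placeholders):

scant = rscan()
scant.thread_count = 8
scant.rng(1, 255, '10.0.0.')  # queue up 10.0.0.1 .. 10.0.0.254
state = scant.start()         # blocks until every worker thread finishes
print(state['online'])
print(state['offline'])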

Related

Having issues with ThreadPoolExecutor _wait_for_tstate_lock python (thread deadlock?)

I am having issues with ThreadPoolExecutor. It starts out strong and then slows down to an eventual stop. I don't understand what I'm doing wrong; I've tried moving the ThreadPoolExecutor section outside domains_loop, and just not using domains_loop at all, but it does the same thing.
Changing max_workers down to 5 just freezes it earlier, so I know I must be doing something wrong that has nothing to do with the number of threads.
The file reading, URL concatenation, and file writing work just fine; it's the async HTTP requests that seem to be broken.
Interestingly, even with fewer subdomains it will still occasionally lock.
import asyncio
import linecache
import os
import sys
from concurrent.futures import ThreadPoolExecutor

import requests
from bs4 import BeautifulSoup as bs

import user_agents  # the asker's own helper module (not shown in the question)

sub_file = [
    'mail.myresmed.com',
    'www.resmed.com',
    'bcg29k.2163007t.resmed.com',
    'account.resmed.com',
    'account-uat.resmed.com',
    'www.account-uat.resmed.com',
    'adfs.resmed.com',
    'admin-mysleep.resm'
]

dir_file = ['/.git', '/test', '/manage', '/login']

subfile_iterator = [0]
dirfile_iterator = [0]

subfile_readstack = []
dirfile_readstack = [""]  # first element is blank so the base url will be fetched

domains_list = []
results_list = []

sleep_inc = 0.0001
stack_size = 100

#browser_list = []
results = []

'''
***************************************************************************************************************************************************************************
FILE FNs
***************************************************************************************************************************************************************************
'''
async def write_to_file(results_list):
    file = open('results.txt', 'a')
    print("Writing to log")
    for result in results_list:
        #print("writing...\n")
        #print(result.headers)
        #file.write("{}\n\n".format(result.headers))
        headers = result.headers
        cookiejar = result.cookies
        cookies = cookiejar.items()
        file.write("\n\n")
        file.write("***************************************************************************************************************************************\n")
        file.write("---------------------------------------------------------------------------------------------------------------------------------------\n")
        file.write("---------------------------------------------------------------------------------------------------------------------------------------\n")
        file.write(" {} \n".format(result.url))
        file.write("---------------------------------------------------------------------------------------------------------------------------------------\n")
        file.write("---------------------------------------------------------------------------------------------------------------------------------------\n")
        file.write("- status: {}\n".format(result.status_code))
        file.write("- reason: {}\n".format(result.reason))
        #file.write("- is redirect? {}\n".format(result.is_redirect))
        #if result.is_redirect:
        #    file.write("is permanent redirect? {}\n".format(result.is_permanent.redirect))
        file.write("\n- headers: \n")
        for key, value in headers.items():
            file.write("\t{keys}: {values}\n".format(keys=key, values=value))
        file.write("\n- cookies: \n")
        for cookie in cookies:
            file.write("\t{}\n".format(cookie))
        result_bytes = result.content
        html_formatted = result_bytes.decode('utf-8')
        soup = bs(html_formatted, "html.parser")
        file.write("\n----------------------\n")
        file.write("- style tags: \n")
        file.write("----------------------\n\n")
        for tags in soup.find_all('style'):
            #prettify the css
            file.write("{}\n\n".format(tags))
        file.write("\n----------------------\n")
        file.write("- script tags: \n")
        file.write("----------------------\n\n")
        for tags in soup.find_all('script'):
            #prettify the javascript
            file.write("{}\n\n".format(tags))
        file.write("\n----------------------\n")
        file.write("- links: \n")
        file.write("----------------------\n\n")
        for tags in soup.find_all('a'):
            #prettify the javascript
            file.write("{}\n\n".format(tags))
        file.write("---------------------------------------------------------------------------------------------------------------------------------------\n")
        file.write("---------------------------------------------------------------------------------------------------------------------------------------\n")
        file.write("***************************************************************************************************************************************\n")
        file.write("\n")
    file.close()
def files_exist(subfile, dirfile):
    if os.path.isfile(subfile):
        subfile_exist = True
    else:
        print('sub_file does not exist')
    if os.path.isfile(dirfile):
        dirfile_exist = True
    else:
        print('dir_file does not exist')
    if subfile_exist and dirfile_exist:
        return True
    else:
        sys.exit()

async def read_from_file(list_file, file_lines, read_stack, file_iterator):
    global sleep_inc
    if file_iterator[-1] >= file_lines - 1:
        return
    if len(read_stack) < stack_size - 1:
        with open(list_file) as f:
            for i in range(1, file_lines + 1):
                file_iterator.append(i)
                line = linecache.getline(list_file, i, module_globals=None).strip()
                if len(line) > 0:
                    print("reading: {}".format(line))
                    read_stack.append(line)
                await asyncio.sleep(sleep_inc)
                if i == stack_size:
                    await asyncio.sleep(sleep_inc)
    else:
        await asyncio.sleep(sleep_inc)

async def get_lines(list_file):
    with open(list_file) as f:
        f.seek(0)  # ensure you're at the start of the file..
        first_char = f.read(1)  # get the first character
        if not first_char:
            print("FAIL: the sub or dir files (or both) are empty")  # first character is the empty string..
            sys.exit()
        else:
            f.seek(0)
            for i, l in enumerate(f):
                await asyncio.sleep(sleep_inc)
                pass
            return i + 1

async def file_lines():
    global sub_file
    global dir_file
    #global subfile_lines
    #global dirfile_lines
    if files_exist(sub_file, dir_file):
        print("Reading files... ")
        subfile_lines = files_read_loop.create_task(get_lines(sub_file))
        dirfile_lines = files_read_loop.create_task(get_lines(dir_file))
        await asyncio.wait([subfile_lines, dirfile_lines])
        return (subfile_lines, dirfile_lines)

async def load_files():
    global sub_file
    global dir_file
    global subfile_iterator
    global dirfile_iterator
    global subfile_readstack
    global dirfile_readstack
    (subfile_lines, dirfile_lines) = await file_lines()
    read_from_sub_file = files_read_loop.create_task(read_from_file(sub_file, subfile_lines.result(), subfile_readstack, subfile_iterator))
    read_from_dir_file = files_read_loop.create_task(read_from_file(dir_file, dirfile_lines.result(), dirfile_readstack, dirfile_iterator))
    concat_sub_to_dir = files_read_loop.create_task(concat_addr(subfile_readstack, dirfile_readstack))
    await asyncio.wait([read_from_sub_file, read_from_dir_file, concat_sub_to_dir])

async def write_log():
    global results
    print("write_log")
    ret = files_write_loop.create_task(write_to_file(results))
'''
***************************************************************************************************************************************************************************
URL FNs
***************************************************************************************************************************************************************************
'''

async def concat_addr(subread, dirread):
    global results_list
    global domains_list
    global sleep_inc
    global subfile_readstack
    global dirfile_readstack
    global subfile_lines
    global dirfile_lines
    domains_list_size = len(domains_list)
    if domains_list_size < stack_size - 1:
        for i, j in enumerate(subfile_readstack):
            for j, k in enumerate(dirfile_readstack):
                domains_list.insert(0, subfile_readstack[i] + dirfile_readstack[j])
                print("adding: {subf}{dirf} to domains_list".format(subf=subfile_readstack[i], dirf=dirfile_readstack[j]))
                await asyncio.sleep(sleep_inc)
    else:
        await asyncio.sleep(sleep_inc)

def fetch(session, url):
    FQDM = "https://{domain}?".format(domain=url)
    try:
        fresh_agent = user_agents.swap()
        custom_header = {'user-agent': fresh_agent}
        with session.get(FQDM, headers=custom_header) as response:
            status = response.status_code
            url = response.url
            print(f"=== {status} - {url}")
            results.append(response)
            return response
    except:
        print(f"Server at {url} not found")
    finally:
        pass

async def get(domains):
    global results
    with ThreadPoolExecutor(max_workers=50) as executor:
        with requests.Session() as session:
            loop = asyncio.get_event_loop()
            print('''\n\n
                  ------------------------
                  RESULTS
                  ------------------------
                  \n
                  ''')
            for url in domains:
                loop.run_in_executor(executor, fetch, *(session, url))
            return True

async def iterate_domains():
    global results
    global domains_list
    ret = domains_loop.create_task(get(domains_list))
'''
***************************************************************************************************************************************************************************
MAIN
***************************************************************************************************************************************************************************
'''
if __name__ == "__main__":
    try:
        #file_sema = asyncio.BoundedSemaphore(value=10)
        files_read_loop = asyncio.get_event_loop()
        files_read_loop.run_until_complete(load_files())

        domains_loop = asyncio.get_event_loop()
        domains_loop.set_debug(1)
        domains_loop.run_until_complete(iterate_domains())

        files_write_loop = asyncio.get_event_loop()
        files_write_loop.run_until_complete(write_log())
    except Exception as e:
        print("****** EXCEPTION: {} ".format(e))
        pass
    finally:
        files_read_loop.close()
        domains_loop.close()
        files_write_loop.close()
The solution is to add a timeout to each request, like so:
session.get(FQDM, headers=custom_header, timeout=X)
Full example:
def fetch(session, url):
    FQDM = "https://{domain}?".format(domain=url)
    try:
        fresh_agent = user_agents.swap()
        custom_header = {'user-agent': fresh_agent}
        with session.get(FQDM, headers=custom_header, timeout=X) as response:  # <<<<<-------- the added timeout
            status = response.status_code
            url = response.url
            print(f"=== {status} - {url}")
            results.append(response)
            return response
    except:
        print(f"Server at {url} not found")
    finally:
        pass
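Why this helps: without a timeout, a single unresponsive server can hold a requests connection open indefinitely, pinning its worker thread; as more threads get stuck, the pool slows and eventually freezes. A small sketch of the idea (the helper name and timeout values are mine, not from the question), using the (connect, read) timeout tuple that requests accepts:

import requests

def fetch_with_timeout(session, url, connect_timeout=3.05, read_timeout=10):
    # A hung server now raises instead of blocking the worker thread forever
    try:
        return session.get(url, timeout=(connect_timeout, read_timeout))
    except requests.exceptions.Timeout:
        print("Timed out: {}".format(url))
        return None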

Python OS polling select

I am following David Beazley's Python Concurrency From the Ground Up: LIVE! - PyCon 2015 talk, where I aim to understand the basics of Python concurrency.
I would like to know why Python's select is failing. It is used for polling the operating system (OS) to check if there is some task that needs to be done.
from socket import *
from select import select
from collections import deque

tasks = deque()
recv_wait = {}
send_wait = {}

def fib(n):
    if n <= 2:
        return 1
    else:
        return fib(n-1) + fib(n-2)

def run():
    while any([tasks, recv_wait, send_wait]):
        while not tasks:
            can_recv, can_send, _ = select(recv_wait, send_wait, [])
            for s in can_recv:
                tasks.append(recv_wait.pop(s))
            for s in can_send:
                tasks.append(send_wait.pop(s))
        task = tasks.popleft()
        try:
            why, what = next(task)
            if why == 'recv':
                recv_wait[what] = task
            elif why == 'send':
                send_wait[what] = task
            else:
                raise RuntimeError("Arg!!")
        except StopIteration:
            print("task done")

def fib_server(address):
    sock = socket(AF_INET, SOCK_STREAM)
    sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
    sock.bind(address)
    sock.listen(5)
    while True:
        yield 'recv', sock
        client, addr = sock.accept()
        print("Connection", addr)
        tasks.append(fib_handler(client))

def fib_handler(client):
    while True:
        yield 'recv', client
        req = client.recv(100)
        if not req:
            break
        n = int(req)
        result = fib(n)
        result = fib(n)
        resp = str(result).encode('ascii') + b'\n'
        yield 'send', resp
        client.send(resp)
    print("Closed")

tasks.append(fib_server(('', 25000)))
run()
# Separate terminal window
nc localhost 25000
12

# Running the Python server
➜ python3 -i aserver.py
Connection ('127.0.0.1', 61098)
Traceback (most recent call last):
  File "aserver.py", line 63, in <module>
    run()
  File "aserver.py", line 20, in run
    can_recv, can_send, _ = select(recv_wait, send_wait, [])
TypeError: argument must be an int, or have a fileno() method.
The problem is in fib_handler: it yields the response bytes instead of the client socket, so select() is eventually handed a bytes object, which has no fileno():

def fib_handler(client):
    while True:
        yield 'recv', client
        req = client.recv(100)
        if not req:
            break
        n = int(req)
        result = fib(n)
        result = fib(n)  # duplicate
        resp = str(result).encode('ascii') + b'\n'
        yield 'send', resp  # should be yielding the fd (client)
        client.send(resp)
    print("Closed")

Is there any class to return a print of a function?

I am developing a Python program for scanning ports. My script prints a line for every open port it checks, but I would like to remove this print and put it inside a class, so that the programmer can call the class when he wants to see the output.
I can create common classes to get user input from a main file (main.py) and run them inside the script, but I can't control the print from the main file.
import socket
import threading
import time

def ping(target, scale):
    def portscan(port):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            con = s.connect((target, port))
            time.sleep(5)
            port_print = 'Port :', port, "Is Open!."
            time.sleep(5)
            #python = sys.executable
            #os.execl(python, python, * sys.argv)
            print('Terminated')
            con.close()
        except:
            #result = None
            #return result
            pass

    r = 1
    scal = int(scale)
    for x in range(1, scal):
        t = threading.Thread(target=portscan, kwargs={'port': r})
        r += 1
        t.start()
As you can see, I created the variable port_print, and I would like the print to happen when the user calls for it from the main file.
Use a Queue to get around the return limitations of threads:
import socket
import threading
import time
from queue import Queue

def ping(target, scale, queue):
    def portscan(port, queue):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            con = s.connect((target, port))
            time.sleep(5)
            port_print = 'Port :', port, "Is Open!."
            queue.put(port_print)
            time.sleep(5)
            #python = sys.executable
            #os.execl(python, python, * sys.argv)
            print('Terminated')
            con.close()
        except:
            #result = None
            #return result
            pass

    r = 1
    scal = int(scale)
    for x in range(1, scal):
        t = threading.Thread(target=portscan, kwargs={'port': r, 'queue': queue})  # kwargs keys must be strings
        r += 1
        t.start()

def main():
    my_queue = Queue()
    target = 'some target'
    scale = 10
    ping(target, scale, my_queue)
    random_port_print = my_queue.get()
    print(random_port_print)
Not tested, but probably pretty close to correct.
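Since Queue.get() blocks, the main() above only prints the first result. A small variation (my own sketch, not from the answer) that drains everything the workers produce, giving up after a quiet period:

from queue import Queue, Empty

def drain(queue, quiet_seconds=6):
    # Keep printing results until no worker has produced anything for a while
    while True:
        try:
            print(queue.get(timeout=quiet_seconds))
        except Empty:
            break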

Pass asynchronous websocket.send() to stdout/stderr wrapper class

I have a class that unbuffers stdout and stderr, like so:
import re

class Unbuffered:
    def __init__(self, stream):
        self.stream = stream

    def write(self, data):
        data = data.strip()
        if data.startswith("INFO: "):
            data = data[6:]
            if '[' in data:
                progress = re.compile(r"\[(\d+)/(\d+)\]")
                data = progress.match(data)
                total = data.group(2)
                current = data.group(1)
                data = '{0}/{1}'.format(current, total)
        if data.startswith("ERROR: "):
            data = data[7:]
        self.stream.write(data + '\n')
        self.stream.flush()

    def __getattr__(self, attr):
        return getattr(self.stream, attr)
The output comes from a function run in a ProcessPoolExecutor when an inbound websocket message arrives.
I want the output printed to the console as well as sent to my websocket client. I tried making Unbuffered async and passing the websocket instance to it, but no luck.
UPDATE: The essentials of run() and my websocket handler() look something like this:
def run(url, path):
    logging.addLevelName(25, "INFO")
    fmt = logging.Formatter('%(levelname)s: %(message)s')
    #----
    output.progress_stream = Unbuffered(sys.stderr)
    stream = Unbuffered(sys.stdout)
    #----
    level = logging.INFO
    hdlr = logging.StreamHandler(stream)
    hdlr.setFormatter(fmt)
    log.addHandler(hdlr)
    log.setLevel(level)
    get_media(url, opt)

async def handler(websocket, path):
    while True:
        inbound = json.loads(await websocket.recv())
        if inbound is None:
            break
        url = inbound['url']
        if 'path' in inbound:
            path = inbound['path'].rstrip(os.path.sep) + os.path.sep
        else:
            path = os.path.expanduser("~") + os.path.sep
        # blah more code
        while inbound != None:
            await asyncio.sleep(.001)
            await loop.run_in_executor(None, run, url, path)
run(), handler() and Unbuffered are separate from each other.
Rewriting get_media() to use asyncio instead of running it in a different thread would be best. Otherwise, there are some options for communicating between a regular thread and coroutines, for example using a socketpair:
import asyncio
import socket
import threading
import time
import random

# threads stuff
def producer(n, writer):
    for i in range(10):
        # print("sending", i)
        writer.send("message #{}.{}\n".format(n, i).encode())
        time.sleep(random.uniform(0.1, 1))

def go(writer):
    threads = [threading.Thread(target=producer, args=(i + 1, writer,))
               for i in range(5)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    writer.send("bye\n".encode())

# asyncio coroutines
async def clock():
    for i in range(11):
        print("The time is", i)
        await asyncio.sleep(1)

async def main(reader):
    buffer = ""
    while True:
        buffer += (await loop.sock_recv(reader, 10000)).decode()
        # print(len(buffer))
        while "\n" in buffer:
            msg, _nl, buffer = buffer.partition("\n")
            print("Got", msg)
            if msg == "bye":
                return

reader, writer = socket.socketpair()
reader.setblocking(False)
threading.Thread(target=go, args=(writer,)).start()
# time.sleep(1.5)  # socket is buffering

loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait([clock(), main(reader)]))
loop.close()
You can also try this 3rd-party thread+asyncio compatible queue: janus
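A rough janus sketch (assuming pip install janus; the library exposes a sync_q side for threads and an async_q side for coroutines on the same queue):

import asyncio
import janus

def worker(sync_q):
    # Runs in a plain thread and uses the blocking side of the queue
    for i in range(5):
        sync_q.put(i)
    sync_q.put(None)  # sentinel: tell the consumer we're done

async def consume(async_q):
    # Runs in the event loop and awaits the same queue
    while True:
        item = await async_q.get()
        async_q.task_done()
        if item is None:
            break
        print("Got", item)

async def main():
    queue = janus.Queue()
    loop = asyncio.get_event_loop()
    fut = loop.run_in_executor(None, worker, queue.sync_q)
    await consume(queue.async_q)
    await fut

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.close()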

2 threads, each serving a different HTTP server, will not stop after a given duration

So I have spent at least a couple of days on this problem.
I would like to have two HTTP server threads, each serving a different IP:port.
I open Firefox and navigate to either, say, http://196.64.131.250:8001/ or http://196.64.131.250:8002, and it should do a GET.
I would also like my threads, or the program itself, to stop after a duration given on the command line, say 5 seconds.
I have tried everything I could, even SIGALRM and a "keepRunning" variable that gets changed by a third thread after the duration, but my program does not stop. What am I doing wrong, please?
Note that I have commented out the daemon lines, i.e. (ZhttpThread[1-2].daemon = True); if I don't comment them out, my threads stop right away. I want my HTTP server threads to continue servicing requests, and when the duration DUR expires, the program or the threads should stop.
import SimpleHTTPServer, SocketServer, logging, cgi, sys, signal, threading, time
import subprocess as sub

keepRunning = True
origTime = int(time.time())

class ServerHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    def do_GET(self):
        logging.warning("======= GET STARTED =======")
        getHdr = self.headers
        SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
        getHdr = self.headers
        print(', '.join((getHdr)))
        #if ("accept-encoding" in getHdr):
        if ("accept-encoding" in (', '.join((getHdr)))):
            print ('Test Passed ---------------')
            signal.alarm(1)

class threadWithTO(threading.Thread):
    def __init__(self, thID, ip, port, timeout):
        threading.Thread.__init__(self)
        self.ip = ip
        self.port = port
        self.handler = ServerHandler
        self.httpd = SocketServer.TCPServer((self.ip, self.port), self.handler)

    def run(self):
        print (self.httpd)
        #self.httpd.serve_forever()
        if (keepRunning == True):
            self.httpd.handle_request()
        else:
            self._stop.set()

    def Run(self):
        self.start()

def timeHandler(signum, frame):
    print('Times up', signum)
    sys.exit()

def timeChkr(threadName, dur, t1, t2):
    print (origTime)
    print ('Begin Timer thread')
    while True:
        nowTime = int(time.time())
        print (nowTime)
        delta = nowTime - origTime
        if (delta >= dur):
            keepRunning = False
            t1.stop()
            t2.stop()
        else:
            time.sleep(1)

def main():
    #signal.signal(signal.SIGALRM, timeHandler)
    #signal.alarm(DUR)
    origTime = int(time.time())
    ZhttpThread1 = threadWithTO("thread1", I, PORT, DUR)
    ZhttpThread2 = threadWithTO("thread2", I, (int(PORT)+1), DUR)
    t = threading.Thread(target=timeChkr, args=("thread3", DUR))
    t.daemon = True
    t.start()
    #ZhttpThread1.daemon = True
    #ZhttpThread2.daemon = True
    ZhttpThread1.Run()
    ZhttpThread2.Run()
OK, I figured out that the issue is with the socket. My socket stays open, and even though I have tried socket.settimeout I still can't get it to close.
Thanks to Andrew, my son, who sparked the idea in my head... here is the solution:
import BaseHTTPServer, SimpleHTTPServer, SocketServer
import cgi, logging, socket, sys, thread, time

class ServerHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    def do_GET(self):
        logging.warning("======= GET STARTED =======")
        logging.warning(self.headers)
        SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)

    def do_POST(self):
        logging.warning("======= POST STARTED =======")
        logging.warning(self.headers)
        form = cgi.FieldStorage(
            fp=self.rfile,
            headers=self.headers,
            environ={'REQUEST_METHOD': 'POST',
                     'CONTENT_TYPE': self.headers['Content-Type'],
                     })
        logging.warning("======= POST VALUES =======")
        print form.list
        '''
        for item in form.list:
            logging.warning(item) '''
        logging.warning("\n")
        SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)

class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
    def server_bind(self):
        BaseHTTPServer.HTTPServer.server_bind(self)
        self.socket.settimeout(1)
        self.run = True

    def get_request(self):
        while self.run:
            try:
                sock, addr = self.socket.accept()
                sock.settimeout(None)
                return (sock, addr)
            except socket.timeout:
                pass

    def stop(self):
        self.run = False

    def serve(self):
        while self.run:
            #print "1"
            self.handle_request()

if __name__ == "__main__":
    if len(sys.argv) < 4:  # need host, port and duration
        I = ""
        PORT = 8000
        DUR = 10
    else:
        I = sys.argv[1]
        PORT = int(sys.argv[2])
        DUR = int(sys.argv[3])

    #httpd1 = StoppableHTTPServer((I, PORT), SimpleHTTPServer.SimpleHTTPRequestHandler)
    #httpd2 = StoppableHTTPServer((I, (int(PORT)+1)), SimpleHTTPServer.SimpleHTTPRequestHandler)
    httpd1 = StoppableHTTPServer((I, PORT), ServerHandler)
    httpd2 = StoppableHTTPServer((I, (int(PORT)+1)), ServerHandler)
    thread.start_new_thread(httpd1.serve, ())
    thread.start_new_thread(httpd2.serve, ())
    #raw_input("Press <RETURN> to stop server\n")
    print '0'
    time.sleep(DUR)
    print 'Times up Dude'
    httpd1.stop()
    httpd2.stop()
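For comparison, on Python 3 the same shutdown behavior comes almost for free (a sketch, assuming Python 3.7+ for ThreadingHTTPServer): serve_forever() runs in a thread, and shutdown() makes it return, so no manual accept/timeout loop is needed.

import threading
import time
from http.server import ThreadingHTTPServer, SimpleHTTPRequestHandler

def serve(host, port, duration):
    httpd = ThreadingHTTPServer((host, port), SimpleHTTPRequestHandler)
    t = threading.Thread(target=httpd.serve_forever)
    t.start()
    time.sleep(duration)
    httpd.shutdown()      # makes serve_forever() return
    t.join()
    httpd.server_close()

threads = [threading.Thread(target=serve, args=('', port, 10))
           for port in (8001, 8002)]
for t in threads:
    t.start()
for t in threads:
    t.join()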
