How to write code for proper threading usage - multithreading

This is what I am doing right now:
def formate(newurl1, schemess):
    newurl = newurl1.replace('http://', '')
    portss = 21, 22, 80, 8080, 443, 8443, 3306, 445
    th1 = threading.Thread(target=connCheck, args=(newurl, schemess, 21,))
    th2 = threading.Thread(target=connCheck, args=(newurl, schemess, 22,))
    th3 = threading.Thread(target=connCheck, args=(newurl, schemess, 80,))
    th4 = threading.Thread(target=connCheck, args=(newurl, schemess, 8080,))
    th5 = threading.Thread(target=connCheck, args=(newurl, schemess, 443,))
    th6 = threading.Thread(target=connCheck, args=(newurl, schemess, 8443,))
    th7 = threading.Thread(target=connCheck, args=(newurl, schemess, 3306,))
    th8 = threading.Thread(target=connCheck, args=(newurl, schemess, 445,))
    print "\n Host:- ", newurl1, "\n"
    print "\t[+] List of open ports:-"
    th1.start()
    th2.start()
    th3.start()
    th4.start()
    th5.start()
    th6.start()
    th7.start()
    th8.start()
    th8.join()
for x in urls_returning200:
    formate(x, '')
Note: connCheck is another function of mine that creates sockets, so it doesn't need to be defined here.
Everything is working fine, but what I want to know is: is there an alternative? This code works perfectly for me, but it's quite lengthy, which is really not good. Can anyone suggest something?
What I tried:
threadingss = []
def formate(newurl1, schemess):
    portss = 21, 22, 80, 8080, 443, 8443, 3306, 445
    for x in portss:
        threadin = threading.Thread(target=connCheck, args(newurl, schemess, x))
        threadingss.append(threadin)
    for x in threadingss:
        x.start()
    for x in threadingss:
        x.join()
for x in urls_returning200:
    formate(x, '')
But that raises an error, so it doesn't work for me either.
Please suggest something.
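For reference, here is a minimal sketch of what the loop-based version might look like, assuming connCheck and urls_returning200 exist as in the code above. The attempt above fails because args(...) is written as a call instead of the keyword argument args=(...), and because newurl is never assigned inside the function; keeping the thread list local to the function also avoids re-joining old threads on later calls.
import threading

def formate(newurl1, schemess):
    # connCheck and urls_returning200 are assumed to exist, as in the code above
    newurl = newurl1.replace('http://', '')
    portss = (21, 22, 80, 8080, 443, 8443, 3306, 445)

    print "\n Host:- ", newurl1, "\n"
    print "\t[+] List of open ports:-"

    threads = []
    for port in portss:
        # args must be passed as a keyword argument: args=(...), not args(...)
        th = threading.Thread(target=connCheck, args=(newurl, schemess, port))
        threads.append(th)
        th.start()

    # wait for every thread, not only the last one
    for th in threads:
        th.join()

for x in urls_returning200:
    formate(x, '')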

Related

How to find _transferSize in a har file exported using browsermob-proxy in Python

I am trying to export a .har file using firefox-selenium-browsermob-proxy-python, using the code below:
bmp_loc = "/Users/project/browsermob-proxy-2.1.4/bin/browsermob-proxy"
server = Server(bmp_loc)
server.start()
proxy = server.create_proxy(params={'trustAllServers': 'true'})
selenium_proxy = proxy.selenium_proxy()
caps = webdriver.DesiredCapabilities.FIREFOX
caps['marionette'] = False
proxy_settings = {
    "proxyType": "MANUAL",
    "httpProxy": selenium_proxy.httpProxy,
    "sslProxy": selenium_proxy.sslProxy,
}
caps['proxy'] = proxy_settings
driver = webdriver.Firefox(desired_capabilities=caps)
proxy.new_har("generated_har",options={'captureHeaders': True})
driver.get("someurl")
browser_logs = proxy.har
I want to get _transferSize from the .har file to perform some analysis, but I am unable to get it; instead I only get an empty 'comment':
"redirectURL": "", "headersSize": 1023, "bodySize": 38, "comment": ""
whereas when I download the .har file manually from Firefox, I do get _transferSize.
Versions used:
browsermob_proxy==2.1.4
selenium==4.0.0
Can anybody please help me resolve this?
You can get _transferSize by adding headersSize and bodySize from the har file itself.
urls = ["https://google.com"]
for ur in urls:
    server = proxy.start_server()
    client = proxy.start_client()
    client.new_har("demo.com")
    # print(client.proxy)
    options = webdriver.ChromeOptions()
    options.add_argument("--disk-cache-size=0")
    options = {
        'enable_har': True
    }
    driver = webdriver.Chrome(seleniumwire_options=options)
    driver.request_interceptor = proxy.interceptor
    driver.get(ur)
    time.sleep(40)
    row_list = []
    json_dictionary = json.loads(driver.har)
    repeat_url_list = []
    repeat_urls = defaultdict(lambda: [])
    resp_size = 0
    count_url = 0
    url_time = 0
    status_list = []
    status_url = defaultdict(lambda: [])
    a_list = []
    with open("network_log2.har", "w", encoding="utf-8") as f:
        # f.write(json.dumps(driver.har))
        for i in json_dictionary['log']['entries']:
            f.write(str(i))
            f.write("\n")
            url = i['request']['url']
            a_list.append(url)
            timing = i['time']
            if timing > 2000:
                timing = round(timing / 2000, 1)
                url_time += 1
            status = i['response']['status']
            if status in status_list:
                status_url[status] = status_url[status] + 1
            else:
                status_url[status] = 1
                status_list.append(status)
            size = i['response']['headersSize'] + i['response']['bodySize']
            if size // 1000 > 500:
                resp_size += 1
            if url in repeat_url_list:
                repeat_urls[url] = 1
            else:
                repeat_url_list.append(url)
    rurl_count = len(repeat_urls)
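As a standalone illustration of that idea, here is a minimal sketch that computes an approximate transfer size per entry from a HAR file already saved as JSON on disk (the filename my_capture.har is just a placeholder). The HAR format uses -1 for unknown sizes, so those are treated as 0 here:
import json

# Approximate _transferSize per entry as headersSize + bodySize.
# In the HAR format a size of -1 means "unknown", so count it as 0.
with open("my_capture.har", encoding="utf-8") as f:
    har = json.load(f)

for entry in har["log"]["entries"]:
    resp = entry["response"]
    size = max(resp.get("headersSize", -1), 0) + max(resp.get("bodySize", -1), 0)
    print(entry["request"]["url"], size)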

How to fix error after creating exe with PyInstaller?

After creating the exe file, the following error is displayed.
The problem may be related to the TCP/IP connection.
I don't quite understand what the mistake is.
Traceback (most recent call last):
File "list_queue.py", line 56, in <module>
File "list_queue.py", line 17, in lenth_queue
File "pymqi\__init__.py", line 3024, in connect
File "pymqi\__init__.py", line 1649, in connect_tcp_client
File "pymqi\__init__.py", line 1624, in connect_with_options
pymqi.MQMIError: MQI Error. Comp: 2, Reason 2012: FAILED: MQRC_ENVIRONMENT_ERROR
Everything works in PyCharm: all the data I enter is accepted and the script runs fine.
My code:
import collections
import io

import pymqi

def lenth_queue():
    dict_queue = collections.defaultdict(dict)
    queue_manager = input('Enter the name of the queue manager: ')
    channel = input('Enter the name of the communication channel: ')
    host = input('Enter a name for the IP address of the queue manager: ')
    port = input('Enter the name of the queue manager port: ')
    conn_info = '%s(%s)' % (host, port)
    queue_type = pymqi.CMQC.MQQT_LOCAL
    qmgr = pymqi.connect(queue_manager, channel, conn_info)
    c = 0
    try:
        prefix = '*'
        pcf = pymqi.PCFExecute(qmgr, response_wait_interval=600000)
        attrs = []  # type: List[pymqi.MQOpts]
        attrs.append(pymqi.CFST(Parameter=pymqi.CMQC.MQCA_Q_NAME,
                                String=pymqi.ensure_bytes(prefix)))
        attrs.append(pymqi.CFIN(Parameter=pymqi.CMQC.MQIA_Q_TYPE,
                                Value=queue_type))
        attrs.append(pymqi.CFIL(Parameter=pymqi.CMQCFC.MQIACF_Q_ATTRS,
                                Values=[pymqi.CMQC.MQIA_CURRENT_Q_DEPTH]))
        object_filters = []
        object_filters.append(
            pymqi.CFIF(Parameter=pymqi.CMQC.MQIA_CURRENT_Q_DEPTH,
                       Operator=pymqi.CMQCFC.MQCFOP_GREATER,
                       FilterValue=0))
        response = pcf.MQCMD_INQUIRE_Q(attrs, object_filters)
        for queue_info in response:
            queue_name = queue_info[pymqi.CMQC.MQCA_Q_NAME]
            queue_depth = queue_info[pymqi.CMQC.MQIA_CURRENT_Q_DEPTH]
            dict_queue[queue_name.strip().decode()] = queue_depth
            c += 1
        writer_queue('Queue_lenth', dict_queue)
        return 'File written successfully'
    except pymqi.MQMIError as e:
        return 'Failed to connect'

def writer_queue(name, dict_q):
    txt = io.open(name + ".txt", "w", encoding="utf-8")
    for key in dict_q:
        txt.write('{}: {} message(s)'.format(key, dict_q[key]) + '\n')
    txt.close()

print(lenth_queue())
input('Press ENTER to exit')

Create dictionary with uneven lengths

This may sound a bit strange or silly, but I am trying to create a dictionary of lists that can be referenced. Looking at the attached picture of my Excel sheet may give you a better understanding.
I want the values of each row to go into a dictionary, with a key such as 0 and the different values under Hostname, IP, GroupName and Port. The dictionary works with just Hostname and IP because their lengths are the same, but when I try to add GroupName to the dict using several methods I found on Stack Overflow, it does not work because the lengths are not the same.
[screenshot of the Excel sheet]
Any help would be appreciated
Here is my code.
df = pd.read_excel("object.xlsx")
HostList = []
IPList = []
for x in ExcelHostList:
    for hostname in x:
        if hostname not in HostList:
            HostList.append(hostname)
for ips in ExcelIPList:
    for ipadd in ips:
        if ipadd not in IPList:
            IPList.append(ipadd)
dict1 = dict(zip(HostList, IPList))
dict1
{'test1': '1.1.1.1', 'test2': '2.2.2.2', 'test3': '3.3.3.3', 'test4': '4.4.4.4', 'test5': '5.5.5.5', 'test6': '6.6.6.6'}
I have tried making them dicts and then combining them:
ExcelHostList = (df["Hostname"].str.split("\n").to_dict())
ExcelIPList = (df["IP"].str.split("\n").to_dict())
ExcelGroupName = (df["GroupName"].to_dict())
dict2 = {z[0]: list(z[1:]) for z in zip(HostList, IPList, ExcelGroupName)}
dict2
{'test1': ['1.1.1.1', 'test-group-1'], 'test2': ['2.2.2.2', 'test-group-2'], 'test3': ['3.3.3.3', 'test-group-3']}
It's going to be very difficult to provide you with a good answer without more context about what exactly your end goal is for this data, but here are two ways you can consider structuring your data:
A list of dicts:
list_of_dicts = [
    dict(
        Hostname = ['test1', 'test2', 'test3',],
        IP = ['1.1.1.1', '2.2.2.2', '3.3.3.3',],
        GroupName = 'test-group-1',
        Port = [443, 22, 808, 80, 161],
    ),
    dict(
        Hostname = ['test4',],
        IP = ['4.4.4.4',],
        GroupName = 'test-group-2',
        Port = [443, 8080],
    ),
    dict(
        Hostname = ['test5', 'test6',],
        IP = ['5.5.5.5', '6.6.6.6',],
        GroupName = 'test-group-3',
        Port = [443],
    ),
]
print(list_of_dicts)
print(list_of_dicts[0])
print(list_of_dicts[0]["Hostname"])
print(list_of_dicts[0]["Hostname"][0])
A dict of dicts using the GroupNames as keys:
dict_of_dicts = {
    'test-group-1' : dict(
        Hostname = ['test1', 'test2', 'test3',],
        IP = ['1.1.1.1', '2.2.2.2', '3.3.3.3',],
        Port = [443, 22, 808, 80, 161],
    ),
    'test-group-2' : dict(
        Hostname = ['test4',],
        IP = ['4.4.4.4',],
        Port = [443, 8080],
    ),
    'test-group-3' : dict(
        Hostname = ['test5', 'test6',],
        IP = ['5.5.5.5', '6.6.6.6',],
        Port = [443],
    ),
}
print(dict_of_dicts)
print(dict_of_dicts['test-group-1'])
print(dict_of_dicts['test-group-1']["Hostname"])
print(dict_of_dicts['test-group-1']["Hostname"][0])
Examples in Python Tutor.
Also, if you already have this data in Excel, I would highly recommend looking into using pandas to read it into a DataFrame.
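As a rough sketch of that suggestion, something like the following could build the dict-of-dicts structure shown above straight from the sheet. The column names (Hostname, IP, GroupName, Port) come from the question, and the assumption that Hostname and IP cells hold newline-separated values comes from the question's str.split("\n") calls; the Port cell format is a guess, so ports are left as split strings:
import pandas as pd

# Build {GroupName: {"Hostname": [...], "IP": [...], "Port": [...]}} from the sheet.
df = pd.read_excel("object.xlsx")

dict_of_dicts = {}
for _, row in df.iterrows():
    dict_of_dicts[row["GroupName"]] = {
        "Hostname": str(row["Hostname"]).split("\n"),
        "IP": str(row["IP"]).split("\n"),
        # Port cell format is assumed to be newline-separated as well
        "Port": str(row["Port"]).split("\n"),
    }

print(dict_of_dicts.get("test-group-1"))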

Python 3: Multiple mails being sent using a 'for loop' instead of single mail

I have the script below, which works perfectly, but when I read the status of the Cisco devices using a for loop and send mail using the user-defined function send_mail(), I receive two mails instead of one. Any suggestions for getting a single mail will be greatly appreciated:
import sys
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from io import StringIO

import pandas as pd
from netmiko import ConnectHandler

username = 'cisco'
password = 'cisco'
hosts = ['10.0.10.100', '10.0.10.101']
platform = 'cisco_ios'

def send_mail():
    fromaddr = "#gmail.com"
    toaddr = "#hotmail.com"
    msg = MIMEMultipart()
    msg['From'] = '#gmail.com'
    msg['To'] = '#hotmail.com'
    msg['Subject'] = "This is Health check"
    msg.attach(MIMEText(status, 'plain'))
    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.starttls()
    server.login(fromaddr, "password")
    text = msg.as_string()
    server.sendmail(fromaddr, toaddr, text)
    server.quit()

for host in hosts:
    #print(host)
    connect = ConnectHandler(device_type=platform, ip=host, username=username, password=password)
    output = connect.send_command('terminal length 0', expect_string=r'#')
    output = connect.send_command('enable', expect_string=r'#')
    host_name = connect.send_command('show run | in hostname', expect_string=r'#')
    interface_status = connect.send_command(f'show ip int brief', expect_string=r'#')
    #print(interface_status)
    old_stdout = sys.stdout
    interface_result = StringIO()
    sys.stdout = interface_result
    sys.stdout = old_stdout
    data = pd.read_fwf(StringIO(interface_status), widths=[27, 16, 3, 7, 23, 8])
    status = " "
    for index, row in data.iterrows():
        if row[4] == 'administratively down' or row[4] == 'down':
            log = (f"\nInterface {row[0]} is down in {host_name}\n")
            status += log
    bgp_status = connect.send_command('show ip bgp summary | be N', expect_string=r'#')
    old_stdout = sys.stdout
    bgp_result = StringIO()
    sys.stdout = bgp_result
    sys.stdout = old_stdout
    bgp_data = pd.read_fwf(StringIO(bgp_status), delim_whitespace=True, header=None)
    for index, row in bgp_data.iterrows():
        if row[9] == 'Down' or row[9] == 'Idle' or row[9] == 'Active':
            bgp = (f"\nNeighbor {row[0]} is down in {host_name}\n")
            status += bgp
    send_mail()
I think I understand what your issue is. In short, you are calling send_mail() on every iteration of the for loop, whereas you actually want to collect the statuses for each host and then send a final status report as one email. Below is a short example to illustrate how you might do this - obviously you will have to adapt your code accordingly.
A couple of points:
Rather than having send_mail rely on a global status variable, pass in a status argument. This helps decouple functions from each other.
You might want to collect statuses for each host in a list, and then combine them in a logical manner when sending your status email.
String concatenation is slow. Better to build a list of strings, and join them using the string join function.
Here is some example code:
hosts = ['10.0.10.100', '10.0.10.101']

def send_mail(status=''):
    '''A placeholder function for testing'''
    print("Sending email:\n{status}".format(status=status))
    return

status_for_hosts = []
for host in hosts:
    status_list = []
    # An example of how you might collect statuses for a host
    status_list.append("Host {host_ip} is up".format(host_ip=host))
    # Use join() instead of string concatenation
    full_status = ''.join(status_list)
    status_for_hosts.append(full_status)
send_mail(status='\n'.join(status_for_hosts))

Basic client/server: passing strings properly

I am making a simple client/server that will pass strings back and forth. It is only slightly more advanced than an echo server. However, I am having some issue with passing the strings. I first ran into an issue where I was using the incorrect data type (not converting to utf-8), but am still having an issue.
Here is my server code:
import socket
import re

host = ''
port = 15008
backlog = 5
size = 1024
QUIT = bytes("QUIT", "utf-8")
BYE = bytes("BYE", "utf-8")
MATCH = bytes("MATCH", "utf-8")
NO_MATCH = bytes("NO MATCH", "utf-8")

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((host, port))
s.listen(backlog)
while 1:
    client, address = s.accept()
    regex = client.recv(size)
    if regex == QUIT:
        client.send(BYE)
        client.close()
        break
    string = client.recv(size)
    if string == QUIT:
        client.send(BYE)
        client.close()
        break
    if re.match(regex, string):
        client.send(MATCH)
    else:
        client.send(NO_MATCH)
and the client code:
import socket
host = 'localhost'
port = 15008
size = 1024
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host,port))
s.send(bytes('[ws]', "utf-8"))
s.send(bytes('s', "utf-8"))
data = s.recv(size)
print('Should match: ' + data)
s.close()
Right now both the server and the client just hang.
I needed to use decode:
data = s.recv(size).decode("utf-8")
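For completeness, the question's client with that decode applied might look like this (same host and port as above):
import socket

host = 'localhost'
port = 15008
size = 1024

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
s.send(bytes('[ws]', "utf-8"))
s.send(bytes('s', "utf-8"))

# recv() returns bytes; decode to str before concatenating with a str
data = s.recv(size).decode("utf-8")
print('Should match: ' + data)
s.close()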
