Modifying Python script to work with version 3.7.6 - python-3.x

I currently host the English Amiga Board (EAB) FTP, an FTP server filled with classic Amiga computer goodies. It is open to everyone, but users can also register their own account. I got help in creating a Python script that checks that the username being registered at the FTP is a valid account on the EAB forums and that it has at least 50 posts.
However, I've now updated the server (Fedora Server) and the Python version was updated to 3.7.6. The script has now stopped working and I'm unable to reach the original author.
# python ./eab_post_count.py -u Turran
Traceback (most recent call last):
  File "./eab_post_count.py", line 3, in <module>
    import sys, re, urllib2
ModuleNotFoundError: No module named 'urllib2'
This script should return "0" if the user exists and has at least 50 posts.
While I script in some languages, Python is not one of them, so I would be grateful for any help in modifying it so that it no longer uses urllib2, which I understand no longer exists in Python 3.
The script:
#!/usr/bin/env python
import sys, re, urllib2

titlecmd = "eab_post_count.py"
version = "1.15"
ignorecase = 0
lastpost = 0
userfound = False

if len(sys.argv) == 1: sys.argv[1:] = ["-h"]

def parseurl(url):
    if not url:
        print("Empty URL!")
        sys.exit(3)
    headers = { 'User-Agent' : 'Mozilla/5.0' }
    request = urllib2.Request(url, None, headers)
    try:
        response = urllib2.urlopen(request)
    except Exception:
        print('URL open failed, EAB down?')
        sys.exit(2)
    content = response.read()
    return content

def pagesearch(content, trigger, start, end):
    sane = 0
    needlestack = []
    while sane == 0:
        curpos = content.find(trigger)
        if curpos >= 0:
            testlen = len(content)
            content = content[curpos:testlen]
            curpos = content.find('"')
            testlen = len(content)
            content = content[curpos+1:testlen]
            curpos = content.find(end)
            needle = content[0:curpos]
            result = content[len(start):curpos]
            if needle.startswith(start):
                needlestack.append(result)
        else:
            sane = 1
    return needlestack

def unescape(s):
    s = s.replace("&lt;", "<")
    s = s.replace("&gt;", ">")
    # this has to be last:
    s = s.replace("&amp;", "&")
    return s

for idx, arg in enumerate(sys.argv):
    if arg == '-h':
        print(titlecmd + ' v' + version + ' by modrobert in 2017')
        print('Function: Returns the number of posts for a given EAB forum user.')
        print('Syntax  : ' + titlecmd + ' -u <username> [-i] [-l YYYY-MM-DD]')
        print('Options : -h this help text.')
        print('        : -i ignore case sensitivity in user name.')
        print('          -l last post after YYYY-MM-DD required.')
        print('          -u followed by user name.')
        print('Result  : 0 = user found, 1 = user not found, 2 = EAB down, 3 = other fail.')
        sys.exit(3)
    if arg == '-u':
        try:
            username = sys.argv[idx+1]
        except IndexError:
            print('Missing username.')
            sys.exit(3)
        usernameurl = re.sub('[ ]', '%20', username)
    if arg == '-i':
        ignorecase = 1
    if arg == '-l':
        lastpost = 1
        try:
            lpdate = sys.argv[idx+1]
        except IndexError:
            print('Missing date.')
            sys.exit(3)

try:
    username
except NameError:
    print('Username -u option required.')
    sys.exit(3)

if lastpost:
    eaburl = "http://eab.abime.net/memberlist.php?do=getall&pp=100&lastpostafter=" + lpdate + "&ausername=" + usernameurl
else:
    eaburl = "http://eab.abime.net/memberlist.php?do=getall&pp=100&ausername=" + usernameurl

eabcontent = parseurl(eaburl)

countlist = pagesearch(eabcontent, 'td class', 'alt2">', '</td>')
userlist = pagesearch(eabcontent, 'member.php?', '>', '</a>')

for idx, item in enumerate(userlist):
    # lets strip those fancy moderators and admins
    userstr = re.sub('<[^<]+?>', '', item)
    if ignorecase:
        if unescape(str.lower(userstr)) == str.lower(username):
            userfound = True
            break
    else:
        if unescape(str(userstr)) == username:
            userfound = True
            break

if userfound == False:
    print("User not found: " + username)
    sys.exit(1)

usercount = idx

for idx, item in enumerate(countlist):
    # hairy stuff below ;)
    if idx < (3 * usercount):
        continue
    stripitem = re.sub('[,]', '', item)
    try:
        print(int(stripitem))
        sys.exit(0)
    except Exception:
        continue
Thanks in advance!
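For anyone hitting the same wall: the only Python 3 incompatibility in the script above appears to be the urllib2 import, since the rest already calls print() as a function. A minimal, untested sketch of parseurl() using urllib.request (the Python 3 replacement for urllib2) could look like this; note that urlopen().read() now returns bytes, so the content is decoded before the string searches in pagesearch():
import sys
import urllib.request

def parseurl(url):
    if not url:
        print("Empty URL!")
        sys.exit(3)
    headers = {'User-Agent': 'Mozilla/5.0'}
    request = urllib.request.Request(url, None, headers)
    try:
        response = urllib.request.urlopen(request)
    except Exception:
        print('URL open failed, EAB down?')
        sys.exit(2)
    # read() returns bytes in Python 3; decode so pagesearch() can keep using str methods
    content = response.read().decode('utf-8', errors='replace')
    return content
With that in place, changing the script's import line to import sys, re, urllib.request should cover the bulk of the port.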

Related

NoneType object is not iterable python cisco meraki

I tried to write a script that will pull all the clients in my Meraki organization. An error occurred; I think the data that I want to iterate over can't be read by my code. I tried changing the shard to a specific one and installing the modules that I think are required, but I still get the same error. Thanks.
import sys, getopt, requests, json, time, datetime, os, sqlite3
#SECTION: GLOBAL VARIABLES: MODIFY TO CHANGE SCRIPT BEHAVIOUR
API_EXEC_DELAY = 0.21 #Used in merakirequestthrottler() to avoid hitting dashboard API max request rate
#connect and read timeouts for the Requests module in seconds
REQUESTS_CONNECT_TIMEOUT = 90
REQUESTS_READ_TIMEOUT = 90
#SECTION: GLOBAL VARIABLES AND CLASSES: DO NOT MODIFY
LAST_MERAKI_REQUEST = datetime.datetime.now() #used by merakirequestthrottler()
ARG_APIKEY = '' #DO NOT STATICALLY SET YOUR API KEY HERE
ARG_ORGNAME = '' #DO NOT STATICALLY SET YOUR ORGANIZATION NAME HERE
ORG_LIST = None #list of organizations, networks and MRs the used API key has access to
DEVICE_DB = None #SQLite3 database of all network devices
MAX_CLIENT_TIMESPAN = 2592000 #maximum timespan GET clients Dashboard API call supports
class c_Net:
def __init__(self):
id = ''
name = ''
shard = 'n132.meraki.com'
devices = []
class c_Organization:
def __init__(self):
id = ''
name = ''
shard = 'n132.meraki.com'
nets = []
#SECTION: General use functions
def merakirequestthrottler():
#makes sure there is enough time between API requests to Dashboard not to hit shaper
global LAST_MERAKI_REQUEST
if (datetime.datetime.now()-LAST_MERAKI_REQUEST).total_seconds() < (API_EXEC_DELAY):
time.sleep(API_EXEC_DELAY)
LAST_MERAKI_REQUEST = datetime.datetime.now()
return
def printhelp():
print(readMe)
#SECTION: Meraki Dashboard API communication functions
def getInventory(p_org):
#returns a list of all networks in an organization
merakirequestthrottler()
try:
r = requests.get('https://%s/api/v0/organizations/%s/inventory' % (p_org.shard, p_org.id), headers={'X-Cisco-Meraki-API-Key': ARG_APIKEY, 'Content-Type': 'application/json'}, timeout=(REQUESTS_CONNECT_TIMEOUT, REQUESTS_READ_TIMEOUT) )
except:
print('ERROR 01: Unable to contact Meraki cloud')
return(None)
if r.status_code != requests.codes.ok:
return(None)
return(r.json())
def getNetworks(p_org):
#returns a list of all networks in an organization
merakirequestthrottler()
try:
r = requests.get('https://%s/api/v0/organizations/%s/networks' % (p_org.shard, p_org.id), headers={'X-Cisco-Meraki-API-Key': ARG_APIKEY, 'Content-Type': 'application/json'}, timeout=(REQUESTS_CONNECT_TIMEOUT, REQUESTS_READ_TIMEOUT) )
except:
print('ERROR 07: Unable to contact Meraki cloud')
return(None)
if r.status_code != requests.codes.ok:
return(None)
return(r.json())
def getOrgs():
#returns the organizations' list for a specified admin, with filters applied
merakirequestthrottler()
try:
r = requests.get('https://n132.meraki.com/api/v0/organizations', headers={'X-Cisco-Meraki-API-Key': ARG_APIKEY, 'Content-Type': 'application/json'}, timeout=(REQUESTS_CONNECT_TIMEOUT, REQUESTS_READ_TIMEOUT) )
except:
print('ERROR 02: Unable to contact Meraki cloud')
return(None)
if r.status_code != requests.codes.ok:
return(None)
rjson = r.json()
orglist = []
listlen = -1
if ARG_ORGNAME.lower() == '/all':
for org in rjson:
orglist.append(c_Organization())
listlen += 1
orglist[listlen].id = org['id']
orglist[listlen].name = org['name']
else:
for org in rjson:
if org['name'] == ARG_ORGNAME:
orglist.append(c_Organization())
listlen += 1
orglist[listlen].id = org['id']
orglist[listlen].name = org['name']
return(orglist)
def getShardHost(p_org):
#Looks up shard URL for a specific org. Use this URL instead of 'api.meraki.com'
# when making API calls with API accounts that can access multiple orgs.
#On failure returns None
merakirequestthrottler()
try:
r = requests.get('https://n132.meraki.com/api/v0/organizations/%s/snmp' % p_org.id, headers={'X-Cisco-Meraki-API-Key': ARG_APIKEY, 'Content-Type': 'application/json'}, timeout=(REQUESTS_CONNECT_TIMEOUT, REQUESTS_READ_TIMEOUT) )
except:
print('ERROR 03: Unable to contact Meraki cloud')
return None
if r.status_code != requests.codes.ok:
return None
rjson = r.json()
return(rjson['hostname'])
def refreshOrgList():
global ORG_LIST
global DEVICE_DB
print('INFO: Starting org list refresh at %s...' % datetime.datetime.now())
flag_firstorg = True
orglist = getOrgs()
if not orglist is None:
for org in orglist:
print('INFO: Processing org "%s"' % org.name)
org.shard = 'n132.meraki.com'
orgshard = getShardHost(org)
if not orgshard is None:
org.shard = orgshard
netlist = getNetworks(org)
devlist = getInventory(org)
if not devlist is None and not netlist is None:
DEVICE_DB = sqlite3.connect(':memory:')
dbcursor = DEVICE_DB.cursor()
dbcursor.execute('''CREATE TABLE devices (serial text, name text, networkId text, mac text, type text, model text)''')
dbcursor.execute('''CREATE TABLE ouis (oui text)''')
DEVICE_DB.commit()
for device in devlist:
if not device['networkId'] is None:
devType = 'merakiDevice'
if device['model'][:2] in ['MR','MS','MX','Z1','Z3']:
devType = 'merakiNetworkDevice'
dbcursor.execute('''INSERT INTO devices VALUES (?,?,?,?,?,?)''', (device['serial'],device['name'],device['networkId'],device['mac'],devType,device['model']))
dbcursor.execute('''INSERT INTO ouis VALUES (?)''', (device['mac'][:8],))
DEVICE_DB.commit()
flag_firstnet = True
for net in netlist:
if net['type'] != 'systems manager': #ignore systems manager nets
dbcursor.execute('''SELECT serial, name, model FROM devices WHERE networkId = ? AND type = ?''', (net['id'],'merakiNetworkDevice'))
devicesofnet = dbcursor.fetchall()
if len(devicesofnet) > 0: #network has MR, MS, MX, Zx
if flag_firstnet:
if flag_firstorg:
ORG_LIST = []
lastorg = -1
flag_firstorg = False
ORG_LIST.append(org)
lastorg += 1
lastnet = -1
ORG_LIST[lastorg].nets = []
flag_firstnet = False
ORG_LIST[lastorg].nets.append(c_Net())
lastnet += 1
ORG_LIST[lastorg].nets[lastnet].id = net['id']
ORG_LIST[lastorg].nets[lastnet].name = net['name']
ORG_LIST[lastorg].nets[lastnet].shard = org.shard
ORG_LIST[lastorg].nets[lastnet].devices = []
for device in devicesofnet:
ORG_LIST[lastorg].nets[lastnet].devices.append(device)
LAST_ORGLIST_REFRESH = datetime.datetime.now()
print('INFO: Refresh complete at %s' % LAST_ORGLIST_REFRESH)
return None
def getclientlist(p_shardhost, p_serial, p_timespan):
merakirequestthrottler()
try:
r = requests.get('https://%s/api/v0/devices/%s/clients?timespan=%s' % (p_shardhost, p_serial, p_timespan), headers={'X-Cisco-Meraki-API-Key': ARG_APIKEY, 'Content-Type': 'application/json'}, timeout=(REQUESTS_CONNECT_TIMEOUT, REQUESTS_READ_TIMEOUT) )
except:
print('ERROR 04: Unable to contact Meraki cloud')
return(None)
if r.status_code != requests.codes.ok:
return(None)
return(r.json())
#SECTION: main
def main(argv):
global ARG_APIKEY
global ARG_ORGNAME
#initialize command line arguments
ARG_APIKEY = ''
ARG_ORGNAME = ''
arg_numresults = ''
arg_mode = ''
arg_filter = ''
#get command line arguments
try:
opts, args = getopt.getopt(argv, 'hk:o:m:')
except getopt.GetoptError:
printhelp()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
printhelp()
sys.exit()
elif opt == '-k':
ARG_APIKEY = arg
elif opt == '-o':
ARG_ORGNAME = arg
elif opt == '-m':
arg_mode = arg
#check that all mandatory arguments have been given
if ARG_APIKEY == '':
printhelp()
sys.exit(2)
#set defaults for empty command line arguments
if ARG_ORGNAME == '':
ARG_ORGNAME = '/all'
refreshOrgList()
if ORG_LIST is None or DEVICE_DB is None:
print('ERROR 05: No organizations with network devices access points for the specified API key')
sys.exit(2)
DEVcursor = DEVICE_DB.cursor()
for org in ORG_LIST:
flag_firstNet = True
orgClientList = []
reportFileName = 'clients_' + org.name + '_' + str(datetime.datetime.now()).replace(':','.') + '.csv'
print ('INFO: Processing org "%s"' % org.name)
for net in org.nets:
print ('INFO: Processing net "%s"' % net.name)
for dev in net.devices:
clients = getclientlist(org.shard, dev[0], MAX_CLIENT_TIMESPAN)
for client in clients:
DEVcursor.execute('''SELECT oui FROM ouis WHERE oui = ?''', (client['mac'][:8],))
matchingMerakiOuis = DEVcursor.fetchall()
if len(matchingMerakiOuis) == 0: #client device is not, in fact, a Meraki device neighbour
if flag_firstNet:
flag_firstNet = False
print('INFO: Creating file "' + reportFileName + '"')
try:
f = open(reportFileName, 'w')
f.write('id,mac,description,mdnsName,dhcpHostname,ip,vlan,switchport,usageKBSentToClient,usageKBRecvFromClient,networkId,networkName,reportedByDevSerial,reportedByDevName,reportedByDevModel\n')
except:
print('ERROR 06: Unable to open file "' + reportFileName + '" for writing')
sys.exit(2)
try:
f.write(str(client['id']) + ',' +
str(client['mac']) + ',' +
str(client['description']) + ',' +
str(client['mdnsName']) + ',' +
str(client['dhcpHostname']) + ',' +
str(client['ip']) + ',' +
str(client['vlan']) + ',' +
str(client['switchport']) + ',' +
str(int(client['usage']['sent'])) + ',' +
str(int(client['usage']['recv'])) + ',' +
str(net.id) + ',' +
str(net.name) + ',' +
str(dev[0]) + ',' +
str(dev[1]) + ',' +
str(dev[2]) + '\n' )
except:
print('ERROR 08: Unable to write to file "' + reportFileName + '"')
sys.exit(2)
DEVICE_DB.close()
try:
f.close()
except:
print ('INFO: Unable to close file (not open?)')
if __name__ == '__main__':
main(sys.argv[1:])
Traceback error:
Traceback (most recent call last):
  File "orgsclientcsv.py", line 351, in <module>
    main(sys.argv[1:])
  File "orgsclientcsv.py", line 308, in main
    for client in clients:
TypeError: 'NoneType' object is not iterable
The result should be a CSV file that gives me a list of phone MAC addresses, serial numbers and manufacturers, but the actual output is only the phone's MAC address manufacturer, like iPhone.
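Judging from the code above, getclientlist() returns None whenever the Dashboard API request fails or comes back with a non-OK status, and main() then iterates that None unconditionally, which matches the traceback. A defensive sketch (an assumption based on the posted code, not a verified fix) is to guard the loop in main():
for dev in net.devices:
    clients = getclientlist(org.shard, dev[0], MAX_CLIENT_TIMESPAN)
    if clients is None:
        # the request failed or returned a non-OK status; skip this device instead of crashing
        print('WARNING: no client list returned for device %s, skipping' % dev[0])
        continue
    for client in clients:
        # the rest of the original loop body stays unchanged
        pass
Logging the failing URL and status code here would also show whether the hard-coded 'n132.meraki.com' default shard is the real culprit.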

MaybeEncodingError: Error sending result: '<multiprocessing.pool.ExceptionWithTraceback object at 0x0000018F09F334A8>'

I am getting the error below when downloading files using multiprocessing. I am downloading Wikipedia page views, and they publish them per hour, so it might involve a lot of downloading. Any recommendation as to why this error is caused and how to solve it? Thanks.
MaybeEncodingError: Error sending result: '<multiprocessing.pool.ExceptionWithTraceback object at 0x0000018F09F334A8>'. Reason: 'TypeError("cannot serialize '_io.BufferedReader' object",)'
import fnmatch
import requests
import urllib.request
from bs4 import BeautifulSoup
import multiprocessing as mp

def download_it(download_file):
    global path_to_save_document
    filename = download_file[download_file.rfind("/")+1:]
    save_file_w_submission_path = path_to_save_document + filename
    request = urllib.request.Request(download_file)
    response = urllib.request.urlopen(request)
    data_content = response.read()
    with open(save_file_w_submission_path, 'wb') as wf:
        wf.write(data_content)
    print(save_file_w_submission_path)

pattern = r'*200801*'
url_to_download = r'https://dumps.wikimedia.org/other/pagecounts-raw/'
path_to_save_document = r'D:\Users\Jonathan\Desktop\Wikipedia\\'

def main():
    global pattern
    global url_to_download
    r = requests.get(url_to_download)
    data = r.text
    soup = BeautifulSoup(data, features="lxml")
    list_of_href_year = []
    for i in range(2):
        if i == 0:
            for link in soup.find_all('a'):
                lien = link.get('href')
                if len(lien) == 4:
                    list_of_href_year.append(url_to_download + lien + '/')
        elif i == 1:
            list_of_href_months = []
            list_of_href_pageviews = []
            for loh in list_of_href_year:
                r = requests.get(loh)
                data = r.text
                soup = BeautifulSoup(data, features="lxml")
                for link in soup.find_all('a'):
                    lien = link.get('href')
                    if len(lien) == 7:
                        list_of_href_months.append(loh + lien + '/')
            if not list_of_href_months:
                continue
            for lohp in list_of_href_months:
                r = requests.get(lohp)
                data = r.text
                soup = BeautifulSoup(data, features="lxml")
                for link in soup.find_all('a'):
                    lien = link.get('href')
                    if "pagecounts" in lien:
                        list_of_href_pageviews.append(lohp + lien)
    matching_list_of_href = fnmatch.filter(list_of_href_pageviews, pattern)
    matching_list_of_href.sort()
    with mp.Pool(mp.cpu_count()) as p:
        print(p.map(download_it, matching_list_of_href))

if __name__ == '__main__':
    main()
As Darkonaut proposed, I used multithreading instead.
Example:
from multiprocessing.dummy import Pool as ThreadPool
'''This function is used for the download the files using multi threading'''
def multithread_download_files_func(self,download_file):
try:
filename = download_file[download_file.rfind("/")+1:]
save_file_w_submission_path = self.ptsf + filename
'''Check if the download doesn't already exists. If not, proceed otherwise skip'''
if not os.path.exists(save_file_w_submission_path):
data_content = None
try:
'''Lets download the file'''
request = urllib.request.Request(download_file)
response = urllib.request.urlopen(request)
data_content = response.read()
except urllib.error.HTTPError:
'''We will do a retry on the download if the server is temporarily unavailable'''
retries = 1
success = False
while not success:
try:
'''Make another request if the previous one failed'''
response = urllib.request.urlopen(download_file)
data_content = response.read()
success = True
except Exception:
'''We will make the program wait a bit before sending another request to download the file'''
wait = retries * 5;
time.sleep(wait)
retries += 1
except Exception as e:
print(str(e))
'''If the response data is not empty, we will write as a new file and stored in the data lake folder'''
if data_content:
with open(save_file_w_submission_path, 'wb') as wf:
wf.write(data_content)
print(self.present_extract_RC_from_RS + filename)
except Exception as e:
print('funct multithread_download_files_func' + str(e))
'''This function is used as a wrapper before using multi threading in order to download the files to be stored in the Data Lake'''
def download_files(self,filter_files,url_to_download,path_to_save_file):
try:
self.ptsf = path_to_save_file = path_to_save_file + 'Step 1 - Data Lake\Wikipedia Pagecounts\\'
filter_files_df = filter_files
self.filter_pattern = filter_files
self.present_extract_RC_from_RS = 'WK Downloaded-> '
if filter_files_df == '*':
'''We will create a string of all the years concatenated together for later use in this program'''
reddit_years = [2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018]
filter_files_df = ''
'''Go through the years from 2005 to 2018'''
for idx, ry in enumerate(reddit_years):
filter_files_df += '*' + str(ry) + '*'
if (idx != len(reddit_years)-1):
filter_files_df += '&'
download_filter = list([x.strip() for x in filter_files_df.split('&')])
download_filter.sort()
'''If folder doesn't exist, create one'''
if not os.path.exists(os.path.dirname(self.ptsf)):
os.makedirs(os.path.dirname(self.ptsf))
'''We will get the website HTML elements using beautifulsoup library'''
r = requests.get(url_to_download)
data = r.text
soup = BeautifulSoup(data,features="lxml")
list_of_href_year = []
for i in range(2):
if i == 0:
'''Lets get all href available on this particular page. The first page is the year page'''
for link0 in soup.find_all('a'):
lien0 = link0.get('href')
'''We will check if the length is 4 which corresponds to a year'''
if len(lien0) == 4:
list_of_href_year.append(url_to_download + lien0 + '/')
elif i == 1:
list_of_href_months = []
list_of_href_pageviews = []
for loh in list_of_href_year:
r1 = requests.get(loh)
data1 = r1.text
'''Get the webpage HTML Tags'''
soup1 = BeautifulSoup(data1,features="lxml")
for link1 in soup1.find_all('a'):
lien1 = link1.get('href')
'''We will check if the length is 7 which corresponds to the year and month'''
if len(lien1) == 7:
list_of_href_months.append(loh + lien1 + '/')
for lohm in list_of_href_months:
r2 = requests.get(lohm)
data2 = r2.text
'''Get the webpage HTML Tags'''
soup2 = BeautifulSoup(data2,features="lxml")
for link2 in soup2.find_all('a'):
lien2 = link2.get('href')
'''We will now get all href that contains pagecounts in their name. We will have the files based on Time per hour. So 24 hrs is 24 files
and per year is 24*365=8760 files in minimum'''
if "pagecounts" in lien2:
list_of_href_pageviews.append(lohm + lien2)
existing_file_list = []
for file in os.listdir(self.ptsf):
filename = os.fsdecode(file)
existing_file_list.append(filename)
'''Filter the links'''
matching_fnmatch_list = []
if filter_files != '':
for dfilter in download_filter:
fnmatch_list = fnmatch.filter(list_of_href_pageviews, dfilter)
i = 0
for fnl in fnmatch_list:
'''Break for demo purpose only'''
if self.limit_record != 0:
if (i == self.limit_record) and (i != 0):
break
i += 1
matching_fnmatch_list.append(fnl)
'''If the user stated a filter, we will try to remove the files which are outside that filter in the list'''
to_remove = []
for efl in existing_file_list:
for mloh in matching_fnmatch_list:
if efl in mloh:
to_remove.append(mloh)
'''Lets remove the files which has been found outside the filter'''
for tr in to_remove:
matching_fnmatch_list.remove(tr)
matching_fnmatch_list.sort()
'''Multi Threading of 200'''
p = ThreadPool(200)
p.map(self.multithread_download_files_func, matching_fnmatch_list)
except Exception as e:
print('funct download_files' + str(e))
From the accepted answer, I understood that it is simply a matter of replacing from multiprocessing import Pool with from multiprocessing.dummy import Pool.
This worked for me.
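The reason the thread pool helps: multiprocessing.Pool has to pickle whatever a worker returns or raises in order to send it back to the parent process, and the exception raised here (most likely an urllib HTTPError) carries an open '_io.BufferedReader', which cannot be pickled. A thread-backed pool shares memory with the parent, so nothing is serialized. A minimal sketch of that swap applied to the original downloader (same path and helper names as the question, untested) looks like this:
import urllib.request
from multiprocessing.dummy import Pool as ThreadPool  # same API as multiprocessing.Pool, but threads

path_to_save_document = r'D:\Users\Jonathan\Desktop\Wikipedia\\'

def download_it(download_file):
    # unchanged worker: it only returns a plain string, nothing that needs pickling
    filename = download_file[download_file.rfind("/") + 1:]
    with urllib.request.urlopen(download_file) as response:
        data_content = response.read()
    with open(path_to_save_document + filename, 'wb') as wf:
        wf.write(data_content)
    return filename

def download_all(matching_list_of_href):
    # threads never serialize results or exceptions back to the parent process
    with ThreadPool(8) as p:
        return p.map(download_it, matching_list_of_href)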

TypeError: missing 1 required positional argument: 'value'

Experts, I am getting a missing-value error in my code, but I think the variables in the function are all set. I don't know why it happens.
$ python check_rsg_V0312.py
2018-03-20 13:05:49 === Script Start ===
2018-03-20 13:05:49 Monitoring via remote logon
The authenticity of host 'rpahost0 ([127.0.0.1]:7000)' can't be established.
RSA key fingerprint is 2d:f5:67:75:84:b6:24:45:e6:48:60:65:61:ca:69:f7.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'rpahost0' (RSA) to the list of known hosts.
Password:
Traceback (most recent call last):
  File "check_rsg_V0312.py", line 89, in <module>
    label = ssh_cmd(nLocalport, rsg_target, ouser, lookip, opasw, command,)
  File "check_rsg_V0312.py", line 79, in ssh_cmd
    print (ssh.before.decode(),ssh.after().decode())
TypeError: __init__() missing 1 required positional argument: 'value'
Below is my code. It is really strange that the error happens in the ssh_cmd function; for details, please review the comments in the code.
import os, sys, time, pexpect, re, subprocess, smtplib  # sys is needed for the sys.exit() call below
from email import encoders
from email.header import Header
from email.mime.text import MIMEText
#-----------------------------------------
dir = os.environ['HOME']
ouser = 'x02d726'
opasw = 'qwe12'
nLocalport = 7000
lookip = '127.0.0.1'
rsg_target = "rpahost0"
command = "ls -ltrh | grep tunnel | tail"
nstage = 0
mail_addr = 'cheng.huang@qq.com'
otp_file = dir + '/otplist/C591260'
otp_list = []
rsg_file = dir + '/.rsg_hosts'
known_hosts = dir + '/.ssh/known_hosts'
rsg_port = "auto"
#-----------------------------------------
def printlog(prompt):
year, mon, mday, hour, min, sec, wday, yday, isdst = time.localtime()
print("%04d-%02d-%02d %02d:%02d:%02d %s" % (year, mon , mday, hour, min,
sec, prompt))
def get_egw_name(ref_arr, key):
for oneline in ref_arr:
if (re.search(key, oneline)):
templine = oneline
oneline = re.sub('^\s+|\s+$','',oneline)
egw_ssg = re.split('\s+',oneline)[2]
result = re.split(':',egw_ssg)[0]
return result
def sendworker(to_addr):
from_addr = 'itk-bj.ericsson.se'
smtp_server = 'smtp.eamcs.ericsson.se'
msg = MIMEText('There is no otp left ,please input new OTP list',
'plain', 'utf-8')
msg['From'] = from_addr
msg['To'] = to_addr
msg['Subject'] = Header(u'OTP List is Blank', 'utf-8')
server = smtplib.SMTP(smtp_server, 25)
#server.set_debuglevel(1)
server.sendmail(from_addr, to_addr, msg.as_string())
server.quit()
def ssh_cmd(port, target, user, ip, pasw, cmd ):
printlog("=== Script Start ===")
printlog("Monitoring via remote logon")
time.sleep(1)
ssh = pexpect.spawn('/usr/bin/ssh -p %s -o HostKeyAlias=%s %s@%s %s' %
(port, target, user, ip, cmd ),timeout=6000)
try:
i = ssh.expect(['Password: ', 'continue connecting (yes/no)?'],
timeout=15)
if i == 0 :
print(ssh.before.decode(),ssh.after.decode())
ssh.sendline(pasw)
elif i == 1:
print(ssh.before.decode(),ssh.after.decode())
ssh.sendline('yes')
ssh.expect('Password: ')
print(ssh.before.decode(),ssh.after.decode())
ssh.sendline(pasw)
except pexpect.EOF:
print ("no connection EOF,please check RSG tunnel")
except pexpect.TIMEOUT:
print ("your pexpect has TIMEOUT")
else:
ssh.expect(pexpect.EOF)
print (ssh.before.decode(),ssh.after().decode()) # if I disable this line, there will be no error.
flag = ssh.before.decode()
return flag
ssh.close()
if __name__ == '__main__':
if os.path.exists(os.environ['HOME'] + "/.ssh/known_hosts"):
os.remove(known_hosts)
else:
pass
label = ssh_cmd(nLocalport, rsg_target, ouser, lookip, opasw, command)
if re.search('tunnel_check', str(label)):
nstage = 1
if (nstage == 0):
printlog("Tunnel was down and will re-establish now\n")
rsg = open (rsg_file,'r')
rsg_in = rsg.readlines()
rsg.close()
egwname = get_egw_name(rsg_in, rsg_target)
try :
otp = open(otp_file, 'r')
otp_arrary = otp.readlines()
otp.close()
for ot in otp_arrary:
ot = ot.strip()
ot = ot.replace('^\s*|\s*$', '')
otp_list.append(ot)
otp_num = len(otp_list) + 1
if (otp_num > 0):
os.remove(rsg_file)
try :
new_otp = otp_list[0]
except IndexError:
printlog('There is no otp left ,please input new OTP list')
sendworker(mail_addr)
sys.exit()
else:
out = open(rsg_file, 'w')
for line in rsg_in :
line = line.strip()
line = line.replace('^\s+|\s+$','')
if (re.match(egwname, line)):
temp_line = line
old_otp = re.split('\s+',temp_line)[5]
old_otp = old_otp.replace('^\s+|\s+$', '')
line = line.replace(old_otp, new_otp).replace('\\','')
printlog(line + "\n")
out.write(line + "\n")
out.close()
os.remove(otp_file)
time.sleep(1)
outotp = open(otp_file , 'w+')
i = 0
while (i < len(otp_list)):
outotp.write(otp_list[i] + "\n")
i += 1
outotp.close()
os.system("pkill -9 -f \"ssh\.\*-L " + str(nLocalport) + "\"")
os.system("sleep 10")
os.system("pkill -9 -f \"rtunnel\.\*-p " + str(nLocalport) + "\"")
os.system("nohup /opt/ericsson/itk/bin/rtunnel -d -q -g -p " + str(nLocalport) + "-rp auto " + rsg_target + " &")
os.system("sleep 10")
printlog("Kill the rtunnel process\n");
printlog("Tunnel is re-established again\n");
else:
sendworker(mail_addr)
except IOError:
print ("File is not accessible.")
else:
printlog("Tunnel OK")
As you can see, after disabling that line in the try...else block, the code works fine.
else:
    ssh.expect(pexpect.EOF)
    print (ssh.before.decode(),ssh.after().decode()) # if I disable this line, there will be no error.
    flag = ssh.before.decode()
    return flag
If you look carefully at your traceback, you will find the problem:
Instead of:
print (ssh.before.decode(),ssh.after().decode())
you should write:
print (ssh.before.decode(),ssh.after.decode())
after is an attribute that already holds the matched text, not a function, so it must not be called. In this particular case, after ssh.expect(pexpect.EOF) matches, ssh.after holds the pexpect.EOF exception class itself, and calling it tries to construct an EOF instance without its required value argument, which is exactly the __init__() error shown in the traceback.
BTW, I think you should put decoding/encoding in your pexpect constructor.
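As a concrete illustration of that last point (assuming pexpect 4.x, where spawn() accepts an encoding argument), a trimmed-down sketch of ssh_cmd() with no decode() calls at all might look like:
import pexpect

def ssh_cmd(port, target, user, ip, pasw, cmd):
    # encoding='utf-8' makes before/after plain strings instead of bytes
    ssh = pexpect.spawn('/usr/bin/ssh -p %s -o HostKeyAlias=%s %s@%s %s' %
                        (port, target, user, ip, cmd), timeout=6000, encoding='utf-8')
    i = ssh.expect(['Password: ', 'continue connecting (yes/no)?'], timeout=15)
    if i == 1:
        ssh.sendline('yes')
        ssh.expect('Password: ')
    print(ssh.before, ssh.after)  # note: after, not after()
    ssh.sendline(pasw)
    ssh.expect(pexpect.EOF)
    return ssh.before
This sketch drops the original try/except handling for brevity; the point is only the encoding argument and the attribute access.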

Target machine refused connection

from geopy.geocoders import Nominatim
import openpyxl

wb = openpyxl.load_workbook('#######.xlsx')
ws = wb.active
geolocator = Nominatim(timeout=60)

for i in range(2, 1810):
    count1 = 0
    count2 = 1
    address = str(ws['B'+str(i)].value)
    city = str(ws['C'+str(i)].value)
    state = str(ws['D'+str(i)].value)
    zipc = str(ws['F'+str(i)].value)
    result = None
    iden1 = address + ' ' + city + ' ' + state
    iden2 = city + ' ' + zipc + ' ' + state
    iden3 = city + ' ' + state
    print(iden1, iden2, iden3)
    print(geolocator.geocode(iden2).address)
    try:
        location1 = geolocator.geocode(iden1)
    except:
        pass
    try:
        location2 = geolocator.geocode(iden2)
    except:
        pass
    try:
        location3 = geolocator.geocode(iden3)
    except:
        pass
    count = None
    try:
        county1 = str(location1.address)
        county1_list = county1.split(", ")
        #print(county1_list)
        for q in county1_list:
            if 'county' in q.lower():
                if count == None:
                    count = q
    except:
        pass
    try:
        county2 = str(location2.address)
        county2_list = county2.split(", ")
        #print(county2_list)
        for z in county2_list:
            if 'county' in z.lower():
                if count == None:
                    count = z
    except:
        pass
    try:
        county3 = str(location3.address)
        county3_list = county3.split(", ")
        #print(county3_list)
        for j in county3_list:
            if 'county' in j.lower():
                if count == None:
                    count = j
    except:
        pass
    print(i, count)
    #ws['E'+str(i)] = count
    if count == 50:
        #wb.save("#####" +str(count2) +".xlsx")
        count2 += 1
        count1 = 0
Hello all, this code is pretty simple and uses geopy to extract county names using three different methods, named iden1, iden2, and iden3, which are combinations of address, city, state, and zip code. This ran fine for about 300 rows but then began to repeat the same county, and after restarting the script, it just spat out Nones. I put in the line print(geolocator.geocode(iden2).address) to find the error and got this error message.
Traceback (most recent call last):
  File "C:/Users/#####/Downloads/Web content/#####/####_county.py", line 19, in <module>
    print(geolocator.geocode(iden2).address)
  File "C:\Users\#####\AppData\Local\Programs\Python\Python36-32\lib\site-packages\geopy\geocoders\osm.py", line 193, in geocode
    self._call_geocoder(url, timeout=timeout), exactly_one
  File "C:\Users\#####\AppData\Local\Programs\Python\Python36-32\lib\site-packages\geopy\geocoders\base.py", line 171, in _call_geocoder
    raise GeocoderServiceError(message)
geopy.exc.GeocoderServiceError: [WinError 10061] No connection could be made because the target machine actively refused it
This script was working before but now does not. Is my IP being blocked from using geopy's database or something? Thanks for your help!
It looks like you're hitting their rate limiting. It seems that they ask that you limit your API requests to one per second. You can take a look here for their usage policy, where they list alternatives to using their API as well as constraints.
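If that is the cause, geopy ships a helper for exactly this situation (assuming geopy 1.16 or newer, which also expects a custom user_agent for Nominatim); a sketch of a throttled geocoder would be:
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter  # available from geopy 1.16 onwards

geolocator = Nominatim(user_agent="county-lookup-script", timeout=60)
# space consecutive calls at least one second apart, per the Nominatim usage policy,
# and retry a couple of times (with a pause) on transient service errors
geocode = RateLimiter(geolocator.geocode, min_delay_seconds=1.0,
                      max_retries=2, error_wait_seconds=5.0)

location = geocode("Austin Texas")
if location is not None:
    print(location.address)
Wrapping every geolocator.geocode(...) call in the question's loop with this geocode helper would keep the script under the one-request-per-second limit.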

python 2.7 thread not running correctly

I have been fighting with a threaded send of a string image over Python sockets for a while now and have had no luck on this issue.
The code for the client side is:
import socket
from PIL import ImageGrab #windows only screenshot
from threading import Thread
import win32api, win32con
import re
import win32com.client
import getpass
import time
import select
shell = win32com.client.Dispatch("WScript.Shell")
host = raw_input("SERVER:")
dm = win32api.EnumDisplaySettings(None, 0)
dm.PelsHeight = 800
dm.PelsWidth = 600
win32api.ChangeDisplaySettings(dm, 0)
port = 9000
def picture():
while 1:
image = ImageGrab.grab().resize((800,600)) #send screen as string
data = image.tostring()
sendme = (data)
try:
s.sendall(sendme)
print ("sent")
except socket.error as e:
print e
except Exception as e:
print e
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
pict = Thread(target=picture)
pict.start()
while 1:
socket_list = [s]
# Get the list sockets which are readable
read_sockets, write_sockets, error_sockets = select.select(socket_list , [], [])
for sock in read_sockets:
if sock == s:
data = sock.recv(1024)
print data
if "LEFTC" in data:
data = data.replace("LEFTC","")
x = re.findall(r'X(.*?)Y',data)
y = re.findall(r'Y(.*?)EOC',data)
x = str(x)
y = str(y)
#REPLACE CODE TO BE REWRITTEN
x = x.replace("[","").replace("]","").replace("'","").replace(" ","")
y = y.replace("[","").replace("]","").replace("'","").replace(" ","")
print(str(x) + ' X\n')
print(str(y) + ' Y\n')
try:
win32api.SetCursorPos((int(x),int(y))) #click time
win32api.mouse_event(win32con.MOUSEEVENTF_LEFTDOWN,int(x),int(y),0,0)
win32api.mouse_event(win32con.MOUSEEVENTF_LEFTUP,int(x),int(y),0,0)
except Exception as e:
print e
elif "RIGHTC" in data:
data = data.replace("RIGHTC","")
x = re.findall(r'X(.*?)Y',data)
y = re.findall(r'Y(.*?)EOC',data)
x = str(x)
y = str(y)
#REPLACE FUNCTION MAREKD FOR REWRITE
x = x.replace("[","").replace("]","").replace("'","").replace(" ","")
y = y.replace("[","").replace("]","").replace("'","").replace(" ","")
print(str(x) + ' X\n')
print(str(y) + ' Y\n')
try: #click
win32api.SetCursorPos((int(x),int(y)))
win32api.mouse_event(win32con.MOUSEEVENTF_RIGHTDOWN,int(x),int(y),0,0)
win32api.mouse_event(win32con.MOUSEEVENTF_RIGHTUP,int(x),int(y),0,0)
except Exception as e:
print e
else:
#This does not work correctly: only BACKSPACE and the else are working.
if "CAPS" in data:
shell.SendKeys('{CAPSLOCK}')
elif "CAPSOFF" in data:
shell.SendKeys('{CAPSLOCK}')
elif "BACKSPACE" in data:
shell.SendKeys('{BACKSPACE}')
elif "SHIFT" in data:
shell.SendKeys('+' + data)
else:
shell.SendKeys(data)
time.sleep(0.1)
server code is:
import socket
import pygame
from pygame.locals import *
from threading import Thread
x = y = 0
host = ""
#port defined here
port = 9000
#This list is used to make the library more pythonic and compact. This also leads to less source code.
keylist = [pygame.K_a,pygame.K_b,pygame.K_c,pygame.K_d,pygame.K_e,pygame.K_f,pygame.K_g,pygame.K_h,pygame.K_i,pygame.K_j,pygame.K_k,pygame.K_l,pygame.K_m,pygame.K_n,pygame.K_o,pygame.K_p,pygame.K_q,pygame.K_r,pygame.K_s,pygame.K_t,pygame.K_u,pygame.K_v,pygame.K_w,pygame.K_x,pygame.K_y,pygame.K_z]
key = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z','1','2','3','4','5','6','7','8','9','0']
# i/o function
def ioinput(sock):
while 1:
evt = pygame.event.poll() #has to be in the same while loop as the evt called or wont work.
if evt.type == pygame.MOUSEBUTTONDOWN and evt.button == 1: # one for left
x, y = evt.pos
command = ("LEFTC" + " " + "X" + str(x) + "Y" + str(y) + "EOC")
sock.sendall(command)
elif evt.type == pygame.MOUSEBUTTONDOWN and evt.button == 3: # 3 for right 2 is middle which support comes for later.
x, y = evt.pos
command = ("RIGHTC" + " " + "X" + str(x) + "Y" + str(y) + "EOC")
sock.sendall(command)
elif evt.type == pygame.KEYDOWN:
keyname = pygame.key.name(evt.key)
if evt.key == pygame.K_BACKSPACE:
command = ("BACKSPACE")
sock.sendall(command)
elif evt.key in keylist:
if keyname in key:
command = (keyname)
sock.sendall(command)
def mainloop():
message = []
while 1:
try:
while True:
try:
conn, addr = server.accept()
except socket.error:
break
screen = pygame.display.set_mode((800,600))
clickctrl = Thread(target=ioinput, args=(conn,))
clickctrl.start()
while 1:
d = conn.recv(1024*1024*1)
if not d:
break
else:
message.append(d)
data = ''.join(message)
image = pygame.image.frombuffer(data,(800,600),"RGB")
screen.blit(image,(0,0))
pygame.display.flip()
except Exception as e:
continue
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.setblocking(False)
server.bind((host, port))
server.listen(55000)
print "Listening on %s" % ("%s:%s" % server.getsockname())
# Main event loop.
mainloop()
The picture thread will run three to six times and then die; however, the keyboard and mouse input layer continues to operate. I suspect that the GIL is getting in my way. Am I correct, or am I missing something really simple here? This program is supposed to be a simplistic reverse remote desktop application.
I found the problem after speaking with a good friend. It turns out that my server-side while loop was set up so that it would break.
I fixed this by changing:
while 1:
    d = conn.recv(1024*1024*1)
    if not d:
        break
    else:
        message.append(d)
        data = ''.join(message)
        image = pygame.image.frombuffer(data,(800,600),"RGB")
        screen.blit(image,(0,0))
        pygame.display.flip()
to:
while 1:
    d = conn.recv(1024*1024*1)
    message.append(d)
    try:
        print("attempting to parse..")
        data = ''.join(message)
        image = pygame.image.frombuffer(data,(800,600),"RGB")
        screen.blit(image,(0,0))
        pygame.display.flip()
        print("received pic")
    except Exception as e:
        print e
        continue
Also, on the client side, in the picture thread I added a time.sleep(1) after the exception handling; otherwise the image does not come through correctly.
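Since each screenshot is sent as a raw 800x600 RGB string (800*600*3 bytes), an alternative to the sleep workaround, shown here only as a sketch in the same Python 2 style and not as what was actually done, is to read exactly one full frame per update before handing it to pygame.image.frombuffer():
FRAME_SIZE = 800 * 600 * 3  # bytes in one raw RGB frame at 800x600

def recv_frame(conn):
    # conn.recv() returns arbitrary chunks, so accumulate until a whole frame has arrived
    chunks = []
    received = 0
    while received < FRAME_SIZE:
        d = conn.recv(min(65536, FRAME_SIZE - received))
        if not d:
            return None  # peer closed the connection
        chunks.append(d)
        received += len(d)
    return ''.join(chunks)
The server loop would then call recv_frame(conn) once per iteration and blit the result, without needing the client-side time.sleep(1).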
