How to get the dictionary value after post request - python-3.x

# The module is named `requests` (plural); a Session object provides `s`.
import requests

s = requests.Session()
p = s.post(url, data=post_json_data, headers=headers)
# p.text is a plain str -- indexing it with ["message"] fails.
# .json() decodes the JSON body into a dict, so the lookup works.
posted_response = p.json()
print(posted_response["message"])
if posted_response["message"] == "Job added":
    print("Success")
My posted_response = {"message":"Job added"}
I am unable to get the value of posted_response["message"]. Is there any way to get a value from the dictionary after the post request is done?

You should change posted_response = p.text to posted_response = p.json().
Hope it can help.
Best regards.

Related

How to resolve value error for a Youtube Comment and Reply Scraper function using Youtube API and Python

I am trying to create a function that uses the YouTube API to fetch comments and their replies.
Please find my code below . This shows value error and doesn't work when am trying to store the data in DataFrame. I could understand that the issue is with the Reply Part but am not sure what went wrong. Any guidance or suggestions to tell me where I have gone wrong?
Update : I also noticed not all comments are getting extracted.
Thanks in Advance.
My Code :
def vid_comments():
    """Collect all top-level comments (and visible replies) for Video_ID.

    Returns:
        (comment_data, reply_data): two dicts suitable for pd.DataFrame.
        Comments and replies go into SEPARATE tables because their row
        counts differ -- mixing reply columns into the comment dict is
        what raised pandas' ValueError ("arrays must all be same length").
        The original code also kept the lists local to the function, so
        the module-level DataFrame code raised NameError.
    """
    usernames, comments, likes, dates, reply_counts = [], [], [], [], []
    replies, repliers = [], []
    response = resource.commentThreads().list(
        part='snippet,replies', videoId=Video_ID).execute()
    while response:
        for item in response['items']:
            top = item['snippet']['topLevelComment']['snippet']
            usernames.append(top['authorDisplayName'])
            comments.append(top['textDisplay'])
            likes.append(top['likeCount'])
            dates.append(top['publishedAt'])
            reply_count = item['snippet']['totalReplyCount']
            reply_counts.append(reply_count)
            # NOTE(review): the API embeds at most a few replies per thread;
            # fetching *every* reply needs a separate comments().list call
            # per thread -- likely why "not all comments" were extracted.
            # Guard on 'replies' too: totalReplyCount > 0 does not guarantee
            # the key is present, which caused KeyError on some threads.
            if reply_count > 0 and 'replies' in item:
                for reply in item['replies']['comments']:
                    info = reply['snippet']
                    replies.append(info['textDisplay'])
                    repliers.append(info['authorDisplayName'])
        if 'nextPageToken' in response:
            response = resource.commentThreads().list(
                part='snippet,replies', videoId=Video_ID,
                pageToken=response['nextPageToken']).execute()
        else:
            break
    comment_data = {'Comment': comments, 'Username': usernames,
                    'Comment Likes Count': likes, 'Comment Date': dates,
                    'Reply Count': reply_counts}
    reply_data = {'Reply': replies, 'Replied_By': repliers}
    return comment_data, reply_data


cmt_data, reply_data = vid_comments()
Comment_Section = pd.DataFrame(cmt_data)
Reply_Section = pd.DataFrame(reply_data)
This issue was solved with a few modifications to the code.

Python Akamai Sensor Data Generation with valid Cookie abck

I am trying to send twice post request to www.footlocker.it
sess = requests.session()
print("start-Point")
bot = BotDetector()
# First POST: submit the initial sensor_data payload to the Akamai endpoint.
payload = "{\"sensor_data\":\"" + bot.generatesensordata() + "\"}"
d = sess.post(url_ak, headers=headers_ak, data=payload, verify=False, timeout=15)
# The first response sets the _abck cookie; feed it back into the generator
# so the second payload can reference it.
bot.cookie = sess.cookies["_abck"]
# Second POST: the follow-up sensor data that should validate _abck.
payload = "{\"sensor_data\":\"" + bot.generatesensordata1() + "\"}"
d = sess.post(url_ak, headers=headers_ak, data=payload, verify=False, timeout=15)
print('Status code {},'.format(d.status_code))
print('Header {},'.format(d.headers))
Target is for getting valid cookie abck and success true as status code.
I have written some custom code for BotDetector, but I can't bypass the detection with a good result.
it means your sensor data is bad most likely. take a look at the akamai script for the site & compare it to what you have now.

Unable to get the response in POST method in Python

I am facing a unique problem.
Following is my code.
url = 'ABCD.com'
cookies={'cookies':'xyz'}
# Sends the cookie through requests' cookie-jar mechanism; this server
# apparently only honors a raw Cookie header, hence the 404 -- see the
# accepted fix further down. TODO confirm against the server's docs.
r = requests.post(url,cookies=cookies)
print(r.status_code)
json_data = json.loads(r.text)
print("Printing = ",json_data)
When I use the url and cookie in the POSTMAN tool and use POST request I get JSON response . But when I use the above code with POST request method in python I get
404
Printing = {'title': 'init', 'description': "Error: couldn't find a device with id: xxxxxxxxx in ABCD: d1"}
But when I use the following code i .e with GET request method
url = 'ABCD.com'
cookies = {'cookies': 'xyz'}
# BUG FIX: the text says this is the GET variant, but the snippet was a
# copy-paste of the POST call -- use requests.get here.
r = requests.get(url, cookies=cookies)
print(r.status_code)
json_data = json.loads(r.text)
print("Printing = ", json_data)
I get
200
Printing = {'apiVersion': '0.4.0'}
I am not sure why the POST method works with a JSON response in the POSTMAN tool but does not work when I try it from Python. I use the latest Python 3.6.4.
I finally found what was wrong following is correct way
url = 'ABCD.com'
cookies = {'cookies': 'xyz'}
# The server reads the cookie from a raw Cookie header rather than from the
# cookies= parameter, so serialize the dict into "name=value" pairs.
# (The original line had a syntax error: headers={'Cookie'=cookies).)
cookie_header = '; '.join('%s=%s' % (k, v) for k, v in cookies.items())
r = requests.post(url, headers={'Cookie': cookie_header})
print(r.status_code)
json_data = json.loads(r.text)
print("Printing = ", json_data)
The web page was expecting the cookie to be sent in the headers, and with that change I got the response correctly.

BurpSuite API - Get Response from edited requests

I have a problem with the Burp Suite API: I can't find a proper function to print out the response for edited requests. I'm developing a new plugin for Burp Suite with Python. My script simply takes requests from the proxy, edits the headers, and sends them again.
from burp import IBurpExtender
from burp import IHttpListener
import re,urllib2
class BurpExtender(IBurpExtender, IHttpListener):
    """Burp Suite extension (runs under Jython 2.7) that rewrites every
    parameter value in intercepted POST bodies before they are sent.

    The flattened paste lost all indentation; structure restored here.
    Single-argument print(...) calls behave identically in Jython 2.7
    and parse under Python 3.
    """

    def registerExtenderCallbacks(self, callbacks):
        # Keep the callbacks/helpers objects for use in the listener.
        self._callbacks = callbacks
        self._helpers = callbacks.getHelpers()
        callbacks.setExtensionName("Burp Plugin Python Demo")
        callbacks.registerHttpListener(self)
        return

    def processHttpMessage(self, toolFlag, messageIsRequest, currentRequest):
        # only process requests
        if not messageIsRequest:
            return
        requestInfo = self._helpers.analyzeRequest(currentRequest)
        headers = requestInfo.getHeaders()
        if requestInfo.getMethod() == "GET":
            print("GET")
            print(requestInfo.getUrl())
            # NOTE(review): this re-issues the GET outside Burp's own
            # session/proxy, so the response is fetched twice -- confirm
            # this is intended.
            response = urllib2.urlopen(requestInfo.getUrl())
            print(response)
        elif requestInfo.getMethod() == "POST":
            print("POST")
            print(requestInfo.getUrl())
            # The body starts at getBodyOffset(); replace every
            # "key=value" value with the <xss> marker.
            bodyBytes = currentRequest.getRequest()[requestInfo.getBodyOffset():]
            bodyStr = self._helpers.bytesToString(bodyBytes)
            bodyStr = re.sub(r'=(\w+)', '=<xss>', bodyStr)
            newMessage = self._helpers.buildHttpMessage(headers, bodyStr)
            print("Sending modified message:")
            print("----------------------------------------------")
            print(self._helpers.bytesToString(newMessage))
            print("----------------------------------------------\n\n")
            currentRequest.setRequest(newMessage)
        return
You need to print the response but you don't do anything in case messageIsRequest is false. When messageIsRequest is false it means that the currentRequest is a response and you can print out the response as you did for the request. I did it in Java like this:
def processHttpMessage(self, toolFlag, messageIsRequest, httpRequestResponse):
    """Sketch: when messageIsRequest is false, the same callback carries the
    response, so it can be retrieved and printed there.

    (The original fragment used `....` and an `else` without a colon,
    which is not valid Python.)
    """
    if messageIsRequest:
        # ... existing request-side handling goes here ...
        pass
    else:
        HTTPMessage = httpRequestResponse.getResponse()
        print(HTTPMessage)
There is even a method that lets you bind request and response together when using a proxy. It can be found in IInterceptedProxyMessage:
/**
* This method retrieves a unique reference number for this
* request/response.
*
* @return An identifier that is unique to a single request/response pair.
* Extensions can use this to correlate details of requests and responses
* and perform processing on the response message accordingly.
*/
int getMessageReference();
I don't think it is supported for HTTPListeners.
I am writing the extensions in Java and tried to translate them to Python for this answer. I haven't tested this code, and some bugs might have been introduced by the translation.

How to use urllib with username/password authentication in python 3?

Here is my problem with urllib in python 3.
I wrote a piece of code which works well in Python 2.7 and is using urllib2. It goes to the page on Internet (which requires authorization) and grabs me the info from that page.
The real problem for me is that I can't make my code working in python 3.4 because there is no urllib2, and urllib works differently; even after few hours of googling and reading I got nothing. So if somebody can help me to solve this, I'd really appreciate that help.
Here is my code:
# Python 2 only: urllib2 and str-based encodestring were removed in Python 3.
request = urllib2.Request('http://mysite/admin/index.cgi?index=127')
# HTTP Basic auth value is base64("login:password"); strip the trailing
# newline that encodestring appends.
base64string = base64.encodestring('%s:%s' % ('login', 'password')).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
result = urllib2.urlopen(request)
resulttext = result.read()
Thankfully to you guys I finally figured out the way it works.
Here is my code:
# Python 3 equivalent: urllib.request plus bytes-aware base64 handling.
request = urllib.request.Request('http://mysite/admin/index.cgi?index=127')
credentials = '%s:%s' % ('login', 'password')
token = base64.b64encode(credentials.encode('ascii')).decode('utf-8')
request.add_header("Authorization", "Basic %s" % token)
result = urllib.request.urlopen(request)
resulttext = result.read()
After all, there is one more difference with urllib: the resulttext variable in my case had the type of <bytes> instead of <str>, so to do something with text inside it I had to decode it:
text = resulttext.decode(encoding='utf-8',errors='ignore')
What about urllib.request ? It seems it has everything you need.
import base64
import urllib.request

request = urllib.request.Request('http://mysite/admin/index.cgi?index=127')
# BUG FIX: the credentials must actually be base64-encoded (the original
# skipped b64encode), and the header value must be str -- interpolating a
# bytes object would send the literal text "Basic b'...'".
base64string = base64.b64encode(bytes('%s:%s' % ('login', 'password'), 'ascii'))
request.add_header("Authorization", "Basic %s" % base64string.decode('ascii'))
result = urllib.request.urlopen(request)
resulttext = result.read()
An alternative using OpenerDirector that installs the auth headers for all future urllib requests
# Install an opener so the Authorization header rides on every subsequent
# urllib request made in this process.
credentials = '{}:{}'.format(login, password).encode()
login_pass = base64.b64encode(credentials).decode()
opener = urllib.request.build_opener()
opener.addheaders = [('Authorization', 'Basic ' + login_pass)]
urllib.request.install_opener(opener)
response = urllib.request.urlopen(API_URL)
print(response.read().decode())
A further example using HTTPBasicAuthHandler although a bit more work required if need to send credentials unconditionally:
# Send credentials unconditionally (without waiting for a 401 challenge)
# by marking the URL as already authenticated.
password_mgr = urllib.request.HTTPPasswordMgrWithPriorAuth()
password_mgr.add_password(None, API_URL, login, password, is_authenticated=True)
# BUG FIX: use urllib.request consistently -- the original mixed in bare
# `request.` calls, which are NameErrors unless `from urllib import request`.
auth_handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
opener = urllib.request.build_opener(auth_handler)
urllib.request.install_opener(opener)
response = urllib.request.urlopen(API_URL)
print(response.read().decode())

Resources