how to set up "Flask" webhook for "telebot"? - python-3.x

Locally everything works: with the help of "ngrok" I set up a webhook. I uploaded the code to "pythonanywhere" and now the handlers sit idle.
There is such a view for Flask
# Flask view that receives Telegram webhook updates.
# Bug fix 1: the decorator must start with "@" — written as "#app.route"
# it is just a comment and the route is never registered.
@app.route("/", methods=["GET", "POST"])
def receive_update():
    """Parse an incoming Telegram update and dispatch it to the bot handlers.

    Telegram POSTs each update as a JSON body; anything that is not
    JSON-typed is rejected with 403.
    """
    if request.headers.get("content-type") == "application/json":
        json_string = request.get_data().decode("utf-8")
        update = telebot.types.Update.de_json(json_string)
        # Bug fix 2: the original referenced the method without calling it,
        # so no handler ever ran.  It must be invoked with a *list* of updates.
        bot.process_new_updates([update])
        return ""
    else:
        abort(403)
telegram sends updates to the server, but I'm confused by the presence of such entities
<telebot.types.User object at 0x7fc907fb8b0>,
instead of key: value.
{'update_id': ******, 'message': {'content_type': 'text', 'id': ****, 'message_id': ****, 'from_user': <telebot.types.User object at 0x7f5c07fb8b0>, 'date': *********, 'chat': <telebot.types.Chat object at 0x7f5c907fb82>, 'sender_chat': None, 'forward_from': None, 'forward_from_chat': None, 'forward_from_message_id': None, 'forward_signature': None, 'forward_sender_name': None, 'forward_date': None, 'is_automatic_forward': None, 'reply_to_message': None, 'via_bot': None, 'edit_date': None, 'has_protected_content': None, 'media_group_id': None, 'author_signature': None, 'text': '/start', 'entities': [<telebot.types.MessageEntity object at 0x7f5c07fba00>], 'caption_entities': None, 'audio': None, 'document': None, 'photo': None, 'sticker': None, 'video': None, 'video_note': None, 'voice': None, 'caption': None, 'contact': None, 'location': None, 'venue': None, 'animation': None, 'dice': None, 'new_chat_member': None, 'new_chat

Related

How to extract replies from Facebook comments stored in list of dictionaries?

I'm working with a Facebook scraping, however, I'm having difficulties working with the responses to the comments.
For the collection of comments, this is the code:
import pandas as pd
import facebook_scraper

# Posts to scrape, plus scraper configuration.
post_ids = ['1014199301965488']
options = {
    "comments": True,
    "reactors": True,
    "allow_extra_requests": True,
}
# Path to a Facebook cookie file (must be generated beforehand).
cookies = "/content/cookies.txt"

# Gather every comment of every post; each entry may itself hold replies.
replies = []
for post in facebook_scraper.get_posts(post_urls=post_ids, cookies=cookies, options=options):
    replies.extend(post['comments_full'])
Basically, each comment can have more than one answer. From what I understand, each answer is stored in a list of dictionaries. Here is an example of some replies.
[{'comment_id': '1014587065260045', 'comment_url': 'https://facebook.com/1014587065260045', 'commenter_id': '100002664042251', 'commenter_url': 'https://facebook.com/anderson.ritmoapoesia?fref=nf&rc=p&__tn__=R', 'commenter_name': 'Anderson Ritmoapoesia', 'commenter_meta': None, 'comment_text': 'Boa irmão!\nTmj', 'comment_time': datetime.datetime(2015, 8, 17, 0, 0), 'comment_image': 'https://scontent.xx.fbcdn.net/m1/v/t6/An_UvxJXg9tdnLU3Y5qjPi0200MLilhzPXUgxzGjQzUMaNcmjdZA6anyrngvkdub33NZzZhd51fpCAEzNHFhko5aKRFP5fS1w_lKwYrzcNLupv27.png?_nc_eui2=AeH0Z9O-PPSBg9l8FeLeTyUHMiCX3WNpzi0yIJfdY2nOLeM4yQsYnDi7Fo-bVaW2oRmOKEYPCsTFZnVoJbmO2yOH&ccb=10-5&oh=00_AT-4ep4a5bI4Gf173sbCjcAhS7gahF9vcYuM9GaQwJsI9g&oe=6301E8F9&_nc_sid=55e238', 'comment_reactors': [{'name': 'Marcio J J Tomaz', 'link': 'https://facebook.com/marcioroberto.rodriguestomaz?fref=pb', 'type': 'like'}], 'comment_reactions': {'like': 1}, 'comment_reaction_count': 1}]
[{'comment_id': '1014272461958172', 'comment_url': 'https://facebook.com/1014272461958172', 'commenter_id': '100009587231687', 'commenter_url': 'https://facebook.com/cassia.danyelle.94?fref=nf&rc=p&__tn__=R', 'commenter_name': 'Cassia Danyelle', 'commenter_meta': None, 'comment_text': 'Concordo!', 'comment_time': datetime.datetime(2015, 8, 17, 0, 0), 'comment_image': None, 'comment_reactors': [], 'comment_reactions': None, 'comment_reaction_count': None}, {'comment_id': '1014275711957847', 'comment_url': 'https://facebook.com/1014275711957847', 'commenter_id': '1227694094', 'commenter_url': 'https://facebook.com/marcusvinicius.espiritosanto?fref=nf&rc=p&__tn__=R', 'commenter_name': 'Marcus Vinicius Espirito Santo', 'commenter_meta': None, 'comment_text': 'Concordo Marcão a única observação que faço é: a justiça deveria funcionar sempre dessa forma rápida e precisa, como neste caso.', 'comment_time': datetime.datetime(2015, 8, 17, 0, 0), 'comment_image': 'https://scontent.xx.fbcdn.net/m1/v/t6/An_UvxJXg9tdnLU3Y5qjPi0200MLilhzPXUgxzGjQzUMaNcmjdZA6anyrngvkdub33NZzZhd51fpCAEzNHFhko5aKRFP5fS1w_lKwYrzcNLupv27.png?_nc_eui2=AeH0Z9O-PPSBg9l8FeLeTyUHMiCX3WNpzi0yIJfdY2nOLeM4yQsYnDi7Fo-bVaW2oRmOKEYPCsTFZnVoJbmO2yOH&ccb=10-5&oh=00_AT-4ep4a5bI4Gf173sbCjcAhS7gahF9vcYuM9GaQwJsI9g&oe=6301E8F9&_nc_sid=55e238', 'comment_reactors': [{'name': 'Marcos Alexandre de Souza', 'link': 'https://facebook.com/senseimarcos?fref=pb', 'type': 'like'}], 'comment_reactions': {'like': 1}, 'comment_reaction_count': 1}]
[{'comment_id': '1014367808615304', 'comment_url': 'https://facebook.com/1014367808615304', 'commenter_id': '100005145968202', 'commenter_url': 'https://facebook.com/flavioluis.schnurr?fref=nf&rc=p&__tn__=R', 'commenter_name': 'Flavio Luis Schnurr', 'commenter_meta': None, 'comment_text': 'E porque você não morre ! Quem apoia assassinos também é!', 'comment_time': datetime.datetime(2015, 8, 17, 0, 0), 'comment_image': None, 'comment_reactors': [], 'comment_reactions': None, 'comment_reaction_count': None}]
[{'comment_id': '1014222638629821', 'comment_url': 'https://facebook.com/1014222638629821', 'commenter_id': '100009383732423', 'commenter_url': 'https://facebook.com/profile.php?id=100009383732423&fref=nf&rc=p&__tn__=R', 'commenter_name': 'Anerol Ahnuc', 'commenter_meta': None, 'comment_text': 'Hã?', 'comment_time': datetime.datetime(2015, 8, 17, 0, 0), 'comment_image': 'https://scontent.xx.fbcdn.net/m1/v/t6/An_UvxJXg9tdnLU3Y5qjPi0200MLilhzPXUgxzGjQzUMaNcmjdZA6anyrngvkdub33NZzZhd51fpCAEzNHFhko5aKRFP5fS1w_lKwYrzcNLupv27.png?_nc_eui2=AeH0Z9O-PPSBg9l8FeLeTyUHMiCX3WNpzi0yIJfdY2nOLeM4yQsYnDi7Fo-bVaW2oRmOKEYPCsTFZnVoJbmO2yOH&ccb=10-5&oh=00_AT-4ep4a5bI4Gf173sbCjcAhS7gahF9vcYuM9GaQwJsI9g&oe=6301E8F9&_nc_sid=55e238', 'comment_reactors': [], 'comment_reactions': {'like': 1}, 'comment_reaction_count': 1}, {'comment_id': '1014236578628427', 'comment_url': 'https://facebook.com/1014236578628427', 'commenter_id': '100009383732423', 'commenter_url': 'https://facebook.com/profile.php?id=100009383732423&fref=nf&rc=p&__tn__=R', 'commenter_name': 'Anerol Ahnuc', 'commenter_meta': None, 'comment_text': 'Eu hein?', 'comment_time': datetime.datetime(2015, 8, 17, 0, 0), 'comment_image': None, 'comment_reactors': [], 'comment_reactions': None, 'comment_reaction_count': None}]
[{'comment_id': '1014435731941845', 'comment_url': 'https://facebook.com/1014435731941845', 'commenter_id': '100003779689547', 'commenter_url': 'https://facebook.com/marcia.pimentel.5454?fref=nf&rc=p&__tn__=R', 'commenter_name': 'Márcia Pimentel', 'commenter_meta': None, 'comment_text': 'Não é que sejam defensores Marcondes Martins,sim,eles falam que ele era um ser humano que errou e que podia ter pago de outra maneira,e não com a morte,porque só quem tem direito de tirar a vida das pessoas é Aquele que nos deu... Jesus.', 'comment_time': datetime.datetime(2015, 8, 17, 0, 0), 'comment_image': None, 'comment_reactors': [], 'comment_reactions': None, 'comment_reaction_count': None}, {'comment_id': '1014445965274155', 'comment_url': 'https://facebook.com/1014445965274155', 'commenter_id': '100000515531313', 'commenter_url': 'https://facebook.com/DJ.Marcondes.Martins?fref=nf&rc=p&__tn__=R', 'commenter_name': 'Marcondes Martins', 'commenter_meta': None, 'comment_text': 'Marcia Márcia Pimentel ta teoria é tudo bonitinho. Mas bandidos matam, estupram, humilham pessoas de bem e a justiça ainda protege esses vermes, a sociedade ja está cansada disso.', 'comment_time': datetime.datetime(2015, 8, 17, 0, 0), 'comment_image': None, 'comment_reactors': [], 'comment_reactions': None, 'comment_reaction_count': None}]
Based on the data above, I only need the values for 'comment_text', however, I've never worked with this type of structure. Is it possible to extract each occurrence in 'comment_text'?
Since you're working with a list of dictionaries, I would use a list comprehension to loop the items in the list, and then extract only the key I wanted from each dictionary:
replies.append([reply['comment_text'] for reply in p])
An example of what it would do
# Example input: one reply thread, i.e. a list of comment dictionaries.
p = [
    {'comment_id': '1014272461958172',
     'comment_url': 'https://facebook.com/1014272461958172',
     'commenter_id': '100009587231687',
     'commenter_url': 'https://facebook.com/cassia.danyelle.94?fref=nf&rc=p&__tn__=R',
     'commenter_name': 'Cassia Danyelle',
     'commenter_meta': None,
     'comment_text': 'Concordo!',
     'comment_time': datetime.datetime(2015, 8, 17, 0, 0),
     'comment_image': None,
     'comment_reactors': [],
     'comment_reactions': None,
     'comment_reaction_count': None},
    {'comment_id': '1014275711957847',
     'comment_url': 'https://facebook.com/1014275711957847',
     'commenter_id': '1227694094',
     'commenter_url': 'https://facebook.com/marcusvinicius.espiritosanto?fref=nf&rc=p&__tn__=R',
     'commenter_name': 'Marcus Vinicius Espirito Santo',
     'commenter_meta': None,
     'comment_text': 'Concordo Marcão a única observação que faço é: a justiça deveria funcionar sempre dessa forma rápida e precisa, como neste caso.',
     'comment_time': datetime.datetime(2015, 8, 17, 0, 0),
     'comment_image': 'https://scontent.xx.fbcdn.net/m1/v/t6/An_UvxJXg9tdnLU3Y5qjPi0200MLilhzPXUgxzGjQzUMaNcmjdZA6anyrngvkdub33NZzZhd51fpCAEzNHFhko5aKRFP5fS1w_lKwYrzcNLupv27.png?_nc_eui2=AeH0Z9O-PPSBg9l8FeLeTyUHMiCX3WNpzi0yIJfdY2nOLeM4yQsYnDi7Fo-bVaW2oRmOKEYPCsTFZnVoJbmO2yOH&ccb=10-5&oh=00_AT-4ep4a5bI4Gf173sbCjcAhS7gahF9vcYuM9GaQwJsI9g&oe=6301E8F9&_nc_sid=55e238',
     'comment_reactors': [{'name': 'Marcos Alexandre de Souza',
                           'link': 'https://facebook.com/senseimarcos?fref=pb',
                           'type': 'like'}],
     'comment_reactions': {'like': 1},
     'comment_reaction_count': 1},
]
# Keep only the 'comment_text' value of every reply in the thread.
texts = [reply['comment_text'] for reply in p]
print(texts)  # ['Concordo!', 'Concordo Marcão a única observação que faço é: a justiça deveria funcionar sempre dessa forma rápida e precisa, como neste caso.']

List to data frame in python

I'm trying to grab transaction data from plaid and input it into a data frame with clean columns. The "before" format is a list as excerpted below.
My goal is that the "after" format is a data frame where there is a column for each name in the list (e.g., "account_id" or "amount") such that I can then parse the list and insert values in each column.
I'm new to python--I'm fluent in r/dplyr but the syntax is confusing me.
Thanks in advance!
{'account_id': 'nKllGzvJQeIpZwvxlv1Mhw98Zdo57Ec6ZNEm5',
'account_owner': None,
'amount': 4.33,
'authorized_date': datetime.date(2020, 12, 8),
'authorized_datetime': None,
'category': ['Food and Drink', 'Restaurants', 'Coffee Shop'],
'category_id': '13005043',
'check_number': None,
'date': datetime.date(2020, 12, 8),
'datetime': None,
'iso_currency_code': 'USD',
'location': {'address': None,
'city': None,
'country': None,
'lat': None,
'lon': None,
'postal_code': None,
'region': None,
'store_number': None},
'merchant_name': 'Starbucks',
'name': 'Starbucks',
'payment_channel': 'in store',
'payment_meta': {'by_order_of': None,
'payee': None,
'payer': None,
'payment_method': None,
'payment_processor': None,
'ppd_id': None,
'reason': None,
'reference_number': None},
'pending': False,
'pending_transaction_id': None,
'personal_finance_category': None,
'transaction_code': None,
'transaction_id': 'llvvGK61QjH1eX8yP8qZC8BxB3WegMFZrXRjr',
'transaction_type': 'place',
'unofficial_currency_code': None}
enter code here
In this case, I would use the DataFrame constructor, in the list-of-records format.
Example:
import datetime
import pandas as pd

# One Plaid transaction exactly as the API client returns it.  The nested
# objects ('location', 'payment_meta') are kept as plain dicts for now.
transaction = {
    'account_id': 'nKllGzvJQeIpZwvxlv1Mhw98Zdo57Ec6ZNEm5',
    'account_owner': None,
    'amount': 4.33,
    'authorized_date': datetime.date(2020, 12, 8),
    'authorized_datetime': None,
    'category': ['Food and Drink', 'Restaurants', 'Coffee Shop'],
    'category_id': '13005043',
    'check_number': None,
    'date': datetime.date(2020, 12, 8),
    'datetime': None,
    'iso_currency_code': 'USD',
    'location': {
        'address': None,
        'city': None,
        'country': None,
        'lat': None,
        'lon': None,
        'postal_code': None,
        'region': None,
        'store_number': None,
    },
    'merchant_name': 'Starbucks',
    'name': 'Starbucks',
    'payment_channel': 'in store',
    'payment_meta': {
        'by_order_of': None,
        'payee': None,
        'payer': None,
        'payment_method': None,
        'payment_processor': None,
        'ppd_id': None,
        'reason': None,
        'reference_number': None,
    },
    'pending': False,
    'pending_transaction_id': None,
    'personal_finance_category': None,
    'transaction_code': None,
    'transaction_id': 'llvvGK61QjH1eX8yP8qZC8BxB3WegMFZrXRjr',
    'transaction_type': 'place',
    'unofficial_currency_code': None,
}

# The DataFrame constructor accepts a list of records: one row per record,
# one column per key.  Add further transactions to this list before building.
transactions = [transaction]
df = pd.DataFrame(transactions)
Note that some columns are themselves objects, like the payment category column or payment_meta column. You didn't specify if you thought those should be handled specially or broken out into multiple columns, but it's something you should consider doing.
Also note that transactions is a list, and you can put multiple transactions in the same format into it, and construct the dataframe all at once. (This is preferable to creating multiple dataframes and appending them together, for the same reason as in dplyr.)

how to convert data to standard json

messages=%5B%7B%22values%22%3A+%7B%22momentum%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22momentum%22%2C+%22indicator_number%22%3A+0%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22momentum%22%5D%2C+%22hot%22%3A+0%2C+%22cold%22%3A+0%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+10%7D%2C+%22status%22%3A+%22hot%22%7D%2C+%22status%22%3A+%22hot%22%2C+%22last_status%22%3A+%22hot%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22%22%2C+%22indicator_label%22%3A+%22%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22leading_span_a%22%3A+%220.00%22%2C+%22leading_span_b%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22ichimoku%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22leading_span_a%22%2C+%22leading_span_b%22%5D%2C+%22hot%22%3A+true%2C+%22cold%22%3A+true%2C+%22candle_period%22%3A+%224h%22%2C+%22hot_label%22%3A+%22Bullish+Alert%22%2C+%22cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Clos
e%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22bbp%22%3A+%220.96%22%2C+%22mfi%22%3A+%2298.05%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22bbp%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+20%2C+%22hot%22%3A+0.09%2C+%22cold%22%3A+0.8%2C+%22std_dev%22%3A+2%2C+%22signal%22%3A+%5B%22bbp%22%2C+%22mfi%22%5D%2C+%22hot_label%22%3A+%22Lower+Band%22%2C+%22cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%5D
i need to convert this data in python3 to standard json for post json api
any solution ?
thanks
That looks like it's been URL form encoded.
Try
import urllib.parse
import json
# note **without** the message= part
stuff = "%5B%7B%22values%22%3A+%7B%22momentum%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22momentum%22%2C+%22indicator_number%22%3A+0%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22momentum%22%5D%2C+%22hot%22%3A+0%2C+%22cold%22%3A+0%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+10%7D%2C+%22status%22%3A+%22hot%22%7D%2C+%22status%22%3A+%22hot%22%2C+%22last_status%22%3A+%22hot%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22%22%2C+%22indicator_label%22%3A+%22%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22leading_span_a%22%3A+%220.00%22%2C+%22leading_span_b%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22ichimoku%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22leading_span_a%22%2C+%22leading_span_b%22%5D%2C+%22hot%22%3A+true%2C+%22cold%22%3A+true%2C+%22candle_period%22%3A+%224h%22%2C+%22hot_label%22%3A+%22Bullish+Alert%22%2C+%22cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Clos
e%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22bbp%22%3A+%220.96%22%2C+%22mfi%22%3A+%2298.05%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22bbp%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+20%2C+%22hot%22%3A+0.09%2C+%22cold%22%3A+0.8%2C+%22std_dev%22%3A+2%2C+%22signal%22%3A+%5B%22bbp%22%2C+%22mfi%22%5D%2C+%22hot_label%22%3A+%22Lower+Band%22%2C+%22cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%5D"
# NOTE(review): the percent-encoded literal above appears to be wrapped onto
# two lines by the page formatting — in real code `stuff` must be a single
# unbroken string, or json.loads below will fail.
# unquote_plus reverses URL form-encoding: '+' -> space, %XX -> character.
parsed = urllib.parse.unquote_plus(stuff) # <<< encoded form, get rid of +
# The decoded text is JSON, so it can now be parsed into Python objects.
as_json = json.loads(parsed)
print(as_json)
gives me
[{'values': {'momentum': '0.00'}, 'exchange': 'binance', 'market': 'BNT/ETH', 'base_currency': 'BNT', 'quote_currency': 'ETH', 'indicator': 'momentum', 'indicator_number': 0, 'analysis': {'config': {'enabled': True, 'alert_enabled': True, 'alert_frequency': 'once', 'signal': ['momentum'], 'hot': 0, 'cold': 0, 'candle_period': '4h', 'period_count': 10}, 'status': 'hot'}, 'status': 'hot', 'last_status': 'hot', 'prices': ' Open: 0.000989 High: 0.000998 Low: 0.000980 Close: 0.000998', 'lrsi': '', 'creation_date': '2020-05-10 16:16:23', 'hot_cold_label': '', 'indicator_label': '', 'price_value': {'open': 0.000989, 'high': 0.000998, 'low': 0.00098, 'close': 0.000998}, 'decimal_format': '%.6f'}, {'values': {'leading_span_a': '0.00', 'leading_span_b': '0.00'}, 'exchange': 'binance', 'market': 'BNT/ETH', 'base_currency': 'BNT', 'quote_currency': 'ETH', 'indicator': 'ichimoku', 'indicator_number': 1, 'analysis': {'config': {'enabled': True, 'alert_enabled': True, 'alert_frequency': 'once', 'signal': ['leading_span_a', 'leading_span_b'], 'hot': True, 'cold': True, 'candle_period': '4h', 'hot_label': 'Bullish Alert', 'cold_label': 'Bearish Alert', 'indicator_label': 'ICHIMOKU 4 hr', 'mute_cold': False}, 'status': 'cold'}, 'status': 'cold', 'last_status': 'cold', 'prices': ' Open: 0.000989 High: 0.000998 Low: 0.000980 Close: 0.000998', 'lrsi': '', 'creation_date': '2020-05-10 16:16:23', 'hot_cold_label': 'Bearish Alert', 'indicator_label': 'ICHIMOKU 4 hr', 'price_value': {'open': 0.000989, 'high': 0.000998, 'low': 0.00098, 'close': 0.000998}, 'decimal_format': '%.6f'}, {'values': {'bbp': '0.96', 'mfi': '98.05'}, 'exchange': 'binance', 'market': 'BNT/ETH', 'base_currency': 'BNT', 'quote_currency': 'ETH', 'indicator': 'bbp', 'indicator_number': 1, 'analysis': {'config': {'enabled': True, 'alert_enabled': True, 'alert_frequency': 'once', 'candle_period': '4h', 'period_count': 20, 'hot': 0.09, 'cold': 0.8, 'std_dev': 2, 'signal': ['bbp', 'mfi'], 'hot_label': 'Lower Band', 
'cold_label': 'Upper Band BB', 'indicator_label': 'Bollinger 4 hr', 'mute_cold': False}, 'status': 'cold'}, 'status': 'cold', 'last_status': 'cold', 'prices': ' Open: 0.000989 High: 0.000998 Low: 0.000980 Close: 0.000998', 'lrsi': '', 'creation_date': '2020-05-10 16:16:23', 'hot_cold_label': 'Upper Band BB', 'indicator_label': 'Bollinger 4 hr', 'price_value': {'open': 0.000989, 'high': 0.000998, 'low': 0.00098, 'close': 0.000998}, 'decimal_format': '%.6f'}]
Whereas if you want a JSON string to POST somewhere, note that parsed already is one; to serialize the decoded Python objects back to text, call as_string = json.dumps(as_json). (Calling json.dumps(parsed) would double-encode, producing a JSON string literal containing the whole document.)

How do I remove an item in my dictionary?

I am trying to remove an item ("logs") from my dictionary using the del method.
this is my code:
del response.json() ["logs"]
print(response.json())
this is my JSON dictionary:
{'count': 19804,
'next': {'limit': 1, 'offset': 1},
'previous': None,
'results':
[{'id': '334455',
'custom_id': '112',
'company': 28,
'company_name': 'Sunshine and Flowers',
'delivery_address': '34 olive beach house, #01-22, 612345',
'delivery_timeslot': {'lower': '2019-12-06T10:00:00Z', 'upper': '2019-12-06T13:00:00Z', 'bounds': '[)'},
'sender_name': 'Edward Shine',
'sender_email': '',
'sender_contact': '91234567',
'removed': None,
'recipient_name': 'Mint Shine',
'recipient_contact': '91234567',
'notes': '',
'items': [{'id': 21668, 'name': 'Loose hair flowers', 'quantity': 1, 'metadata': {}, 'removed': None}, {'id': 21667, 'name': "Groom's Boutonniere", 'quantity': 1, 'metadata': {}, 'removed': None}, {'id': 21666, 'name': 'Bridal Bouquet', 'quantity': 1, 'metadata': {}, 'removed': None}],
'latitude': '1.28283838383642000000',
'longitude': '103.2828037266201000000',
'created': '2019-08-15T05:40:30.385467Z',
'updated': '2019-08-15T05:41:27.930110Z',
'status': 'pending',
'verbose_status': 'Pending',
'**logs**': [{'id': 334455, 'order': '50c402d8-7c76-45b5-b883-e2fb887a507e', 'order_custom_id': '112', 'order_delivery_address': '34 olive beach house, #01-22, 6123458', 'order_delivery_timeslot': {'lower': '2019-12-06T10:00:00Z', 'upper': '2019-12-06T13:00:00Z', 'bounds': '[)'}, 'message': 'Order was created.', 'failure_reason': None, 'success_code': None, 'success_description': None, 'created': '2019-08-15T05:40:30.431790Z', 'removed': None}, {'id': 334455, 'order': '50c402d8-7c76-45b5-b883-e2fb887a507e', 'order_custom_id': '112', 'order_delivery_address': '34 olive beach house, #01-22, 612345', 'order_delivery_timeslot': {'lower': '2019-12-06T10:00:00Z', 'upper': '2019-12-06T13:00:00Z', 'bounds': '[)'}, 'message': 'Order is pending.', 'failure_reason': None, 'success_code': None, 'success_description': None, 'created': '2019-08-15T05:40:30.433139Z', 'removed': None}],
'reschedule_requests': [],
'signature': None}]
but it is saying this error
KeyError: 'logs'
what am i doing wrong? please assist
Every time you call response.json(), it returns a new dict, so the key you delete from response.json() won't be reflected in the next call to response.json().
You should instead save the returning value of response.json() to a variable before deleting the desired key:
# Parse the body once and keep the resulting dict: every call to
# response.json() builds a brand-new dict, so the deletion must happen on
# the same object that is printed afterwards.
payload = response.json()
payload['results'][0].pop('logs')
print(payload)

Getting tags from python 3 dictionary object. AWS Boto3 Python 3

I have a dictionary object that is being returned to me from AWS. I need to pull the tag "based_on_ami" out of this dictionary. I have tried converting to a list, but I am new to programming and have not been able to figure out how to access Tags since they are a few levels down in the dictionary.
What is the best way for me to pull that tag out of the dictionary and put it into a variable i can use?
{
'Images':[
{
'Architecture':'x86_64',
'CreationDate':'2017-11-27T14:41:30.000Z',
'ImageId':'ami-8e73e0f4',
'ImageLocation':'23452345234545/java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5',
'ImageType':'machine',
'Public':False,
'OwnerId':'23452345234545',
'State':'available',
'BlockDeviceMappings':[
{
'DeviceName':'/dev/sda1',
'Ebs':{
'Encrypted':False,
'DeleteOnTermination':True,
'SnapshotId':'snap-0c10e8f5ced5b5240',
'VolumeSize':8,
'VolumeType':'gp2'
}
},
{
'DeviceName':'/dev/sdb',
'VirtualName':'ephemeral0'
},
{
'DeviceName':'/dev/sdc',
'VirtualName':'ephemeral1'
}
],
'EnaSupport':True,
'Hypervisor':'xen',
'Name':'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5',
'RootDeviceName':'/dev/sda1',
'RootDeviceType':'ebs',
'SriovNetSupport':'simple',
'Tags':[
{
'Key':'service',
'Value':'baseami'
},
{
'Key':'cloudservice',
'Value':'ami'
},
{
'Key':'Name',
'Value':'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5'
},
{
'Key':'os',
'Value':'ubuntu 16.04 lts'
},
{
'Key':'based_on_ami',
'Value':'ami-aa2ea8d0'
}
],
'VirtualizationType':'hvm'
}
],
'ResponseMetadata':{
'RequestId':'2c376c75-c31f-4aba-a058-173f3b125a00',
'HTTPStatusCode':200,
'HTTPHeaders':{
'content-type':'text/xml;charset=UTF-8',
'transfer-encoding':'chunked',
'vary':'Accept-Encoding',
'date':'Fri, 01 Dec 2017 18:17:53 GMT',
'server':'AmazonEC2'
},
'RetryAttempts':0
}
}
The best way to approach this type of problem is to find the value you're looking for, and then work outwards until you find a solution. You need to look at what is at each of those levels.
So, what are you looking for? You're looking for the Value for based_on_ami's Key. So your final step is going to be:
if obj['Key'] == 'based_on_ami':
# do something with obj['Value'].
But how do you get there? Well, the object is inside of a list, so you'll need to iterate the list:
for tag in <some list>:
if tag['Key'] == 'based_on_ami':
# do something with tag['Value'].
What is that list? It's the list of tags:
for tag in image['Tags']:
if tag['Key'] == 'based_on_ami':
# do something with tag['Value'].
And where are those tags? In an image object that you find in a list:
for image in image_list:
for tag in image['Tags']:
if tag['Key'] == 'based_on_ami':
# do something with tag['Value'].
The image list is the value found at the Images key in your initial dict.
image_list = my_data['Images']
for image in image_list:
for tag in image['Tags']:
if tag['Key'] == 'based_on_ami':
# do something with tag['Value'].
And now you're collecting all of those values, so you'll need a list and you'll need to append to it:
# Accumulate the matching tag values across *all* images.
result = []
# The image list sits under the top-level 'Images' key of the AWS response.
image_list = my_data['Images']
for image in image_list:
    for tag in image['Tags']:
        # Each tag is a {'Key': ..., 'Value': ...} pair; match on the key name.
        if tag['Key'] == 'based_on_ami':
            result.append(tag['Value'])
So, I took your example above, and added another based_on_ami node with the value quack:
{'ResponseMetadata': {'RequestId': '2c376c75-c31f-4aba-a058-173f3b125a00', 'RetryAttempts': 0, 'HTTPHeaders': {'vary': 'Accept-Encoding', 'transfer-encoding': 'chunked', 'server': 'AmazonEC2', 'content-type': 'text/xml;charset=UTF-8', 'date': 'Fri, 01 Dec 2017 18:17:53 GMT'}, 'HTTPStatusCode': 200}, 'Images': [{'Public': False, 'CreationDate': '2017-11-27T14:41:30.000Z', 'BlockDeviceMappings': [{'Ebs': {'SnapshotId': 'snap-0c10e8f5ced5b5240', 'VolumeSize': 8, 'Encrypted': False, 'VolumeType': 'gp2', 'DeleteOnTermination': True}, 'DeviceName': '/dev/sda1'}, {'VirtualName': 'ephemeral0', 'DeviceName': '/dev/sdb'}, {'VirtualName': 'ephemeral1', 'DeviceName': '/dev/sdc'}], 'OwnerId': '23452345234545', 'ImageLocation': '23452345234545/java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'RootDeviceName': '/dev/sda1', 'ImageType': 'machine', 'Hypervisor': 'xen', 'RootDeviceType': 'ebs', 'State': 'available', 'Architecture': 'x86_64', 'Name': 'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'Tags': [{'Value': 'baseami', 'Key': 'service'}, {'Value': 'ami', 'Key': 'cloudservice'}, {'Value': 'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'Key': 'Name'}, {'Value': 'ubuntu 16.04 lts', 'Key': 'os'}, {'Value': 'ami-aa2ea8d0', 'Key': 'based_on_ami'}], 'EnaSupport': True, 'SriovNetSupport': 'simple', 'ImageId': 'ami-8e73e0f4'}, {'Public': False, 'CreationDate': '2017-11-27T14:41:30.000Z', 'BlockDeviceMappings': [{'Ebs': {'SnapshotId': 'snap-0c10e8f5ced5b5240', 'VolumeSize': 8, 'Encrypted': False, 'VolumeType': 'gp2', 'DeleteOnTermination': True}, 'DeviceName': '/dev/sda1'}, {'VirtualName': 'ephemeral0', 'DeviceName': '/dev/sdb'}, {'VirtualName': 'ephemeral1', 'DeviceName': '/dev/sdc'}], 'VirtualizationType': 'hvm', 'OwnerId': '23452345234545', 'ImageLocation': '23452345234545/java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'RootDeviceName': '/dev/sda1', 'ImageType': 'machine', 'Hypervisor': 'xen', 'RootDeviceType': 'ebs', 'State': 
'available', 'Architecture': 'x86_64', 'Name': 'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'Tags': [{'Value': 'baseami', 'Key': 'service'}, {'Value': 'ami', 'Key': 'cloudservice'}, {'Value': 'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'Key': 'Name'}, {'Value': 'ubuntu 16.04 lts', 'Key': 'os'}, {'Value': 'quack', 'Key': 'based_on_ami'}], 'EnaSupport': True, 'SriovNetSupport': 'simple', 'ImageId': 'ami-8e73e0f4'}]}
My result:
['ami-aa2ea8d0', 'quack']
# `info` stands in for the dict returned by AWS (contents elided here).
info = {...}
# Collect the value of every 'based_on_ami' tag across all images.
tags = [
    tag['Value']
    for image in info['Images']
    for tag in image['Tags']
    if tag['Key'] == 'based_on_ami'
]
print(tags)

Resources