How to call a loop under the main function in Python - python-3.x

I am working on a script which I have to modify so that it loops through multiple resources within a function.
Below are the items we need to loop through to get the data from; they come from Config_local:
BASE_URL = "https://synergy.hpe.example.com/rest/"
RES_EXT = [
    'resource-alerts?count=500&start=1',
    'resource-alerts?count=500&start=501',
    'resource-alerts?count=500&start=1001',
    'resource-alerts?count=500&start=1501',
]
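(As an aside, since the entries only differ in their start offset, the same list can be generated instead of hard-coded; a minimal sketch, assuming the offsets always advance in steps of 500:)
# Hypothetical alternative: build the paginated resource paths programmatically.
RES_EXT = [f"resource-alerts?count=500&start={start}" for start in range(1, 2000, 500)]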
While I am looping through the above list under the def main(): section and calling get_resource_alerts_response() inside the loop, the data coming out of the loop gets overwritten, so only the last iteration's data is returned.
Main Script:
import os
import shutil
import smtplib
from email.message import EmailMessage
import pandas as pd
pd.set_option('expand_frame_repr', True)
import requests
from Config_local import (
    BASE_URL,
    DST_DIR,
    OUTFILE,
    PASSWORD,
    SRC_DIR,
    TIMESTAMP_DST,
    USERNAME,
    SUBJECT,
    FROM,
    TO,
    EMAIL_TEMPLATE,
    SMTP_SERVER,
    RES_EXT,
)
class FileMoveFailure(Exception):
    pass


class SynergyRequestFailure(Exception):
    pass


class SessionIdRetrievalFailure(SynergyRequestFailure):
    pass


class ResourceAlertsRetrievalFailure(SynergyRequestFailure):
    pass


def move_csv_files():
    for csv_file in os.listdir(SRC_DIR):
        if csv_file.endswith(".csv") and os.path.isfile(os.path.join(SRC_DIR, csv_file)):
            try:
                shutil.move(
                    os.path.join(f"{SRC_DIR}/{csv_file}"),
                    f"{DST_DIR}/{csv_file}-{TIMESTAMP_DST}.log"
                )
            except OSError as os_error:
                raise FileMoveFailure(
                    f'Moving file {csv_file} has failed: {os_error}'
                )


def get_session_id(session):
    try:
        response = session.post(
            url=f"{BASE_URL}/login-sessions",
            headers={
                "accept": "application/json",
                "content-type": "application/json",
                "x-api-version": "120",
            },
            json={
                "userName": USERNAME,
                "password": PASSWORD
            },
            verify=False
        )
    except requests.exceptions.RequestException as req_exception:
        # you should also get this logged somewhere, or at least
        # printed depending on your use case
        raise SessionIdRetrievalFailure(
            f"Could not get session id: {req_exception}"
        )
    json_response = response.json()
    if not json_response.get("sessionID"):
        # always assume the worst and do sanity checks & validations
        # on fetched data
        raise KeyError("Could not fetch session id")
    return json_response["sessionID"]


#def get_all_text(session, session_id):
#    all_text = ''
#    for res in RES_EXT:
#        url = f"{BASE_URL}{res}"
#        newresult = get_resource_alerts_response(session, session_id, url)
#        all_text += newresult
#        print(f"{all_text}")
#    return str(all_text)
#


def get_resource_alerts_response(session, session_id, res):
    try:
        return session.get(
            url=f"{BASE_URL}{res}",
            headers={
                "accept": "application/json",
                "content-type": "text/csv",
                "x-api-version": "2",
                "auth": session_id,
            },
            verify=False,
            stream=True
        )
    except requests.exceptions.RequestException as req_exception:
        # you should also get this logged somewhere, or at least
        # printed depending on your use case
        raise ResourceAlertsRetrievalFailure(
            f"Could not fetch resource alerts: {req_exception}"
        )


def resource_alerts_to_df(resource_alerts_response):
    with open(OUTFILE, 'wb') as f:
        for chunk in resource_alerts_response.iter_content(chunk_size=1024*36):
            f.write(chunk)
    return pd.read_csv(OUTFILE)


def send_email(df):
    server = smtplib.SMTP(SMTP_SERVER)
    msg = EmailMessage()
    msg['Subject'], msg['From'], msg['To'] = SUBJECT, FROM, TO
    msg.set_content("Text version of your html template")
    msg.add_alternative(
        EMAIL_TEMPLATE.format(df.to_html(index=False)),
        subtype='html'
    )
    server.send_message(msg)


def main():
    move_csv_files()
    session = requests.Session()
    session_id = get_session_id(session)
    for res in RES_EXT:
        resource_alerts_response = get_resource_alerts_response(session,
                                                                session_id, res)
        print(resource_alerts_response)
    df = resource_alerts_to_df(resource_alerts_response)
    print(df)
    send_email(df)


if __name__ == '__main__':
    main()
Any help or hint will be much appreciated.

This looks like a copy of the code we already had on SO, but not quite what you want now. However, since the overall code body is okay and the idea of the for loop also looks good, you just need to tweak it to meet the requirement.
1- You need to create an empty DataFrame first: Synergy_Data = pd.DataFrame()
2- Then you can append the data you receive in each iteration of the for loop, i.e. resource_alerts_response, which becomes df = resource_alerts_to_df(resource_alerts_response).
3- Lastly, you can append this df to the empty Synergy_Data and then use that under your if __name__ == '__main__' block to send an e-mail. Also, don't forget to declare Synergy_Data as a global variable.
Synergy_Data = pd.DataFrame()


def main():
    global Synergy_Data
    move_csv_files()
    session = requests.Session()
    session_id = get_session_id(session)
    for res in RES_EXT:
        resource_alerts_response = get_resource_alerts_response(session,
                                                                session_id, res)
        df = resource_alerts_to_df(resource_alerts_response)
        Synergy_Data = Synergy_Data.append(df)


if __name__ == '__main__':
    main()
    send_email(Synergy_Data)
Hope this will be helpful.
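A note on the snippet above: DataFrame.append was removed in pandas 2.0, so on a current install pd.concat does the same job, and if you would rather avoid the global variable, main() can simply return the combined frame. A minimal sketch of that variant, reusing the helpers from the question:
def main():
    move_csv_files()
    session = requests.Session()
    session_id = get_session_id(session)
    synergy_data = pd.DataFrame()
    for res in RES_EXT:
        resource_alerts_response = get_resource_alerts_response(session, session_id, res)
        # pd.concat replaces the removed DataFrame.append
        synergy_data = pd.concat([synergy_data, resource_alerts_to_df(resource_alerts_response)])
    return synergy_data


if __name__ == '__main__':
    send_email(main())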

Only the last response is being used because this value is overwritten in resource_alerts_response on each iteration of the loop. You may consider acting on the data in each iteration, or storing it for use later, i.e. after the loop. I've included these options with modifications to the main() function below.
Option 1
Send an email for each resource alert response
def main():
    move_csv_files()
    session = requests.Session()
    session_id = get_session_id(session)
    for res in RES_EXT:
        resource_alerts_response = get_resource_alerts_response(session,
                                                                session_id, res)
        print(resource_alerts_response)
        # Indent lines below so that the operations below are executed in each loop iteration
        df = resource_alerts_to_df(resource_alerts_response)
        print(df)
        send_email(df)
Option 2
Merge all resource alert responses and send one email
def main():
    move_csv_files()
    session = requests.Session()
    session_id = get_session_id(session)
    df_resource_alerts_responses = None
    for res in RES_EXT:
        resource_alerts_response = get_resource_alerts_response(session,
                                                                session_id, res)
        print(resource_alerts_response)
        df = resource_alerts_to_df(resource_alerts_response)
        if df_resource_alerts_responses is None:
            df_resource_alerts_responses = df
        else:
            df_resource_alerts_responses = df_resource_alerts_responses.append(df, ignore_index=True)
    print(df_resource_alerts_responses)
    if df_resource_alerts_responses is not None:
        send_email(df_resource_alerts_responses)
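The same note about DataFrame.append applies to Option 2: on pandas 2.0+ it can be written by collecting the per-request frames in a list and concatenating once after the loop, which also avoids repeated copies. A minimal sketch using the same helpers:
def main():
    move_csv_files()
    session = requests.Session()
    session_id = get_session_id(session)
    frames = []
    for res in RES_EXT:
        resource_alerts_response = get_resource_alerts_response(session, session_id, res)
        frames.append(resource_alerts_to_df(resource_alerts_response))
    if frames:
        # one concat at the end instead of appending DataFrames inside the loop
        send_email(pd.concat(frames, ignore_index=True))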

Related

bithumb Api error -{'status': '5100', 'message': 'Bad Request.(Auth Data)'}

I wanted to make a program where I can check the order details of my order at Bithumb Exchange.
So I looked at the docs (https://api.bithumb.com/info/orders) and built it, but the same error code keeps coming out, and I don't know what to do.
import time
import math
import base64
import hmac, hashlib
import urllib.parse
import requests


class XCoinAPI:
    api_url = "https://api.bithumb.com";
    api_key = "";
    api_secret = "";

    def __init__(self, api_key, api_secret):
        self.api_key = api_key;
        self.api_secret = api_secret;

    def body_callback(self, buf):
        self.contents = buf;

    def microtime(self, get_as_float=False):
        if get_as_float:
            return time.time()
        else:
            return '%f %d' % math.modf(time.time())

    def usecTime(self):
        mt = self.microtime(False)
        mt_array = mt.split(" ")[:2];
        return mt_array[1] + mt_array[0][2:5];

    def xcoinApiCall(self, endpoint, rgParams):
        # 1. Api-Sign and Api-Nonce information generation.
        # 2. Request related information from the Bithumb API server.
        #
        # - nonce: it is an arbitrary number that may only be used once.
        # - api_sign: API signature information created in various combinations values.
        endpoint_item_array = {
            "endpoint": endpoint
        }
        uri_array = dict(endpoint_item_array, **rgParams)  # Concatenate the two arrays.
        str_data = urllib.parse.urlencode(uri_array)
        nonce = self.usecTime()
        data = endpoint + chr(1) + str_data + chr(1) + nonce
        utf8_data = data.encode('utf-8')
        key = self.api_secret
        utf8_key = key.encode('utf-8')
        h = hmac.new(bytes(utf8_key), utf8_data, hashlib.sha512)
        hex_output = h.hexdigest()
        utf8_hex_output = hex_output.encode('utf-8')
        api_sign = base64.b64encode(utf8_hex_output)
        utf8_api_sign = api_sign.decode('utf-8')
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/x-www-form-urlencoded",
            "Api-Key": self.api_key,
            "Api-Nonce": nonce,
            "Api-Sign": utf8_api_sign
        }
        url = self.api_url + endpoint
        r = requests.post(url, headers=headers, data=uri_array)
        return r.json()


a = XCoinAPI(api_key="MYKEY1c", api_secret="MYKEY2")
aa = a.xcoinApiCall("/info/orders", {"order_currency": "LN", "payment_currency": "BTC"})
print(aa)
{'status': '5100', 'message': 'Bad Request.(Auth Data)'}
Process finished with exit code 0
The error code 5100, Bad Request, keeps coming up (https://apidocs.bithumb.com/docs/api-%EC%A3%BC%EC%9A%94-%EC%97%90%EB%9F%AC-%EC%BD%94%EB%93%9C).
I really don't know which part of the code to modify.
I think it's a parameter problem with xcoinApiCall, but I don't know.
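For anyone debugging this against Bithumb's docs, the signing logic in xcoinApiCall above can be isolated and run on its own to inspect exactly what string gets signed. This is just the question's own steps pulled out with placeholder values, not a verified fix:
import base64, hashlib, hmac, urllib.parse

endpoint = "/info/orders"
params = {"order_currency": "LN", "payment_currency": "BTC"}
nonce = "1700000000000"   # placeholder; the class builds this from the current time
secret = "MYKEY2"         # placeholder API secret

# Same construction as xcoinApiCall: endpoint + urlencoded (endpoint + params) + nonce,
# joined with chr(1), then HMAC-SHA512 hex digest, then base64.
str_data = urllib.parse.urlencode(dict({"endpoint": endpoint}, **params))
data = endpoint + chr(1) + str_data + chr(1) + nonce
digest = hmac.new(secret.encode("utf-8"), data.encode("utf-8"), hashlib.sha512).hexdigest()
api_sign = base64.b64encode(digest.encode("utf-8")).decode("utf-8")
print(repr(data))
print(api_sign)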

count successful and unsuccessful post requests for asynchronous post call/request

I need help implementing the logic to count the number of successful POST calls, which are asynchronous in nature (status_code == 200), as well as failed calls (status_code != 200).
I am new to coroutines. I would appreciate it if someone could suggest a better way of making an asynchronous POST call that can be retried, polled for status, and that can emit metrics for successful POST requests as well.
Following is my code:
asyncio.get_event_loop().run_in_executor(
    None,
    self.publish_actual,
    event_name,
    custom_payload,
    event_message_params,
)
which calls publish_actual:
def publish_actual(
    self,
    event_name: str,
    custom_payload={},
    event_message_params=[],
):
    """Submits a post request using the request library

    :param event_name: name of the event
    :type event_name: str
    :param key: key for a particular application
    :param custom_payload: custom_payload, defaults to {}
    :type custom_payload: dict, optional
    :param event_message_params: event_message_params, defaults to []
    :type event_message_params: list, optional
    """
    json_data = {}
    path = f"/some/path"
    self.request(path, "POST", json=json_data)
which calls the following request function:
def request(self, api_path, method="GET", **kwargs):
    try:
        self._validate_configuration()
        headers = {}
        api_endpoint = self.service_uri.to_url(api_path)
        logger.debug(api_endpoint)
        if "headers" in kwargs and kwargs["headers"]:
            headers.update(kwargs["headers"])
        headers = {"Content-Type": "application/json"}
        begin = datetime.now()

        def build_success_metrics(response, *args, **kwargs):
            tags = {
                "name": "success_metrics",
                "domain": api_endpoint,
                "status_code": 200,
            }
            build_metrics(tags)

        def check_for_errors(response, *args, **kwargs):
            response.raise_for_status()

        response = self.session.request(
            method=method,
            url=api_endpoint,
            headers=headers,
            timeout=self.timeout,
            hooks={"response": [build_success_metrics, check_for_errors]},
            **kwargs,
        )
        end = datetime.now()
        logger.debug(
            f"'{method}' request against endpoint '{api_endpoint}' took {round((end - begin).total_seconds() * 1000, 3)} ms"
        )
        logger.debug(f"response: {response}")
    except RequestException as e:
        tags = {
            "name": "error_metrics",
            "domain": api_endpoint,
            "exception_class": e.__class__.__name__,
        }
        build_metrics(tags)
        return f"Exception occurred: {e}"
Let me know if anything else is required from my end to explain what exactly I have done and what I am trying to achieve.
There is not much await and async in your example, so I've just addressed the counting part of your question in general asyncio terms. asyncio.Queue is good for this because you can separate the counting from the code that produces the results quite simply.
import asyncio
import aiohttp


class Count():
    def __init__(self, queue: asyncio.Queue):
        self.queue = queue
        self.good = 0
        self.bad = 0

    async def count(self):
        while True:
            result = await self.queue.get()
            if result == 'Exit':
                return
            if result == 200:
                self.good += 1
            else:
                self.bad += 1


async def request(q: asyncio.Queue):
    async with aiohttp.ClientSession() as session:
        for _ in range(5):  # just poll 5 times in this instance
            await asyncio.sleep(0.1)
            async with session.get(
                'https://httpbin.org/status/200%2C500', ssl=False
            ) as response:
                q.put_nowait(response.status)
        q.put_nowait('Exit')


async def main():
    q = asyncio.Queue()
    cnt = Count(q)
    tasks = [cnt.count(), request(q)]
    await asyncio.gather(*[asyncio.create_task(t) for t in tasks])
    print(cnt.good, cnt.bad)


if __name__ == "__main__":
    asyncio.run(main())
The output is random given the httpbin response; the two counts should add up to 5.
4 1
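As a variant, if a queue feels like too much machinery, the statuses can also be collected with asyncio.gather and counted afterwards. A minimal sketch against the same httpbin test endpoint used above; the POST payload is just an illustrative placeholder:
import asyncio
import aiohttp


async def post_once(session: aiohttp.ClientSession, url: str, payload: dict) -> int:
    # Perform one POST and hand back its HTTP status code.
    async with session.post(url, json=payload, ssl=False) as response:
        return response.status


async def main():
    url = 'https://httpbin.org/status/200%2C500'  # randomly answers 200 or 500
    async with aiohttp.ClientSession() as session:
        statuses = await asyncio.gather(
            *(post_once(session, url, {"attempt": i}) for i in range(5)),
            return_exceptions=True,
        )
    good = sum(1 for s in statuses if s == 200)
    print(good, len(statuses) - good)


if __name__ == "__main__":
    asyncio.run(main())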

How to handle method 'post' in google cloud function?

I'm trying to create a cloud function which goes to a certain Google Sheet and collects the necessary data; after that, it connects to BigQuery and writes the data to a BigQuery table. When I run the Apps Script, which triggers the cloud function, I get the following message: Error: could not handle the request
The code from the cloud function (main.py):
from __future__ import print_function
import json
import os.path
import pickle
import functions_framework
import requests
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from google.auth.transport.requests import Request
from google.cloud import bigquery


class GoogleSheetService:
    # The settings of the particular google table
    SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
    TOKEN_PICKLE = 'settings/token.pickle'
    service = None
    # The settings for the bigquery service
    credentials_path = 'settings/pythonbq.privateKey.json'
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credentials_path
    client = bigquery.Client()
    table_id = 'cobalt-alliance-365419.BTC_Daily.table-btc'
    DATA = []

    def __init__(self):
        creds = None
        if os.path.exists(self.TOKEN_PICKLE):
            with open(self.TOKEN_PICKLE, 'rb') as token:
                creds = pickle.load(token)
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'settings/credentials.json', self.SCOPES)
                creds = flow.run_local_server(port=0)
            with open(self.TOKEN_PICKLE, 'wb') as token:
                pickle.dump(creds, token)
        self.service = build('sheets', 'v4', credentials=creds)

    def get_data(self, spreadsheet_id, range_name):
        sheet = self.service.spreadsheets()
        result = sheet.values().get(spreadsheetId=spreadsheet_id, range=range_name).execute()
        self.DATA = result.get('values', [])

    def get_row(self, data_of_column):
        r = []
        for row in self.DATA:
            if data_of_column == 'date':
                r.append(row[1].replace('0:00:00', '').rstrip())
            if data_of_column == 'symbol':
                r.append(row[2])
            if data_of_column == 'volume_BTC':
                r.append(float(row[4]))
            if data_of_column == 'volume_USD':
                r.append(float(row[5]))
        return r

    def sample_data(self, row1=None, row2=None, row3=None, row4=None):
        return {u'date': f'{row1}', u'symbol': f'{row2}', u'volume_BTC': f'{row3}', u'volume_USD': f'{row4}'}

    def write_data(self):
        rows_array = []
        number_of_rows = len(self.DATA)
        for i in range(number_of_rows):
            rows_array.append(self.sample_data(self.get_row('date')[i], self.get_row('symbol')[i],
                                               self.get_row('volume_BTC')[i], self.get_row('volume_USD')[i]))
        return rows_array

    def write_to_db(self):
        rows_to_insert = self.write_data()
        if not rows_to_insert:
            return 'Data is empty'
        errors = self.client.insert_rows_json(self.table_id, rows_to_insert)
        if not errors:
            return f'New rows have been added.'
        else:
            return f'Encountered errors while inserting rows: {errors}'


#functions_framework.http
def main(request):
    gs = GoogleSheetService()
    if requests.method == "GET":
        gs.get_data('164RTnYK49DvV2Ion45JHMCFQa8S', 'A2:F100')
        data_json = json.dumps(gs.DATA)
        data = {'data_json': data_json}
        return requests.get(data=data)
    elif requests.method == "POST":
        gs.get_data('164RTnYK49DvV2Ion45JHMCFQa8S', 'A2:F100')
        gs.write_to_db()
Apps script
function callCloudRun() {
  const token = ScriptApp.getIdentityToken();
  var options = {
    'method' : 'post',
    'headers': {'Authorization': 'Bearer ' + token},
  };
  options = {muteHttpExceptions: true};
  var response = UrlFetchApp.fetch(CLOUD_RUN_URL, options);
  Logger.log(response.getContentText());
}
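One thing worth checking in the handler above, independent of the Apps Script side: inside main(request) the code branches on requests.method (the imported requests library) rather than on the request object that functions_framework passes in, and the GET branch returns requests.get(data=data), which is not a valid HTTP response. As a point of reference only, a minimal sketch of a functions_framework HTTP handler that branches on the method usually looks like this (the handler bodies are placeholders, not the questioner's logic):
import functions_framework


@functions_framework.http
def main(request):
    # 'request' is a flask.Request object supplied by functions_framework.
    if request.method == "GET":
        # ... fetch and return data ...
        return {"status": "ok"}, 200
    if request.method == "POST":
        # ... write data to BigQuery ...
        return "rows written", 200
    return "method not allowed", 405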

How to post group of requests to 2 urls with aiohttp

I have 2 URLs and 60k+ requests. Basically, I need to post every request to both URLs, then compare their responses, but not wait for a response before posting the next request.
I've tried to do it with aiohttp and asyncio:
import asyncio
import time
import aiohttp
import os
from aiofile import AIOFile

testURL = ""
prodURL = ""
directoryWithRequests = ''
directoryToWrite = ''
headers = {'content-type': 'application/soap+xml'}
i = 1


async def fetch(session, url, reqeust):
    global i
    async with session.post(url=url, data=reqeust.encode('utf-8'), headers=headers) as response:
        if response.status != 200:
            async with AIOFile(directoryToWrite + str(i) + '.xml', 'w') as afp:
                await afp.write(reqeust)
            i += 1
        return await response.text()


async def fetch_all(session, urls, request):
    results = await asyncio.gather(*[asyncio.create_task(fetch(session, url, request)) for url in urls])
    return results


async def asynchronousRequests(requestBody):
    urls = [testURL, prodURL]
    global i
    with open(requestBody) as my_file:
        body = my_file.read()
    async with aiohttp.ClientSession() as session:
        htmls = await fetch_all(session, urls, body)
        # some conditions


async def asynchronous():
    try:
        start = time.time()
        futures = [asynchronousRequests(directoryWithRequests + i) for i in os.listdir(directoryWithRequests)]
        for future in asyncio.as_completed(futures):
            result = await future
        print("Process took: {:.2f} seconds".format(time.time() - start))
    except Exception as e:
        print(str(e))


if __name__ == '__main__':
    try:
        # AsyncronTest
        ioloop = asyncio.ProactorEventLoop()
        ioloop.run_until_complete(asynchronous())
        ioloop.close()
        if i == 1:
            print('Regress is OK')
        else:
            print('Number of requests to check = {}'.format(i))
    except Exception as e:
        print(e)
I believe the code above works, but it creates N futures, where N equals the number of request files. This leads to a sort of DDoS, because the server can't respond to that number of requests at the same time.
Found a suitable solution. Basically it's just 2 async tasks:
tasks = [
    postRequest(testURL, client, body),
    postRequest(prodURL, client, body)
]
await asyncio.wait(tasks)
It's not the same performance as the code in the question with an affordable number of requests, but at least it doesn't DDoS the server as much.
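If the goal is to keep all request files fully asynchronous while still capping how many are in flight at once, an asyncio.Semaphore is a common middle ground. A minimal sketch along the lines of the question's code; it assumes the testURL, prodURL and headers globals from the question, and the limit of 50 is an arbitrary example:
import asyncio
import aiohttp

CONCURRENCY_LIMIT = 50  # arbitrary example value


async def post_once(session, url, body):
    # One POST, returning status and body text.
    async with session.post(url, data=body.encode('utf-8'), headers=headers) as response:
        return response.status, await response.text()


async def fetch_pair(session, semaphore, body):
    # Send the same body to both URLs, but only while holding a semaphore slot,
    # so at most CONCURRENCY_LIMIT bodies are in flight at any moment.
    async with semaphore:
        return await asyncio.gather(
            post_once(session, testURL, body),
            post_once(session, prodURL, body),
        )


async def run_all(bodies):
    semaphore = asyncio.Semaphore(CONCURRENCY_LIMIT)
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(fetch_pair(session, semaphore, body) for body in bodies))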

Python Telegram API ReplyKeyboardMarkup

I am new to Python. I am trying to send a response showing a custom keyboard with two buttons to the user in the Telegram app, but it gives me an error about encode.
I would like to know where my mistake is.
import json
from urllib.request import urlopen
from urllib.parse import quote, unquote
import time


def Decode(telegram_response):
    decoded = ''
    for line in telegram_response:
        decoded = decoded + line.decode('utf-8')
    return decoded


TOKEN = 'mytoken'
URL = 'https://api.telegram.org/bot{}/'.format(TOKEN)
cmd = 'getme'
telegram_response = urlopen(URL + cmd)
decoded = Decode(telegram_response)
gtm = json.loads(decoded)
status = True
while status:
    cmd = 'getUpdates'
    telegram_response = urlopen(URL + cmd)
    decoded = Decode(telegram_response)
    upds = json.loads(decoded)
    new_message = len(upds['result'])
    if new_message != 0:
        msg = upds['result'][0]['message']
        chat_id = str(msg['chat']['id'])
        reply_markup = {'keyboard': [[{'text': 'first button'}], [{'text': 'second button'}]], 'resize_keyboard': True, 'one_time_keyboard': True}
        reply_markup = json.dumps(reply_markup)
        params = ({'chat_id': chat_id, 'reply_markup': reply_markup, 'disable_web_page_preview': 'true'})
        myresponse = urlopen(URL + 'sendMessage' + quote((params).encode('utf-8')))
An easy way to build powerful bots is to use the python-telegram-bot library.
I rewrote your code with a few major fixes and features. I hope it helps you learn bots more deeply.
My version of the bot:
###############################################################################
#!/usr/bin/python3

from sys import exc_info as error
from urllib.request import urlopen
from urllib.parse import urlencode
import json

TOKEN = 'XXXXXXXXXXXXXXXXXXXXXXXXXXX'
URL = 'https://api.telegram.org/bot{}'.format(TOKEN)
STATUS = True
OFFSET = 0


def call_api_method(method='getMe', data=None):
    # Call API method with data.
    data = urlencode(data).encode("utf-8") if data else data
    response = urlopen('{}/{}'.format(URL, method), data)
    return json.loads(response.read())


def get_me():
    # Get bot info.
    bot = call_api_method()
    return type('Bot', (), dict(bot['result']))


def get_updates():
    # Get new updates from Telegram.
    data = {'offset': OFFSET, 'limit': 0, 'timeout': 0}
    return type('Updates', (), call_api_method('getUpdates', data))


def handle(update):
    # Make useful objects.
    message = type('Message', (object,), dict(update['message']))
    user = type('User', (), dict(update['message']['from']))
    chat = type('Chat', (), dict(update['message']['chat']))
    return message, user, chat


def send_message(chat_id, message):
    # Send message to specific chat.
    data = {'text': message,
            'chat_id': chat_id,
            'parse_mode': 'Markdown',
            'disable_web_page_preview': True}
    call_api_method('sendMessage', data)


def send_keyboard(chat_id, message, keyboard):
    # Send message and keyboard to specific chat.
    data = {'text': message,
            'chat_id': chat_id,
            'parse_mode': 'Markdown',
            'reply_markup': reply_markup(keyboard),
            'disable_web_page_preview': 'true'}
    call_api_method('sendMessage', data)


def reply_markup(keyboard):
    # Serialize keyboard data to JSON.
    return json.dumps({'keyboard': keyboard,
                       'resize_keyboard': True,
                       'one_time_keyboard': True,
                       'selective': True})


def main_keyboard():
    # Main menu.
    return [first_button(), second_button()]


def one_line_keyboard():
    # Menu with buttons in one line.
    return [two_buttons()]


def first_button():
    # Single keyboard button.
    return ['first button']


def second_button():
    # Single keyboard button.
    return ['second button']


def two_buttons():
    # Two buttons on one line.
    return ['left button', 'right button']


while STATUS:
    # Get updates forever. Except if get Error.
    try:
        if not OFFSET:
            OFFSET = 1
            # Print bot info on the start.
            bot = get_me()
            print('Bot #{} is running...'.format(bot.username))
        updates = get_updates()
        for update in updates.result:
            # Handle last update.
            OFFSET = update['update_id'] + 1
            message, user, chat = handle(update)
            # Greeting user by full name.
            greeting = 'Hello, {} {}!'.format(user.first_name, user.last_name)
            #send_message(chat.id, greeting)
            send_keyboard(chat.id, greeting, one_line_keyboard())
    except:
        STATUS = False
        print('\nERROR:\t', error()[1])
###############################################################################
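Since the answer above recommends python-telegram-bot but then sticks to raw urllib, here is a minimal sketch of the same two-button reply keyboard using that library, assuming python-telegram-bot v20+ (where handlers are async); the token is a placeholder:
from telegram import ReplyKeyboardMarkup, Update
from telegram.ext import Application, CommandHandler, ContextTypes


async def start(update: Update, context: ContextTypes.DEFAULT_TYPE):
    # Reply with a custom keyboard containing two buttons, one per row.
    keyboard = [['first button'], ['second button']]
    await update.message.reply_text(
        'Choose a button:',
        reply_markup=ReplyKeyboardMarkup(keyboard, resize_keyboard=True, one_time_keyboard=True),
    )


def main():
    app = Application.builder().token('XXXXXXXXXXXXXXXXXXXXXXXXXXX').build()
    app.add_handler(CommandHandler('start', start))
    app.run_polling()


if __name__ == '__main__':
    main()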
You can use this code; I hope it is useful for you. The error about encode comes from calling .encode('utf-8') on params, which is a dict, and sendMessage also needs a text parameter.
I changed:
params = ({'chat_id': chat_id, 'reply_markup': reply_markup, 'disable_web_page_preview': 'true'})
myresponse =urlopen(URL + 'sendMessage' + quote((params).encode('utf-8')))
with:
params = ({'text': 'ss', 'chat_id': chat_id, 'reply_markup': reply_markup, 'disable_web_page_preview': 'true'})
data = urllib.parse.urlencode(params).encode("utf-8")
myresponse = urlopen(URL + 'sendMessage', data)
Complete code:
import json
import urllib
from urllib.parse import quote
from urllib.request import urlopen


def Decode(telegram_response):
    decoded = ''
    for line in telegram_response:
        decoded = decoded + line.decode('utf-8')
    return decoded


TOKEN = 'XXXXXXXXXXXXXXXXXXXXXXXXXXX'
URL = 'https://api.telegram.org/bot{}/'.format(TOKEN)
cmd = 'getme'
telegram_response = urlopen(URL + cmd)
decoded = Decode(telegram_response)
gtm = json.loads(decoded)
status = True
while status:
    cmd = 'getUpdates'
    telegram_response = urlopen(URL + cmd)
    decoded = Decode(telegram_response)
    upds = json.loads(decoded)
    new_message = len(upds['result'])
    if new_message != 0:
        msg = upds['result'][0]['message']
        chat_id = str(msg['chat']['id'])
        reply_markup = {'keyboard': [[{'text': 'first button'}], [{'text': 'second button'}]], 'resize_keyboard': True,
                        'one_time_keyboard': True}
        reply_markup = json.dumps(reply_markup)
        params = ({'text': 'ss', 'chat_id': chat_id, 'reply_markup': reply_markup, 'disable_web_page_preview': 'true'})
        data = urllib.parse.urlencode(params).encode("utf-8")
        myresponse = urlopen(URL + 'sendMessage', data)
