Getting Quotes from eTrade API in Python3

I'm trying to get quotes from the E*TRADE API. I'm able to list accounts and get transactions, but not get quotes. I've tried removing the accounts and transactions API calls, but it makes no difference. I get an "oauth_problem=signature_invalid" response. Any ideas what I need to do differently?
from rauth import OAuth1Service
import webbrowser
import hmac
# required for google sheets
# from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request


class ETradeManager():
    def __init__(self):
        session = None
        service = None

    def connect_to_etrade(self):
        self.service = OAuth1Service(
            name='etrade',
            consumer_key='',
            consumer_secret='',
            request_token_url='https://apisb.etrade.com/oauth/request_token',
            access_token_url='https://apisb.etrade.com/oauth/access_token',
            authorize_url='https://us.etrade.com/e/t/etws/authorize?key={}&token={}',
            base_url='https://etsw.etrade.com')
        oauth_token, oauth_token_secret = self.service.get_request_token(
            params={'oauth_callback': 'oob',
                    'format': 'json'})
        auth_url = self.service.authorize_url.format(self.service.consumer_key, oauth_token)
        webbrowser.open(auth_url)
        verifier = input('Please input the verifier: ')
        print("Attempting to get session")
        self.session = self.service.get_auth_session(oauth_token, oauth_token_secret, params={'oauth_verifier': verifier})
        url = 'https://apisb.etrade.com/v1/accounts/list'
        resp = self.session.get(url, params={'format': 'json'})
        accountid = ""
        print(resp.text)
        trans_url_template = "https://apisb.etrade.com/v1/accounts/{}/transactions"
        trans_url = trans_url_template.format(accountid)
        resp = self.session.get(trans_url, params={'format': 'json'})
        f = open("trans.xml", "w")
        f.write(resp.text)
        # key = self.service.consumer_secret + \
        #       '&' + \
        #       oauth_token_secret
        # hashed = hmac.new(key.encode(), base_string.encode(), sha1)
        # def get_quote(self):
        quote_url_template = "https://apisb.etrade.com/v1/market/quote/{}"
        quote_url = quote_url_template.format("TSLA")
        resp = self.session.get(quote_url_template, params={'format': 'json'})
        f = open("quote.xml", "w")
        f.write(resp.text)


trade_manager = ETradeManager()
trade_manager.connect_to_etrade()
# trade_manager.get_quote()

Not sure if you figured this out, but you have a typo here:
resp = self.session.get(quote_url_template, params={'format': 'json'})
You should be using quote_url, not quote_url_template.
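For reference, a minimal sketch of the corrected call, using the same session and URL template as in the question:
quote_url_template = "https://apisb.etrade.com/v1/market/quote/{}"
quote_url = quote_url_template.format("TSLA")

# Request the quote with the formatted URL, not the raw template
resp = self.session.get(quote_url, params={'format': 'json'})
with open("quote.xml", "w") as f:
    f.write(resp.text)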

Related

Not able to read Tweets from Twitter

I am trying to read tweets containing specific keywords using Docker. I have taken the reference code from this GitHub link and made some minor changes. When I try to execute it, I get an error about the number of arguments even though all the details are in place. It would be great if anybody could point out where I'm going wrong.
### twitter
import tweepy
from tweepy.auth import OAuthHandler
from tweepy import Stream
#from tweepy.streaming import StreamListener
import json
import logging

### logging
FORMAT = "%(asctime)s | %(name)s - %(levelname)s - %(message)s"
LOG_FILEPATH = "C:\\docker-kafka\\log\\testing.log"
logging.basicConfig(
    filename=LOG_FILEPATH,
    level=logging.INFO,
    filemode='w',
    format=FORMAT)

### Authenticate to Twitter
with open('C:\\docker-kafka\\credential.json', 'r') as f:
    credential = json.load(f)
CONSUMER_KEY = credential['twitter_api_key']
CONSUMER_SECRET = credential['twitter_api_secret_key']
ACCESS_TOKEN = credential['twitter_access_token']
ACCESS_TOKEN_SECRET = credential['twitter_access_token_secret']
BEARER_TOKEN = credential['bearer_token']

#from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092',
                         value_serializer=lambda v: v.encode('utf-8'))  # Same port as your Kafka server
topic_name = "docker-twitter"


class twitterAuth():
    """SET UP TWITTER AUTHENTICATION"""
    def authenticateTwitterApp(self):
        auth = OAuthHandler(consumer_key=CONSUMER_KEY, consumer_secret=CONSUMER_SECRET)
        auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
        return auth


class TwitterStreamer():
    """SET UP STREAMER"""
    def __init__(self):
        self.twitterAuth = twitterAuth()

    def stream_tweets(self):
        while True:
            listener = ListenerTS()
            auth = self.twitterAuth.authenticateTwitterApp()
            stream = Stream(auth, listener)
            stream.filter(track=["Starbucks"], stall_warnings=True, languages=["en"])


class ListenerTS(tweepy.Stream):
    def on_status(self, status):
        tweet = json.dumps({
            'id': status.id,
            'text': status.text,
            'created_at': status.created_at.strftime("%Y-%m-%d %H:%M:%S")
        }, default=str)
        producer.send(topic_name, tweet)
        return True


if __name__ == "__main__":
    TS = TwitterStreamer()
    TS.stream_tweets()
Answer reference:
Not able to read Tweets from Twitter
As far as I understand, the class tweepy.Stream needs to be initialized with the credentials, even when inheriting from it. So, instead of creating the listener with no arguments, try:
class ListenerTS(tweepy.Stream):
    def __init__(self):
        tweepy.Stream.__init__(self, CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
Also check these docs:
https://docs.tweepy.org/en/stable/streaming.html
And maybe this link:
https://improveandrepeat.com/2022/04/python-friday-117-streaming-search-results-with-tweepy/
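Putting that together with the listener from the question, a minimal sketch of the Tweepy v4 approach (it assumes the same producer, topic_name, and credential variables defined above):
class ListenerTS(tweepy.Stream):
    def __init__(self):
        # tweepy.Stream (v4) is constructed directly with the app credentials
        tweepy.Stream.__init__(self, CONSUMER_KEY, CONSUMER_SECRET,
                               ACCESS_TOKEN, ACCESS_TOKEN_SECRET)

    def on_status(self, status):
        tweet = json.dumps({
            'id': status.id,
            'text': status.text,
            'created_at': status.created_at.strftime("%Y-%m-%d %H:%M:%S")
        }, default=str)
        producer.send(topic_name, tweet)


# The subclass starts the stream itself; no separate OAuthHandler/Stream pair is needed
listener = ListenerTS()
listener.filter(track=["Starbucks"], stall_warnings=True, languages=["en"])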
I was able to resolve the issue by adding the secrets but got a different error.
def stream_tweets(self):
    while True:
        listener = ListenerTS(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
        listener.filter(track=["Starbucks"], stall_warnings=True, languages=["en"])

How to handle method 'post' in google cloud function?

I'm trying to create a Cloud Function that goes to a certain Google Sheet and collects the necessary data; after that, it connects to BigQuery and writes the data to a BigQuery table. When I run the Apps Script that triggers the Cloud Function, I get the following message: Error: could not handle the request
The code of the Cloud Function (main.py):
from __future__ import print_function
import json
import os.path
import pickle
import functions_framework
import requests
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from google.auth.transport.requests import Request
from google.cloud import bigquery


class GoogleSheetService:
    # The settings of the particular google table
    SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
    TOKEN_PICKLE = 'settings/token.pickle'
    service = None
    # The settings for the bigquery service
    credentials_path = 'settings/pythonbq.privateKey.json'
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credentials_path
    client = bigquery.Client()
    table_id = 'cobalt-alliance-365419.BTC_Daily.table-btc'
    DATA = []

    def __init__(self):
        creds = None
        if os.path.exists(self.TOKEN_PICKLE):
            with open(self.TOKEN_PICKLE, 'rb') as token:
                creds = pickle.load(token)
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'settings/credentials.json', self.SCOPES)
                creds = flow.run_local_server(port=0)
            with open(self.TOKEN_PICKLE, 'wb') as token:
                pickle.dump(creds, token)
        self.service = build('sheets', 'v4', credentials=creds)

    def get_data(self, spreadsheet_id, range_name):
        sheet = self.service.spreadsheets()
        result = sheet.values().get(spreadsheetId=spreadsheet_id, range=range_name).execute()
        self.DATA = result.get('values', [])

    def get_row(self, data_of_column):
        r = []
        for row in self.DATA:
            if data_of_column == 'date':
                r.append(row[1].replace('0:00:00', '').rstrip())
            if data_of_column == 'symbol':
                r.append(row[2])
            if data_of_column == 'volume_BTC':
                r.append(float(row[4]))
            if data_of_column == 'volume_USD':
                r.append(float(row[5]))
        return r

    def sample_data(self, row1=None, row2=None, row3=None, row4=None):
        return {u'date': f'{row1}', u'symbol': f'{row2}', u'volume_BTC': f'{row3}', u'volume_USD': f'{row4}'}

    def write_data(self):
        rows_array = []
        number_of_rows = len(self.DATA)
        for i in range(number_of_rows):
            rows_array.append(self.sample_data(self.get_row('date')[i], self.get_row('symbol')[i],
                                               self.get_row('volume_BTC')[i], self.get_row('volume_USD')[i]))
        return rows_array

    def write_to_db(self):
        rows_to_insert = self.write_data()
        if not rows_to_insert:
            return 'Data is empty'
        errors = self.client.insert_rows_json(self.table_id, rows_to_insert)
        if not errors:
            return f'New rows have been added.'
        else:
            return f'Encountered errors while inserting rows: {errors}'


@functions_framework.http
def main(request):
    gs = GoogleSheetService()
    if requests.method == "GET":
        gs.get_data('164RTnYK49DvV2Ion45JHMCFQa8S', 'A2:F100')
        data_json = json.dumps(gs.DATA)
        data = {'data_json': data_json}
        return requests.get(data=data)
    elif requests.method == "POST":
        gs.get_data('164RTnYK49DvV2Ion45JHMCFQa8S', 'A2:F100')
        gs.write_to_db()
The Apps Script:
function callCloudRun() {
  const token = ScriptApp.getIdentityToken();
  var options = {
    'method': 'post',
    'headers': {'Authorization': 'Bearer ' + token},
  };
  options = {muteHttpExceptions: true};
  var response = UrlFetchApp.fetch(CLOUD_RUN_URL, options);
  Logger.log(response.getContentText());
}
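One thing that stands out in main.py: the handler checks requests.method, but requests is the HTTP client library; the incoming request object passed to main(request) is what carries .method. A minimal sketch of the handler under that assumption, reusing the question's GoogleSheetService:
@functions_framework.http
def main(request):
    # `request` is the incoming flask.Request; `requests` is the HTTP client library
    gs = GoogleSheetService()
    if request.method == "GET":
        gs.get_data('164RTnYK49DvV2Ion45JHMCFQa8S', 'A2:F100')
        # Return the sheet data directly as the response body
        return json.dumps(gs.DATA)
    elif request.method == "POST":
        gs.get_data('164RTnYK49DvV2Ion45JHMCFQa8S', 'A2:F100')
        return gs.write_to_db()
    return 'Unsupported method', 405
Note also that in the Apps Script the second options = {muteHttpExceptions: true} assignment replaces the object that holds the Authorization header, so the bearer token is never actually sent.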

'HTTPMessage' object is not callable when porting a Python 2 download script to Python 3

I'm trying to port this script to Python 3:
import re
from os.path import basename
import os
from urllib.parse import urlparse, urlsplit
from urllib.request import urlopen, Request
import urllib


def url2name(url):
    return basename(urlsplit(url)[2])


def download(url, out_path="."):
    localName = url2name(url)
    req = Request(url)
    r = urlopen(req)
    if r.info().has_key('Content-Disposition'):
        # If the response has Content-Disposition, we take file name from it
        localName = r.info()['Content-Disposition'].split('filename=')[1]
        if localName[0] == '"' or localName[0] == "'":
            localName = localName[1:-1]
    elif r.url != url:
        # if we were redirected, the real file name we take from the final URL
        localName = url2name(r.url)
    localName = os.path.join(out_path, localName)
    f = open(localName, 'wb')
    f.write(r.read())
    f.close()
but I get:
'HTTPMessage' object is not callable
r.info() seems to be the problem.
How do I get the header info in Python 3?
Try this; you should use context managers:
def download(url, out_path="."):
    localName = url2name(url)
    req = Request(url)
    with urlopen(req) as f:
        content_disp = f.getheader('Content-Disposition')
        if content_disp:
            # If the response has Content-Disposition, we take the file name from it
            localName = content_disp.split('filename=')[1]
            if localName[0] == '"' or localName[0] == "'":
                localName = localName[1:-1]
        elif f.url != url:
            # if we were redirected, we take the real file name from the final URL
            localName = url2name(f.url)
        localName = os.path.join(out_path, localName)
        with open(localName, 'wb') as fp:
            fp.write(f.read())
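A quick usage sketch (the URL is only a placeholder; it assumes url2name and the imports from the original script are in scope):
if __name__ == "__main__":
    # Saves the file into the current directory, naming it from
    # Content-Disposition or from the final (possibly redirected) URL
    download("https://example.com/some/file.zip", out_path=".")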

File upload using Flask

I am trying to implement a Python API to upload a file to my server, but for an unknown reason it doesn't run.
From my understanding, app.py is not recognised.
Here is my API.py
from flask_cors import CORS
from flask_restful import Api, Resource, reqparse
import sqlite3
import uuid
import os
import csv
import urllib.request
import threading
import queue as Queue
import subprocess
import json
import re
import datetime

app = Flask(__name__)
api = Api(app)
CORS(app)


class upload(Resource):
    def post(self):
        ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])

        def allowed_file(filename):
            return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

        def upload_file():
            # check if the post request has the file part
            if 'file' not in request.files:
                resp = jsonify({'message': 'No file part in the request'})
                resp.status_code = 400
                return resp
            file = request.files['file']
            int = str(request.form['int'])  # true or false
            if file.filename == '':
                resp = jsonify({'message': 'No file selected for uploading'})
                resp.status_code = 400
                return resp
            if file and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                return json.dumps(data), 200
            else:
                resp = jsonify({'message': 'Allowed file types are doc, etc.'})
                resp.status_code = 400
                return resp


api.add_resource(upload, "/api/v1/upload")
app.run(host='0.0.0.0', debug=True)
Here is my app.py
UPLOAD_FOLDER = '/home/xxxx/xxx/upload'
app = Flask(__name__)
#app.secret_key = "secret key"
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
Can you please help? Any suggestions?
As per the comment, it looks like you have two separate applications here.
I would just stick with the first one, API.py, but you'll need to move the lines where you set the config variables into API.py.
So after this line in API.py:
app = Flask(__name__)
Immediately set the config values:
UPLOAD_FOLDER = '/home/xxxx/xxx/upload'
#app.secret_key = "something super secret"
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
Then execute with:
python API.py
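Put together, the top of API.py would look roughly like this; note that the posted snippet also uses Flask, request, jsonify, and secure_filename without importing them, so those imports are assumed here:
from flask import Flask, request, jsonify
from flask_cors import CORS
from flask_restful import Api, Resource, reqparse
from werkzeug.utils import secure_filename
import os

UPLOAD_FOLDER = '/home/xxxx/xxx/upload'

app = Flask(__name__)
#app.secret_key = "something super secret"
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024

api = Api(app)
CORS(app)

# ... the upload Resource and api.add_resource(...) from API.py follow here ...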

twitter feed sentiment analysis

I am doing Twitter feed sentiment analysis based on a keyword, but I keep getting Error 406.
The error mostly seems to come from the sentiment_Analysis function, because when I remove it I get no error.
Please find my code below:
from __future__ import absolute_import, print_function
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import urllib.request
import urllib.parse
import urllib.error

consumer_key = 'qr****************T1'  # consumer key
consumer_secret = 'ZbkO**********************U9Y'  # consumer secret
access_token = '989***************************2kd'  # access token
access_token_secret = 'Fv****************************qY7'  # access secret

# For sentiment Analysis we will be using Sentigem API key
akhil_auth = "297**********************************hB"


def sentiment_Analysis(text):
    encoded_text = urllib.parse.quote(text)
    #API_Call= "https://api.sentigem.com/external/get-sentiment?api-key="+akhil_auth+"&text="+text
    # output=urllib.urlopen(API_Call).read
    output = urllib.request.urlopen(API_Call)
    return output


class StdOutListener(StreamListener):
    def on_data(self, data):
        tweet = data.split(',"text":"')[1].split('","source')[0]
        sentimentRating = sentiment_Analysis(tweet)
        saveTweets = tweet + '::' + sentimentRating + '\n'
        output = open('output.csv', 'a')
        output.write(saveTweets)
        output.close()
        print("Conversion successful.")
        return True

    def on_error(self, status):
        print(status)


if __name__ == '__main__':
    l = StdOutListener()
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    stream = Stream(auth, l)
    #topic = input("Read tweets cotaining: ")
    stream.filter("Burger King")
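For what it's worth, two things stand out in the posted code: stream.filter() is given a positional argument instead of track=, and sentiment_Analysis references API_Call although the line that builds it is commented out. A minimal sketch of those two spots, keeping the question's variable names:
def sentiment_Analysis(text):
    encoded_text = urllib.parse.quote(text)
    # Restore the commented-out API_Call line, using the URL-encoded text
    API_Call = ("https://api.sentigem.com/external/get-sentiment?api-key="
                + akhil_auth + "&text=" + encoded_text)
    # Read and decode the body so it can be concatenated with strings later
    output = urllib.request.urlopen(API_Call).read().decode('utf-8')
    return output

# ...and pass the keyword via the track parameter rather than positionally:
stream.filter(track=["Burger King"])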
