How to run python script with the same port - python-3.x

Right now I am setting up oauth2 from Gmail to send mail from my python script.
I am using a quick start code from Google to verify the authorize code but I am facing a situation where the port always changes when I am running the python script.
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']


def main():
    """Authenticate against the Gmail API and print the user's label names."""
    creds = None
    # token.pickle caches the user's access and refresh tokens; it is written
    # automatically the first time the authorization flow completes.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # With no valid cached credentials, refresh or run the browser login flow.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                '../credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Persist the credentials for the next run.
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('gmail', 'v1', credentials=creds)
    # Call the Gmail API and display the labels.
    results = service.users().labels().list(userId='me').execute()
    labels = results.get('labels', [])
    if not labels:
        print('No labels found.')
        return
    print('Labels:')
    for label in labels:
        print(label['name'])


if __name__ == '__main__':
    main()
My problem with this code is that whenever I run the script, it changes the port, so I cannot set a fixed redirect URI in the Google console.
My question is: how can I set a fixed port in the python script?

For example, how about the following modification?
From:
creds = flow.run_local_server(port=0)
To:
creds = flow.run_local_server()
In this case, the port 8080 is used every time.
or
creds = flow.run_local_server(port=8000)
In this case, the port 8000 is used every time.
Reference:
run_local_server

Related

HttpError 403 when requesting None returned "Insufficient Permission

Uploading from e-mail's attachments from Gmail to Google Drive:
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import io
import base64
from googleapiclient.http import MediaIoBaseUpload
from time import sleep
q='has:attachment'
maxResults=int(input("Please specify the number of emails with attachments that you would like to see:"))
#for drive api---------------------------------------------------------------------------------
# If modifying these scopes, delete the file token.pickle.
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token_drive.pickle'):
with open('token_drive.pickle', 'rb') as token_drive:
creds = pickle.load(token_drive)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
Credentials_drive.json', 'https://www.googleapis.com/auth/drive.metadata.readonly')
creds1 = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token_drive.pickle', 'wb') as token_drive:
pickle.dump(creds, token_drive)
drive_service= build('drive', 'v3', credentials=creds1)
sleep(5)
# for gmail api---------------------------------------------------------------------------------
# If modifying these scopes, delete the file token.pickle.
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'Credentials.json', 'https://www.googleapis.com/auth/gmail.readonly')
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token.pickle', 'wb') as token:
pickle.dump(creds, token)
service = build('gmail', 'v1', credentials=creds)
# Call the Gmail API
results = service.users().labels().list(userId='me').execute()
#Get Messages
results = service.users().messages().list(userId='me',q=q, maxResults=maxResults ,labelIds=['INBOX']).execute()
messages = results.get('messages', [])
def create_folder_in_drive(service,folder_name,parent_folder=[]):
file_metadata ={
'name': folder_name,
'parents': parent_folder,
'mimeType':'application/vnd.google-apps.folder'
}
for message in messages:
msg = service.users().messages().get(userId='me',metadataHeaders=['parts'], id=message['id']).execute()
messageID=msg['threadId']
messageSubject='(No Subject)({0})'.format(messageID)
msgdetail=msg.get('payload')
for item in msgdetail['headers']:
if item['name']=='Subject':
if item['value']:
messageSubject='{0} ({1})'.format(item['value'],messageID)
else:
messageSubject='(No Subject)({0})'.format(messageID)
print("messagesubject:" , messageSubject )
#create drive folder
folder_id=create_folder_in_drive(drive_service,messageSubject)
if 'parts' in msgdetail:
for msgPayload in msgdetail['parts']:
mime_type=msgPayload['mimeType']
file_name=msgPayload['filename']
body=msgPayload['body']
print(body)
if 'attachmentId' in body:
attachment_id=body['attachmentId']
response=service.users().messages().attachments().get(
userId='me',
messageId=msg['id'],
id=attachment_id
).execute()
file_data=base64.urlsafe_b64decode(
response.get('data').encode('UTF-8'))
fh=io.BytesIO(file_data)
file_metadata= {
'name':file_name,
'parents':[folder_id]
}
media_body=MediaIoBaseUpload(fh,mimetype=mime_type,chunksize=1024*1024,resumable=True)
file=drive_service.files().create(
body= file_metadata,
media_body=media_body,
fields='id'
).execute()
Hello friends, if I delete the token.pickle and token_drive.pickle files (these files are created separately from google cloud) in the file directory and run the code:
"ResumableUploadError: <HttpError 403 when requesting None returned "Insufficient Permission: Request had insufficient authentication scopes.". Details: "[{'domain': 'global', 'reason': 'insufficientPermissions', 'message': 'Insufficient Permission: Request had insufficient authentication scopes.'}]"> error", when I run the code without deleting the pickle files, I get the error which is "NameError: name 'service' is not defined."
It seems like a problem with authentication of Gmail and Drive at the same time because media_body and file_metadata return a value, but I couldn't solve the problem.
I'm on a phone and eye-balling your code.
You should be able to get a single token with scopes sufficient for Gmail and Drive rather than juggle multiple tokens.
Unpickling the objects may be causing some sort of collision too. Even though it's just for you, I'd recommend avoiding pickling as much as possible.
Here's a Google sample that shows the OAuth flow for Gmail using Python. The sample writes and reads tokens to disk as a file. Try this code with the 2 scopes.
# for gmail and drive api -----------------------------------------------------------
# If modifying these scopes, delete the file token.pickle.
SCOPES = [
    'https://www.googleapis.com/auth/gmail.readonly',
    'https://www.googleapis.com/auth/drive',
    'https://www.googleapis.com/auth/drive.file',
    'https://www.googleapis.com/auth/drive.metadata',
]
creds = None
# token.pickle caches the user's access and refresh tokens; it is created
# automatically the first time the authorization flow completes.
if os.path.exists('token.pickle'):
    with open('token.pickle', 'rb') as token:
        creds = pickle.load(token)
# With no valid cached credentials, refresh or run the browser login flow.
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file('Credentials.json', SCOPES)
        creds = flow.run_local_server(port=0)
    # Save the credentials for the next run.
    with open('token.pickle', 'wb') as token:
        pickle.dump(creds, token)
# A single credential object now authorizes both services.
service = build('gmail', 'v1', credentials=creds)
drive_service = build('drive', 'v3', credentials=creds)

Updating custom_attributes using Python using script below

This is my first post, and I am still in my early days when it comes to python; I cannot figure out why I've been getting the below error. This script can potentially be used to update custom attributes in Google Admin SDK.
**current_schemas = user['customSchemas']
KeyError: 'customSchemas'**
https://github.com/pethron/gsuite-custom-schema-update
https://medium.com/faun/how-to-update-in-bulk-g-suite-users-custom-attributes-with-google-admin-sdk-cf6841d272d9
import pickle
import os.path
import yaml
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/admin.directory.user']
API_SERVICE = 'admin'
API_VERSION = 'directory_v1'


def get_credentials():
    """Return valid Admin SDK user credentials, caching them in token.pickle."""
    creds = None
    # token.pickle stores the user's access and refresh tokens; it is created
    # automatically when the authorization flow completes for the first time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # Without valid cached credentials, refresh or run the browser login flow.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            credentials_path = os.path.join(os.getcwd(), 'credentials.json')
            flow = InstalledAppFlow.from_client_secrets_file(credentials_path, SCOPES)
            creds = flow.run_local_server(port=0)
        # Cache the credentials for the next run.
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    return creds
def update_saml_attributes(service, user, schema_config, federations, session_duration=28800):
    """Attach AWS SAML role attributes to *user*'s custom schema and push the update.

    Args:
        service: Authorized Admin SDK Directory service instance.
        user: User resource dict (must contain 'id'); mutated in place.
        schema_config: Mapping with 'name', 'session' and 'role' field names.
        federations: Iterable of dicts with 'account', 'role' and 'provider'.
        session_duration: Session duration value written to the schema.

    Returns:
        The 'customSchemas' section of the updated user resource.
    """
    custom_schema_roles = [
        {
            'type': 'work',
            'value': "arn:aws:iam::{0}:role/{1},arn:aws:iam::{0}:saml-provider/{2}".format(
                federation['account'], federation['role'], federation['provider']),
        }
        for federation in federations
    ]
    # Users that never had custom attributes carry no 'customSchemas' key at
    # all, which previously raised KeyError; default to an empty mapping.
    current_schemas = user.get('customSchemas', {})
    current_schemas[schema_config['name']] = {
        schema_config['session']: session_duration,
        schema_config['role']: custom_schema_roles
    }
    user['customSchemas'] = current_schemas
    ret = service.users().update(userKey=user['id'], body=user).execute()
    return ret['customSchemas']
def main():
    """Load the YAML config files, then update matching users' custom schemas."""
    # Load the custom schema definition.
    schema_path = os.path.join(os.getcwd(), 'custom-schema.yaml')
    with open(schema_path, "r") as yaml_file:
        schema_config = yaml.safe_load(yaml_file)
    # Load the per-user federation mapping.
    federation_path = os.path.join(os.getcwd(), 'federation.yaml')
    with open(federation_path, "r") as yaml_file:
        federations = yaml.safe_load(yaml_file)
    # Get credentials and build the Directory API client.
    creds = get_credentials()
    service = build(API_SERVICE, API_VERSION, credentials=creds)
    # Query users under the chosen organizational unit.
    orgPath = "orgUnitPath='/'"  # If need change like "orgUnitPath='/<my organizational unit>'"
    results = service.users().list(customer='my_customer',
                                   projection="full",
                                   query=orgPath,
                                   maxResults=2,
                                   orderBy='email').execute()
    users = results.get('users', [])
    if not users:
        print('No users in the domain.')
        return
    print('Update users with the following customSchemas')
    for user in users:
        for email in user['emails']:
            for federation in federations:
                if federation['email'] == email['address']:
                    userUpdated = update_saml_attributes(
                        service, user, schema_config, federation['federations'])
                    print(u'{0} {1} {2}'.format(user['primaryEmail'], user['id'], userUpdated))


if __name__ == '__main__':
    main()
Any advice would be much appreciated.

Getting 401 error when trying to delete gmail messages through API

I have gotten inspiration from
https://developers.google.com/gmail/api/quickstart/python and https://developers.google.com/gmail/api/v1/reference/users/labels/list#examples to get to the following code
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from apiclient import errors
import requests as req
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://mail.google.com/']
def chunks(l, n):
    """Yield successive n-sized slices of l; the final slice may be shorter."""
    start = 0
    while start < len(l):
        yield l[start:start + n]
        start += n
def ListMessagesWithLabels(service, user_id, label_ids=None):
    """List all Messages of the user's mailbox with label_ids applied.

    Args:
      service: Authorized Gmail API service instance.
      user_id: User's email address. The special value "me"
        can be used to indicate the authenticated user.
      label_ids: Only return Messages with these labelIds applied.

    Returns:
      List of Messages that have all required Labels applied. Note that the
      returned list contains Message IDs, you must use get with the
      appropriate id to get the details of a Message. Returns None when the
      API call fails.
    """
    # None instead of a mutable `[]` default avoids the shared-list pitfall.
    if label_ids is None:
        label_ids = []
    try:
        response = service.users().messages().list(userId=user_id,
                                                   labelIds=label_ids).execute()
        messages = []
        if 'messages' in response:
            messages.extend(response['messages'])
        # Follow pagination tokens until every page has been collected.
        while 'nextPageToken' in response:
            page_token = response['nextPageToken']
            response = service.users().messages().list(userId=user_id,
                                                       labelIds=label_ids,
                                                       pageToken=page_token).execute()
            messages.extend(response['messages'])
        return messages
    except errors.HttpError as error:
        # The original had a leftover Python 2 `print` statement split across
        # two lines; in Python 3 it silently discarded the error message.
        print('An error occurred: %s' % error)
def main():
    """Authenticate with Gmail, then delete every message under each
    user-created (non-system) label, in batches of up to 1000 ids."""
    creds = None
    # token.pickle stores the user's access and refresh tokens; it is created
    # automatically when the authorization flow completes for the first time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('gmail', 'v1', credentials=creds)
    # Call the Gmail API
    results = service.users().labels().list(userId='me').execute()
    labels = results.get('labels', [])
    if not labels:
        print('No labels found.')
        return
    print('Labels:')
    for label in labels:
        print(label['name'])
        # System labels (INBOX, SPAM, ...) have all-uppercase ids; only
        # user-created labels get their messages deleted.
        if label["id"].upper() != label["id"]:
            messages = ListMessagesWithLabels(service, 'me', [label['id']])
            if messages is not None:
                ids = [message["id"] for message in messages]
                # batchDelete accepts at most 1000 message ids per call.
                for batch in chunks(ids, 1000):
                    # Use the authorized API client instead of a raw
                    # requests.post: the raw request carried no OAuth token
                    # and was rejected with 401.
                    service.users().messages().batchDelete(
                        userId='me', body={"ids": batch}).execute()
                    print("all good")


if __name__ == '__main__':
    main()
The objective is to go through each label and delete all the messages.
All my POST requests got denied because I am not 'authorized', even though when I start the program I go through the authentication in the browser etc.
How am I supposed to construct my POSTs to be able to achieve what I want to do ?
When trying to delete the messages, you should be using the service you previously built. That's how you authenticated in the first place. When trying to use batchDelete here:
post_req = req.post(f"https://www.googleapis.com/gmail/v1/users/{usr_id}/messages/batchDelete", data={"ids":x})
You're just doing a basic request to the specified endpoint, you're not following the OAuth process. And because you're not accessing a public resource, you are getting an authorization error.
You should be using something along the following lines instead:
messagesToDelete = {
"ids": [
"messageID1",
"messageID2",
# ... rest of messages to delete
]
}
service.users().messages().batchDelete(userId="me", body=messagesToDelete).execute()
Reference:
Users.messages: batchDelete

I am trying to figure out how to use the google drive api along with python to update file permissions as well as ownership

I took the code from the Google Drive API page for managing shares and did some modifications to it to be able to transfer the ownership of a specific file but I keep getting back the following error
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
SCOPES = 'https://www.googleapis.com/auth/drive'
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_local_server()
# Save the credentials for the next run
with open('token.pickle', 'wb') as token:
pickle.dump(creds, token)
file_id = 'XXXXXX'
drive_service = build('drive', 'v3', credentials=creds)
def callback(request_id, response, exception):
if exception:
# Handle error
print(exception)
else:
print("Permission Id: %s" % response.get('id'))
batch = drive_service.new_batch_http_request(callback=callback)
user_permission = {
'type': 'user',
'role': 'writer',
'emailAddress': 'XXX#gmail.com'
}
batch.add(drive_service.permissions().create(
fileId=file_id,
body=user_permission,
fields='id',
))
domain_permission = {
'type': 'domain',
'role': 'reader',
'domain': 'example.com'
}
batch.add(drive_service.permissions().create(
fileId=file_id,
body=domain_permission,
fields='id',
))
batch.execute()
The error I get is that
<HttpError 403 when requesting https://www.googleapis.com/drive/v3/files/1w43iXVO04QhDl-eWHi75ImSdxu-_V5xp/permissions?fields=id&alt=json returned "Insufficient Permission: Request had insufficient authentication scopes.">
<HttpError 403 when requesting https://www.googleapis.com/drive/v3/files/1w43iXVO04QhDl-eWHi75ImSdxu-_V5xp/permissions?fields=id&alt=json returned "Insufficient Permission: Request had insufficient authentication scopes.">
But i am not sure if its an issue with the code itself or with how I setup the credentials for the API. does someone have experience with the Google Drive API for Python and can give me a pointer on what I am doing wrong?
Modify scopes:
SCOPES = ['https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/drive.appdata']
Delete the generated token.pickle file.
Then rerun the code.
Specific permissions can be viewed here: https://developers.google.com/drive/api/v3/reference/files/create?authuser=3#auth
May be obvious but your credentials don't have authorisation.
Have you made sure you're loading the correct credentials.json and scope?
You can get the same error using the quickstart credentials for different api products.
Try commenting this out when you load the correct credentials and scope:
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)

How do you programmatically get a list of Google Forms (with ids) using Google Forms API?

I would like to know if it is possible to get the list of a Google Drive user's Forms (not the Docs, Spreadsheets, ...) using the Forms Service or the Google Drive API.
The reason why I am not sure is because there is no indication on how to do this on the Forms Service page, and on the Drive API's page the indications are very general. They don't take into account the fact that I will be using a OAuth2 token with a 'forms' scope and not a 'drive' scope.
/* List all Google Forms in your Google Drive */
function getGoogleFormsList() {
  // Iterate every Drive file whose MIME type marks it as a Google Form.
  var formFiles = DriveApp.getFilesByType(MimeType.GOOGLE_FORMS);
  while (formFiles.hasNext()) {
    var formFile = formFiles.next();
    Logger.log("%s %s %s", formFile.getName(), formFile.getId(), formFile.getUrl());
  }
}
Python solution:
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']


def main():
    """Print the name and id of every Google Form visible to the user."""
    creds = None
    # token.json caches the user's access and refresh tokens; it is written
    # automatically the first time the authorization flow completes.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # Without valid cached credentials, refresh or run the browser login flow.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file('client_secrets.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run.
        with open('token.json', 'w') as token:
            token.write(creds.to_json())
    try:
        drive_service = build('drive', 'v3', credentials=creds)
        page_token = None
        while True:
            response = drive_service.files().list(
                # The MIME-type query restricts results to Google Forms.
                q="mimeType='application/vnd.google-apps.form'",
                spaces='drive',
                fields='nextPageToken, files(id, name)',
                pageToken=page_token
            ).execute()
            for file in response.get('files', []):
                print('Found file: %s (%s)' % (file.get('name'), file.get('id')))
            page_token = response.get('nextPageToken', None)
            if page_token is None:
                break
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(f'An error occurred: {error}')


if __name__ == '__main__':
    main()

Resources