How to use the SlidesSnippets tool properly for the Google Slides API in Python - python-3.x

So I am trying to automate creating Google Slides using SlidesSnippets
(https://github.com/gsuitedevs/python-samples/blob/master/slides/snippets/slides_snippets.py)
I have all the API credentials handled and can access my API service, but I am having a hard time understanding how to fully link the code with my Slides directory
(https://docs.google.com/presentation/u/0/?tgif=d)
I have taken the code from the GitHub repo and rewrote the __init__ section shown below:
# Imports needed by this snippet (oauth2client is deprecated, but it is what this code uses)
import httplib2
from googleapiclient.discovery import build
from oauth2client.client import GoogleCredentials

class SlidesSnippets(object):
    # def __init__(self, service, drive_service, sheets_service, credentials):
    def __init__(self):
        # self.credentials = credentials
        self.credentials = GoogleCredentials.get_application_default()
        scope = [
            'https://www.googleapis.com/auth/drive',
        ]
        self.credentials_scoped = self.credentials.create_scoped(scope)
        http = self.credentials_scoped.authorize(httplib2.Http())
        # self.service = service
        self.service = build('slides', 'v1', http=http)
        # self.drive_service = drive_service
        self.drive_service = build('drive', 'v3', http=http)
        # self.sheets_service = sheets_service
The commented-out lines are what was originally in the class, and I replaced them with my details.
So when I run this code:
import slides_snippets as slides
slides_init = slides.SlidesSnippets()
slides_dict = slides_init.create_presentation("TEST")
I get a response that looks like a slides ID tag, but when I try to open that presentation with the tag in the URL
(https://docs.google.com/presentation/d/OUTPUT_SLIDE_ID_FROM_create_presentation/edit)
it asks me to request access, and the presentation is nowhere to be seen in my Slides drive.
Did I mess anything up in my SlidesSnippets __init__ function?

I used the following functions to initialize it. Maybe they can help you.
# Imports needed by these snippets
import os
import pickle
from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build

class SlidesSnippets(object):
    def __init__(self):
        self.drive_credentials = None
        self.slides_credentials = None

    def drive_service(self, gdrive_SCOPES=['https://www.googleapis.com/auth/drive.file']):
        '''gdrive token'''
        gdrive_creds = None
        if os.path.exists('drivetoken.pickle'):
            with open('drivetoken.pickle', 'rb') as token:
                gdrive_creds = pickle.load(token)
        # If there are no (valid) credentials available, let the user log in.
        if not gdrive_creds or not gdrive_creds.valid:
            if gdrive_creds and gdrive_creds.expired and gdrive_creds.refresh_token:
                gdrive_creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'credentials.json', gdrive_SCOPES)
                gdrive_creds = flow.run_local_server(port=0)
            # Save the credentials for the next run
            with open('drivetoken.pickle', 'wb') as token:
                pickle.dump(gdrive_creds, token)
        drive_service = build('drive', 'v3', credentials=gdrive_creds)
        self.drive_service, self.drive_credentials = drive_service, gdrive_creds
        return self.drive_service, self.drive_credentials

    def slides_service(self, slides_SCOPES=['https://www.googleapis.com/auth/presentations']):
        '''slides token'''
        slides_creds = None
        if os.path.exists('slidetoken.pickle'):
            with open('slidetoken.pickle', 'rb') as token:
                slides_creds = pickle.load(token)
        # If there are no (valid) credentials available, let the user log in.
        if not slides_creds or not slides_creds.valid:
            if slides_creds and slides_creds.expired and slides_creds.refresh_token:
                slides_creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'credentials.json', slides_SCOPES)
                slides_creds = flow.run_local_server(port=0)
            # Save the credentials for the next run
            with open('slidetoken.pickle', 'wb') as token:
                pickle.dump(slides_creds, token)
        slides_service = build('slides', 'v1', credentials=slides_creds)
        self.slides_service, self.slides_credentials = slides_service, slides_creds
        return self.slides_service, self.slides_credentials
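
A minimal usage sketch of the class above, assuming credentials.json is a valid OAuth client file in the working directory; it builds both services and then creates a presentation directly with the Slides service (roughly what create_presentation does in the linked sample):

# Usage sketch (assumes credentials.json is present).
snippets = SlidesSnippets()
drive_service, drive_creds = snippets.drive_service()
slides_service, slides_creds = snippets.slides_service()

# Create a presentation as the user who completed the OAuth consent flow,
# so it shows up in that user's own Drive.
presentation = slides_service.presentations().create(
    body={'title': 'TEST'}).execute()
print('Created presentation with ID:',
      presentation.get('presentationId'))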

Related

Google API service object is not created when I make exe file

I'm trying to make an app that fetches data from a CSV file and adds events to Google Calendar using the Google API client in Python. Given below is my code for creating a service object.
import os
import pickle
from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build

def create_service(client_secret_file, api_name, api_version, *scopes, prefix=''):
    CLIENT_SECRET_FILE = client_secret_file
    API_SERVICE_NAME = api_name
    API_VERSION = api_version
    SCOPES = [scope for scope in scopes[0]]
    cred = None
    working_dir = os.getcwd()
    token_dir = 'token files'
    pickle_file = f'token_{API_SERVICE_NAME}_{API_VERSION}{prefix}.pickle'
    ### Check if token dir exists first, if not, create the folder
    if not os.path.exists(os.path.join(working_dir, token_dir)):
        os.mkdir(os.path.join(working_dir, token_dir))
    if os.path.exists(os.path.join(working_dir, token_dir, pickle_file)):
        with open(os.path.join(working_dir, token_dir, pickle_file), 'rb') as token:
            cred = pickle.load(token)
    if not cred or not cred.valid:
        if cred and cred.expired and cred.refresh_token:
            cred.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRET_FILE, SCOPES)
            cred = flow.run_local_server()
        with open(os.path.join(working_dir, token_dir, pickle_file), 'wb') as token:
            pickle.dump(cred, token)
    try:
        print(CLIENT_SECRET_FILE, API_SERVICE_NAME, API_VERSION, SCOPES, cred)
        service = build(serviceName=API_SERVICE_NAME, version='v3', http=SCOPES[0], developerKey="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")
        # service = build(API_SERVICE_NAME, API_VERSION, credentials=cred)
        print(API_SERVICE_NAME, API_VERSION, 'service created successfully')
        return service
    except Exception as e:
        print(e)
        print(f'Failed to create service instance for {API_SERVICE_NAME}')
And I create the service object using:
def main():
    CLIENT_SECRET_FILE = 'client_secret.json'
    API_NAME = 'calendar'
    API_VERSION = 'v3'
    SCOPES = ['https://www.googleapis.com/auth/calendar']
    service = create_service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)

if __name__ == '__main__':
    main()
    input()
The code works perfectly when it is run as a .py file. The main goal here is to make an exe file. When I make the exe file, an exception is raised and the code just prints print(f'Failed to create service instance for {API_SERVICE_NAME}').
Here is the output I get with the .py file,
and here is the output I get with the .exe file.
Is there any chance anyone could let me know if there is any possible solution for this?
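
For reference, a minimal sketch of the conventional build call (the pattern in the commented-out line inside create_service), assuming the OAuth credentials object is what should authorize the request; whether this changes the exe behaviour depends on what the exception actually is:

# Sketch: build the Calendar service from the OAuth credentials rather than
# passing a scope string as `http` and a client secret as `developerKey`.
service = build(API_SERVICE_NAME, API_VERSION, credentials=cred)
# Example call to confirm the service works (lists a few upcoming events).
events = service.events().list(calendarId='primary', maxResults=5).execute()
print(events.get('items', []))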

HttpError 403 when requesting None returned "Insufficient Permission

Uploading attachments from Gmail e-mails to Google Drive:
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import io
import base64
from googleapiclient.http import MediaIoBaseUpload
from time import sleep
q='has:attachment'
maxResults=int(input("Please specify the number of emails with attachments that you would like to see:"))
#for drive api---------------------------------------------------------------------------------
# If modifying these scopes, delete the file token.pickle.
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token_drive.pickle'):
    with open('token_drive.pickle', 'rb') as token_drive:
        creds = pickle.load(token_drive)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            'Credentials_drive.json', 'https://www.googleapis.com/auth/drive.metadata.readonly')
        creds1 = flow.run_local_server(port=0)
    # Save the credentials for the next run
    with open('token_drive.pickle', 'wb') as token_drive:
        pickle.dump(creds, token_drive)
drive_service = build('drive', 'v3', credentials=creds1)
sleep(5)
# for gmail api---------------------------------------------------------------------------------
# If modifying these scopes, delete the file token.pickle.
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
    with open('token.pickle', 'rb') as token:
        creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            'Credentials.json', 'https://www.googleapis.com/auth/gmail.readonly')
        creds = flow.run_local_server(port=0)
    # Save the credentials for the next run
    with open('token.pickle', 'wb') as token:
        pickle.dump(creds, token)
service = build('gmail', 'v1', credentials=creds)
# Call the Gmail API
results = service.users().labels().list(userId='me').execute()
#Get Messages
results = service.users().messages().list(userId='me',q=q, maxResults=maxResults ,labelIds=['INBOX']).execute()
messages = results.get('messages', [])
def create_folder_in_drive(service, folder_name, parent_folder=[]):
    file_metadata = {
        'name': folder_name,
        'parents': parent_folder,
        'mimeType': 'application/vnd.google-apps.folder'
    }

for message in messages:
    msg = service.users().messages().get(userId='me', metadataHeaders=['parts'], id=message['id']).execute()
    messageID = msg['threadId']
    messageSubject = '(No Subject)({0})'.format(messageID)
    msgdetail = msg.get('payload')
    for item in msgdetail['headers']:
        if item['name'] == 'Subject':
            if item['value']:
                messageSubject = '{0} ({1})'.format(item['value'], messageID)
            else:
                messageSubject = '(No Subject)({0})'.format(messageID)
    print("messagesubject:", messageSubject)
    # create drive folder
    folder_id = create_folder_in_drive(drive_service, messageSubject)
    if 'parts' in msgdetail:
        for msgPayload in msgdetail['parts']:
            mime_type = msgPayload['mimeType']
            file_name = msgPayload['filename']
            body = msgPayload['body']
            print(body)
            if 'attachmentId' in body:
                attachment_id = body['attachmentId']
                response = service.users().messages().attachments().get(
                    userId='me',
                    messageId=msg['id'],
                    id=attachment_id
                ).execute()
                file_data = base64.urlsafe_b64decode(
                    response.get('data').encode('UTF-8'))
                fh = io.BytesIO(file_data)
                file_metadata = {
                    'name': file_name,
                    'parents': [folder_id]
                }
                media_body = MediaIoBaseUpload(fh, mimetype=mime_type, chunksize=1024*1024, resumable=True)
                file = drive_service.files().create(
                    body=file_metadata,
                    media_body=media_body,
                    fields='id'
                ).execute()
Hello friends, if I delete the token.pickle and token_drive.pickle files (these files are created separately from Google Cloud) in the file directory and run the code, I get this error:
ResumableUploadError: <HttpError 403 when requesting None returned "Insufficient Permission: Request had insufficient authentication scopes.". Details: "[{'domain': 'global', 'reason': 'insufficientPermissions', 'message': 'Insufficient Permission: Request had insufficient authentication scopes.'}]">
When I run the code without deleting the pickle files, I instead get "NameError: name 'service' is not defined."
It seems like a problem with authenticating Gmail and Drive at the same time, because media_body and file_metadata do return values, but I couldn't solve the problem.
I'm on a phone and eye-balling your code.
You should be able to get a single token with scopes sufficient for Gmail and Drive rather than juggle multiple tokens.
Unpickling the objects may be causing some sort of collision too. Even though it's just for you, I'd recommend avoiding pickling as much as possible.
Here's a Google sample that shows the OAuth flow for Gmail using Python. The sample writes and reads tokens to disk as a file. Try this code with the scopes combined.
# for gmail and drive api------------------------------------------------------------
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly',
          'https://www.googleapis.com/auth/drive',
          'https://www.googleapis.com/auth/drive.file',
          'https://www.googleapis.com/auth/drive.metadata']
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
    with open('token.pickle', 'rb') as token:
        creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            'Credentials.json', SCOPES)
        creds = flow.run_local_server(port=0)
    # Save the credentials for the next run
    with open('token.pickle', 'wb') as token:
        pickle.dump(creds, token)
service = build('gmail', 'v1', credentials=creds)
drive_service = build('drive', 'v3', credentials=creds)
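
One more thing I noticed while eye-balling it: the create_folder_in_drive helper in your question builds file_metadata but never calls the Drive API or returns anything, so folder_id ends up as None. A minimal sketch of a complete version (same signature as yours) might look like this:

def create_folder_in_drive(service, folder_name, parent_folder=[]):
    file_metadata = {
        'name': folder_name,
        'parents': parent_folder,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    # Actually create the folder and return its id so it can be used as a parent.
    folder = service.files().create(body=file_metadata, fields='id').execute()
    return folder.get('id')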

Updating custom_attributes in Python using the script below

This is my first post, and I am still in my early days when it comes to Python; I cannot figure out why I've been getting the error below. This script can potentially be used to update custom attributes via the Google Admin SDK.
current_schemas = user['customSchemas']
KeyError: 'customSchemas'
https://github.com/pethron/gsuite-custom-schema-update
https://medium.com/faun/how-to-update-in-bulk-g-suite-users-custom-attributes-with-google-admin-sdk-cf6841d272d9
import pickle
import os.path
import yaml
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/admin.directory.user']
API_SERVICE = 'admin'
API_VERSION = 'directory_v1'
def get_credentials():
    """Shows basic usage of the Admin SDK Directory API.
    Prints the emails and names of the first 10 users in the domain.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            credentials_path = os.path.join(os.getcwd(), 'credentials.json')
            flow = InstalledAppFlow.from_client_secrets_file(
                credentials_path, SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    return creds

def update_saml_attributes(service, user, schema_config, federations, session_duration=28800):
    custom_schema_roles = []
    for federation in federations:
        custom_schema_roles.append(
            {
                'type': 'work',
                'value': "arn:aws:iam::{0}:role/{1},arn:aws:iam::{0}:saml-provider/{2}".format(
                    federation['account'], federation['role'], federation['provider'])
            }
        )
    current_schemas = user['customSchemas']
    user['customSchemas'][schema_config['name']] = {
        schema_config['session']: session_duration,
        schema_config['role']: custom_schema_roles
    }
    user.update({'customSchemas': current_schemas})
    ret = service.users().update(userKey=user['id'], body=user).execute()
    return ret['customSchemas']

def main():
    # Load the custom schema file
    custom_schema_file = os.path.join(os.getcwd(), 'custom-schema.yaml')
    with open(custom_schema_file, "r") as yaml_file:
        schema_config = yaml.safe_load(yaml_file)
    # Load the federation file
    federation_file = os.path.join(os.getcwd(), 'federation.yaml')
    with open(federation_file, "r") as yaml_file:
        federations = yaml.safe_load(yaml_file)
    # Get credentials and build the service client
    creds = get_credentials()
    service = build(API_SERVICE, API_VERSION, credentials=creds)
    # Call the Admin SDK Directory API
    orgPath = "orgUnitPath='/'"  # If needed, change to something like "orgUnitPath='/<my organizational unit>'"
    results = service.users().list(customer='my_customer',
                                   projection="full",
                                   query=orgPath,
                                   maxResults=2,
                                   orderBy='email').execute()
    users = results.get('users', [])
    if not users:
        print('No users in the domain.')
    else:
        print('Update users with the following customSchemas')
        for user in users:
            for email in user['emails']:
                for federation in federations:
                    if federation['email'] == email['address']:
                        userUpdated = update_saml_attributes(service, user, schema_config, federation['federations'])
                        print(u'{0} {1} {2}'.format(user['primaryEmail'], user['id'], userUpdated))

if __name__ == '__main__':
    main()
Any advice would be much appreciated.
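
For what it is worth, the KeyError suggests the user objects returned by users().list simply have no customSchemas key yet; users that have never had custom attribute values set may not include it, even with projection="full". A minimal sketch of one way to guard that lookup inside update_saml_attributes (assuming you want to start from an empty dict when the key is missing):

# Sketch: avoid the KeyError by falling back to an empty dict when the user
# record has no customSchemas key yet.
current_schemas = user.get('customSchemas', {})
current_schemas[schema_config['name']] = {
    schema_config['session']: session_duration,
    schema_config['role']: custom_schema_roles
}
user['customSchemas'] = current_schemas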

How to run python script with the same port

Right now I am setting up OAuth2 for Gmail to send mail from my Python script.
I am using quickstart code from Google to verify the authorization code, but I am facing a situation where the port always changes when I run the Python script.
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']
def main():
    """Shows basic usage of the Gmail API.
    Lists the user's Gmail labels.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                '../credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('gmail', 'v1', credentials=creds)
    # Call the Gmail API
    results = service.users().labels().list(userId='me').execute()
    labels = results.get('labels', [])
    if not labels:
        print('No labels found.')
    else:
        print('Labels:')
        for label in labels:
            print(label['name'])

if __name__ == '__main__':
    main()
My problem with this code is that whenever I run the script, the port changes, so I cannot set the redirect URI in the Google Cloud console.
My question is: how can I set a fixed port in the Python script?
For example, how about the following modification?
From:
creds = flow.run_local_server(port=0)
To:
creds = flow.run_local_server()
In this case, the port 8080 is used every time.
or
creds = flow.run_local_server(port=8000)
In this case, the port 8000 is used every time.
Reference:
run_local_server
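
In context, the modified part of the quickstart above might look like this (port 8000 is just an example; any free port should work):

flow = InstalledAppFlow.from_client_secrets_file(
    '../credentials.json', SCOPES)
# Bind the local redirect server to a fixed port instead of a random one (port=0).
creds = flow.run_local_server(port=8000)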

How do you programmatically get a list of Google Forms (with ids) using Google Forms API?

I would like to know if it is possible to get the list of a Google Drive user's Forms (not the Docs, Spreadsheets, ...) using the Forms Service or the Google Drive API.
The reason I am not sure is that there is no indication of how to do this on the Forms Service page, and on the Drive API's page the indications are very general. They don't take into account the fact that I will be using an OAuth2 token with a 'forms' scope and not a 'drive' scope.
/* List all Google Forms in your Google Drive */
function getGoogleFormsList() {
  var files = DriveApp.getFilesByType(MimeType.GOOGLE_FORMS);
  while (files.hasNext()) {
    var file = files.next();
    Logger.log("%s %s %s", file.getName(), file.getId(), file.getUrl());
  }
}
Python solution:
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']
def main():
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'client_secrets.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())
    try:
        page_token = None
        drive_service = build('drive', 'v3', credentials=creds)
        while True:
            response = drive_service.files().list(
                # You can use MIME types to filter query results
                q="mimeType='application/vnd.google-apps.form'",
                spaces='drive',
                fields='nextPageToken, files(id, name)',
                pageToken=page_token
            ).execute()
            for file in response.get('files', []):
                # Process change
                print('Found file: %s (%s)' % (file.get('name'), file.get('id')))
            page_token = response.get('nextPageToken', None)
            if page_token is None:
                break
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(f'An error occurred: {error}')

if __name__ == '__main__':
    main()
