Password protection for a file attachment in Python - python-3.x

I am currently reading a file from Azure Blob Storage and sending it by email. I would like to add password protection to the attachment.
Below is my current code, which sends an email with the attachment:
import base64
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import (Mail, Attachment, FileContent, FileName, FileType)
from azure.storage.blob import BlockBlobService

# mail details
to = To_address
from_email = From_Address
subject = Subject
content = '''
Test Content'''

# create mail object
message = Mail(
    from_email=from_email,
    to_emails=to,
    subject=subject,
    html_content=content
)

# read the content from azure blob storage
blob_service = BlockBlobService(account_name=Blob_Storage_Account, account_key=account_key)

# read the export file inside the blob container
export_file_data = blob_service.get_blob_to_bytes(Blob_Container, export_file_name)
export_file_mail_content = export_file_data.content

# create the export file attachment
export_file_encoded = base64.b64encode(export_file_mail_content).decode()
export_file_attachment = Attachment()
export_file_attachment.file_content = FileContent(export_file_encoded)
export_file_attachment.file_type = FileType('application/txt')
export_file_attachment.file_name = FileName(export_file_name.split('/')[-1])
message.add_attachment(export_file_attachment)

# send mail with the above attachment
try:
    mail = SendGridAPIClient(send_grid_api_key)
    response = mail.send(message)
except Exception as e:
    print(str(e))
Are there any possibilities here to add a password to the file located in Azure storage before sending the email?
I tried pyminizip, but it does not support my Python version (3.6). Is there any suggestion apart from an open-source module?
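For reference, one commonly suggested approach (it is still an open-source module, so it may not fit the constraint above) is pyzipper, which reportedly works on Python 3.6 and can build an AES-encrypted zip entirely in memory, so the blob never has to touch disk. A rough sketch against the variables used in the code above; the password value is hypothetical:

# Sketch only, not the asker's code: wrap the blob bytes in a password-protected
# zip in memory with pyzipper, then attach the zip instead of the raw file.
import io
import base64
import pyzipper  # assumes pyzipper is installed

zip_buffer = io.BytesIO()
with pyzipper.AESZipFile(zip_buffer, 'w',
                         compression=pyzipper.ZIP_DEFLATED,
                         encryption=pyzipper.WZ_AES) as zf:
    zf.setpassword(b'my-secret-password')  # hypothetical password
    zf.writestr(export_file_name.split('/')[-1], export_file_mail_content)

export_file_encoded = base64.b64encode(zip_buffer.getvalue()).decode()
export_file_attachment = Attachment()
export_file_attachment.file_content = FileContent(export_file_encoded)
export_file_attachment.file_type = FileType('application/zip')
export_file_attachment.file_name = FileName('export.zip')
message.add_attachment(export_file_attachment)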

Related

Loading data from a GCS bucket to a SharePoint folder

I am working on a POC where I have to load data from a GCS bucket to a SharePoint location.
I am using the below code but am not able to get the desired result.
# Import the storage library
from google.cloud import storage
client = storage.Client()
bucket_name = 'my-bucket'
file_name = 'my-file.csv'
# Download the file from GCS
bucket = client.bucket(bucket_name)
blob = bucket.blob(file_name)
blob.download_to_filename(file_name)
# Import the office365-rest-python-client library
from office365.runtime.auth.authentication_context import AuthenticationContext
from office365.sharepoint.client_context import ClientContext
from office365.sharepoint.files.file import File
# Set the SharePoint site URL
site_url = 'https://7rhjkshshgvavvd.sharepoint.com/sites/MyDemo/testing/'
# Authenticate with SharePoint
context = AuthenticationContext(url=site_url)
if context.acquire_token_for_user(username="XXXXXX", password="XXXXXX"):
    print("Authenticated with SharePoint")
else:
    print("Failed to authenticate with SharePoint")
# Construct a ClientContext object
client_context = ClientContext(site_url, context)
# Set the path to the file you want to upload
# Upload the file to SharePoint
file_creation_info = File.from_local_file(client_context)
sp_file = file_creation_info.upload()
client_context.execute_query()
print(f'File uploaded to SharePoint: {sp_file.server_relative_url}')
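Not a confirmed fix, but one pattern office365-rest-python-client documents is authenticating with with_credentials and calling upload_file on a target folder. A minimal sketch under two assumptions: the site URL should point at the site root (not a subfolder), and the target library/folder name ("Shared Documents/testing") is hypothetical:

# Sketch only: upload the file downloaded from GCS above to a SharePoint document library.
from office365.runtime.auth.user_credential import UserCredential
from office365.sharepoint.client_context import ClientContext

site_url = 'https://7rhjkshshgvavvd.sharepoint.com/sites/MyDemo'  # site root, assumed
ctx = ClientContext(site_url).with_credentials(UserCredential("XXXXXX", "XXXXXX"))

# hypothetical target library/folder
target_folder = ctx.web.get_folder_by_server_relative_url('Shared Documents/testing')

with open(file_name, 'rb') as f:  # file_name was downloaded from GCS above
    target_folder.upload_file(file_name, f.read()).execute_query()

print('Uploaded', file_name, 'to SharePoint')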

How would I save a file received by discord.py

I was working on my Discord bot, trying to implement an email feature where you embed a file and the bot downloads it and sends it back out to the server. I ran into the issue that I have no idea how to begin saving the file. You can find my code at https://github.com/Omar-Alabdalla/DiscordBot; the specific files with the emailing feature are mailFunctions (the Discord commands part) and basicMail (the email commands part).
I looked through the nextcord docs and couldn't find any simple way that I could understand. I probably just missed what I was supposed to find, though.
Discord command code:
@commands.command()
async def mailFile(self, ctx, *stuff):
    # received if/else statement from Stack Overflow: https://stackoverflow.com/questions/65169339/download-csv-file-sent-by-user-discord-py
    if str(ctx.attachments) == "[]":  # This checks if there is an attachment on the message
        return "You didn't include a file"
    else:
        await save("mailFile
The mailing class code:
def sendFileMail(rmail, message):
    mail_content = '''Hello,
This is a test mail.
In this mail we are sending some attachments.
The mail is sent using Python SMTP library.
Thank You
'''
    # Setup the MIME
    message = MIMEMultipart()
    message['From'] = sender_email
    message['To'] = rmail
    message['Subject'] = 'A test mail sent by Python. It has an attachment.'
    # The body and the attachments for the mail
    message.attach(MIMEText(mail_content, 'plain'))
    attach_file_name = 'TP_python_prev.pdf'
    attach_file = open(attach_file_name, 'rb')  # Open the file in binary mode
    payload = MIMEBase('application', 'octet-stream')
    payload.set_payload(attach_file.read())
    encoders.encode_base64(payload)  # encode the attachment
    # add payload header with the filename
    payload.add_header('Content-Disposition', 'attachment', filename=attach_file_name)
    message.attach(payload)
    # Create SMTP session for sending the mail
    session = smtplib.SMTP('smtp.gmail.com', 587)  # use Gmail with port 587
    session.starttls()  # enable security
    session.login(sender_email, password)  # login with mail id and password
    text = message.as_string()
    session.sendmail(sender_email, rmail, text)
    session.quit()
    print('Mail Sent')
Apologies for not including the code before; this is my first time posting on Stack Overflow.
See Attachment.save: https://nextcord.readthedocs.io/en/latest/api.html?highlight=attachment#nextcord.Attachment.save
for attachment in ctx.message.attachments:
    await attachment.save(attachment.filename)
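Putting that together with the command from the question, a minimal sketch (Attachment.save is the documented API; everything else mirrors the question's code, and the hand-off to the mailing helper is left as a placeholder):

@commands.command()
async def mailFile(self, ctx, *stuff):
    if not ctx.message.attachments:  # no attachment on the invoking message
        await ctx.send("You didn't include a file")
        return
    for attachment in ctx.message.attachments:
        await attachment.save(attachment.filename)  # writes the file to disk
        # the saved filename can then be passed to the mailing code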

How do I add an attachment to a daily scheduled email in AWS (SES, Lambda, CloudWatch)?

1) I have found a piece of code (below) in Node.js which lets me send an email alert at 9am, and it works fine. I need to develop this further so that it attaches the latest file from an S3 bucket.
2) I have found other code in Python which sends an email with the file attached as soon as it is uploaded to the S3 bucket.
I can't get the extra functionality from 2 to work in 1.
Below is the code for the function which successfully schedules an email, but I need to be able to attach the latest file in an S3 bucket. I also don't know the name of the file, as it will be uploaded by someone else.
All I need to do is to send the latest file in the S3 bucket as an attachment at 9am daily.
I have the Cloudwatch schedule feature already setup.
const AWS = require('aws-sdk');
AWS.config.update({
    region: 'eu-west-2'
})
const ses = new AWS.SES();
const s3 = new AWS.S3();

exports.handler = async (event) => {
    const params = {
        Destination: {
            ToAddresses: ['abc@abc.com']
        },
        Message: {
            Subject: {Data: 'Daily Email'},
            Body: {
                Text: {Data: 'Hello: \n\n Good Morning: Here is your 9am Alert!!! \n\n'}
            }
        },
        Source: 'xyz@xyz.com'
    };
    await ses.sendEmail(params).promise().then(response => {
        console.log('Successfully sent email!!!');
    }, error => {
        console.error('An error occured while attempting to send email: ', error);
    })
};
I managed to get a Python version working which sends the attachment, but for some reason it doesn't include the file name on the attachment; it gives the attachment a name that is a string of random characters.
Does anyone know how to include the file name on the attached file, and which part of my code would be causing the issue? I know the attachment name is "TEST.pdf"; can I just add the name to the successfully attached file?
import boto3
import os.path
import email
from botocore.exceptions import ClientError
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.application import MIMEApplication

def lambda_handler(event, context):
    ses = boto3.client("ses")
    s3 = boto3.client("s3")
    AWS_REGION = "aws_region"
    bucket_name = 'testing'
    object_name = 'TEST.pdf'
    fileObj = s3.get_object(Key=object_name, Bucket='testing')
    key = str(fileObj)
    sender = "abc@abc.com"
    to = "xyz@xyz.com"
    subject = 'Email from abc'
    body = """Hello, xyz Please see the attached file related to recent submission.
<br>
This email is from blahblahblah:
"""
    file_name = os.path.basename(key)
    tmp_file_name = '/tmp/' + file_name
    s3.download_file(bucket_name, object_name, tmp_file_name)
    ATTACHMENT = tmp_file_name
    # att = tmp_file_name
    # The email body for recipients with non-HTML email clients.
    BODY_TEXT = "Hello, xyz Please see the attached file related to recent submission."
    # The HTML body of the email.
    BODY_HTML = """\
<html>
<head></head>
<body>
<h1>Hello!!!</h1>
<p>Please see the attached file related to TEST.pdf latest upload to S3 Bucket.</p>
</body>
</html>
"""
    # The character encoding for the email.
    CHARSET = "utf-8"
    # Create a new SES resource and specify a region.
    client = boto3.client('ses', region_name=AWS_REGION)
    # Create a multipart/mixed parent container.
    msg = MIMEMultipart('mixed')
    # Add subject, from and to lines.
    msg['Subject'] = subject
    msg['From'] = sender
    msg['To'] = to
    # Create a multipart/alternative child container.
    msg_body = MIMEMultipart('alternative')
    # Encode the text and HTML content and set the character encoding. This step is
    # necessary if you're sending a message with characters outside the ASCII range.
    textpart = MIMEText(BODY_TEXT.encode(CHARSET), 'plain', CHARSET)
    htmlpart = MIMEText(BODY_HTML.encode(CHARSET), 'html', CHARSET)
    # Add the text and HTML parts to the child container.
    msg_body.attach(textpart)
    msg_body.attach(htmlpart)
    # Define the attachment part and encode it using MIMEApplication.
    att = MIMEApplication(open(ATTACHMENT, 'rb').read())
    # Add a header to tell the email client to treat this part as an attachment,
    # and to give the attachment a name.
    att.add_header('Content-Disposition', 'attachment', filename=os.path.basename(ATTACHMENT))
    # Attach the multipart/alternative child container to the multipart/mixed
    # parent container.
    msg.attach(msg_body)
    # Add the attachment to the parent container.
    msg.attach(att)
    print(msg)
    try:
        # Provide the contents of the email.
        response = client.send_raw_email(
            Source=sender,
            Destinations=[
                to
            ],
            RawMessage={
                'Data': msg.as_string(),
            },
            # ConfigurationSetName=CONFIGURATION_SET
        )
    # Display an error if something goes wrong.
    except ClientError as e:
        print(e.response['Error']['Message'])
    else:
        print("Email sent! Message ID:"),
        print(response['MessageId'])
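Not presented as the definitive fix, but one likely culprit in the code above is key = str(fileObj): that stringifies the whole get_object response dictionary, so os.path.basename(key) produces the garbled-looking name. A sketch of an alternative, which also covers picking the newest object when the name is unknown (it assumes the bucket only contains the files to be mailed):

# Sketch: find the most recently modified object in the bucket and use its key,
# so both the temp file and the attachment keep the real file name.
objects = s3.list_objects_v2(Bucket=bucket_name).get('Contents', [])
latest = max(objects, key=lambda o: o['LastModified'])  # newest upload
object_name = latest['Key']

file_name = os.path.basename(object_name)  # e.g. 'TEST.pdf'
tmp_file_name = '/tmp/' + file_name
s3.download_file(bucket_name, object_name, tmp_file_name)

att = MIMEApplication(open(tmp_file_name, 'rb').read())
att.add_header('Content-Disposition', 'attachment', filename=file_name)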

Unable to set X-SES-CONFIGURATION-SET headers in AWS SES

I am using this script to send email via AWS SES. I am able to receive the email in my inbox, but I am not able to see the X-SES-CONFIGURATION-SET header in the received email. I also tried using the Simple email body type with ConfigurationSetName, but still no luck. Any help is highly appreciated.
import boto3
from botocore.exceptions import ClientError
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# Replace sender@example.com with your "From" address.
# This address must be verified with Amazon SES.
SENDER = "Niranj Raja <niranj@xxx.com>"
# Replace recipient@example.com with a "To" address. If your account
# is still in the sandbox, this address must be verified.
RECIPIENT = "niranj@xxx.com"
# Specify a configuration set. If you do not want to use a configuration
# set, comment the following variable, and the
# ConfigurationSetName=CONFIGURATION_SET argument below.
CONFIGURATION_SET = "test-me"
# The subject line for the email.
SUBJECT = "Amazon SES Test (SDK for Python)"
# The email body for recipients with non-HTML email clients.
BODY_TEXT = ("Amazon SES Test (Python)\r\n"
             "This email was sent with Amazon SES using the "
             "AWS SDK for Python (Boto)."
             )
# The HTML body of the email.
BODY_HTML = """<html>
<head></head>
<body>
<h1>Amazon SES Test (SDK for Python)</h1>
<p>This email was sent with
<a href='https://aws.amazon.com/ses/'>Amazon SES</a> using the
<a href='https://aws.amazon.com/sdk-for-python/'>
AWS SDK for Python (Boto)</a>.</p>
</body>
</html>
"""
# The character encoding for the email.
CHARSET = "UTF-8"
msg = MIMEMultipart('mixed')
# Add subject, from and to lines.
msg['Subject'] = SUBJECT
msg['From'] = SENDER
msg['To'] = RECIPIENT
#msg['X-SES-CONFIGURATION-SET'] = CONFIGURATION_SET
msg.add_header('X-SES-CONFIGURATION-SET', CONFIGURATION_SET)
print(dir(msg))
print('')
# Create a multipart/alternative child container.
msg_body = MIMEMultipart('alternative')
print(dir(msg_body))
# Encode the text and HTML content and set the character encoding. This step is
# necessary if you're sending a message with characters outside the ASCII range.
textpart = MIMEText(BODY_TEXT.encode(CHARSET), 'plain', CHARSET)
htmlpart = MIMEText(BODY_HTML.encode(CHARSET), 'html', CHARSET)
msg_body.attach(textpart)
msg_body.attach(htmlpart)
# Create a new SES resource and specify a region.
client = boto3.client(
    'sesv2',
    aws_access_key_id='test',
    aws_secret_access_key='test1',
    region_name='region-10'
)
# Try to send the email.
try:
    # Provide the contents of the email.
    response = client.send_email(
        FromEmailAddress=SENDER,
        Destination={
            'ToAddresses': [
                RECIPIENT,
            ],
        },
        Content={
            'Raw': {
                'Data': msg_body.as_string()
            }
        }
    )
    print(response)
# Display an error if something goes wrong.
except ClientError as e:
    print(e.response['Error']['Message'])
else:
    print("Email sent! Message ID:"),
    print(response['MessageId'])
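One observation, offered as a guess rather than a confirmed answer: the code sets the X-SES-CONFIGURATION-SET header on msg, but then sends msg_body.as_string(), and msg_body is never attached to msg, so the header never makes it into the raw message. A sketch of the adjustment (the SESv2 send_email call also accepts a ConfigurationSetName parameter as an alternative to the raw header):

# Sketch: attach the alternative part to the mixed parent that carries the
# X-SES-CONFIGURATION-SET header, then send the parent as the raw message.
msg.attach(msg_body)

response = client.send_email(
    FromEmailAddress=SENDER,
    Destination={'ToAddresses': [RECIPIENT]},
    Content={'Raw': {'Data': msg.as_string()}},
    # ConfigurationSetName=CONFIGURATION_SET,  # alternative to the raw header
)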

OSError: [Errno 30] Read-only file system: any way to create a CSV and attach it to an email from Python in AWS Lambda?

So I am trying to create a Lambda function that queries a MySQL database, writes the query output to a CSV file, attaches the created file, and sends an email using Python.
I have working code to achieve this, but I am unable to execute the same in AWS Lambda.
Here's the code I'm working on right now:
engine = create_engine("mysql+pymysql://username:password@MYSQL DB Creds")
con_mysql = engine.connect()

# dump_attachment_query_link
attach = pd.read_sql("SELECT * FROM some_table", con_mysql)

# NAMING_FILES
# start, yest are dates
def filenames(start, yest):
    if start == yest:
        return "Dump_{}.csv".format(yest)
    else:
        return "Dump_{}_to_{}.csv".format(start, yest)

attach.reset_index(drop=True, inplace=True)
att = attach.to_csv(filenames(start, yest))
files = filenames(start, yest)

def send_mail(fromaddr, subject, message):
    access_token, expires_in = refresh_authorization(GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET, GOOGLE_REFRESH_TOKEN)
    auth_string = generate_oauth2_string(fromaddr, access_token, as_base64=True)
    msg = MIMEMultipart('related')
    msg['Subject'] = subject + ": %s" % yest
    msg['From'] = fromaddr
    msg['To'] = "receivers'mail"
    msg.preamble = 'This is a multi-part message in MIME format.'
    msg_alternative = MIMEMultipart('alternative')
    msg.attach(msg_alternative)
    part_text = MIMEText(lxml.html.fromstring(message).text_content().encode('utf-8'), 'plain', _charset='utf-8')
    part_html = MIMEText(message.encode('utf-8'), 'html', _charset='utf-8')
    msg_alternative.attach(part_text)
    msg_alternative.attach(part_html)
    part = MIMEBase('application', "octet-stream")
    part.set_payload(open(files, "rb").read())
    encoders.encode_base64(part)
    part.add_header('Content-Disposition', 'attachment; filename = {}'.format(files))
    msg.attach(part)
    server = smtplib.SMTP('smtp.gmail.com:587')
    server.ehlo(GOOGLE_CLIENT_ID)
    server.starttls()
    server.docmd('AUTH', 'XOAUTH2 ' + auth_string)
    server.sendmail(fromaddr, msg['To'].split(",") + msg['Cc'].split(","), msg.as_string())
    server.quit()
When I use the code I get the following error: [ERROR] OSError: [Errno 30] Read-only file system: 'Dump_{}_to_{}.csv'
I'm quite new to Lambda and Python.
Any help regarding this is appreciated.
The error says you cannot write to the file system, and according to your code you try to create the CSV file there.
You don't need to create a real file to build an attachment; in-memory streams are enough.
Alternatively, your Lambda is allowed to write to the /tmp directory.
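A minimal sketch of the in-memory approach described above, assuming the same attach DataFrame and the msg/files names from the question:

# Write the CSV into memory instead of onto the read-only file system,
# then attach the buffer contents directly.
import io
from email.mime.base import MIMEBase
from email import encoders

csv_buffer = io.StringIO()
attach.to_csv(csv_buffer, index=False)  # CSV goes into the buffer, not to disk

part = MIMEBase('application', 'octet-stream')
part.set_payload(csv_buffer.getvalue().encode('utf-8'))
encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename={}'.format(files))
msg.attach(part)

# If a real file is required, write it under /tmp instead, e.g. '/tmp/' + files.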
