pytest mocking with boto3 - python-3.x

I'm just learning Python mocking in general and struggling to use MagicMock and pytest with boto3.
Here is my code block:
def upload_to_s3(self, local_file, bucket, dest_file):
    self.local_file = local_file
    self.bucket = bucket
    self.dest_file = dest_file
    s3_client = self.prime_s3_client()  # this method returns the boto3 client
    try:
        s3_client.upload_file(local_file, bucket, dest_file)
        LOG_IT.info('File uploaded to S3 from: %s to %s.', local_file, dest_file)
    except Exception:
        LOG_IT.critical('The %s failed to upload to S3.', local_file)
This is the test that's not working:
def test_upload_to_s3(self, monkeypatch, aws):
    mock_s3_client = MagicMock()
    monkeypatch.setattr(boto3, 'client', mock_s3_client)
    mock_upload_file = MagicMock()
    monkeypatch.setattr(mock_s3_client, 'upload_file', mock_upload_file)
    push_to_s3 = aws.upload_to_s3('localfile', 'chumbucket', 'destfile')
    mock_upload_file.assert_called()
The error returned:
E AssertionError: Expected 'upload_file' to have been called.
Thank you!
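One likely cause, as a hedged guess: after monkeypatch.setattr(boto3, 'client', mock_s3_client), calling boto3.client('s3') returns mock_s3_client.return_value, not mock_s3_client itself, so the upload_file that actually runs lives on that return value. A minimal sketch of a test along those lines, assuming prime_s3_client ultimately calls boto3.client(...):

import boto3
from unittest.mock import MagicMock

def test_upload_to_s3(self, monkeypatch, aws):
    # Patch the factory; boto3.client(...) now returns mock_client.return_value
    mock_client = MagicMock()
    monkeypatch.setattr(boto3, 'client', mock_client)
    aws.upload_to_s3('localfile', 'chumbucket', 'destfile')
    # Assert on the client instance the code under test actually received
    mock_client.return_value.upload_file.assert_called_once_with(
        'localfile', 'chumbucket', 'destfile')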

Related

Unittest: How to test an upload file function with request.files

I am new to unittest and I want to test a function that uploads an .ond file, but I still don't know how to use request.files correctly to upload a file and send it in the test. Here is my code and test code:
def upload_ond_file():
    if 'ondFile' not in request.files:
        return 'No file'
    directory = 'src/BDDs/BDD_Onduleurs'
    if not os.path.isdir(path + directory):
        os.mkdir(path + directory)
    file = request.files['ondFile']
    file.save(os.path.join(path + directory, file.filename))
The test looks like this:
import unittest
from urllib import response
from flask_unittest import ClientTestCase
from requests import request
import src.Controller.InverterController as InverterController
from unittest import mock
import io

class SomefileTestCase(unittest.TestCase):
    def test_method_called_from_route(self):
        m = mock.MagicMock()
        request.method = "POST"
        testfile = open('C:/optisolar/optisolarbe/src/Test/uploads/test_controller_inverter/20170131_SC 2500-EV_MS_V6.ond', "r")
        data = testfile.read()
        data = {'files[]': testfile}
        with mock.patch("src.Controller.InverterController.request", m):
            result = InverterController.upload_ond_file()
            print(result)

if __name__ == '__main__':
    unittest.main()
But when I execute the code, it says that there is no file:
request.url = <MagicMock name='mock.url' id='1807586099888'>
request.path = <MagicMock name='mock.path' id='1807586136320'>
request.data = <MagicMock name='mock.data' id='1807586164448'>
request.method = <MagicMock name='mock.method' id='1807586192640'>
request.files = <MagicMock name='mock.files' id='1807586224928'>
request.filesname = <MagicMock name='mock.files.name' id='1807586253120'>
request.filespath = <MagicMock name='mock.files.path' id='1807586289360'>
request.values = <MagicMock name='mock.values' id='1807586317600'>
request.args = <MagicMock name='mock.args' id='1807586345984'>
request.form = <MagicMock name='mock.form' id='1807586374176'>
request.getjson = <MagicMock name='mock.get_json()' id='1807586422544'>
No file
C:\Python39\lib\unittest\case.py:550: ResourceWarning: unclosed file <_io.TextIOWrapper name='C:/optisolar/optisolarbe/src/Test/uploads/test_controller_inverter/20170131_SC 2500-EV_MS_V6.ond' mode='r' encoding='cp1252'>
method()
ResourceWarning: Enable tracemalloc to get the object allocation traceback
.
----------------------------------------------------------------------
Ran 1 test in 0.025s
OK
My question is: how can I upload a file (request.files) correctly and send it in the test? Thank you in advance.
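Not a definitive answer, but one plausible diagnosis from the output above: a bare MagicMock answers False to the membership test 'ondFile' in request.files, so the function returns 'No file' before ever touching the upload. A minimal sketch that gives the mocked request a real dict for files (the fake file object and its filename are assumptions for illustration; note the directory check/creation inside upload_ond_file still runs against the real filesystem):

import unittest
from unittest import mock
import src.Controller.InverterController as InverterController

class SomefileTestCase(unittest.TestCase):
    def test_upload_ond_file(self):
        m = mock.MagicMock()
        # A real dict makes "'ondFile' in request.files" behave normally
        fake_file = mock.MagicMock()
        fake_file.filename = 'test.ond'  # hypothetical name
        m.files = {'ondFile': fake_file}
        with mock.patch("src.Controller.InverterController.request", m):
            InverterController.upload_ond_file()
        # save() is a mock too, so nothing is actually written to disk
        fake_file.save.assert_called_once()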

"Handler'lambda_handler' missing on module 'lambda_function'", "errorType": "Runtime.HandlerNotFound"[ERROR] NameError: name 's3_resource' not defined

I am trying to create a Lambda function with the code below, and when I try to deploy and test the function I get errors. The idea is that Lambda should monitor a folder within S3, check whether objects have been in it for more than 1 hour, and notify me.
import boto3
from datetime import datetime, timedelta
from dateutil.tz import tzutc, UTC

BUCKET = 'my-bucket'
TOPIC_ARN = 'arn:aws:sns:ap-southeast-2:123456789012:Old-File-Warning'

s3_resource = boto3.resource('s3')
sns_resource = boto3.resource('sns')
sns_topic = sns_resource.Topic(TOPIC_ARN)

for object in s3_resource.Bucket(BUCKET).objects.filter(Prefix='folder1/folder2/'):
    if object.last_modified > datetime.now(tzutc()) - timedelta(hours=1):
        message = f"Object {object.key} is more than 1 hour old!"
        sns_topic.publish(Message=message)
I am getting this error: { "errorMessage": "Handler 'lambda_handler' missing on module 'lambda_function'", "errorType": "Runtime.HandlerNotFound", "stackTrace": [] }
When I try the below:
def handler_name(event, context):
    s3_resource = boto3.resource('s3')
    sns_resource = boto3.resource('sns')
    sns_topic = sns_resource.Topic(TOPIC_ARN)
    return

s3_resource
sns_topic
sns_resource
I get [ERROR] NameError: name 's3_resource' is not defined
What am I doing wrong?
The default name is lambda_handler, not handler_name. So to address your error it should be:
def lambda_handler(event, context):
    s3_resource = boto3.resource('s3')
    sns_resource = boto3.resource('sns')
    sns_topic = sns_resource.Topic(TOPIC_ARN)
    return
Please note that there are many more issues with your code. The answer addresses the error you reported only.
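For reference, a sketch of how the whole thing might be assembled once the loop moves inside the handler. Note that "more than 1 hour old" means last_modified is earlier than one hour ago, so the comparison arguably needs to be < rather than >:

import boto3
from datetime import datetime, timedelta
from dateutil.tz import tzutc

BUCKET = 'my-bucket'
TOPIC_ARN = 'arn:aws:sns:ap-southeast-2:123456789012:Old-File-Warning'

# Created once per container, reused across invocations
s3_resource = boto3.resource('s3')
sns_topic = boto3.resource('sns').Topic(TOPIC_ARN)

def lambda_handler(event, context):
    for obj in s3_resource.Bucket(BUCKET).objects.filter(Prefix='folder1/folder2/'):
        # Objects OLDER than one hour have an EARLIER last_modified timestamp
        if obj.last_modified < datetime.now(tzutc()) - timedelta(hours=1):
            sns_topic.publish(Message=f"Object {obj.key} is more than 1 hour old!")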

Python Boto3 put_object file from lambda in s3

I would like to send a JSON file to S3 from a Lambda. I saw in the documentation that boto3's put_object can take a file or a bytes object as the body (Body=b'bytes'|file).
But if I'm not wrong, if I send a file to S3 with Body=bytes and then download my file, the content will not be visible.
So in my Lambda function, I receive messages from an SQS queue and create a file with the message content in the Lambda temporary folder /tmp. I then want to take this JSON file and send it to my_bucket/folder/file.json.
I saw many examples of creating a file in S3, but the Body parameter there is bytes and not a file.
This is my code (Python 3.7):
def alpaca_consent_customer_dev(event, context):  # handler
    # TODO implement
    request_id = context.aws_request_id
    print('START - RequestID: {}'.format(request_id))

    # function to write json file
    def write_json(target_path, target_file, data):
        if not os.path.exists(target_path):
            try:
                os.makedirs(target_path)
            except Exception as e:
                print(e)
                raise
        with open(os.path.join(target_path, target_file), 'w') as f:
            json.dump(data, f)

    try:
        s3 = boto3.client('s3', region_name="us-west-2")
        request_id = context.aws_request_id
        print('START - RequestID: {}'.format(request_id))
        # Get message from SQS queue
        for record in event['Records']:
            data = record
            # Get message from SQS
            data_loaded = json.loads(data['body'])
            sns_message_id = data_loaded['MessageId']
            print('data loaded type:', type(data_loaded))
            data_saved = json.dumps(data_loaded)
            # Create json file in temporary folder
            write_json('/tmp', sns_message_id + '.json', data_saved)
            # Check if file exists
            print(glob.glob("/tmp/*.json"))
            # result: ['/tmp/3bb1c0bc-68d5-5c4d-b827-021301.json']
            s3.put_object(Body='/tmp/' + sns_message_id + '.json', Bucket='mybucket', Key='my_sub_bucket/' + datetime.datetime.today().strftime('%Y%m%d') + '/' + sns_message_id + '.json')
    except Exception as e:
        raise Exception('ERROR lambda failed: {}'.format(str(e)))
Thanks for your help. Regards.
There's an official example in the boto3 docs:
import logging
import boto3
from botocore.exceptions import ClientError

def upload_file(file_name, bucket, object_name=None):
    """Upload a file to an S3 bucket

    :param file_name: File to upload
    :param bucket: Bucket to upload to
    :param object_name: S3 object name. If not specified then file_name is used
    :return: True if file was uploaded, else False
    """
    # If S3 object_name was not specified, use file_name
    if object_name is None:
        object_name = file_name

    # Upload the file
    s3_client = boto3.client('s3')
    try:
        response = s3_client.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True
You can just use the upload_file method of the s3 client.
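Applied to the code in the question, that would look roughly like this inside the loop (bucket and key expressions copied from the question):

key = ('my_sub_bucket/' + datetime.datetime.today().strftime('%Y%m%d')
       + '/' + sns_message_id + '.json')
# upload_file streams the file's contents, not the path string
s3.upload_file('/tmp/' + sns_message_id + '.json', 'mybucket', key)

Alternatively, put_object does accept a file-like object as Body, so passing an open file handle (rather than the path string, which is what the question's code currently uploads) also works:

with open('/tmp/' + sns_message_id + '.json', 'rb') as f:
    s3.put_object(Body=f, Bucket='mybucket', Key=key)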

Writing string to S3 with boto3: "'dict' object has no attribute 'put'"

In an AWS lambda, I am using boto3 to put a string into an S3 file:
import boto3
s3 = boto3.client('s3')
data = s3.get_object(Bucket=XXX, Key=YYY)
data.put('Body', 'hello')
I am told this:
[ERROR] AttributeError: 'dict' object has no attribute 'put'
The same happens with data.put('hello'), which is the method recommended by the top answers at "How to write a file or data to an S3 object using boto3", and with data.put_object: 'dict' object has no attribute 'put_object'.
What am I doing wrong?
On the other hand, reading works great (with data.get('Body').read().decode('utf-8')).
put_object is a method of the s3 object, not the data object.
Here is a full working example with Python 3.7:
import json
import logging
import boto3

s3 = boto3.client('s3')
logger = logging.getLogger()
logger.setLevel(logging.INFO)

def lambda_handler(event, context):
    bucket = 'mybucket'
    key = 'id.txt'
    id = None
    # Write id to S3
    s3.put_object(Body='Hello!', Bucket=bucket, Key=key)
    # Read id from S3
    data = s3.get_object(Bucket=bucket, Key=key)
    id = data.get('Body').read().decode('utf-8')
    logger.info("Id:" + id)
    return {
        'statusCode': 200,
        'body': json.dumps('Id:' + id)
    }
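As an aside, the .put(...) form recommended in the linked question belongs to the higher-level resource API, where you call it on an Object, not on the dict that get_object returns. A short sketch for comparison:

import boto3

# Resource API: put() is a method of an Object, not of a response dict
s3_resource = boto3.resource('s3')
s3_resource.Object('mybucket', 'id.txt').put(Body='Hello!')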

TooManyRequestsException for Boto3 Client Organization

I am fetching all child accounts from the master AWS account with the boto3 Organizations client.
The code works fine and I am able to get the child account list.
But if I run my AWS Lambda function again, it fails to get the child accounts.
I get the following error:
Error while getting AWS Accounts : An error occurred (TooManyRequestsException) when calling the ListAccounts operation: AWS Organizations can't complete your request because another request is already in progress. Try again later
After 20 to 30 minutes, the code works once and then raises the above exception again.
I run this code via API Gateway + AWS Lambda.
Any idea?
Code:
import boto3

class Organizations(object):
    """AWS Organization"""

    def __init__(self, access_key, secret_access_key, session_token=None):
        self.client = boto3.client('organizations',
                                   aws_access_key_id=access_key,
                                   aws_secret_access_key=secret_access_key,
                                   aws_session_token=session_token
                                   )

    def get_accounts(self, next_token=None, max_results=None):
        """Get Accounts List"""
        if next_token and max_results:
            result = self.client.list_accounts(NextToken=next_token,
                                               MaxResults=max_results)
        elif next_token:
            result = self.client.list_accounts(NextToken=next_token)
        elif max_results:
            result = self.client.list_accounts(MaxResults=max_results)
        else:
            result = self.client.list_accounts()
        return result

class AWSAccounts(object):
    """ Return AWS Accounts information. """

    def get_aws_accounts(self, access_key, secret_access_key, session_token):
        """ Return List of AWS account Details."""
        org_obj = Organizations(access_key=access_key,
                                secret_access_key=secret_access_key,
                                session_token=session_token)
        aws_accounts = []
        next_token = None
        next_result = None
        while True:
            response = org_obj.get_accounts(next_token, next_result)
            for account in response['Accounts']:
                account_details = {"name": account["Name"],
                                   "id": account["Id"],
                                   "admin_role_name": self.account_role_name
                                   }
                aws_accounts.append(account_details)
            if "NextToken" not in response:
                break
            next_token = response["NextToken"]
        return aws_accounts
With exception handling, my code now runs successfully:
catch the TooManyRequestsException via the ClientError exception and retry the AWS list_accounts API call with boto3.
We can also add a time.sleep of 0.1 seconds before retrying.
Code:
import time
import botocore.exceptions

class AWSAccounts(object):
    """ Return AWS Accounts information. """

    def get_accounts(self, next_token=None, max_results=None):
        """Get Accounts List"""
        # If the master AWS account contains many child accounts (150+),
        # the AWS API (boto3) raises a Too-Many-Requests exception.
        # So to fix this issue, we call the API again via exception handling.
        result = None
        while True:
            try:
                if next_token and max_results:
                    result = self.client.list_accounts(NextToken=next_token,
                                                       MaxResults=max_results)
                elif next_token:
                    result = self.client.list_accounts(NextToken=next_token)
                elif max_results:
                    result = self.client.list_accounts(MaxResults=max_results)
                else:
                    result = self.client.list_accounts()
            except botocore.exceptions.ClientError as err:
                response = err.response
                print("Failed to list accounts:", response)
                if (response and response.get("Error", {}).get("Code") ==
                        "TooManyRequestsException"):
                    print("Continue for TooManyRequestsException exception.")
                    time.sleep(0.1)  # brief pause before retrying
                    continue
            break
        return result
Configure your boto3 client to use the built-in standard retry mode:
import boto3
from botocore.config import Config

config = Config(
    retries={
        'max_attempts': 10,
        'mode': 'standard'
    }
)

ec2 = boto3.client('ec2', config=config)
Per the documentation, the default mode is 'legacy' which doesn't handle TooManyRequestsException.
See boto3 documentation about retry configuration here: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
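The example above uses an EC2 client, but the same Config object can be passed to the Organizations client from this question:

org_client = boto3.client('organizations', config=config)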
