A CloudWatch log on AWS is structured as Log Group > Log Stream > Log Events. I am trying to write tests for this, but moto's mocking raises a client error when applied to boto3.client('logs'). I am looking at other ways to mock the behavior of the log. How would you write a test for this function?
For example:
client = boto3.client('logs')
def get_recent_log_stream_name(logGroupName):
    response = client.describe_log_streams(
        logGroupName=logGroupName,
        orderBy='LastEventTime',
        descending=True,
        limit=1)
    logStreamName = response['logStreams'][0]['logStreamName']
    return logStreamName
I would write the test using moto like this:
import boto3
from moto import mock_logs
def get_recent_log_stream_name(logs, logGroupName):
    """Function under test"""
    response = logs.describe_log_streams(
        logGroupName=logGroupName,
        orderBy='LastEventTime',
        descending=True,
        limit=1)
    log_stream_name = response['logStreams'][0]['logStreamName']
    return log_stream_name
@mock_logs
def test_get_recent_log_stream_name():
    """Test function"""
    log_group_name = 'test-group'
    log_stream_name = 'test-stream'
    logs = boto3.client('logs')
    logs.create_log_group(logGroupName=log_group_name)
    logs.create_log_stream(
        logGroupName=log_group_name,
        logStreamName=log_stream_name,
    )
    assert get_recent_log_stream_name(logs, log_group_name) == log_stream_name
test_get_recent_log_stream_name()
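If the client error you originally hit came from missing credentials or region (a common cause when the client is created outside the mock), the usual companion is a fixture that sets dummy environment variables before any client is created. A minimal sketch, following moto's documented recommendation (the values are placeholders):
import os
import pytest

@pytest.fixture(autouse=True)
def aws_credentials():
    """Dummy credentials and region so the tests never touch real AWS."""
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
    os.environ["AWS_SECURITY_TOKEN"] = "testing"
    os.environ["AWS_SESSION_TOKEN"] = "testing"
    os.environ["AWS_DEFAULT_REGION"] = "us-east-1"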
Related
Hi, I have the following class/function:
class A:
    def __init__(self, aws_profile_name, aws_region, ec2_id):
        self.session = boto3.session.Session(profile_name=aws_profile_name, region_name=aws_region)
        self.ec2 = EC2(self.session, aws_region, id=ec2_id)

class EC2:
    def __init__(self, session, aws_region, id):
        self.session = session
        self.region = aws_region
        self.id = id
        self.ec2_resouce = self.session.resource("ec2", region_name=self.region)
        self.ec2_client = self.session.client("ec2", region_name=self.region)
        self.instance = self.filter_ec2_by_id()

    def filter_ec2_by_id(self):
        return self.filter(
            InstanceIds=[
                self.get_instance_id(),
            ]
        )

    def filter(self, **kwargs):
        instances = self.ec2_resouce.instances.filter(**kwargs)
        instance_list = [instance for instance in instances]
        return instance_list
And here is my test, test.py:
from unittest import mock
import sys
from moto import mock_ec2
import boto3

@mock_ec2
def test_mock_session():
    mock_session_obj = mock.Mock()
    ec2 = boto3.resource("ec2", region_name="us-east-1")
    client = boto3.client("ec2", region_name="us-east-1")
    reservation = client.run_instances(ImageId="ami-1234", MinCount=2, MaxCount=2)
    instance1 = ec2.Instance(reservation["Instances"][0]["InstanceId"])
    ec2 = EC2(mock_session_obj, region="us-east-1", id=instance1)

test_mock_session()
With the above test code, the test fails with the following error:
instance_list = [instance for instance in instances]
TypeError: 'Mock' object is not iterable
I think that is because the EC2 filter returns a collection, but I am not sure how I should mock the result. Any recommendation is welcome.
TIA
Moto will already mock boto3 for you, and intercept any calls to AWS, so there is no need to use mock_session_obj.
Just use EC2(boto3.session.Session(), region="us-east-1", id=instance1). When calling the filter-function, Moto will intercept it and return any created instances in the correct format.
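A rough sketch of the test rewritten along those lines (assuming EC2 is importable from your module, here called my_module, that get_instance_id() returns the id passed to the constructor, and that an instance ID string is what the class expects; the AMI is picked from moto's pre-loaded images):
import boto3
from moto import mock_ec2

from my_module import EC2  # hypothetical import path for the class under test


@mock_ec2
def test_filter_ec2_by_id():
    # Moto intercepts these calls, so no real AWS resources are touched.
    client = boto3.client("ec2", region_name="us-east-1")
    image_id = client.describe_images()["Images"][0]["ImageId"]  # one of moto's pre-loaded AMIs
    reservation = client.run_instances(ImageId=image_id, MinCount=1, MaxCount=1)
    instance_id = reservation["Instances"][0]["InstanceId"]

    # A real session instead of mock.Mock(); Moto also mocks the resource
    # and client that EC2.__init__ creates from it.
    ec2 = EC2(boto3.session.Session(), "us-east-1", id=instance_id)
    assert len(ec2.instance) == 1
    assert ec2.instance[0].id == instance_id
The key point is that the session, resource, and client are all real boto3 objects created while @mock_ec2 is active, so Moto's backend supplies the data instead of a Mock.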
I have a sync script which is running and working well, but I see that some file downloads take time, so I thought of using an async approach here.
import json
import os
import io
import time
import gzip
import re
import logging
from logging.handlers import RotatingFileHandler
import boto3
AWS_KEY = "**"
AWS_SECRET = "**"
QUEUE_URL = "***"
OUTPUT_PATH = "./test"
VISIBILITY_TIMEOUT = 10
REGION_NAME = "region"
sqs = boto3.resource('sqs', region_name=REGION_NAME, aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)
s3 = boto3.client('s3', region_name=REGION_NAME, aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)
queue = sqs.Queue(url=QUEUE_URL)
def handle_response(msg, path):
    """Logic goes here"""
    print('message: %s' % msg)

def download_message_files(msg):
    for s3_file in msg['files']:
        s3_path = s3_file['path']
        with io.BytesIO() as f:
            s3.download_fileobj(msg['bucket'], s3_path, f)
            f.seek(0)
            for line in gzip.GzipFile(fileobj=f):
                handle_response(line.decode('UTF-8'), s3_path)

def consume():
    while True:
        for msg in queue.receive_messages(VisibilityTimeout=VISIBILITY_TIMEOUT):
            body = json.loads(msg.body)  # grab the actual message body
            download_message_files(body)
            msg.delete()
        time.sleep(sleep_time)
if __name__ == '__main__':
    # Setup our root logger
    logging.basicConfig(level=logging.INFO, format="%(asctime)s %(name)s %(levelname)s %(message)s")
    # Create our FDR logger
    logger = logging.getLogger("Consumer")
    # Rotate log file handler
    RFH = RotatingFileHandler("test.log", maxBytes=20971520, backupCount=5)
    # Log file output format
    F_FORMAT = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
    # Set the log file output level to INFO
    RFH.setLevel(logging.INFO)
    # Add our log file formatter to the log file handler
    RFH.setFormatter(F_FORMAT)
    # Add our log file handler to our logger
    logger.addHandler(RFH)
    consume()
I have tried converting this using aioboto3 and got stuck with the queue approach.
session = aioboto3.Session()
sqs = session.resource('sqs', region_name=REGION_NAME, aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)
s3 = session.client('s3', region_name=REGION_NAME, aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)
queue = sqs.Queue(url=QUEUE_URL)  # <---- this gives an error: 'ResourceCreatorContext' object has no attribute 'Queue'
As I understand it, the attribute does not exist there, but could anyone guide me on making this work in an async way?
You can use asyncio and aioboto3 together.
Instead of creating a resource, you can use a client. The difference between an aioboto3.client and aioboto3.resource can be found in this answer.
This is a simple working example:
import aioboto3

async def consume():
    async with aioboto3.Session().client(service_name='sqs', region_name=REGION_NAME, aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET) as client:
        # Note: the SQS client API is receive_message (singular) and takes the queue URL.
        response = await client.receive_message(QueueUrl=QUEUE_URL, VisibilityTimeout=VISIBILITY_TIMEOUT)
        for message in response.get('Messages', []):
            # Do something
            ...
This should solve the error you are facing. This solution can also be extended to S3 as per your requirements.
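For the S3 part, a rough sketch along the same lines, reusing the constants and handle_response from your script (this assumes aioboto3's async-wrapped download_fileobj, which mirrors the boto3 call):
import io
import gzip
import aioboto3

async def download_message_files(msg):
    session = aioboto3.Session()
    async with session.client('s3', region_name=REGION_NAME,
                              aws_access_key_id=AWS_KEY,
                              aws_secret_access_key=AWS_SECRET) as s3:
        for s3_file in msg['files']:
            s3_path = s3_file['path']
            with io.BytesIO() as f:
                # aioboto3 exposes the same download_fileobj call, awaited.
                await s3.download_fileobj(msg['bucket'], s3_path, f)
                f.seek(0)
                for line in gzip.GzipFile(fileobj=f):
                    handle_response(line.decode('UTF-8'), s3_path)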
I am trying to mock Secrets Manager. Here is the code written for Secrets Manager using boto3, which I am trying to mock and test.
utils.py
import os
import json
import boto3

secret_id = os.environ.get("SECRETS")
client = boto3.client('secretsmanager')
response = client.get_secret_value(SecretId=secret_id)
secrets = json.loads(response['SecretString'])

S3_BUCKET_NAME = secrets["S3_BUCKET_NAME"]
SQS_QUEUE_NAME = secrets["SQS_Queue_Name"]
These variables are then used in different methods.
conftest.py
@pytest.fixture(scope='session', autouse=True)
def secret_manager_resource(aws_credentials):
    """Secret Manager mock client"""
    with mock_secretsmanager():
        conn = boto3.client("secretsmanager", region_name="us-east-1")
        logger.info(f"Secret manager connection {conn}")
        yield conn
test_file.py
@contextmanager
def secret_manager_setup(secret_manager_resource):
    secret_manager_resource.create_secret(Name="test", SecretString="""{"S3_BUCKET_NAME": "test","SQS_Queue_Name": "test_queue"}""")
    yield

class TestSecretManager:
    def test_secret_manager(self, secret_manager_resource):
        with secret_manager_setup(secret_manager_resource):
            try:
                result = secret_manager_resource.get_secret_value(SecretId="test")
                json_result = json.loads(result['SecretString'])
                assert json_result["S3_BUCKET_NAME"] == "test"
                assert json_result["SQS_Queue_Name"] == "test_queue"
            except Exception as err:
                print("Error ---", err)

class TestClass:
    def test_some_class(self, test_var):
        from functions.something.some import something
        something = someClass({}, param)
When I run pytest it goes directly into TestClass, calls Secrets Manager, and throws an error because it is trying to connect to the actual Secrets Manager. Could someone suggest what can be done to overcome this issue?
TestClass is not mocked - so I wouldn't expect that to work. You could use Moto as a class-decorator to ensure everything inside someClass is mocked.
Note that the class-decorator creates a mock around test-methods only, so the code-under-test would have to be inside a test-method for this to work.
@mock_secretsmanager()
class TestClass:
    def test_something(self):
        from functions.something.some import something
        something = someClass({}, param)
See http://docs.getmoto.org/en/latest/docs/getting_started.html#class-decorator for the documentation and more examples around this.
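Since utils.py calls Secrets Manager at import time, the import itself has to happen inside the mocked test, after the secret exists. A rough sketch of that idea (the SECRETS variable name and the secret contents mirror your snippets; the region and import path are placeholders):
import json
import os

import boto3
from moto import mock_secretsmanager


@mock_secretsmanager()
class TestClass:
    def test_some_class(self):
        # Region and secret id so the module-level boto3 calls in utils.py can run.
        os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
        os.environ["SECRETS"] = "test"

        # Create the secret that utils.py expects before importing it.
        client = boto3.client("secretsmanager", region_name="us-east-1")
        client.create_secret(
            Name="test",
            SecretString=json.dumps({"S3_BUCKET_NAME": "test", "SQS_Queue_Name": "test_queue"}),
        )

        # Import inside the mocked test so the module-level calls are intercepted.
        import utils
        assert utils.S3_BUCKET_NAME == "test"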
I am trying to create a small service to respond to Envoy's rate limiting queries. I have compiled all the relevant protobuf files, and the one relevant for the service I am trying to implement is here:
https://github.com/envoyproxy/envoy/blob/v1.17.1/api/envoy/service/ratelimit/v3/rls.proto
There is a service definition in there, but inside the "compiled" Python file, all I see about it is this:
_RATELIMITSERVICE = _descriptor.ServiceDescriptor(
    name='RateLimitService',
    full_name='envoy.service.ratelimit.v3.RateLimitService',
    file=DESCRIPTOR,
    index=0,
    serialized_options=None,
    create_key=_descriptor._internal_create_key,
    serialized_start=1531,
    serialized_end=1663,
    methods=[
        _descriptor.MethodDescriptor(
            name='ShouldRateLimit',
            full_name='envoy.service.ratelimit.v3.RateLimitService.ShouldRateLimit',
            index=0,
            containing_service=None,
            input_type=_RATELIMITREQUEST,
            output_type=_RATELIMITRESPONSE,
            serialized_options=None,
            create_key=_descriptor._internal_create_key,
        ),
    ])
_sym_db.RegisterServiceDescriptor(_RATELIMITSERVICE)
DESCRIPTOR.services_by_name['RateLimitService'] = _RATELIMITSERVICE
Here is my feeble attempt at implementing the service:
import logging
import asyncio
import grpc

from envoy.service.ratelimit.v3.rls_pb2 import RateLimitResponse, RateLimitRequest


class RL:
    def ShouldRateLimit(self, request):
        result = RateLimitResponse()


def add_handler(servicer, server):
    rpc_method_handlers = {
        'ShouldRateLimit': grpc.unary_unary_rpc_method_handler(
            RL.ShouldRateLimit,
            request_deserializer=RateLimitRequest.FromString,
            response_serializer=RateLimitResponse.SerializeToString,
        )
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'envoy.service.ratelimit.v3.RateLimitService',
        rpc_method_handlers
    )
    server.add_generic_rpc_handlers((generic_handler,))


async def serve():
    server = grpc.aio.server()
    add_handler(RL(), server)
    listen_addr = '[::]:5051'
    server.add_insecure_port(listen_addr)
    logging.info(f'Starting server on {listen_addr}')
    await server.start()


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    asyncio.run(serve())
How am I supposed to return (or even instantiate) a RateLimitResponse back to the caller?
I am fetching all child accounts from the master AWS account using the boto3 Organizations client.
The code is working fine; I am able to get the child account list.
But if I run my AWS Lambda function again, it fails to get the child accounts.
I get the following error:
Error while getting AWS Accounts : An error occurred (TooManyRequestsException) when calling the ListAccounts operation: AWS Organizations can't complete your request because another request is already in progress. Try again later
After 20 to 30 minutes, my code works once and then raises the above exception again.
I run this code via API Gateway + AWS Lambda.
Any idea?
Code:
import boto3


class Organizations(object):
    """AWS Organization"""

    def __init__(self, access_key, secret_access_key, session_token=None):
        self.client = boto3.client('organizations',
                                   aws_access_key_id=access_key,
                                   aws_secret_access_key=secret_access_key,
                                   aws_session_token=session_token
                                   )

    def get_accounts(self, next_token=None, max_results=None):
        """Get Accounts List"""
        if next_token and max_results:
            result = self.client.list_accounts(NextToken=next_token,
                                               MaxResults=max_results)
        elif next_token:
            result = self.client.list_accounts(NextToken=next_token)
        elif max_results:
            result = self.client.list_accounts(MaxResults=max_results)
        else:
            result = self.client.list_accounts()
        return result


class AWSAccounts(object):
    """ Return AWS Accounts information. """

    def get_aws_accounts(self, access_key, secret_access_key, session_token):
        """ Return List of AWS account Details."""
        org_obj = Organizations(access_key=access_key,
                                secret_access_key=secret_access_key,
                                session_token=session_token)
        aws_accounts = []
        next_token = None
        next_result = None
        while True:
            response = org_obj.get_accounts(next_token, next_result)
            for account in response['Accounts']:
                account_details = {"name": account["Name"],
                                   "id": account["Id"],
                                   "admin_role_name": self.account_role_name
                                   }
                aws_accounts.append(account_details)
            if "NextToken" not in response:
                break
            next_token = response["NextToken"]
        return aws_accounts
With exception handling, my code now runs successfully.
I catch the TooManyRequestsException via the ClientError exception and retry the AWS list_accounts API call with boto3.
We can also add a time.sleep of 0.1 seconds between retries.
Code:
import time

import botocore.exceptions


class AWSAccounts(object):
    """ Return AWS Accounts information. """

    def get_accounts(self, next_token=None, max_results=None):
        """Get Accounts List"""
        # If the master AWS account contains many child accounts (150+), a
        # TooManyRequestsException is raised by the AWS API (boto3).
        # To fix this, we call the API again via exception handling.
        result = None
        while True:
            try:
                if next_token and max_results:
                    result = self.client.list_accounts(NextToken=next_token,
                                                       MaxResults=max_results)
                elif next_token:
                    result = self.client.list_accounts(NextToken=next_token)
                elif max_results:
                    result = self.client.list_accounts(MaxResults=max_results)
                else:
                    result = self.client.list_accounts()
            except botocore.exceptions.ClientError as err:
                response = err.response
                print("Failed to list accounts:", response)
                if (response and response.get("Error", {}).get("Code") ==
                        "TooManyRequestsException"):
                    print("Continue for TooManyRequestsException exception.")
                    time.sleep(0.1)  # brief pause before retrying, as mentioned above
                    continue
            break
        return result
Configure your boto3 client to use the built-in standard retry mode:
import boto3
from botocore.config import Config

config = Config(
    retries={
        'max_attempts': 10,
        'mode': 'standard'
    }
)

ec2 = boto3.client('ec2', config=config)
Per the documentation, the default mode is 'legacy' which doesn't handle TooManyRequestsException.
See boto3 documentation about retry configuration here: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
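Applied to the Organizations client from the question, that would look roughly like this (a sketch; only the config argument is new):
import boto3
from botocore.config import Config


# Drop-in variant of the Organizations class from the question: with the
# 'standard' retry mode, boto3 itself retries TooManyRequestsException with
# exponential backoff, so no manual retry loop is needed.
class Organizations(object):
    """AWS Organization"""

    def __init__(self, access_key, secret_access_key, session_token=None):
        retry_config = Config(retries={'max_attempts': 10, 'mode': 'standard'})
        self.client = boto3.client('organizations',
                                   aws_access_key_id=access_key,
                                   aws_secret_access_key=secret_access_key,
                                   aws_session_token=session_token,
                                   config=retry_config)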