NameError - "name 'ssm_parameter_namee' is not defined" - aws-ssm

I am trying to update a parameter in SSM Parameter Store and got the error below. What mistake am I making? Please clarify.
Lambda Code:
#Lambda code
import json
import logging

import boto3

logger = logging.getLogger()
logger.setLevel(logging.INFO)

ssm_client = boto3.client('ssm')
parameter_name = ''


def lambda_handler(event, context):
    logger.info('Printing event: {}'.format(event))
    process_sns_event(event)
    return None


def process_sns_event(event):
    for record in event['Records']:
        event_message = record['Sns']['Message']
        # convert the event message to json
        message_json = json.loads(event_message)
        # obtain the image state
        image_state = message_json['state']['status']
        # obtain the image name
        image_name = message_json['name']

        # assign SSM parameter based on image_name
        #parameter_name = f'/ec2-image-builder/{{image_name}}/latest'
        def path(imagename):
            first = "/ec2-image-builder/"
            last = "/latest"
            result = first + imagename + last
            return result

        parameter_name = path(image_name)
        logger.info('image_name: {}'.format(image_name))
        logger.info('ssm_parameter_name: {}'.format(parameter_name))

        # update the SSM parameter if the image state is available
        if image_state == 'AVAILABLE':
            logger.info('Image is available')
            # obtain ami id
            ami = message_json['outputResources']['amis'][0]
            recipe_name = message_json['name']
            logger.info('AMI ID: {}'.format(ami['image']))

            # update SSM parameter
            response = ssm_client.put_parameter(
                #Name=parameter_name,
                Name='/ec2-image-builder/linux/latest',
                Description='Latest AMI ID',
                Value=ami['image'],
                Type='String',
                Overwrite=True,
                Tier='Standard'
            )
            logger.info('SSM Updated: {}'.format(response))

            # add tags to the SSM parameter
            ssm_client.add_tags_to_resource(
                ResourceType='Parameter',
                ResourceId=ssm_parameter_namee,
                Tags=[
                    {
                        'Key': 'Source',
                        'Value': 'EC2 Image Builder'
                    },
                    {
                        'Key': 'AMI_REGION',
                        'Value': ami['region']
                    },
                    {
                        'Key': 'AMI_ID',
                        'Value': ami['image']
                    },
                    {
                        'Key': 'AMI_NAME',
                        'Value': ami['name']
                    },
                    {
                        'Key': 'RECIPE_NAME',
                        'Value': recipe_name
                    },
                    {
                        'Key': 'SOURCE_PIPELINE_ARN',
                        'Value': message_json['sourcePipelineArn']
                    },
                ],
            )
    return None
Error output
Response on test:
{ "errorMessage": "name 'ssm_parameter_namee' is not defined",
"errorType": "NameError", "requestId":
"54ad245c-84f3-4c46-9e9b-1798f86a8bce", "stackTrace": [
" File "/var/task/lambda_function.py", line 19, in lambda_handler\n process_sns_event(event)\n",
" File "/var/task/lambda_function.py", line 71, in process_sns_event\n ResourceId=ssm_parameter_namee,\n" ] }

The answer is in your error ...
Typo: name or namee? Is it ssm_parameter_namee or ssm_parameter_name?
I highly recommend using an IDE; it points a finger at such simple things :)
logger.info('ssm_parameter_name: {}'.format(parameter_name))
ResourceId=ssm_parameter_namee
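In other words, a minimal sketch of the corrected calls, assuming the computed parameter_name is what you intend to use in both places (note that the question's version also hardcodes Name='/ec2-image-builder/linux/latest', which would make the computed path pointless):

response = ssm_client.put_parameter(
    Name=parameter_name,
    Description='Latest AMI ID',
    Value=ami['image'],
    Type='String',
    Overwrite=True,
    Tier='Standard'
)
ssm_client.add_tags_to_resource(
    ResourceType='Parameter',
    ResourceId=parameter_name,  # was the misspelled ssm_parameter_namee
    Tags=[{'Key': 'Source', 'Value': 'EC2 Image Builder'}],  # remaining tags as in the question
)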

Related

UnauthorizedOperation when calling the CreateSnapshot operation from AWS Lambda function

An error occurred (UnauthorizedOperation) when calling the CreateSnapshot operation: You are not authorized to perform this operation. Encoded authorization failure message: jL5ZYRDd52Y_Xpt7xet7GIyJZkUpGhgJGwCsg
The Lambda's execution role has the following policies attached:
AWSLambdaBasicExecutionRole (AWS managed): provides write permissions to CloudWatch Logs.
s3-read-and-write-policy (customer inline)
ebs-cloudtrail-read-policy (customer inline)
ebs-ssm-read-write-policy (customer inline)
ebs-volume-and-snapshot-read-policy (customer inline)
from datetime import date  # needed for date.today()

def createEBSSnapshots(volumes_to_delete, ec2Client, ssmClient):
    print('Initiating create snapshot requests')
    for volume in volumes_to_delete:
        # TODO: write code...
        print('Creating snapshot for ', volume)
        today = str(date.today())
        # print("Today's date:", today)
        ec2Client.create_snapshot(
            Description='This snapshot is generated for volume which was not utilized since last ' + str(timeWindowDeleteVol) + ' hours.',
            # OutpostArn='string',
            VolumeId=volume,
            TagSpecifications=[
                {
                    'ResourceType': 'snapshot',
                    'Tags': [
                        {
                            'Key': 'unusedEBSSnapshot',
                            'Value': 'true'
                        },
                        {
                            'Key': 'unusedVolumeID',
                            'Value': volume
                        },
                        {
                            'Key': 'creationDate',
                            'Value': today
                        },
                    ]
                },
            ],
            # DryRun=True
        )
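The encoded portion of that error can be decoded with STS to see exactly which action was denied. A minimal sketch (the encoded_message value is a placeholder, since the message above is truncated, and the caller needs sts:DecodeAuthorizationMessage permission):

import boto3

sts = boto3.client('sts')
# Placeholder: paste the complete encoded string from the error here;
# the one shown above is truncated.
encoded_message = '...'
decoded = sts.decode_authorization_message(EncodedMessage=encoded_message)
# The decoded JSON names the denied action and resource. Given the read-only
# volume/snapshot policies listed above, it is likely ec2:CreateSnapshot
# (and ec2:CreateTags, since the call uses TagSpecifications).
print(decoded['DecodedMessage'])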

AWS DynamoDB Transaction issues (Python)

I am trying to update two tables transactionally. The first table is called CUSTOMER_TABLE, the second CUSTOMER_JOB_TABLE.
For the first table, I create a new row if it doesn't exist; if it does exist, I add this process's value to the currentProcessedCount column. For the second table, I always create a new row. The two updates need to be transactional. I get the following error and can't figure out the reason. Can someone help?
Response:
{
  "errorMessage": "An error occurred (TransactionCanceledException) when calling the TransactWriteItems operation: Transaction cancelled, please refer cancellation reasons for specific reasons [ValidationError, None]",
  "errorType": "TransactionCanceledException",
  "stackTrace": [
    "  File \"/var/task/app.py\", line 149, in lambda_handler\n    c_table_response = update_customer_table(customer_id, customer_monthly_limit, number_of_rows,\n",
    "  File \"/var/task/app.py\", line 226, in update_customer_table\n    response = dynamodb_client.transact_write_items(\n",
    "  File \"/opt/python/botocore/client.py\", line 316, in _api_call\n    return self._make_api_call(operation_name, kwargs)\n",
    "  File \"/opt/python/botocore/client.py\", line 635, in _make_api_call\n    raise error_class(parsed_response, operation_name)\n"
  ]
}
Below is my method for the call:
import os

import boto3

dynamodb_client = boto3.client('dynamodb')

# grab static env variables
CUSTOMER_ID = os.environ['CUSTOMER_ID']
BUCKET_NAME = os.environ['BUCKET_NAME']
CUSTOMER_TABLE_NAME = os.environ['CUSTOMER_TABLE_NAME']
CUSTOMER_JOB_TABLE_NAME = os.environ['CUSTOMER_JOB_TABLE_NAME']

def update_customer_table(customer_id, customer_monthly_limit, number_of_rows,
                          year_month, uuid, date_time, batch_no):
    response = dynamodb_client.transact_write_items(
        TransactItems=[
            {
                'Update': {
                    'TableName': CUSTOMER_TABLE_NAME,
                    'Key': {
                        'PK': {'S': customer_id},
                        'SK': {'N': str(year_month)},
                    },
                    'ExpressionAttributeNames': {
                        '#ml': "MonthlyLimit",
                        '#cpc': "currentProcessedCount"
                    },
                    'ExpressionAttributeValues': {
                        ':ml': {'N': str(customer_monthly_limit)},
                        ':cpc': {'N': str(number_of_rows)}
                    },
                    'UpdateExpression': "SET #ml = :mlv ADD #cpc :cpc"
                }
            },
            {
                'Put': {
                    'TableName': CUSTOMER_JOB_TABLE_NAME,
                    'Item': {
                        'PK': {'S': f'{customer_id}_{uuid}'},
                        'SK': {'N': str(year_month)},
                        'CustomerId': {'S': customer_id},
                        'UUID': {'S': uuid},
                        'StartDateTime': {'N': date_time.strftime('%Y%m%d%H%M')},
                        'NumberOfSplitFiles': {'N': str(batch_no - 1)},
                        'TotalRowCount': {'N': str(number_of_rows)}
                    }
                }
            }
        ]
    )
    return response
This is a problem, not sure if it's the only one:
'ExpressionAttributeValues': {
    ':ml': {'N': str(customer_monthly_limit)},
    ':cpc': {'N': str(number_of_rows)}
},
'UpdateExpression': "SET #ml = :mlv ADD #cpc :cpc"
:mlv doesn't match :ml, so the update expression references a value that is never supplied.
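As a debugging aid, a hedged sketch: the cancellation reasons mentioned in the error message are returned on the exception itself, one entry per TransactItems element and in the same order, so you can see which of the two items raised the ValidationError:

try:
    update_customer_table(customer_id, customer_monthly_limit, number_of_rows,
                          year_month, uuid, date_time, batch_no)
except dynamodb_client.exceptions.TransactionCanceledException as e:
    # Entries for items that did not fail have Code 'None'.
    print(e.response.get('CancellationReasons'))
    raise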

How to create s3 bucket with logging enabled and make it private using boto3?

I want to create a bucket with:
Logging
Encryption
Private access, and
An alert when accessed without HTTPS
How can I achieve this? I have tried a few lines using boto3, but I am getting an error with logging:
def create_S3_Bucket(env, filepath):
    s3_client = AWSresourceconnect(filepath, 's3')
    bucket_name = "s3bucket123"
    print(bucket_name)
    try:
        s3_bucket = s3_client.create_bucket(Bucket=bucket_name)
        print('bucket created')
        print(s3_bucket)
        response = s3_client.put_bucket_encryption(
            Bucket=bucket_name,
            ServerSideEncryptionConfiguration={
                'Rules': [
                    {
                        'ApplyServerSideEncryptionByDefault': {
                            'SSEAlgorithm': 'AES256'
                        }
                    },
                ]
            }
        )
        print("response of encryption")
        print(response)  # prints metadata successfully
        responselogging = s3_client.put_bucket_logging(
            Bucket=bucket_name,
            BucketLoggingStatus={
                'LoggingEnabled': {
                    'TargetBucket': bucket_name,
                    'TargetGrants': [
                        {
                            'Grantee': {
                                'Type': 'Group',
                                'URI': 'http://acs.amazonaws.com/groups/global/AllUsers',
                            },
                            'Permission': 'READ',
                        },
                    ],
                    'TargetPrefix': 'test/',
                },
            },
        )
        print("response of logging")
        print(responselogging)
        Output = bucket_name
    except Exception as e:
        # error: An error occurred (InvalidTargetBucketForLogging) when calling
        # the PutBucketLogging operation: You must give the log-delivery group
        # WRITE and READ_ACP permissions to the target bucket
        Output = "error:" + str(e)
        print(e)
        bucket_name = ''
    return Output
To recap, I want to enable logging, a private bucket and objects, and encryption.
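The error text names the fix: the S3 log-delivery group needs WRITE and READ_ACP on the target bucket before put_bucket_logging will accept it. A minimal sketch with plain boto3 (the bucket name comes from the question; on newer buckets with ACLs disabled, you would grant the logging service through a bucket policy instead of an ACL):

import json
import boto3

s3 = boto3.client('s3')
bucket_name = 's3bucket123'

# Grant headers replace the whole ACL, so re-grant the owner full control
# alongside the log-delivery group's WRITE and READ_ACP.
owner_id = s3.get_bucket_acl(Bucket=bucket_name)['Owner']['ID']
s3.put_bucket_acl(
    Bucket=bucket_name,
    GrantFullControl='id=' + owner_id,
    GrantWrite='uri=http://acs.amazonaws.com/groups/s3/LogDelivery',
    GrantReadACP='uri=http://acs.amazonaws.com/groups/s3/LogDelivery',
)

# Logging into the same bucket, as in the question; TargetGrants are optional.
s3.put_bucket_logging(
    Bucket=bucket_name,
    BucketLoggingStatus={
        'LoggingEnabled': {'TargetBucket': bucket_name, 'TargetPrefix': 'test/'},
    },
)

# Private bucket and objects.
s3.put_public_access_block(
    Bucket=bucket_name,
    PublicAccessBlockConfiguration={
        'BlockPublicAcls': True,
        'IgnorePublicAcls': True,
        'BlockPublicPolicy': True,
        'RestrictPublicBuckets': True,
    },
)

# Deny any request not made over HTTPS.
s3.put_bucket_policy(
    Bucket=bucket_name,
    Policy=json.dumps({
        'Version': '2012-10-17',
        'Statement': [{
            'Sid': 'DenyInsecureTransport',
            'Effect': 'Deny',
            'Principal': '*',
            'Action': 's3:*',
            'Resource': ['arn:aws:s3:::' + bucket_name,
                         'arn:aws:s3:::' + bucket_name + '/*'],
            'Condition': {'Bool': {'aws:SecureTransport': 'false'}},
        }],
    }),
)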

Cannot create ResourceGroup using boto3: Query format not valid

I am trying to create a resource group using the following boto3 snippet:
kwargs = {
    'Name': 'cluster.foo.io',
    'Description': 'AWS resources assigned to the foo cluster.',
    'ResourceQuery': {
        'Type': 'TAG_FILTERS_1_0',
        'Query': '[{"Key": "foo.io/cluster", "Values": ["cluster.foo.io"]}]',
    },
    'Tags': {
        'foo.io/cluster': 'cluster.foo.io'
    }
}
client = boto3.client("resource-groups")
resp = client.create_group(**kwargs)
But I'm getting the following error:
File "/Users/benjamin/.pyenv/versions/3.7.3/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/client.py", line 357, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/Users/benjamin/.pyenv/versions/3.7.3/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/client.py", line 661, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.errorfactory.BadRequestException: An error occurred (BadRequestException)
when calling the CreateGroup operation: Query not valid:
Query format not valid: check JSON syntax
I keep comparing the Query to the example in the documentation but either I'm not seeing a difference or I'm way off in left field. I even used the json module as follows:
resp = self.resource_client.create_group(
    Name='cluster.foo.io',
    Description="AWS resources assigned to the foo cluster",
    ResourceQuery={
        "Type": "TAG_FILTERS_1_0",
        "Query": json.dumps([{"Key": "foo.io/cluster", "Values": ["cluster.foo.io"]}]),
    },
    Tags={
        "foo.io/cluster": "cluster.foo.io",
    },
)
Any help would be appreciated!
The query parameter is missing ResourceTypeFilters and TagFilters. So, ResourceQuery should look like this:
'ResourceQuery': {
    'Type': 'TAG_FILTERS_1_0',
    'Query': "{\"ResourceTypeFilters\": [\"AWS::AllSupported\"], \"TagFilters\": [{\"Key\": \"foo.io/cluster\", \"Values\": [\"cluster.foo.io\"]}]}"
}
So, your code should be replaced as follows (the main section to change is ResourceQuery):
query = {
    "ResourceTypeFilters": ["AWS::AllSupported"],
    "TagFilters": [{
        "Key": "foo.io/cluster",
        "Values": ["cluster.foo.io"]
    }]
}

resource_query = {
    'Type': 'TAG_FILTERS_1_0',
    'Query': json.dumps(query)
}

kwargs = {
    'Name': 'cluster.foo.io',
    'Description': 'AWS resources assigned to the foo cluster.',
    'ResourceQuery': resource_query,
    'Tags': {
        'foo.io/cluster': 'cluster.foo.io'
    }
}

client = boto3.client("resource-groups")
resp = client.create_group(**kwargs)
I referred to the CLI example shown in the AWS documentation.
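As a quick sanity check (a sketch; the group name comes from the question), you can read the stored query back and confirm it round-trips through json.loads:

check = client.get_group_query(GroupName='cluster.foo.io')
print(json.loads(check['GroupQuery']['ResourceQuery']['Query']))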

unable to tag the ec2 resources boto3 python

I would like to tag the host that I am spinning up using the boto3 Python API:
response = client.allocate_hosts(
    AutoPlacement='on'|'off',
    AvailabilityZone='string',
    ClientToken='string',
    InstanceType='string',
    Quantity=123,
    TagSpecifications=[
        {
            'ResourceType': 'dedicated-host',
            'Tags': [
                {
                    'Key': 'string',
                    'Value': 'string'
                },
            ]
        },
    ])
Here is what I am doing. AvailabilityZone, InstanceType, and Quantity are parameterized, and I use a dictionary to pass the input data:
count = 10
input_dict = {}
input_dict['AvailabilityZone'] = 'us-east-1a'
input_dict['InstanceType'] = 'c5.large'
input_dict['Quantity'] = int(count)

instance = client.allocate_hosts(**input_dict)
print(str(instance))
This code works for me, but I need to tag the resource too.
TagSpecifications=[
    {
        'ResourceType': 'customer-gateway'|'dedicated-host'|'dhcp-options'|'elastic-ip'|'fleet'|'fpga-image'|'image'|'instance'|'internet-gateway'|'launch-template'|'natgateway'|'network-acl'|'network-interface'|'reserved-instances'|'route-table'|'security-group'|'snapshot'|'spot-instances-request'|'subnet'|'transit-gateway'|'transit-gateway-attachment'|'transit-gateway-route-table'|'volume'|'vpc'|'vpc-peering-connection'|'vpn-connection'|'vpn-gateway',
        'Tags': [
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    },
]
How can I input that into the dictionary? TagSpecifications is a list with dictionaries nested inside a dictionary, and I keep making syntax errors. I tried the code below without success:
input_dict['TagSpecifications'] = [{'ResourceType':'dedicated-host','Tags':[{'key':'Name','Value':'demo'},]},]
Note that your attempt uses 'key' in lowercase where the API expects 'Key'. Apart from that, the easiest way is to simply pass values directly:
response = client.allocate_hosts(
    AvailabilityZone='us-east-1a',
    InstanceType='c5.large',
    Quantity=10,
    TagSpecifications=[
        {
            'ResourceType': 'dedicated-host',
            'Tags': [
                {
                    'Key': 'Name',
                    'Value': 'Demo'
                }
            ]
        }
    ])
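If you prefer the dictionary-based approach from the question, the same structure drops in as a value; the only fix needed relative to your attempt is capitalizing 'Key':

input_dict['TagSpecifications'] = [
    {
        'ResourceType': 'dedicated-host',
        'Tags': [
            {'Key': 'Name', 'Value': 'demo'},  # was 'key', which the API rejects
        ],
    },
]
response = client.allocate_hosts(**input_dict)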
