The following code works fine locally, but fails in AWS Lambda:
authURL = os.environ['authURL_env']
reportURL = os.environ['reportURL_env']
FirstDayOfPreviousMonth = dt.date.today()-dt.timedelta(days=1)
LastDayOfPreviousMonth = dt.date.today()-dt.timedelta(days=1)
payload = json.dumps({
"email": os.environ['email_env'],
"password": os.environ['password_env']
})
headers = {
'Content-Type': 'application/json'
}
apiConn = url.PoolManager()
# try:
tokenResponse = apiConn.request('POST', authURL, headers=headers, body=payload)
authToken = json.loads(tokenResponse.data)
payload = json.dumps({
"start_date": str(FirstDayOfPreviousMonth),
"end_date": str(LastDayOfPreviousMonth),
"interval": "day",
"dimensions": [
"supply_tag_id",
"demand_partner_id"
]
})
headers = {
'Authorization': authToken['token'],
'Content-Type': 'application/json'
}
response = apiConn.request('POST', reportURL, headers=headers, body=payload)
vendor_df = pd.read_json(response.data)
In AWS Lambda I get the following error:
{
"errorMessage": "Expected file path name or file-like object, got <class 'bytes'> type",
"errorType": "TypeError",
"requestId": "9f594e9e-7703-420b-b981-e4352f1d64db",
"stackTrace": [
" File \"/var/task/lambda_function.py\", line 56, in lambda_handler\n springserve_df = pd.read_json(response.data)\n",
" File \"/opt/python/pandas/util/_decorators.py\", line 207, in wrapper\n return func(*args, **kwargs)\n",
" File \"/opt/python/pandas/util/_decorators.py\", line 311, in wrapper\n return func(*args, **kwargs)\n",
" File \"/opt/python/pandas/io/json/_json.py\", line 588, in read_json\n json_reader = JsonReader(\n",
" File \"/opt/python/pandas/io/json/_json.py\", line 673, in __init__\n data = self._get_data_from_filepath(filepath_or_buffer)\n",
" File \"/opt/python/pandas/io/json/_json.py\", line 710, in _get_data_from_filepath\n self.handles = get_handle(\n",
" File \"/opt/python/pandas/io/common.py\", line 823, in get_handle\n raise TypeError(\n"
]
}
To add more mystery to it, this exact code works in an older Lambda. Its only when I try to create it in a new lambda that it begins to fail.
Related
Does Boto3 client support connectors for GoogleAds and FacebookAds? According to documentation we can use Custom Connector but when i try to use it in the code i get the below error saying it should be one of the built in types.
[ERROR] ParamValidationError: Parameter validation failed:
Unknown parameter in connectorProfileConfig.connectorProfileProperties: "CustomConnector", must be one of: Amplitude, Datadog, Dynatrace, GoogleAnalytics, Honeycode, InforNexus, Marketo, Redshift, Salesforce, ServiceNow, Singular, Slack, Snowflake, Trendmicro, Veeva, Zendesk, SAPOData
Unknown parameter in connectorProfileConfig.connectorProfileCredentials: "CustomConnector", must be one of: Amplitude, Datadog, Dynatrace, GoogleAnalytics, Honeycode, InforNexus, Marketo, Redshift, Salesforce, ServiceNow, Singular, Slack, Snowflake, Trendmicro, Veeva, Zendesk, SAPOData
Traceback (most recent call last):
File "/var/task/lambda_function.py", line 34, in lambda_handler
response = client.create_connector_profile(
File "/var/runtime/botocore/client.py", line 391, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/var/runtime/botocore/client.py", line 691, in _make_api_call
request_dict = self._convert_to_request_dict(
File "/var/runtime/botocore/client.py", line 739, in _convert_to_request_dict
request_dict = self._serializer.serialize_to_request(
File "/var/runtime/botocore/validate.py", line 360, in serialize_to_request
raise ParamValidationError(report=report.generate_report())
Code in Lambda :
import json
import boto3
def lambda_handler(event, context):
client = boto3.client('appflow')
### Google Ads
response = client.create_connector_profile(
connectorProfileName='GoogleAdsConn',
connectorType='CustomConnector',
# connectorLabel='GoogleAds',
connectionMode='Public',
connectorProfileConfig= {
"connectorProfileProperties": {
'CustomConnector': {
# 'profileProperties': {
# 'string': 'string'
# },
'oAuth2Properties': {
'tokenUrl': 'https://oauth2.googleapis.com/token',
'oAuth2GrantType': 'AUTHORIZATION_CODE'
# ,'tokenUrlCustomProperties': {
# 'string': 'string'
# }
}
}
},
"connectorProfileCredentials": {
"CustomConnector": {
"authenticationType": "OAUTH2",
"oauth2": {
"accessToken": "myaccesstoken",
"clientId": "myclientid",
"clientSecret": "myclientsecret",
"oAuthRequest": {
"authCode": "string",
"redirectUri": "string"
},
"refreshToken": "myrefreshtoken"
}
}
}
}
)
return {
'response': response
}
Any leads on this will be appreciated.
Thanks!
The issue was with older boto3 version. Adding a new lambda layer with latest boto3 version(1.24.70) and updating code with profileProperties it worked seamlessly. Below is the complete working code.
import json
import boto3
def lambda_handler(event, context):
client = boto3.client('appflow')
### Google Ads
response = client.create_connector_profile(
connectorProfileName='GoogleAdsConnThruLambda',
connectorType='CustomConnector',
connectorLabel='GoogleAds',
connectionMode='Public',
connectorProfileConfig= {
"connectorProfileProperties": {
'CustomConnector': {
'profileProperties': {
'developerToken': 'developerToken',
'instanceUrl':'https://googleads.googleapis.com',
'managerID':'managerID',
'apiVersion':'v11'
},
'oAuth2Properties': {
'tokenUrl': 'https://oauth2.googleapis.com/token',
'oAuth2GrantType': 'AUTHORIZATION_CODE'
# ,'tokenUrlCustomProperties': {
# "string":"string"
# }
}
}
},
"connectorProfileCredentials": {
"CustomConnector": {
"authenticationType": "OAUTH2",
"oauth2": {
"accessToken": "accessToken",
"clientId": "clientId",
"clientSecret": "clientSecret",
"oAuthRequest": {
"authCode": "authCode",
"redirectUri": "https://<your_region>.console.aws.amazon.com/appflow/oauth"
},
"refreshToken": "refreshToken"
}
}
}
}
)
return {
'response': response
}
I have documents like this
class Users(Document):
name = StringField(required=True)
email = EmailField(required=True, primary_key=True)
preferences = ListField(ReferenceField(Preferences, required=True))
languages = ListField(ReferenceField(Languages), required=True)
class Languages(Document):
name = StringField(required=True,unique=True)
active = BooleanField(default=True)
class Preferences(Document):
name = DictField(required=True,unique=True)
active = BooleanField(default=True)
I am trying to retrieve information from these 3 collections .
return code from my python
output = Users.objects.aggregate([
{
'$lookup':
{
"from": "languages",
"localField": "languages",
"foreignField": "_id",
"as": "languages"
}
},
{
'$lookup':
{
"from": "preferences",
"localField": "preferences",
"foreignField": "_id",
"as": "preferences"
}
}
])
return jsonify({'result': output})
but getting below error :
File "D:\user.py", line 51, in get
return jsonify({'result': output})
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\site-packages\flask\json_init_.py", line 355, in jsonify
f"{dumps(data, indent=indent, separators=separators)}\n",
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\site-packages\flask\json_init_.py", line 133, in dumps
rv = json.dumps(obj, **kwargs)
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\json_init.py", line 238, in dumps
**kw).encode(obj)
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\json\encoder.py", line 201, in encode
chunks = list(chunks)
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\json\encoder.py", line 431, in _iterencode
yield from _iterencode_dict(o, _current_indent_level)
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\json\encoder.py", line 405, in _iterencode_dict
yield from chunks
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\json\encoder.py", line 438, in _iterencode
o = default(o)
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\site-packages\flask_mongoengine\json.py", line 19, in default
return superclass.default(self, obj)
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\site-packages\flask\json_init.py", line 57, in default
return super().default(o)
File "C:\Users\Ideapad\AppData\Local\Programs\Python\Python310\Lib\json\encoder.py", line 179, in default
raise TypeError(f'Object of type {o.class.name} '
TypeError: Object of type CommandCursor is not JSON serializable
its working fine with the json.dumps
dumps(output, ensure_ascii=False)
I am trying out
import json
import uuid
import boto3
def lambda_handler(event, context):
dynamo_client = boto3.resource('dynamodb')
loadeo_carrier_company = dynamo_client.Table('loadeo_carrier_company')
item = {}
item = event['body']
print(item)
item['company_id'] = str(uuid.uuid4())
print (type(item))
try:
loadeo_carrier_company.put_item(
Item=item
)
return {
"statusCode": 200,
"headers" : {
"Access-Control-Allow-Origin" : "*"
},
"message": json.dumps("Record has been inserted"),
"body": item
}
except Exception as e:
return {
"statusCode": 500,
"headers" : {
"Access-Control-Allow-Origin" : "*"
},
"message": "Error: Unable to save record!"
}
This code.
When I test it through Lambda Test event it is working fine.
But when I create an API and try it out with postman showes an internal server error but when I look at the cloud watch logs The below error is shown.
[ERROR] TypeError: 'str' object does not support item assignment
Traceback (most recent call last):
File "/var/task/lambda_function.py", line 19, in lambda_handler
item['company_id'] = str(uuid.uuid4())
NOTE:
The Lambda test event is working fine.
"body": {
"company_name" : "waveaxis pvt ltd",
"mc_number" : "00000",
"email_adderess": "waveaxis#waveaxis.co.in",
"phone_number" : "+91 1234567890",
"company_address" : "Kormangala, Bengaluru"
},
After Trying the answer:
When I test it from lambda test event:
{
"errorMessage": "the JSON object must be str, bytes or bytearray, not dict",
"errorType": "TypeError",
"stackTrace": [
" File \"/var/task/lambda_function.py\", line 17, in lambda_handler\n item = json.loads(event['body'])\n",
" File \"/var/lang/lib/python3.8/json/__init__.py\", line 341, in loads\n raise TypeError(f'the JSON object must be str, bytes or bytearray, '\n"
]
}
and when I call it from postman.
Execution stops before try block with no error
Edit 2: print the json for event
{
"resource":"/",
"path":"/",
"httpMethod":"PUT",
"headers":{
"Accept":"*/*",
"Accept-Encoding":"gzip, deflate, br",
"Authorization":"Bearer eyJraWQiOiJCUUl6ZkxcL1VOdm9QTDVxNExlWGFRNXNxOG1mVmhmXC9rK3ZJUDdYc0p0VjQ9IiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJkYjdkODBmOC1mY2VhLTQwYjItYTZhYi1jMjhhNTZiMTI1NDIiLCJ0b2tlbl91c2UiOiJhY2Nlc3MiLCJzY29wZSI6ImNhcnJpZXJcL3JlYWQgY2FycmllclwvdXBkYXRlIGNhcnJpZXJcL2RlbGV0ZSIsImF1dGhfdGltZSI6MTYwMjA0NDQ5NywiaXNzIjoiaHR0cHM6XC9cL2NvZ25pdG8taWRwLnVzLWVhc3QtMS5hbWF6b25hd3MuY29tXC91cy1lYXN0LTFfWW5nVnJxYUFGIiwiZXhwIjoxNjAyMDQ4MDk3LCJpYXQiOjE2MDIwNDQ0OTcsInZlcnNpb24iOjIsImp0aSI6ImExMzg4ZGUyLTRhZWQtNGI2MC04YjM0LWYzN2I1N2RjM2ZmMiIsImNsaWVudF9pZCI6IjVlZzU1NWhpNzAwZ21lbWc3N3B0NWkzbDIyIiwidXNlcm5hbWUiOiJHb29nbGVfMTEyNjUyMTUzMDI4OTQyNjAzNDM5In0.XMy9GP03o5EYrcLtQFzrMV6KID4IlDI_n0WrHa8osY_7CeeDjaCjH6Dtr766TAommLUzcLoKt-NrBUdq0Zfx-BL919j25rwiZXJbHiZP_4y9n891ddOXfPabO7n8O84-63W6l13QEBozuc21vXi7vuE_dSJ7KAgute46KP3LyoS73WPDhYim_7HZJO8pVedk64hhGNZsYWv6VU5QeQyqPl926spA25ZBo_z5dcoBnMZ_i2n5nz6qxRcINOKfMXL1f4_nDRbtKb5Pd33hKnsKYLkxEI0mrT1JKPJhkJRg9vGqaKcd13oqrigJRFSXYuVQuKNDluc38KbQJcwUoXDjjA",
"Content-Type":"application/json",
"Host":"661ny3iw92.execute-api.us-east-2.amazonaws.com",
"Postman-Token":"654cfdd0-8080-48a5-8758-3aa7e5bcefcc",
"User-Agent":"PostmanRuntime/7.26.5",
"X-Amzn-Trace-Id":"Root=1-5fa13860-63da2718701bf3fc458c78e1",
"X-Forwarded-For":"157.49.141.105",
"X-Forwarded-Port":"443",
"X-Forwarded-Proto":"https"
},
"multiValueHeaders":{
"Accept":[
"*/*"
],
"Accept-Encoding":[
"gzip, deflate, br"
],
"Authorization":[
"Bearer eyJraWQiOiJCUUl6ZkxcL1VOdm9QTDVxNExlWGFRNXNxOG1mVmhmXC9rK3ZJUDdYc0p0VjQ9IiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJkYjdkODBmOC1mY2VhLTQwYjItYTZhYi1jMjhhNTZiMTI1NDIiLCJ0b2tlbl91c2UiOiJhY2Nlc3MiLCJzY29wZSI6ImNhcnJpZXJcL3JlYWQgY2FycmllclwvdXBkYXRlIGNhcnJpZXJcL2RlbGV0ZSIsImF1dGhfdGltZSI6MTYwMjA0NDQ5NywiaXNzIjoiaHR0cHM6XC9cL2NvZ25pdG8taWRwLnVzLWVhc3QtMS5hbWF6b25hd3MuY29tXC91cy1lYXN0LTFfWW5nVnJxYUFGIiwiZXhwIjoxNjAyMDQ4MDk3LCJpYXQiOjE2MDIwNDQ0OTcsInZlcnNpb24iOjIsImp0aSI6ImExMzg4ZGUyLTRhZWQtNGI2MC04YjM0LWYzN2I1N2RjM2ZmMiIsImNsaWVudF9pZCI6IjVlZzU1NWhpNzAwZ21lbWc3N3B0NWkzbDIyIiwidXNlcm5hbWUiOiJHb29nbGVfMTEyNjUyMTUzMDI4OTQyNjAzNDM5In0.XMy9GP03o5EYrcLtQFzrMV6KID4IlDI_n0WrHa8osY_7CeeDjaCjH6Dtr766TAommLUzcLoKt-NrBUdq0Zfx-BL919j25rwiZXJbHiZP_4y9n891ddOXfPabO7n8O84-63W6l13QEBozuc21vXi7vuE_dSJ7KAgute46KP3LyoS73WPDhYim_7HZJO8pVedk64hhGNZsYWv6VU5QeQyqPl926spA25ZBo_z5dcoBnMZ_i2n5nz6qxRcINOKfMXL1f4_nDRbtKb5Pd33hKnsKYLkxEI0mrT1JKPJhkJRg9vGqaKcd13oqrigJRFSXYuVQuKNDluc38KbQJcwUoXDjjA"
],
"Content-Type":[
"application/json"
],
"Host":[
"661ny3iw92.execute-api.us-east-2.amazonaws.com"
],
"Postman-Token":[
"654cfdd0-8080-48a5-8758-3aa7e5bcefcc"
],
"User-Agent":[
"PostmanRuntime/7.26.5"
],
"X-Amzn-Trace-Id":[
"Root=1-5fa13860-63da2718701bf3fc458c78e1"
],
"X-Forwarded-For":[
"157.49.141.105"
],
"X-Forwarded-Port":[
"443"
],
"X-Forwarded-Proto":[
"https"
]
},
"queryStringParameters":"None",
"multiValueQueryStringParameters":"None",
"pathParameters":"None",
"stageVariables":"None",
"requestContext":{
"resourceId":"529nsbfu6a",
"resourcePath":"/",
"httpMethod":"PUT",
"extendedRequestId":"VbW_FFkYCYcF3PQ=",
"requestTime":"03/Nov/2020:11:00:48 +0000",
"path":"/dev",
"accountId":"272075499248",
"protocol":"HTTP/1.1",
"stage":"dev",
"domainPrefix":"661ny3iw92",
"requestTimeEpoch":1604401248435,
"requestId":"970dd9d2-9b35-45c5-b194-806060e27d10",
"identity":{
"cognitoIdentityPoolId":"None",
"accountId":"None",
"cognitoIdentityId":"None",
"caller":"None",
"sourceIp":"157.49.141.105",
"principalOrgId":"None",
"accessKey":"None",
"cognitoAuthenticationType":"None",
"cognitoAuthenticationProvider":"None",
"userArn":"None",
"userAgent":"PostmanRuntime/7.26.5",
"user":"None"
},
"domainName":"661ny3iw92.execute-api.us-east-2.amazonaws.com",
"apiId":"661ny3iw92"
},
"body":"{\r\n \"company_name\": \"waveaxis pvt ltd\",\r\n \"mc_number\": \"00000\",\r\n \"email_adderess\": \"waveaxis#waveaxis.co.in\",\r\n \"phone_number\": \"+91 1234567890\",\r\n \"company_address\": \"Kormangala, Bengaluru\"\r\n}\r\n",
"isBase64Encoded":False
}
Your event['body'] is probably just a json string, not actual json, from what I remember.
Thus, instead of
item = event['body']
you can use
item = json.loads(event['body'])
which should parse json string into json object.
Update
Based on the posted form of the event, you should use ast, not json
import ast
item = ast.literal_eval(event['body'])
I am trying to create a resource group using the following boto3 snippet:
kwargs = {
'Name': 'cluster.foo.io',
'Description': 'AWS resources assigned to the foo cluster.',
'ResourceQuery': {
'Type': 'TAG_FILTERS_1_0',
'Query': '[{"Key": "foo.io/cluster", "Values": ["cluster.foo.io"]}]',
},
'Tags': {
'foo.io/cluster': 'cluster.foo.io'
}
}
client = boto3.client("resource-groups")
resp = client.create_group(**kwargs)
But I'm getting the following error:
File "/Users/benjamin/.pyenv/versions/3.7.3/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/client.py", line 357, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/Users/benjamin/.pyenv/versions/3.7.3/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/client.py", line 661, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.errorfactory.BadRequestException: An error occurred (BadRequestException)
when calling the CreateGroup operation: Query not valid:
Query format not valid: check JSON syntax
I keep comparing the Query to the example in the documentation but either I'm not seeing a difference or I'm way off in left field. I even used the json module as follows:
resp = self.resource_client.create_group(
Name='cluster.foo.io',
Description="AWS resources assigned to the foo cluster",
ResourceQuery={
"Type": "TAG_FILTERS_1_0",
"Query": json.dumps([{"Key": "foo.io/cluster", "Values": ["cluster.foo.io"]}]),
},
Tags={
"foo.io/cluster": "cluster.foo.io",
},
)
Any help would be appreciated!
The query parameter is missing ResourceTypeFilters and TagFilters. So, ResourceQuery should look like this:
'ResourceQuery': {
'Type': 'TAG_FILTERS_1_0',
'Query': "{\"ResourceTypeFilters\": [\"AWS::AllSupported\"], \"TagFilters\": [{\"Key\": \"foo.io/cluster\", \"Values\": [\"cluster.foo.io\"]}]}"
}
So, your code should be replaced as follows (the main section to be replaced is ResourceQuery:
query = {
"ResourceTypeFilters": ["AWS::AllSupported"],
"TagFilters": [{
"Key": "foo.io/cluster",
"Values": ["cluster.foo.io"]
}]
}
resource_query = {
'Type': 'TAG_FILTERS_1_0',
'Query': json.dumps(query)
}
kwargs = {
'Name': 'cluster.foo.io',
'Description': 'AWS resources assigned to the foo cluster.',
'ResourceQuery': resource_query,
'Tags': {
'foo.io/cluster': 'cluster.foo.io'
}
}
client = boto3.client("resource-groups")
resp = client.create_group(**kwargs)
I referred the example CLI shown here.
I am trying to copy multiple files in a source bucket to a destination bucket using AWS lambda and am getting the error below. Bucket structures are as follows
Source Buckets
mysrcbucket/Input/daily/acctno_pin_xref/ABC_ACCTNO_PIN_XREF_FULL_20170926_0.csv.gz
mysrcbucket/Input/daily/acctno_pin_xref/ABC_ACCTNO_PIN_XREF_FULL_20170926_1.csv.gz
mysrcbucket/Input/daily/acctno_pin_xref/ABC_ACCTNO_PIN_XREF_count_20170926.inf
Destination Buckets
mydestbucket/Input/daily/acctno_pin_xref/ABC_ACCTNO_PIN_XREF_FULL_20170926_0.csv.gz
mydestbucket/Input/daily/acctno_pin_xref/ABC_ACCTNO_PIN_XREF_FULL_20170926_1.csv.gz
mydestbucket/Input/daily/acctno_pin_xref/ABC_ACCTNO_PIN_XREF_count_20170926.inf
I wrote the lambda function below but am getting the error below. Can someone help me explain what I am doing wrong
{ "errorMessage": "expected string or bytes-like object", "errorType": "TypeError", "stackTrace": [
[
"/var/task/lambda_function.py",
17,
"lambda_handler",
"s3.Object(dest_bucket,dest_key).copy_from(CopySource= { 'Bucket': obj.bucket_name , 'Key' : obj.key})"
],
[
"/var/runtime/boto3/resources/factory.py",
520,
"do_action",
"response = action(self, *args, **kwargs)"
],
[
"/var/runtime/boto3/resources/action.py",
83,
"__call__",
"response = getattr(parent.meta.client, operation_name)(**params)"
],
[
"/var/runtime/botocore/client.py",
312,
"_api_call",
"return self._make_api_call(operation_name, kwargs)"
],
[
"/var/runtime/botocore/client.py",
575,
"_make_api_call",
"api_params, operation_model, context=request_context)"
],
[
"/var/runtime/botocore/client.py",
627,
"_convert_to_request_dict",
"params=api_params, model=operation_model, context=context)"
],
[
"/var/runtime/botocore/hooks.py",
227,
"emit",
"return self._emit(event_name, kwargs)"
],
[
"/var/runtime/botocore/hooks.py",
210,
"_emit",
"response = handler(**kwargs)"
],
[
"/var/runtime/botocore/handlers.py",
208,
"validate_bucket_name",
"if VALID_BUCKET.search(bucket) is None:"
] ] }
Lambda Function Code
import boto3
import json
s3 = boto3.resource('s3')
def lambda_handler (event, context):
bucket = s3.Bucket('mysrcbucket')
dest_bucket=s3.Bucket('mydestbucket')
print(bucket)
print(dest_bucket)
for obj in bucket.objects.filter(Prefix='Input/daily/acctno_pin_xref/ABC_ACCTNO_PIN_XREF',Delimiter='/'):
dest_key=obj.key
print(dest_key)
s3.Object(dest_bucket,dest_key).copy_from(CopySource= { 'Bucket': obj.bucket_name , 'Key' : obj.key})
The issue is with:
s3.Object(dest_bucket, dest_key).copy_from(CopySource= {'Bucket': obj.bucket_name,
'Key': obj.key})
change dest_bucket to dest_bucket.name:
s3.Object(dest_bucket.name, dest_key).copy_from(CopySource= {'Bucket': obj.bucket_name,
'Key': obj.key})
dest_bucket is a resource and name is its identifier.