Nodemailer: "Missing start boundary" with SES transport - node.js

import * as SES from 'aws-sdk/clients/ses';
import * as nodemailer from 'nodemailer';

const transporter = nodemailer.createTransport({
  SES: new SES({
    accessKeyId: 'XXXX',
    secretAccessKey: 'XXXX',
    region: 'XXXX',
  }),
});

transporter.sendMail({
  from: 'test-from#test.it',
  to: 'test-to#test.it',
  attachments: [{
    filename: 'attachment.zip',
    content: file // internal.Readable from busboy upload
  }],
  subject: 'test',
  html: 'test'
});
error:
InvalidParameterValue: Missing start boundary
at Request.extractError (/opt/backend/node_modules/aws-sdk/lib/protocol/query.js:50:29)
at Request.callListeners (/opt/backend/node_modules/aws-sdk/lib/sequential_executor.js:106:20)
at Request.emit (/opt/backend/node_modules/aws-sdk/lib/sequential_executor.js:78:10)
at Request.emit (/opt/backend/node_modules/aws-sdk/lib/request.js:686:14)
at Request.transition (/opt/backend/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/opt/backend/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /opt/backend/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/opt/backend/node_modules/aws-sdk/lib/request.js:38:9)
at Request.<anonymous> (/opt/backend/node_modules/aws-sdk/lib/request.js:688:12)
at Request.callListeners (/opt/backend/node_modules/aws-sdk/lib/sequential_executor.js:116:18)
Library versions:
{
  "nodemailer": "^6.7.6",
  "aws-sdk": "^2.1167.0"
}
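One thing worth checking (an assumption, not confirmed for this exact setup): the SES transport builds the MIME message in memory and sends it to SES as one blob, so if the attachment stream (the busboy internal.Readable) has already been partially consumed or errors while the message is being assembled, SES can receive a multipart body without its boundary and reject it with "Missing start boundary". A minimal sketch of a workaround - buffering the upload before attaching it - reusing the transporter and file from above (assumes this runs inside an async handler):

// Hypothetical helper: drain the busboy stream into a Buffer so Nodemailer
// can build the complete MIME message before it is handed to SES.
function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('data', (chunk) => chunks.push(chunk));
    stream.on('error', reject);
    stream.on('end', () => resolve(Buffer.concat(chunks)));
  });
}

const buffered = await streamToBuffer(file);

await transporter.sendMail({
  from: 'test-from#test.it',
  to: 'test-to#test.it',
  attachments: [{
    filename: 'attachment.zip',
    content: buffered, // a Buffer instead of a live Readable
  }],
  subject: 'test',
  html: 'test',
});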

Related

DigitalOcean Spaces / Amazon S3 "InvalidArgument: null at Request.extractError"

InvalidArgument: null
at Request.extractError (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\services\s3.js:700:35)
at Request.callListeners (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\sequential_executor.js:106:20)
at Request.emit (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\sequential_executor.js:78:10)
at Request.emit (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\request.js:688:14)
at Request.transition (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\request.js:22:10)
at AcceptorStateMachine.runTo (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\state_machine.js:14:12)
at P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\state_machine.js:26:10
at Request.<anonymous> (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\request.js:38:9)
at Request.<anonymous> (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\request.js:690:12)
at Request.callListeners (P:\Upwork\MyProject\backend\node_modules\aws-sdk\lib\sequential_executor.js:116:18) {
code: 'InvalidArgument',
region: null,
time: 2020-12-31T15:39:45.724Z,
requestId: '',
extendedRequestId: undefined,
cfId: undefined,
statusCode: 400,
retryable: false,
retryDelay: 85.1667642693943
}
This error occurs when trying to upload to DigitalOcean Spaces or Amazon S3.
The error message is very vague, supplying only InvalidArgument as a reason.
code: 'InvalidArgument' suggests that there is an issue with the supplied uploadParams.
const uploadParams = {
  Bucket: process.env.DIGITAL_OCEAN_PUBLIC_SPACE_NAME,
  Key: `profile-picture/${userUUID}.jpg`,
  Body: body,
  ACL: 'public',
};
In my case, you can see above that I was trying to use ACL: 'public'.
This is not a valid ACL value; ACL: 'public-read' should be used instead. For private objects, use ACL: 'private'. The documentation can be found here.
If you receive this error, check that your bucket name, ACL value and all of your other uploadParams are valid.
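For reference, the corrected params from the answer above (same hypothetical bucket and key) look like this:

const uploadParams = {
  Bucket: process.env.DIGITAL_OCEAN_PUBLIC_SPACE_NAME,
  Key: `profile-picture/${userUUID}.jpg`,
  Body: body,
  ACL: 'public-read', // valid canned ACL; use 'private' for non-public objects
};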

Updating a DynamoDB table (incrementing an integer) using transactions in Lambda

I'm trying to update the follower count for two items in a DynamoDB table using Node.js 12.x. Here is my code:
const AWS = require('aws-sdk');
AWS.config.update({region: "us-east-2"});
const dynamoDb = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
    const followingUser = "#FRIEND#" + event.queryStringParameters.followingUser;
    const followedUser = "USER#" + event.queryStringParameters.followedUser;
    const followedMetadata = "#METADATA#" + followedUser;
    const followingMetadata = "#METADATA#" + followingUser;
    const followingUsername = "USER#" + followingUser;
    const current_time = Date.now();
    console.log(event);
    try {
        const response = await dynamoDb.transactWrite({
            TransactItems: [
                {
                    Put: {
                        TableName: "rememoriesDBv2",
                        Item: {
                            "PK": followedUser,
                            "SK": followingUser,
                            "followedUser": event.queryStringParameters.followedUser,
                            "followingUser": event.queryStringParameters.followingUser,
                            "timestamp": current_time
                        },
                        ConditionExpression: "attribute_not_exists(SK)",
                        ReturnValuesOnConditionCheckFailure: "ALL_OLD"
                    }
                },
                {
                    Update: {
                        TableName: "rememoriesDBv2",
                        Key: { "PK": { "S": followedUser }, "SK": { "S": followedMetadata } },
                        UpdateExpression: "SET followers = followers + :i",
                        ExpressionAttributeNames: { "followers": "followers" },
                        // UpdateExpression: "ADD followers :i",
                        // ExpressionAttributeNames={'followers': 'followers'},
                        // ExpressionAttributeValues: {":i": {"N": "1"}},
                        // UpdateExpression: "set followers = followers + :i",
                        ExpressionAttributeValues: { ":i": { N: 1 } },
                        ReturnValuesOnConditionCheckFailure: "ALL_OLD"
                    }
                },
                // {
                //     Update: {
                //         TableName: "rememoriesDBv2",
                //         Key: { "PK": followingUser, "SK": followingMetadata },
                //         UpdateExpression: "SET following = following + :i",
                //         // UpdateExpression: "set following = following + :i",
                //         ExpressionAttributeValues: { ":i": 1 },
                //         ReturnValuesOnConditionCheckFailure: "ALL_OLD"
                //     }
                // },
            ]
        }).promise();
    }
    catch (err) {
        console.log(err);
    }

    // TODO implement
    const returnValue = {
        statusCode: 200,
        body: JSON.stringify(event.queryStringParameters.followingUser + " is now following " + event.queryStringParameters.followedUser),
    };
    return returnValue;
};
I've been running into problems the entire time, but here is the current error:
INFO ValidationException: ExpressionAttributeNames contains invalid key: Syntax error; key: "followers"
at Request.extractError (/var/runtime/node_modules/aws-sdk/lib/protocol/json.js:51:27)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:106:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:78:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:38:9)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:685:12)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:116:18) {
code: 'ValidationException',
time: 2020-07-31T15:34:22.073Z,
requestId: 'B6NJ2EC686PFVBFE0P4MVFBB17VV4KQNSO5AEMVJF66Q9ASUAAJG',
statusCode: 400,
retryable: false,
retryDelay: 47.858884374085854
}
I've also gotten this error:
INFO ValidationException: Invalid UpdateExpression: Incorrect operand type for operator or function; operator or function: +, operand type: M
at Request.extractError (/var/runtime/node_modules/aws-sdk/lib/protocol/json.js:51:27)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:106:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:78:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:38:9)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:685:12)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:116:18) {
code: 'ValidationException',
time: 2020-07-31T14:19:56.774Z,
requestId: 'ROQC1RTQTD4784CK8JBQ47FKKBVV4KQNSO5AEMVJF66Q9ASUAAJG',
statusCode: 400,
retryable: false,
retryDelay: 31.52114733740706
}
Ultimately, I'm unable to find much AWS documentation on this, and I'm not sure where to go from here. If anyone could help me figure out what exactly is going on in my code and what is wrong with it, it'd be greatly appreciated.
Here is the table schema (screenshot not shown). It then has random elements depending on the type.
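For what it's worth, both errors point at the Update item rather than the transaction itself: placeholder keys in ExpressionAttributeNames must start with #, and DocumentClient already marshals native JavaScript values, so { N: 1 } is sent as a map (hence "operand type: M"). A sketch of what that item could look like with DocumentClient (same table and key values as above, untested against this schema):

{
    Update: {
        TableName: "rememoriesDBv2",
        // DocumentClient takes plain strings here, not { "S": ... } wrappers
        Key: { PK: followedUser, SK: followedMetadata },
        // ADD increments atomically and creates the attribute if it is missing;
        // #followers is an expression attribute name placeholder (must start with '#')
        UpdateExpression: "ADD #followers :i",
        ExpressionAttributeNames: { "#followers": "followers" },
        ExpressionAttributeValues: { ":i": 1 }, // plain number, not { N: "1" }
        ReturnValuesOnConditionCheckFailure: "ALL_OLD"
    }
}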

DynamoDB TransactWriteItems error: An unknown operation was requested

I'm trying to update multiple items using TransactWriteItems, but I get the following error:
{
UnknownOperationException: An unknown operation was requested.
at Request.extractError (project-dir\node_modules\aws-sdk\lib\protocol\json.js:51:27)
at Request.callListeners (project-dir\node_modules\aws-sdk\lib\sequential_executor.js:106:20)
at Request.emit (project-dir\node_modules\aws-sdk\lib\sequential_executor.js:78:10)
at Request.emit (project-dir\node_modules\aws-sdk\lib\request.js:683:14)
at Request.transition (project-dir\node_modules\aws-sdk\lib\request.js:22:10)
at AcceptorStateMachine.runTo (project-dir\node_modules\aws-sdk\lib\state_machine.js:14:12)
at project-dir\node_modules\aws-sdk\lib\state_machine.js:26:10
at Request.<anonymous> (project-dir\node_modules\aws-sdk\lib\request.js:38:9)
at Request.<anonymous> (project-dir\node_modules\aws-sdk\lib\request.js:685:12)
at Request.callListeners (project-dir\node_modules\aws-sdk\lib\sequential_executor.js:116:18)
message: 'An unknown operation was requested.',
code: 'UnknownOperationException',
time: 2019-06-21T18:28:46.776Z,
requestId: '',
statusCode: 400,
retryable: false,
retryDelay: 17.98291928629798
}
My Code is given below:
const dynamodb = new aws.DynamoDB({ endpoint: "http://localhost:8000" });

const result = await dynamodb
    .transactWriteItems({
        TransactItems: [{
            "Update": {
                "TableName": "dbTable1",
                "Key": {
                    "id": { "S": "table-primary-key-id-01" }
                },
                "ConditionExpression": "#id = :id",
                "UpdateExpression": "set #orderNo = :orderNo",
                "ExpressionAttributeNames": {
                    "#id": "id",
                    "#orderNo": "orderNo"
                },
                "ExpressionAttributeValues": {
                    ":id": { "S": "table-primary-key-id-01" },
                    ":orderNo": { "N": "9" }
                }
            }
        }]
    })
    .promise();
Any help would be very much appreciated. Thanks in advance.
I see you are running the TransactWriteItems operation against a local DynamoDB instance. Unfortunately, AWS has not implemented support for the Transactions API in DynamoDB Local instances.
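As a quick sanity check (a sketch, assuming valid credentials, a hypothetical region, and that dbTable1 exists there), the same request pointed at the regular AWS endpoint instead of localhost:8000 should not raise UnknownOperationException; if it succeeds there, the local instance is what lacks the operation:

const aws = require('aws-sdk');
// Same transaction as above, but without the local endpoint override.
const dynamodb = new aws.DynamoDB({ region: 'us-east-1' }); // hypothetical region

dynamodb
    .transactWriteItems({
        TransactItems: [{
            Update: {
                TableName: 'dbTable1',
                Key: { id: { S: 'table-primary-key-id-01' } },
                ConditionExpression: '#id = :id',
                UpdateExpression: 'set #orderNo = :orderNo',
                ExpressionAttributeNames: { '#id': 'id', '#orderNo': 'orderNo' },
                ExpressionAttributeValues: {
                    ':id': { S: 'table-primary-key-id-01' },
                    ':orderNo': { N: '9' }
                }
            }
        }]
    })
    .promise()
    .then(() => console.log('transaction committed'))
    .catch((err) => console.error(err));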

AWS SDK and Node.js --> UnknownEndpoint

I have followed this guide: https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/ec2-example-creating-an-instance.html
When I run the code below, I get the UnknownEndpoint error.
I have created an AWS IAM user and added the user to a group with the policy "IAMFullAccess" - I don't know which other policy would fit (I can see in the AWS dashboard that my credentials have been used programmatically, so I guess this part works...).
I have placed my credentials in the .aws folder, in a file named credentials.
I am not sure what to actually put in 'KeyName' below - currently I have entered the key pair name I find by clicking on one of my existing AWS EC2 instances in the dashboard. Is this correct?
Is there some security group or similar I need to edit to be able to connect Node.js to my AWS account? Otherwise I have no clue what to do.
In start.js file:
process.env.AWS_SDK_LOAD_CONFIG=1;
var AWS_SDK = require('./aws_sdk');
var aws_sdk = new AWS_SDK();
aws_sdk.CopyInstance();
In aws_sdk.js file:
function AWS_SDK() {
    this.CopyInstance = function () {
        try {
            // Load the AWS SDK for Node.js
            var AWS = require('aws-sdk');
            // Set the region
            AWS.config.update({region: 'us-east-2a'});
            var instanceParams = {
                ImageId: 'ami-0...',
                InstanceType: 't1.micro',
                KeyName: '<Key_name>',
                MinCount: 1,
                MaxCount: 1
            };
            // Create a promise on an EC2 service object
            var instancePromise = new AWS.EC2({apiVersion: '2016-11-15'}).runInstances(instanceParams).promise();
            // Handle promise's fulfilled/rejected states
            instancePromise.then(
                function (data) {
                    console.log(data);
                    var instanceId = data.Instances[0].InstanceId;
                    console.log("Created instance", instanceId);
                    // Add tags to the instance
                    var tagParams = {
                        Resources: [instanceId],
                        Tags: [
                            {
                                Key: 'Name',
                                Value: 'SDK Sample'
                            }
                        ]
                    };
                    // Create a promise on an EC2 service object
                    var tagPromise = new AWS.EC2({apiVersion: '2016-11-15'}).createTags(tagParams).promise();
                    // Handle promise's fulfilled/rejected states
                    tagPromise.then(
                        function (data) {
                            console.log("Instance tagged");
                        }).catch(
                        function (err) {
                            console.error(err, err.stack);
                        });
                }).catch(
                function (err) {
                    console.error(err, err.stack);
                });
        }
        catch (e) {
            wl.info('Error: ' + e);
        }
    };
}

function create() {
    if (globalAWS === null)
        globalAWS = new AWS_SDK();
    return globalAWS;
}

module.exports = create;
ERROR:
{ UnknownEndpoint: Inaccessible host: `ec2.us-east-2a.amazonaws.com'.
This service may not be available in the `us-east-2a' region.
at Request.ENOTFOUND_ERROR (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\event_listeners.js:486:46)
at Request.callListeners (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\sequential_executor.js:106:20)
at Request.emit (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\sequential_executor.js:78:10)
at Request.emit (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\request.js:683:14)
at ClientRequest.error (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\event_listeners.js:325:22)
at ClientRequest.<anonymous> (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\http\node.js:93:19)
at ClientRequest.emit (events.js:182:13)
at ClientRequest.EventEmitter.emit (domain.js:442:20)
at TLSSocket.socketErrorListener (_http_client.js:391:9)
at TLSSocket.emit (events.js:182:13) message: 'Inaccessible host: ec2.us-east-2a.amazonaws.com\'. This service may not be
available in the `us-east-2a\' region.', code: 'UnknownEndpoint',
region: 'us-east-2a', hostname: 'ec2.us-east-2a.amazonaws.com',
retryable: true, originalError: { Error: getaddrinfo ENOTFOUND
ec2.us-east-2a.amazonaws.com ec2.us-east-2a.amazonaws.com:443
at GetAddrInfoReqWrap.onlookup [as oncomplete] (dns.js:57:26)
message:
'getaddrinfo ENOTFOUND ec2.us-east-2a.amazonaws.com ec2.us-east-2a.amazonaws.com:443',
errno: 'ENOTFOUND',
code: 'NetworkingError',
syscall: 'getaddrinfo',
hostname: 'ec2.us-east-2a.amazonaws.com',
host: 'ec2.us-east-2a.amazonaws.com',
port: 443,
region: 'us-east-2a',
retryable: true,
time: 2019-01-14T20:03:42.177Z }, time: 2019-01-14T20:03:42.177Z } 'UnknownEndpoint: Inaccessible host:
ec2.us-east-2a.amazonaws.com\'. This service may not be available in
the `us-east-2a\' region.\n at Request.ENOTFOUND_ERROR
(D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\event_listeners.js:486:46)\n
at Request.callListeners
(D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\sequential_executor.js:106:20)\n
at Request.emit
(D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\sequential_executor.js:78:10)\n
at Request.emit
(D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\request.js:683:14)\n
at ClientRequest.error
(D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\event_listeners.js:325:22)\n
at ClientRequest.<anonymous>
(D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\http\node.js:93:19)\n
at ClientRequest.emit (events.js:182:13)\n at
ClientRequest.EventEmitter.emit (domain.js:442:20)\n at
TLSSocket.socketErrorListener (_http_client.js:391:9)\n at
TLSSocket.emit (events.js:182:13)'
Process finished with exit code 0
If I change the REGION to "us-east-2" (delete the 'a' at the end) the error changes to this:
{ Unsupported: The requested configuration is currently not supported. Please check the documentation for supported configurations.
at Request.extractError (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\services\ec2.js:50:35)
at Request.callListeners (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\sequential_executor.js:106:20)
at Request.emit (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\sequential_executor.js:78:10)
at Request.emit (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\request.js:683:14)
at Request.transition (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\request.js:22:10)
at AcceptorStateMachine.runTo (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\state_machine.js:14:12)
at D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\state_machine.js:26:10
at Request.<anonymous> (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\request.js:38:9)
at Request.<anonymous> (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\request.js:685:12)
at Request.callListeners (D:\Workspace\BitBucket\Test\node_modules\aws-sdk\lib\sequential_executor.js:116:18)
message:
'The requested configuration is currently not supported. Please check the documentation for supported configurations.',
code: 'Unsupported',
time: 2019-01-14T20:31:55.954Z,
requestId: '815a44e2-5d0d-453e-a4ff-6faac2695064',
statusCode: 400,
retryable: false,
retryDelay: 51.269952198296934 } 'Unsupported: The requested configuration is currently not supported. Please check the documentation for supported configurations.\n at Request.extractError (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\services\\ec2.js:50:35)\n at Request.callListeners (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\sequential_executor.js:106:20)\n at Request.emit (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\sequential_executor.js:78:10)\n at Request.emit (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\request.js:683:14)\n at Request.transition (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\request.js:22:10)\n at AcceptorStateMachine.runTo (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\state_machine.js:14:12)\n at D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\state_machine.js:26:10\n at Request.<anonymous> (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\request.js:38:9)\n at Request.<anonymous> (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\request.js:685:12)\n at Request.callListeners (D:\\Workspace\\BitBucket\\Test\\node_modules\\aws-sdk\\lib\\sequential_executor.js:116:18)'
Update 1:
I changed the region to 'us-east-2' and also updated my ~/.aws/config file (before, the file contained only one line: 'region=us-west-2'):
[default]
region=us-west-2
output=json
Now I get this error (when I try to decode it in the CMD) - I still don't understand what extra permissions I have to grant my IAM user to be able to read the error message:
An error occurred (AccessDenied) when calling the DecodeAuthorizationMessage operation: User: arn:aws:iam::0046xxxxxxx:user/user_name is not authorized to perform: sts:DecodeAuthorizationMessage
You have configured your AWS region as us-east-2a. That isn't a region; it's an availability zone. Your region should be configured as us-east-2.
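In the sample above, that means:

// us-east-2 is the region; us-east-2a, us-east-2b, ... are availability zones inside it
AWS.config.update({region: 'us-east-2'});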
After I attached the "AdministratorAccess" policy, all issues were gone :) The question now is which role(s)/policies actually need to be granted for this to work... "AdministratorAccess" was just for testing purposes.
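For the two calls this sample actually makes (runInstances and createTags), something much narrower than AdministratorAccess should be enough. A sketch, not an audited least-privilege policy - the resource could be narrowed from "*", and sts:DecodeAuthorizationMessage is only included so the encoded authorization failure from Update 1 can be decoded:

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "ec2:RunInstances",
                "ec2:CreateTags",
                "sts:DecodeAuthorizationMessage"
            ],
            "Resource": "*"
        }
    ]
}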

AWS S3 parse URI error with Docker containers

On my local environment, I'm developing a file upload feature with an express/node backend and an AWS S3-compatible server (using scality/S3). Both services are dockerized in their own containers, and the communication works fine.
My problem is that the S3 server does not seem to understand the upload request. Here is the code I'm trying to use:
const s3 = new S3({
  accessKeyId: 'accessKey1',
  secretAccessKey: 'verySecretKey1',
  endpoint: 's3server:8000',
  sslEnabled: false,
  s3ForcePathStyle: true,
});

function uploadFile(file) {
  const params = {
    Body: file,
    Bucket: 'testbucket',
    Key: 'testkey',
    ACL: 'public-read',
  };
  s3.upload(params, (err, data) => {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data); // successful response
  });
}
The S3 server receives the request but sends back this error:
{"name":"S3","clientIP":"::ffff:172.18.0.5","clientPort":45066,"httpMethod":"PUT","httpURL":"/testbucket/testkey","time":1502458550488,"req_id":"7f4fac280644b5cf203c","level":"info","message":"received request","hostname":"faf8cb0b47d4","pid":103}
{"name":"S3","bytesSent":192,"clientIP":"::ffff:172.18.0.5","clientPort":45066,"httpMethod":"PUT","httpURL":"/testbucket/testkey","httpCode":400,"time":1502458550491,"req_id":"7f4fac280644b5cf203c","elapsed_ms":2.607924,"level":"info","message":"responded with error XML","hostname":"faf8cb0b47d4","pid":103}
And the node backend logs the error:
{ InvalidURI: Couldn't parse the specified URI.
at Request.extractError (/usr/src/api/node_modules/aws-sdk/lib/services/s3.js:577:35)
at Request.callListeners (/usr/src/api/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/usr/src/api/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/usr/src/api/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/usr/src/api/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/usr/src/api/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /usr/src/api/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/usr/src/api/node_modules/aws-sdk/lib/request.js:38:9)
at Request.<anonymous> (/usr/src/api/node_modules/aws-sdk/lib/request.js:685:12)
at Request.callListeners (/usr/src/api/node_modules/aws-sdk/lib/sequential_executor.js:115:18)
at Request.emit (/usr/src/api/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/usr/src/api/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/usr/src/api/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/usr/src/api/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /usr/src/api/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/usr/src/api/node_modules/aws-sdk/lib/request.js:38:9)
message: 'Couldn\'t parse the specified URI.',
code: 'InvalidURI',
region: null,
time: 2017-08-11T13:35:50.510Z,
requestId: '7f4fac280644b5cf203c',
extendedRequestId: '7f4fac280644b5cf203c',
cfId: undefined,
statusCode: 400,
retryable: false,
retryDelay: 57.08331622136704 }
I saw some answers about UTF-8 encoding problems, but that didn't work in my case :/
Does anyone have an idea why it can't parse the URI?
Thank you for your time!
You can fix this by providing your own config.json using volumes, as suggested here.
Copy config.json.
Add "s3server": "us-east-1", to "restEndpoints".
docker run -v $(pwd)/config.json:/usr/src/app/config.json ...
or, if you're using Docker Compose, add something like the following to your docker-compose.yaml:
s3server:
  image: scality/s3server
  restart: always
  expose: [8000]
  stdin_open: true
  tty: true
  container_name: s3server
  volumes:
    - "${PWD}/s3config.json:/usr/src/app/config.json"
