Cannot get proxy parameter working with lambda integration - node.js

I have an API endpoint in API Gateway defined as:
/guide/{proxy+}
which has a GET method and the request path parameter defined as proxy.
Then I use a Lambda integration with URL path parameters set as:
proxy - method.request.path.proxy
In my Lambda function I want to get an S3 file from a bucket based on that path, so I have:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event, context, callback) => {
    var bucketName = 'my-bucket';
    var keyName = 'guide/' + event.proxy;
    var fileContent;

    let data = await getS3File(bucketName, keyName);
    let response = {
        "statusCode": 200,
        "body": data.Body
    };
    callback(null, response);
};

async function getS3File(bucket, key) {
    const params = {
        Bucket: bucket,
        Key: key
    };
    return s3.getObject(params, (err) => {
        if (err) {
            // handle errors
        }
    }).promise();
}
Unfortunately, when I test this in API Gateway and pass the path as en/guide.pdf, I get the following response:
{
    "errorMessage": "The specified key does not exist.",
    "errorType": "NoSuchKey"
}
The file is definitely there; when I test the function directly in Lambda (with a test event), it downloads fine. Why is that? I cannot figure it out.
NOTE: I do not want to select the Use Lambda Proxy integration checkbox, because I lose some other options when I do.

Related

Read from Secrets Manager in AWS Lambda function

I am trying to use the Node.js sample code that AWS Secrets Manager provides to read a secret value, and am putting this code inside a Lambda function. However, I can't seem to get into the function that handles the response from getting the secret value.
The Lambda role has AdministratorAccess permissions, to rule out a permissions issue.
Lambda Code:
exports.handler = async (event) => {
    // Load the AWS SDK
    var AWS = require('aws-sdk'),
        region = "us-east-1",
        secretName = "/my-secrets/level1/level2",
        secret,
        decodedBinarySecret;

    var client = new AWS.SecretsManager({
        region: region
    });

    console.log('above')
    client.getSecretValue({SecretId: secretName}, function(err, data) {
        console.log('in')
        if (err) {
            throw err;
        }
        else {
            if ('SecretString' in data) {
                secret = data.SecretString;
            } else {
                let buff = new Buffer(data.SecretBinary, 'base64');
                decodedBinarySecret = buff.toString('ascii');
            }
        }
        console.log(secret)
    });
    console.log('below')
};
OUTPUT
2020-03-05T18:51:54.547Z a3101875-a1f4-4b6f-ac62-3c2f93f5941f INFO above
2020-03-05T18:51:54.947Z a3101875-a1f4-4b6f-ac62-3c2f93f5941f INFO below
Because the secret exists, I would expect to see "in" and the secret lines in the output...what is preventing it from getting inside that function?
Change your call to be a promise:
const data = await client.getSecretValue({SecretId: secretName}).promise();
The problem you are running into is that the Lambda ends execution before your callback is executed; see AWS Lambda Function Handler in Node.js.
The above solution works, but for a full code example, please refer to this link: https://github.com/awsdocs/aws-doc-sdk-examples/blob/main/javascriptv3/example_code/secrets/src/secrets_getsecretvalue.js
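Putting it together, a minimal promise-based version of the handler might look roughly like this (a sketch using the region and secret name from the question; it returns the secret value, which you would normally avoid logging):

const AWS = require('aws-sdk');

// Create the client outside the handler so it can be reused across invocations.
const client = new AWS.SecretsManager({ region: 'us-east-1' });

exports.handler = async (event) => {
    // Awaiting the promise keeps the handler alive until Secrets Manager responds.
    const data = await client.getSecretValue({ SecretId: '/my-secrets/level1/level2' }).promise();

    let secret;
    if ('SecretString' in data) {
        secret = data.SecretString;
    } else {
        // Binary secrets come back base64-encoded.
        secret = Buffer.from(data.SecretBinary, 'base64').toString('ascii');
    }

    console.log('got secret'); // avoid logging the secret value itself
    return secret;
};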

How to write to an existing file in a S3 bucket based on the pre signed URL?

I've been searching for a way to write to a JSON file in an S3 bucket using a pre-signed URL. From my research it appears it can be done, but these examples are not in Node:
http PUT a file to S3 presigned URLs using ruby
PUT file to S3 with presigned URL
Uploading a file to a S3 Presigned URL
Write to a AWS S3 pre-signed url using Ruby
How to create and read .txt file with fs.writeFile to AWS Lambda
Not having found a Node solution in my searches, and using a third-party API, I'm trying to write the callback data to a JSON file that is in an S3 bucket. I can generate the pre-signed URL with no issues, but when I try to write dummy text to the pre-signed URL I get:
Error: ENOENT: no such file or directory, open
'https://path-to-file-with-signed-url'
When I try to use writeFile:
fs.writeFile(testURL, `This is a write test: ${Date.now()}`, function(err) {
    if (err) return err
    console.log("File written to")
})
My understanding of the documentation for the file parameter is that I can pass a URL. I'm starting to believe this might be a permissions issue, but I'm not having any luck with the documentation.
After implementing node-fetch I still get an error (403 Forbidden) when writing to a file in S3 via the pre-signed URL. Here is the full code from the module I've written:
const aws = require('aws-sdk')
const config = require('../config.json')
const fetch = require('node-fetch')
const expireStamp = 604800 // 7 days
const existsModule = require('./existsModule')

module.exports = async function(toSignFile) {
    let checkJSON = await existsModule(`${toSignFile}.json`)
    if (checkJSON == true) {
        let testURL = await s3signing(`${toSignFile}.json`)
        fetch(testURL, {
            method: 'PUT',
            body: JSON.stringify(`This is a write test: ${Date.now()}`),
        }).then((res) => {
            console.log(res)
        }).catch((err) => {
            console.log(`Fetch issue: ${err}`)
        })
    }
}

async function s3signing(signFile) {
    const s3 = new aws.S3()
    aws.config.update({
        accessKeyId: config.aws.accessKey,
        secretAccessKey: config.aws.secretKey,
        region: config.aws.region,
    })
    params = {
        Bucket: config.aws.bucket,
        Key: signFile,
        Expires: expireStamp
    }
    try {
        // let signedURL = await s3.getSignedUrl('getObject', params)
        let signedURL = await s3.getSignedUrl('putObject', params)
        console.log('\x1b[36m%s\x1b[0m', `Signed URL: ${signedURL}`)
        return signedURL
    } catch (err) {
        return err
    }
}
Reviewing the permissions, I have no issues with uploading, and write access has been granted on the bucket. In Node, how can I write to a file in the S3 bucket using that file's pre-signed URL as the path?
fs is the filesystem module. You can't use it as an HTTP client.
You can use the built-in https module, but I think you'll find it easier to use node-fetch.
fetch('your signed URL here', {
    method: 'PUT',
    body: JSON.stringify(data),
    // more options and request headers and such here
}).then((res) => {
    // do something
}).catch((e) => {
    // do something else
});
I was looking for an elegant way to transfer an S3 file to an S3 signed URL using PUT. Most examples I found used PUT({ body: data }). I came across one suggestion to read the data into a readable stream and then pipe it to the PUT, but I still didn't like the notion of loading large files into memory before handing them to the PUT stream. Piping read to write is always better for memory and performance. Since s3.getObject(params).createReadStream() returns a stream that supports pipe, all we need to do is pipe it correctly into the PUT request, which exposes a write stream.
Get object function
async function GetFileReadStream(key) {
    return new Promise(async (resolve, reject) => {
        var params = {
            Bucket: bucket,
            Key: key
        };
        var fileSize = await s3.headObject(params)
            .promise()
            .then(res => res.ContentLength);
        resolve({ stream: s3.getObject(params).createReadStream(), fileSize });
    });
}
Put object function
const request = require('request');

async function putStream(presignedUrl, readStream) {
    return new Promise((resolve, reject) => {
        var putRequestWriteStream = request.put({
            url: presignedUrl,
            headers: {
                'Content-Type': 'application/octet-stream',
                'Content-Length': readStream.fileSize
            }
        });
        putRequestWriteStream.on('response', function(response) {
            var etag = response.headers['etag'];
            resolve(etag);
        })
        .on('end', () => console.log("put done"));
        readStream.stream.pipe(putRequestWriteStream);
    });
}
This works great with a very small memory footprint. Enjoy.
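For completeness, a hypothetical way to wire the two functions together (key and presignedUrl stand in for your own values):

async function transfer(key, presignedUrl) {
    // GetFileReadStream resolves to { stream, fileSize }, which is the shape putStream expects.
    const readStream = await GetFileReadStream(key);
    const etag = await putStream(presignedUrl, readStream);
    console.log('uploaded, etag:', etag);
}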

Why can't I upload a file to s3 with my Lambda function?

I am creating a Lambda function which I've tied to API Gateway, and my code doesn't seem to fire the s3.putObject call.
I can console.log the AWS, s3, and params variables with no problems, but when I try to use the putObject function, nothing fires; the rest of my code simply seems to run.
Does anyone know what I might need to do?
I've set a region and an API version on my S3 instance
Logged out my variables
Checked CloudWatch logs for changes
exports.handler = async (event) => {
    const AWS = require('aws-sdk');
    const s3 = new AWS.S3({region: "us-east-1", apiVersion: '2006-03-01'});
    const params = {
        Bucket: bucketName,
        Key: 'file.txt',
        ContentType: 'text/plain',
        Body: JSON.stringify(event)
    };
    // The below doesn't seem to run or log out
    s3.putObject(params).promise().then(data => {
        console.log('complete:PUT Object', data);
    })
    .catch(err => {
        console.log('failure:PUT Object', err);
    });
    return JSON.stringify(event);
};
I expect to be able to go into my S3 bucket and see a file uploaded. Instead it's empty.
Because you're using exports.handler = async (event) in your code, you're using async/await (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function).
Within an async function you can use the await syntax to wait for a promise to resolve before continuing:
try {
    const res = await s3.upload(params, options).promise();
    console.log('complete:', res);
} catch (err) {
    console.log('error:', err);
}
This is a modern alternative to callbacks and keeps the code consistent (no mixing of callbacks and async functions).
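For reference, a complete async version of the handler could look roughly like this (a sketch; bucketName isn't defined in the original code, so here it is assumed to come from an environment variable):

const AWS = require('aws-sdk');
const s3 = new AWS.S3({ region: 'us-east-1', apiVersion: '2006-03-01' });

exports.handler = async (event) => {
    const params = {
        Bucket: process.env.BUCKET_NAME, // assumption: bucket name supplied via env var
        Key: 'file.txt',
        ContentType: 'text/plain',
        Body: JSON.stringify(event)
    };

    try {
        // Awaiting the promise keeps the function alive until the upload finishes.
        const res = await s3.putObject(params).promise();
        console.log('complete:PUT Object', res);
    } catch (err) {
        console.log('failure:PUT Object', err);
    }

    return JSON.stringify(event);
};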
You aren't waiting for the promise to resolve before telling Lambda to return a result. Promises are great if you have a heap of chaining, but in this simple case a callback should be enough.
Also, if you are using async you need to make sure the runtime is Node.js 8.10 or above.
exports.handler = async (event, context, callback) => {
    const AWS = require('aws-sdk');
    const s3 = new AWS.S3({region: "us-east-1", apiVersion: '2006-03-01'});
    const params = {
        Bucket: bucketName,
        Key: 'file.txt',
        ContentType: 'text/plain',
        Body: JSON.stringify(event)
    };
    console.log(JSON.stringify(event));
    // The below doesn't seem to run or log out
    s3.upload(params, options, function(err, data) {
        console.log(err, data);
        if (!err) {
            callback(null, "All Good");
        }
        else {
            callback(err);
        }
    });
};
A little more reading on the context object. https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html

How to parse the Content-Type of the data being uploaded through AWS API Gateway in AWS Lambda (Node.js)?

I am creating a solution where files and form data are submitted from an HTML form to API Gateway and then to AWS Lambda for processing. The data can be "multipart/form-data" or "application/x-www-form-urlencoded", and binary support is already configured in my API Gateway. I googled it but could not find which Node.js library to use to parse the Content-Type header. Below is the code I am using:
const AWS = require('aws-sdk');
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
const querystring = require('querystring');
AWS.config.region = 'xxx';

// "exports.handler" must match the entrypoint defined in the lambda Config.
exports.handler = function (event, context, callback) {
    var bodyBuffer = new Buffer(String(event['body-json']), 'base64');
    const params1 = querystring.parse(event.body);
    var param = {Bucket: 'mybucket', Key: 'abc.csv', Body: bodyBuffer};
    console.log("s3");
    s3.upload(param, function(err, bodyBuffer) {
        if (err) {
            console.log(err, err.stack); // an error occurred
        }
        else console.log(bodyBuffer); // successful response
        console.log('actually done!');
        context.done();
    });
    const my_field1 = params1['my-field1'];
    const html = `<!DOCTYPE html><p>You said: ` + my_field1 + `</p>`;
    if (my_field1 == '') {
        callback(null, "textfield1Notfilled");
    }
    callback(null, html);
}
(For now this code uploads the file to the S3 bucket and displays the form data on an HTML page.)
Please help!!
You can try using event.headers to get all the request headers and then use event.headers['Content-Type'].
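As a rough sketch of how you might branch on that header, assuming the request is forwarded to the event in the Lambda proxy format (event.headers, event.body, event.isBase64Encoded); with a custom mapping template the field names would differ, and multipart bodies would still need a parser such as busboy:

const querystring = require('querystring');

exports.handler = async (event) => {
    // Header names may arrive in different casings depending on the client.
    const headers = event.headers || {};
    const contentType = headers['Content-Type'] || headers['content-type'] || '';

    // With binary support enabled the body may be base64-encoded.
    const rawBody = event.isBase64Encoded
        ? Buffer.from(event.body, 'base64').toString()
        : event.body;

    if (contentType.startsWith('application/x-www-form-urlencoded')) {
        const fields = querystring.parse(rawBody);
        console.log('form fields:', fields);
    } else if (contentType.startsWith('multipart/form-data')) {
        // Parsing multipart bodies needs a library such as busboy (not shown here).
        console.log('multipart upload received');
    }

    return { statusCode: 200, body: 'ok' };
};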

How would you upload a file through mocha/chai for testing?

I'm using the method where the client sends a request to the server to upload a file to an S3 bucket, and the server sends back a signed request to allow the client to do this. I'm following this tutorial:
https://devcenter.heroku.com/articles/s3-upload-node
Does anyone know how I can write an API endpoint test for this? I'm not doing the client side code since it's an iPhone app but I still want to test my endpoint in my tests.
Based on code like this, from your link:
app.get('/sign-s3', (req, res) => {
    const s3 = new aws.S3();
    const fileName = req.query['file-name'];
    const fileType = req.query['file-type'];
    const s3Params = {
        Bucket: S3_BUCKET,
        Key: fileName,
        Expires: 60,
        ContentType: fileType,
        ACL: 'public-read'
    };
    s3.getSignedUrl('putObject', s3Params, (err, data) => {
        if (err) {
            console.log(err);
            return res.end();
        }
        const returnData = {
            signedRequest: data,
            url: `https://${S3_BUCKET}.s3.amazonaws.com/${fileName}`
        };
        res.write(JSON.stringify(returnData));
        res.end();
    });
});
I would write a unit test, as a full integration test would depend on your AWS account in your test environment. For that I would mock req and s3.getSignedUrl and test that getSignedUrl is called with the correct parameters. I would also add a test, still with mocks, to be sure the correct JSON is returned.
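A minimal sketch of such a unit test with mocha, chai, sinon and supertest might look like this (the ../app path exporting the Express app is an assumption, and the AWS call itself is stubbed so no real account is needed):

const { expect } = require('chai');
const sinon = require('sinon');
const request = require('supertest');
const aws = require('aws-sdk');
const app = require('../app'); // assumption: the Express app is exported here

describe('GET /sign-s3', () => {
    let signStub;

    beforeEach(() => {
        // Stub getSignedUrl so no real AWS call is made.
        signStub = sinon.stub(aws.S3.prototype, 'getSignedUrl')
            .callsArgWith(2, null, 'https://example.com/fake-signed-url');
    });

    afterEach(() => signStub.restore());

    it('returns a signed request and the public url', async () => {
        const res = await request(app)
            .get('/sign-s3')
            .query({ 'file-name': 'test.png', 'file-type': 'image/png' });

        const body = JSON.parse(res.text);
        expect(body.signedRequest).to.equal('https://example.com/fake-signed-url');
        expect(signStub.calledOnce).to.be.true;
        // Check the parameters the route passed to getSignedUrl.
        expect(signStub.firstCall.args[0]).to.equal('putObject');
        expect(signStub.firstCall.args[1]).to.include({ Key: 'test.png', ContentType: 'image/png' });
    });
});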
