How to properly call AWS.CloudFront.Signer in Node.js?

I am trying to get a signed URL in a Node.js app; my code is shown below.
var AWS = require('aws-sdk');
const fs = require("fs");
var options = { keypairId: 'keypairId', privateKeyPath: 'privateKeyPath', expireTime: (new Date().getTime() + 3000) };
var url = 'cloudfrontURL' + objectpath;
const key = fs.readFileSync('privateKeyPath').toString("ascii");
const id = 'keypairId';
const signer = new AWS.CloudFront.Signer(id, key);
const params = {
  url: url,
  expires: 1538999532,
};
signer.getSignedUrl(params, function (err, data) {
  if (err) { console.log(err); }
  console.log(data);
});
But I am getting the error shown below:
AWS.CloudFront.Signer is not a constructor
What is the reason for this?
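For comparison, here is a minimal sketch of how the v2 aws-sdk Signer is typically constructed and called. The key pair ID, key path and URL below are placeholders, not values from the question, and getSignedUrl returns the URL directly when no callback is passed (it also accepts a callback, as above).
// Minimal sketch, aws-sdk v2; key pair ID, key path and URL are placeholders.
const AWS = require('aws-sdk');
const fs = require('fs');

const keyPairId = 'APKAEXAMPLE';                                 // CloudFront key pair ID (placeholder)
const privateKey = fs.readFileSync('./private_key.pem', 'utf8'); // path is a placeholder

const signer = new AWS.CloudFront.Signer(keyPairId, privateKey);

const signedUrl = signer.getSignedUrl({
  url: 'https://dxxxxxxxxxxxx.cloudfront.net/path/to/object',
  expires: Math.floor(Date.now() / 1000) + 60 * 60,              // Unix time in seconds, one hour from now
});

console.log(signedUrl);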

Related

Lambda NodeJS works intermittently with async method

I have a lambda function written in Node.js. The lambda logic is to extract secrets from AWS Secrets Manager and pass the params to another module, with a token, to make a SOAP call. This lambda was working as expected, without an async method to get the secrets, when the secrets were hardcoded.
But after adding an async getSecrets method, the lambda works intermittently when testing in the AWS console. getSecrets returns the params every time, but the lambda terminates without the intended output; it doesn't make any SOAP call.
The logic of the lambda code is:
Get the secret.
Pass the secret to CallServicewithToken().
Get XML data from SOAP and populate it into the database.
Why does it work intermittently when introducing async calls? I have tried adding async to all methods; still the same issue. Appreciate any input/help.
The code is as below:
'use strict';
var soap = require('strong-soap').soap;
const aws = require("aws-sdk");
const request = require('request');
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = 0;
var rfcUrl = process.env.rfcser_url;
var testUrl = process.env.faccser_url;
var region = process.env.region;
var secretName = process.env.secretName;
var oauthurl = process.env.oauthurl;
var rfcRequestArgs;
var parsedResult;
var tokenrequest;

exports.handler = async function(event, context, callback) {
  const secret = await getSecrets();
  CallServicewithToken();
};

async function getSecrets() {
  const config = { region: region, apiVersion: 'latest' };
  let secretManager = new aws.SecretsManager(config);
  const Result = await secretManager.getSecretValue({ SecretId: secretName }).promise();
  parsedResult = JSON.parse(Result.SecretString);
  tokenrequest = {
    url: oauthurl,
    form: {
      client_id: parsedResult.client_id,
      client_secret: parsedResult.client_secret,
      grant_type: 'client_credentials',
      scope: parsedResult.scope
    }
  };
  console.log('client_id: ' + parsedResult.client_id);
  console.log('client_secret: ' + parsedResult.client_secret);
  console.log('testservice_userid: ' + parsedResult.testservice_userid);
}

function CallServicewithToken() {
  console.log('Calling CallServicewithToken ');
  request.post(tokenrequest, (err, res, body) => {
    if (err) {
      console.log(' error2: ' + err);
      return;
    }
    rfcRequestArgs = {
      UserName: parsedResult.service_username
    };
    var tokenobj = JSON.parse(body);
    var token = 'Bearer ' + tokenobj.access_token;
    var credentials = {
      Authorization: {
        AuthToken: token
      }
    };
    var options = {};
    console.log('Calling Service.');
    soap.createClient(rfcUrl, options, function(err, client) {
      client.addSoapHeader(
        `<aut:Authorization xmlns:aut="http://soap.xyznet.net">
<aut:AuthToken>${token}</aut:AuthToken>
</aut:Authorization>`
      );
      var method = client['GetSourceLocationData'];
      method(rfcRequestArgs, function(err, result, envelope, soapHeader) {
        if (err) {
          console.log('error3: ' + err);
          return;
        } else {
          console.log('Received response from GetSourceLocationData().');
          CallTESTService(JSON.stringify(result));
        }
      });

      function CallTESTService(LocData) {
        var testRequestArgs = {
          UserID: parsedResult.testservice_userid,
          AuthorizationKey: parsedResult.testservice_authorizationkey,
          LocationData: LocData
        };
        console.log('Calling test Service.');
        options = {};
        soap.createClient(testUrl, options, function(err, client) {
          client.addSoapHeader(
            `<Authorization xmlns="testWebService">
<AuthToken>${token}</AuthToken>
</Authorization>`
          );
          var test_method = client['UpdateLocationData'];
          console.log('Called UpdateLocationData service method.');
          test_method(testRequestArgs, function(err, result, envelope, soapHeader) {
            if (err) {
              console.log('test error: ' + err);
              return;
            } else {
              console.log('Response: \n' + JSON.stringify(result));
              console.log('Data updated through test service method.');
            }
          });
        });
      }
    });
  });
}
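Nothing in the handler waits for the callback-based work, so the invocation can end before request.post or the SOAP calls ever fire, which is a likely cause of the intermittent behaviour. Below is a minimal sketch of one way to keep the handler alive, reusing the question's getSecrets and tokenrequest; getToken and callServiceWithToken(token) are hypothetical promisified helpers, not functions from the question.
// Sketch: wrap the callback-based steps in Promises and await them,
// so the Lambda invocation does not end before the work is done.
function getToken(tokenrequest) {
  return new Promise((resolve, reject) => {
    request.post(tokenrequest, (err, res, body) => {
      if (err) return reject(err);
      resolve(JSON.parse(body).access_token);
    });
  });
}

exports.handler = async function(event, context) {
  await getSecrets();                                   // populates tokenrequest/parsedResult
  const accessToken = await getToken(tokenrequest);     // OAuth token from request.post
  await callServiceWithToken('Bearer ' + accessToken);  // SOAP chain, also wrapped in a Promise
};
soap.createClient and the service method calls can be wrapped the same way (or via util.promisify), so the handler only returns once UpdateLocationData has completed.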

Getting 403 from Lambda calling API Gateway

I have an API POST endpoint which updates a customer's information in DynamoDB. It is set to authenticate using AWS_IAM. I am getting a 403 from my lambda when calling this API. I have allowed the execute-api:Invoke permission on the API for the role the lambda uses. I see in this post that I need to create a canonical request. I was able to come up with the code below, and I still get a 403. I can't figure out what is missing and hope a different pair of eyes can spot the problem. Please help!
"use strict";
const https = require("https");
const crypto = require("crypto");
exports.handler = async (event, context, callback) => {
try {
var attributes = {
customerId: 1,
body: { firstName: "abc", lastName: "xyz" }
};
await updateUsingApi(attributes.customerId, attributes.body)
.then((result) => {
var jsonResult = JSON.parse(result);
if (jsonResult.statusCode === 200) {
callback(null, {
statusCode: jsonResult.statusCode,
statusMessage: "Attributes saved successfully!"
});
} else {
callback(null, jsonResult);
}
})
.catch((err) => {
console.log("error: ", err);
callback(null, err);
});
} catch (error) {
console.error("error: ", error);
callback(null, error);
}
};
function sign(key, message) {
return crypto.createHmac("sha256", key).update(message).digest();
}
function getSignatureKey(key, dateStamp, regionName, serviceName) {
var kDate = sign("AWS4" + key, dateStamp);
var kRegion = sign(kDate, regionName);
var kService = sign(kRegion, serviceName);
var kSigning = sign(kService, "aws4_request");
return kSigning;
}
function updateUsingApi(customerId, newAttributes) {
var request = {
partitionKey: `MY_CUSTOM_PREFIX_${customerId}`,
sortKey: customerId,
payLoad: newAttributes
};
var data = JSON.stringify(request);
var apiHost = new URL(process.env.REST_API_INVOKE_URL).hostname;
var apiMethod = "POST";
var path = `/stage/postEndPoint`;
var { amzdate, authorization, contentType } = getHeaders(host, method, path);
const options = {
host: host,
path: path,
method: method,
headers: {
"X-Amz-Date": amzdate,
Authorization: authorization,
"Content-Type": contentType,
"Content-Length": data.length
}
};
return new Promise((resolve, reject) => {
const req = https.request(options, (res) => {
if (res && res.statusCode !== 200) {
console.log("response from api", res);
}
var response = {
statusCode: res.statusCode,
statusMessage: res.statusMessage
};
resolve(JSON.stringify(response));
});
req.on("error", (e) => {
console.log("error", e);
reject(e.message);
});
req.write(data);
req.end();
});
}
function getHeaders(host, method, path) {
var algorithm = "AWS4-HMAC-SHA256";
var region = "us-east-1";
var serviceName = "execute-api";
var secretKey = process.env.AWS_SECRET_ACCESS_KEY;
var accessKey = process.env.AWS_ACCESS_KEY_ID;
var contentType = "application/x-amz-json-1.0";
var now = new Date();
var amzdate = now
.toJSON()
.replace(/[-:]/g, "")
.replace(/\.[0-9]*/, "");
var datestamp = now.toJSON().replace(/-/g, "").replace(/T.*/, "");
var canonicalHeaders = `content-type:${contentType}\nhost:${host}\nx-amz-date:${amzdate}\n`;
var signedHeaders = "content-type;host;x-amz-date";
var payloadHash = crypto.createHash("sha256").update("").digest("hex");
var canonicalRequest = [
method,
path,
canonicalHeaders,
signedHeaders,
payloadHash
].join("/n");
var credentialScope = [datestamp, region, serviceName, "aws4_request"].join(
"/"
);
const sha56 = crypto
.createHash("sha256")
.update(canonicalRequest)
.digest("hex");
var stringToSign = [algorithm, amzdate, credentialScope, sha56].join("\n");
var signingKey = getSignatureKey(secretKey, datestamp, region, serviceName);
var signature = crypto
.createHmac("sha256", signingKey)
.update(stringToSign)
.digest("hex");
var authorization = `${algorithm} Credential=${accessKey}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
return { amzdate, authorization, contentType };
}
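For comparison, here is a hedged sketch of how the SigV4 canonical request is usually assembled: the pieces are joined with real newlines, the canonical query string line is present even when empty, the payload hash covers the actual body, and Lambda's temporary credentials also need the x-amz-security-token header to be sent (signed here for safety). Variable names match the question's getHeaders, and it assumes the request body (data) is available where the hash is computed; treat it as a sketch, not a drop-in replacement (signing libraries such as the aws4 package do this for you).
// Sketch of the SigV4 canonical request and string-to-sign, assuming the same
// host/method/path/data/contentType/amzdate/credentialScope variables as in the question.
const payloadHash = crypto.createHash("sha256").update(data).digest("hex"); // hash of the real body
const canonicalQueryString = "";                                            // empty, but the line must exist
const canonicalHeaders =
  `content-type:${contentType}\nhost:${host}\nx-amz-date:${amzdate}\n` +
  `x-amz-security-token:${process.env.AWS_SESSION_TOKEN}\n`;                // needed with Lambda's temporary creds
const signedHeaders = "content-type;host;x-amz-date;x-amz-security-token";

const canonicalRequest = [
  method,                 // e.g. "POST"
  path,                   // the resource path
  canonicalQueryString,
  canonicalHeaders,       // note: already ends with "\n"
  signedHeaders,
  payloadHash
].join("\n");             // "\n", not "/n"

const stringToSign = [
  "AWS4-HMAC-SHA256",
  amzdate,
  credentialScope,
  crypto.createHash("sha256").update(canonicalRequest).digest("hex")
].join("\n");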

How can I upload multiple images to an S3 bucket in a Lambda function using Node.js?

I am not very familiar with Node and am trying to upload an array of media objects to an S3 bucket using an AWS Lambda Node function.
The payload has an album, which is an array of key/data dictionaries. My code is below, but I'm certain this is wrong.
const awsServerlessExpress = require('aws-serverless-express');
const app = require('./app');
const server = awsServerlessExpress.createServer(app);
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient();
var s3 = new AWS.S3();
var s3Params = {
  Bucket: 'bucketid',
  ContentEncoding: 'base64',
  ContentType: 'image/jpeg'
};
exports.handler = async (event, context) => {
  console.log(event);
  var body = JSON.parse(event.body);
  if (typeof body.album !== 'undefined' && body.album) {
    body.album.forEach(function (value) {
      var data = body.album.mediaString;
      let mediaData = new Buffer(data, 'base64');
      var mediaKey = body.album.mediaKey;
      try {
        s3Params = {
          Bucket: 'bucketID',
          Key: mediaKey,
          Body: mediaData
        };
        try {
          const stored = await s3.upload(s3Params).promise();
          console.log("stored successfully");
          return { body: JSON.stringify(data) };
        } catch (err) {
          console.log("error storing");
          console.log(err);
          return { error: err };
        }
      } catch (err) {
        return { error: err };
      }
    });
    return { body: JSON.stringify(data) };
  } else {
    return { error: 'error' };
  }
};
I get an error that s3 is not found. Just wondering if I'm going about this all wrong.
When I only upload one image with the following code, everything works fine:
const awsServerlessExpress = require('aws-serverless-express');
const app = require('./app');
const server = awsServerlessExpress.createServer(app);
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient();
var s3 = new AWS.S3();
var s3Params = {
  Bucket: 'bucketID',
  ContentEncoding: 'base64',
  ContentType: 'image/jpeg'
};
exports.handler = async (event, context) => {
  var body = JSON.parse(event.body);
  var data = body.mediaString;
  let mediaData = new Buffer(data, 'base64');
  var mediaKey = body.mediaKey;
  try {
    s3Params = {
      Bucket: 'bucketID',
      Key: mediaKey,
      Body: mediaData
    };
    try {
      const stored = await s3.upload(s3Params).promise();
      console.log("stored successfully");
      return { body: JSON.stringify(data) };
    } catch (err) {
      console.log("error storing");
      console.log(err);
      return { error: err };
    }
  } catch (err) {
    return { error: err };
  }
};
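One way to handle the array case is to map each album entry to an upload promise and wait for all of them. Here is a minimal sketch, assuming each entry carries mediaKey and mediaString fields like the single-image payload; the bucket name is a placeholder.
// Sketch: upload every album entry and wait for all uploads to finish.
// Assumes each entry looks like { mediaKey: "...", mediaString: "<base64>" }.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = async (event) => {
  const body = JSON.parse(event.body);
  if (!Array.isArray(body.album)) {
    return { statusCode: 400, body: 'no album in payload' };
  }
  try {
    const results = await Promise.all(
      body.album.map((entry) =>
        s3.upload({
          Bucket: 'my-bucket',                              // placeholder bucket name
          Key: entry.mediaKey,
          Body: Buffer.from(entry.mediaString, 'base64'),
          ContentType: 'image/jpeg'
        }).promise()
      )
    );
    return { statusCode: 200, body: JSON.stringify(results.map((r) => r.Key)) };
  } catch (err) {
    console.log('error storing', err);
    return { statusCode: 500, body: JSON.stringify({ error: err.message }) };
  }
};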

Unable to fetch list of all S3 objects using NodeJs

Kindly excuse my limited knowledge of NodeJS, as I've just started with it. I have the following lambda function, which isn't fetching the list of objects (more than 1000) in S3 and gets stuck in an infinite loop, resulting in the lambda timing out. Not sure what's wrong here.
Code:
console.log('Loading');
const AWS = require('aws-sdk');
var request = true;
const awsOptions = {
  region: "us-east-1"
};
const s3 = new AWS.S3(awsOptions);
var list = [];
exports.handler = async (event, context, callback) => {
  const SrcBucket = event.Records[0].s3.bucket.name;
  const trigger_file = event.Records[0].s3.object.key;
  var bucketParams = {
    Bucket: SrcBucket,
    Prefix: 'Test/'
  };
  do {
    s3.listObjects(bucketParams, (err, data) => {
      if (err)
        console.log("Error", err);
      else {
        list.push(data.Contents);
        if (data.IsTruncated)
          bucketParams.Marker = data.NextMarker;
        else
          request = false;
      }
    });
  } while (request);
  callback(null, {
    listLen: list.length
  });
};
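The callback passed to s3.listObjects never gets a chance to run before the next iteration of the do/while loop, so request is never set to false. A sketch of one way to paginate with await instead (this switches to listObjectsV2 and its ContinuationToken; the bucket and prefix are taken from the question's params, and s3 is the client already created above):
// Sketch: paginate with await so each page completes before asking for the next.
exports.handler = async (event) => {
  const SrcBucket = event.Records[0].s3.bucket.name;
  const bucketParams = { Bucket: SrcBucket, Prefix: 'Test/' };

  const list = [];
  let data;
  do {
    data = await s3.listObjectsV2(bucketParams).promise();
    list.push(...data.Contents);
    bucketParams.ContinuationToken = data.NextContinuationToken; // undefined on the last page
  } while (data.IsTruncated);

  return { listLen: list.length };
};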

How to return listObjectsV2 as JSON

I'm trying to call the AWS S3 bucket and get my list of image URLs, but it's not retrieving them.
I'm using Express for the back end and React for the front end. When I manually enter static data for the JSON array, it does show on the server. I'm using listObjectsV2 to get the list:
const express = require('express');
const app = express();
const aws = require('aws-sdk');
const s3 = new aws.S3({
  accessKeyId: "secretId",
  secretAccessKey: "secretKey",
  region: 'eu-west-2',
  Bucket: 'my-aws-bucket'
});
let params = {
  Bucket: "my-aws-bucket",
  MaxKeys: 6
};
app.get('/api/images', (req, res) => {
  let images = [];
  s3.listObjectsV2(params, function(err, data) {
    if (err) {
      console.log("Error", err);
    } else {
      var href = this.request.httpRequest.endpoint.href;
      var bucketUrl = href + "my-aws-bucket" + '/';
      data.Contents.map(function(photo) {
        var photoKey = photo.Key;
        var photoUrl = bucketUrl + encodeURIComponent(photoKey);
        images.push(photoUrl);
      });
    }
  });
  res.json(images);
});
const port = 5001;
app.listen(port, () => console.log(`Server is on port: ${port}`));
I expected the empty images array that I declared to be filled with image URLs such as
[https://s3.eu-west-2.amazonaws.com/my-aws-bucket/images/image1.png, https://s3.eu-west-2.amazonaws.com/my-aws-bucket/images/image2.png]
However, nothing is being filled into the array. If I manually add items to the images array, like let images = [1,2,3,4];, then when I go to http://localhost:5001/api/images it does show [1,2,3,4], but nothing shows with the code above, which should work.
I worked it out in the end; I just had to move res.json(images); to just after the closing else {}. This is the fixed code:
s3.listObjectsV2(params, function(err, data) {
  if (err) {
    console.log("Error", err);
  } else {
    var href = this.request.httpRequest.endpoint.href;
    var bucketUrl = href + "my-aws-bucket" + '/';
    data.Contents.map(function(photo) {
      var photoKey = photo.Key;
      var photoUrl = bucketUrl + encodeURIComponent(photoKey);
      images.push(photoUrl);
    });
  }
  res.json(images);
});
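For what it's worth, the same route can avoid the callback-placement issue entirely by using the SDK's promise interface. A minimal sketch, assuming the same s3 client and params as above; the bucket URL here is written out by hand rather than read from the SDK's endpoint, so adjust it to match your bucket.
// Sketch: async route handler using the promise form of listObjectsV2.
app.get('/api/images', async (req, res) => {
  try {
    const data = await s3.listObjectsV2(params).promise();
    const bucketUrl = 'https://s3.eu-west-2.amazonaws.com/my-aws-bucket/'; // assumed endpoint + bucket
    const images = data.Contents.map((photo) => bucketUrl + encodeURIComponent(photo.Key));
    res.json(images);
  } catch (err) {
    console.log("Error", err);
    res.status(500).json({ error: "could not list objects" });
  }
});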
