How to fetch aws ecs container ip address in nodejs project - node.js

I have deployed my API project (a Node.js project) to an AWS ECS container, and the project contains Swagger documentation. In Swagger I want to display the current host IP address that the API is running on, but I cannot find the right code to fetch it. Is there a solution for this? I have managed to implement it in a .NET Core API.
How it looks right now:
Thanks in advance.

You can make use of the ECS container agent's introspection endpoint, http://172.17.0.1:51678/v1/metadata, from an ECS task to fetch details about the container instance. Those details can then be used to look up the instance's private/public IP addresses. Example:
import http from 'http';
import util from 'util';
import AWS from 'aws-sdk';

export const getIPAddresses = async () => {
    try {
        let options: any = {
            hostname: '172.17.0.1',
            port: 51678,
            path: '/v1/metadata',
            method: 'GET'
        }

        let containerInstanceDetails: any = await httpGet(options);
        containerInstanceDetails = JSON.parse(containerInstanceDetails);
        const cluster = containerInstanceDetails["Cluster"];
        const containerInstanceArn = containerInstanceDetails["ContainerInstanceArn"];
        const containerInstanceUUID = containerInstanceArn.split('/')[2];

        let params: any = {
            cluster: cluster,
            containerInstances: [containerInstanceUUID]
        }

        if (!AWS.config.region) {
            AWS.config.update({
                region: <your_aws_region>
            });
        }

        const ecs = new AWS.ECS({ 'region': <your_aws_region> });
        const ec2 = new AWS.EC2({ 'region': <your_aws_region> });
        const describeContainerInstancesAsync = util.promisify(ecs.describeContainerInstances).bind(ecs);
        const describeInstancesAsync = util.promisify(ec2.describeInstances).bind(ec2);

        let data = await describeContainerInstancesAsync(params);
        const ec2InstanceId = data.containerInstances[0].ec2InstanceId;

        params = {
            InstanceIds: [ec2InstanceId]
        }

        data = await describeInstancesAsync(params);
        return [data.Reservations[0].Instances[0].PrivateIpAddress, data.Reservations[0].Instances[0].PublicIpAddress];
    }
    catch (err) {
        console.log(err);
    }
}
async function httpGet(options) {
    return new Promise((resolve, reject) => {
        http.get(options, response => {
            response.setEncoding('utf8');
            let body = '';
            // Accumulate chunks: resolving on the first 'data' event can
            // return partial JSON for larger responses.
            response.on('data', chunk => {
                body += chunk;
            });
            response.on('end', () => resolve(body));
        }).on('error', error => {
            reject(error.message);
        });
    });
}
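To wire this into the Swagger document from the original question, here is a minimal usage sketch. It assumes swagger-ui-express and an Express app; `initSwagger`, `app`, `swaggerDocument`, and the port are placeholders from your own project, not part of the answer above:

import swaggerUi from 'swagger-ui-express';

// Hypothetical startup wiring: patch the Swagger host with the fetched IP
// before mounting the UI.
const initSwagger = async (app, swaggerDocument) => {
    const ips = await getIPAddresses();
    if (ips) {
        const [privateIp, publicIp] = ips;
        swaggerDocument.host = `${publicIp || privateIp}:3000`; // port is an assumption
    }
    app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerDocument));
};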

Related

How to mock a class that belongs to a module dependency? (And ensure that a certain method inside the mocked class has been called) Jest - NodeJs

I am trying to mock the S3 class inside the aws-sdk module while making sure that the methods inside the S3 class can be spied on.
I am able to mock the S3 class inside aws-sdk, but I cannot spy on the methods inside the class.
Any ideas on how to approach this problem?
These are my code snippets:
services/s3.js
const AWS = require('aws-sdk');
const s3 = new AWS.S3(); // instantiate the client that the function below uses

const uploadAsset = async (param) => {
    try {
        const response = await s3.upload(param).promise();
        return response;
    } catch (e) {
        console.log(e);
    }
}

module.exports = { uploadAsset }
services.s3.test.js
const AWS = require('aws-sdk');
const { uploadAsset } = require('../services/s3')
jest.mock('aws-sdk', () => {
    return {
        S3: class {
            constructor() { }
            upload(param) { // 👈 I want to make sure that this method is called
                return {
                    promise: () => {
                        return Promise.resolve({
                            Location: `http://${param.Bucket}.s3.amazonaws.com/${param.Key}`,
                            Key: param.Key
                        })
                    }
                }
            }
        }
    }
});
describe('uploadAsset() functionality', () => {
    it('should upload an asset', async () => {
        const uploadPath = 'users/profilePicture';
        const base64Str = '/9j/4AAQSkZJRgABAQAAAQABAAD/';
        const buffer = Buffer.from(base64Str, 'base64');
        const s3 = new AWS.S3();
        const response = await uploadAsset({
            Bucket: 'BucketName',
            Key: `KeyName`,
            Body: buffer,
        });
        const spy = jest.spyOn(s3, 'upload')
        expect(spy).toBeCalled(); // 🚨 This spy never gets called
    });
});
Any insights would be helpful.
Thanks.
I mocked the aws-sdk successfully. However, my spy on the S3 class never gets called.
I am almost positive that this is a scope problem: I think my spyOn call only affects my local S3 instance. However, I still have no idea how to test this specific scenario.
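One common fix for exactly this scope problem: hoist the mock into a mock-prefixed variable that the jest.mock factory closes over, then assert on that function instead of spying on a separate local instance. A minimal sketch adapted to the snippets above (not the only way to do it):

// services.s3.test.js (sketch): `mockUpload` is shared between the mocked
// class and the assertion. Jest allows the out-of-scope reference because
// the variable name starts with "mock", and it is only dereferenced when
// uploadAsset() actually calls s3.upload().
const mockUpload = jest.fn((param) => ({
    promise: () => Promise.resolve({
        Location: `http://${param.Bucket}.s3.amazonaws.com/${param.Key}`,
        Key: param.Key,
    }),
}));

jest.mock('aws-sdk', () => ({
    S3: jest.fn(() => ({ upload: mockUpload })),
}));

const { uploadAsset } = require('../services/s3');

it('should call S3.upload', async () => {
    await uploadAsset({ Bucket: 'BucketName', Key: 'KeyName', Body: Buffer.from('') });
    expect(mockUpload).toHaveBeenCalled();
});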

Delivering image from S3 to React client via Context API and Express server

I'm trying to download a photo from an AWS S3 bucket via an Express server to serve to a React app, but I'm not having much luck. Here are my (unsuccessful) attempts so far.
The Workflow is as follows:
Client requests photo after retrieving key from database via Context API
Request sent to express server route (important so as to hide the true location from the client)
Express server route requests blob file from AWS S3 bucket
Express server parses image to base64 and serves to client
Client updates state with new image
React Client
const [profilePic, setProfilePic] = useState('');
useEffect(() => {
    // the effect callback itself can't be async, so wrap the await in an async IIFE
    (async () => {
        await actions.getMediaSource(tempPhoto.key)
            .then(resp => {
                console.log('server resp: ', resp.data.data.newTest) // returns ����\u0000�\u0000\b\u0006\
                const url = window.URL || window.webkitURL;
                const blobUrl = url.createObjectURL(resp.data.data.newTest);
                console.log("blob ", blobUrl);
                setProfilePic({ ...profilePic, image: resp.data.data.newTest });
            })
            .catch(err => errors.push(err));
    })();
});
Context API - just axios wrapped into its own library
getMediaContents = async (key) => {
    return await this.API.call(`http://localhost:5000/${MEDIA}/mediaitem/${key}`, "GET", null, true, this.state.accessToken, null);
}
Express server route
router.get("/mediaitem/:key", async (req, res, next) => {
try{
const { key } = req.params;
// Attempt 1 was to try with s3.getObject(downloadParams).createReadStream();
const readStream = getFileStream(key);
readStream.pipe(res);
// Attempt 2 - attempt to convert response to base 64 encoding
var data = await getFileStream(key);
var test = data.Body.toString("utf-8");
var container = '';
if ( data.Body ) {
container = data.Body.toString("utf-8");
} else {
container = undefined;
}
var buffer = (new Buffer.from(container));
var test = buffer.toString("base64");
require('fs').writeFileSync('../uploads', test); // it never wrote to this directory
console.log('conversion: ', test); // prints: 77+977+977+977+9AO+/vQAIBgYH - this doesn't look like base64 to me.
delete buffer;
res.status(201).json({ newTest: test });
} catch (err){
next(ApiError.internal(`Unexpected error > mediaData/:id GET -> Error: ${err.message}`));
return;
}
});
AWS S3 Library - I made my own library for using the s3 bucket as I'll need to use more functionality later.
const getFileStream = async (fileKey) => {
    const downloadParams = {
        Key: fileKey,
        Bucket: bucketName
    }

    // This was attempt 1's return without async in the parameter
    return s3.getObject(downloadParams).createReadStream();

    // Attempt 2's intention was just to wait for the promise to be fulfilled.
    return await s3.getObject(downloadParams).promise();
}
exports.getFileStream = getFileStream;
If you've gotten this far you may have realised that I've tried a couple of things from different sources and documentation but I'm not getting any further. I would really appreciate some pointers and advice on what I'm doing wrong and what I could improve on.
If any further information is needed then just let me know.
Thanks in advance for your time!
Maybe it will be useful for you; this is how I get an image from S3 and process it on the server.
Create a temporary directory:
// assumes: const { mkdtemp } = require('fs/promises'), plus `path` and `os`
createTmpDir(): Promise<string> {
    return mkdtemp(path.join(os.tmpdir(), 'tmp-'));
}
Get the file:
readStream(path: string) {
    return this.s3
        .getObject({
            Bucket: this.awsConfig.bucketName,
            Key: path,
        })
        .createReadStream();
}
How I process the file:
async MainMethod(fileName) {
    const dir = await this.createTmpDir();
    const serverPath = path.join(dir, fileName);

    // stream the S3 object to a temporary file (pipeline from 'stream/promises')
    await pipeline(
        this.readStream(fileName),
        fs.createWriteStream(serverPath + '.jpg')
    );

    const createFile = await sharp(serverPath + '.jpg')
        .jpeg()
        .resize({
            width: 640,
            fit: sharp.fit.inside,
        })
        .toFile(serverPath + '.jpeg');

    const imageBuffer = fs.readFileSync(serverPath + '.jpeg');
    // my manipulations
    fs.rmSync(dir, { recursive: true, force: true }); // delete the temporary folder
}
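Back on the question's Attempt 2: the garbage output most likely comes from round-tripping the binary Body through a UTF-8 string before base64-encoding it. A hedged sketch of the direct conversion, reusing the question's own route shape (`s3` and `bucketName` as defined in the question's S3 library):

// data.Body from getObject(...).promise() is already a Buffer, so encode it
// to base64 directly instead of going through toString("utf-8") first.
router.get('/mediaitem/:key', async (req, res, next) => {
    try {
        const data = await s3.getObject({ Bucket: bucketName, Key: req.params.key }).promise();
        const base64 = data.Body.toString('base64');
        // A data URL lets the React client drop the string straight into an <img> src.
        res.status(200).json({ newTest: `data:${data.ContentType};base64,${base64}` });
    } catch (err) {
        next(err);
    }
});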

NodeJS Amazon AWS SDK S3 client stops working intermittently

I have a NodeJS Express web server that serves files from AWS S3. Most of the time this exact code works correctly and serves files for a wide variety of applications with large numbers of requests in production. The NodeJS web server runs across multiple nodes on a Docker Swarm server.
After about 2-3 weeks this stops working: there is no response from the S3Client GetObjectCommand, and no error is returned either. It starts working again only after restarting the NodeJS Docker container.
I read in the S3 SDK docs that the SDK retries automatically:
Each AWS SDK implements automatic retry logic.
Questions:
How can we make this code more resilient so it does not need a restart?
Is the error handling correct? I'm wondering why there is seemingly no response or error returned at all in this situation.
Is it necessary to configure the retry settings?
NodeJS version: node:lts-alpine
Module: @aws-sdk/client-s3
Controllers
AWS Controller
const consoleLogger = require('../logger/logger.js').console;
const { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3');

const config = {
    "credentials": {
        "accessKeyId": "example",
        "secretAccessKey": "example"
    },
    "endpoint": "example",
    "sslEnabled": true,
    "forcePathStyle": true
}
const s3client = new S3Client(config);

const awsCtrl = {};
awsCtrl.getObject = async (key) => {
    // Get object from Amazon S3 bucket
    let data;
    try {
        // Data is returned as a ReadableStream
        data = await s3client.send(new GetObjectCommand({ Bucket: "example", Key: key }));
        console.log("Success", data);
    } catch (e) {
        consoleLogger.error("AWS S3 error: ", e);
        const awsS3Error = {
            name: e.name || null,
            status: e.$metadata.httpStatusCode || 500
        };
        throw awsS3Error;
    }
    return data;
}

module.exports = awsCtrl;
Files Controller
const queryString = require('query-string');
const consoleLogger = require('../logger/logger.js').console;
const httpCtrl = require('./http.ctrl');
const jwtCtrl = require('./jwt.ctrl');
const awsCtrl = require('./aws.ctrl');

const filesCtrl = {}; // elided in the original snippet; added so the code runs

filesCtrl.deliverFile = async (req, res) => {
    /* Get object from AWS S3 */
    let fileObjectStream;
    try {
        fileObjectStream = await awsCtrl.getObject(filePath);
    } catch (e) {
        consoleLogger.error(`Unable to get object from AWS S3`, e);
        if (e.status && e.status === 404) {
            result.error = `Not found`;
            result.status = 404;
            return res.status(result.status).json(result);
        }
        return res.status(e.status || 500).json(result);
    }

    const filename = lookupResponse.data.filename;
    // Set response header: Content-Disposition
    res.attachment(filename);

    // API response object stream download to client
    return fileObjectStream.Body.pipe(res);
}
API
const express = require('express');
const router = express.Router();
const filesCtrl = require('../../controllers/files.ctrl');
const filesValidation = require('../validation/files');

router.get('/:fileId', [filesValidation.getFile], (req, res, next) => {
    return filesCtrl.deliverFile(req, res);
});

AWS Transcribe client does not provide an export named 'transcribeClient'

I'm trying to integrate AWS Transcribe in my Node.js application. AWS S3 and Polly work fine, but AWS Transcribe does not. I'm using the example code from AWS.
When I try to start a transcribe job using the AWS example code, I receive the following error: The requested module './libs/transcribeClient.js' does not provide an export named 'transcribeClient'
That was also the only file where I received the error that require is not defined. I wonder why it only happens with AWS Transcribe and not with the other services. I'm also able to start a transcribe job via the AWS CLI.
The AWS Transcribe code that does not work - transcribeClient.js:
const AWS_BUCKET_NAME = "X"
const AWS_REGION = "eu-central-1"
const AWS_ACCESS_KEY = "XXX"
const AWS_SECRET_KEY = "XXX"

// snippet-start:[transcribe.JavaScript.createclientv3]
const { TranscribeClient } = require('@aws-sdk/client-transcribe');
// Create an Amazon Transcribe service client object.
const transcribeClient = new TranscribeClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY });
module.exports = { transcribeClient };
The AWS Polly code that works - pollyClient.js:
const AWS_BUCKET_NAME = "X"
const AWS_REGION = "eu-central-1"
const AWS_ACCESS_KEY = "XXX"
const AWS_SECRET_KEY = "XXX"

// snippet-start:[polly.JavaScript.createclientv3]
const { PollyClient } = require("@aws-sdk/client-polly");
// Create an Amazon Polly service client object.
const pollyClient = new PollyClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY });
module.exports = { pollyClient };
I'm looking forward to reading from you! Thanks!
I solved it. Now it's working in my Node.js 12 environment.
package.json
I changed "type": "module" to "type": "commonjs".
transcribeClient.js needs to look like this:
Here I changed export to module.exports.
const { TranscribeClient } = require("@aws-sdk/client-transcribe");
const transcribeClient = new TranscribeClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY });
module.exports = { transcribeClient };
transcribe_create_job.js needs to look like this:
Here I changed the import statement to require.
const { StartTranscriptionJobCommand } = require("@aws-sdk/client-transcribe");
const { transcribeClient } = require("./libs/transcribeClient.js")

// Set the parameters
const params = {
    TranscriptionJobName: "test123",
    LanguageCode: "en-GB", // For example, 'en-US'
    MediaFormat: "webm", // For example, 'wav'
    Media: {
        MediaFileUri: "https://x.s3.eu-central-1.amazonaws.com/dlpasiddi.webm",
    },
};

const run = async () => {
    try {
        const data = await transcribeClient.send(
            new StartTranscriptionJobCommand(params)
        );
        console.log("Success - put", data);
        return data; // For unit tests.
    } catch (err) {
        console.log("Error", err);
    }
};
run();
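One caveat on the client construction shown above: `{ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY }` uses object shorthand, so it produces keys the v3 client never reads (it will silently fall back to the environment for region and credentials). A sketch of the explicit form:

const { TranscribeClient } = require("@aws-sdk/client-transcribe");

// v3 clients read `region` and `credentials`, not arbitrary key names.
const transcribeClient = new TranscribeClient({
    region: AWS_REGION,
    credentials: {
        accessKeyId: AWS_ACCESS_KEY,
        secretAccessKey: AWS_SECRET_KEY
    }
});
module.exports = { transcribeClient };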

To mock AWS SES with Sinon

I'm trying to mock SES with Sinon, but I'm facing the error below. I tried using aws-sdk-mock as well, but it's not working.
Error: TypeError: Cannot stub non-existent own property sendEmail
Code snippet of the test class:
import * as AWS from 'aws-sdk';

const sandbox = sinon.createSandbox();
sandbox.stub(AWS.SES, 'sendEmail').returns({ promise: () => true });
Actual class:
import * as AWS from 'aws-sdk';
import * as _ from 'lodash';

export async function sendAlertMailOnFailure(status: any) {
    // load AWS SES
    var ses = new AWS.SES();

    const params = {
        Destination: {
            ToAddresses: <to_address>
        },
        Message: {...},
        Source: <sender_address>
    }

    ses.sendEmail(params, (err, data) => {
        if (err) {
            log.error("Error sending mail::");
            log.error(err, err.stack);
        }
    })
}
Is there any way to mock SES with Sinon or with aws-sdk-mock?
My answer here is not a direct solution for SES, but it is a working solution I'm using for mocking DynamoDB.DocumentClient and SQS. Perhaps you can adapt my working example for SES and other aws-sdk clients in your unit tests.
I just spent hours trying to get AWS SQS mocking working without resorting to aws-sdk-mock's requirement of importing aws-sdk clients inside a function.
The mocking for AWS.DynamoDB.DocumentClient was pretty easy, but the AWS.SQS mocking had me stumped until I came across the suggestion to use rewire.
My lambda moves bad messages to an SQS FailQueue (rather than letting the Lambda fail and return the message to the regular Queue for retries, and then the DeadLetterQueue after maxRetries). The unit tests needed to mock the following SQS methods:
SQS.getQueueUrl
SQS.sendMessage
SQS.deleteMessage
I'll try to keep this example code as concise as I can while still including all the relevant parts:
Snippet of my AWS Lambda (index.js):
const AWS = require('aws-sdk');
AWS.config.update({region:'eu-west-1'});
const docClient = new AWS.DynamoDB.DocumentClient();
const sqs = new AWS.SQS({ apiVersion: '2012-11-05' });
// ...snip
Abridged Lambda event records (event.json)
{
    "valid": {
        "Records": [{
            "messageId": "c292410d-3b27-49ae-8e1f-0eb155f0710b",
            "receiptHandle": "AQEBz5JUoLYsn4dstTAxP7/IF9+T1S994n3FLkMvMmAh1Ut/Elpc0tbNZSaCPYDvP+mBBecVWmAM88SgW7iI8T65Blz3cXshP3keWzCgLCnmkwGvDHBYFVccm93yuMe0i5W02jX0s1LJuNVYI1aVtyz19IbzlVksp+z2RxAX6zMhcTy3VzusIZ6aDORW6yYppIYtKuB2G4Ftf8SE4XPzXo5RCdYirja1aMuh9DluEtSIW+lgDQcHbhIZeJx0eC09KQGJSF2uKk2BqTGvQrknw0EvjNEl6Jv56lWKyFT78K3TLBy2XdGFKQTsSALBNtlwFd8ZzcJoMaUFpbJVkzuLDST1y4nKQi7MK58JMsZ4ujZJnYvKFvgtc6YfWgsEuV0QSL9U5FradtXg4EnaBOnGVTFrbE18DoEuvUUiO7ZQPO9auS4=",
            "body": "{ \"key1\": \"value 1\", \"key2\": \"value 2\", \"key3\": \"value 3\", \"key4\": \"value 4\", \"key5\": \"value 5\" }",
            "attributes": {
                "ApproximateReceiveCount": "1",
                "SentTimestamp": "1536763724607",
                "SenderId": "AROAJAAXYIAN46PWMV46S:steve.goossens@bbc.co.uk",
                "ApproximateFirstReceiveTimestamp": "1536763724618"
            },
            "messageAttributes": {},
            "md5OfBody": "e5b16f3a468e6547785a3454cfb33293",
            "eventSource": "aws:sqs",
            "eventSourceARN": "arn:aws:sqs:eu-west-1:123456789012:sqs-queue-name",
            "awsRegion": "eu-west-1"
        }]
    }
}
Abridged unit test file (test/index.test.js):
const AWS = require('aws-sdk');
const expect = require('chai').expect;
const LamdbaTester = require('lambda-tester');
const rewire = require('rewire');
const sinon = require('sinon');
const event = require('./event');
const lambda = rewire('../index');
let sinonSandbox;
function mockGoodSqsMove() {
    const promiseStubSqs = sinonSandbox.stub().resolves({});
    const sqsMock = {
        getQueueUrl: () => ({ promise: sinonSandbox.stub().resolves({ QueueUrl: 'queue-url' }) }),
        sendMessage: () => ({ promise: promiseStubSqs }),
        deleteMessage: () => ({ promise: promiseStubSqs })
    }
    lambda.__set__('sqs', sqsMock);
}

describe('handler', function () {
    beforeEach(() => {
        sinonSandbox = sinon.createSandbox();
    });

    afterEach(() => {
        sinonSandbox.restore();
    });

    describe('when SQS message is in dedupe cache', function () {
        beforeEach(() => {
            // mock SQS
            mockGoodSqsMove();

            // mock DynamoDBClient
            const promiseStub = sinonSandbox.stub().resolves({ 'Item': 'something' });
            sinonSandbox.stub(AWS.DynamoDB.DocumentClient.prototype, 'get').returns({ promise: promiseStub });
        });

        it('should return an error for a duplicate message', function () {
            return LamdbaTester(lambda.handler)
                .event(event.valid)
                .expectReject((err, additional) => {
                    expect(err).to.have.property('message', 'Duplicate message: {"Item":"something"}');
                });
        });
    });
});
You need to use prototype in AWS to stub it:
import AWS from 'aws-sdk';

const sandbox = sinon.createSandbox();
sandbox.stub(AWS.prototype, 'SES').returns({
    sendEmail: () => {
        return true;
    }
});
The error seems to indicate that AWS is being imported as undefined.
It might be that your ES6 compiler isn't automatically turning this line:
import AWS from 'aws-sdk';
...into an import of everything in aws-sdk as AWS.
Change it to this:
import * as AWS from 'aws-sdk';
...and that may fix the issue.
(Disclaimer: I can't reproduce the error in my environment, which compiles with Babel v7 and automatically handles either approach.)
Using require & without using prototype. This is working for me for mocking DynamoDB.
const aws = require('aws-sdk');
const sinon = require('sinon');
const sandbox = sinon.createSandbox();
this.awsStub = sandbox.stub(aws, 'DynamoDB').returns({
query: function() {
return {
promise: function() {
return {
Items: []
};
}
};
}
});
Packages:
"aws-sdk": "^2.453.0"
"sinon": "^7.3.2"
I was able to use aws-sdk-mock by doing the following:
Test class:
const AWSMock = require('aws-sdk-mock');
const AWS = require('aws-sdk');

AWSMock.setSDKInstance(AWS);
...
AWSMock.mock('SES', 'sendRawEmail', mockSendEmail);
// call the method that needs to mock-send an email below
sendEmail(to, from, subject, body, callback);

function mockSendEmail(params, callback) {
    console.log('mock email');
    // error-first callback: pass null as the error and the response as data
    return callback(null, {
        MessageId: '1234567',
    });
}
Actual class:
const aws = require('aws-sdk');
const nodemailer = require('nodemailer');

function sendEmail(to, from, subject, body, callback) {
    let addresses = to;
    if (!Array.isArray(addresses)) {
        addresses = [addresses];
    }

    let replyTo = [];
    if (from) {
        replyTo.push(from);
    }

    let data = {
        to: addresses,
        replyTo,
        subject,
        text: body,
    };

    nodemailer.createTransport({ SES: new aws.SES({ apiVersion: '2010-12-01' }) }).sendMail(data, callback);
}
const AWS = require('aws-sdk');
...
const sandbox = sinon.createSandbox();
sandbox.stub(AWS, 'SES').returns({
    sendRawEmail: () => {
        console.log("My sendRawEmail");
        return {
            promise: function () {
                return {
                    MessageId: '987654321'
                };
            }
        };
    }
});

let ses = new AWS.SES({ region: 'us-east-1' });
let result = ses.sendRawEmail(params).promise();
