ecs.runTask not executing in Lambda - node.js

I have a Lambda function that is supposed to start an ECS task when invoked. It gets all the way down to the "Starting execution..." log and then logs "done." It seems to just skip right over ecs.runTask(). I have tried capturing the returned JSON output by assigning the runTask call to a variable, but that has not helped. I have also tried changing some of my parameters, but that has not worked either.
const AWS = require('aws-sdk');
var ecs = new AWS.ECS();

exports.handler = async (event) => {
    var params = {
        cluster: "ec2-cluster",
        enableECSManagedTags: true,
        launchType: "FARGATE",
        count: 1,
        platformVersion: 'LATEST',
        networkConfiguration: {
            awsvpcConfiguration: {
                assignPublicIp: "ENABLED",
                securityGroups: [ "sg" ],
                subnets: [ "subnet" ]
            }
        },
        startedBy: "testLambda",
        taskDefinition: "definition"
    };
    console.log("Starting execution...");
    ecs.runTask(params, function(err, data) {
        console.log(err, data);
    });
    // console.log(myReturn)
    console.log("done.");
};
When I run this locally, everything works great. When I run this in Lambda, however, it does not start my task.

In your case, you will need to call .promise() on ecs.runTask() and await the result. The AWS Lambda documentation mentions that we have to
Make sure that any background processes or callbacks in your code are complete before the code exits.
meaning that we need to await the ecs.runTask() call so that it resolves before the handler returns. Here is an example of how we can apply async/await to the aws-sdk. From the references above, the way to make your code work would be:
const AWS = require('aws-sdk');
var ecs = new AWS.ECS();

exports.handler = async (event) => {
    var params = {
        cluster: "ec2-cluster",
        enableECSManagedTags: true,
        launchType: "FARGATE",
        count: 1,
        platformVersion: 'LATEST',
        networkConfiguration: {
            awsvpcConfiguration: {
                assignPublicIp: "ENABLED",
                securityGroups: [ "sg" ],
                subnets: [ "subnet" ]
            }
        },
        startedBy: "testLambda",
        taskDefinition: "definition"
    };
    console.log("Starting execution...");
    // Added .promise() here and awaited it, so the handler does not
    // return before the API call completes
    await ecs.runTask(params).promise();
    // console.log(myReturn)
    console.log("done.");
};
This is a common mistake that we might make when we first get into Lambda, especially when we try to make our Lambda functions work with other AWS services via the aws-sdk.

Related

Manual approval task in step function - invoke through a script

I have a step function with an activity task that should wait for input from a script that I will run from my terminal. When the script is invoked, the waiting task should be marked as succeeded.
Could someone provide examples on how to achieve this?
Any helpful documentation or reference code links are appreciated.
Would I need an activity worker to invoke this?
Can a script running in my terminal invoke the Lambda and mark it as succeeded?
node report-choice-step-success.js --stepfunction-arn <SFN-EXEC> --step-name ManualTask
Script report-choice-step-success.js
const AWS = require('aws-sdk');

const main = () => {
    let sfnClient;
    const rolename = `StepFunctionExecuter-LOCAL`;
    // getcreds is my own helper that assumes the role and resolves the SDK params
    return getcreds({ accountId: '123456789012', region: 'us-east-1', rolename })
        .then(params => {
            sfnClient = new AWS.StepFunctions(params);
        })
        .then(() => startmystepfunction(sfnClient));
};

const startmystepfunction = (sfnClient) => {
    const stateMachineArn = `arn:aws:states:us-east-1:123456789012:stateMachine:MYSTEPFUNCTION`;
    const name = `Manual step`;
    const executionParams = { name, stateMachineArn };
    return sfnClient.startExecution(executionParams).promise()
        .then(response => {
            if (response && response.executionArn) {
                console.log(`Started SFN execution for arn: ${response.executionArn}`);
            }
        });
};
How should I modify the task so that it waits for manual input and only then gets marked as succeeded?
{
  "Comment": "My state machine",
  "StartAt": "ManualStep",
  "States": {
    "ManualStep": {
      "Type": "Task",
      "Resource": "arn:aws:states:::activity:manualtask",
      "End": true
    }
  }
}
I could get the Activity ARN from the execution ARN by using the getExecutionHistory method and filtering the scheduled activity events.
Then I used that particular activity ARN with getActivityTask, and then used the sendTaskSuccess method to transition the step to succeeded.
// params contains the activity ARN retrieved from the execution history
sfnClient.getActivityTask(params).promise()
    .then(data => {
        var successParams = {
            output: data.input.toString(),
            taskToken: data.taskToken.toString()
        };
        return sfnClient.sendTaskSuccess(successParams).promise();
    }).catch(err => {
        console.error(err);
        console.error(err.stack);
    });
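For the getExecutionHistory part, a minimal sketch (the ActivityScheduled event type and its activityScheduledEventDetails.resource field come from the Step Functions API; executionArn is assumed to come from the startExecution response above):

const getScheduledActivityArn = (executionArn) =>
    sfnClient.getExecutionHistory({ executionArn }).promise()
        .then(history => {
            // Find the scheduled activity event and read its activity ARN
            const scheduled = history.events.find(e => e.type === 'ActivityScheduled');
            return scheduled && scheduled.activityScheduledEventDetails.resource;
        });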

How to assign serviceAccount using gcloud compute nodejs client?

I'm trying to create a new virtual machine using the gcloud compute Node.js client:
const Compute = require('@google-cloud/compute');
const compute = new Compute();

async function createVM() {
    try {
        const zone = await compute.zone('us-central1-a');
        const config = {
            os: 'ubuntu',
            http: true,
            https: true,
            metadata: {
                items: [
                    {
                        key: 'startup-script-url',
                        value: 'gs://<path_to_startup_script>/startup_script.sh'
                    },
                ],
            },
        };
        const data = await zone.createVM('vm-9', config);
        const operation = data[1];
        await operation.promise();
        return console.log('VM Created');
    } catch (err) {
        console.error(err);
        return Promise.reject(err);
    }
}
I have a serviceAccount with the roles needed for this VM to call other resources, but I can't figure out where to assign the serviceAccount when creating the new VM. Any pointers are greatly appreciated; I haven't been able to find any documentation and I'm stuck.
You can specify the service account to use in the new VM by adding a serviceAccounts field within the options for config passed into createVM. Here is an example snippet:
zone.createVM('name', {
    serviceAccounts: [
        {
            email: '...',
            scopes: [
                '...'
            ]
        }
    ]
})
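Applied to the config from the question, it could look like the following (the email and scope values are placeholders you would replace with your own; cloud-platform is one commonly used scope):

const config = {
    os: 'ubuntu',
    http: true,
    https: true,
    serviceAccounts: [
        {
            // hypothetical service account; replace with your own
            email: 'my-sa@my-project.iam.gserviceaccount.com',
            scopes: ['https://www.googleapis.com/auth/cloud-platform']
        }
    ]
};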
References:
Service Account and Access Scopes or Method: instances.insert
createVM - The config object can take all the parameters of the instance resource.

Mock AWS services for NodeJS unit tests

I am looking at setting up some unit tests for a NodeJS project, but I am wondering how to mock up my usage of AWS services. I am using a wide variety: SNS, SQS, DynamoDB, S3, ECS, EC2, Autoscaling, etc. Does anybody have any good leads on how I might mock these up?
I just spent hours trying to get AWS SQS mocking working, without resorting to the aws-sdk-mock requirement of importing aws-sdk clients inside a function.
The mocking for AWS.DynamoDB.DocumentClient was pretty easy, but the AWS.SQS mocking had me stumped until I came across the suggestion to use rewire.
My Lambda moves bad messages to an SQS FailQueue (rather than letting the Lambda fail and return the message to the regular queue for retries, and then to the DeadLetterQueue after maxRetries). The unit tests needed to mock the following SQS methods:
SQS.getQueueUrl
SQS.sendMessage
SQS.deleteMessage
I'll try to keep this example code as concise as I can while still including all the relevant parts:
Snippet of my AWS Lambda (index.js):
const AWS = require('aws-sdk');
AWS.config.update({region:'eu-west-1'});
const docClient = new AWS.DynamoDB.DocumentClient();
const sqs = new AWS.SQS({ apiVersion: '2012-11-05' });
// ...snip
Abridged Lambda event records (event.json):
{
  "valid": {
    "Records": [{
      "messageId": "c292410d-3b27-49ae-8e1f-0eb155f0710b",
      "receiptHandle": "AQEBz5JUoLYsn4dstTAxP7/IF9+T1S994n3FLkMvMmAh1Ut/Elpc0tbNZSaCPYDvP+mBBecVWmAM88SgW7iI8T65Blz3cXshP3keWzCgLCnmkwGvDHBYFVccm93yuMe0i5W02jX0s1LJuNVYI1aVtyz19IbzlVksp+z2RxAX6zMhcTy3VzusIZ6aDORW6yYppIYtKuB2G4Ftf8SE4XPzXo5RCdYirja1aMuh9DluEtSIW+lgDQcHbhIZeJx0eC09KQGJSF2uKk2BqTGvQrknw0EvjNEl6Jv56lWKyFT78K3TLBy2XdGFKQTsSALBNtlwFd8ZzcJoMaUFpbJVkzuLDST1y4nKQi7MK58JMsZ4ujZJnYvKFvgtc6YfWgsEuV0QSL9U5FradtXg4EnaBOnGVTFrbE18DoEuvUUiO7ZQPO9auS4=",
      "body": "{ \"key1\": \"value 1\", \"key2\": \"value 2\", \"key3\": \"value 3\", \"key4\": \"value 4\", \"key5\": \"value 5\" }",
      "attributes": {
        "ApproximateReceiveCount": "1",
        "SentTimestamp": "1536763724607",
        "SenderId": "AROAJAAXYIAN46PWMV46S:steve.goossens@bbc.co.uk",
        "ApproximateFirstReceiveTimestamp": "1536763724618"
      },
      "messageAttributes": {},
      "md5OfBody": "e5b16f3a468e6547785a3454cfb33293",
      "eventSource": "aws:sqs",
      "eventSourceARN": "arn:aws:sqs:eu-west-1:123456789012:sqs-queue-name",
      "awsRegion": "eu-west-1"
    }]
  }
}
Abridged unit test file (test/index.test.js):
const AWS = require('aws-sdk');
const expect = require('chai').expect;
const LambdaTester = require('lambda-tester');
const rewire = require('rewire');
const sinon = require('sinon');
const event = require('./event');

// rewire (instead of require) lets us replace module-level
// variables in index.js with mocks
const lambda = rewire('../index');

let sinonSandbox;

function mockGoodSqsMove() {
    const promiseStubSqs = sinonSandbox.stub().resolves({});
    const sqsMock = {
        getQueueUrl: () => ({ promise: sinonSandbox.stub().resolves({ QueueUrl: 'queue-url' }) }),
        sendMessage: () => ({ promise: promiseStubSqs }),
        deleteMessage: () => ({ promise: promiseStubSqs })
    };
    lambda.__set__('sqs', sqsMock);
}

describe('handler', function () {
    beforeEach(() => {
        sinonSandbox = sinon.createSandbox();
    });

    afterEach(() => {
        sinonSandbox.restore();
    });

    describe('when SQS message is in dedupe cache', function () {
        beforeEach(() => {
            // mock SQS
            mockGoodSqsMove();

            // mock DynamoDB DocumentClient
            const promiseStub = sinonSandbox.stub().resolves({'Item': 'something'});
            sinonSandbox.stub(AWS.DynamoDB.DocumentClient.prototype, 'get').returns({ promise: promiseStub });
        });

        it('should return an error for a duplicate message', function () {
            return LambdaTester(lambda.handler)
                .event(event.valid)
                .expectReject((err, additional) => {
                    expect(err).to.have.property('message', 'Duplicate message: {"Item":"something"}');
                });
        });
    });
});
Take a look at LocalStack. It provides an easy-to-use test/mocking framework for developing AWS-related applications by spinning up AWS-compatible APIs on your local machine or in Docker. It supports a couple dozen AWS APIs, and SQS is among them. It is really a great tool for functional testing without using a separate environment in AWS for that.
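For example, to run the SQS part of a test suite against LocalStack, you only need to repoint the client's endpoint. A minimal sketch, assuming LocalStack's default edge port (4566) and that it accepts dummy credentials:

const AWS = require('aws-sdk');

// Point the SQS client at LocalStack instead of real AWS
const sqs = new AWS.SQS({
    region: 'eu-west-1',
    endpoint: 'http://localhost:4566',
    accessKeyId: 'test',
    secretAccessKey: 'test'
});

sqs.listQueues().promise()
    .then(data => console.log(data.QueueUrls))
    .catch(console.error);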

How to get EC2 public IP using aws-sdk JavaScript

I want to get an EC2 instance's public IP using the aws-sdk for JavaScript. Upon executing the code below, the return gives { Reservations: [] }.
'use strict';

const AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws.json');
AWS.config.update({ region: 'ap-northeast-1' });

const ec2 = new AWS.EC2({ apiVersion: '2016-11-15' });
const params = {
    Filters: [
        {
            Name: 'ip-address',
            Values: [
                'ip-address'
            ]
        }
    ],
    InstanceIds: [
        "i-0acf483a5cbdfdbeb"
    ]
};

ec2.describeInstances(params, function (err, data) {
    if (err) {
        console.log(err);
    }
    console.log(data);
});
The credentials used have been verified on IAM and are allowed access to the EC2 instance. Why can't its public IP be retrieved?
Node: 7.1.0
OS: CentOS 7.3/Windows 10
I think you don't need the 'Filters' part in your params object.
Use this:
const params = {
    InstanceIds: [
        "i-0acf483a5cbdfdbeb"
    ]
};
To get the public IP, use data.Reservations[0].Instances[0].PublicIpAddress.
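Putting it together, a minimal sketch reusing the instance ID from the question, with a guard against an empty result (e.g. a wrong region or a terminated instance):

const params = {
    InstanceIds: [
        "i-0acf483a5cbdfdbeb"
    ]
};

ec2.describeInstances(params, function (err, data) {
    if (err) {
        return console.log(err);
    }
    // Reservations is empty when the instance is not found in this region
    const reservation = data.Reservations[0];
    const instance = reservation && reservation.Instances[0];
    console.log(instance ? instance.PublicIpAddress : 'No instance found');
});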
These are all the parameters that you need:
var params = {
    Filters: [
        {
            Name: 'instance-id',
            Values: [
                'i-0492dce5669fd6d22'
            ]
        },
    ],
};
Documentation

Data returned from promises with AWS-SDK for node and bluebird

I'm trying to fetch, for each region in AWS, all of their registered Elastic IPs. The piece of code I'm currently handling is the following:
logger.info('About to fetch Regions');

ec2.describeRegionsPromised({}).then(function (data) {
    var addressesPromises = [];
    logger.info('Fetched Regions');
    logger.info(data);
    _.forEach(data.Regions, function (region) {
        var ec2Addresses = _.create(ec2, {region: region.RegionName});
        addressesPromises.push(ec2Addresses.describeAddressesPromised());
    });
    logger.info('About to fetch addresses per region');
    return Promise.all(addressesPromises);
}).then(function (data) {
    logger.info('Fetched addresses per region');
    logger.debug(data);
}).catch(function (err) {
    logger.error('There was an error when fetching regions and addresses');
    logger.error(err);
});
This works OK, but my problem is with the data parameter of the second .then callback: it is an array with the same length as the regions returned by the first request.
I know that I'm only using one Elastic IP, in one region. For all the other regions I don't have any associated.
The Regions returned are the following (it's actually a formatted JSON):
Regions=[RegionName=eu-west-1, Endpoint=ec2.eu-west-1.amazonaws.com, RegionName=ap-southeast-1, Endpoint=ec2.ap-southeast-1.amazonaws.com, RegionName=ap-southeast-2, Endpoint=ec2.ap-southeast-2.amazonaws.com, RegionName=eu-central-1, Endpoint=ec2.eu-central-1.amazonaws.com, RegionName=ap-northeast-1, Endpoint=ec2.ap-northeast-1.amazonaws.com, RegionName=us-east-1, Endpoint=ec2.us-east-1.amazonaws.com, RegionName=sa-east-1, Endpoint=ec2.sa-east-1.amazonaws.com, RegionName=us-west-1, Endpoint=ec2.us-west-1.amazonaws.com, RegionName=us-west-2, Endpoint=ec2.us-west-2.amazonaws.com]
In JSON it would be:
{ Regions: [] } //and so on
And the Elastic IPs returned are the following:
[ { Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] },
{ Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] },
{ Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] },
{ Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] },
{ Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] },
{ Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] },
{ Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] },
{ Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] },
{ Addresses: [ [PublicIp=XX.XX.XXX.XXX, AllocationId=eipalloc-XXXXXXXX, Domain=vpc] ] } ]
In the response, I get an array of objects whose key-values are all identical for every region request, which is wrong.
I would have expected the second response to resolve the values per region, with the rest of them set to null, undefined, or similar.
To sum up: I don't get why resolving the values of an array of promises (using .all) yields an array of identical values in each spot, which is not the expected result.
What's going on here? Thanks in advance!
I found the issue.
As @Roamer-1888 indicated, the creation of the ec2Addresses object was incorrect. Instead, I should have created a new instance using the AWS SDK constructor for EC2 objects. However, the key point of the issue was elsewhere. First, the code...
logger.info('About to fetch Regions');

ec2.describeRegionsPromised({}).then(function (data) {
    var addressesPromises = [];
    logger.info('Fetched Regions');
    _.forEach(data.Regions, function (region) {
        var ec2Addresses = AWS.ec2({
            region: region.RegionName,
            endpoint: region.Endpoint
        });
        addressesPromises.push(ec2Addresses.describeAddressesPromised());
    });
    logger.info('About to fetch addresses per region');
    return Promise.all(addressesPromises);
}).then(function (data) {
    logger.info(arguments);
    logger.info('Fetched addresses per region');
    logger.debug(data);
}).catch(function (err) {
    logger.error('There was an error when fetching regions and addresses');
    logger.error(err);
});
As you can notice here, ec2Addresses is created by invoking AWS.ec2() and not new AWS.ec2(); this is because the aws-promised module creates the object and returns it promisified (https://github.com/davidpelayo/aws-promised/blob/master/ecs.js):
'use strict';

var AWS = require('aws-sdk');
var memoize = require('lodash/function/memoize');
var promisifyAll = require('./lib/util/promisifyAll');

function ecs(options) {
    return promisifyAll(new AWS.ECS(options));
}

/**
 * Returns an instance of AWS.ECS which has Promise methods
 * suffixed by "Promised"
 *
 * e.g.
 * createService => createServicePromised
 *
 * @param options
 */
module.exports = memoize(ecs);
The issue was the last line of code:
module.exports = memoize(ecs)
This line was caching the result of the previous execution, including its previous configuration.
When I debugged the application, I realised that the regions and endpoints of the promises being executed were all the same, and that was the error.
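To illustrate the pitfall, here is a minimal self-contained sketch. The hand-rolled memoize below mimics a cache that coerces its key to a string (real lodash behaviour varies by version, so treat this as an assumption about why the collision happened):

// Hand-rolled memoize that keys its cache on String(firstArg).
// Any options object stringifies to "[object Object]", so every
// call after the first gets the cached client back.
function memoizeByString(fn) {
    var cache = {};
    return function (arg) {
        var key = String(arg);
        if (!(key in cache)) {
            cache[key] = fn(arg);
        }
        return cache[key];
    };
}

function ec2Factory(options) {
    return { region: options.region }; // stand-in for promisifyAll(new AWS.EC2(options))
}

var getEc2 = memoizeByString(ec2Factory);
console.log(getEc2({ region: 'us-east-1' }).region); // us-east-1
console.log(getEc2({ region: 'eu-west-1' }).region); // us-east-1 again: cached!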
After deleting the memoize(ecs), I get the expected result:
info: Fetched addresses per region
debug: Addresses=[], Addresses=[], Addresses=[], Addresses=[], Addresses=[], Addresses=[PublicIp=XX.XXX.XXX.XXX, AllocationId=eipalloc-XXXXXX, Domain=vpc], Addresses=[], Addresses=[], Addresses=[]
Thanks for reading and helping.
I also found a way of requesting the addresses of the different regions without creating a new EC2 object, i.e. by reusing the existing EC2 instance and switching its endpoint, as follows:
_.forEach(data.Regions, function (region) {
    //var ec2Addresses = AWS.ec2(
    //    {
    //        region: region.RegionName,
    //        endpoint: region.Endpoint
    //    }
    //);
    var ep = new AWS.Endpoint(region.Endpoint);
    ec2.config.region = region.RegionName;
    ec2.endpoint = ep;
    addressesPromises.push(ec2.describeAddressesPromised());
});
logger.info('About to fetch addresses per region');
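If you'd rather not mutate a shared client, an alternative sketch using plain aws-sdk v2 (one client per region, with the SDK's standard .promise() helper instead of aws-promised):

var AWS = require('aws-sdk');
var ec2 = new AWS.EC2({ region: 'us-east-1' });

ec2.describeRegions({}).promise().then(function (data) {
    // One EC2 client per region, so no shared state is mutated
    var addressesPromises = data.Regions.map(function (region) {
        var regionalEc2 = new AWS.EC2({ region: region.RegionName });
        return regionalEc2.describeAddresses().promise();
    });
    return Promise.all(addressesPromises);
}).then(function (results) {
    results.forEach(function (r) {
        console.log(r.Addresses);
    });
});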
