Below is a simplified version of my code.
const AWS = require('aws-sdk');

// Lambda entry point: publishes a single custom CloudWatch metric data point.
// NOTE(review): the handler is declared async AND takes a callback; the metric
// is sent via the callback-style SDK API, and callback(null, ...) fires
// immediately without waiting for putMetricData to complete.
exports.handler = async (event, context, callback) => {
  // true (the default): the runtime waits for the event loop to drain before
  // freezing, so the putMetricData callback below still gets a chance to run.
  context.callbackWaitsForEmptyEventLoop = true;
  AWS.config.update({region: 'cn-north-1'});
  // Create CloudWatch service object
  var cw = new AWS.CloudWatch({apiVersion: '2010-08-01'});
  var params = {
    MetricData: [
      {
        MetricName: 'PAGES_VISITED',
        Dimensions: [
          {
            Name: 'UNIQUE_PAGES',
            Value: 'URLS'
          },
        ],
        Unit: 'None',
        Value: 1.0
      },
    ],
    Namespace: 'MyNewNameSpace'
  };
  // Fire-and-forget: the result is only logged; nothing awaits this call.
  cw.putMetricData(params, function(err, data) {
    if (err) {
      console.log("Error", err);
    } else {
      console.log("Success", JSON.stringify(data));
    }
  });
  callback(null, "the result");
};
It seems that once I set callbackWaitsForEmptyEventLoop = false, the metric is never published. I do not understand this conflict.
If you set callbackWaitsForEmptyEventLoop = false then your function execution terminates before all the callbacks are done. In this case, the function terminates before the callback from cw.putMetricData is ever called, so your code is not executed. It is likely that the operation on CloudWatch actually happens, but you just don't see the log output, because the callback never runs.
Here's your function, using the async/await model, without callbacks and without callbackWaitsForEmptyEventLoop:
const AWS = require('aws-sdk');
exports.handler = async event => {
AWS.config.update({region: 'cn-north-1'});
// Create CloudWatch service object
var cw = new AWS.CloudWatch({apiVersion: '2010-08-01'});
var params = {...};
await cw.putMetricData(params)
.promise()
.then(data => {console.log("Success", JSON.stringify(data));})
.catch(err => {console.log("Error", err);})
return "the result";
};
Related
batchWrite doesn't work with async in Lambda. The code is supposed to insert one record, but it can't. However, when I remove async, it works.
const AWS = require("aws-sdk");
const documentClient = new AWS.DynamoDB.DocumentClient();
// NOTE(review): the region is set AFTER the DocumentClient is constructed, so
// the client may not pick it up — confirm against the deployment region.
AWS.config.update({ region: "us-west-2" });
const tableName = "BlrSession-56pfbzohnvdqpac6asb627z2wu-dev";

// BUG (this is what the question is about): the handler is async, so the Lambda
// runtime resolves on the returned promise and ignores the `callback`
// parameter. The function body falls through to the end (implicitly returning
// undefined, shown as "null" in the output) before the batchWrite callback
// ever fires.
exports.handler = async (event, context, callback) => {
  try {
    let games = [];
    games.push({
      PutRequest: {
        Item: {
          // Pseudo-random id (Math.random + timestamp); not collision-proof.
          id: Math.random().toString(36).substring(2) + Date.now().toString(36),
        },
      },
    });
    let params = {
      RequestItems: {
        [tableName]: games,
      },
    };
    // Callback-style SDK call inside an async handler: nothing awaits it.
    documentClient.batchWrite(params, function (err, data) {
      if (err) {
        callback(err);
      } else {
        callback(null, data);
      }
    });
  } catch (err) {
    return err;
  }
};
The result is below. There is no error.
Ensuring latest function changes are built...
Starting execution...
Result:
null
Finished execution.
Have you guys got the same behavior?
You can't combine the callback method with the async/await method. The easiest thing to do here is to make it all async/await (and don't forget the .promise() on the call).
const AWS = require("aws-sdk");
const documentClient = new AWS.DynamoDB.DocumentClient();
AWS.config.update({ region: "us-west-2" });
const tableName = "BlrSession-56pfbzohnvdqpac6asb627z2wu-dev";
exports.handler = async (event, context, callback) => {
try {
let games = [];
games.push({
PutRequest: {
Item: {
id: Math.random().toString(36).substring(2) + Date.now().toString(36),
},
},
});
let params = {
RequestItems: {
[tableName]: games,
},
};
return await documentClient.batchWrite(params).promise();
} catch (err) {
return err;
}
};
I have gone through similar threads to fix this issue but I have had no luck. Both lambdas can be trigger independently of one another, and I am able to invoke the second Lambda through the command line, but my code does not work.
'use strict'
/* eslint max-statements: ['error', 100, { 'ignoreTopLevelFunctions': true }] */
// Module-level dependencies and service clients, created once per container.
const RespHelper = require('../../lib/response')
const { uuid } = require('uuidv4')
const AWS = require('aws-sdk')
const DB = require('./dynamo')
const respHelper = new RespHelper()
const Dynamo = new DB()
// Lambda client used to invoke the downstream function.
const lambda = new AWS.Lambda({
  region: 'us-west-2'
})
// BUG (as the answer below explains): this async function kicks off the invoke
// with the callback-style API and returns immediately — nothing awaits the
// request, so `await secondLambda(...)` resolves before the invocation
// actually completes (or fails).
const secondLambda = async (lambdaData) => {
  var params = {
    LogType: 'Tail',
    FunctionName: 'second_lambda_name',
    InvocationType: 'RequestResponse',
    Payload: JSON.stringify(lambdaData)
  }
  lambda.invoke(params, function (err, data) {
    if (err) {
      console.log(err)
    } else {
      console.log(`Success: ${data.Payload}`)
    }
  })
}
// Entry point: writes an IN_PROGRESS record to DynamoDB, invokes the second
// Lambda with a COMPLETE payload, then returns a 200 response.
exports.handler = async event => {
  const id = uuid()
  // Record persisted to the table before the downstream call.
  let bodyData = {
    uuid: id,
    user: 'owner#email.com',
    processingStatus: 'IN_PROGRESS'
  }
  // Payload handed to the second Lambda (same id, status flipped to COMPLETE).
  let payloadData = {
    uuid: id,
    user: 'owner#email.com',
    processingStatus: 'COMPLETE'
  }
  try {
    await Dynamo.writeRecordToDB(bodyData)
    // NOTE(review): this await is ineffective as long as secondLambda never
    // returns/awaits the invoke request — see the note on secondLambda.
    await secondLambda(payloadData)
    return respHelper.sendResponse(200, { message: bodyData })
  } catch (err) {
    console.log(`Failure: ${err}`)
    return respHelper.sendResponse(400, { message: 'ERROR' })
  }
}
I have double checked the lambda role and it has the Invoke Lambda and Invoke Asynchronous Invoke permission on all resources. Console outputs don't give me any indication of why this is not working. Any help is appreciated.
You're awaiting a callback when you need to await a promise
// Invoke the downstream Lambda and hand back the AWS.Request as a promise so
// the caller's `await` actually waits for the invocation to finish.
const secondLambda = async (lambdaData) => {
  const params = {
    LogType: 'Tail',
    FunctionName: 'second_lambda_name',
    InvocationType: 'RequestResponse',
    Payload: JSON.stringify(lambdaData),
  };
  return lambda.invoke(params).promise();
};
I am trying to invoke multiple lambda functions (one lambda function that would run separate parallel processes) from another lambda function. The first one runs as a cron lambda that just queries docs from the db and then invokes another lambda with each doc's params. This cron lambda runs every five minutes and queries the docs correctly. I was testing the second lambda with two documents. The problem is that each invocation of the second lambda processes only one document — the one it didn't process on the previous invocation:
Ex:
doc 1
doc 2
First, invoke of second lambda -> process doc 1
Second, invoke of second lambda -> process doc 2
Third, invoke of second lambda -> process doc 1
Forth invoke of second lambda -> process doc 2
etc...
First (cron) lambda code:
// Configure the SDK with environment-supplied region and credentials.
// NOTE(review): prefer the Lambda execution role over static access keys where
// possible — confirm whether these keys are required in this deployment.
aws.config.update({
  region : env.lambdaRegion,
  accessKeyId: env.lambdaAccessKeyId,
  secretAccessKey: env.lambdaSecretAccessKey,
});
// Shared Lambda client reused by every invocation in this container.
const lambda = new aws.Lambda({
  region: env.lambdaRegion,
});
// Cron handler: claims up to 100 pending docs and fires the second Lambda once
// per doc.
// BUG (see the accepted fix below): in aws-sdk v2, `lambda.invoke(params)`
// without a callback only BUILDS an AWS.Request — it is never sent unless
// .send() or .promise() is called, and nothing here awaits the invocations.
// Also, `return new Promise(async (resolve, reject) => ...)` is the
// explicit-promise-construction anti-pattern; the async handler could simply
// run the loop and return.
exports.handler = async (event: any, context: any) => {
  context.callbackWaitsForEmptyEventLoop = false;
  return new Promise(async (resolve, reject) => {
    for (let i = 0; i < 100; i++) {
      // Atomically claim the oldest unprocessed doc by stamping
      // lambdaProcessing, so concurrent runs don't pick the same doc.
      const doc = await mongo.db.collection('docs').
        findOneAndUpdate(
          {
            status: 1,
            lambdaProcessing: null,
          },
          { $set: { lambdaProcessing: new Date() } },
          {
            sort: { processedAt: 1 },
            returnNewDocument: true,
          },
        );
      if (doc.value && doc.value._id) {
        const params = {
          FunctionName: env.lambdaName,
          InvocationType: 'Event',
          Payload: JSON.stringify({ docId: doc.value._id }),
        };
        // Request object created but never sent — see BUG note above.
        lambda.invoke(params);
      } else {
        // findOneAndUpdate matched nothing: no more docs to claim.
        if (doc.lastErrorObject && doc.lastErrorObject.n === 0) {
          break;
        }
      }
    }
    resolve();
  });
};
Second lambda function:
// Second Lambda: loads the doc referenced by the event and processes it.
// The processDoc result is awaited and returned, so the handler's promise
// settles only after processing finishes.
exports.handler = async (event: any, ctx: any) => {
  ctx.callbackWaitsForEmptyEventLoop = false;
  if (event && event.docId) {
    const doc = await mongo.db.collection('docs').findById(event.docId);
    return await processDoc(doc);
  } else {
    // Invoked without the expected payload shape.
    throw new Error('doc ID is not present.');
  }
};
To run multiple lambdas in parallel without an "ugly" cronjob solution I would recommend using AWS Step Functions with the Parallel state type. You can set up the logic in your serverless.yml; the function calls themselves are Lambda functions. You can pass data via the second argument of the callback. If the data is larger than 32 KB I would recommend using an S3 bucket or a database, though.
Example serverless.yml
# serverless-step-functions state machine: run GatherDataA and GatherDataB in
# parallel, then pass both branch results (as an array) to ResolveData.
# Indentation reconstructed — the pasted snippet had lost its YAML structure.
stepFunctions:
  stateMachines:
    test:
      name: 'test'
      definition:
        Comment: "Testing tips-like state structure"
        StartAt: GatherData
        States:
          GatherData:
            Type: Parallel
            Branches:
              - StartAt: GatherDataA
                States:
                  GatherDataA:
                    Type: Task
                    Resource: "arn:aws:lambda:#{AWS::Region}:#{AWS::AccountId}:function:${self:service}-${opt:stage, self:provider.stage}-firstA"
                    TimeoutSeconds: 15
                    End: true     # terminal state of branch A
              - StartAt: GatherDataB
                States:
                  GatherDataB:
                    Type: Task
                    Resource: "arn:aws:lambda:#{AWS::Region}:#{AWS::AccountId}:function:${self:service}-${opt:stage, self:provider.stage}-firstB"
                    TimeoutSeconds: 15
                    End: true     # terminal state of branch B
            Next: ResolveData     # runs after BOTH branches complete
          ResolveData:
            Type: Task
            Resource: "arn:aws:lambda:#{AWS::Region}:#{AWS::AccountId}:function:${self:service}-${opt:stage, self:provider.stage}-resolveAB"
            TimeoutSeconds: 15
            End: true
Example handlers
// Branch A of the Parallel state: hands a fixed payload to the callback.
module.exports.firstA = (event, context, callback) => {
  callback(null, {
    id: 3,
    somethingElse: ['Hello', 'World'],
  });
};
// Branch B of the Parallel state: hands a fixed payload to the callback.
module.exports.firstB = (event, context, callback) => {
  callback(null, {
    id: 12,
    somethingElse: ['olleH', 'dlroW'],
  });
};
// Join step: receives [resultOfA, resultOfB] from the Parallel state and
// passes the combined array straight through.
// Fix: the original destructured the event into dataFromA/dataFromB but never
// used them; the unused locals are removed.
module.exports.resolveAB = (event, context, callback) => {
  console.log("resolving data from a and b: ", event);
  callback(null, event);
};
More information see
https://serverless.com/plugins/serverless-step-functions/
https://docs.aws.amazon.com/step-functions/latest/dg/amazon-states-language-common-fields.html
The key was to create a new, separate aws.Lambda() instance for every lambda we want to invoke; then we have to resolve and await every lambda we invoked (a promises array). This is OK if the invoked lambdas don't need to be awaited, so we don't waste processing time on AWS — the invoked lambda starts processing and then resolves without awaiting its response, so the main (cron) lambda can resolve.
Fixed (cron) lambda handler:
// Configure the SDK with environment-supplied region and credentials.
// NOTE(review): prefer the Lambda execution role over static access keys where
// possible — confirm whether these keys are required in this deployment.
aws.config.update({
  region : env.lambdaRegion,
  accessKeyId: env.lambdaAccessKeyId,
  secretAccessKey: env.lambdaSecretAccessKey,
});
/**
 * Cron handler: claims up to 100 pending docs and fires the processing Lambda
 * once per doc ('Event' invocations return as soon as AWS accepts them), then
 * waits until every invoke request has been accepted or has failed.
 *
 * Fixes over the original answer:
 *  - `return new Promise(async (resolve, reject) => ...)` was the
 *    explicit-promise-construction anti-pattern (and an async executor can
 *    silently swallow rejections); the async handler now runs the loop
 *    directly.
 *  - Each invoke uses `.promise()` instead of a hand-rolled Promise wrapper;
 *    errors are logged and swallowed exactly as before, so one failed invoke
 *    never rejects the whole batch.
 */
exports.handler = async (event: any, context: any) => {
  context.callbackWaitsForEmptyEventLoop = false;
  const invocations: any = [];
  for (let i = 0; i < 100; i++) {
    // Atomically claim the oldest unprocessed doc by stamping
    // lambdaProcessing, so concurrent runs don't pick the same doc.
    const doc = await global['mongo'].db.collection('docs').
      findOneAndUpdate(
        {
          status: 1,
          lambdaProcessing: null,
        },
        { $set: { lambdaProcessing: new Date() } },
        {
          sort: { processedAt: 1 },
          returnNewDocument: true,
        },
      );
    if (doc.value && doc.value._id) {
      const params = {
        FunctionName: env.lambdaName,
        InvocationType: 'Event',
        Payload: JSON.stringify({ docId: doc.value._id }),
      };
      // Fresh client per invoke, with retries disabled (as in the answer).
      const lambda = new aws.Lambda({
        region: env.lambdaRegion,
        maxRetries: 0,
      });
      invocations.push(
        lambda.invoke(params).promise()
          .then((data: any) => { console.log('SUCCESS:', data); })
          // Log-and-continue: a failed invoke must not fail the batch.
          .catch((error: any) => { console.error('ERROR: ', error); }),
      );
    } else {
      // findOneAndUpdate matched nothing: no more docs to claim.
      if (doc.lastErrorObject && doc.lastErrorObject.n === 0) {
        break;
      }
    }
  }
  await Promise.all(invocations);
};
Second (processing) lambda:
/**
 * Processing Lambda: loads the doc referenced by the event and processes it.
 *
 * Fix: the original called processDoc(doc) without awaiting it — a floating
 * promise. With callbackWaitsForEmptyEventLoop = false the runtime may freeze
 * the container as soon as the handler's promise settles, so unawaited work
 * can be cut off mid-flight. Awaiting guarantees processing finishes before
 * the handler reports success.
 */
exports.handler = async (event: any, ctx: any) => {
  ctx.callbackWaitsForEmptyEventLoop = false;
  if (event && event.docId) {
    const doc = await mongo.db.collection('docs').findById(event.docId);
    await processDoc(doc);
    return ctx.succeed('Completed.');
  } else {
    // Invoked without the expected payload shape.
    throw new Error('Doc ID is not present.');
  }
};
I don't know if there is any better way of achieving this using strictly lambda functions, but this works.
I am trying to use a lambda function to alter a database and then send a push notification.
I don't want to wait for the push notification server to reply. In the occasional case that the push notification is unsuccessful, that is not a concern. It is more important that the function executes in a timely manner.
Currently I'm using the following two functions. Everything works as expected except that there doesn't seem to be any time saving. ie, when there is no device token and push is not required the function is very fast. When a push is required it is very slow. That tells me what I'm doing is wrong and the function is still waiting for a callback.
I have not used node much and know there are perils with trying to use asynchronous models from other languages. Just wondering how to overcome this case.
Function for Database Insertion:
const AWS = require('aws-sdk');
var mysql = require('mysql');
var lambda = new AWS.Lambda();

// Inserts one row, then (when a user token is present) triggers the push
// notification Lambda.
// NOTE(review): InvocationType 'RequestResponse' below makes lambda.invoke
// wait for sendPushNotification to finish — this is the latency the question
// describes; the accepted fix is InvocationType: 'Event'. Also note that
// callback() and context.succeed/fail are both used on the push path.
exports.handler = (event, context, callback) => {
  // A fresh connection per invocation; closed in the query callback below.
  var connection = mysql.createConnection({
    host: "databaseHost",
    user: "databaseUser",
    password: "databasePassword",
    database: "databaseName",
    multipleStatements: true
  });
  // Parameterized query: the driver escapes the `inserts` values.
  var sql = "INSERT INTO someTable SET item_id = ?, item_name = ?"
  var inserts = [event.itemId, event.itemName];
  connection.query(sql, inserts, function (error, results, fields) {
    connection.end();
    // Handle error after the release.
    if (error) {
      callback(error);
    } else {
      if (event.userToken !== null) {
        // callback fires first, but the runtime still waits for the invoke
        // below before freezing (callbackWaitsForEmptyEventLoop defaults to
        // true), so the response is not actually returned early.
        callback(null, results);
        var pushPayload = { "deviceToken": event.deviceToken };
        var pushParams = {
          FunctionName: 'sendPushNotification',
          InvocationType: 'RequestResponse',
          LogType: 'Tail',
          Payload: JSON.stringify(pushPayload)
        };
        lambda.invoke(pushParams, function (err, data) {
          if (err) {
            context.fail(err);
          } else {
            context.succeed(data.Payload);
          }
        });
      } else {
        //callback(null, results);
        callback(null, results);
      }
    }
  });
};
Push notification function:
const AWS = require('aws-sdk');
var ssm = new AWS.SSM({ apiVersion: '2014-11-06' });
var apn = require("apn");

// Sends one APNs push notification to the device token carried in the event.
exports.handler = function (event, context) {
  // Token-based APNs auth; production: true targets the live APNs gateway.
  var options = {
    token: {
      key: "key",
      keyId: "keyId",
      teamId: "teamId"
    },
    production: true
  };
  var token = event.deviceToken;
  // NOTE(review): constructing a new apn.Provider per invocation re-establishes
  // the APNs connection each time — presumably a large share of the latency.
  var apnProvider = new apn.Provider(options);
  var notification = new apn.Notification();
  notification.alert = "message";
  notification.topic = "com.example.Example";
  context.callbackWaitsForEmptyEventLoop = false;
  // FIXME(review): `deviceToken` is not defined in this scope — the variable
  // assigned above is `token`. As written this throws a ReferenceError.
  apnProvider.send(notification, [deviceToken]).then((response) => {
    context.succeed(event);
  });
};
In pushParams, change the value of InvocationType to "Event" so that the calling Lambda will not wait for the response. It will just queue the invocation asynchronously and return to your callback immediately.
example:
// 'Event' = asynchronous invocation: the call returns as soon as AWS queues
// the request, so the caller never waits for sendPushNotification to run.
// NOTE(review): LogType 'Tail' is presumably only honored for
// 'RequestResponse' invocations — confirm against the Invoke API docs.
var pushParams = {
  FunctionName: 'sendPushNotification',
  InvocationType: 'Event',
  LogType: 'Tail',
  Payload: JSON.stringify(pushPayload)
};
Basically, I have a DynamoDB connection within a Lambda function. Will post code below. This DynamoDB connection seems to be behaving properly - it's able to call the listTable and describeTable functions successfully, which means it's got the right configuration - but querying it returns nothing, even on queries I know are correct and have tested on the Dynamo console.
UPDATE: Was able to successfully query with a string on a separate index, but still unable to query based on a binary...
Here's a part of the Lambda function:
const AWS = require('aws-sdk');
// Region-pinned low-level service clients (sa-east-1), created once per
// container. DDB is the low-level DynamoDB client (attribute-typed values).
const SNS = new AWS.SNS({ apiVersion: '2010-03-31', region: 'sa-east-1' });
const DDB = new AWS.DynamoDB({ apiVersion: '2012-08-10', region: 'sa-east-1' })
const Lambda = new AWS.Lambda({ apiVersion: '2015-03-31' });
const async = require('async');

// Names of the downstream Lambda functions.
const CREATE_NOTIFICATIONS = 'create-notifications'
const QUERY_TOKENS = 'query-tokens'
// For each base64 target in the parsed message, queries the tsGroups table by
// its binary (B) hash key `identifier`, collects the groups' member tokens
// (binary set `users`), then fans out one invocation per collected user.
// NOTE(review): the empty data.Items the question reports suggests the B value
// sent doesn't match the stored key bytes — for a binary key the string must
// be the base64 encoding of exactly the bytes that were written (string vs.
// Buffer at write time matters). Confirm how the keys were originally stored.
function getUsers(functionName, message, callback) {
  var msg = JSON.parse(message);
  var users = [];
  console.log(DDB);
  async.forEachOf(msg.targetsb64, function(value, index, cb) {
    console.log("getUsers b64: ", value)
    console.log(typeof(value))
    // Diagnostic only: proves connectivity/region and dumps the key schema.
    DDB.describeTable({
      TableName: 'tsGroups'
    }, function(err, data) {
      console.log(err)
      console.log(data.Table.KeySchema)
    })
    DDB.query({
      TableName: 'tsGroups',
      KeyConditionExpression: "identifier = :v_user",
      ExpressionAttributeValues: {
        ":v_user": {"B": value}
      }
    }, function(err, data) {
      if (err) {
        cb(err)
      } else {
        console.log("data: ", data)
        console.log("items: ", data.Items)
        // Accumulate every member token from each matching group's binary set.
        data.Items.forEach(function(item) {
          users.push.apply(users, item.users.BS)
        })
        cb()
      }
    })
  }, function(err) {
    if (err) {
      callback(err)
    } else {
      console.log("getUsers users: ", users);
      // Fan out one downstream invocation per user, in parallel.
      // NOTE(review): Promise.all has no .catch here — a rejected invoke would
      // leave this function hanging without ever calling `callback`.
      const promises = users.map((user) => invokeQueryTokens(functionName, msg, user));
      Promise.all(promises).then(() => {
        const result = `Users messaged: ${users.length}`;
        console.log(result);
        callback(null, result);
      });
    }
  })
}
I've tried using KeyConditions instead of KeyConditionExpression, to no avail. Value refers to a base64 identifier string that's passed along from an earlier Lambda function - I've tried hard-coding the correct value, doesn't help. The describeTable function is only there to detail that DynamoDB is connecting properly, and in the correct region.
TL;DR: The data.Items value in the above code snippet is always an empty array, even when doing a query I know should return something. What's my error here?
Thanks, and cheers!