AWS Lambda Node.js: Connect to RDS Postgres Database

I'm trying to test connectivity between my Lambda and an RDS instance. I have them both on the same private subnets with all ports open in the security group. When I trigger the Lambda I do see a connection opened on the RDS instance. However, the Lambda times out after 4 minutes, 40 seconds. The PG environment variables are set in the Lambda configuration.
const { Client } = require('pg');
const client = new Client();
var hello = [
  { name: 'test', description: 'testerface' }
];
exports.handler = async (event, context, callback) => {
  // Postgres Connect
  client.connect();
  const res = client.query('SELECT $1::text as message', ['Hello world!']);
  console.log(res);
  var response = {
    "statusCode": 200,
    "headers": {
      "Content-Type": "application/json"
    },
    "body": JSON.stringify(hello),
    "isBase64Encoded": false
  };
  callback(null, response);
};
How can I get back the response from the connection in the Lambda's logs - or even better in the response body?

You need to handle the client connection better: catch any exceptions the client may throw and release the connection properly. This code returns the output of the query in the response body:
const pg = require('pg')
// with no arguments, the pool is configured from the PG* environment variables
const pool = new pg.Pool()

async function query (q) {
  const client = await pool.connect()
  let res
  try {
    await client.query('BEGIN')
    try {
      res = await client.query(q)
      await client.query('COMMIT')
    } catch (err) {
      await client.query('ROLLBACK')
      throw err
    }
  } finally {
    // always return the client to the pool, even on error
    client.release()
  }
  return res
}
exports.handler = async (event, context, callback) => {
  try {
    const { rows } = await query("select * from pg_tables")
    console.log(JSON.stringify(rows[0]))
    var response = {
      "statusCode": 200,
      "headers": {
        "Content-Type": "application/json"
      },
      "body": JSON.stringify(rows),
      "isBase64Encoded": false
    };
    callback(null, response);
  } catch (err) {
    console.log('Database ' + err)
    callback(null, 'Database ' + err);
  }
};
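Note that the pool is created once, outside the handler, so warm Lambda invocations reuse existing connections instead of opening a new one per request. Called with no arguments, pg.Pool reads the standard PG* environment variables (PGHOST, PGPORT, PGUSER, PGPASSWORD, PGDATABASE), which matches the PG environment variables you already set in the Lambda configuration.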

Related

node.js variable not surviving code block

I'm experimenting with Node.js in AWS Lambda and have run into a problem with the code below: the result value and error value always come back blank. I'm pretty sure this is just a scope issue I'm too tired to see. How do I capture the return value and pass it back in the response? I'm sure the program is connecting to Redis, because I get an error message if I change the port or URL and none when they're set properly.
The return code:
{
  "statusCode": 200,
  "body": "{\"key\":\"q1\"}"
}
The program code:
const Redis = require("ioredis");
const redis = new Redis(6379, 'fredflenstone.lhpxwy.az.0002.use2.cache.amazonaws.com');

exports.handler = async (event) => {
  let key = event.key;
  let response;
  let resultValue;
  let errorValue;

  redis.get(key, (err, result) => {
    if (err) {
      errorValue = err;
    } else {
      resultValue = result;
    }
  });

  response = {
    key: key,
    resultValue: resultValue,
    errorValue: errorValue
  };

  return {
    statusCode: 200,
    body: JSON.stringify(response)
  };
};
The problem is due to promises: your handler finishes executing before Redis returns the result. The following snippet should work:
const Redis = require("ioredis");
const redis = new Redis(6379, 'fredflenstone.lhpxwy.az.0002.use2.cache.amazonaws.com');

exports.handler = async (event) => {
  let key = event.key;
  let response;
  let resultValue;
  let errorValue;

  try {
    resultValue = await redis.get(key);
  } catch (error) {
    errorValue = error;
  }

  response = {
    key: key,
    resultValue: resultValue,
    errorValue: errorValue
  };

  return {
    statusCode: 200,
    body: JSON.stringify(response)
  };
};
This happens because your call to "redis.get" has not resolved by the time "response" is sent.
You need to wait for the result:
await new Promise((resolve) => {
  redis.get(key, (err, result) => {
    if (err) {
      errorValue = err;
    } else {
      resultValue = result;
    }
    resolve();
  });
})
or, even better, turn the Redis callback into a promise:
await new Promise((resolve, reject) => {
  redis.get(key, (err, result) => {
    if (err) {
      reject(err);
    } else {
      resolve(result);
    }
  })
})
.then((result) => resultValue = result)
.catch((err) => errorValue = err)
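(Note that ioredis commands already return a promise when no callback is passed, which is why the plain "await redis.get(key)" in the first snippet is the simplest fix.)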

Can't return JSON from Cosmos DB (MongoDB schema) using a Node.js Azure Function

I'm trying to retrieve data from Cosmos DB (MongoDB schema) using an Azure Function. I can log the data, but I can't return it to the client: the response is empty when I call the function from the browser or Postman. Any idea how to return this array of MongoDB documents?
import { AzureFunction, Context, HttpRequest } from "@azure/functions";
import { MongoClient } from "mongodb";
import assert from "assert";

const url = "";

const httpTrigger: AzureFunction = async function (context: Context, req: HttpRequest): Promise<void> {
  MongoClient.connect(url, async (err, db) => {
    let dbo = db.db("MyDB");
    var cursor = await dbo.collection("test").find({}).toArray((err, docs) => {
      if (err) {
        context.res = {
          body: err,
        }
      }
      if (docs) {
        context.res.set('content-type', 'application/json')
        context.res = {
          status: 200,
          body: docs
        };
        console.log(context.res);
      }
      else {
        context.res = {
          status: 400,
          body: "No docs"
        };
      }
      context.res.headers = { 'Content-Type': 'application/json' };
      context.done();
      console.log("DONE");
    });
    db.close();
  });
};

export default httpTrigger;
Regarding the issue, please refer to the following code:
import { AzureFunction, Context, HttpRequest } from "@azure/functions";
import { MongoClient } from "mongodb";

const url = "";

const httpTrigger: AzureFunction = async function (context: Context, req: HttpRequest): Promise<void> {
  var client;
  try {
    client = await MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true });
    const db = client.db("test");
    const cursor = db.collection('Users').find({ 'saying': 'English' });
    const docs = await cursor.toArray();
    console.log("done");
    context.res = {
      status: 200,
      body: docs,
      headers: {
        'Content-Type': 'application/json'
      }
    };
  } catch (error) {
    context.log(error);
    context.res = {
      status: 500,
      body: error
    };
  } finally {
    // guard against client being undefined if the connection itself failed
    if (client) {
      await client.close();
    }
    console.log("closed");
  }
};

export default httpTrigger;
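For what it's worth, the original code most likely showed nothing because db.close() was called inside the connect callback before the asynchronous toArray() had delivered any documents, and because toArray() was both awaited and given a callback at the same time. The rewrite above avoids both problems by using the promise-based API throughout.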

Node.js 10: Why does the DynamoDB put function give success multiple times?

I am writing a Node.js 10.x Lambda function to put details into a DynamoDB table.
Below is the code:
const AWS = require('aws-sdk');
var db = new AWS.DynamoDB.DocumentClient();
var tableName = "xyz";

exports.handler = async (event) => {
  // TODO implement
  console.log("Event: " + JSON.stringify(event));
  var response = {
    statusCode: 200,
    "headers": {
      "Access-Control-Allow-Origin": "*",
      "Access-Control-Allow-Credentials": true
    },
  };
  await db.put({
    TableName: tableName,
    Item: {
      userid: event.userid,
    }
  }, (error, data) => {
    if (error) {
      console.log("error:" + error);
    } else {
      console.log("Success");
    }
  }).promise();
  return response;
};
I am getting a seemingly random number of "Success" logs back.
Output execution 1
2019-11-07T07:03:45.388Z f451dfc1-01ea-41d0-a998-945cb0f18be1 INFO Success
2019-11-07T07:03:45.510Z f451dfc1-01ea-41d0-a998-945cb0f18be1 INFO Success
2019-11-07T07:03:45.511Z f451dfc1-01ea-41d0-a998-945cb0f18be1 INFO Success
Output execution 2
2019-11-07T07:08:19.270Z 3ce51f5d-bbbc-4dd6-b46f-2149ee9bb9cf INFO Success
Output execution 3
2019-11-07T07:08:27.410Z 2625bba5-b8e1-40e4-8704-7c0d486f6dff INFO Success
2019-11-07T07:08:27.431Z 2625bba5-b8e1-40e4-8704-7c0d486f6dff INFO Success
Does anyone know the cause of this problem? I am relatively new to Node.js 10.x, so please help me if I have missed something in the code.
You are using a callback and a promise at the same time; remove the callback. Passing a callback to put() dispatches the request, and calling .promise() on the same request can dispatch it again, which is why "Success" is logged a variable number of times per invocation.
You can try something like:
exports.handler = async (event, context) => {
  const params = {
    TableName: tableName,
    Item: {
      userid: event.userid,
    }
  };
  try {
    // await the promise directly instead of passing a callback
    const data = await db.put(params).promise();
    console.log("Data: ", data);
  } catch (error) {
    console.error("Error:", error);
  }
}

API works randomly while putting data into DynamoDB

I'm trying to put data into DynamoDB using a Serverless deployment. I have added the permission to write to DynamoDB.
The API almost always responds with {"message": "Internal server error"}, but manages to put the data into the DB about once in every five or six attempts.
Following is the serverless.yaml config:
handler: dynamoUpdate.handler
events:
  - http:
      path: /createdbentry
      method: get
      cors: true
Following is the code:
const AWS = require('aws-sdk')
AWS.config.update({ region: process.env.REGION || 'us-east-1' })
var ddb = new AWS.DynamoDB({ apiVersion: '2012-08-10' });

exports.handler = async (event) => {
  return await createDBEntry("i123", "Working");
}

const sendRes = (status, body) => {
  var response = {
    statusCode: status,
    headers: {
      "Content-Type": "application/json"
    },
    body: body
  };
  return response;
};

const createDBEntry = async function (id, result) {
  var params = {
    TableName: 'emplist',
    Item: {
      'ID': { S: id },
      'Summary': { S: result },
    }
  };
  ddb.putItem(params, function (err, data) {
    console.log("Here comes me.")
    if (err) {
      console.log("Opps Error");
      return sendRes(403, err);
    } else {
      console.log("Complete")
      return sendRes(200, data);
    }
  });
}
How can it be resolved?
The problem is that you are not returning a promise or awaiting anything async in your createDBEntry function, so your handler returns undefined, which makes API Gateway respond with a 500 internal server error.
You are mixing callbacks and async/await.
Your createDBEntry function should look like this.
const createDBEntry = async function (id, result) {
  var params = {
    TableName: 'emplist',
    Item: {
      'ID': { S: id },
      'Summary': { S: result },
    }
  };
  try {
    let data = await ddb.putItem(params).promise()
    return sendRes(200, JSON.stringify(data))
  } catch (err) {
    console.log("Oops Error");
    return sendRes(403, err.toString());
  }
}
When you return from an async handler in Lambda, the current execution environment is frozen immediately (unlike when you use a non-async handler with a callback). This is why the writes to the database usually do not succeed.
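To illustrate the difference, here is a minimal sketch (reusing the ddb client, params, and sendRes helper from the answer above):
// Broken: the write is started but never awaited. The async handler's
// promise resolves immediately, Lambda freezes the execution environment,
// and the in-flight request may never complete.
exports.brokenHandler = async (event) => {
  ddb.putItem(params, () => console.log("may never run"));
  return sendRes(200, "done?");
};

// Working: awaiting the promise keeps the invocation alive until
// DynamoDB has acknowledged the write.
exports.workingHandler = async (event) => {
  const data = await ddb.putItem(params).promise();
  return sendRes(200, JSON.stringify(data));
};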

Access Postgres database with AWS Lambda function using Node.js

My 'handler.js' function connects to the Postgres database and performs a few queries using Node.js. The function worked successfully locally, but when run as an AWS Lambda, the queries don't work.
I have also added the corresponding VPC security groups and subnet IDs in 'serverless.yml'.
The error logs in CloudWatch just show the console.log statements before and after the queries, and the function terminates with a timed-out error. I could not figure out the issue.
I have attached the example 'handler.js' code below:
var { Pool, Client } = require('pg');

export function apiTest(event, context, callback) {
  var connectionString = 'postgresql://username:password@database.server.com:xxxx/dbname';
  var client = new Client({
    connectionString: connectionString,
  })
  client.connect();
  console.log('Connected to PostgreSQL database');
  client.query('SELECT * from table', (err, res) => {
    console.log("inside query");
    var jsonString = JSON.stringify(res.rows);
    var jsonObj = JSON.parse(jsonString);
    const headers = {
      "Access-Control-Allow-Origin": "*",
      "Access-Control-Allow-Credentials": true
    };
    // Return status code 500 on error
    if (err) {
      const response = {
        statusCode: 500,
        headers: headers,
        body: JSON.stringify({
          status: false
        })
      };
      callback(null, response);
      client.end();
      return;
    }
    const response = {
      statusCode: 200,
      headers: headers,
      body: JSON.stringify(jsonObj)
    };
    callback(null, response);
    console.log("query success")
    client.end()
    context.succeed(context);
  })
}
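As in the first question above, a likely culprit (assuming the VPC networking is correct) is that the promise returned by client.connect() is ignored, so connection errors are silently swallowed and the function sits until it times out. A minimal async/await sketch of the same handler, with the 'xxxx' port placeholder still to be filled in:
const { Client } = require('pg');

export async function apiTest(event) {
  const client = new Client({
    connectionString: 'postgresql://username:password@database.server.com:xxxx/dbname',
  });
  const headers = {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Credentials": true
  };
  try {
    await client.connect();
    console.log('Connected to PostgreSQL database');
    const res = await client.query('SELECT * from table');
    return { statusCode: 200, headers: headers, body: JSON.stringify(res.rows) };
  } catch (err) {
    console.log('Database ' + err);
    return { statusCode: 500, headers: headers, body: JSON.stringify({ status: false }) };
  } finally {
    // close the connection so the event loop can drain
    await client.end();
  }
}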
