Access Postgres database with AWS Lambda function using Node.js

My 'handler.js' function connects to a Postgres database and performs a few queries using Node.js. The function works locally, but when invoked as an AWS Lambda the queries don't run.
I have also added the corresponding VPC security groups and subnet IDs in 'serverless.yml'.
The logs in CloudWatch show only the console.log statements before and after the queries, and the function terminates with a timeout error. I could not figure out the issue.
I have attached the example 'handler.js' code below:
var { Pool, Client } = require('pg');

export function apiTest(event, context, callback) {
    var connectionString = 'postgresql://username:password@database.server.com:xxxx/dbname';
    var client = new Client({
        connectionString: connectionString,
    })
    client.connect();
    console.log('Connected to PostgreSQL database');
    client.query('SELECT * from table', (err, res) => {
        console.log("inside query");
        var jsonString = JSON.stringify(res.rows);
        var jsonObj = JSON.parse(jsonString);
        const headers = {
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Credentials": true
        };
        // Return status code 500 on error
        if (err) {
            const response = {
                statusCode: 500,
                headers: headers,
                body: JSON.stringify({
                    status: false
                })
            };
            callback(null, response);
            client.end();
            return;
        }
        const response = {
            statusCode: 200,
            headers: headers,
            body: JSON.stringify(jsonObj)
        };
        callback(null, response);
        console.log("query success")
        client.end()
        context.succeed(context);
    })
}
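A likely cause, given the answers to the similar questions below, is that client.connect() and client.query() are asynchronous and nothing waits for them, so the open connection keeps the event loop (and the Lambda) alive until it times out. Here is a minimal async/await sketch of the same handler (an assumption, not from the original question), which awaits the connection and the query and closes the client before returning:

const { Client } = require('pg');

module.exports.apiTest = async (event) => {
    const client = new Client({
        // same placeholder connection string as in the question above
        connectionString: 'postgresql://username:password@database.server.com:xxxx/dbname',
    });
    const headers = {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Credentials": true
    };
    try {
        await client.connect();
        const res = await client.query('SELECT * from table');
        return { statusCode: 200, headers: headers, body: JSON.stringify(res.rows) };
    } catch (err) {
        console.log(err);
        return { statusCode: 500, headers: headers, body: JSON.stringify({ status: false }) };
    } finally {
        await client.end(); // release the connection so Lambda can finish
    }
};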

Related

API works randomly while putting data into DynamoDB

I'm trying to put data into DynamoDB using a serverless deployment. I have added the permission to write to DynamoDB.
The API always responds with {"message": "Internal server error"}, but manages to put the data into the DB once in about every 5-6 attempts.
Following is the serverless.yaml config
handler: dynamoUpdate.handler
events:
  - http:
      path: /createdbentry
      method: get
      cors: true
Following is the code:
const AWS = require('aws-sdk')
AWS.config.update({ region: process.env.REGION || 'us-east-1' })
var ddb = new AWS.DynamoDB({ apiVersion: '2012-08-10' });

exports.handler = async (event) => {
    return await createDBEntry("i123", "Working");
}

const sendRes = (status, body) => {
    var response = {
        statusCode: status,
        headers: {
            "Content-Type": "application/json"
        },
        body: body
    };
    return response;
};

const createDBEntry = async function(id, result) {
    var params = {
        TableName: 'emplist',
        Item: {
            'ID': { S: id },
            'Summary': { S: result },
        }
    };
    ddb.putItem(params, function(err, data) {
        console.log("Here comes me.")
        if (err) {
            console.log("Opps Error");
            return sendRes(403, err);
        } else {
            console.log("Complete")
            return sendRes(200, data);
        }
    });
}
How can this be resolved?
The problem is that you are not returning any promise or awaiting anything async in your createDBEntry function. Your handler therefore returns undefined, which makes API Gateway return a 500 Internal server error.
You are mixing callbacks and async/await.
Your createDBEntry function should look like this.
const createDBEntry = async function(id, result) {
    var params = {
        TableName: 'emplist',
        Item: {
            'ID': { S: id },
            'Summary': { S: result },
        }
    };
    try {
        let data = await ddb.putItem(params).promise()
        return sendRes(200, JSON.stringify(data))
    } catch (err) {
        console.log("Oops Error");
        return sendRes(403, err.toString());
    }
}
When you return from an async handler in Lambda, the current execution environment is frozen immediately, unlike a non-async handler with a callback, where Lambda by default waits for the event loop to empty before freezing. This is why the writes to the database usually do not succeed.
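A minimal sketch (not from the original answer) illustrating why the callback version fails under an async handler:

const AWS = require('aws-sdk');
const ddb = new AWS.DynamoDB({ apiVersion: '2012-08-10' });

exports.handler = async (event) => {
    // The write is started but never awaited, so the handler's promise
    // resolves immediately and Lambda freezes the execution environment
    // before DynamoDB responds.
    ddb.putItem({ TableName: 'emplist', Item: { 'ID': { S: 'i123' } } }, (err, data) => {
        console.log('this callback may never run');
    });
    return { statusCode: 200, body: 'returned before the write finished' };
};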

Node JS serverless rest API lambda function that first performs GET request and then POST if condition is met

I'm new to Node.js and I'm supposed to write a serverless REST API for an online store (school project). The team I'm in is responsible for the orders customers place. For an order to be placed there has to be enough quantity in inventory (another API), so we need to check the quantity in inventory with a GET before we store the order in a database with a POST. How should we go about this? This is what I have tried, but I end up getting a timeout. The code below is based on this example: aws-node-rest-api-with-dynamodb, which I used to get the hang of Node.js and serverless.
.yml file
functions:
  create:
    handler: todos/test.f
    events:
      - http:
          path: todos
          method: post
          cors: true
test.js
const create = require("./create.js");

exports.f = function() {
    const https = require('https');
    https.get('url goes here', (resp) => {
        let data = '';
        // A chunk of data has been received.
        resp.on('data', (chunk) => {
            data += chunk;
        });
        // The whole response has been received. Print out the result.
        resp.on('end', () => {
            console.log(data);
            var str = String(data);
            console.log("Check: " + (str.trim() == "OK"))
            create.c(); //also tried create.create();
        });
    }).on("error", (err) => {
        console.log("Error: " + err.message);
    });
}
create.js
'use strict';
const uuid = require('uuid');
const dynamodb = require('./dynamodb');

exports.c = function() {
    console.log("Fire!");
}

module.exports.create = (event, context, callback) => {
    const timestamp = new Date().getTime();
    const data = JSON.parse(event.body);
    if (typeof data.text !== 'string') {
        console.error('Validation Failed');
        callback(null, {
            statusCode: 400,
            headers: { 'Content-Type': 'text/plain' },
            body: 'Couldn\'t create the todo item.',
        });
        return;
    }
    const params = {
        TableName: 'todos',
        Item: {
            id: uuid.v1(),
            text: data.text,
            checked: false,
            createdAt: timestamp,
            updatedAt: timestamp,
        },
    };
    // write the todo to the database
    dynamodb.put(params, (error) => {
        // handle potential errors
        if (error) {
            console.error(error);
            callback(null, {
                statusCode: error.statusCode || 501,
                headers: { 'Content-Type': 'text/plain' },
                body: 'Couldn\'t create the todo item.',
            });
            return;
        }
        // create a response
        const response = {
            statusCode: 200,
            body: JSON.stringify(params.Item),
        };
        callback(null, response);
    });
};
Any thoughts on how to get this to work?
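One way to structure this is a sketch under the same assumptions as test.js above (the inventory check returns the plain text "OK"): wrap https.get in a promise, make the handler async, and only invoke the existing create handler when the check passes:

const https = require('https');
const create = require('./create.js');

// Wrap the GET request in a promise so the handler can await it.
const fetchInventory = (url) =>
    new Promise((resolve, reject) => {
        https.get(url, (resp) => {
            let data = '';
            resp.on('data', (chunk) => { data += chunk; });
            resp.on('end', () => resolve(data));
        }).on('error', reject);
    });

module.exports.f = async (event, context) => {
    const data = await fetchInventory('url goes here'); // same placeholder URL as above
    if (String(data).trim() !== 'OK') {
        return { statusCode: 409, body: 'Not enough inventory' };
    }
    // Enough inventory: run the existing callback-style create handler,
    // wrapped in a promise so the async handler waits for its response.
    return new Promise((resolve, reject) => {
        create.create(event, context, (err, response) => (err ? reject(err) : resolve(response)));
    });
};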

AWS Lambda NodeJS Connect to RDS Postgres Database

I'm trying to test connectivity between my Lambda and an RDS instance. I have them both on the same private subnets with all ports open in the security group. When I trigger the Lambda I do see a connection opened on the RDS instance. However, the Lambda times out after 4 minutes, 40 seconds. The PG environment variables are set in the Lambda configuration.
const { Client } = require('pg');
const client = new Client();
var hello = [
    { name: 'test', description: 'testerface' }
];

exports.handler = async (event, context, callback) => {
    // Postgres Connect
    client.connect();
    const res = client.query('SELECT $1::text as message', ['Hello world!']);
    console.log(res);
    var response = {
        "statusCode": 200,
        "headers": {
            "Content-Type" : "application/json"
        },
        "body": JSON.stringify(hello),
        "isBase64Encoded": false
    };
    callback(null, response);
};
How can I get back the response from the connection in the Lambda's logs - or even better in the response body?
You need to handle the client connection better. That means catching any exceptions the client may throw and releasing the connection properly. This code will return the output of the query in the response body:
const pg = require('pg')
const pool = new pg.Pool()

async function query(q) {
    const client = await pool.connect()
    let res
    try {
        await client.query('BEGIN')
        try {
            res = await client.query(q)
            await client.query('COMMIT')
        } catch (err) {
            await client.query('ROLLBACK')
            throw err
        }
    } finally {
        client.release()
    }
    return res
}

exports.handler = async (event, context, callback) => {
    try {
        const { rows } = await query("select * from pg_tables")
        console.log(JSON.stringify(rows[0]))
        var response = {
            "statusCode": 200,
            "headers": {
                "Content-Type" : "application/json"
            },
            "body": JSON.stringify(rows),
            "isBase64Encoded": false
        };
        callback(null, response);
    } catch (err) {
        console.log('Database ' + err)
        callback(null, 'Database ' + err);
    }
};
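Note that new pg.Pool() with no arguments picks up the standard PG* environment variables (PGHOST, PGPORT, PGUSER, PGPASSWORD, PGDATABASE), which is why this works with the environment variables the question says are set in the Lambda configuration.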

Serverless + Firebase: API does not reply with message

I am creating an API which will update a Firebase Realtime Database, using Node.js, Serverless, and JavaScript.
My .js file:
var admin = require("firebase-admin");
var serviceAccount = require("../xxxxxxx-firebase-adminsdk-xxxxxxx.json");

module.exports.saveState = (event, context, callback) => {
    const body = JSON.parse(event.body);
    var timestamp_create = body.timestamp_create;
    try {
        admin.initializeApp({
            credential: admin.credential.cert(serviceAccount),
            databaseURL: "https://xxxxxxx.firebaseio.com"
        });
    } catch (err) {
        console.log('Firebase initialization error', err.stack);
    }
    var db = admin.database();
    var ref = db.ref('item/');
    ref.child(timestamp_create).set({
        state: 'empty'
    }, function (error) {
        if (error) {
            console.log("Data could not be saved." + error);
        } else {
            console.log("Data saved successfully.");
            return callback(null, {
                headers: {
                    "Access-Control-Allow-Origin": "*",
                },
                statusCode: 200,
                body: JSON.stringify({ msg: "Data saved successfully." })
            });
        }
    });
};
I deployed the API to AWS and tested it with Postman.
Problem and my attempts:
After saving the item to the Firebase Realtime Database, the API didn't reply with the message; it fell into a timeout.
I checked CloudWatch: the log is printed ("Data saved successfully."), but the return callback is never executed.
When I try the API with "serverless offline", it works normally and returns the message.
The API doesn't return the callback in the AWS (Lambda) environment.
Any suggestion is appreciated.
After a long round of googling and testing, I came to a solution; I'll leave it here for reference:
The Firebase database connection is not released after saving the data, so AWS Lambda cannot return the callback. The simple solution is to close the database connection after the task:
var ref = db.ref('item/');
ref.child(timestamp_create).set({
    state: 'empty'
}, function (error) {
    if (error) {
        console.log("Data could not be saved." + error);
    } else {
        console.log("Data saved successfully.");
        db.goOffline(); // this line will release the database connection
        return callback(null, {
            headers: {
                "Access-Control-Allow-Origin": "*",
            },
            statusCode: 200,
            body: JSON.stringify({ msg: "Data saved successfully." })
        });
    }
});
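An alternative worth noting (an assumption, not part of the original answer) is to tell Lambda not to wait for the event loop to drain, so the callback response is returned even while the Firebase connection stays open:

module.exports.saveState = (event, context, callback) => {
    // Assumption: return the callback result immediately instead of waiting
    // for open handles (such as the Firebase connection) to close.
    context.callbackWaitsForEmptyEventLoop = false;
    // ... proceed with JSON.parse(event.body) and ref.child(...).set(...) as above
};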

Serverless framework can't use promise?

I'm trying to build REST APIs with the Serverless Framework.
Some of the functions are asynchronous, so I'm using promises.
But the promise is not working (no response).
When I use the await keyword instead, it works fine, but I think this is a bad approach. How should I use promises with the Serverless Framework?
Any advice or suggestion would be appreciated. Thank you in advance.
You can use promises in many ways. Personally, I separate the promise into another function.
I made an example with the request module:
const request = require("request");

// The promise
const requestPromise = (url, options) =>
    new Promise((resolve, reject) => {
        options = options || {};
        const processRequest = (err, response) => (err ? reject(err) : resolve(response));
        request(url, options, processRequest);
    });

// You can use it like this
module.exports = (event, context) => {
    let url = event.url;
    requestPromise(url)
        .then(response => {
            // Do something
            context.succeed({ succeed: true /* put return data here */ })
        })
        .catch(details => context.fail({ error: true, details: details }));
}

// Or this
module.exports = async (event, context) => {
    try {
        let url = event.url;
        let response = await requestPromise(url);
        // Do something
        context.succeed({ succeed: true /* put return data here */ });
    } catch (details) {
        context.fail({ error: true, details: details });
    }
}
If you use async/await, you need to add try/catch to handle errors.
I am coding a serverless-kubeless API for the MySQL world database and had to solve this problem yesterday. I arrived at the following solution. It's not feature complete, but you didn't ask for that. So here is a working GET endpoint which accepts various query parameters to customise the query.
'use strict';
const pool = require('./database');

module.exports.handler = async (event, context) => new Promise((resolve, reject) => {
    let request = event.extensions.request;
    let response = event.extensions.response;
    try {
        let handleResults = (err, results, fields) => {
            if (err) {
                response.status(500).send({
                    success: false,
                    message: err.message,
                });
            } else {
                response.status(200).send({
                    success: true,
                    count: results.length,
                    data: results,
                });
            }
        }
        if (typeof(request.query.id) !== "undefined") {
            // search for a specific region by id
            if (Number.isNaN(Number(request.query.id))) {
                response.status(500).send({
                    success: false,
                    message: "id query param was not a number",
                });
            }
            pool.query("select id,name,code,country_id from regions where id = ?", [request.query.id], handleResults);
        } else if (typeof(request.query.country) !== "undefined") {
            // search for a region list from a specific country
            if (Number.isNaN(Number(request.query.country))) {
                response.status(500).send({
                    success: false,
                    message: "country query param was not a number",
                });
            }
            pool.query("select id,name,code,country_id from regions where country_id = ?", [request.query.country], handleResults);
        } else {
            response.status(400).send({
                success: false,
                message: "Could not find country, or region query parameter. Require a search term"
            });
        }
    } catch (exception) {
        response.status(500).send({
            success: false,
            message: exception.message
        });
    }
});
and database.js:
const mysql = require("mysql");
const util = require('util');

const pool = mysql.createPool({
    connectionLimit: 10,
    host: process.env.DATABASE_HOSTNAME,
    user: process.env.DATABASE_USERNAME,
    port: process.env.DATABASE_PORT,
    password: process.env.DATABASE_PASSWORD,
    database: process.env.DATABASE_NAME,
});

pool.getConnection((err, connection) => {
    if (err) {
        if (err.code === 'PROTOCOL_CONNECTION_LOST') {
            console.error('Database connection was closed.');
        }
        if (err.code === 'ER_CON_COUNT_ERROR') {
            console.error('Database has too many connections.');
        }
        if (err.code === 'ECONNREFUSED') {
            console.error('Database connection was refused.');
        }
    }
    if (connection) connection.release();
    return;
});

// Magic happens here.
pool.query = util.promisify(pool.query);

module.exports = pool;
I commonly do stuff with Promises in my serverless projects:
//this would be in a module like: services/myhttpservice.js (for example)
const request = require('request');

//wrap the GET HTTP request in a Promise
module.exports.GetUrlPromise = function(url, cookie_session_value) {
    console.log(new Date().getTime() + " GetUrlPromise() CALLED: " + url);
    var j = request.jar();
    if (cookie_session_value) {
        var cookie1 = request.cookie(cookie_name + '=' + cookie_session_value);
        j.setCookie(cookie1, cookie_domain); // domain used by the cookie, maybe make more generic?
    }
    // create the "Basic" auth header
    var auth = "Basic " + Buffer.from(basic_username + ":" + basic_password).toString("base64");
    // create request options
    var options = {
        'method': 'GET',
        'url': url,
        'jar': j,
        'headers': {
            'Authorization': auth, // set Basic auth header that is the base64 of the un:pw combo
            'Content-Type': 'application/json'
        }
    };
    return new Promise((resolve, reject) => {
        request(options, function (error, response, body) {
            if (error) {
                console.log('error:', error);
                reject(error);
            } else {
                console.log('statusCode:', response && response.statusCode);
                // object for returning response results
                var http_resp = {};
                http_resp._session = GetCookieValue(response);
                http_resp.body = body;
                http_resp.statusCode = response.statusCode;
                //http_resp.response = response;
                http_resp.requestType = 'GET';
                console.log(JSON.stringify(http_resp));
                resolve(http_resp);
            }
        });
    });
}
It gives me the ability to make promised calls to my services easily:
//in my controller code:
myhttpservice.GetUrlPromise(page_url, user_session)
    .then((http_resp) => { // etc...
Await and async are not bad practice if used correctly.
If your promises don't depend on each other, you can run them in 'parallel' by collecting the promises (without await) in an array and using const responses = await Promise.all(promisesArray) to wait for all of them to resolve.
For more information refer to this answer, which explains it very well: Call async/await functions in parallel
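For instance, a minimal sketch of that pattern (getUser and getOrders are hypothetical async helpers standing in for real data sources):

// Hypothetical async helpers used only for illustration.
const getUser = async (id) => ({ id, name: 'demo' });
const getOrders = async (id) => [{ orderId: 1, userId: id }];

exports.handler = async (event) => {
    // Start both calls without await so they run in parallel.
    const promisesArray = [
        getUser(event.userId),
        getOrders(event.userId),
    ];
    // Promise.all resolves when every promise resolves,
    // and rejects as soon as any one of them rejects.
    const [user, orders] = await Promise.all(promisesArray);
    return { statusCode: 200, body: JSON.stringify({ user, orders }) };
};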
