I have an AWS Lambda function (Node.js) talking to an Aurora database. Both belong to the same VPC, with internet access enabled via the subnet. The RDS cluster also has an inbound rule that allows traffic from the VPC, which is what the Lambda uses (it should be in the same VPC). To my surprise, I found that the RDSDataService from the AWS SDK fails to connect to the database, whereas the mysql package works. The two code snippets follow.
I would very much like to use the AWS SDK, as that would reduce the deployment bundle size: I wouldn't have to include the mysql package in the bundle at all. Is there any way to achieve that?
Failed attempt to use RDSDataService
const AWS = require("aws-sdk");
const rdsData = new AWS.RDSDataService({
  params: {
    dbClusterOrInstanceArn: 'rds.cluster.arn',
    awsSecretStoreArn: 'rds.cluster.secret.arn',
    database: 'mydb'
  },
  endpoint: 'mydb.endpoint'
});

return new Promise((resolve, reject) => {
  try {
    rdsData.executeSql({
      dbClusterOrInstanceArn: 'rds.cluster.arn',
      awsSecretStoreArn: 'rds.cluster.secret.arn',
      database: 'mydb',
      sqlStatements: "select 1 + 1 as result;"
    }, (err, data) => {
      if (err) {
        return reject(err);
      }
      const response = {
        statusCode: 200,
        body: JSON.stringify(data),
      };
      resolve(response);
    });
  } catch (er) {
    reject(er);
  }
});
Working implementation using mysql
const mysql = require('mysql');
const connection = mysql.createConnection({
  host: 'mydb.endpoint',
  user: 'user',
  password: 'password',
  port: 3306,
  database: 'mydb',
  debug: false
});

connection.connect(function (err) {
  if (err) context.fail();
  else {
    connection.query('select 1 + 1 as result', function (error, results, fields) {
      if (error) throw error;
      resolve('The solution is: ' + JSON.stringify(results, undefined, 2));
    });
  }
});
connection.end();
As it turned out, the Data API is not yet available in my region. The supported regions are listed here: https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/data-api.html#data-api.regions
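For reference, once the function targets a cluster in a supported region (and the Data API is enabled on it, which at the time meant Aurora Serverless), a minimal sketch using the newer executeStatement call might look like the following; the region and ARNs are placeholders, not values from my setup.

const AWS = require("aws-sdk");

// Region and ARNs below are placeholders; the Data API client is pointed
// at a supported region explicitly rather than relying on the default.
const rdsData = new AWS.RDSDataService({ region: "us-east-1" });

exports.handler = async () => {
  const data = await rdsData.executeStatement({
    resourceArn: "arn:aws:rds:us-east-1:123456789012:cluster:mydb-cluster",
    secretArn: "arn:aws:secretsmanager:us-east-1:123456789012:secret:mydb-secret",
    database: "mydb",
    sql: "select 1 + 1 as result"
  }).promise();

  return {
    statusCode: 200,
    body: JSON.stringify(data.records)
  };
};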
Related
I'm using the Snowflake Node.js driver to connect to a DB. When running the connector from a local server I have no issues. However, when I try the same function running in Lambda I can't seem to connect. There are no errors, exceptions, or timeouts... just nothing. Here is the code I'm using, per their documentation.
var snowflake = require("snowflake-sdk");
var connection = snowflake.createConnection({
  account: "****",
  username: "******",
  password: "******",
});

connect(connection);

const response = {
  statusCode: 200,
  body: JSON.stringify("Hello from Lambda!"),
};
return response;

function connect(connection) {
  console.log("in connection");
  let connection_ID;
  try {
    connection.connect(function (err, conn) {
      if (err) {
        console.error("Unable to connect: " + err);
      } else {
        console.log("Successfully connected to Snowflake");
        // Optional: store the connection ID.
        connection_ID = conn.getId();
      }
      console.log(connection_ID);
    });
  } catch (err) {
    console.log(err);
  }
}
For clarity, my Lambda has no issues connecting to other APIs, and is not running behind a VPC.
Any help would be greatly appreciated.
If you have not selected any VPC for your Lambda function, it will use the default VPC of the region.
Can you select a VPC that has access to the Snowflake public endpoints and check?
If it is still an issue, please post the CloudWatch logs; they should give a clue.
You can also check on the Snowflake History page whether any client-side connection request arrives from the Lambda.
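Separately from the VPC question, note that in the snippet above the response is built and returned before the asynchronous connect callback ever runs, so the function can finish without logging anything. A minimal sketch (an assumption, not the original code) that wraps the connect call in a Promise inside an async handler so the Lambda waits for the result:

const snowflake = require("snowflake-sdk");

exports.handler = async (event) => {
  const connection = snowflake.createConnection({
    account: "****",    // placeholders, as in the original snippet
    username: "******",
    password: "******",
  });

  // Wait for the connection attempt to finish before responding.
  await new Promise((resolve, reject) => {
    connection.connect(function (err, conn) {
      if (err) {
        console.error("Unable to connect: " + err);
        return reject(err);
      }
      console.log("Successfully connected to Snowflake, id: " + conn.getId());
      resolve(conn);
    });
  });

  return {
    statusCode: 200,
    body: JSON.stringify("Hello from Lambda!"),
  };
};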
I am new to AWS. I am trying to connect to an AWS RDS PostgreSQL instance from a Lambda function. I followed the AWS documentation, but it uses Python for the Lambda functions. Below is my code.
'use strict';
var pg = require('pg');

exports.handler = function (event, context) {
  var dbConfig = {
    username: '<username>',
    password: '<password>',
    database: '<database>',
    host: '<db-endpoint>',
  };
  var client = new pg.Client(dbConfig);
  try {
    client.connect();
    context.callbackWaitsForEmptyEventLoop = false;
    client.end();
  }
  catch (err) {
    console.log(err);
    client.end();
  }
};
I am getting a timeout error, as below:
START RequestId: 368e619e-ed9d-4241-93a5-764ee01aa847 Version: $LATEST
2020-06-15T16:28:18.911Z 368e619e-ed9d-4241-93a5-764ee01aa847 INFO connected
END RequestId: 368e619e-ed9d-4241-93a5-764ee01aa847
REPORT RequestId: 368e619e-ed9d-4241-93a5-764ee01aa847 Duration: 20020.16 ms Billed Duration: 20000 ms Memory Size: 128 MB Max Memory Used: 70 MB Init Duration: 150.01 ms
2020-06-15T16:28:38.901Z 368e619e-ed9d-4241-93a5-764ee01aa847 Task timed out after 20.02 seconds
Please advise on the error.
I have a few other questions to make sure my code is correct:
I gave the DB instance endpoint URL for db-endpoint. Is that right? If not, what should I use there?
Is there any proper documentation, for beginners like me, about Lambda functions in Node.js connecting to PostgreSQL on RDS?
You're not returning anything from the Lambda, so the request keeps hanging without a response until it times out.
Use the third argument, callback, supplied to the handler to respond, or return a Promise.
'use strict';
var pg = require('pg');

exports.handler = function (event, context, callback) {
  var dbConfig = {
    username: '<username>',
    password: '<password>',
    database: '<database>',
    host: '<db-endpoint>',
  };
  var client = new pg.Client(dbConfig);
  try {
    client.connect();
    context.callbackWaitsForEmptyEventLoop = false;
    client.end();
    // send the response
    callback(null, "Some Response");
  }
  catch (err) {
    console.log(err);
    client.end();
    callback(err);
  }
};
AWS example: AWS Lambda NodeJS Connect to RDS Postgres Database
You can read the official JS docs, with all methods and properties, here: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/RDS.html
Hope this helps!
Here is the answer using async/await syntax:
const { Client } = require("pg");

exports.handler = async (event, context, callback) => {
  const dbConfig = {
    host: process.env.RDS_HOSTNAME,
    user: process.env.RDS_USERNAME,
    password: process.env.RDS_PASSWORD,
    port: process.env.RDS_PORT,
    database: process.env.RDS_DATABASE,
  };
  const client = new Client(dbConfig);
  try {
    await client.connect();
    const res = await client.query("SELECT * FROM your_table");
    await client.end();
    callback(null, res.rows);
  } catch (err) {
    await client.end();
    callback(err);
  }
};
I was trying to post a query to the DB (RDS) using an async handler.
However, I ran into the following issues.
Half of the time, the Lambda function completes but the query is not successfully sent to RDS. The other half of the time, it is sent completely. I tried adding a setTimeout function to extend the Lambda execution time by 3 seconds, and then the query is sent every time.
Also, the log shows the error:
INFO Error: Cannot enqueue Query after fatal error.
The following is my code:
var mysql = require('mysql');
var connection = mysql.createConnection({
  host     : '***',
  user     : '***',
  password : '***',
  database : '***'
});

exports.handler = async (event) => {
  const sql = `INSERT INTO forms VALUES(777,2,3,4,5,6,7,8,9,10,11);`;
  const query = (x) => {
    return new Promise((resolve, reject) => {
      resolve(connection.query(x, function (error, results, fields) {
        console.log(error);
        console.log(results);
        console.log(fields);
      }));
    });
  };
  await query(sql);
};
With the timeout function,
var mysql = require('mysql');
var connection = mysql.createConnection({
  host     : '***',
  user     : '***',
  password : '***',
  database : '***'
});

exports.handler = async (event) => {
  const sql = `INSERT INTO forms VALUES(777,2,3,4,5,6,7,8,9,10,11);`;
  const query = (x) => {
    return new Promise((resolve, reject) => {
      resolve(connection.query(x, function (error, results, fields) {
        console.log(error);
        console.log(results);
        console.log(fields);
      }));
    });
  };
  await query(sql);
  await wait(3000);
};

const wait = (x) => {
  return new Promise((resolve, reject) => {
    setTimeout(() => { resolve(console.log("delay")); }, x);
  });
};
The first value is a primary key. A constant 777 is sent as a check: if the error shows a duplicate primary key, the query was successfully sent; if there is no error, the query was not sent even though the Lambda finished.
The execution result succeeds but shows:
START RequestId: e541fe4b-6927-4fbb-90b4-750f77e5f460 Version: $LATEST
2019-12-19T01:54:45.212Z e541fe4b-6927-4fbb-90b4-750f77e5f460 INFO Error: **Cannot enqueue Query after fatal error**.
at Protocol._validateEnqueue (/var/task/node_modules/mysql/lib/protocol/Protocol.js:212:16)
at Protocol._enqueue (/var/task/node_modules/mysql/lib/protocol/Protocol.js:138:13)
at Connection.query (/var/task/node_modules/mysql/lib/Connection.js:201:25)
at /var/task/index.js:14:24
at new Promise (<anonymous>)
at query (/var/task/index.js:13:10)
at Runtime.exports.handler (/var/task/index.js:20:7)
at Runtime.handleOnce (/var/runtime/Runtime.js:66:25) {
code: 'PROTOCOL_ENQUEUE_AFTER_FATAL_ERROR',
fatal: false
}
2019-12-19T01:54:45.213Z e541fe4b-6927-4fbb-90b4-750f77e5f460 INFO undefined
2019-12-19T01:54:45.213Z e541fe4b-6927-4fbb-90b4-750f77e5f460 INFO undefined
2019-12-19T01:54:45.262Z e541fe4b-6927-4fbb-90b4-750f77e5f460 INFO delay
END RequestId: e541fe4b-6927-4fbb-90b4-750f77e5f460
REPORT RequestId: e541fe4b-6927-4fbb-90b4-750f77e5f460 Duration: 51.09 ms Billed Duration: 100 ms Memory Size: 128 MB Max Memory Used: 80 MB
Could you please advise, and also tell me the best way to execute this?
Managing RDBMS connections in any environment is not a trivial task. Lambda adds a layer of complexity here. You need to understand the distinction between warm and cold restarts, what it means for resources created outside of your handler function, when connection pools are appropriate, and when and how to release connections.
Persistent connections to a database are not particularly suitable in a microservices, FaaS environment like Lambda. That's one reason that Aurora Serverless supports an HTTP Data API (and hopefully other DB engines will too at some point).
Read How To: Manage RDS Connections from AWS Lambda Serverless Functions.
Also be aware of the new Amazon RDS Proxy with AWS Lambda.
In your particular case, the most obvious concern is that you are repeatedly creating DB connections but never releasing them (unless that is a built-in feature of the mysql package's query function that I'm not aware of).
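To make that concrete, here is a minimal sketch (not your code, and using placeholder environment variables) that creates the connection inside the handler, promisifies the query, and always releases the connection before returning:

const mysql = require('mysql');

exports.handler = async (event) => {
  // Create the connection per invocation so a connection that hit a fatal
  // error in a previous (warm) invocation is never reused.
  const connection = mysql.createConnection({
    host: process.env.DB_HOST,         // placeholder environment variables
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    database: process.env.DB_NAME
  });

  const query = (sql) =>
    new Promise((resolve, reject) => {
      connection.query(sql, (error, results) => {
        if (error) return reject(error);
        resolve(results);
      });
    });

  try {
    const results = await query('INSERT INTO forms VALUES(777,2,3,4,5,6,7,8,9,10,11);');
    return { statusCode: 200, body: JSON.stringify(results) };
  } finally {
    // Release the connection so sessions do not pile up on the RDS side.
    connection.end();
  }
};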
You can increase the Lambda timeout up to 15 minutes. But if you are calling the Lambda through API Gateway, the timeout is 29 seconds.
Here is the code that works for me.
const mysql = require('mysql');
const con = mysql.createConnection({
  host: process.env.RDS_HOSTNAME,
  user: process.env.RDS_USERNAME,
  password: process.env.RDS_PASSWORD,
  port: process.env.RDS_PORT,
  connectionLimit: 10,
  multipleStatements: true, // Allow multiple SQL statements per query
  debug: true
  // ,database: 'testdb1'
});

exports.handler = async (event) => {
  try {
    const data = await new Promise((resolve, reject) => {
      con.connect(function (err) {
        if (err) {
          reject(err);
        }
        const sql = `INSERT INTO forms VALUES(777,2,3,4,5,6,7,8,9,10,11);`;
        con.query(sql, function (err, result) {
          if (err) {
            console.log("Error->" + err);
            reject(err);
          }
          resolve(result);
        });
      });
    });
    return {
      statusCode: 200,
      body: JSON.stringify(data)
    };
  } catch (err) {
    return {
      statusCode: 400,
      body: err.message
    };
  }
};
Reference: aws lambda with rds mysql DDL command not working
I am using the Node.js SDK. In the basic sample that follows, I open a bucket to insert a single record. I have put each method in a promise to force them to run one after another (sequentially) so I can measure each method's running time.
My OS: Ubuntu 16.04
'use strict';
const couchbase = require('couchbase');
const cluster = new couchbase.Cluster('couchbase://localhost');
const uuid = require('uuid/v4');

console.time('auth');
cluster.authenticate('administrator', 'adminadmin');
console.timeEnd('auth');

function open() {
  return new Promise((resolve, reject) => {
    console.time('open');
    let bucket = cluster.openBucket('test', function (err) {
      if (err) {
        console.error(err);
        reject(err);
      }
      resolve(bucket);
    });
  });
}

function insert(bucket, obj) {
  return new Promise((resolve, reject) => {
    console.time('upsert');
    bucket.upsert(`uuid::${obj.name}`, obj, function (err, result) {
      if (err) {
        console.log(err);
        reject(err);
      }
      resolve(bucket);
    });
  });
}

function dc(bucket) { // disconnect
  return new Promise((resolve, reject) => {
    console.time('dc');
    bucket.disconnect();
    resolve('ok');
  });
}

// data to insert
let blog = {
  id: uuid(),
  name: 'Blog A',
  posts: [
    {
      id: uuid(),
      title: 'Post 1',
      content: 'lorem ipsum'
    }
  ]
};

open().then((bucket) => {
  console.timeEnd('open');
  insert(bucket, blog).then((bucket) => {
    console.timeEnd('upsert');
    dc(bucket).then((res) => {
      console.timeEnd('dc');
      console.log(res);
    });
  });
});
The output is:
auth: 0.237ms
open: 58117.771ms <--- this shows the problem
upsert: 57.006ms
dc: 0.149ms
ok
I ran sdk-doctor. It gave me two lines worth mentioning:
“WARN: Your connection string specifies only a single host. You should consider adding additional static nodes from your cluster to this list to improve your applications fault-tolerance”
“INFO: Failed to retreive cluster information (status code: 401)”
and the summary is:
Summary:
[WARN] Your connection string specifies only a single host. You should consider adding additional static nodes from your cluster to this list to improve your applications fault-tolerance
Would anyone please help?
According to this answer on the Couchbase forum, it seems that my DNS servers were not configured properly.
It looks as though your DNS servers may be configured improperly. As part of the normal bootstrap procedure, we attempt to resolve SRV records for the hostname that is provided; it looks like your DNS servers may be timing out when trying to do this, causing a substantial delay when connecting. A quick way to test this theory is to add an additional hostname to your bootstrap list to disqualify the connection string from our DNS-SRV policy (for instance, use: couchbase://localhost,invalidhostname).
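For instance, applying that suggestion to the snippet above would only mean changing the connection string (the second host name is deliberately invalid, exactly as the forum reply suggests):

// Adding a second host disqualifies the string from the DNS-SRV lookup.
const cluster = new couchbase.Cluster('couchbase://localhost,invalidhostname');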
I want to use tedious in my Azure web app, following this tutorial: https://learn.microsoft.com/en-us/azure/sql-database/sql-database-connect-query-nodejs. I get the error "Uncaught Error: Module name "tedious" has not been loaded yet" with require('tedious').Connection. How do I load this module in Azure?
The JavaScript code:
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;

// Create connection to database
var config = {
  userName: '******', // update me
  password: '*****',  // update me
  server: '*******',  // update me
  options: {
    database: 'signals' // update me
  }
};
var connection = new Connection(config);

// Attempt to connect and execute queries if connection goes through
connection.on('connect', function (err) {
  if (err) {
    console.log(err);
  } else {
    queryDatabase();
  }
});

function queryDatabase() {
  console.log("test");
  console.log("test");
  console.log('Reading rows from the Table...');

  // Read all rows from table
  request = new Request(
    "SELECT * FROM signals",
    function (err, rowCount, rows) {
      console.log(rowCount + ' row(s) returned');
    }
  );

  request.on('row', function (columns) {
    columns.forEach(function (column) {
      console.log("%s\t%s", column.metadata.colName, column.value);
    });
  });

  connection.execSql(request);
}
How do I load this module in Azure?
In Azure, you can install Node.js modules through the Kudu Debug Console, which can be accessed via https://<your-web-app-name>.scm.azurewebsites.net/DebugConsole
cd to D:\home\site\wwwroot in the console.
Run the following command inside the wwwroot directory: npm install tedious
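Alternatively (this is an assumption about your deployment setup, not part of the tutorial), if the app is deployed with a package.json at the site root, listing tedious as a dependency lets the deployment's npm install pull it in. The name and version below are placeholders:

{
  "name": "my-web-app",
  "version": "1.0.0",
  "dependencies": {
    "tedious": "^2.0.0"
  }
}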