AWS Elasticsearch returns No living Connection - node.js

Hello everyone, I'm using the AWS Elasticsearch service and accessing it from Lambda. When I try to connect to Elasticsearch from Lambda, it throws this error:
{ Error: No Living connections
at sendReqWithConnection (/var/task/node_modules/elasticsearch/src/lib/transport.js:226:15)
at next (/var/task/node_modules/elasticsearch/src/lib/connection_pool.js:214:7)
at /var/task/node_modules/async-listener/glue.js:188:31
at process._tickCallback (internal/process/next_tick.js:61:11)
message: 'No Living connections',
body: undefined,
status: undefined }
I'm using Node.js to connect to the ES domain:
// Shared Elasticsearch client for the AWS Elasticsearch Service domain.
// Requests are signed via the http-aws-es connection class; credentials
// come from the standard AWS_* environment variables.
const elasticsearch = require('elasticsearch');
const httpAwsEs = require('http-aws-es');
const AWS = require('aws-sdk');

const clientConfig = {
  host: 'endpointAddress',        // ES domain endpoint (placeholder)
  connectionClass: httpAwsEs,
  httpAuth: 'userName:Passwod',   // basic-auth pair (placeholder)
  amazonES: {
    region: 'us-east-1',
    credentials: new AWS.EnvironmentCredentials('AWS')
  }
};

module.exports = new elasticsearch.Client(clientConfig);
I've tested this on another account where it was working fine. What could be the issue?
Thanks for Reading.

Related

Getting getaddrinfo ENOTFOUND when trying to connect to my AWS Neptune with node.js and gremlin

I am trying to connect to my Amazon Neptune instance from a API GW. I am using Node.js and Lambda
My YML looks like this
# Neptune cluster resource (Properties omitted in this excerpt; indentation
# appears lost in the paste).
NeptuneDBCluster:
Type: "AWS::Neptune::DBCluster"
# Exports the cluster endpoint so the Lambda can read it (e.g. via env var).
Outputs:
NeptuneEndpointAddress:
Description: Neptune DB endpoint address
Value: !GetAtt NeptuneDBCluster.Endpoint
Export:
# NOTE(review): ${env:STAGE} is Serverless Framework variable syntax, resolved
# before deployment; plain CloudFormation !Sub would not substitute it —
# confirm this template is deployed through the Serverless Framework.
Name: !Sub ${env:STAGE}-neptune-endpoint-address
My code looks like this
const gremlin = require('gremlin');

// Neptune cluster endpoint, injected through the function's environment.
const {
  NEPTUNE_ENDPOINT
} = process.env;

const { cardinality: { single } } = gremlin.process;
const DriverRemoteConnection = gremlin.driver.DriverRemoteConnection;
const Graph = gremlin.structure.Graph;

/**
 * Lambda handler: adds a vertex to Neptune built from the request body.
 *
 * @param {object} event - API Gateway event; `event.body` is a JSON string
 *   with `type`, `name`, `username`, `age`, `purpose`.
 * @param {object} context - Lambda context (unused).
 * @param {Function} callback - Lambda callback; receives a 200 response
 *   when the vertex was created.
 */
async function createUserNode(event, context, callback) {
  // BUG FIX: the original line mixed quote styles —
  //   const url = 'wss://" + NEPTUNE_ENDPOINT + ":8182/gremlin';
  // which is ONE string literal, so the driver tried to resolve the text
  // `wss://" + NEPTUNE_ENDPOINT + ...` as a hostname — the source of the
  // getaddrinfo ENOTFOUND error. A template literal interpolates correctly.
  const url = `wss://${NEPTUNE_ENDPOINT}:8182/gremlin`;
  const dc = new DriverRemoteConnection(url);
  const parsedBody = JSON.parse(event.body);
  try {
    const graph = new Graph();
    const g = graph.traversal().withRemote(dc);
    const vertex = await g.addV(parsedBody.type)
      .property(single, 'name', parsedBody.name)
      .property(single, 'username', parsedBody.username)
      .property('age', parsedBody.age)
      .property('purpose', parsedBody.purpose)
      .next();
    if (vertex.value) {
      return callback(null, {
        statusCode: 200,
        body: vertex.value
      });
    }
  } catch (error) {
    // Preserve the original best-effort behavior: log and fall through.
    console.error(error);
  }
}
I keep getting the following error in CloudWatch (I also tried running it from a local JS file and it gives the same error):
ERROR Error: getaddrinfo ENOTFOUND my-url
at GetAddrInfoReqWrap.onlookup [as oncomplete] (dns.js:66:26) {
errno: 'ENOTFOUND',
code: 'ENOTFOUND',
syscall: 'getaddrinfo',
hostname: 'my-url'
}
I also tried writing the endpoint directly instead of reading it from process.env, and I still face the same issue. What am I missing?
Alright, for anyone as confused as I was when trying Neptune for the first time: you need to create a database instance as well. I thought the Serverless Framework would do this for me, but now I know it doesn't.

Error: read ECONNRESET for DynamoDB put request using DocumentClient

I am trying to create a new item in my DynamoDB table using the put function for DocumentClient, but am getting an error that references ECONNRESET. When others have referenced ECONNRESET on stack overflow, it seems that it might be a proxy issue for them. I am not sure how I would go about debugging this though.
Here are the docs I have been using:
https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/dynamodb-example-document-client.html
https://docs.amplify.aws/guides/functions/dynamodb-from-js-lambda/q/platform/js/
Here is the code
import AWS from 'aws-sdk';

AWS.config.update({region: 'us-east-1'})

// Document-level client: accepts plain JS objects instead of DynamoDB
// attribute-value maps.
const docClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});

/**
 * Puts `item` into the `tableName` table. Errors are logged and swallowed,
 * matching the original best-effort contract.
 *
 * @param {string} tableName - Target DynamoDB table.
 * @param {object} item - Item to store (plain object).
 */
export const createItem = async (tableName, item) => {
  const params = { TableName: tableName, Item: item };
  console.log(params);
  try {
    await docClient.put(params).promise();
    console.log("Success");
  } catch (err) {
    console.log(err);
  }
};
and here is the error I get
Error: read ECONNRESET
at TLSWrap.onStreamRead (internal/stream_base_commons.js:209:20) {
errno: -4077
code: 'TimeoutError',
syscall: 'read',
time: 2021-09-25T12:30:23.577z,
region: 'us-east-1',
hostname: 'dynamodb.us-east-1.amazonaws.com',
retryable: true
}
Screenshot of code and terminal:
https://i.stack.imgur.com/f4JvP.png
Somebody helped me out. I was using a company CLI via a proxy to do manual local testing. I had to use this command in the CLI pc login aws --shared-credentials which is pretty specific to where I work.
I also had to include this code:
const proxy = require('proxy-agent');
// Route AWS SDK HTTP traffic through the corporate proxy so requests can
// leave the company network (HTTP_PROXY must be set in the environment).
AWS.config.update({
httpOptions: {
agent: proxy(process.env.HTTP_PROXY)
}
});

How do i connect to aws elastic search via javascript client in react js?

I have a React (TypeScript) project with the code below. Here are the steps I followed: I ran the following commands to install the packages and then ran the code. It always times out. The keys I'm using belong to an AWS user who has full access to the ES domain. I have also found the option being used in someone else's code, as shown right below:
var AWS = require('aws-sdk');
var connectionClass = require('http-aws-es');
var elasticsearch = require('elasticsearch');
var elasticClient = new elasticsearch.Client({
host: ***,
log: 'error',
connectionClass: connectionClass,
**amazonES:** {
credentials: new AWS.EnvironmentCredentials('AWS')
}
});
It defines amazonES (highlighted above). When I try to use it like that, amazonES is not even defined for the Elasticsearch client — is that from a previous version? Below is my code.
npm install elasticsearch7.x ( this downloaded the latest version elasticsearch.js 16.7.1 -> https://www.npmjs.com/package/elasticsearch)
npm install http-aws-es
npm install aws-sdk
code is
import es from 'elasticsearch';
import connectionClass from 'http-aws-es';
import AWS from 'aws-sdk';

// BUG FIXES vs. the original snippet:
//  - the module is imported as `es`, but the code called `elasticsearch.Client`
//    (a ReferenceError: `elasticsearch` is never defined here);
//  - the Client constructor was invoked without `new`;
//  - 'http-aws-es' was re-required inline although it is already imported
//    above as `connectionClass`.
const client = new es.Client({
  hosts: ['host'],
  connectionClass: connectionClass,
  awsConfig: new AWS.Config({
    accessKeyId: 'AKID', secretAccessKey: 'SECRET', region: 'us-west-2'
  })
});

// Quick connectivity probe against the domain.
client.ping({
  requestTimeout: 5000
}, function (error) {
  if (error) {
    console.log(error);
  } else {
    console.log('success');
  }
});

AWS Credentials error: could not load credentials from any providers. ElasticSearch Service node.js

I was having a problem that I think should be posted on the internet. I may not know the internal issue, but I think I have a solution. Anyway the problem:
I'm hosting an ElasticSearch Service on AWS, and I'm trying to access that service locally and or through my ec2 service hosted on AWS.
But when I try to locally I get this error: Request Timeout after 30000ms
When I try it on my ec2 I get this error: AWS Credentials error: Could not load credentials from any providers
Here was how I set up the credentials and made the query:
const AWS = require('aws-sdk');
const connectionClass = require('http-aws-es');
const elasticsearch = require('elasticsearch');

try {
  // Client configured with explicit key-pair credentials inside amazonES
  // (the setup the question reports as not working).
  const clientOptions = {
    host: "https://some-elastic.us-east-1.es.amazonaws.com/",
    log: 'error',
    connectionClass: connectionClass,
    amazonES: {
      region: 'us-east-1',
      credentials: new AWS.Credentials('id', 'key')
    }
  };
  const elasticClient = new elasticsearch.Client(clientOptions);

  // Sample query: delete the 'foo' index, logging success or tracing failure.
  elasticClient.indices
    .delete({ index: 'foo' })
    .then(
      function (resp) {
        console.log("Successful query!");
        console.log(JSON.stringify(resp, null, 4));
      },
      function (err) {
        console.trace(err.message);
      }
    );
} catch (err) {
  console.log(err);
}
So as stated I kept getting this error. I tried many other variations to pass the credentials.
My vague understanding of the problem is that the credentials being set in the amazonES object are being ignored, or that the region isn't being passed along with the credentials. So AWS doesn't know where to search for the credentials.
Anyway here is the solution:
// The working setup: put the keys and region into the SDK's global config
// BEFORE constructing the client, then build the client with
// EnvironmentCredentials inside amazonES.
AWS.config.update({
  secretAccessKey: 'key',
  accessKeyId: 'id',
  region: 'your region ex. us-east-1'
});

const solutionOptions = {
  host: "https://some-elastic.us-east-1.es.amazonaws.com/",
  log: 'error',
  connectionClass: connectionClass,
  amazonES: {
    credentials: new AWS.EnvironmentCredentials('AWS')
  }
};
var elasticClient = new elasticsearch.Client(solutionOptions);
It's a bit of a buggy situation. I couldn't find this solution anywhere online and I hope it helps someone out who runs into the same errors in the future.

How to access AWS Elasticsearch from Node JS

I have my nodejs lambda trying to index data into ES, but it's failing because of this error:
2018-01-15T23:39:09.578Z (node:1) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 131): Authorization Exception ::
User: anonymous is not authorized to perform: es:ESHttpPost on resource: ****
I have applied AmazonESFullAccess policy to the Lambda function role.
What am I missing?
I was able to resolve this issue. Code was missing 'connectionClass' and 'amazonES' parameters.
var AWS = require('aws-sdk');
var connectionClass = require('http-aws-es');
var elasticsearch = require('elasticsearch');
var elasticClient = new elasticsearch.Client({
host: ***,
log: 'error',
connectionClass: connectionClass,
amazonES: {
credentials: new AWS.EnvironmentCredentials('AWS')
}
});

Resources