How to fix "this is not a function" in Node.js - node.js

I am following this article from medium https://blog.bitsrc.io/serverless-backend-using-aws-lambda-hands-on-guide-31806ceb735e
Everything works except that when I attempt to add a record to DynamoDB I get an error that says "this is not a function".
const AWS = require("aws-sdk");
const client = new AWS.DynamoDB.DocumentClient();
const uuid = require("uuid");

module.exports.myHero = async (event) => {
  const data = JSON.parse(event.body);
  const params = {
    TableName: "myHeros",
    Item: {
      id: uuid(),
      name: data.name,
      checked: false
    }
  };
  await client.put(params).promise();
  return {
    statusCode: 200,
    body: JSON.stringify(data)
  };
};
{
  "errorMessage": "client.put(...).promise is not a function",
  "errorType": "TypeError",
  "stackTrace": [
    "module.exports.myHero (/var/task/create.js:30:27)"
  ]
}

In almost all cases, when you call a method xyz() on an AWS client object and it fails with "xyz is not a function", the problem is that you are using an old version of the SDK that does not actually support that method.
Upgrading to the latest AWS SDK version will fix this problem.
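If you want to confirm which aws-sdk version the Lambda runtime is actually loading before you upgrade, here is a minimal diagnostic sketch (just a version log, not part of the fix itself):

const AWS = require("aws-sdk");
// Log the resolved SDK version at cold start; an old version here would explain
// why Request.promise() is missing
console.log("aws-sdk version:", AWS.VERSION);

Bundling a newer aws-sdk inside your deployment package (npm install aws-sdk) takes precedence over the copy built into the Lambda runtime.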

When initializing the DynamoDB client with new AWS.DynamoDB.DocumentClient(), pass options (at least the region parameter) to the DocumentClient constructor.
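For example, a minimal sketch (the region value is an assumption; use the region your table lives in):

const AWS = require("aws-sdk");
// Pass the region explicitly instead of relying on environment defaults
const client = new AWS.DynamoDB.DocumentClient({ region: "us-east-1" });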

Related

Google Cloud Vision - How to Send Request Properties with Node.js

I'm using Google Cloud Vision to detect text on an image. This works about 80% of the time. The other 20%, I get this error:
Error: 3 INVALID_ARGUMENT: Request must specify image and features.
    at Object.callErrorFromStatus (C:\Users\emily\workspace\bot\node_modules\@grpc\grpc-js\build\src\call.js:31:26)
    at Object.onReceiveStatus (C:\Users\emily\workspace\bot\node_modules\@grpc\grpc-js\build\src\client.js:180:52)
    at Object.onReceiveStatus (C:\Users\emily\workspace\bot\node_modules\@grpc\grpc-js\build\src\client-interceptors.js:336:141)
    at Object.onReceiveStatus (C:\Users\emily\workspace\bot\node_modules\@grpc\grpc-js\build\src\client-interceptors.js:299:181)
    at C:\Users\emily\workspace\bot\node_modules\@grpc\grpc-js\build\src\call-stream.js:160:78
    at processTicksAndRejections (node:internal/process/task_queues:78:11) {
  code: 3,
  details: 'Request must specify image and features.',
  metadata: Metadata { internalRepr: Map(0) {}, options: {} },
  note: 'Exception occurred in retry method that was not classified as transient'
When I googled this issue, it seems I need to send specific request properties to resolve this, basically as specified here: https://cloud.google.com/vision/docs/ocr#specify_the_language_optional
However, I have no idea how to send these request parameters with the Node.js code I'm using, and I can't find any examples anywhere. Can someone please help me figure out how to use this? My current code is this:
// Performs text detection on the image file using GCV
(async () => {
  await Jimp.read(attachment.url).then(image => {
    return image
      .invert()
      .contrast(0.5)
      .brightness(-0.25)
      .write('temp.png');
  });
  const [result] = await googleapis.textDetection('temp.png');
  const fullImageResults = result.textAnnotations;
Thanks!
If you are using Node.js with the Vision API, you can refer to this sample quickstart code for using the Node.js client library with the Vision API for TEXT_DETECTION.
For the error that you are facing, you can refer to the code below to add the request parameters:
index.js :
async function quickstart() {
  const vision = require('@google-cloud/vision');
  const client = new vision.ImageAnnotatorClient();
  const request = {
    "requests": [
      {
        "image": {
          "source": {
            "imageUri": "gs://bucket1/download.png"
          }
        },
        "features": [
          {
            "type": "TEXT_DETECTION"
          }
        ],
        "imageContext": {
          "languageHints": ["en"]
        }
      }
    ]
  };
  const [result] = await client.batchAnnotateImages(request);
  const detections = result.responses[0].fullTextAnnotation;
  console.log(detections.text);
}
quickstart().catch(console.error);
In the above code, I have stored the image in GCS and used the path of that image in the request.
Image: (sample image omitted)
Output:
It was the best of
times, it was the worst
of times, it was the age
of wisdom, it was the
age of foolishness...
If you want to use an image file stored on the local system, you can refer to the code below.
Since your file is on the local system, you first need to convert it to a base64-encoded string and pass that in the request parameters in your code.
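For example, a minimal sketch of producing that base64 string with Node's fs module (the temp.png filename is only an assumption carried over from the earlier snippet):

const fs = require('fs');
// Read the local image and base64-encode it for the "content" field below
const imageContent = fs.readFileSync('temp.png').toString('base64');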
index.js :
async function quickstart() {
  const vision = require('@google-cloud/vision');
  const client = new vision.ImageAnnotatorClient();
  const request = {
    "requests": [
      {
        "image": {
          "content": "/9j/7QBEUGhvdG9...image contents...eYxxxzj/Coa6Bax//Z"
        },
        "features": [
          {
            "type": "TEXT_DETECTION"
          }
        ],
        "imageContext": {
          "languageHints": ["en"]
        }
      }
    ]
  };
  const [result] = await client.batchAnnotateImages(request);
  const detections = result.responses[0].fullTextAnnotation;
  console.log(detections.text);
}
quickstart();

Error trying to set auto back up Firestore, cloud function

I am following this tutorial here: Tutorial
Everything seems OK and it allows me to do everything in the tutorial, but when I run the function I get this error:
textPayload: "TypeError: Cannot read property 'charCodeAt' of undefined
    at peg$parsetemplate (/workspace/node_modules/google-gax/build/src/pathTemplateParser.js:304:17)
    at Object.peg$parse [as parse] (/workspace/node_modules/google-gax/build/src/pathTemplateParser.js:633:18)
    at new PathTemplate (/workspace/node_modules/google-gax/build/src/pathTemplate.js:55:54)
    at segments.forEach.segment (/workspace/node_modules/google-gax/build/src/pathTemplate.js:120:29)
    at Array.forEach (<anonymous>)
    at PathTemplate.render (/workspace/node_modules/google-gax/build/src/pathTemplate.js:114:23)
    at FirestoreAdminClient.databasePath (/workspace/node_modules/@google-cloud/firestore/build/src/v1/firestore_admin_client.js:904:57)
    at exports.scheduledFirestoreExport (/workspace/index.js:13:31)
    at Promise.resolve.then (/layers/google.nodejs.functions-framework/functions-framework/node_modules/@google-cloud/functions-framework/build/src/invoker.js:330:28)
    at process._tickCallback (internal/process/next_tick.js:68:7)
insertId: "000000-8410c5c7-8304-42b6-b2b6-dd55a54e8cab"
resource: {2}
timestamp: "2020-07-11T18:14:35.981Z"
severity: "ERROR"
labels: {1}
logName: "projects/b-b-b-app/logs/cloudfunctions.googleapis.com%2Fcloud-functions"
trace: "projects/b-b-b-app/traces/d7c07a715d0106225d9963ce2a046489"
receiveTimestamp: "2020-07-11T18:14:44.813410062Z"
}
I can't see what the problem may be.
I changed the buckets and the app IDs as asked in the tutorial.
I am on a Blaze plan and can export the database to the bucket manually from the shell with
gcloud firestore export gs://bbbdata-backup
I am using the GCP console on the Firebase site and this code.
const firestore = require('@google-cloud/firestore');
const client = new firestore.v1.FirestoreAdminClient();
const bucket = 'gs://bbbdata-backup'

exports.scheduledFirestoreExport = (event, context) => {
  const databaseName = client.databasePath(
    process.env.GCLOUD_PROJECT,
    '(default)'
  );
  return client
    .exportDocuments({
      name: databaseName,
      outputUriPrefix: bucket,
      // Leave collectionIds empty to export all collections
      // or define a list of collection IDs:
      // collectionIds: ['users', 'posts']
      collectionIds: [],
    })
    .then(responses => {
      const response = responses[0];
      console.log(`Operation Name: ${response['name']}`);
      return response;
    })
    .catch(err => {
      console.error(err);
    });
};
Following the tutorial referred to by the OP, I ran into precisely the same error. Runtime used: Node.js 14.
Root cause of the issue: the value of process.env.GCLOUD_PROJECT is undefined.
Workaround: go to GCP console -> Home and note your Project ID. Replace process.env.GCLOUD_PROJECT with the Project ID string. The Cloud Function will then work as expected.
Note: it appears to be a known issue that the GCLOUD_PROJECT environment variable was missing in the Node.js 10 runtime. This bug report contains a lot of additional pointers: https://github.com/firebase/firebase-functions/issues/437
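A minimal sketch of the hard-coded Project ID workaround described above ('your-project-id' is a placeholder for the Project ID from the console):

const databaseName = client.databasePath(
  'your-project-id', // replace with the Project ID shown on the GCP console Home page
  '(default)'
);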
I had a similar issue last year; probably you are missing some permission. I would do it this way, hope this works for you:
import * as functions from 'firebase-functions'
import { auth } from 'google-auth-library'

export const generateBackup = async () => {
  const client = await auth.getClient({
    scopes: [
      'https://www.googleapis.com/auth/datastore',
      'https://www.googleapis.com/auth/cloud-platform'
    ]
  })
  const path = `YOUR_FOLDER_NAME_FOR_THE_BACKUP`
  const BUCKET_NAME = `YOUR_BUCKET_NAME_HERE`
  const projectId = await auth.getProjectId()
  const url = `https://firestore.googleapis.com/v1beta1/projects/${projectId}/databases/(default):exportDocuments`
  const backup_route = `gs://${BUCKET_NAME}/${path}`
  return client.request({
    url,
    method: 'POST',
    data: {
      outputUriPrefix: backup_route,
      // collectionIds: [] // if you want to specify which collections to export, none means all
    }
  })
    .catch(async (e) => {
      return Promise.reject({ message: e.message })
    })
}
You can then decide what the trigger for this function should be and execute it accordingly.
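For example, a minimal sketch of wiring generateBackup to a scheduled trigger with firebase-functions (the 'every 24 hours' frequency is just an assumption):

import * as functions from 'firebase-functions'
// Run the backup on a schedule; adjust the frequency to your needs
export const scheduledBackup = functions.pubsub
  .schedule('every 24 hours')
  .onRun(() => generateBackup())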
Note: Go to the IAM section of your project and find the App Engine service account; you will need to add the role Cloud Datastore Import Export Admin, otherwise it will fail.
You can read more about it here; it's very detailed.
Cheers.

Timeout error awaiting promise in Lambda?

I am testing a Serverless Lambda function and get a timeout error, which I believe is due to an awaited .promise() call.
module.exports.create = async (event) => {
  const provider = event.requestContext.identity.cognitoAuthenticationProvider
  ....//stuff here where I split auth token to get ids...
  const cognito = new AWS.CognitoIdentityServiceProvider({
    apiVersion: "2016-04-18"
  });
  const getUserParams = {
    UserPoolId: userPoolId,
    Username: userPoolUserId
  };
  const data = JSON.parse(event.body)
  const getUser = await cognito.adminGetUser(getUserParams).promise()
  const params = {
    Item: {
      userId: event.requestContext.identity.cognitoIdentityId,
      email: getUser, //!timeout issue probably here!
      content: data
    }
  };
  try {
    const { Listing } = await connectToDatabase()
    const listing = await Listing.create({userId: params.Item.userId, email: params.Item.email
In researching a solution, I have come across people splitting up the Lambda into two functions so that together they stay under the timeout. I do not know how to reference a Lambda within a Lambda, nor am I sure this is the correct approach.
You can change the timeout for the Lambda function.
The default timeout for a Lambda function is 3 seconds; you can override it below the function code, under Basic settings.
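If you would rather script the change than click through the console, a minimal sketch using the AWS SDK (the function name and region are placeholders):

const AWS = require("aws-sdk");
const lambda = new AWS.Lambda({ region: "us-east-1" });
// Raise the timeout from the 3-second default to 30 seconds
lambda.updateFunctionConfiguration({
  FunctionName: "my-create-function",
  Timeout: 30
}).promise()
  .then(cfg => console.log("New timeout:", cfg.Timeout))
  .catch(console.error);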
For anyone googling this: it turns out adminGetUser needs a NAT gateway configured in order for it to be able to retrieve data from Cognito. I was getting a timeout error because it was not executing, period. Read here: https://aws.amazon.com/premiumsupport/knowledge-center/internet-access-lambda-function/.

Can I run Cognito in a Lambda function?

I want to sign up users with Cognito in a Lambda function. However, I am receiving "TypeError: fetch is not a function".
My code is basically step 3 in this. However, I keep getting the above-mentioned error, even though I have node-fetch installed. From what I understand, the Cognito SDK makes use of fetch. So is this simply not possible? Will I need to spin up a Node server?
const AWS = require("aws-sdk");
const AmazonCognitoIdentity = require("amazon-cognito-identity-js");

// Configuring pool data of Cognito Identity Pool
const poolData = {
  UserPoolId: "us-east-2_aCvZbFzeS",
  ClientId: "4nv2krchr77pbrq3cpk0q0kknu"
};
const userPool = new AmazonCognitoIdentity.CognitoUserPool(poolData);
AWS.config.region = "us-east-2";

const attributeList = [];
attributeList.push(
  new AmazonCognitoIdentity.CognitoUserAttribute({
    Name: "email",
    Value: "sampleEmail@gmail.com"
  })
);
userPool.signUp(
  "sampleEmail@gmail.com",
  "SamplePassword123",
  attributeList,
  null,
  function(err, result) {
    if (err) {
      console.log(err);
      return;
    }
    const cognitoUser = result.user;
    console.log("user name is " + cognitoUser.getUsername());
  }
);
const data = JSON.parse(event.body);
const headers = {
  "Access-Control-Allow-Origin": "*",
  "Access-Control-Allow-Credentials": true
};
const response = {
  statusCode: 200,
  headers: headers,
  "Content-Type": "application/json",
  body: JSON.stringify(data.age)
};
callback(null, response);
};
//I keep receiving this error when attempting to hit the endpoint with Postman:
"errorMessage": "Uncaught error in your 'hello' handler",
"errorType": "TypeError",
"stackTrace": [
"TypeError: fetch is not a function"
You can definitely use Cognito from Lambda! Source: have done it.
You may not be able to use the AWS Cognito JS SDK from Lambda nicely, though.
The AWS Cognito JS SDK appears to be designed for client-side applications, where fetch is a built-in. You have installed node-fetch, but the SDK is not loading it; it doesn't think it needs to, because it expects fetch to be built-in.
I see two options:
If you aren't particularly attached to JS, you could use another language where you are confident that the library is designed and tested for server-side applications.
If you are attached to JS or have a large sunk cost, you could hack up the AWS Cognito JS SDK locally before deploying the code to Lambda to make it require node-fetch or otherwise make it functional server-side.
This thread has a good description of the same issue and some workarounds; probably the best one for you is:
global.fetch = require('node-fetch')
const AmazonCognitoIdentity = require('amazon-cognito-identity-js');
in your script, which should make it appear as a built-in to the SDK's code without hacking up the internals.

How to make calls to elasticsearch apis through NodeJS?

I have been tasked with making a POST API call to an Elasticsearch API:
https://search-test-search-fqa4l6ubylznt7is4d5yxlmbxy.us-west-2.es.amazonaws.com/klove-ddb/recipe/_search
I don't have any previous experience with making api calls to AWS services.
So, I tried this -
axios.post('https://search-test-search-fqa4l6ubylznt7is4d5yxlmbxy.us-west-2.es.amazonaws.com/klove-ddb/recipe/_search')
  .then(res => res.data)
  .then(res => console.log(res));
But I was getting {"Message":"User: anonymous is not authorized to perform: es:ESHttpPost"}.
I also checked out some IAM roles and added the AWSESFullAccess policy to my profile.
Still, I can't make anything work.
Please help me.
The reason you're seeing the error User: anonymous is not authorized to perform: es:ESHttpPost is that you're requesting data without letting Elasticsearch know who you are - this is why it says 'anonymous'.
There are a couple of ways of authenticating, the easiest being the elasticsearch library. With this library you give it a set of credentials (access key, secret key) for the IAM role / user. It uses these to create signed requests. Signed requests let AWS know who's actually making the request, so it won't be treated as anonymous, but rather as yourself.
Another way of getting this to work is to adjust your access policy to be IP-based:
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "AWS": "*"
      },
      "Action": "es:*",
      "Condition": {
        "IpAddress": {
          "aws:SourceIp": [
            "AAA.BBB.CCC.DDD"
          ]
        }
      },
      "Resource": "YOUR_ELASTICSEARCH_CLUSTER_ARN"
    }
  ]
}
This particular policy will be wide open to anyone with the IP (range) that you provide here. It will spare you the hassle of having to sign your requests, though.
A library that helps set up elasticsearch-js with AWS ES is this one.
A working example is the following:
const AWS = require('aws-sdk')
const elasticsearch = require('elasticsearch')
const awsHttpClient = require('http-aws-es')

let client = elasticsearch.Client({
  host: '<YOUR_ES_CLUSTER_ID>.<YOUR_ES_REGION>.es.amazonaws.com',
  connectionClass: awsHttpClient,
  amazonES: {
    region: '<YOUR_ES_REGION>',
    credentials: new AWS.Credentials('<YOUR_ACCESS_KEY>', '<YOUR_SECRET_KEY>')
  }
});

client.search({
  index: 'twitter',
  type: 'tweets',
  body: {
    query: {
      match: {
        body: 'elasticsearch'
      }
    }
  }
})
  .then(res => console.log(res));
The elasticsearch npm package is going to be deprecated soon; use @elastic/elasticsearch and @acuris/aws-es-connection so you don't have to provide IAM credentials to the function.
Here the code, I use:
'use strict';
const { Client } = require('@elastic/elasticsearch');
const { createAWSConnection, awsGetCredentials } = require('@acuris/aws-es-connection');

module.exports.get_es_interests = async event => {
  const awsCredentials = await awsGetCredentials();
  const AWSConnection = createAWSConnection(awsCredentials);
  const client = new Client({
    ...AWSConnection,
    node: 'your-endpoint',
  });
  let bodyObj = {};
  try {
    bodyObj = JSON.parse(event.body);
  } catch (jsonError) {
    console.log('There was an error parsing the JSON Object', jsonError);
    return {
      statusCode: 400
    };
  }
  let keyword = bodyObj.keyword;
  const { body } = await client.search({
    index: 'index-name',
    body: {
      query: {
        match: {
          name: {
            query: keyword,
            analyzer: "standard"
          }
        }
      }
    }
  });
  var result = body.hits.hits;
  return result;
};
Now there's https://github.com/gosquared/aws-elasticsearch-js
Import them:
const AWS = require('aws-sdk');
const ElasticSearch = require('@elastic/elasticsearch');
const { createConnector } = require('aws-elasticsearch-js');
Configure the client using a named profile from ~/.aws/config. You can verify this by running cat ~/.aws/config, which should output something like:
[profile work]
region=ap-southeast-2
[default]
region = ap-southeast-1
const esClient = new ElasticSearch.Client({
  nodes: [
    '<aws elastic search domain here>'
  ],
  Connection: createConnector({
    region: '<region>',
    getCreds: callback =>
      callback(
        null,
        new AWS.SharedIniFileCredentials({ profile: '<target profile>' })
      )
  })
});
Then you can start using it like:
// this query will delete all documents in an index
await esClient.delete_by_query({
  index: '<your index here>',
  body: {
    query: {
      match_all: {}
    }
  }
});
References:
https://github.com/gosquared/aws-elasticsearch-js
https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html
https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/SharedIniFileCredentials.html
