Missing region in AWS rekognition in node js - node.js

//Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//PDX-License-Identifier: MIT-0 (For details, see https://github.com/awsdocs/amazon-rekognition-developer-guide/blob/master/LICENSE-SAMPLECODE.)
const AWS = require('aws-sdk')
const bucket = 'bucket' // the bucket name without the s3:// prefix
const photo_source = 'source.jpg'
const photo_target = 'target.jpg'

// Credentials and region for every SDK call. The original sample built this
// config but never used it, which is exactly why the SDK raised the
// "missing region" error at request time.
const config = new AWS.Config({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_REGION
})

// FIX: pass the config to the client constructor so the region/credentials
// are actually applied (alternatively: AWS.config = config before creating it).
const client = new AWS.Rekognition(config);

// CompareFaces request: both images live in the same S3 bucket.
const params = {
  SourceImage: {
    S3Object: {
      Bucket: bucket,
      Name: photo_source
    },
  },
  TargetImage: {
    S3Object: {
      Bucket: bucket,
      Name: photo_target
    },
  },
  SimilarityThreshold: 70 // only report matches with >= 70% similarity
}

client.compareFaces(params, function (err, response) {
  if (err) {
    console.log(err, err.stack); // an error occurred
  } else {
    // One entry per face in the target image that matched the source face.
    response.FaceMatches.forEach(data => {
      let position = data.Face.BoundingBox
      let similarity = data.Similarity
      console.log(`The face at: ${position.Left}, ${position.Top} matches with ${similarity} % confidence`)
    }) // for response.FaceMatches
  } // if
});
The above code is from the official AWS documentation (https://docs.aws.amazon.com/rekognition/latest/dg/faces-comparefaces.html). The code implements face comparison between two images using Node.js. When comparing images with the above code, an error occurs saying the region is missing from the config. When I checked the code, I noticed that although a config object is created, it is never used anywhere. Can someone tell me where I must use the config object? If the error is caused by some other reason, please tell me what it is.

Related

How to get the DisconnectTimestamp from Amazon Connect call in NodeJS

My call recordings are being pushed at S3 and stored with contactId_timestamp.wav as filename.
For now I can get/download the files by specifically providing the file name as the key. Now I want to build the filename myself as contactId + disconnectTimestamp; I can get the contactId through getContactId(), but how do I get the disconnectTimestamp?
My goal is same what we are experiencing in Contact Flow Search the recordings can be played with respect to contactId.
Here is how i am downloading the recordings from S3.
require("dotenv").config();
const express = require("express");
const app = express();
app.listen(3001);

const aws = require("aws-sdk");
// Global SDK configuration: credentials and region come from the environment.
aws.config.update({
  secretAccessKey: process.env.ACCESS_SECRET,
  accessKeyId: process.env.ACCESS_KEY,
  region: process.env.REGION
})
const BUCKET = process.env.BUCKET

// FIX: the original `new aws.S3(secretAccessKey = ..., accessKeyId = ...)`
// is not named-argument syntax — it assigns implicit globals and passes a
// plain string to the constructor. Credentials are already set globally
// above, so the client needs no arguments.
const s3 = new aws.S3();

// FIX: the route needs a `:` so Express captures the filename as a route
// parameter; the literal path "/download/filename" would never match a real
// recording name, leaving req.params.filename undefined.
app.get("/download/:filename", async (req, res) => {
  const filename = req.params.filename
  // Fetch the recording from S3 and return the raw bytes to the caller.
  let x = await s3.getObject({ Bucket: BUCKET, Key: filename }).promise();
  res.send(x.Body);
})
And then hitting http://localhost:3001/download/0989c085-16d1-478b-8858-1ccddb2990f4_20220303T16:46_UTC.wav
If you have the ContactID for the call you can use describeContact to get the contact info which includes the DisconnectTimestamp.
Something along these lines should work.
const AWS = require('aws-sdk');
// FIX: the module was required as `AWS`, but the original called
// `aws.config.update(...)` — lowercase `aws` is undefined here.
AWS.config.update({
  secretAccessKey: process.env.ACCESS_SECRET,
  accessKeyId: process.env.ACCESS_KEY,
  region: process.env.REGION
})
const connect = new AWS.Connect({ region: process.env.REGION });

var params = {
  ContactId: 'STRING_VALUE', /* required */
  InstanceId: 'STRING_VALUE' /* required - the connect instance ID */
};

connect.describeContact(params, function (err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  // FIX: removed the stray ')' that made this line a syntax error.
  else var DisconnectTimestamp = data.Contact.DisconnectTimestamp; // successful response
});
more info here https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Connect.html#describeContact-property

Download entire S3 bucket recursively, node wrapper

I am trying to sync my local folder with files from an S3 bucket (the full bucket directory structure). I tried using the node-s3-client npm package, but the connection made by the package is failing: the process just exits without any output.
here are the code snippets:
const s3Client = require('s3');

// node-s3-client wraps the AWS SDK; credentials and region come from `config`.
let client = s3Client.createClient({
  s3Options: {
    accessKeyId: config.accessKeyId,
    secretAccessKey: config.secretAccessKey,
    region: config.region,
  },
});

let params = {
  localDir: localdirName,   // local folder to sync the bucket contents into
  deleteRemoved: true,      // delete local files that no longer exist in S3
  s3Params: {
    Bucket: Bname,
  },
};

// downloadDir returns an EventEmitter that reports sync progress.
let downloader = client.downloadDir(params);
downloader.on('error', (err) => {
  throw err;
});
downloader.on('progress', () =>
  console.log('progress: ', downloader.progressAmount, downloader.progressTotal),
);
// FIX: this is a download, not an upload — the original logged "Upload completed!".
downloader.on('end', () => console.log('Download completed!'));
If this can't be resolved, please help me through a workaround, Thanks!

AWS textract methods in node js are not getting invoked

I want to extract text from image using node js so created a lambda in aws. Please find the below code snippet. Issue is that the textract method detectDocumentText is not getting invoked.
As far as permission I had given s3 full access and textract full access to the lambda. Am I missing anything?
var AWS = require("aws-sdk");
var base64 = require("base-64");
var fs = require("fs");
exports.handler = async (event, context, callback) => {
// Input for textract can be byte array or S3 object
AWS.config.region = "us-east-1";
//AWS.config.update({ region: 'us-east-1' });
var textract = new AWS.Textract({ apiVersion: "2018-06-27" });
//var textract = new AWS.Textract();
console.log(textract);
var params = {
Document: {
/* required */
//'Bytes': imageBase64
S3Object: {
Bucket: "717577",
Name: "Picture2.png"
}
}
};
textract.detectDocumentText(params, function(err, data) {
if (err) {
console.log(err); // an error occurred
} else {
console.log(data); // successful response
callback(null, data);
}
});
};
As well as I don't see any error logs in cloudwatch logs.
The problem is that you have marked your method as async, which means it is expected to return a promise. In your case you are neither awaiting nor returning anything, so the handler's promise resolves immediately and Lambda ends the invocation before your callback-style Textract call can complete. You have two choices here:
Remove async
Or more recommended way is to convert your callback style to use promise. aws-sdk support .promise method on all methods so you could leverage that. The code will look like this
var AWS = require("aws-sdk");
var base64 = require("base-64");
var fs = require("fs");
exports.handler = async (event, context) => {
// Input for textract can be byte array or S3 object
AWS.config.region = "us-east-1";
//AWS.config.update({ region: 'us-east-1' });
var textract = new AWS.Textract({ apiVersion: "2018-06-27" });
//var textract = new AWS.Textract();
console.log(textract);
var params = {
Document: {
/* required */
//'Bytes': imageBase64
S3Object: {
Bucket: "717577",
Name: "Picture2.png"
}
}
};
const data = await textract.detectDocumentText(params).promise();
return data;
};
Hope this helps.

Error: Unable to create a POST object policy without a bucket, region, and credentials

I am trying to generate a PresignedPost URL using AWS javascript sdk,
I am getting the following error -
Error: Unable to create a POST object policy without a bucket, region, and credentials
I have AWS credentials configured on my local with S3 access,
In the code am setting the region while creating the S3 Client,
While passing the bucket name as a param to the client method.
Following is the code snippet -
const AWS = require('aws-sdk');
let util = require('util');

// S3 client; credentials come from the local AWS profile, region is explicit.
let s3Client = new AWS.S3({
  region: 'us-east-1'
});

let postSignedUrl = async () => {
  try {
    let postSigningParams = {
      Expires: 60,
      Bucket: "some-bucket-name",   // FIX: the closing quote was missing
      Conditions: [["content-length-range", 100, 10000000]],
      Fields: {
        key: 'test/image.jpg'
      }
    }
    let s3createPresignedPost = util.promisify(s3Client.createPresignedPost).bind(s3Client);
    // FIX: createPresignedPost takes only the params object — unlike
    // getSignedUrl, there is no operation-name argument. Passing 'putObject'
    // first made the SDK treat that string as the params, hence "Unable to
    // create a POST object policy without a bucket, region, and credentials".
    let postSignedUrl = await s3createPresignedPost(postSigningParams);
    console.log('postSigningParams => ', postSignedUrl);
  } catch (error) {
    console.error(error);
  }
}

postSignedUrl();
Error stack trace -
Error: Unable to create a POST object policy without a bucket, region, and credentials
at features.constructor.preparePostFields (/Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/servi
ces/s3.js:943:13)
at finalizePost (/Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/services/s3.js:906:22)
at /Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/services/s3.js:923:24
at finish (/Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/config.js:349:7)
at /Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/config.js:367:9
at SharedIniFileCredentials.get (/Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/credentials.js:
127:7)
at getAsyncCredentials (/Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/config.js:361:24)
at Config.getCredentials (/Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/config.js:381:9)
at features.constructor.createPresignedPost (/Users/aniruddhanarendraraje/Documents/work/pocs/node-crud-app/snippets/node_modules/aws-sdk/lib/ser
vices/s3.js:918:14)
at Promise (internal/util.js:274:30)
I tried the following, but didn't work -
AWS.config.update({region:'us-east-1'});
let s3Client = new AWS.S3();
A similar AWS S3 sdk method works -
// Generates a presigned PUT URL for the same object key. Note that
// getSignedUrl — unlike createPresignedPost — does take the operation name
// ('putObject') as its first argument.
let presignedUrl = async () => {
  const opts = {
    Bucket: 'some-bucket-name',
    Key: 'test/image.jpg',
    Expires: 60
  }
  const promisifiedGetSignedUrl = util.promisify(s3Client.getSignedUrl).bind(s3Client);
  const url = await promisifiedGetSignedUrl('putObject', opts);
  console.log('signedUrl => ', url);
}
Did a stupid mistake,
was passing in putObject as param in createPresignedPost method call by mistake
let postSignedUrl = await s3createPresignedPost('putObject',postSigningParams);
should be just -
let postSignedUrl = await s3createPresignedPost(postSigningParams);

nodejs aws s3 replace files

Im trying to upload a folder from a local directory to an AWS S3 Bucket.
I have the following code.
// FIX: `AWS` is used below but was never required — only the 's3' wrapper
// was, so this script would throw a ReferenceError on the next line.
var AWS = require('aws-sdk');
var s3 = require('s3');

var awsS3Client = new AWS.S3({
  accessKeyId: 'XXXXXXX',
  secretAccessKey: 'XXXXXXX'
});

// Hand the pre-configured AWS client to the node-s3-client wrapper.
var options = {
  s3Client: awsS3Client
};
var client = s3.createClient(options);

var params = {
  localDir: "./zips",
  deleteRemoved: true, // default false, whether to remove s3 objects
  // that have no corresponding local file.
  s3Params: {
    Bucket: "node-files",
    Prefix: "test/unzip/"
  },
};

// uploadDir returns an EventEmitter reporting the sync's progress.
var uploader = client.uploadDir(params);
uploader.on('error', function (err) {
  console.error("unable to sync:", err.stack);
});
uploader.on('progress', function () {
  console.log("progress", uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function () {
  console.log("done uploading");
});
All works fine when uploading for the first time, the directory and all of its files are in tact and in the bucket.
However when i try a second time, the buffer just gets stuck and times out.
I'm assuming I either need to set some kind of option to overwrite the existing files?

Resources