How to return prematurely from lambda - node.js

I'm trying to trigger a lambda when I drop a new file in a bucket.
This code is working as in it's detecting the file and send the info to my API.
I'm also trying to ignore every file not named "text.txt", but I can't figure out how to return from the lambda inside that `includes` block.
const http = require('http');
exports.handler = async (event, context) => {
return new Promise((resolve, reject) => {
const srcRegion = event.Records[0].awsRegion;
const srcEventTime = event.Records[0].eventTime;
const srcEventName = event.Records[0].eventName;
const srcIP = event.Records[0].requestParameters.sourceIPAddress;
const srcBucket = event.Records[0].s3.bucket.name;
const srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
const srcETag = event.Records[0].s3.object.eTag;
if (!srcKey.includes('text.txt')) {
resolve('Not original file');
}
const data = JSON.stringify({
lambda: {
src_bucket: srcBucket,
src_key: srcKey,
src_region: srcRegion,
src_event_time: srcEventTime,
src_event_name: srcEventName,
src_ip: srcIP,
src_etag: srcETag
}
});
const options = {
host: '*****************'
path: '*****************'
port: '*****************'
method: '*****************'
headers: '*****************'
};
const req = http.request(options, (res) => {
res.on('data', function(d) {
console.log(d);
});
});
req.on('error', (e) => {
// reject(e.message);
resolve('Error');
});
// send the request
req.write(data);
req.end();
resolve('Success');
});
};

Try this inside the includes block: context.done(undefined, 'Done.')

Related

How to download file from gitlab synchronously using NodeJS

I need to download a file from a private gitlab server and I need the method to be synchronous. This was my previous async code and it works fine because I was using promises. But I'm having trouble converting it to synchronous. The other posts I've seen on SO either ended up using async code or didn't have options for headers.
const https = require('https');
const fs = require('fs');
const gitlabUrl = 'https://gitlab.custom.private.com';
// Fixed typo: this was declared as `gitlabAcessToken`, but the download
// function below references `gitlabAccessToken`, causing a ReferenceError.
const gitlabAccessToken = 'xmyPrivateTokenx';
const gLfilePath = '/api/v4/projects/1234/repository/files/FolderOne%2Ftest.txt/raw?ref=main';
const gLfileName = 'test.txt';
/**
 * Downloads a raw file from the private GitLab server to `fileName`.
 * @param {string} filePath - GitLab API path of the raw file.
 * @param {string} fileName - Local destination file name.
 * @returns {Promise<void>} resolves once the file is fully written to disk.
 */
function downloadFileFromGitlab(filePath, fileName) {
  return new Promise((resolve, reject) => {
    var options = {
      path: filePath,
      headers: {
        'PRIVATE-TOKEN': gitlabAccessToken
      }
    };
    var url = gitlabUrl;
    var file = fs.createWriteStream(fileName);
    const request = https.get(url, options, (response) => {
      response.pipe(file);
      file.on('finish', () => {
        file.close();
        resolve();
      });
      file.on('error', (err) => {
        file.close();
        reject(err);
      });
    });
    request.on('error', (error) => {
      // Reject instead of throwing: a `throw` inside this callback cannot
      // be caught by the surrounding Promise and would crash the process.
      reject(error);
    });
  });
}
// Handle the rejection: without a .catch, a failed download becomes an
// unhandled promise rejection (fatal in recent Node versions).
downloadFileFromGitlab(gLfilePath, gLfileName).catch((err) => console.error(err));
I was able to figure it out using curl
/**
 * Synchronously downloads a file from GitLab by shelling out to curl.
 * @param {string} filePath - GitLab API path of the raw file.
 * @param {string} fileName - Local destination file name.
 */
function downloadFileFromGitlab(filePath, fileName) {
  // Pass arguments as an array (execFileSync) instead of interpolating them
  // into a shell command string: the original string-built command was
  // vulnerable to shell injection via filePath and broke on special chars.
  const file = child_process.execFileSync('curl', [
    '-s',
    gitlabUrl + filePath,
    '-H',
    'PRIVATE-TOKEN:' + gitlabAccessToken,
  ]);
  fse.writeFileSync(fileName, file);
}

How can I upload multiple images to an s3 bucket in a lambda function using node.js?

I am not very familiar with node and trying to upload an array of media objects to an s3 bucket using an AWS Lambda node function.
the payload has an album which is an array of key/data dictionaries. My code is as below but I'm certain this is wrong.
const awsServerlessExpress = require('aws-serverless-express');
const app = require('./app');
const server = awsServerlessExpress.createServer(app);
const AWS = require("aws-sdk");
// Shared AWS clients, created once per container so warm invocations reuse
// them. (docClient is not used in the handler shown below.)
const docClient = new AWS.DynamoDB.DocumentClient();
var s3 = new AWS.S3();
// Default S3 upload parameters. NOTE(review): the handler below reassigns
// s3Params with only Bucket/Key/Body, so the ContentEncoding/ContentType
// defaults set here are never actually sent — probably unintended.
var s3Params = {
Bucket: 'bucketid',
ContentEncoding: 'base64',
ContentType: 'image/jpeg'
};
exports.handler = async (event, context) => {
console.log(event);
var body = JSON.parse(event.body);
if (typeof body.album !== 'undefined' && body.album) {
body.album.forEach(function (value) {
var data = body.album.mediaString;
let mediaData = new Buffer(data, 'base64');
var mediaKey = body.album.mediaKey;
try {
s3Params = {
Bucket: 'bucketID',
Key: mediaKey,
Body: mediaData
};
try {
const stored = await s3.upload(s3Params).promise();
console.log("stored successfully");
return { body: JSON.stringify(data) };
} catch (err) {
console.log("error storing");
console.log(err);
return { error: err };
}
} catch (err) {
return { error: err };
}
});
return { body: JSON.stringify(data) };
} else {
return { error: 'error'};
}
};
I get an error that s3 is not found. Just wondering if I'm going about this all wrong.
When I only upload one image with the following code everything works fine:
const awsServerlessExpress = require('aws-serverless-express');
const app = require('./app');
const server = awsServerlessExpress.createServer(app);
const AWS = require("aws-sdk");
// Shared AWS clients, created once per container (reused across warm starts).
const docClient = new AWS.DynamoDB.DocumentClient();
var s3 = new AWS.S3();
// NOTE(review): the handler below reassigns s3Params with only
// Bucket/Key/Body, so the ContentEncoding/ContentType defaults declared
// here are never actually sent with the upload.
var s3Params = {
Bucket: 'bucketID',
ContentEncoding: 'base64',
ContentType: 'image/jpeg'
};
exports.handler = async (event, context) => {
var body = JSON.parse(event.body);
var data = body.mediaString;
let mediaData = new Buffer(data, 'base64');
var mediaKey = body.mediaKey;
try {
s3Params = {
Bucket: 'bucketID',
Key: mediaKey,
Body: mediaData
};
try {
const stored = await s3.upload(s3Params).promise();
console.log("stored successfully");
return { body: JSON.stringify(data) };
} catch (err) {
console.log("error storing");
console.log(err);
return { error: err };
}
} catch (err) {
return { error: err };
}
};

AWS Lambda Custom Nodejs Container Shows Runtime Error

I have built a AWS Lambda function with custom container image. I am trying to convert an excel file to pdf with Libreoffice - getting the file from S3 and saving it to a file and converting it to pdf and then uploading it back to S3.
Here the code.
const fs = require('fs');
const getStream = require('get-stream');
const { Readable } = require('stream')
// Fixed module name: "#aws-sdk/client-s3" is unresolvable — the scoped
// package is "@aws-sdk/client-s3".
const { S3Client, GetObjectCommand, PutObjectCommand } = require("@aws-sdk/client-s3");
const libre = require('libreoffice-convert');
const path = require('path');
exports.handler = async (event) => {
const bucket = event.queryStringParameters.bucket;
const file = event.queryStringParameters.file;
const convertedFile = event.queryStringParameters.convertedFile;
if (event.queryStringParameters['warmup'] !== undefined) {
return {
result: true,
message: 'warmed up'
}
}
const client = new S3Client({ region: "ap-south-1" });
const command = new GetObjectCommand({ Bucket: bucket, Key: file });
const response = await client.send(command);
const objectData = response.Body;
const writeStream = fs.createWriteStream("/tmp/sample.xlsx");
objectData.pipe(writeStream);
var end = new Promise((resolve, reject) => {
objectData.on('close', resolve(true));
objectData.on('end', resolve(true));
objectData.on('error', reject(false));
});
let completed = await end;
if (completed) {
const extend = '.pdf'
const outputPath = `/tmp/sample${extend}`;
const enterPath = '/tmp/sample.xlsx';
var readingFile = new Promise((resolve, reject) => {
fs.readFile(enterPath, (err, data)=>{
if (err) {
reject(false);
}
resolve(data);
});
});
var fileData = await readingFile;
var converting = new Promise((resolve, reject) => {
libre.convert(fileData, extend, undefined, (err, done) => {
if (err) {
reject(false)
}
fs.writeFileSync(outputPath, done);
resolve(true)
});
})
var converted = await converting;
if (converted) {
var convertedFileStream = fs.createReadStream(outputPath);
const uploadCommand = new PutObjectCommand({ Bucket: bucket, Key: convertedFile, Body: convertedFileStream });
const lastResponse = await client.send(uploadCommand);
const returnResponse = {
result: true,
message: 'success',
bucket: event.queryStringParameters.bucket,
file: event.queryStringParameters.file,
convertedFile: event.queryStringParameters.convertedFile
};
if (event.queryStringParameters['returnEvent'] !== undefined) {
returnResponse['returnEvent'] = event;
}
return returnResponse;
}
}
return completed;
};
However, I am getting this error at time. Sometimes, it is success, but, sometimes it throws this error.
{
"errorType": "Error",
"errorMessage": "false",
"stack": [
"Error: false",
" at _homogeneousError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:56:16)",
" at postError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:72:34)",
" at done (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:99:13)",
" at fail (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:113:13)",
" at /function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:148:24",
" at processTicksAndRejections (internal/process/task_queues.js:97:5)"
]
}
I don't know Node.js in great depth, so I suspect the code is not written the correct way. Any ideas what I am doing wrong here?
Like @hoangdv suggested, when I logged errors I came to know that saving the file to disk was not working correctly. So I changed that area of the code to the following, and then it worked.
// Download the object and stream it to disk, waiting until the file handle
// is fully closed before reading the spreadsheet back into memory.
const client = new S3Client({ region: "ap-south-1" });
const { Body } = await client.send(new GetObjectCommand({ Bucket: bucket, Key: file }));
await new Promise((resolve, reject) => {
  const sink = fs.createWriteStream(filePath);
  Body.pipe(sink);
  sink.on('error', (err) => reject(err));
  sink.on('close', () => resolve());
});
const excelFile = fs.readFileSync(filePath);

Not going into AWS HttpClient.handleRequest to elasticsearch in lambda, Nodejs

I know this same question was basically asked and answered, however, trying to implement the answer did not get it to work. Here is the original question: AWS.HttpClient handleRequest is not working in AWS lambda
I tried putting async/await on multiple different portions of the request, but none of them worked as mentioned in one of the comments in the referred to link.
The situation is that I have a lambda function that listens for events in the S3 buckets; when an event happens, it is supposed to index the documents in the Elasticsearch service. The issue happens when the PUT request is sent to es.
I have done the test event with an S3 bucket and it WORKS, but for some reason it will hang/not go into the handleRequest function when I run an actual event to my S3 bucket.
Here is my code:
Index.js
const AWS = require('aws-sdk');
const s3 = new AWS.S3()
const elastic_client = require('elastic.js');
exports.handler = async (event, context) => {
const Bucket = event.Records[0].s3.bucket.name;
const Key = event.Records[0].s3.object.key;
const data = await s3.getObject({ Bucket, Key }).promise();
for (const quote_doc of data.Body) {
elastic_client.indexQuote(quote_doc);
}
}
elastic.js
var AWS = require('aws-sdk');
require('dotenv').config();
var region = process.env.AWS_REGION;
var domain = process.env.AWS_ELASTIC_DOMAIN;
/**
 * Indexes a single quote document into the `quotes` index via a signed
 * PUT request to the Elasticsearch domain.
 * Returns a Promise so callers can (and must) await completion — the
 * original fired handleRequest and returned immediately, letting the Lambda
 * runtime freeze the container before the request went out.
 * @param {object} quote - document to index; `QuoteId` is used as the doc id.
 * @returns {Promise<string>} resolves with the raw response body.
 */
function indexQuote(quote) {
    var endpoint = new AWS.Endpoint(domain);
    var request = new AWS.HttpRequest(endpoint, region);
    var index = 'quotes';
    var type = '_doc';
    var id = quote.QuoteId;
    request.method = 'PUT';
    request.path += index + '/' + type + '/' + id;
    request.body = JSON.stringify(quote);
    request.headers['host'] = domain;
    request.headers['Content-Type'] = 'application/json';
    request.headers['Content-Length'] = Buffer.byteLength(request.body);
    var credentials = new AWS.EnvironmentCredentials('AWS');
    credentials.accessKeyId = process.env.AWS_ACCESS_KEY_ID;
    credentials.secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
    var signer = new AWS.Signers.V4(request, 'es');
    signer.addAuthorization(credentials, new Date());
    var client = new AWS.HttpClient();
    return new Promise((resolve, reject) => {
        client.handleRequest(request, null, function(response) {
            console.log(response.statusCode + ' ' + response.statusMessage);
            var responseBody = '';
            response.on('data', function (chunk) {
                responseBody += chunk;
            });
            response.on('end', function (chunk) {
                console.log('Response body: ' + responseBody);
                resolve(responseBody);
            });
        }, function(error) {
            console.log('Error: ' + error);
            reject(error);
        });
    });
}
The confusing part for me is that it works fine when i do a test event, and it works fine when I index it locally on my own computer, but then just doesn't go into the handleRequest. Any help/direction is appreciated, thank you.
Edit:
package.json
{
"dependencies": {
"aws-sdk": "*",
"aws-xray-sdk": "^3.2.0",
"dotenv": "^8.2.0"
}
}
Try wrapping the handleRequest function inside a Promise. Your function indexQuote() would look almost the same, but at the end it would return a Promise
// Promise-wrapped version of indexQuote: the request-building body is elided
// here ("..."); only the ending changes, returning a Promise that settles
// when handleRequest completes so callers can await it.
// NOTE(review): this snippet is missing the function's closing brace.
function indexQuote(quote) {
...
return new Promise((resolve, reject) => {
client.handleRequest(request, null,
response => {
// Accumulate the streamed response body before resolving.
const { statusCode, statusMessage, headers } = response;
let body = '';
response.on('data', chunk => {
body += chunk;
});
response.on('end', () => {
const data = {
statusCode,
statusMessage,
headers
};
if (body) {
data.body = body;
}
resolve(data);
});
},
err => {
// Propagate transport errors to the awaiting caller.
reject(err);
});
});
And then you can await and inspect the result:
const result = await indexQuote(quote);
console.log("Index result: " + result);

How to send back the data got from response.on('end') to the client-side

I'm new to NodeJs and I'm having the problem with response.on('end') I still can't find out the method to send the data I got from the response to the client side.
exports.getCheckoutSession = catchAsync(async (req, res, next) => {
const uuidv1 = require('uuid/v1');
const https = require('https');
const tour = await Tour.findById(req.params.tourId);
console.log(tour);
//parameters send to MoMo get get payUrl
var endpoint = 'https://test-payment.momo.vn/gw_payment/transactionProcessor';
var hostname = 'https://test-payment.momo.vn';
var path = '/gw_payment/transactionProcessor';
var partnerCode = 'MOMO';
var accessKey = 'accessKey';
var serectkey = 'secretKey';
var orderInfo = 'pay with MoMo';
var returnUrl = 'https://momo.vn/return';
var notifyurl = 'https://callback.url/notify';
var amount = (tour.price * 23000).toString();
console.log(amount);
var orderId = req.params.tourId;
var requestId = req.params.tourId;
var requestType = 'captureMoMoWallet';
var extraData = 'merchantName=;merchantId='; //pass empty value if your merchant does not have stores else merchantName=[storeName]; merchantId=[storeId] to identify a transaction map with a physical store
//before sign HMAC SHA256 with format
//partnerCode=$partnerCode&accessKey=$accessKey&requestId=$requestId&amount=$amount&orderId=$oderId&orderInfo=$orderInfo&returnUrl=$returnUrl&notifyUrl=$notifyUrl&extraData=$extraData
var rawSignature =
'partnerCode=' +
partnerCode +
'&accessKey=' +
accessKey +
'&requestId=' +
requestId +
'&amount=' +
amount +
'&orderId=' +
orderId +
'&orderInfo=' +
orderInfo +
'&returnUrl=' +
returnUrl +
'&notifyUrl=' +
notifyurl +
'&extraData=' +
extraData;
//puts raw signature
console.log('--------------------RAW SIGNATURE----------------');
console.log(rawSignature);
//signature
const crypto = require('crypto');
var signature = crypto
.createHmac('sha256', serectkey)
.update(rawSignature)
.digest('hex');
console.log('--------------------SIGNATURE----------------');
console.log(signature);
//json object send to MoMo endpoint
var body = JSON.stringify({
partnerCode: partnerCode,
accessKey: accessKey,
requestId: requestId,
amount: amount,
orderId: orderId,
orderInfo: orderInfo,
returnUrl: returnUrl,
notifyUrl: notifyurl,
extraData: extraData,
requestType: requestType,
signature: signature
});
//Create the HTTPS objects
var options = {
hostname: 'test-payment.momo.vn',
port: 443,
path: '/gw_payment/transactionProcessor',
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Content-Length': Buffer.byteLength(body)
}
};
//Send the request and get the response
console.log('Sending....');
var req = https.request(options, res => {
console.log(`Status: ${res.statusCode}`);
console.log(`Headers: ${JSON.stringify(res.headers)}`);
console.log('Type of body', JSON.stringify(res.body));
res.setEncoding('utf8');
let fullBody = '';
res.on('data', body => {
fullBody += body;
console.log(' Real Body');
console.log(fullBody);
//console.log('Type of body', body.payUrl);
// console.log(JSON.parse(body).payUrl);
// res.redirect(JSON.parse(body).payUrl);
});
res.on('end', () => {
const payURL = JSON.parse(fullBody).payUrl;
console.log('payUrl', payURL);
console.log('No more data in response.');
});
});
req.on('error', e => {
console.log(`problem with request: ${e.message}`);
});
// write data to request body
req.write(body);
req.end();
});
This is the url I got from response
payUrl https://test-payment.momo.vn/gw_payment/payment/qr?partnerCode=MOMO&accessKey=F8BBA842ECF85&requestId=5f38cc86954a6206211e2842&amount=23000&orderId=5f38cc86954a6206211e2842&signature=37ae247d56efd9ed6630b7d7d1435b88ffb8895956da5711a62ebbab8118aa7b&requestType=captureMoMoWallet
Can you please tell me how I could send the data from res.on('end') — the payURL shown above — to the client side? I have tried some methods like res.writeHead, res.send, res.json(), but they all returned an error: res.send / res.writeHead / res.json... is not a function.
This is my client-side code. If you don't mind, please also show me how to automatically redirect to the payURL above when the client clicks my button. Should I keep using window.location.replace like above?
/**
 * Starts the checkout flow for a tour: POSTs to the bookings endpoint,
 * reads the JSON reply ({ payURL }) and redirects the browser to it.
 */
export const bookTour = async tourId => {
  try {
    const res = await fetch(
      `http://localhost:3000/api/v1/bookings/checkout-session/${tourId}`,
      {
        method: 'POST',
        body: 'a=1'
      }
    );
    // A fetch Response has no `redirectURL` property — the old code
    // redirected to `undefined`. Parse the JSON body the server sends.
    const { payURL } = await res.json();
    console.log('The res', res);
    window.location.replace(payURL);
  } catch (err) {
    showAlert('error', err);
  }
};
This is my index.js
// Kick off the booking flow when the "book tour" button is clicked.
if (bookBtn) {
  bookBtn.addEventListener('click', (event) => {
    const button = event.target;
    button.textContent = 'Processing...';
    const { tourId } = button.dataset;
    bookTour(tourId);
  });
}
You're shadowing the req/res-variables from your getCheckoutSession-handler by using the same names for your http-request. If you change it to:
// Renaming the inner HTTP request/response means they no longer shadow the
// Express req/res from getCheckoutSession — the shadowing is why
// res.send/res.json appeared to be "not a function" in the callbacks.
const request = https.request(options, response => {
// ...
let fullBody = '';
response.on('data', body => {
fullBody += body;
});
response.on('end', () => {
const payURL = JSON.parse(fullBody).payUrl;
// access the handler "res" object here
res.send(payURL);
// alternatively use res.json({payURL}) to send a json response
});
});
it should work fine.
Note: Nowadays you should definitely use const/let instead of var (see this for more information)
Simple,
// NOTE(review): in the asker's original code `res` here is the *http*
// response object (shadowing the Express one), and http.IncomingMessage has
// no `.json()` method — this only works if the inner request/response
// variables are renamed as shown in the previous answer.
res.on('end', () => {
const payURL = JSON.parse(fullBody).payUrl;
res.json({
payURL: payURL
})
});
or other way
// NOTE(review): same caveat as above — `res.status(...).send(...)` exists on
// the Express response, not on the http response that shadows it here;
// rename the inner variables first.
res.on('end', () => {
const payURL = JSON.parse(fullBody).payUrl;
res.status(200).send({
payURL: payURL
});
});

Resources