Create password-protected PDF file in Lambda (Node.js)

I have a problem in AWS Lambda, using Node.js, with adding a password to a copied PDF file.
const aws = require("aws-sdk");
const fs = require("fs");
const QPDF = require("node-qpdf");

const s3 = new aws.S3();

exports.handler = async (event) => {
  const params = {
    Bucket: "BucketName",
    Key: "key"
  };
  const s3Object = await s3.getObject(params).promise();
  fs.writeFileSync('/tmp/test.pdf', s3Object.Body.toString('base64'), { 'encoding': 'base64' });

  var options = {
    keyLength: 128,
    password: 'abc123',
    restrictions: {
      print: 'low',
      useAes: 'y'
    }
  }

  QPDF.encrypt('/tmp/test.pdf', options, (err) => {
    if (err) console.log(err, err.stack);
  });

  fs.exists('/tmp/test.pdf', function (exists) {
    console.log(exists);
    const file = fs.readFileSync('/tmp/test.pdf');
    console.log(file);
    const params = {
      Bucket: "BucketName",
      Key: "test.pdf",
      Body: file
    };
    s3.upload(params, (err, data) => {
      if (err) console.log(err);
      console.log(data);
    });
  });
};
This is my code. I could copy my PDF file from S3 and successfully upload the copied PDF back to S3. As the next step, I wanted to add a password to that copied PDF file, so I used this code:
var options = {
  keyLength: 128,
  password: 'abc123',
  restrictions: {
    print: 'low',
    useAes: 'y'
  }
}

QPDF.encrypt('/tmp/test.pdf', options, (err) => {
  if (err) console.log(err, err.stack);
});
But there is an error.
Response:
{
  "errorType": "Error",
  "errorMessage": "/bin/sh: qpdf: command not found\n",
  "trace": [
    "Error: /bin/sh: qpdf: command not found",
    "",
    "    at Socket.<anonymous> (/opt/nodejs/node_modules/node-qpdf/index.js:124:17)",
    "    at Object.onceWrapper (events.js:300:26)",
    "    at Socket.emit (events.js:210:5)",
    "    at Socket.EventEmitter.emit (domain.js:476:20)",
    "    at addChunk (_stream_readable.js:308:12)",
    "    at readableAddChunk (_stream_readable.js:289:11)",
    "    at Socket.Readable.push (_stream_readable.js:223:10)",
    "    at Pipe.onStreamRead (internal/stream_base_commons.js:182:23)"
  ]
}
I installed the qpdf and node-qpdf npm packages and checked that they were installed. What is the problem?

QPDF is a command-line program; you'll need to have it installed system-wide before using it.
I tested your code on Ubuntu after installing QPDF and it worked. You can refer to the node-qpdf repository for instructions for other systems.
sudo apt-get install qpdf

You need to build a standalone qpdf package and add it to the zip you upload to AWS Lambda. Here is more information on what needs to be done to generate the package: https://github.com/qpdf/qpdf/issues/352
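For illustration, here is a minimal sketch of how the function could find such a bundled binary; the bin/qpdf location inside the zip is an assumption, not something from the linked issue:
// Sketch: assumes the statically built qpdf binary was added to the deployment zip
// under bin/qpdf (and marked executable before zipping), so at runtime it lives at
// /var/task/bin/qpdf. node-qpdf shells out to `qpdf`, so putting that directory on
// PATH lets it find the binary.
process.env.PATH = `${process.env.PATH}:${process.env.LAMBDA_TASK_ROOT}/bin`;

const QPDF = require("node-qpdf");
// ...then call QPDF.encrypt('/tmp/test.pdf', options, callback) as in the question.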

You need to have the QPDF command line program in the run time. AWS Lambda has concept of AWS Layers which gives ability to solve these kind of issues. You can basically upload your program as zip on AWS Layer and then while creating the Lambda function you can give the reference of the layer created.
You can read more about it here -
https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html
Good things is you can keep your code separate and your command line program separate.
Layers can also be shared across different lambda functions.
Hope this helps.
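As a quick sanity check, here is a minimal sketch (assuming the layer zip contains bin/qpdf, so it is extracted to /opt/bin/qpdf at runtime; /opt/bin is already on the Lambda runtime's PATH):
const { execSync } = require('child_process');

exports.handler = async (event) => {
  // Confirm the qpdf binary provided by the layer is reachable before node-qpdf shells out to it.
  console.log(execSync('qpdf --version').toString());
  // ...the QPDF.encrypt(...) call from the question should now find the binary.
};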

Related

How to resolve the "ENOENT: no such file or directory, open '/index.js'" issue occurring in a Lambda function?

I have an API written in Node.js which was working fine with async calls, then suddenly started throwing the error below after a recent code deployment.
2022-08-01T12:11:41.332Z d759bb19-f1d3-4fd7-8c5f-852fb41afe6a ERROR Unhandled Promise Rejection
{
  "errorType": "Runtime.UnhandledPromiseRejection",
  "errorMessage": "Error: ENOENT: no such file or directory, open '/index.js'",
  "reason": {
    "errorType": "Error",
    "errorMessage": "ENOENT: no such file or directory, open '/index.js'",
    "code": "ENOENT",
    "errno": -2,
    "syscall": "open",
    "path": "/index.js",
    "stack": [
      "Error: ENOENT: no such file or directory, open '/index.js'"
    ]
  },
  "promise": {},
  "stack": [
    "Runtime.UnhandledPromiseRejection: Error: ENOENT: no such file or directory, open '/index.js'",
    "    at process.<anonymous> (/var/runtime/index.js:35:15)",
    "    at process.emit (events.js:314:20)",
    "    at process.emit (/var/task/src/api/ecmAuthApi/webpack:/Project/node_modules/source-map-support/source-map-support.js:516:1)",
    "    at processPromiseRejections (internal/process/promises.js:209:33)",
    "    at processTicksAndRejections (internal/process/task_queues.js:98:32)"
  ]
}
And below is my relevant source code
// service.js
const getUserPermissions = async (token, correlationId) => {
  const testUserBucketParams = {
    Bucket: authConstants.META_DATA_TEMPLATE_S3_BUCKET_NAME,
    Key: authConstants.TEST_USERS,
  };
  try {
    const testUsersFile = await s3Connector
      .getData(testUserBucketParams, { awsRequestId: correlationId });
    const testUsersMapping = getJson(testUsersFile);
    const payloadData = await Promise.resolve(
      validateToken(
        token, authConstants.LIBERTY_USER_POOL_JWK_URL, testUsersMapping, correlationId,
      ),
    );
    return await Promise.resolve(getDataFromDB(payloadData, correlationId));
  } catch (error) {
    return 1;
  }
};
And here is the util function:
// utils
import AWS from 'aws-sdk';
import { validateS3Fetch } from '../util/commonUtil';

const s3 = new AWS.S3();

const getData = async (params, context) => {
  const file = await s3.getObject(params).promise(); // the error occurs here
  validateS3Fetch(file, params, context);
  return file.Body.toString('utf-8');
};

export default { getData };
FYI, this implementation was working perfectly until I redeployed the code today, and there were no changes in package.json either. It works fine locally.
What am I missing here? Is this an issue with AWS Lambda?
There was nothing wrong with the implementation. My code was instrumented with Datadog, and its API key had expired. After updating the key, it works fine both locally and remotely :)

Error: read ECONNRESET for DynamoDB put request using DocumentClient

I am trying to create a new item in my DynamoDB table using the DocumentClient put function, but I am getting an error that references ECONNRESET. When others have mentioned ECONNRESET on Stack Overflow, it seems it was a proxy issue for them. I am not sure how I would go about debugging this, though.
Here are the docs I have been using:
https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/dynamodb-example-document-client.html
https://docs.amplify.aws/guides/functions/dynamodb-from-js-lambda/q/platform/js/
Here is the code
import AWS from 'aws-sdk';

AWS.config.update({ region: 'us-east-1' });

const docClient = new AWS.DynamoDB.DocumentClient({ apiVersion: '2012-08-10' });

export const createItem = async (tableName, item) => {
  const params = {
    TableName: tableName,
    Item: item
  };
  console.log(params);
  try {
    await docClient.put(params).promise();
    console.log("Success");
  } catch (err) {
    console.log(err);
  }
};
And here is the error I get:
Error: read ECONNRESET
at TLSWrap.onStreamRead (internal/stream_base_commons.js:209:20) {
errno: -4077
code: 'TimeoutError',
syscall: 'read',
time: 2021-09-25T12:30:23.577z,
region: 'us-east-1',
hostname: 'dynamodb.us-east-1.amazonaws.com',
retryable: true
}
Screenshot of code and terminal:
https://i.stack.imgur.com/f4JvP.png
Somebody helped me out. I was using a company CLI via a proxy to do manual local testing. I had to run this command in the CLI: pc login aws --shared-credentials (which is pretty specific to where I work).
I also had to include this code:
const proxy = require('proxy-agent');

AWS.config.update({
  httpOptions: {
    agent: proxy(process.env.HTTP_PROXY)
  }
});
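One detail worth noting (a hedged sketch, not part of the original answer): in AWS SDK for JavaScript v2, clients pick up the global configuration when they are constructed, so the proxy agent should be applied before the DocumentClient is created. Assuming HTTP_PROXY is set by the corporate environment:
const AWS = require('aws-sdk');
const proxy = require('proxy-agent');

// Apply region and proxy settings first...
AWS.config.update({
  region: 'us-east-1',
  httpOptions: { agent: proxy(process.env.HTTP_PROXY) } // HTTP_PROXY assumed to be set externally
});

// ...then construct the client so it inherits them.
const docClient = new AWS.DynamoDB.DocumentClient({ apiVersion: '2012-08-10' });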

Failure code 4 while uploading file to SFTP server using Node.js

I have written code to establish an SFTP connection and transfer files to the SFTP server using the Node.js sftp.put command. I'm getting the following error while transferring the file. I can establish the connection successfully, but I cannot read/write files on the server. I have attached the code below.
Code
let sftp = new client();
let filename = "sound.mp3";
const filePath = path.join(__dirname, '../audio', filename);

const putConfig = {
  flags: 'w',       // w - write and a - append
  encoding: null,   // use null for binary files
  mode: 0o666,      // mode to use for created file (rwx)
  autoClose: true   // automatically close the write stream when finished
};

sftp.connect({
  host: 'host',
  port: '22',
  username: 'xxxx',
  password: 'xxx'
}).then(() => {
  return sftp.put(filePath, '/', putConfig);
}).then(data => {
  console.log(data, 'the data info');
}).catch(err => {
  console.log(err, 'catch error');
});
Error
Error: put->put: Failure /data
at fmtError (D:\project\node_modules\ssh2-sftp-client\src\utils.js:53:18)
at SftpClient.put (D:\project\node_modules\ssh2-sftp-client\src\index.js:684:13)
at processTicksAndRejections (internal/process/task_queues.js:93:5) {
code: 4,
custom: true
}
D:\project\node_modules\ssh2\lib\protocol\crypto\poly1305.js:20
function J(a){if(b.onAbort)b.onAbort(a);L(a);O=!0;a=new WebAssembly.RuntimeError("abort("+a+"). Build with -s ASSERTIONS=1 for more info.");r(a);throw a;}var V="data:application/octet-stream;base64,",W="data:application/octet-stream;base64,AGFzbQEAAAABIAZgAX8Bf2ADf39/AGABfwBgAABgAAF/YAZ/f39/f38AAgcBAWEBYQAAAwsKAAEDAQAAAgQFAgQFAXABAQEFBwEBgAKAgAIGCQF/AUGAjMACCwclCQFiAgABYwADAWQACQFlAAgBZgAHAWcABgFoAAUBaQAKAWoBAAqGTQpPAQJ/QYAIKAIAIgEgAEEDakF8cSICaiEAAkAgAkEAIAAgAU0bDQAgAD8AQRB0SwRAIAAQAEUNAQtBgAggADYCACABDwtBhAhBMDYCAEF/C4wFAg5+Cn8gACgCJCEUIAAoAiAhFSAAKAIcIREgACgCGCESIAAoAhQhEyACQRBPBEAgAC0ATEVBGHQhFyAAKAIEIhZBBWytIQ8gACgCCCIYQQVsrSENIAAoAgwiGUEFbK0hCyAAKAIQIhpBBWytIQkgADUCACEIIBqtIRAgGa0hDiAYrSEMIBatIQoDQCASIAEtAAMiEiABLQAEQQh0ciABLQAFQRB0ciABLQAGIhZBGHRyQQJ2Qf///x9xaq0iAyAOfiABLwAAIAEtAAJBEHRyIBNqIBJBGHRBgICAGHFqrSIEIBB+fCARIAEtAAdBCHQgFnIgAS0ACEEQdHIgAS0ACSIRQRh0ckEEdkH///8fcWqtIgUgDH58IAEtAApBCHQgEXIgAS0AC0EQdHIgAS0ADEEYdHJBBnY
RuntimeError: abort(Error: put->put: Failure /data). Build with -s ASSERTIONS=1 for more info.
at process.J (D:\project\node_modules\ssh2\lib\protocol\crypto\poly1305.js:20:53)
at process.emit (events.js:210:5)
at process.EventEmitter.emit (domain.js:475:20)
at processPromiseRejections (internal/process/promises.js:201:33)
at processTicksAndRejections (internal/process/task_queues.js:94:32)
The second argument of SftpClient.put is the path to the target remote file, not just the target remote folder.
So it should be like:
return sftp.put(filePath, '/' + filename, putConfig)
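For context, a minimal end-to-end sketch with the corrected remote path (host and credentials are the placeholders from the question):
const path = require('path');
const Client = require('ssh2-sftp-client');

const sftp = new Client();
const filename = 'sound.mp3';
const filePath = path.join(__dirname, '../audio', filename);

sftp.connect({
  host: 'host',     // placeholder
  port: 22,
  username: 'xxxx',
  password: 'xxx'
}).then(() => {
  // The remote path must include the file name, not just the directory.
  return sftp.put(filePath, '/' + filename);
}).then(() => {
  console.log('Upload finished');
  return sftp.end();
}).catch(err => {
  console.log(err, 'catch error');
});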

AWS Lambda function to delete files from an S3 folder

I import some data from Funnel into an S3 bucket. After that, a Lambda function copies the data to a table in Redshift, and I try to delete all the copied objects from the bucket folder, but I keep getting a timeout.
This is my code:
const Promise = require('bluebird');
const { Pool } = require('pg');
const AWS = require('aws-sdk');

async function emptyS3Directory(bucket, dir) {
  const listParams = {
    Bucket: bucket,
    Prefix: dir
  };
  var s3 = new AWS.S3();
  s3.listObjectsV2(listParams, function (err, data) { // Here I always get a timeout
  });
  .....
}
EDIT: This is the code of the function.
async function DeleteAllDataFromDir(bucket, dir) {
  const listParams = {
    Bucket: bucket,
    Prefix: dir
  };
  var s3 = new AWS.S3();
  const listedObjects = await s3.listObjects(listParams).promise();
  console.log("response", listedObjects);
  if (listedObjects.Contents.length === 0) return;
  const deleteParams = {
    Bucket: bucket,
    Delete: { Objects: [] }
  };
  listedObjects.Contents.forEach(({ Key }) => {
    deleteParams.Delete.Objects.push({ Key });
  });
  await s3.deleteObjects(deleteParams).promise();
  if (listedObjects.IsTruncated) await DeleteAllDataFromDir(bucket, dir);
}
At first I set the timeout to 2 minutes, then I changed it to 10 minutes, and I get the same error:
{
  "errorType": "NetworkingError",
  "errorMessage": "connect ETIMEDOUT IP:port",
  "code": "NetworkingError",
  "message": "connect ETIMEDOUT IP:port",
  "errno": "ETIMEDOUT",
  "syscall": "connect",
  "address": "IP",
  "port": port,
  "region": "eu-west-2",
  "hostname": "hostName",
  "retryable": true,
  "time": "2020-12-10T08:36:29.984Z",
  "stack": [
    "Error: connect ETIMEDOUT 52.95.148.74:443",
    "    at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1107:14)"
  ]
}
It appears that your bucket may reside in a different region from your Lambda function, based on the nature of the error.
Provide the region code as an option when constructing your S3 client:
var s3 = new AWS.S3({region: 'bucket-region-code'});
To find the region code, go to the S3 Management Console and click "Buckets" in the sidebar. In the resulting view, the region is listed next to each bucket.
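For example, a minimal sketch (assuming eu-west-2, which appears in the error output above, really is the bucket's region):
const AWS = require('aws-sdk');

// Construct the client once with the bucket's region and reuse it inside
// DeleteAllDataFromDir instead of creating a new client on every call.
const s3 = new AWS.S3({ region: 'eu-west-2' });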

upload failed: { Error: unable to verify the first certificate

I wrote some small code in AWS Lambda (Node.js) to send a file to an API. I am able to run the code, but I am getting an upload error.
Error: Function Logs:
START RequestId: 08ad7fab-3658-11e8-8483-a7fbad976cb7 Version: $LATEST
2018-04-02T09:27:17.787Z 08ad7fab-3658-11e8-8483-a7fbad976cb7 upload failed: { Error: unable to verify the first certificate
at Error (native)
at TLSSocket.<anonymous> (_tls_wrap.js:1092:38)
at emitNone (events.js:86:13)
at TLSSocket.emit (events.js:185:7)
at TLSSocket._finishInit (_tls_wrap.js:610:8)
at TLSWrap.ssl.onhandshakedone (_tls_wrap.js:440:38) code: 'UNABLE_TO_VERIFY_LEAF_SIGNATURE' }
END RequestId: 08ad7fab-3658-11e8-8483-a7fbad976cb7.
The code which I have written is:
console.log('Loading Function');

const fs = require('fs');
const request = require('request');
const url = require('url');
const https = require('https');

https.globalAgent.options.ca = rootCas;
var rootCas = require('ssl-root-cas').create();

// default for all https requests
// (whether using https directly, request, or another module)
require('https').globalAgent.options.ca = rootCas;

exports.handler = (event, context, callback) => {
  var formData = {
    // Pass a simple key-value pair
    my_field: 'my_value',
    // Pass data via Buffers
    my_buffer: new Buffer([1, 2, 3]),
    // Pass data via Streams
    my_file: fs.createReadStream(__dirname + '/myfile.csv'),
    // Pass multiple values /w an Array
    // attachments: [
    //   fs.createReadStream(__dirname + '/myfile.txt'),
    //   fs.createReadStream(__dirname + '/myfile.txt')
    // ],
  };
  var req = request.post({ url: 'https://abc.xyz.com:443/file/', formData: formData }, function optionalCallback(err, httpResponse, body) {
    if (err) {
      return console.error('upload failed:', err);
    }
    console.log('Upload successful! Server responded with:', body);
  });
};
When I run the same code locally, I don't get any error, and the ssl-root-cas module is able to download all the certificates. When I run the same thing in AWS Lambda, the code is not able to download the certificates. I even tried adding "rejectUnauthorized": false to the configured test JSON, but I still cannot get past the error.
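UNABLE_TO_VERIFY_LEAF_SIGNATURE usually means the server does not send its intermediate certificate, so Node cannot complete the chain from its built-in roots. A hedged sketch of one possible fix, assuming you can obtain the missing intermediate/chain PEM for the endpoint and bundle it in the deployment zip (the file name intermediate.pem is a placeholder, and note that rootCas must be created before it is assigned to the agent):
const path = require('path');
const https = require('https');
const request = require('request');

// Create the CA list first, then add the bundled intermediate certificate
// (intermediate.pem is a hypothetical file shipped inside the Lambda zip).
const rootCas = require('ssl-root-cas').create();
rootCas.addFile(path.join(__dirname, 'intermediate.pem'));

// Apply the CA list before any HTTPS request is made.
https.globalAgent.options.ca = rootCas;

exports.handler = (event, context, callback) => {
  request.post({ url: 'https://abc.xyz.com:443/file/', formData: { my_field: 'my_value' } },
    (err, httpResponse, body) => {
      if (err) return callback(err);
      callback(null, body);
    });
};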
