Multiple image upload to an S3 bucket using Node.js in AWS Lambda? - node.js

I am using an AWS Lambda function with Node.js and trying to upload multiple images to an S3 bucket. See the code below for reference.
var AWS = require('aws-sdk');
var fileType = require('file-type');
var randomstring = require('randomstring');

var bucket = 'testing';
var s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
    var Userid = event['userid'];
    var media = event['media'];

    media.forEach(function (eachrecord) {
        var fileBuffer = Buffer.from(eachrecord, 'base64');
        var fileTypeInfo = fileType(fileBuffer);
        var fileName = Userid + '/media/' + `${randomstring.generate()}.${fileTypeInfo.ext}`;

        var params = {
            Body: fileBuffer,
            Key: fileName,
            Bucket: bucket,
            ContentEncoding: 'base64',
            ContentType: fileTypeInfo.mime
        };

        s3.upload(params, function (err, data) {
            if (err) {
                callback(err, null);
            } else {
                var mediaurl = JSON.stringify(data.Location);
                console.log(mediaurl);
            }
        });
    });

    context.succeed('done');
};
In media, I send an array of base64-encoded values in the event. How do I loop over that array correctly and upload each item to the S3 bucket?
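One possible approach (a minimal sketch, not a confirmed fix): map each media item to an upload promise and wait for all of them with Promise.all, so the handler only finishes once every upload has completed. The userid/media event fields, the bucket name and the randomstring/file-type dependencies are taken from the question; everything else is an assumption.

// Sketch: upload every base64 item in event.media, then return once all uploads are done.
const AWS = require('aws-sdk');
const fileType = require('file-type');
const randomstring = require('randomstring');

const s3 = new AWS.S3();
const bucket = 'testing'; // bucket name taken from the question

exports.handler = async (event) => {
    const userid = event.userid;
    const media = event.media || [];

    // Build one upload promise per media item.
    const uploads = media.map((item) => {
        const fileBuffer = Buffer.from(item, 'base64');
        const info = fileType(fileBuffer); // synchronous in the file-type version used in the question
        const fileName = `${userid}/media/${randomstring.generate()}.${info.ext}`;

        return s3.upload({
            Bucket: bucket,
            Key: fileName,
            Body: fileBuffer,
            ContentType: info.mime
        }).promise();
    });

    // Wait for every upload; the handler only returns after all of them succeed.
    const results = await Promise.all(uploads);
    return results.map((r) => r.Location);
};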

Related

Extract a zip file in an S3 bucket using AWS Lambda functions in Node.js: "Error: Invalid CEN header (bad signature)"

I am struggling with unzipping contents in AWS S3. S3 does not provide a way to unzip an archive inside the bucket directly, so I upload the zip via a presigned URL and extract it with a second Lambda (both shown below), but I keep hitting this error:
"Error: Invalid CEN header (bad signature)"
Any advice or guidance would be greatly appreciated.
My Node.js code that returns a presigned URL for uploading the zip file:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({ signatureVersion: 'v4' });

exports.handler = async (event, context) => {
    const bucket = 'bucket-name';
    console.log(event);

    const body = event.body;
    const key = JSON.parse(body).key;
    console.log(key);

    const params = {
        Bucket: bucket,
        Key: key,
        ContentType: 'application/zip',
        Expires: 60
    };

    try {
        const signedURL = await s3.getSignedUrl('putObject', params);
        return {
            err: {},
            body: 'url send',
            url: signedURL
        };
    } catch (e) {
        return {
            err: e.message,
            body: 'error occurred'
        };
    }
};
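One thing worth checking (an assumption, not a confirmed diagnosis): "Invalid CEN header" usually means the object stored in S3 is not a valid zip archive, which can happen when the client sends the file as text or base64 instead of raw bytes. A minimal sketch of PUTting the zip to the presigned URL as a binary buffer, using Node's built-in https module (the signedURL value and the local file path are placeholders):

// Sketch: upload ./files/archive.zip to the presigned URL as raw binary.
const https = require('https');
const fs = require('fs');

const signedURL = 'https://...'; // the url returned by the Lambda above
const data = fs.readFileSync('./files/archive.zip');

const req = https.request(signedURL, {
    method: 'PUT',
    headers: {
        'Content-Type': 'application/zip', // must match the ContentType used when signing
        'Content-Length': data.length
    }
}, (res) => {
    console.log('Upload status:', res.statusCode);
});

req.on('error', console.error);
req.write(data);
req.end();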
My Node.js code to extract the zip file:
const S3Unzip = require('s3-unzip');

exports.s3_unzip = function (event, context, callback) {
    const filename = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const bucketname = event.Records[0].s3.bucket.name;
    console.log(event.Records[0].s3.object.key);

    new S3Unzip({
        bucket: bucketname,
        file: filename,
        deleteOnSuccess: true,
        verbose: true,
    }, function (err, success) {
        if (err) {
            callback(err);
        } else {
            callback(null);
        }
    });
};

How to upload a file from the filesystem to S3

I have a file at the path ./files/myfile.zip that I would like to upload to S3.
I have a function:
const writeS3Async = (bucketName, fileName, strInput) => {
    const params = {
        Bucket: bucketName,
        Key: fileName,
        Body: strInput,
    };
    return s3.upload(params).promise();
};
The writeS3Async method is used for uploading strings into an S3 object, not files.
Interestingly, I could not find a decent piece of code for a direct file upload.
You need to read the file into memory first. Something like this:
var AWS = require('aws-sdk'),
    fs = require('fs');

fs.readFile('./files/myfile.zip', function (err, data) {
    if (err) { throw err; }

    var s3 = new AWS.S3();
    const params = {
        Bucket: bucketName,   // your bucket name
        Key: fileName,        // the object key to write
        Body: data            // the file contents as a Buffer
    };

    // Note: it is s3.putObject(...), not s3.client.putObject(...)
    return s3.putObject(params).promise();
});
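For larger files you can avoid buffering the whole archive in memory by passing a read stream as the Body; s3.upload accepts streams. A minimal sketch (bucket and key names are placeholders):

// Sketch: stream ./files/myfile.zip to S3 without reading it fully into memory.
const AWS = require('aws-sdk');
const fs = require('fs');

const s3 = new AWS.S3();

const uploadFileStream = (bucketName, fileName, filePath) =>
    s3.upload({
        Bucket: bucketName,
        Key: fileName,
        Body: fs.createReadStream(filePath)
    }).promise();

uploadFileStream('my-bucket', 'myfile.zip', './files/myfile.zip')
    .then((data) => console.log('Uploaded to', data.Location))
    .catch(console.error);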

Having trouble uploading an image to an AWS S3 bucket with Node.js

I'm trying to upload an image to S3 using Node.js and the AWS SDK. It keeps returning a weird error: "Inaccessible host: `images.dynamodb.us-east-1.amazonaws.com'. This service may not be available in the 'us-east-1' region"
This is my Lambda code:
exports.handler = function (event, context, callback) {
    var s3 = new AWS.S3();
    const image = event.body.imageBinary;
    var buf = new Buffer.from(image.replace(/^data:image\/\w+;base64,/, ""), 'base64');
    const type = image.split(';')[0].split('/')[1];

    var params = {
        Bucket: process.env.BUCKET,
        Key: `${AccountId}.${type}`,
        Body: buf,
        ContentEncoding: 'base64',
        ContentType: `image/${type}`
    };

    s3.upload(params, function (err, resp) {
        if (err) {
            console.log(err);
        } else {
            console.log('successfully uploaded the image!: ' + JSON.stringify(resp));
        }
    });
};
I even tried setting the AWS object configuration (with key, secret key and region) but got the same response
my aws sdk version: "aws-sdk": "^2.610.0"
Any help would be nice
Thanks!!!
Lambda supports Node.js v12, which allows you to write async/await code:
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
    region: 'us-east-1',
    apiVersion: '2006-03-01',
});

exports.handler = async (event, context) => {
    const image = event.body.imageBinary;
    const buf = Buffer.from(image.replace(/^data:image\/\w+;base64,/, ""), 'base64');
    const type = image.split(';')[0].split('/')[1];

    // ACL, CacheControl, ContentType and ContentEncoding are object parameters and
    // belong in params; the second argument of s3.upload() is only for managed-upload
    // options such as partSize and queueSize.
    const params = {
        Bucket: process.env.BUCKET,
        Key: `${AccountId}.${type}`, // AccountId is assumed to be defined elsewhere
        Body: buf,
        ACL: 'private',
        CacheControl: 'max-age=86400',
        ContentType: `image/${type}`,
        ContentEncoding: 'base64',
    };

    await s3.upload(params).promise();
};
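A small follow-up (sketch): s3.upload(...).promise() resolves with the upload result, so the handler can also return the object URL to the caller, for example by replacing the final line of the handler above with:

    // Replace the final `await s3.upload(params).promise();` line with:
    const result = await s3.upload(params).promise();
    return {
        statusCode: 200,
        body: JSON.stringify({ url: result.Location }),
    };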

untar/decompress to a stream in node

I am trying to write an AWS Lambda that will take a tar.gz from an S3 bucket, inflate it and then unpack it whilst streaming the files back to another S3 bucket.
I have this code:
var AWS = require('aws-sdk');
var fs = require('fs');
var zlib = require('zlib');
var uuid = require('uuid/v4');
var tar = require('tar-stream');

var pack = tar.pack();
var s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
    var bucket = event.Records[0].s3.bucket.name;
    var key = event.Records[0].s3.object.key;
    var file = 'S3://' + bucket + '/' + key;
    console.log(bucket);
    console.log(key);

    var readParams = {
        Bucket: bucket,
        Key: key
    };
    var dataStream = s3.getObject(readParams).createReadStream();

    var extract = tar.extract();
    extract.on('entry', function (header, stream, next) {
        console.log(header.name);
        var writeParams = {
            Bucket: process.env.JOB_PROCESSING_BUCKET,
            Key: uuid() + '-' + header.name,
            Body: stream
        };
        s3.upload(writeParams)
            .on('httpUploadProgress', function (evt) {
                console.log('Progress:', evt.loaded, '/', evt.total);
            })
            .send(function (err, data) {
                if (err) console.log("An error occurred", err);
                console.log("Uploaded the file at", data.Location);
            });
        stream.on('end', function () {
            next(); // ready for next entry
        });
        stream.resume(); // just auto drain the stream
    });

    extract.on('finish', function () {
        // all entries read
    });

    dataStream.pipe(zlib.createGunzip()).pipe(extract);
    callback(null, 'Gunzip Lambda Function');
};
It pulls the file, sorts the gzipping out, and then I can see each file being extracted on 'entry'. The code then tries to stream each file to S3, which creates a 0 KB file, hangs around as if it is reading the stream, then continues on to the next entry.
Why can't it read/process the stream body?
Is there a better way of doing this?
Thanks
I don't know if it's the best solution but the following code works for me.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const tar = require('tar-stream');
const zlib = require('zlib');
const stream = require('stream');
const uuid = require('uuid');

exports.get = (event, context) => {
    var params = {
        Bucket: event.Records[0].s3.bucket.name,
        Key: event.Records[0].s3.object.key
    };
    var dataStream = s3.getObject(params).createReadStream();

    var extract = tar.extract();
    extract.on('entry', function (header, inputStream, next) {
        inputStream.pipe(uploadFromStream(s3, header, context));
        inputStream.on('end', function () {
            next(); // ready for next entry
        });
        inputStream.resume(); // just auto drain the stream
    });
    extract.on('finish', function () {
        // all entries read
    });

    dataStream.pipe(zlib.createGunzip()).pipe(extract);
};

// context is passed in so it is in scope when the upload callback fires
function uploadFromStream(s3, header, context) {
    var pass = new stream.PassThrough();
    var writeParams = {
        Bucket: process.env.JOB_PROCESSING_BUCKET,
        Key: uuid.v1() + '-' + header.name,
        Body: pass
    };
    s3.upload(writeParams, function (err, data) {
        context.done(err, data);
    });
    return pass;
}
Tried for a couple of hours to get this to work; it turns out the 'end' event has been replaced by 'finish'. So the answer above works great, just one small change -
    inputStream.on('end', function() {
        next(); // ready for next entry
    });
- should be -
    inputStream.on('finish', function() {
        next(); // ready for next entry
    });
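If you need the handler to wait for every extracted file to finish uploading before it returns (the 0 KB files in the original question suggest the Lambda completed too early), one option is to collect the upload promises and await them. A sketch under that assumption, reusing the same tar-stream/zlib pipeline; the bucket names and uuid dependency follow the snippets above:

// Sketch: untar a .tar.gz from S3 and await every upload before returning.
const AWS = require('aws-sdk');
const tar = require('tar-stream');
const zlib = require('zlib');
const stream = require('stream');
const { v4: uuidv4 } = require('uuid');

const s3 = new AWS.S3();

exports.handler = async (event) => {
    const Bucket = event.Records[0].s3.bucket.name;
    const Key = event.Records[0].s3.object.key;

    const uploads = [];
    const extract = tar.extract();

    extract.on('entry', (header, entryStream, next) => {
        // Skip directories and other non-file entries.
        if (header.type !== 'file') {
            entryStream.resume();
            return next();
        }

        // Pipe each tar entry through a PassThrough so s3.upload gets a live stream.
        const pass = new stream.PassThrough();
        entryStream.pipe(pass);

        uploads.push(
            s3.upload({
                Bucket: process.env.JOB_PROCESSING_BUCKET,
                Key: `${uuidv4()}-${header.name}`,
                Body: pass
            }).promise()
        );

        entryStream.on('end', next);
    });

    // Wait until the archive is fully read, then until all uploads resolve.
    await new Promise((resolve, reject) => {
        extract.on('finish', resolve);
        extract.on('error', reject);
        s3.getObject({ Bucket, Key })
            .createReadStream()
            .pipe(zlib.createGunzip())
            .pipe(extract);
    });

    const results = await Promise.all(uploads);
    return results.map((r) => r.Location);
};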

Stream the contents of an S3 object into a hash algorithm in Node.js

I'm new to Node.js and I'm trying to write an AWS Lambda function that streams the contents of an S3 object into Node's crypto module to create an MD5 checksum of the object. Not sure why, but every time I run the code it generates a different hash value in the console.log. Can anyone point me in the right direction to fix my code? Appreciate the help!
var crypto = require('crypto');
var fs = require('fs');
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
    var params = {
        Bucket: 'bucket_name',
        Key: 'key',
    };
    var hash = crypto.createHash('md5');

    var stream = s3.getObject(params, function (err, data) {
        if (err) {
            console.log(err);
            return;
        }
    }).createReadStream();

    stream.on('data', function (data) {
        hash.update(data, 'utf-8');
    });
    stream.on('end', function () {
        console.log(hash.digest('hex'));
    });
};
You were close. You are mixing the "callback" style method signature with a "createReadStream" signature. Try this:
const crypto = require('crypto');
const fs = require('fs');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
    let params = {
        Bucket: 'bucket_name',
        Key: 'key',
    };
    let hash = crypto.createHash('md5');

    let stream = s3.getObject(params).createReadStream();
    stream.on('data', (data) => {
        hash.update(data);
    });
    stream.on('end', () => {
        let digest = hash.digest('hex');
        console.log(digest);
        callback(null, digest);
    });
};
Not directly an answer, but you can also have the MD5 hash show up as the ETag when uploading a file to S3:
const crypt = require('crypto');
const fs = require('fs').promises;
const aws = require('aws-sdk');

async function uploadFileToS3WithMd5Hash(bucket, filename, s3Key = null) {
    const data = await fs.readFile(filename);
    const md5Base64 = crypt.createHash("md5").update(data).digest('base64');
    if (!s3Key) {
        s3Key = filename;
    }
    /** Should you want to get the MD5 in hex format: */
    // const md5Hex = Buffer.from(md5Base64, 'base64').toString('hex');
    return new Promise((res, rej) => {
        const s3 = new aws.S3();
        s3.putObject({
            Bucket: bucket,
            Key: s3Key,
            Body: data,
            ContentMD5: md5Base64,
        }, (err, resp) => err ? rej(err) : res(resp));
    });
}

uploadFileToS3WithMd5Hash('your-own-bucket', 'file.txt')
    .then(console.log)
    .catch(console.error);
So by checking the ETag of the object on S3, you get the hex string of the file's MD5 hash.
In some cases (see this post by Dennis), the MD5 checksum is computed automatically upon upload.
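To tie the two answers together, here is a minimal sketch that compares the streamed MD5 digest against the object's ETag. Assumptions: the object was uploaded in a single, non-multipart PUT without SSE-KMS (only then does the ETag equal the MD5), and the bucket and key names are placeholders.

// Sketch: compare a streamed MD5 digest against the object's ETag.
const crypto = require('crypto');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function verifyObjectMd5(Bucket, Key) {
    // Stream the object through the hash.
    const hash = crypto.createHash('md5');
    const digest = await new Promise((resolve, reject) => {
        s3.getObject({ Bucket, Key }).createReadStream()
            .on('data', (chunk) => hash.update(chunk))
            .on('end', () => resolve(hash.digest('hex')))
            .on('error', reject);
    });

    // For single-part uploads the ETag is the MD5 hex string (wrapped in quotes).
    const head = await s3.headObject({ Bucket, Key }).promise();
    const etag = head.ETag.replace(/"/g, '');

    return { digest, etag, matches: digest === etag };
}

verifyObjectMd5('bucket_name', 'key')
    .then(console.log)
    .catch(console.error);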
