Buffer a gz file from S3 using Lambda (Node.js)

I have a gz file in S3 and need to fetch the content of the file. The code below doesn't print the actual content of the file:
const params = {
  Bucket: 'mybucket',
  Key: 'mykey.gz',
};
s3.getObject(params, (err, data) => {
  if (err) {
    console.log(err);
    const message = `Error getting object`;
    console.log(message);
    callback(message);
  } else {
    const payload = data.Body.toString('ascii');
    console.log('printing contents ', payload);
  }
});
How can we fetch the contents of a .gz file from S3?
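data.Body holds the raw gzip bytes, so calling toString on it prints binary garbage; the object has to be decompressed first, for example with Node's built-in zlib. A minimal sketch of the else branch, assuming the object is gzipped UTF-8 text:
const zlib = require('zlib');
zlib.gunzip(data.Body, (err, decoded) => {
  if (err) {
    console.log(err);
    callback(err);
  } else {
    console.log('printing contents ', decoded.toString('utf8'));
  }
});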

Related

How to upload a PDF file to S3 in JavaScript?

I need to upload a PDF file from the UI (written in JavaScript) to Amazon S3, but when I upload the file to S3 I am getting some Unicode-format text, and when I copy that text to Notepad, or say, any other text editor, I can see the human-readable text.
I am using pdfmake to get the content of the file and upload it using the getBuffer method.
var content = generatePDF(base64Img);
pdfMake.createPdf(content).getBuffer(function (data) {
  // code
});
The code I used to upload the file to S3:
var params = {
  Bucket: bucketName,
  Key: file_name,
  Body: data.toString(),
  ContentType: 'application/pdf'
};
s3.upload(params, function (err, data) {
  if (err) {
    // code
  } else {
    // code
  }
});
The file is getting uploaded successfully, but its content looks like
! " #$%&!' ()*')+,
and when I paste it into another text editor, all I get is
Date: 04/20/19
I solved the above problem by passing the raw buffer from getBuffer to S3 instead of a string. I wrapped the data in a Buffer:
var data = Buffer.from(event.data, 'binary'); // new Buffer(...) is deprecated
and uploaded that buffer to S3:
var params = {
  Bucket: bucketName,
  Key: file_name,
  Body: data,
  ContentType: 'application/pdf'
};
s3.upload(params, function (err, data) {
  if (err) {
    // code
  } else {
    // code
  }
});
To upload a file from the client end straight through to an S3 bucket you can use multer-s3.
FROM CLIENT END:
axios.post(url, data, {
  onUploadProgress: ProgressEvent => {
    this.setState({
      loaded: (ProgressEvent.loaded / ProgressEvent.total * 100),
    })
  },
})
  .then(res => { // print response status
    toast.success('Upload Success!')
  })
  .catch(err => { // print response status
    toast.error('Upload Failed!')
  })
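Note: for the server's .array('file') to find the upload, the data passed to axios.post should be a FormData object with a matching field name. A minimal sketch, assuming file is a browser File object taken from an <input> element:
const data = new FormData();
// the field name must match .array('file') on the server
data.append('file', file);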
SERVER SIDE:
const multer = require('multer')
const multerS3 = require('multer-s3')

const upload = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: BUCKET_NAME,
    key: function (req, file, cb) {
      UPLOADED_FILE_NAME = Date.now() + '-' + file.originalname;
      cb(null, UPLOADED_FILE_NAME);
    }
  })
}).array('file');
app.post('/upload', function (req, res) {
  upload(req, res, function (err) {
    if (err instanceof multer.MulterError) {
      // A Multer error occurred when uploading.
      return res.status(500).json(err)
    } else if (err) {
      // An unknown error occurred when uploading.
      return res.status(500).json(err)
    }
    // Everything went fine.
    console.log('REQUEST FILE IS', UPLOADED_FILE_NAME)
    return res.status(200).send(UPLOADED_FILE_NAME)
  })
});

How to upload an .mp4 video to an S3 bucket using Node.js

Below is the REST API to upload the video to S3. I am unable to play the video after downloading it, as players throw an invalid file format error.
app.post('/insert-video', async (req, res) => {
  const {
    data,
    name: fileName,
    size: fileSize,
    type: fileType
  } = req.body
  // AWS code start
  const base64data = Buffer.from(data, 'binary'); // new Buffer(...) is deprecated
  let params = {
    Bucket: "uploadvidoe",
    Key: fileName,
    ContentType: fileType,
    Body: base64data
  };
  try {
    let uploadPromise = await new AWS.S3().putObject(params).promise();
    console.log("Successfully uploaded data to bucket");
    res.sendStatus(200);
  } catch (e) {
    console.log("Error uploading data: ", e);
    res.sendStatus(500);
  }
});
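One likely cause of the invalid format: if the client sends the file content as a base64 data URL, decoding it with 'binary' corrupts the bytes. A sketch of the decode step, assuming data arrives as a string like "data:video/mp4;base64,AAAA...":
const base64Content = data.replace(/^data:video\/\w+;base64,/, '');
const body = Buffer.from(base64Content, 'base64'); // pass this as Body instead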

Link to image uploaded in S3 bucket does not display image

Hi, I'm new to AWS Lambda and S3. I'm trying to create an API that will allow me to upload an image. I have the following Lambda code to upload the file. After upload I see that the file size is correct, but the file is corrupted.
let encodedImage = event.body;
console.log(encodedImage);
let decodedImage = Buffer.from(encodedImage, "binary");
console.log(decodedImage.length);
const filePath = `${Date.now()}.jpg`;
const params = {
  Bucket: "manufacturer-theme-assets",
  Key: filePath,
  Body: decodedImage,
  ContentType: "image/jpeg",
  ACL: "public-read"
};
s3.putObject(params, (err, data) => {
  if (err) {
    callback(err, null);
  } else {
    let response = {
      statusCode: 200,
      body: JSON.stringify(data),
      isBase64Encoded: false
    };
    callback(null, response);
  }
});
Make sure you are using the relevant content type for the image, and please share the corrupted image link from S3, or the error you get while opening the file.
Otherwise, try this simpler call first and check:
const filePath = `${Date.now()}.jpg`;
var params = {
  ACL: "public-read",
  Body: decodedImage,
  Bucket: "manufacturer-theme-assets",
  Key: filePath
};
s3.putObject(params, function (err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data); // successful response
});
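If the function sits behind API Gateway, another common cause of corrupted images is that the request body arrives base64-encoded: binary media types must be enabled on the API, and the body decoded accordingly. A sketch of that decode, assuming a standard Lambda proxy event:
const decodedImage = event.isBase64Encoded
  ? Buffer.from(event.body, 'base64') // API Gateway delivered the body base64-encoded
  : Buffer.from(event.body, 'binary');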

How to upload binary files to S3 in Node.js using the S3 Key attribute

I need to download tar.gz files from an S3 bucket, extract them, and upload the result to another bucket. The files are downloaded properly, and s3.getObject returns the result as binary data.
I need to pass that binary data (the files) to s3.putObject to upload to the other bucket.
But I don't know what name to give the Key param in s3.putObject when we push the binary files. Kindly help me.
This is my code:
var bucketName = "my.new.Bucket-Latest";
var fileKey = "Employee.tar.gz";
var params = { Bucket: bucketName, Key: fileKey };
s3.getObject(params, function (err, data) {
  if (err) {
    console.log(err);
    callback(err);
  } else {
    zlib.gunzip(data.Body, function (err, result) {
      if (err) {
        console.log(err);
      } else {
        var extractedData = result;
        s3.putObject({
          Bucket: "bucketName",
          Key: " ",
          Body: extractedData,
          ContentType: 'content-type'
        }, function (err) {
          console.log('uploaded file: ' + err);
        });
      }
    });
  }
});
Well, even your own code shows that the key is a filename. Just generate a filename and assign it to the Key property. It is like a filesystem: when you create a new file, you have to give it a name.
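For example, a minimal sketch that derives the new key from the source key, assuming the gunzipped output should keep the .tar name and targetBucket is a hypothetical destination bucket:
var newKey = fileKey.replace(/\.gz$/, ''); // "Employee.tar.gz" -> "Employee.tar"
s3.putObject({
  Bucket: targetBucket, // hypothetical destination bucket name
  Key: newKey,
  Body: extractedData,
  ContentType: 'application/x-tar'
}, function (err) {
  if (err) console.log(err);
});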

NodeJS Request Upload Image

I'm trying to upload an image using Node.js, Express and the request module, but I keep getting a 415 from the Amazon S3 instance.
fs.createReadStream(req.files.image.path).pipe(request.post(defaults.url, {
  form: {
    param_1: '',
    param_2: ''
  }
}, function (error, response, body) {
  if (error) {
    callback(error, null);
  } else {
    if (response.statusCode === 200) {
      callback({}, body);
    } else {
      callback(body, response);
    }
  }
}));
I think the image is not getting attached to the request, but I'm not 100% sure. Any advice?
pipe expects a Writable stream as its parameter. You can use the res object of Express directly as the pipe destination. But if you'd like to upload to S3, you can read the file from the req stream and use putObject to write it to S3:
var fs = require('fs')
fs.readFile(req.files.image.path, function (err, data) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  var bucket = ''
  s3.putObject({
    ACL: 'public-read', // private access by default
    Bucket: bucket,
    Key: file_name,
    Body: data
  }, function (err, data) {
    if (err) {
      console.log(err)
      res.status(500).send({msg: 'image upload failed', error: err})
    } else {
      console.log('S3 upload Successful')
      res.send({})
    }
  });
});
If you'd like to download, you can use pipe to redirect the read object to the response directly:
app.get('/download/:file', function (req, res, next) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  s3.getObject({
    Bucket: '',
    Key: req.params.file
  }, function (err, data) {
    if (err) console.log(err)
    var fs = require('fs')
    var filePath = __dirname + "/downloads/" + req.params.file
    fs.writeFile(filePath, data.Body, function (err) {
      if (err) console.log(err)
      else {
        res.attachment(filePath)
        var filestream = fs.createReadStream(filePath);
        filestream.pipe(res);
        // TODO: delete file from server?
      }
    });
  })
})
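A shorter variant (assuming the AWS SDK v2 used throughout these snippets) streams the object straight to the response without touching the filesystem:
app.get('/download/:file', function (req, res) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  s3.getObject({
    Bucket: '',
    Key: req.params.file
  }).createReadStream()
    .on('error', function (err) { res.status(500).send(err.message) })
    .pipe(res)
})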
