Hi, I'm new to AWS Lambda and S3. I'm trying to create an API that lets me upload an image. I have the following Lambda code to upload the file. After the upload I see that the file size is correct, but the file is corrupted.
let encodedImage = event.body;
console.log(encodedImage);
let decodedImage = Buffer.from(encodedImage, "binary");
console.log(decodedImage.length);
const filePath = `${Date.now()}.jpg`;
const params = {
    Bucket: "manufacturer-theme-assets",
    Key: filePath,
    Body: decodedImage,
    ContentType: "image/jpeg",
    ACL: "public-read"
};
s3.putObject(params, (err, data) => {
    if (err) {
        callback(err, null);
    } else {
        let response = {
            statusCode: 200,
            body: JSON.stringify(data),
            isBase64Encoded: false
        };
        callback(null, response);
    }
});
Make sure you are using the relevant content type for the image, and please share the link to the corrupted image in S3, or the error you get when opening the file.
Otherwise, try this first and check:
const filePath = `${Date.now()}.jpg`;
var params = {
    ACL: "public-read",
    Body: decodedImage,
    Bucket: "manufacturer-theme-assets",
    Key: filePath
};
s3.putObject(params, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data);               // successful response
});
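If the Lambda sits behind an API Gateway proxy integration, the body often arrives base64-encoded rather than as raw binary, which commonly explains images that upload but can't be opened. Below is a minimal sketch of decoding based on the event's isBase64Encoded flag; the handler wiring is illustrative and the bucket name is taken from the question:
const AWS = require("aws-sdk");
const s3 = new AWS.S3();

// Sketch: decode the incoming body according to API Gateway's isBase64Encoded flag
exports.handler = (event, context, callback) => {
    const body = event.isBase64Encoded
        ? Buffer.from(event.body, "base64")   // API Gateway delivered base64 text
        : Buffer.from(event.body, "binary");  // raw binary passthrough
    const params = {
        Bucket: "manufacturer-theme-assets",  // bucket from the question
        Key: `${Date.now()}.jpg`,
        Body: body,
        ContentType: "image/jpeg",
        ACL: "public-read"
    };
    s3.putObject(params, (err, data) => {
        if (err) return callback(err);
        callback(null, { statusCode: 200, body: JSON.stringify(data), isBase64Encoded: false });
    });
};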
Related
I kept trying to retrieve an image from S3, and there is no error message, but I cannot see the actual image on my page.
I used Express with Node.js to make a small application.
Here is my code. Please help me fix this.
Upload image to S3 -----------
(req, res, next) => {
    const file = req.file; // to get this "file", I used multer.diskStorage in the routes
    const fileData = fs.readFileSync(file.path);
    const fileName = file.path.substring(8);
    var params = {
        Bucket: "test-s3-may",
        Key: fileName,
        Body: fileData,
        ContentType: file.mimetype,
        ACL: "public-read"
    };
    s3.upload(params, function(err, data) {
        if (err) { return next(err); }
        // ... (response handling)
    });
}
Retrieve image from S3 -----
function viewAlbum(filename) {
    var params = {
        Bucket: 'test-s3-may',
        Key: filename
    };
    s3.getObject(params, function(err, file) {
        if (err) { return "we got a error"; }
        else {
            var url = "data:image/jpeg;base64," + encode(file.Body);
        }
        return url;
    });
}

function encode(data) {
    var res = Buffer.from(data).toString('base64');
    return res;
}
and I used "url" in the view with
<img src= >
Is there any problem with the uploading?
It is strange that there is an error message "This is not a supported format" in Windows Explorer when I download the image file that I uploaded through this app.
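For what it's worth, an empty img src usually comes from returning the data URL inside the asynchronous getObject callback, where the return value is simply discarded. A minimal sketch that hands the URL back through a callback instead (the route, template name, and EJS usage are illustrative assumptions):
// Sketch: pass the data URL out through a callback instead of returning from getObject
function viewAlbum(filename, done) {
    var params = { Bucket: 'test-s3-may', Key: filename };
    s3.getObject(params, function (err, file) {
        if (err) { return done(err); }
        var url = "data:image/jpeg;base64," + Buffer.from(file.Body).toString('base64');
        done(null, url); // hand the finished data URL to the caller (e.g. the route handler)
    });
}

// Illustrative usage inside an Express route
app.get('/image/:name', function (req, res, next) {
    viewAlbum(req.params.name, function (err, url) {
        if (err) { return next(err); }
        res.render('image', { url: url }); // the template then uses <img src="<%= url %>">
    });
});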
I need to upload a PDF file from the UI (written in JavaScript) to Amazon S3, but when I upload the file to S3 I get some unicode-format text, and when I copy that text to Notepad, or any other text editor, I can see the human-readable text.
I am using pdfmake to get the content of the file and upload it using the getBuffer method.
var content = generatePDF(base64Img);
pdfMake.createPdf(content).getBuffer(function (data) {
    // Code
});
The code that I used to upload the file to S3:
var params = {
    Bucket: bucketName,
    Key: file_name,
    Body: data.toString(),
    ContentType: 'application/pdf'
};
s3.upload(params, function (err, data) {
    if (err) {
        // code
    } else {
        // code
    }
});
The file is uploaded successfully, but I am getting text like
!
" #$%&!' ()*')+,
!
!
!
!
But when I paste it into another text editor, I get
Date: 04/20/19
I solved the above problem by passing the data from getBuffer to S3 as a Buffer.
Instead of calling toString(), I wrapped the data in a buffer:
var data = Buffer.from(event.data, 'binary');
and uploaded that data to S3.
var params = {
    Bucket: bucketName,
    Key: file_name,
    Body: data,
    ContentType: 'application/pdf'
};
s3.upload(params, function (err, data) {
    if (err) {
        // code
    } else {
        // code
    }
});
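Putting the two pieces together, here is a minimal sketch of wiring pdfMake's getBuffer output straight into the upload without the intermediate toString(); bucketName and file_name mirror the question's placeholders:
// Sketch: upload the pdfMake buffer directly, keeping the bytes binary end to end
pdfMake.createPdf(content).getBuffer(function (buffer) {
    var params = {
        Bucket: bucketName,          // placeholder from the question
        Key: file_name,              // placeholder from the question
        Body: Buffer.from(buffer),   // no string conversion, so the PDF bytes stay intact
        ContentType: 'application/pdf'
    };
    s3.upload(params, function (err, data) {
        if (err) { console.log('Upload failed', err); }
        else { console.log('Uploaded to', data.Location); }
    });
});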
To upload a file directly from the client to an S3 bucket you can use multer-s3.
FROM CLIENT END:
axios.post(url, data, {
    onUploadProgress: ProgressEvent => {
        this.setState({
            loaded: (ProgressEvent.loaded / ProgressEvent.total * 100),
        })
    },
})
.then(res => { // then print response status
    toast.success('Upload Success!')
})
.catch(err => { // then print response status
    toast.error('Upload Failed!')
})
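The data passed to axios.post above is assumed to be a FormData object whose field name matches the one multer expects on the server; a small sketch of building it (the selectedFiles state field and the /upload URL are illustrative assumptions):
// Sketch: build the multipart body that upload.array('file') will parse on the server
const data = new FormData();
for (const f of this.state.selectedFiles) {   // e.g. files picked from an <input type="file">
    data.append('file', f);                   // field name must match .array('file')
}
axios.post('/upload', data, { /* onUploadProgress as above */ });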
SERVER SIDE:
const multer = require('multer');
const multerS3 = require('multer-s3');
const AWS = require('aws-sdk');

const s3 = new AWS.S3(); // credentials/region are picked up from the environment

let UPLOADED_FILE_NAME;
const upload = multer({
    storage: multerS3({
        s3: s3,
        acl: 'public-read',
        bucket: BUCKET_NAME,
        key: function (req, file, cb) {
            UPLOADED_FILE_NAME = Date.now() + '-' + file.originalname;
            cb(null, UPLOADED_FILE_NAME);
        }
    })
}).array('file');
app.post('/upload', function (req, res) {
    upload(req, res, function (err) {
        if (err instanceof multer.MulterError) {
            // A Multer error occurred when uploading.
            return res.status(500).json(err)
        } else if (err) {
            // An unknown error occurred when uploading.
            return res.status(500).json(err)
        }
        // Everything went fine.
        console.log('REQUEST FILE IS', UPLOADED_FILE_NAME)
        return res.status(200).send(UPLOADED_FILE_NAME)
    })
});
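As a side note, multer-s3 also attaches the generated key and public location to each entry in req.files, so the module-level UPLOADED_FILE_NAME variable can be avoided; a small sketch of the same route using those fields:
// Sketch: read the uploaded object's key/location from req.files instead of a shared variable
app.post('/upload', function (req, res) {
    upload(req, res, function (err) {
        if (err) { return res.status(500).json(err); }
        const uploaded = req.files.map(f => ({ key: f.key, url: f.location })); // set by multer-s3
        return res.status(200).json(uploaded);
    });
});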
I'm trying to download PDF content from a remote location and upload that content to S3 as a PDF file. I'm using Node.js, in the context of an AWS Lambda. The s3.putObject callback completes successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
const request = require('request')
const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
    url: viewUrl,
    headers: {
        'Content-Type': 'application/pdf'
    }
};
request(options, function(err, res, body) {
    if (err) { return console.log(err); }
    const base64data = new Buffer(body, 'binary');
    const params = {
        Bucket: "myS3bucket",
        Key: "my-pdf.pdf",
        ContentType: "application/pdf",
        Body: base64data,
        ACL: 'public-read'
    };
    s3.putObject(params, function(err, data) {
        if (err) {
            console.log(err);
        } else {
            callback(null, JSON.stringify(data))
        }
    });
});
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?
Can you try this code? :)
const AWS = require('aws-sdk')
const request = require('request')
const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
    return request({ url: 'https://link_to_downloadable_pdf/', encoding: null }, // encoding: null keeps the body as a Buffer
        function(err, res, body) {
            if (err)
                return reject({ status: 500, error: err })
            return resolve({ status: 200, body: body })
        })
})

promise.then((pdf) => {
    if (pdf.status == 200) {
        console.log('uploading file..')
        s3.putObject({
            Bucket: process.env.bucket,
            Body: pdf.body,
            Key: 'my-pdf.pdf',
            ACL: 'public-read'
        }, (err, data) => {
            if (err)
                console.log(err)
            else
                console.log('uploaded')
        })
    }
})
I'll be attentive to any questions. Hope this helps you.
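An alternative, if holding the whole PDF in memory is a concern, is to stream the download straight into the upload; a rough sketch under the same assumptions (placeholder URL and bucket from the answer above):
// Sketch: stream the remote PDF into S3 instead of buffering it first
const { PassThrough } = require('stream');

const pass = new PassThrough();
request('https://link_to_downloadable_pdf/').pipe(pass); // pipe the raw response bytes through

s3.upload({
    Bucket: process.env.bucket,   // placeholder
    Key: 'my-pdf.pdf',
    Body: pass,                   // s3.upload accepts a readable stream as the Body
    ContentType: 'application/pdf',
    ACL: 'public-read'
}, (err, data) => {
    if (err) console.log(err);
    else console.log('uploaded', data.Location);
});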
I am creating a CSV file on the fly from JSON content and uploading the generated CSV file to an S3 bucket, rather than first saving the file locally.
Below is my code snippet. Using the code below, my CSV file is uploaded to the S3 bucket, but it does not seem to be in the correct CSV format.
var uploadCSVFileOnS3Bucket = function(next, csvFileContent, results) {
    console.log("uploadCSVFileOnS3Bucket function started");
    var bufferObject = Buffer.from(JSON.stringify(csvFileContent));
    var filePath = configurationHolder.config.s3UploadFilePath;
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'bucket_name',
        Key: filePath,
        Body: bufferObject,
        CacheControl: 'public, max-age=86400'
    }
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error at uploadCSVFileOnS3Bucket function", err);
            next(err);
        } else {
            console.log("File uploaded Successfully");
            next(null, filePath);
        }
    });
};
Also, I am using the "json2csv" npm module to generate the CSV file content from JSON.
Below is the code:
var generateCSVFile = function(next, callback, csvFileContent) {
    console.log("generateCSVFile function started", csvFileContent);
    if (csvFileContent && csvFileContent.length > 0) {
        var fields = ['field1', 'field2', 'field3', ........];
        var csv = json2csv({ data: csvFileContent, fields: fields });
        console.log('created', csv);
        next(null, csv);
    } else {
        next(null, []);
    }
}
Please let us know where the above code is going wrong.
Hi, I tried again with the header values below and it worked for me. Here is the code:
var contentDisposition = require('content-disposition'); // presumably the npm "content-disposition" package used below
var s3 = new AWS.S3();
var params = {
    Bucket: bucketName,
    Key: filePath,
    Body: csvFileContent,
    ContentType: 'application/octet-stream',
    ContentDisposition: contentDisposition(filePath, {
        type: 'inline'
    }),
    CacheControl: 'public, max-age=86400'
}
s3.putObject(params, function(err, data) {
    if (err) {
        console.log("Error at uploadCSVFileOnS3Bucket function", err);
        next(err);
    } else {
        console.log("File uploaded Successfully");
        next(null, filePath);
    }
});
Add ContentDisposition: 'attachment' in your params as well.
Otherwise, you can also read the file and upload it to S3:
fs.readFile(FILEPATH, function(err, file_buffer) {
    var params = {
        Bucket: bucketName, // your bucket name
        Key: key,
        ContentDisposition: 'attachment',
        Body: file_buffer
    };
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error in upload");
            callback(err, null)
        }
        if (data) {
            console.log("Upload Success", data);
            callback(null, data)
        }
    });
});
Using async/await:
import { parse } from "json2csv";

const saveCsv = async () => {
    const payload = [{ a: 1, b: 2 }]
    const csvPayload = parse(payload, { header: true, defaultValue: "-----" });
    const s3Key = 'filename.csv';
    const bucketName = 'bucket-name';
    // s3.put here is presumably a project-specific wrapper; a stock-SDK equivalent is sketched below
    await s3.put(bucketName, s3Key, csvPayload);
}
Just like that, without creating a buffer or using JSON.stringify().
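If s3.put above is a project-specific helper, the same thing with the plain AWS SDK would look roughly like this (bucket and key are the same placeholders):
// Equivalent with the stock aws-sdk v2, assuming s3 is an AWS.S3 instance
const AWS = require('aws-sdk');
const { parse } = require('json2csv');
const s3 = new AWS.S3();

const saveCsv = async () => {
    const payload = [{ a: 1, b: 2 }];
    const csvPayload = parse(payload, { header: true, defaultValue: "-----" });
    await s3.putObject({
        Bucket: 'bucket-name',   // placeholder
        Key: 'filename.csv',
        Body: csvPayload,        // the CSV string goes straight in as the Body
        ContentType: 'text/csv'
    }).promise();
};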
Try this, it worked for me:
var fs = require('fs')
var AWS = require('aws-sdk')
AWS.config.update({
    region: '',          // use appropriate region
    accessKeyId: '',     // use your access key
    secretAccessKey: ''  // use your secret key
})
var s3 = new AWS.S3()

fs.readFile('contacts.csv', 'utf-8', (err, data) => {
    if (err) throw err;
    const params = {
        Bucket: 'testBucket',  // pass your bucket name
        Key: 'contacts.csv',   // file will be saved as testBucket/contacts.csv
        Body: data
    };
    s3.upload(params, (s3Err, data) => {
        if (s3Err) throw s3Err
        console.log(`File uploaded successfully at ${data.Location}`)
    });
});
My Lambda receives binary data of an image from my user in the request body (event.body).
I upload it to S3 with no error, but when I download it, the image is corrupted and can't be opened.
I also need to return the URL of the uploaded image to the user.
Please help!
module.exports.uploadImage = (event, context, callback) => {
    var buf = new Buffer(new Buffer(event.body).toString('base64').replace(/^data:image\/\w+;base64,/, ""), 'base64');
    var data = {
        Key: Date.now() + "",
        Body: buf,
        ContentEncoding: 'base64',
        ContentType: 'image/png',
        ACL: 'public-read'
    };
    s3Bucket.putObject(data, function(err, data) {
        if (err) {
            console.log(err);
            console.log('Error uploading data: ', data);
        } else {
            console.log('succesfully uploaded the image!');
        }
        callback(null, data);
    });
};
You can upload the image to S3 as a Node Buffer. The SDK does the conversion for you.
const AWS = require("aws-sdk");
var s3 = new AWS.S3();

module.exports.handler = (event, context, callback) => {
    var buf = Buffer.from(event.body.replace(/^data:image\/\w+;base64,/, ""), "base64");
    var data = {
        Bucket: "sample-bucket",
        Key: Date.now() + "",
        Body: buf,
        ContentType: 'image/png',
        ACL: 'public-read'
    };
    s3.putObject(data, function(err, data) {
        if (err) {
            console.log(err);
            console.log('Error uploading data: ', data);
        } else {
            console.log('successfully uploaded the image!');
        }
        callback(null, data);
    });
};
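Since the question also asks for the uploaded image's URL, here is a hedged sketch of that last step, replacing the putObject call above; the virtual-hosted URL format assumes a public-read object in a standard region, and a pre-signed URL is the alternative for private objects:
// Sketch: hand the object's URL back to the caller instead of the raw putObject result
s3.putObject(data, function(err, result) {
    if (err) {
        console.log(err);
        return callback(null, { statusCode: 500, body: JSON.stringify({ error: 'upload failed' }) });
    }
    // Public-read objects can be addressed directly; for private objects,
    // s3.getSignedUrl('getObject', { Bucket: data.Bucket, Key: data.Key, Expires: 3600 }) is an option.
    const url = `https://${data.Bucket}.s3.amazonaws.com/${data.Key}`;
    callback(null, { statusCode: 200, body: JSON.stringify({ url: url }) });
});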