Image uploaded to S3 becomes corrupted - node.js

I’m having issues uploading a file from Postman to AWS Lambda + S3. If I understand correctly, the image has to be a base64 string sent via JSON to work with Lambda and API Gateway, so I converted an image to base64 and I’m using that base64 string in Postman.
The file uploads to S3, but when I download the S3 object and open it, the image won't display; it appears to be corrupted.
So I don’t think I’m uploading it correctly. I’ve run the string through a base64-to-image converter and the image appears fine, so the base64 string is correct before it is sent via Postman; something in my setup is off. What am I doing wrong? I appreciate the help!
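For reference, the request body sent from Postman is plain JSON with the image as a base64 field (the field name matches what the handler below reads; the string itself is truncated here):

{
    "base64String": "/9j/4AAQSkZJRgABAQEASABIAAD..."
}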
upload.js
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = async (event, context, callback) => {
    let data = JSON.parse(event.body);
    let file = data.base64String;

    const s3Bucket = "upload-test3000";
    const objectName = "helloworld.jpg";
    const objectData = data.base64String;
    const objectType = "image/jpg";

    try {
        const params = {
            Bucket: s3Bucket,
            Key: objectName,
            Body: objectData,
            ContentType: objectType
        };
        const result = await s3.putObject(params).promise();
        return sendRes(200, `File uploaded successfully at https://` + s3Bucket + `.s3.amazonaws.com/` + objectName);
    } catch (error) {
        return sendRes(404, error);
    }
};

const sendRes = (status, body) => {
    var response = {
        statusCode: status,
        headers: {
            "Content-Type": "application/json",
            "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token",
            "Access-Control-Allow-Methods": "OPTIONS,POST,PUT",
            "Access-Control-Allow-Credentials": true,
            "Access-Control-Allow-Origin": "*",
            "X-Requested-With": "*"
        },
        body: body
    };
    return response;
};

When building the params you should add the content encoding, otherwise you're just uploading the text data:

const params = {
    Bucket: s3Bucket,
    Key: objectName,
    Body: objectData,
    ContentType: objectType,
    ContentEncoding: 'base64'
};
Edit:
Okay, I have checked the file; I think you might be misunderstanding what happens when you store the image as base64.
Windows (or a browser, for that matter) can't read a .jpg file whose contents are base64 text (as far as I know); it must be converted first. When a browser displays an image with a base64 source it handles this conversion on the fly, but the base64 data inside the "helloworld.jpg" container is useless in Windows without converting it.
There are two options: either decode the data once it reaches your server and upload the raw bytes directly, or add a layer in between that converts the image as it's requested.
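A minimal sketch of the first option, assuming the request body carries the image in a base64String field as in the question's handler:

// Decode the base64 payload into raw bytes before uploading
const data = JSON.parse(event.body);
const imageBuffer = Buffer.from(data.base64String, 'base64');

const params = {
    Bucket: s3Bucket,
    Key: objectName,
    Body: imageBuffer, // raw image bytes, not base64 text
    ContentType: "image/jpeg"
};
await s3.putObject(params).promise();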

The problem might be the format of the image passed in Body, as it is not being passed in the form expected by the s3.upload parameters (the Body key must be passed as a Buffer).
So the simple solution is to pass the Body as a buffer. If your file is present at some location on disk, don't pass it like this:
// Wrong way
const params = {
    Bucket: 'Your-Bucket-Name',
    Key: 'abc.png', // destFileName, i.e. the name of the file to be saved in the S3 bucket
    Body: 'Path-To-File'
}
The problem here is that the file gets uploaded as raw text, which is a corrupted format and will not be readable by the OS after downloading.
So, to get it working, pass it like this:
// Correct way according to the aws-sdk library
const fs = require('fs');
const imageData = fs.readFileSync('Path-To-File'); // returns a Buffer

const params = {
    Bucket: 'Your-Bucket-Name',
    Key: 'abc.png', // destFileName, i.e. the name of the file to be saved in the S3 bucket
    Body: imageData // image buffer
}

const uploadedFile = await s3.upload(params).promise();
Note: at the time of answering I was using "aws-sdk": "^2.1025.0".
Hope this helps you or somebody else. Thanks!

I got it working by adding the base64 string to the JSON body (shown below) and then passing
let decodedImage = Buffer.from(encodedImage, 'base64');
as the Body param.
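The JSON body sent from Postman looked roughly like this (the base64Data field name matches what the updated handler reads; the string is truncated, and the username goes in a query string parameter):

{
    "base64Data": "/9j/4AAQSkZJRgABAQEASABIAAD..."
}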
updated upload.js
const AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event) => {
    let encodedImage = JSON.parse(event.body).base64Data;
    let decodedImage = Buffer.from(encodedImage, 'base64');
    var filePath = "user-data/" + event.queryStringParameters.username + ".jpg";

    var params = {
        "Body": decodedImage,
        "Bucket": process.env.UploadBucket,
        "Key": filePath
    };

    try {
        let uploadOutput = await s3.upload(params).promise();
        let response = {
            "statusCode": 200,
            "body": JSON.stringify(uploadOutput),
            "isBase64Encoded": false
        };
        return response;
    } catch (err) {
        let response = {
            "statusCode": 500,
            "body": JSON.stringify(err),
            "isBase64Encoded": false
        };
        return response;
    }
};
I found this article to be super helpful

Related

uploading a uri or should it be converted?

I am sending image data from my React Native application to my Node.js backend, which I want to upload to S3. I want to know exactly which format I must convert the data to in order to upload it to S3. Below is the form data which I am logging in my backend at the moment.
[
  'file',
  {
    uri: 'file:///var/mobile/Containers/Data/Application/CA974BC6-6943-4135-89DE-235BC593A54F/Library/Caches/ExponentExperienceData/%2540lb2020%252Fmy/ImagePicker/D7119C77-60D0-46CC-A194-4F1FDE0D9A3D.jpg',
    type: 'image/jpeg',
    name: 'hi.jpg'
  }
]
My backend also has the code below. Would making the above data equal to file work? If not, suggestions will be appreciated.
const params = {
    Bucket: "myarrowbucket", // bucket you want to upload to
    Key: "filename" + ".png",
    Body: file,
    ContentType: 'image/png',
    ACL: "public-read",
};
I have tried uploading, but the image doesn't open correctly on S3, or it gives me Error: Unsupported body payload object.
Updated code ("no path found" error):
app.post("/upload", async (req, res) => {
const uri = (req.body._parts[0][1].uri)
const file = uri.substring(7);
const fileStream = fs.createReadStream(file);
const params = {
Bucket:"myarrowbucket", // bucket you want to upload to
Key: "filename"+".png",
Body: fileStream,
ContentType:'image/png',
ACL: "public-read",
};
const data = await client.upload(params).promise();
return data.Location; // returns the url location
});
> I have tried uploading and the image doesn't open correctly on S3 or gives me Error: Unsupported body payload object
You need to provide the actual file contents (a Buffer or stream) to the S3 client, not a path or URI string.
app.post("/upload", fileUpload(), async (req, res) => {
const uri = (req.body._parts[0][1].uri)
const file = uri.substring(7);
const params = {
Bucket:"myarrowbucket", // bucket you want to upload to
Key: "filename"+".png",
Body: Buffer.from(req.files[0].data, 'binary'), <-- PROVIDE DATA FROM FORM-DATA
ACL: "public-read",
};
const data = await client.upload(params).promise();
return data.Location; // returns the url location
});
You can use a library like form-data on the client, and multipart middleware (the fileUpload() above) on the server, to handle the form-data conversion.
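For completeness, a minimal sketch of the client side, assuming the React Native image-picker result from the question and an /upload endpoint like the one above (the backend URL is a placeholder):

// Build multipart form-data from the picker result and POST it to the backend;
// React Native's FormData accepts a { uri, type, name } object for files
const formData = new FormData();
formData.append('file', {
    uri: pickerResult.uri, // the file:// URI from the image picker
    type: 'image/jpeg',
    name: 'hi.jpg',
});

await fetch('https://your-backend.example.com/upload', {
    method: 'POST',
    body: formData,
    headers: { 'Content-Type': 'multipart/form-data' },
});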

IPFS Pinata service not accepting file

I have the code shown below that uploads files from the browser and saves them on the server. Once a file has been saved to the server, I want the server to connect to the Pinata API so the file can also be saved to the IPFS node.
let data = new FormData();
const fileBuffer = Buffer.from(`./public/files/${fileName}`, 'utf-8');
data.append('file', fileBuffer, `${fileName}`);

axios.post('https://api.pinata.cloud/pinning/pinJSONToIPFS',
    data,
    {
        headers: {
            'Content-Type': `multipart/form-data; boundary=${data._boundary}`,
            'pinata_api_key': pinataApiKey,
            'pinata_secret_api_key': pinataSecretApiKey
        }
    }
).then(function (response) {
    console.log("FILE UPLOADED TO IPFS NODE", fileName);
    console.log(response);
}).catch(function (error) {
    console.log("FILE WASN'T UPLOADED TO IPFS NODE", fileName);
    console.log(error);
});
The issue I'm having is that after creating a buffer of my file and wrapping it in a FormData, the Pinata API returns an error:

data: {
    error: 'This API endpoint requires valid JSON, and a JSON content-type'
}

If I convert the data to a string with JSON.stringify(data) and change the content-type to application/json, the file buffer is uploaded, but only as a string.
I hope I explained it well enough to get a solution. Thanks.
It looks like you're attempting to upload a file to the pinJSONToIPFS endpoint, which is intended purely for JSON passed in via the request body.
In your situation I would recommend using Pinata's pinFileToIPFS endpoint.
Here's some example code based on their documentation that may be of help:
//imports needed for this function
const axios = require('axios');
const fs = require('fs');
const FormData = require('form-data');

export const pinFileToIPFS = (pinataApiKey, pinataSecretApiKey) => {
    const url = `https://api.pinata.cloud/pinning/pinFileToIPFS`;

    //we gather a local file for this example, but any valid readStream source will work here.
    let data = new FormData();
    data.append('file', fs.createReadStream('./yourfile.png'));

    return axios.post(url,
        data,
        {
            maxContentLength: 'Infinity', //this is needed to prevent axios from erroring out with large files
            headers: {
                'Content-Type': `multipart/form-data; boundary=${data._boundary}`,
                'pinata_api_key': pinataApiKey,
                'pinata_secret_api_key': pinataSecretApiKey
            }
        }
    ).then(function (response) {
        //handle response here
    }).catch(function (error) {
        //handle error here
    });
};
The proper code to pin any file to IPFS is below.
Apparently, even Pinata support staff didn't know this.
You need to pass an object with a filepath property as the last parameter to data.append. The name doesn't matter: it can be a duplicate, it can be the same as others, or it can be unique.
const url = "https://api.pinata.cloud/pinning/pinFileToIPFS";
const fileContents = Buffer.from(bytes);
const data = new FormData();
data.append("file", fileContents, {filepath: "anyname"});
const result = await axios
.post(url, data, {
maxContentLength: -1,
headers: {
"Content-Type": `multipart/form-data; boundary=${data._boundary}`,
"pinata_api_key": userApiKey,
"pinata_secret_api_key": userApiSecret,
"path": "somename"
}
});
Code to upload a file to IPFS using Pinata.
There are two methods available to upload files/images to Pinata: one with the Pinata SDK and the other with the pinFileToIPFS endpoint.
If you are uploading files from Next.js (client side), you cannot read your image with fs.createReadStream or Buffer.from, because those APIs are only available on the Node side. So if you want to upload the file from Next.js to Pinata, you can use this code:
// put the file into form data
const data = new FormData();
data.append("title", file.name);
data.append("file", file);

const url = "https://api.pinata.cloud/pinning/pinFileToIPFS";

// pass the form data in the post request
const result = await axios.post(url, data, {
    maxContentLength: -1,
    headers: {
        "Content-Type": `multipart/form-data; boundary=${data._boundary}`,
        pinata_api_key: "your_pinata_key",
        pinata_secret_api_key: "your_pinata_secret",
        path: "somename",
    },
});
console.log("RESULT", result);
This will upload a file to IPFS under the path ipfs://{cid}/images/{fileId}:
const PINATA_BASE_URL = "https://api.pinata.cloud";
const PINATA_PIN_URI = "/pinning/pinFileToIPFS";

const fileExt = file.type.split("/")[1];
let nftId = 1;

// creates a 64-character string '0000...0001' to follow the ERC-1155 standard
const paddedId = createPaddedHex(nftId);
const ipfsFileId = `${paddedId}.${fileExt}`;
const ipfsImageFilePath = `/images/${ipfsFileId}`;

const fileUploadData = new FormData();
// this uploads the file and renames the uploaded file to the path created above
fileUploadData.append("file", file, ipfsImageFilePath);
fileUploadData.append(
    "pinataOptions",
    '{"cidVersion": 1, "wrapWithDirectory": true}'
);
fileUploadData.append(
    "pinataMetadata",
    `{"name": "${ipfsImageFilePath}", "keyvalues": {"company": "Pinata"}}`
);

const pinataUploadRes = await axios.post(
    PINATA_BASE_URL + PINATA_PIN_URI,
    fileUploadData,
    {
        headers: {
            Authorization: `Bearer ${PINATA_JWT}`,
        },
    }
);

const ipfsCID = pinataUploadRes.data.IpfsHash;
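If you then need the directory-wrapped URI described above, it can be assembled from the returned CID (a small follow-on sketch using the variables defined above):

// e.g. ipfs://{cid}/images/{fileId}
const ipfsImageUri = `ipfs://${ipfsCID}${ipfsImageFilePath}`;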

How to upload base64 encoded pdf directly to s3 with nodejs/aws-sdk?

I'm attempting to upload a base64 encoded pdf to S3 with the following code without having to write the file to the filesystem.
const AWS = require('aws-sdk');
const S3 = new AWS.S3();

exports.putBase64 = async (object_name, buffer, bucket) => {
    const params = {
        Key: object_name,
        Body: buffer,
        Bucket: bucket,
        ContentEncoding: 'base64',
        ContentType: 'application/pdf'
    };
    const response = await S3.upload(params).promise();
    return response;
};
Where buffer is a blank pdf encoded to base64. When attempting to open the file on S3, I get "We can't open this file. Something went wrong."
However, if I write the base64 encoding into a file and THEN upload it, it works:
await fs.writeFileSync(`./somepdf.pdf`, base_64, 'base64');

exports.put = async (object_name, file_location, bucket, content_type) => {
    const file_content = fs.readFileSync(file_location);
    const params = {
        Key: object_name,
        Body: file_content,
        Bucket: bucket,
        ContentType: 'application/pdf'
    };
    const response = await S3.upload(params).promise();
    return response;
};
I notice that when I upload the file the second way (written to disk first), viewing it through a text editor shows it is no longer base64 encoded, but the file uploaded directly with ContentEncoding: 'base64' still shows the base64 text. I attempted to convert the base64 to a blob using atob, but that yielded the same results, so I assume there's a parameter or header I may be missing.
I had the same issue and managed to solve it by making this change:
const AWS = require('aws-sdk');
const S3 = new AWS.S3();

exports.putBase64 = async (object_name, buffer, bucket) => {
    const params = {
        Key: object_name,
        Body: Buffer.from(buffer, 'base64'), // <---------
        Bucket: bucket,
        ContentType: 'application/pdf'
    };
    return await S3.upload(params).promise();
};
If the base64 string includes a data-URI prefix, strip it first to create the value you pass in:

const newBuffer = buffer.replace(/^data:.+;base64,/, "");

Now use this new value in params. This should work!
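Putting the two suggestions together, a minimal sketch (base64Pdf is a placeholder name for whatever base64 string you receive):

// Strip an optional data-URI prefix, then decode the base64 into raw bytes
const base64Body = base64Pdf.replace(/^data:.+;base64,/, "");
const params = {
    Key: object_name,
    Body: Buffer.from(base64Body, 'base64'),
    Bucket: bucket,
    ContentType: 'application/pdf'
};
await S3.upload(params).promise();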

Corrupted image on uploading image to AWS-S3 via signed url

I'm trying to upload images to AWS S3 via a signed URL from a NodeJS server (not from a browser). The image to upload has been generated by NodeJS. I'm getting the signed URL from AWS and succeeding in uploading the file to S3.
But my image is corrupted. For some reason, S3 is adding some headers to my image (see the comparison attached).
What am I doing wrong?
getting the signed url:
try {
    var params = {
        Bucket: bucketName,
        Key: 'FILE_NAME.png',
        Expires: 60
    };
    const url = await s3.getSignedUrlPromise('putObject', params);
    return url;
} catch (err) {
    throw err;
}
uploading to s3
var stats = fs.statSync(filePath);
var fileSizeInBytes = stats["size"];
const imageBuffer = fs.readFileSync(filePath);

var formData = {
    'file': {
        value: imageBuffer,
        options: {
            filename: 'FILE_NAME.png'
        }
    }
};

request({
    method: 'put',
    url,
    headers: {
        'Content-Length': fileSizeInBytes,
        'Content-MD': md5(imageBuffer)
    },
    formData
}, function (err, res, body) {
    console.log('body', body);
});
A comparison between the actual image and the image uploaded to S3 shows that some extra headers were added to the object:
I know this is old, but I struggled with the same issue for a while. When uploading using a pre-signed URL, DO NOT use new FormData();
One thing I noticed is that all of my files on S3 were exactly 2 KB larger than the originals.
<input type="file" id="upload"/>
var upload = document.getElementById('upload');
var file = upload.files[0];
//COMMENTED OUT BECAUSE IT WAS CAUSING THE ISSUE
//const formData = new FormData();
//formData.append("file", file);
// Assuming axios
const config = {
onUploadProgress: function(progressEvent) {
var percentCompleted = Math.round(
(progressEvent.loaded * 100) / progressEvent.total
);
console.log(percentCompleted);
},
header: {
'Content-Type': file.type
}
};
axios.put(S3SignedPutURL, file, config)
.then(async res => {
callback({res, key})
})
.catch(err => {
console.log(err);
})
I followed the above solution for React JS.
What I was doing before was passing the image through URL.createObjectURL and then passing that result to the API body:

if (e.target.files && e.target.files[0]) {
    let img = e.target.files[0];
    setImage(URL.createObjectURL(img)) // wrong: this stores an object URL string, not the file
}

Correct way:

if (e.target.files && e.target.files[0]) {
    let img = e.target.files[0];
    setImage(img) // store the File object itself
}

Works for me, thanks Sam Munroe.
Came here in 2023; I was facing the same problem using form data, but in Postman, before handing it off to the front-end department.
To handle it in Postman, set the request body type to binary and attach the file.
And don't forget to add the proper headers.
Try specifying the content type in the request as Content-Type: multipart/form-data.
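Since the original question uploads from Node rather than from a browser, the same fix applies there: PUT the raw file bytes to the pre-signed URL without any form-data wrapper. A minimal sketch, assuming axios is available and url is the signed URL returned by getSignedUrlPromise:

const fs = require('fs');
const axios = require('axios');

const imageBuffer = fs.readFileSync(filePath);

// PUT the raw bytes; with no formData wrapper, nothing extra is prepended to the object
await axios.put(url, imageBuffer, {
    headers: {
        'Content-Type': 'image/png',
        'Content-Length': imageBuffer.length
    }
});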

Why is my S3 upload not uploading correctly?

I upload an image file using the following format:
var body = fs.createReadStream(tempPath).pipe(zlib.createGzip());
var s3obj = new AWS.S3({ params: { Bucket: myBucket, Key: myKey } });

var params = {
    Body: body,
    ACL: 'public-read',
    ContentType: 'image/png'
};

s3obj.upload(params, function (err, data) {
    if (err) console.log("An error occurred with S3 fig upload: ", err);
    console.log("Uploaded the image file at: ", data.Location);
});
The image successfully uploads to my S3 bucket (there are no error messages and I see it in the S3 console), but when I try to display it on my website it returns a broken img icon. When I download the image using the S3 console file downloader, I am unable to open it, with the error that the file is "damaged or corrupted".
If I upload a file manually using the S3-console, I can correctly display it on my website, so I'm pretty sure there's something wrong with how I'm uploading.
What is going wrong?
I eventually found the answer to my question. I needed to pass one more parameter because the file is gzip'd (from using var body = ...zlib.createGzip()). This fixed my problem:
var params = {
    Body: body,
    ACL: 'public-read',
    ContentType: 'image/png',
    ContentEncoding: 'gzip'
};
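As an aside (not part of the original answer): image formats such as PNG and JPEG are already compressed, so an alternative is to skip the gzip step and upload the file stream as-is, which avoids the encoding mismatch entirely:

// Upload the raw file stream; no ContentEncoding needed
var params = {
    Body: fs.createReadStream(tempPath),
    ACL: 'public-read',
    ContentType: 'image/png'
};
s3obj.upload(params, function (err, data) {
    if (err) console.log("Upload error: ", err);
    else console.log("Uploaded the image file at: ", data.Location);
});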
There's a very nice node module, s3-upload-stream, for uploading (and first compressing) images to S3. Here's their example code, which is very well documented:
var AWS = require('aws-sdk'),
    zlib = require('zlib'),
    fs = require('fs'),
    s3Stream = require('s3-upload-stream')(new AWS.S3());

// Set the client to be used for the upload.
AWS.config.loadFromPath('./config.json');
// or do AWS.config.update({accessKeyId: 'akid', secretAccessKey: 'secret'});

// Create the streams
var read = fs.createReadStream('/path/to/a/file');
var compress = zlib.createGzip();
var upload = s3Stream.upload({
    "Bucket": "bucket-name",
    "Key": "key-name"
});

// Optional configuration
upload.maxPartSize(20971520); // 20 MB
upload.concurrentParts(5);

// Handle errors.
upload.on('error', function (error) {
    console.log(error);
});

/* Handle progress. Example details object:
   { ETag: '"f9ef956c83756a80ad62f54ae5e7d34b"',
     PartNumber: 5,
     receivedSize: 29671068,
     uploadedSize: 29671068 }
*/
upload.on('part', function (details) {
    console.log(details);
});

/* Handle upload completion. Example details object:
   { Location: 'https://bucketName.s3.amazonaws.com/filename.ext',
     Bucket: 'bucketName',
     Key: 'filename.ext',
     ETag: '"bf2acbedf84207d696c8da7dbb205b9f-5"' }
*/
upload.on('uploaded', function (details) {
    console.log(details);
});

// Pipe the incoming filestream through compression, and up to S3.
read.pipe(compress).pipe(upload);
