Axios get a file from URL and upload to s3 - node.js

I'm trying to get files from a site using axios.get, and then upload them directly to S3. However, the files are corrupted or not encoded properly and can't be opened after upload. File types range from .jpg and .png to .pdf. Here is my code:
axios.get(URL, {
  responseEncoding: 'binary',
  responseType: 'document',
}).then((response) => {
  return new Promise((resolve, reject) => {
    const s3Bucket = nconf.get('AWS_S3_BUCKET');
    s3.upload({
      'ACL': 'public-read',
      'Body': response.data,
      'Bucket': s3Bucket,
      'Key': `static/${filePath}/${fileManaged.get('filename')}`,
    }, function(err) {
      if (err) {
        return reject(err);
      }
    });
  });
});
I've tried modifying responseType to arraybuffer and creating a buffer using Buffer.from(response.data, 'binary').toString('base64'), to no avail. What am I missing?

I was able to get it working by using an arraybuffer and the .putObject function instead of .upload
axios.get(encodeURI(url), {
  responseType: 'arraybuffer',
}).then((response) => {
  s3.putObject({
    'ACL': 'public-read',
    'Body': response.data,
    'Bucket': s3Bucket,
    'Key': `static/${filePath}/${fileManaged.get('filename')}`,
  }, function(err) {
    if (err) {
      console.error(err);
    }
  });
});

Axios encodes the response body as utf8 by default.
You should use another library such as request.
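Alternatively, axios itself can be told to return the raw bytes by setting responseType: 'arraybuffer', which is what the other answers here rely on. A minimal sketch, with the bucket, key, and helper name as placeholders rather than anything from the original post:

const AWS = require('aws-sdk');
const axios = require('axios');

const s3 = new AWS.S3();

async function copyUrlToS3(url, bucket, key) {
  // In Node, response.data is a Buffer when responseType is 'arraybuffer',
  // so it can be passed to the SDK as Body without re-encoding.
  const response = await axios.get(encodeURI(url), { responseType: 'arraybuffer' });
  return s3.upload({
    Bucket: bucket,
    Key: key,
    Body: response.data,
    ContentType: response.headers['content-type'],
  }).promise();
}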

The response from John Xu is correct, but in my case I had to add Buffer.from(image.data, 'utf8') as stated above in order to get the correct buffer, similar to a request response. Here is my code:
const AWS = require('aws-sdk');
const axios = require('axios');

/**
 * uploadFile saves a file into s3
 * @param {*} fullname absolute path and file name of the file to be uploaded
 * @param {*} filecontent buffer of the file
 */
var uploadFile = async function (s3_creds, fullname, filecontent, filetype) {
    const s3 = new AWS.S3(s3_creds);
    return new Promise((resolve, reject) => {
        // Add a file to a Space
        var params = {
            Key: fullname, // absolute path of the file
            Body: filecontent,
            Bucket: "docserpcloud",
            ACL: "public-read", // or private
            ContentEncoding: 'binary',
            ContentType: filetype
        };
        // console.log(params)
        s3.putObject(params, function (err, data) {
            if (err) {
                console.log(err, err.stack);
                reject(err)
            } else {
                resolve(data);
                console.log(data);
            }
        });
    })
}

var getFilefromURL = async function (imageuri) {
    // console.log (imageuri)
    return new Promise((resolve, reject) => {
        try {
            axios.get(encodeURI(imageuri), {
                responseType: "arraybuffer"
            }).then((response) => {
                resolve(response)
            })
        } catch (err) {
            reject(err)
        }
    })
}

/**
 * saveFileFromUrl gets a file from a URL and saves a copy on the s3 bucket
 * @param {*} imageuri full URL to an image
 * @param {*} fullname absolute path and filename of the file to be written on s3
 */
var saveFileFromUrl = async function (s3_creds, imageuri, fullname) {
    return new Promise((resolve, reject) => {
        getFilefromURL(imageuri).then(image => {
            // console.log(image.res)
            uploadFile(s3_creds, fullname, Buffer.from(image.data, 'utf8'), image.headers['content-type']).then(s3response => {
                resolve(s3response)
            }).catch(err => {
                reject(err)
            })
        }).catch(err => {
            reject(err)
        })
    })
}

module.exports = {
    uploadFile: uploadFile,
    getFilefromURL: getFilefromURL,
    saveFileFromUrl: saveFileFromUrl
}

async function main() {
    try {
        var s3_creds = {
            "accessKeyId": "acessid",
            "endpoint": "xxxx.digitaloceanspaces.com",
            "secretAccessKey": "Vttkia0....."
        };
        await saveFileFromUrl(s3_creds, "https://gitlab.com/qtree/erpcloud_logos/-/raw/master/pdf_logo2.png?inline=true", 'media/pddd.png');
    } catch {}
}
main();
Update s3_creds to fit your credentials and run it to upload the PDF logo.
Regards,
Enrique


Upload a stream to s3

I read Pipe a stream to s3.upload(), but I'm having difficulty with it and am not sure it actually solves my problem, though I have tried.
What I am doing is a GET call to www.example.com. This returns a stream, and I want to upload that stream to S3.
Here's my attempt:
fetch('https://www.example.com/' + fileName, {
  method: 'GET',
  headers: {
    'Authorization': "Bearer " + myAccessToken,
  },
})
  .then(function(response) {
    return response.text();
  })
  .then(function(data) {
    uploadToS3(data)
  });
const uploadToS3 = (data) => {
  // Setting up S3 upload parameters
  const params = {
    Bucket: myBucket,
    Key: "fileName",
    Body: data
  };
  // Uploading files to the bucket
  s3.upload(params, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};
Output: File uploaded successfully. https://exampleBucket.s3.amazonaws.com/fileName.pdf
However, the uploaded file is blank.
I figured it out, but I did not keep using fetch. Instead I actually download the file, then upload it, then delete the file.
const fs = require('fs');
const axios = require('axios');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

function getNewFilesFromExampleDotCom(myAccessToken, fileName, fileKey) {
  let url2 = 'https://example.com' + fileKey;
  axios
    .get(url2, {
      headers: { 'Authorization': "Bearer " + myAccessToken },
      responseType: 'stream',
    })
    .then(response => {
      let file = fileName;
      // write the response stream to a local file
      response.data.pipe(fs.createWriteStream(file))
      let myFileInfo = [];
      if (myFileInfo.length > 0) {
        myFileInfo.splice(0, myFileInfo.length)
      }
      myFileInfo.push(file)
      processArray(myFileInfo)
      console.log(file + " saved")
    })
    .catch(error => console.log(error));
}

async function processArray(array) {
  for (const item of array) {
    await delayedLog(item);
  }
  console.log('Downloaded!');
  console.log('Uploading to s3!');
}

function delay() {
  return new Promise(resolve => setTimeout(resolve, 300));
}

async function delayedLog(item) {
  await delay();
  uploadFiles(item)
}

async function uploadFiles(file) {
  uploadToS3List(file)
  await new Promise((resolve, reject) => setTimeout(resolve, 1000));
  deleteMyFiles(file)
}

const uploadToS3List = (fileName) => {
  // Read content from the file
  const fileContent = fs.readFileSync(fileName);
  // Setting up S3 upload parameters
  const params = {
    Bucket: "myBucketName",
    Key: fileName,
    Body: fileContent
  };
  // Uploading files to the bucket
  s3.upload(params, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};

function deleteMyFiles(path) {
  fs.unlink(path, (err) => {
    console.log(path + " has been deleted")
    if (err) {
      console.error(err)
      return
    }
  })
}
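For reference, the approach described in "Pipe a stream to s3.upload()" also works without touching the disk: axios can return the response as a stream, and s3.upload (aws-sdk v2) accepts a readable stream as Body. A minimal sketch, with the helper name and bucket/key as placeholders:

const axios = require('axios');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function streamUrlToS3(url, accessToken, bucket, key) {
  const response = await axios.get(url, {
    headers: { 'Authorization': 'Bearer ' + accessToken },
    responseType: 'stream',
  });
  // s3.upload accepts a readable stream as Body and handles the chunked upload,
  // so no temporary file is needed.
  return s3.upload({ Bucket: bucket, Key: key, Body: response.data }).promise();
}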

SVG files uploaded from S3 not displaying properly

Hi, my SVG files display correctly when using src from a local folder, but when I use GetObject from AWS they don't display correctly.
I already set the ContentType to image/svg+xml, but for some reason when I check the network console, the response still comes back as application/octet-stream.
When I console.log the ContentType of the object returned by GetObject in my backend, it displays image/svg+xml.
I already checked the metadata in the AWS console and it is also image/svg+xml.
This is my upload helper function:
exports.uploadTos3withKey = (file, folder) => {
  return new Promise((resolve, reject) => {
    s3.upload(
      {
        Key: `${folder}/${file.name}`,
        Bucket: process.env.AWS_S3_BUCKETNAME,
        Body: fs.readFileSync(file.path),
        ACL: "public-read",
        ContentType: file.type,
      },
      (err, data) => {
        if (err) return reject(err);
        return resolve({
          bucket: data.Bucket,
          key: data.Key,
        });
      }
    );
  });
};
Here's my GET request handler that returns my S3 objects:
exports.display = async (req, res) => {
  const { key, bucket } = req.query;
  try {
    const data = await getObjectFromS3(key, bucket);
    if (data) {
      console.log(data.ContentType);
      res.setHeader("Content-Type", data.ContentType);
      res.send(data.Body);
    }
  } catch (e) {
    res.status(400).json({ e });
  }
};

exports.getObjectFromS3 = async (key, bucket) => {
  const params = {
    Bucket: bucket,
    Key: key,
  };
  const res = await new Promise((resolve, reject) => {
    s3.getObject(params, function (err, data) {
      err == null ? resolve(data) : reject(err);
    });
  });
  return res;
};
Here's the output of that console.log
image/svg+xml
GET /api/v1/media/display?bucket=mmg-bucket&key=hero/image/2022/2/mbc-media-group.svg 304 914.466 ms - -
Everything works well with .jpg and .png files; it only happens with SVG files.
(Screenshots attached: the AWS console metadata and the browser network console.)
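One way to confirm what S3 itself has stored for the object, independent of the Express route, is to request just the metadata with headObject. A minimal diagnostic sketch, assuming the same aws-sdk v2 s3 client and bucket/key values (not a fix from the original thread):

s3.headObject({ Bucket: bucket, Key: key }, (err, data) => {
  if (err) return console.error(err);
  // Should print "image/svg+xml" if the upload stored the intended metadata
  console.log(data.ContentType);
});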

Node.js upload Image Stream.Readable to S3

My lambda is triggered by a request from the browser. The browser sends an image as multipart/form-data.
The lambda uses busboy to parse the request:
function parseForm(event: IHttpEvent) {
  return new Promise(
    (resolve, reject) => {
      const busboy = new Busboy({
        headers: event.headers,
        limits: { files: 10 },
      });
      const imageResponse = new Map<string, IImageParseResponse>();
      busboy.on("file", (id, file, filename, encoding, mimeType) => {
        imageResponse.set(id, { file, filename, mimeType });
      });
      busboy.on("error", (error) => reject(`Parse error: ${error}`));
      busboy.on("finish", () => resolve(imageResponse));
      busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
      busboy.end();
    }
  );
}
Once I have parsed the request, I want to upload the file to AWS S3.
export async function handler(event: IHttpEvent) {
  var res = await parseForm(event);
  const s3 = new S3Client({ region: "eu-central-1" });
  for (const [k, v] of res) {
    console.log(`File ${v.filename} ${v.mimeType} streaming`);
    const stream = new Readable().wrap(v.file);
    const upload = new Upload({
      client: s3,
      params: {
        Key: v.filename,
        Bucket: "my-image-bucket",
        Body: stream,
        ContentType: v.mimeType,
      },
    });
    upload.on("httpUploadProgress", (p) => console.log(p));
    const result = await upload.done();
    console.log(result);
    return result;
  }
}
This does not work; the browser receives a 200 OK with a null body response. What confuses me even more is that console.log(result); does not log anything to the console.
Where is my mistake? I don't fully understand the mechanics of streams, but as far as I understand, streaming should be more memory-efficient. In the future I plan to upload multiple images at once, and in order to save cost I want my method to be as efficient as possible.
In general I made 2 mistakes:
I tried to upload the stream when it had already been read to the end by busboy.
I did not properly wait for the completion of the upload to S3 before terminating the function.
In the end I ended up with the following:
// Imports assumed by this snippet (busboy pre-1.0 constructor API, AWS SDK v3);
// IHttpEvent is the author's own event type and is not shown here.
import Busboy from "busboy";
import { Readable } from "stream";
import { S3Client, ServiceOutputTypes } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";

const s3 = new S3Client({ region: "eu-central-1" });
const { BUCKET_NAME, MAX_IMAGE_SIZE } = process.env;

export async function handler(event: IHttpEvent) {
  const results = await parseForm(event);
  const response = [];
  for (const r of results) {
    if (r.status === "fulfilled") {
      const value: any = r.value.result;
      response.push({
        id: r.value.id,
        key: value.Key,
        url: value.Location,
      });
    }
    if (r.status === "rejected")
      response.push({ id: r.reason.id, reason: r.reason.error });
  }
  return response;
}

async function doneHandler(
  id: string,
  uploadMap: Map<string, Upload>
): Promise<{ id: string; result: ServiceOutputTypes }> {
  try {
    var result = await uploadMap.get(id).done();
  } catch (e: any) {
    var error = e;
  } finally {
    uploadMap.delete(id);
    if (error) throw { id, error };
    return { id, result };
  }
}

function parseForm(event: IHttpEvent) {
  return new Promise((resolve, reject) => {
    const busboy = new Busboy({
      headers: event.headers,
      limits: { files: 1, fileSize: parseInt(MAX_IMAGE_SIZE) },
    });
    const responses: Promise<{
      id: string;
      result: ServiceOutputTypes;
    }>[] = [];
    const uploads = new Map<string, Upload>();
    busboy.on("file", (id, file, filename, encoding, mimeType) => {
      uploads.set(
        id,
        new Upload({
          client: s3,
          params: {
            Bucket: BUCKET_NAME,
            Body: new Readable().wrap(file),
            Key: filename,
            ContentType: mimeType,
            ContentEncoding: encoding,
          },
        })
      );
      responses.push(doneHandler(id, uploads));
      file.on("limit", async () => {
        const aborts = [];
        for (const [k, upload] of uploads) {
          aborts.push(upload.abort());
        }
        await Promise.all(aborts);
        return reject(new Error("File is too big."));
      });
    });
    busboy.on("error", (error: any) => {
      reject(new Error(`Parse error: ${error}`));
    });
    busboy.on("finish", async () => {
      const res = await Promise.allSettled(responses);
      resolve(res);
    });
    busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
    busboy.end();
  });
}
This solution also handles file limits and tries to abort all pending uploads to S3.

Multiple file upload to S3 with Node.js & Busboy

I'm trying to implement an API endpoint that allows for multiple file uploads.
I don't want to write any files to disk; I want to buffer them and pipe them to S3.
Here's my code for uploading a single file. Once I attempt to post multiple files to the endpoint in route.js, it doesn't work.
route.js - I'll keep this as framework-agnostic as possible
import Busboy from 'busboy'
// or const Busboy = require('busboy')

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    let chunks = []
    let fileName, fileType, fileEnc
    form.on('file', (field, file, filename, enc, mime) => {
      // capture the metadata so it is still in scope in the 'finish' handler
      fileName = filename
      fileType = mime
      fileEnc = enc
      file.on('data', data => {
        chunks.push(data)
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      const buf = Buffer.concat(chunks)
      resolve({
        fileBuffer: buf,
        fileType,
        fileName,
        fileEnc,
      })
    })
    req.pipe(form)
  })
}
export default async (req, res) => {
  // or module.exports = async (req, res) => {
  try {
    const { fileBuffer, ...fileParams } = await parseForm(req)
    const result = await uploadFile(fileBuffer, fileParams)
    res.status(200).json({ success: true, fileUrl: result.Location })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')

const s3 = new S3()

export default (buffer, fileParams) => {
  // or module.exports = (buffer, fileParams) => {
  const params = {
    Bucket: 'my-s3-bucket',
    Key: fileParams.fileName,
    Body: buffer,
    ContentType: fileParams.fileType,
    ContentEncoding: fileParams.fileEnc,
  }
  return s3.upload(params).promise()
}
I couldn't find a lot of documentation for this but I think I've patched together a solution.
Most implementations appear to write the file to disk before uploading it to S3, but I wanted to be able to buffer the files and upload to S3 without writing to disk.
I created this implementation that could handle a single file upload, but when I attempted to provide multiple files, it merged the buffers together into one file.
The one limitation I can't seem to overcome is the field name. For example, you could set up the FormData() like this:
const formData = new FormData()
formData.append('file[]', form.firstFile[0])
formData.append('file[]', form.secondFile[0])
formData.append('file[]', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
This structure is laid out in the FormData.append() MDN example. However, I'm not certain how to process that format on the server. In the end, I set up my FormData() like this:
Form Data
const formData = new FormData()
formData.append('file1', form.firstFile[0])
formData.append('file2', form.secondFile[0])
formData.append('file3', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
As far as I can tell, this isn't explicitly wrong, but it's not the preferred method.
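For what it's worth, busboy emits a separate 'file' event for every uploaded part even when the parts share a field name, so the file[] convention can be handled by collecting chunks per file rather than per field. A minimal sketch of that variation of the parseForm above (illustrative only, not part of the original answer):

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const files = []
    form.on('file', (field, file, filename, enc, mime) => {
      const chunks = [] // one chunk array per file, regardless of the field name
      file.on('data', data => chunks.push(data))
      file.on('end', () => {
        files.push({
          fileBuffer: Buffer.concat(chunks),
          fileType: mime,
          fileName: filename,
          fileEnc: enc,
        })
      })
    })
    form.on('error', reject)
    form.on('finish', () => resolve(files))
    req.pipe(form)
  })
}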
Here's my updated code
route.js
import Busboy from 'busboy'
// or const Busboy = require('busboy')

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const files = [] // create an empty array to hold the processed files
    const buffers = {} // create an empty object to contain the buffers
    form.on('file', (field, file, filename, enc, mime) => {
      buffers[field] = [] // add a new key to the buffers object
      file.on('data', data => {
        buffers[field].push(data)
      })
      file.on('end', () => {
        files.push({
          fileBuffer: Buffer.concat(buffers[field]),
          fileType: mime,
          fileName: filename,
          fileEnc: enc,
        })
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      resolve(files)
    })
    req.pipe(form) // pipe the request to the form handler
  })
}

export default async (req, res) => {
  // or module.exports = async (req, res) => {
  try {
    const files = await parseForm(req)
    const fileUrls = []
    for (const file of files) {
      const { fileBuffer, ...fileParams } = file
      const result = await uploadFile(fileBuffer, fileParams)
      fileUrls.push({ filename: result.key, url: result.Location })
    }
    res.status(200).json({ success: true, fileUrls })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
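Since uploadFile returns a promise, the loop in the handler above could also run the uploads in parallel with Promise.all. An optional variation of the try block, not part of the original answer:

const files = await parseForm(req)
const fileUrls = await Promise.all(
  files.map(async ({ fileBuffer, ...fileParams }) => {
    const result = await uploadFile(fileBuffer, fileParams)
    return { filename: result.key, url: result.Location }
  })
)
res.status(200).json({ success: true, fileUrls })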
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')

const s3 = new S3()

export default (buffer, fileParams) => {
  // or module.exports = (buffer, fileParams) => {
  const params = {
    Bucket: 'my-s3-bucket',
    Key: fileParams.fileName,
    Body: buffer,
    ContentType: fileParams.fileType,
    ContentEncoding: fileParams.fileEnc,
  }
  return s3.upload(params).promise()
}

Uploading PDF Content Into An S3 Bucket

I'm trying to download PDF content from a remote location and upload the content into S3 as a .pdf file. I'm using Node.js, in the context of an AWS Lambda. The s3.putObject call completes successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
const request = require('request')
const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
  url: viewUrl,
  headers: {
    'Content-Type': 'application/pdf'
  }
};
request(options, function(err, res, body){
  if (err) { return console.log(err) }
  const base64data = new Buffer(body, 'binary');
  const params = {
    Bucket: "myS3bucket",
    Key: "my-pdf.pdf",
    ContentType: "application/pdf",
    Body: base64data,
    ACL: 'public-read'
  };
  s3.putObject(params, function(err, data) {
    if (err) {
      console.log(err);
    } else {
      callback(null, JSON.stringify(data))
    }
  })
})
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?
Can you try this code? :)
import AWS from 'aws-sdk'
const request = require('request')
const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
  // encoding: null keeps the response body as a raw Buffer instead of a utf8 string
  return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
    function(err, res, body) {
      if (err)
        return reject({ status: 500, error: err })
      return resolve({ status: 200, body: body })
    })
})

promise.then((pdf) => {
  if (pdf.status == 200) {
    console.log('uploading file..')
    s3.putObject({
      Bucket: process.env.bucket,
      Body: pdf.body,
      Key: 'my-pdf.pdf',
      ACL: 'public-read'
    }, (err, data) => {
      if (err)
        console.log(err)
      else
        console.log('uploaded')
    })
  }
})
I'll be attentive to any follow-up. Hope this helps you.
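As a side note, the request package has since been deprecated; the same download can be done with axios by asking for the raw bytes, mirroring the approach in the first question above. A minimal sketch, with the helper name, URL, bucket, and key as placeholders:

const AWS = require('aws-sdk');
const axios = require('axios');
const s3 = new AWS.S3();

async function uploadPdfFromUrl(url, bucket, key) {
  // responseType 'arraybuffer' yields a Buffer in Node, so no re-encoding is needed
  const response = await axios.get(url, { responseType: 'arraybuffer' });
  return s3.putObject({
    Bucket: bucket,
    Key: key,
    Body: response.data,
    ContentType: 'application/pdf',
    ACL: 'public-read',
  }).promise();
}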
