Angular 6 file upload - node.js

I have a REST API that uploads images to S3 and returns the response. The API works perfectly using Postman.
The problem arises when calling the API from the frontend. I am using Angular 6.
I am getting an Error: Unsupported content type: application/json error, even though I am setting the headers.
Here is my Angular 6 code.
export class UploadComponent {
  percentDone: number;
  uploadSuccess: boolean;

  constructor(private http: HttpClient) {}

  upload(file: File) {
    this.singleBasicUpload(file);
  }

  singleBasicUpload(file: File) {
    const headers = new HttpHeaders({
      'Content-Type': 'multipart/form-data',
    });
    const options = { headers: headers };
    this.http.post(`${BASE_URL}/upload`, file, options).subscribe(response => {
      console.log('response', response);
    });
  }
}
And here is my S3 code in the Node.js backend.
// AWS (aws-sdk), Busboy, UUID, and constants are assumed to be imported/defined elsewhere in this file.
AWS.config.update({
  accessKeyId: constants.IAM_USER_KEY,
  secretAccessKey: constants.IAM_USER_SECRET,
});

const BUCKET_NAME = constants.BUCKET_NAME;
const ACL = 'public-read';
const S3 = new AWS.S3();

export async function S3Upload(req, res) {
  const chunks = [];
  let fname;
  let fileType;
  let fileEncodingType;
  const busboy = new Busboy({
    headers: req.headers,
  });
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    fname = filename.replace(/ /g, '_');
    fileType = mimetype;
    fileEncodingType = encoding;
    file.on('data', data => {
      // collect the chunks here; they are concatenated into a single buffer later
      console.log(chunks.length);
      chunks.push(data);
    });
    file.on('end', () => {
      console.log(`File [${filename}] Finished`);
    });
  });
  busboy.on('finish', () => {
    const userId = UUID();
    const params = {
      Bucket: BUCKET_NAME, // your S3 bucket name
      Key: `${userId}-${fname}`,
      Body: Buffer.concat(chunks), // concatenating all chunks
      ACL,
      ContentEncoding: fileEncodingType, // optional
      ContentType: fileType, // required
    };
    // send the buffered data to S3
    S3.upload(params, (err, s3res) => {
      if (err) {
        res.send({
          err,
          status: 'error',
        });
      } else {
        return res.send({
          data: s3res,
          message: 'Image successfully uploaded.',
        });
      }
    });
  });
  req.pipe(busboy);
}
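For reference, this error is usually caused by setting the Content-Type header by hand on the frontend: a multipart body needs a boundary, which the browser only adds when it is allowed to set the header itself. A minimal sketch of that change to the component above, sending the file inside a FormData; the field name 'file' is arbitrary here, since the busboy handler above does not filter by fieldname:

singleBasicUpload(file: File) {
  const formData = new FormData();
  formData.append('file', file, file.name);
  // No explicit Content-Type header: the browser sets multipart/form-data with its boundary.
  this.http.post(`${BASE_URL}/upload`, formData).subscribe(response => {
    console.log('response', response);
  });
}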

Related

Getting Postman UnsupportedMediaType error when uploading files to S3 bucket?

I am trying to upload images to an AWS S3 bucket, but I get an Unsupported Media Type (415) error in Postman. I can't tell where it's going wrong. Could you help me out?
The Postman form-data request sends file, fileName, and id.
In the service.js file:
const aws = require('aws-sdk');
const fs = require('fs');

const bucketName = process.env.bucketName;

// Initiating S3 instance
const s3 = new aws.S3({
  secretAccessKey: process.env.bucketSecretAccessKey,
  accessKeyId: process.env.bucketAccessKeyId,
  region: process.env.region
});

// Options you can choose to set to accept files up to a certain size limit
// const options = {partSize: 10 * 1024 * 1024, queueSize: 1};

module.exports = {
  upload: async function(payload) {
    let fileContent = fs.readFileSync(payload.file)
    const params = {
      Bucket: bucketName,
      ContentType: 'image/jpeg',
      Key: payload.client_id/payload.familyName/payload.fileName,
      Body: fileContent
    };
    let fileResp = null;
    await s3.upload(params, function(err, data) {
      if (err) {
        console.log(err)
        throw err;
      }
      fileResp = data;
      console.log(`File uploaded successfully. ${data.Location}`);
    });
    return fileResp;
  },
};
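As an aside, if you do want the commented-out size/queue limits, aws-sdk's s3.upload accepts them as a second argument; a minimal sketch reusing the names above:

const options = { partSize: 10 * 1024 * 1024, queueSize: 1 };
s3.upload(params, options, function(err, data) {
  // same callback body as in upload() above
});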
The API handler called through the URL:
uploadImage: async function(req, h) {
  try {
    let responseFile = null;
    const uploadImage = await s3.upload(req.payload).then(async (resp) => {
      responseFile = { fileUrl: resp.Location };
      if (!!responseFile) {
        const updateFileName = await Family.updateOne({ _id: req.payload.id }, {
          image: req.payload.fileName,
          fileLocation: resp.Location
        });
        console.log(updateFileName)
      }
    }).catch((err) => {
      console.log(err)
      responseFile = err.message;
    });
  } catch (error) {
    console.error(error.message);
    return h.response({ error: responseMessages.internal_server_error }).code(500);
  }
},
})
Postman response:
{
  "statusCode": 415,
  "error": "Unsupported Media Type",
  "message": "Unsupported Media Type"
}
Use the following:
const params = {
  Bucket: bucketName,
  Key: `${payload.client_id}/${payload.familyName}/${payload.fileName}`,
  Body: fileContent
};
There are two issues: the Key path is not built correctly, and the Content-Type is not required. Follow the snippet above and try again.
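To make the first issue concrete, the / in the original Key expression is arithmetic division applied to strings, not a path separator, so the whole expression evaluates to NaN. A quick illustration with made-up payload values:

const payload = { client_id: 'c1', familyName: 'smith', fileName: 'photo.jpg' };
console.log(payload.client_id / payload.familyName / payload.fileName);        // NaN
console.log(`${payload.client_id}/${payload.familyName}/${payload.fileName}`); // "c1/smith/photo.jpg"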
payload: {
  output: 'stream',
  parse: true,
  allow: 'multipart/form-data',
  multipart: true,
  timeout: false,
},
I added this payload configuration to my route.js file, and then it worked for me.
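For context, here is a minimal sketch of where that payload block sits in a hapi route definition; the route path and method are assumptions, and uploadImage is the handler shown earlier:

server.route({
  method: 'POST',
  path: '/upload', // assumed path
  options: {
    payload: {
      output: 'stream',
      parse: true,
      allow: 'multipart/form-data',
      multipart: true,
      timeout: false,
    },
  },
  handler: uploadImage,
});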

Node.js upload Image Stream.Readable to S3

My lambda is triggered by a request from the browser. The browser sends an image as multipart/form-data.
The lambda uses busboy to parse the request:
function parseForm(event: IHttpEvent) {
  return new Promise(
    (resolve, reject) => {
      const busboy = new Busboy({
        headers: event.headers,
        limits: { files: 10 },
      });
      const imageResponse = new Map<string, IImageParseResponse>();
      busboy.on("file", (id, file, filename, encoding, mimeType) => {
        imageResponse.set(id, { file, filename, mimeType });
      });
      busboy.on("error", (error) => reject(`Parse error: ${error}`));
      busboy.on("finish", () => resolve(imageResponse));
      busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
      busboy.end();
    }
  );
}
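The IHttpEvent and IImageParseResponse types are not included in the question; a minimal sketch of what they might look like, inferred from how they are used (an assumption, not the actual definitions):

interface IHttpEvent {
  headers: { [name: string]: string };
  body: string;
  isBase64Encoded: boolean;
}

interface IImageParseResponse {
  file: NodeJS.ReadableStream; // the busboy file stream
  filename: string;
  mimeType: string;
}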
Once I have parsed the request, I want to upload the file to AWS S3.
export async function handler(event: IHttpEvent) {
  var res = await parseForm(event);
  const s3 = new S3Client({ region: "eu-central-1" });
  for (const [k, v] of res) {
    console.log(`File ${v.filename} ${v.mimeType} streaming`);
    const stream = new Readable().wrap(v.file);
    const upload = new Upload({
      client: s3,
      params: {
        Key: v.filename,
        Bucket: "my-image-bucket",
        Body: stream,
        ContentType: v.mimeType,
      },
    });
    upload.on("httpUploadProgress", (p) => console.log(p));
    const result = await upload.done();
    console.log(result);
    return result;
  }
}
This does not work; the browser receives a 200 OK with a null body. What confuses me even more is that console.log(result) never logs anything to the console.
Where is my mistake? I don't fully understand the mechanics of streams, but as far as I understand, streaming should be more memory-efficient. In the future I plan to upload multiple images at once, and to save cost I want the method to be as efficient as possible.
In general I made two mistakes:
I tried to upload the stream after busboy had already read it to the end.
I did not properly wait for the upload to S3 to complete before the function terminated.
In the end I ended up with the following:
// Assumed imports: S3Client and ServiceOutputTypes from @aws-sdk/client-s3,
// Upload from @aws-sdk/lib-storage, Busboy from busboy, Readable from stream.
const s3 = new S3Client({ region: "eu-central-1" });
const { BUCKET_NAME, MAX_IMAGE_SIZE } = process.env;

export async function handler(event: IHttpEvent) {
  const results = await parseForm(event);
  const response = [];
  for (const r of results) {
    if (r.status === "fulfilled") {
      const value: any = r.value.result;
      response.push({
        id: r.value.id,
        key: value.Key,
        url: value.Location,
      });
    }
    if (r.status === "rejected")
      response.push({ id: r.reason.id, reason: r.reason.error });
  }
  return response;
}

async function doneHandler(
  id: string,
  uploadMap: Map<string, Upload>
): Promise<{ id: string; result: ServiceOutputTypes }> {
  try {
    var result = await uploadMap.get(id).done();
  } catch (e: any) {
    var error = e;
  } finally {
    uploadMap.delete(id);
    if (error) throw { id, error };
    return { id, result };
  }
}

function parseForm(event: IHttpEvent) {
  return new Promise( (resolve, reject) => {
    const busboy = new Busboy({
      headers: event.headers,
      limits: { files: 1, fileSize: parseInt(MAX_IMAGE_SIZE) },
    });
    const responses: Promise<{
      id: string;
      result: ServiceOutputTypes;
    }>[] = [];
    const uploads = new Map<string, Upload>();
    busboy.on("file", (id, file, filename, encoding, mimeType) => {
      uploads.set(
        id,
        new Upload({
          client: s3,
          params: {
            Bucket: BUCKET_NAME,
            Body: new Readable().wrap(file),
            Key: filename,
            ContentType: mimeType,
            ContentEncoding: encoding,
          },
        })
      );
      responses.push(doneHandler(id, uploads));
      file.on("limit", async () => {
        const aborts = [];
        for (const [k, upload] of uploads) {
          aborts.push(upload.abort());
        }
        await Promise.all(aborts);
        return reject(new Error("File is too big."));
      });
    });
    busboy.on("error", (error: any) => {
      reject(new Error(`Parse error: ${error}`));
    });
    busboy.on("finish", async () => {
      const res = await Promise.allSettled(responses);
      resolve(res);
    });
    busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
    busboy.end();
  }
  );
}
This solution also handles file limits and tries to abort all pending uploads to S3.

Multiple file upload to S3 with Node.js & Busboy

I'm trying to implement an API endpoint that allows multiple file uploads.
I don't want to write any files to disk, but to buffer them and pipe them to S3.
Here's my code for uploading a single file. Once I attempt to post multiple files to the endpoint in route.js, it doesn't work.
route.js - I'll keep this as framework-agnostic as possible
import Busboy from 'busboy'
// or const Busboy = require('busboy')

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    let chunks = []
    let fileName, fileType, fileEnc // capture the metadata outside the 'file' handler so it is visible in 'finish'
    form.on('file', (field, file, filename, enc, mime) => {
      fileName = filename
      fileType = mime
      fileEnc = enc
      file.on('data', data => {
        chunks.push(data)
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      const buf = Buffer.concat(chunks)
      resolve({
        fileBuffer: buf,
        fileType: fileType,
        fileName: fileName,
        fileEnc: fileEnc,
      })
    })
    req.pipe(form)
  })
}
import uploadFile from './upload' // assuming upload.js (below) sits alongside route.js

export default async (req, res) => {
  // or module.exports = async (req, res) => {
  try {
    const { fileBuffer, ...fileParams } = await parseForm(req)
    const result = await uploadFile(fileBuffer, fileParams)
    res.status(200).json({ success: true, fileUrl: result.Location })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')

const s3 = new S3() // client used by s3.upload below

export default (buffer, fileParams) => {
  // or module.exports = (buffer, fileParams) => {
  const params = {
    Bucket: 'my-s3-bucket',
    Key: fileParams.fileName,
    Body: buffer,
    ContentType: fileParams.fileType,
    ContentEncoding: fileParams.fileEnc,
  }
  return s3.upload(params).promise()
}
I couldn't find a lot of documentation for this but I think I've patched together a solution.
Most implementations appear to write the file to disk before uploading it to S3, but I wanted to be able to buffer the files and upload to S3 without writing to disk.
I created this implementation that could handle a single file upload, but when I attempted to provide multiple files, it merged the buffers together into one file.
The one limitation I can't seem to overcome is the field name. For example, you could set up the FormData() like this:
const formData = new FormData()
formData.append('file[]', form.firstFile[0])
formData.append('file[]', form.secondFile[0])
formData.append('file[]', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
This structure is laid out in the FormData.append() MDN example. However, I'm not certain how to process that on the server. In the end, I set up my FormData() like this:
Form Data
const formData = new FormData()
formData.append('file1', form.firstFile[0])
formData.append('file2', form.secondFile[0])
formData.append('file3', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})
As far as I can tell, this isn't explicitly wrong, but it's not the preferred method.
Here's my updated code
route.js
import Busboy from 'busboy'
// or const Busboy = require('busboy')

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const files = [] // create an empty array to hold the processed files
    const buffers = {} // create an empty object to contain the buffers
    form.on('file', (field, file, filename, enc, mime) => {
      buffers[field] = [] // add a new key to the buffers object
      file.on('data', data => {
        buffers[field].push(data)
      })
      file.on('end', () => {
        files.push({
          fileBuffer: Buffer.concat(buffers[field]),
          fileType: mime,
          fileName: filename,
          fileEnc: enc,
        })
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      resolve(files)
    })
    req.pipe(form) // pipe the request to the form handler
  })
}
export default async (req, res) => {
  // or module.exports = async (req, res) => {
  try {
    const files = await parseForm(req)
    const fileUrls = []
    for (const file of files) {
      const { fileBuffer, ...fileParams } = file
      const result = await uploadFile(fileBuffer, fileParams)
      fileUrls.push({ filename: result.Key, url: result.Location })
    }
    res.status(200).json({ success: true, fileUrls })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')
export default (buffer, fileParams) => {
// or module.exports = (buffer, fileParams) => {
const params = {
Bucket: 'my-s3-bucket',
Key: fileParams.fileName,
Body: buffer,
ContentType: fileParams.fileType,
ContentEncoding: fileParams.fileEnc,
}
return s3.upload(params).promise()
}
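On the open question about the file[] field name: busboy emits a separate 'file' event for every part, even when all parts share the same field name, so the buffers only need to be scoped per part rather than keyed by field name. A hedged sketch of that variation of the 'file' handler above (not the original code):

form.on('file', (field, file, filename, enc, mime) => {
  const chunks = [] // one chunk list per part, regardless of the field name
  file.on('data', data => {
    chunks.push(data)
  })
  file.on('end', () => {
    files.push({
      fileBuffer: Buffer.concat(chunks),
      fileType: mime,
      fileName: filename,
      fileEnc: enc,
    })
  })
})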

Uploading PDF Content Into An S3 Bucket

I'm trying to download PDF content from a remote location and upload that content into S3 as a PDF file. I'm using Node.js, in the context of an AWS Lambda. The s3.putObject call resolves successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
// The s3 client and the Lambda callback are assumed to be defined elsewhere in the handler.
const request = require('request')

const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
  url: viewUrl,
  headers: {
    'Content-Type': 'application/pdf'
  }
};

request(options, function(err, res, body) {
  if (err) { return console.log(err) }
  const base64data = new Buffer(body, 'binary');
  const params = {
    Bucket: "myS3bucket",
    Key: "my-pdf.pdf",
    ContentType: "application/pdf",
    Body: base64data,
    ACL: 'public-read'
  };
  s3.putObject(params, function(err, data) {
    if (err) {
      console.log(err);
    } else {
      callback(null, JSON.stringify(data))
    }
  })
})
When I test the URL in Postman, it returns the PDF with data included. Any idea why the Node.js code isn't doing the same thing?
Can you try this code? :)
import AWS from 'aws-sdk'
const request = require('request')

const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
  return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
    function(err, res, body) {
      if (err)
        return reject({ status: 500, error: err })
      return resolve({ status: 200, body: body })
    })
})

promise.then((pdf) => {
  if (pdf.status == 200) {
    console.log('uploading file..')
    s3.putObject({
      Bucket: process.env.bucket,
      Body: pdf.body,
      Key: 'my-pdf.pdf',
      ACL: 'public-read'
    }, (err, data) => {
      if (err)
        console.log(err)
      else
        console.log('uploaded')
    })
  }
})
The key difference from your code is encoding: null in the request options, which makes request return the body as a binary Buffer instead of decoding it as a UTF-8 string, so the PDF bytes are not corrupted before they reach S3. I'll be attentive to any questions; hope this helps.

Is there a way to upload to S3 from a url using node.js?

I found this question, but it doesn't seem to answer my question as I think it's still talking about local files.
I want to take, say, an imgur.com link and upload it to S3 using Node. Is knox capable of this, or do I need to use something else?
Not sure where to get started.
I'm not using knox but the official AWS SDK for JavaScript in Node.js. I issue a request to a URL with {encoding: null} in order to retrieve the data as a Buffer, which can be passed directly to the Body parameter for s3.putObject(). Below is an example of putting a remote image in a bucket with aws-sdk and request.
var AWS = require('aws-sdk');
var request = require('request');

AWS.config.loadFromPath('./config.json');
var s3 = new AWS.S3();

function put_from_url(url, bucket, key, callback) {
  request({
    url: url,
    encoding: null
  }, function(err, res, body) {
    if (err)
      return callback(err, res);
    s3.putObject({
      Bucket: bucket,
      Key: key,
      ContentType: res.headers['content-type'],
      ContentLength: res.headers['content-length'],
      Body: body // buffer
    }, callback);
  })
}

put_from_url('http://a0.awsstatic.com/main/images/logos/aws_logo.png', 'your_bucket', 'media/aws_logo.png', function(err, res) {
  if (err)
    throw err;
  console.log('Uploaded data successfully!');
});
For those who are looking for a solution that doesn't involve callbacks and prefer promises, here is an alternative based on @micmia's code:
var AWS = require('aws-sdk'),
  request = require('request');

const bucketName = 'yourBucketName';
const bucketOptions = { /* ...your options */ };
var s3 = new AWS.S3(bucketOptions);

function UploadFromUrlToS3(url, destPath) {
  return new Promise((resolve, reject) => {
    request({
      url: url,
      encoding: null
    }, function(err, res, body) {
      if (err) {
        return reject(err);
      }
      var objectParams = {
        ContentType: res.headers['content-type'],
        ContentLength: res.headers['content-length'],
        Key: destPath,
        Body: body
      };
      resolve(s3.putObject(objectParams).promise());
    });
  });
}
UploadFromUrlToS3(
  'http://a0.awsstatic.com/main/images/logos/aws_logo.png',
  'your/s3/path/aws_logo.png')
  .then(function() {
    console.log('image was saved...');
  }).catch(function(err) {
    console.log('image was not saved!', err);
  });
Building on @Yuri's post, for those who would like to use axios instead of request and ES6 syntax for a more modern approach, plus the required Bucket property added to the params (and it downloads any file, not only images):
const uploadFileToS3 = (url, bucket, key) => {
  return axios.get(url, { responseType: "arraybuffer", responseEncoding: "binary" }).then((response) => {
    const params = {
      ContentType: response.headers["content-type"],
      ContentLength: response.data.length.toString(), // or response.headers["content-length"] if available for the type of file downloaded
      Bucket: bucket,
      Body: response.data,
      Key: key,
    };
    return s3.putObject(params).promise();
  });
};

uploadFileToS3(<your_file_url>, <your_s3_bucket>, <your_s3_path>)
  .then(() => console.log("File saved!"))
  .catch((error) => console.log(error));
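The snippet above assumes axios and an s3 client are already in scope; a minimal setup sketch (the region is a placeholder):

const axios = require("axios");
const AWS = require("aws-sdk");
const s3 = new AWS.S3({ region: "us-east-1" }); // placeholder region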
Same thing as the above answer but with fetch:
async function upload(url: string, key: string, bucket: string) {
  const response = await fetch(url);
  const contentType = response.headers.get("content-type") ?? undefined;
  const contentLength =
    response.headers.get("content-length") != null
      ? Number(response.headers.get("content-length"))
      : undefined;
  return s3
    .putObject({
      Bucket: bucket,
      Key: key,
      ContentType: contentType,
      ContentLength: contentLength,
      Body: response.body, // the response body stream
    })
    .promise();
}
Yes. There's an example of doing this in the knox README
http.get('http://google.com/doodle.png', function(res) {
  var headers = {
    'Content-Length': res.headers['content-length'],
    'Content-Type': res.headers['content-type']
  };
  client.putStream(res, '/doodle.png', headers, function(err, res) {
    // Logic
  });
});
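For completeness, http and client in that snippet come from Node's http module and a knox client; a minimal setup sketch with placeholder credentials:

var http = require('http');
var knox = require('knox');

var client = knox.createClient({
  key: '<api-key-here>',    // placeholder
  secret: '<secret-here>',  // placeholder
  bucket: 'your_bucket'
});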
