Image in FormData to Node Server then to S3 - node.js

I have an Angular 2 application that is sending an image wrapped inside a FormData object to my Node server.
I can receive the image on the server, write it to a file, and then re-download it:
var Busboy = require('busboy');
var path = require('path');
var fs = require('fs');

var busboy = new Busboy({ headers: req.headers });
var savedUrl; // path of the file written to disk
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
    var saveTo;
    if (mimetype == "image/png") {
        saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename) + ".png");
    } else if (mimetype == "image/jpeg") {
        saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename) + ".jpg");
    } else {
        saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename));
    }
    savedUrl = saveTo;
    file.pipe(fs.createWriteStream(saveTo));
});
busboy.on('finish', function() {
    console.log("url retrieved");
    res.writeHead(200, { 'Connection': 'close' });
    res.end(savedUrl);
});
return req.pipe(busboy);
What I am trying to do is take the image inside the FormData and upload it to my S3 bucket. I can upload data I create myself, but I can't seem to upload the image that I have saved from the POST request:
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws_config.json');

var s3Bucket = new AWS.S3({ params: { Bucket: 'xxxx' } });
var params = { Key: filename, Body: file };
s3Bucket.putObject(params, function(err, data) {
    if (err) {
        console.log(err);
    } else {
        var urlParams = { Bucket: 'xxxx', Key: filename };
        s3Bucket.getSignedUrl('getObject', urlParams, function(err, url) {
            if (err) {
                console.log(err);
                res.status(err.status || 500);
                res.send(err);
            } else {
                res.setHeader('Content-Type', 'application/json');
                res.send({ "url": url });
            }
        });
    }
});
How can I take the file that I have just uploaded to my server and stream it to S3? Do I even need to save the file to disk prior to sending it to S3?
Thanks

Found a solution to this problem without having to save the file: rather than using putObject, use upload, which accepts a readable stream as the Body.
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws_config.json');

var s3Bucket = new AWS.S3({ params: { Bucket: 'xxxx' } });
var params = { Key: filename, Body: file };
s3Bucket.upload(params, function(err, data) {
    if (err) {
        console.log(err);
    } else {
        var urlParams = { Bucket: 'xxxx', Key: filename };
        s3Bucket.getSignedUrl('getObject', urlParams, function(err, url) {
            if (err) {
                console.log(err);
                res.status(err.status || 500);
                res.send(err);
            } else {
                res.setHeader('Content-Type', 'application/json');
                res.send({ "url": url });
            }
        });
    }
});
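For completeness, a minimal sketch (untested; the bucket name and error handling are placeholders) of wiring the busboy file stream straight into upload, so the image never touches the disk:

var AWS = require('aws-sdk');
var Busboy = require('busboy');

AWS.config.loadFromPath('./aws_config.json');
var s3Bucket = new AWS.S3({ params: { Bucket: 'xxxx' } }); // 'xxxx' is a placeholder

var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
    // 'file' is a readable stream; upload() accepts it directly and manages
    // the multipart upload, so no ContentLength is required (unlike putObject).
    s3Bucket.upload({ Key: filename, Body: file, ContentType: mimetype }, function(err, data) {
        if (err) return res.status(500).send(err);
        res.send({ url: data.Location });
    });
});
req.pipe(busboy);

This answers the second question above: no, the file does not need to hit the disk first, because upload streams the Body as it arrives.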

Related

Uploading PDF to Amazon S3 and display in browser

const fs = require('fs');
const pdf = require('html-pdf');
const AWS = require("aws-sdk");

// Read an HTML file from disk
function readHTMLfile(path, callback) {
    fs.readFile(path, { encoding: 'utf-8' }, function(err, html) {
        if (err) {
            callback(err);
        } else {
            callback(null, html);
        }
    });
}

// Import the HTML file, render it, and upload the result as a PDF
readHTMLfile(__dirname + '/Nda.html', function(err, html) {
    if (err) {
        console.log('Error: ', err);
    } else {
        var handlebars = require("handlebars");
        var template = handlebars.compile(html);
        var replacables = {
            url: 'http://politicalmemorabilia.com/wp-content/uploads/2014/10/2000px-Aaron_Burr_signature.png'
        };
        var HtmlToSend = template(replacables);
        pdf.create(HtmlToSend).toBuffer(function(err, buf) {
            if (err) {
                return console.log(err);
            } else {
                console.log('This is a buffer:', buf);
                // Credentials redacted; never publish real keys
                AWS.config.update({
                    accessKeyId: "YOUR_ACCESS_KEY_ID",
                    secretAccessKey: "YOUR_SECRET_ACCESS_KEY"
                });
                var s3Bucket = new AWS.S3({ params: { Bucket: "vizitor-profilepicture" } });
                var options = {
                    Key: `nda/Nda`,
                    Body: 'buf',
                    ContentEncoding: "buffer",
                    ContentType: "application/pdf"
                };
                s3Bucket.upload(options, function(err, data) {
                    if (err) {
                        console.log(err);
                        console.log("Error uploading data: ", data);
                    } else {
                        console.log('Data: ', data);
                        console.log("data: ", data.Location);
                        console.log("successfully uploaded pdf!");
                    }
                });
            }
        });
    }
});
I am importing an HTML file and converting it into a PDF using the "html-pdf" module, and I used Handlebars to embed a dynamic URL in the HTML file. I then converted the rendered HTML to a buffer and want to upload it to Amazon S3 as a PDF; for this, I pass the buffer in the Body option. I am getting the correct buffer, but when I upload to Amazon S3 as a PDF file, a link is generated, and on opening the link in a browser I get the error "Failed to load PDF document".
I have made the S3 bucket public, so that's not the issue here.
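A likely culprit, judging from the code above: Body is set to the string 'buf' rather than the buffer variable buf, so S3 stores those three literal characters instead of the PDF bytes, and ContentEncoding: "buffer" is not a valid HTTP content encoding. A corrected upload call would be a minimal change:

var options = {
    Key: `nda/Nda`,
    Body: buf,                     // the actual Buffer from toBuffer(), not the string 'buf'
    ContentType: "application/pdf" // ContentEncoding dropped; "buffer" is not a valid value
};
s3Bucket.upload(options, function(err, data) {
    if (err) {
        console.log("Error uploading data: ", err);
    } else {
        console.log("Successfully uploaded pdf to", data.Location);
    }
});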

Uploading PDF Content Into An S3 Bucket

I'm trying to download PDF content with data from a remote location and upload the content into S3 as a PDF file. I'm using Node.js in the context of an AWS Lambda. The s3.putObject call resolves successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
const request = require('request');

// s3 and callback come from the surrounding Lambda handler
const viewUrl = "https://link_to_downloadable_pdf/";
const options = {
    url: viewUrl,
    headers: {
        'Content-Type': 'application/pdf'
    }
};
request(options, function(err, res, body) {
    if (err) { return console.log(err); }
    const base64data = new Buffer(body, 'binary');
    const params = {
        Bucket: "myS3bucket",
        Key: "my-pdf.pdf",
        ContentType: "application/pdf",
        Body: base64data,
        ACL: 'public-read'
    };
    s3.putObject(params, function(err, data) {
        if (err) {
            console.log(err);
        } else {
            callback(null, JSON.stringify(data));
        }
    });
});
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?
Can you try this code? :)
import AWS from 'aws-sdk'
const request = require('request')

const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
    // encoding: null makes request hand back the body as a raw Buffer
    return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
        function(err, res, body) {
            if (err)
                return reject({ status: 500, error: err })
            return resolve({ status: 200, body: body })
        })
})

promise.then((pdf) => {
    if (pdf.status == 200) {
        console.log('uploading file..')
        s3.putObject({
            Bucket: process.env.bucket,
            Body: pdf.body,
            Key: 'my-pdf.pdf',
            ACL: 'public-read'
        }, (err, data) => {
            if (err)
                console.log(err)
            else
                console.log('uploaded')
        })
    }
})
I'll be attentive to any feedback; hope this helps. (The key difference is encoding: null, which makes request return the body as a raw Buffer rather than a UTF-8 decoded string, so the binary PDF data is not corrupted on the way in.)
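Since the request package has since been deprecated, here is a roughly equivalent sketch using only Node's built-in https module (the URL, bucket, and key are placeholders, not values from the question):

const https = require('https');
const AWS = require('aws-sdk');

const s3 = new AWS.S3();

https.get('https://link_to_downloadable_pdf/', (res) => {
    const chunks = [];
    res.on('data', (chunk) => chunks.push(chunk)); // collect raw Buffer chunks
    res.on('end', () => {
        const pdfBuffer = Buffer.concat(chunks);   // binary-safe concatenation
        s3.putObject({
            Bucket: 'myS3bucket', // placeholder
            Key: 'my-pdf.pdf',
            ContentType: 'application/pdf',
            Body: pdfBuffer
        }, (err, data) => {
            if (err) console.log(err);
            else console.log('uploaded');
        });
    });
}).on('error', (err) => console.log(err));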

How to Upload CSV file on S3 Bucket using NodeJS?

I am creating a CSV file on the fly from JSON content and uploading the generated CSV file to an S3 bucket, rather than first saving the file locally.
Below is my code snippet; using it, the CSV file is uploaded to the S3 bucket, but it does not seem to be in the correct CSV format.
var uploadCSVFileOnS3Bucket = function(next, csvFileContent, results) {
    console.log("uploadCSVFileOnS3Bucket function started");
    var bufferObject = Buffer.from(JSON.stringify(csvFileContent));
    var filePath = configurationHolder.config.s3UploadFilePath;
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'bucket_name',
        Key: filePath,
        Body: bufferObject,
        CacheControl: 'public, max-age=86400'
    };
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error at uploadCSVFileOnS3Bucket function", err);
            next(err);
        } else {
            console.log("File uploaded Successfully");
            next(null, filePath);
        }
    });
};
Also, I am using the "json2csv" npm module for generating the CSV content from JSON.
Below is the code:
var generateCSVFile = function(next, callback, csvFileContent) {
    console.log("generateCSVFile function started", csvFileContent);
    if (csvFileContent && csvFileContent.length > 0) {
        var fields = ['field1', 'field2', 'field3', ........];
        var csv = json2csv({ data: csvFileContent, fields: fields });
        console.log('created', csv);
        next(null, csv);
    } else {
        next(null, []);
    }
};
Please let us know where the above code is going wrong.
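One thing worth checking before the params: JSON.stringify is being applied to a string that is already valid CSV, which wraps it in quotes and escapes every newline. A quick illustration:

// json2csv already returns a plain CSV string
var csv = 'field1,field2\n"a","b"';
console.log(JSON.stringify(csv));
// => "field1,field2\n\"a\",\"b\""
// The file now starts and ends with a quote and contains literal \n
// sequences instead of real line breaks, which no CSV parser accepts.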
Hi, I tried again with the header values below and it worked for me. Here is the code (contentDisposition comes from the content-disposition npm module):
var contentDisposition = require('content-disposition');

var s3 = new AWS.S3();
var params = {
    Bucket: bucketName,
    Key: filePath,
    Body: csvFileContent,
    ContentType: 'application/octet-stream',
    ContentDisposition: contentDisposition(filePath, {
        type: 'inline'
    }),
    CacheControl: 'public, max-age=86400'
};
s3.putObject(params, function(err, data) {
    if (err) {
        console.log("Error at uploadCSVFileOnS3Bucket function", err);
        next(err);
    } else {
        console.log("File uploaded Successfully");
        next(null, filePath);
    }
});
Add ContentDisposition: 'attachment' in your params as well.
Otherwise, you can also read the file from disk and upload it to S3:
fs.readFile(FILEPATH, function(err, file_buffer) {
    var params = {
        Bucket: bucketName, // pass your bucket name
        Key: key,
        ContentDisposition: 'attachment',
        Body: file_buffer
    };
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error in upload");
            callback(err, null);
        }
        if (data) {
            console.log("Upload Success", data);
            callback(null, data);
        }
    });
});
Using async/await:
import AWS from "aws-sdk";
import { parse } from "json2csv";

// s3 is assumed to be a plain AWS.S3 client here
const s3 = new AWS.S3();

const saveCsv = async () => {
    const payload = [{ a: 1, b: 2 }];
    const csvPayload = parse(payload, { header: true, defaultValue: "-----" });
    await s3.putObject({
        Bucket: 'bucket-name',
        Key: 'filename.csv',
        Body: csvPayload
    }).promise();
};
Just like that, without creating a buffer or using JSON.stringify().
Try this, it worked for me:
var fs = require('fs');
var AWS = require('aws-sdk');

AWS.config.update({
    region: '',           // use the appropriate region
    accessKeyId: '',      // use your access key
    secretAccessKey: ''   // use your secret key
});
var s3 = new AWS.S3();

fs.readFile('contacts.csv', 'utf-8', (err, data) => {
    if (err) throw err;
    const params = {
        Bucket: 'testBucket',   // pass your bucket name
        Key: 'contacts.csv',    // file will be saved as testBucket/contacts.csv
        Body: data
    };
    s3.upload(params, (s3Err, data) => {
        if (s3Err) throw s3Err;
        console.log(`File uploaded successfully at ${data.Location}`);
    });
});
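For larger files, a sketch of the same upload with a stream Body, so the whole CSV never has to sit in memory (bucket and file names reuse the placeholders above):

var fs = require('fs');
var AWS = require('aws-sdk');

var s3 = new AWS.S3();

// upload() accepts a readable stream as Body and manages the
// multipart upload itself, unlike putObject.
s3.upload({
    Bucket: 'testBucket',                        // placeholder
    Key: 'contacts.csv',
    Body: fs.createReadStream('contacts.csv'),   // streamed, not buffered
    ContentType: 'text/csv'
}, (err, data) => {
    if (err) throw err;
    console.log(`File uploaded successfully at ${data.Location}`);
});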

File Uploading to S3, but times out and file corrupt

Below is the code that I am using to upload a file to S3 using Node.js and aws-sdk. Everything seems to work fine; however, the browser just hangs. Also, when I try to open the image on S3, it shows up broken.
module.exports = function(app) {
    app.post('/upload', function(req, res) {
        var AWS = require('aws-sdk');
        var accessKeyId = process.env.AWS_ACCESS_KEY;
        var secretAccessKey = process.env.AWS_SECRET_KEY;
        // pull vars from Heroku, else send to dev
        AWS.config.update({
            accessKeyId: accessKeyId,
            secretAccessKey: secretAccessKey
        });
        var s3 = new AWS.S3();
        var file = req.files.filechooser;
        if (file) {
            var params = {
                Bucket: 'mybucket',
                Body: new Buffer(file, 'binary'),
                ACL: 'public-read',
                ContentType: file.type,
                Key: "ugc/" + file.name,
                ContentLength: file.size
            };
            s3.putObject(params, function(err, data) {
                if (err) {
                    console.log("Error uploading data: ", err);
                } else {
                    console.log("Successfully uploaded data to myBucket/myKey");
                }
            }).on('httpUploadProgress', function(chunk) {
                console.log("Uploaded", chunk.loaded, "of", chunk.total, "bytes");
            }).on('httpDone', function() {
                console.log("done");
            });
            console.log("called");
        } else {
            console.log('Nothing to upload.');
        }
    });
};
You need to complete the call to putObject, after registering the listeners, by actually starting the upload with send(). putObject returns an AWS.Request object; when you build up the request this way, you must initiate it manually with send():
.on('httpUploadProgress', function(chunk) {
    console.log("Uploaded", chunk.loaded, "of", chunk.total, "bytes");
})
.on('httpDone', function() {
    console.log("done");
})
.send(); // actually initiate the request
See the AWS.Request documentation for details:
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#send-property
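Putting the pieces together, a minimal sketch (not the poster's exact code; res comes from the route handler) that also sends an HTTP response back, since the browser will otherwise keep waiting even after the upload succeeds:

s3.putObject(params)
    .on('httpUploadProgress', function(chunk) {
        console.log("Uploaded", chunk.loaded, "of", chunk.total, "bytes");
    })
    .on('success', function(response) {
        // Respond to the client so the browser stops hanging
        res.send('Upload complete');
    })
    .on('error', function(err) {
        console.log("Error uploading data: ", err);
        res.status(500).send('Upload failed');
    })
    .send(); // actually initiate the request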

NodeJS Request Upload Image

I am trying to upload an image using Node.js, Express, and the request module, but I keep getting a 415 back from the Amazon S3 instance.
fs.createReadStream(req.files.image.path).pipe(request.post(defaults.url, {
    form: {
        param_1: '',
        param_2: ''
    }
}, function(error, response, body) {
    if (error) {
        callback(error, null);
    } else {
        if (response.statusCode === 200) {
            callback({}, body);
        } else {
            callback(body, response);
        }
    }
}));
I think the image is not getting appended to the request, but I'm not 100% sure. Any advice?
pipe expects a Writable stream as its parameter. You can use the res object of Express directly as the pipe destination. But if you would like to upload to S3, you can read the file from the req stream and use putObject to write it to S3:
var fs = require('fs');

fs.readFile(req.files.image.path, function(err, data) {
    var AWS = require('./aws_config');
    var s3 = new AWS.S3();
    var bucket = '';
    s3.putObject({
        ACL: 'public-read', // private access by default
        Bucket: bucket,
        Key: file_name,
        Body: data
    }, function(err, data) {
        if (err) {
            console.log(err);
            res.status(500).send({ msg: 'image upload failed', error: err });
        } else {
            console.log('S3 upload Successful');
            res.send({});
        }
    });
});
If you would like to download, you can fetch the object, write it to disk, and pipe it back to the response:
app.get('/download/:file', function(req, res, next) {
    var AWS = require('./aws_config');
    var s3 = new AWS.S3();
    s3.getObject({
        Bucket: '',
        Key: req.params.file
    }, function(err, data) {
        if (err) console.log(err);
        var fs = require('fs');
        var filePath = __dirname + "/downloads/" + req.params.file;
        fs.writeFile(filePath, data.Body, function(err) {
            if (err) console.log(err);
            else {
                res.attachment(filePath);
                var filestream = fs.createReadStream(filePath);
                filestream.pipe(res);
                // TODO: delete the file from the server?
            }
        });
    });
});
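As for the original 415: request's form option sends url-encoded fields only, so the piped image bytes likely never make it into the body as a file part. A sketch of attaching the file with request's formData option, which builds a proper multipart/form-data request (the image field name is an assumption):

var fs = require('fs');
var request = require('request');

request.post({
    url: defaults.url,
    formData: {
        param_1: '',
        param_2: '',
        // 'image' is an assumed field name; adjust to what the endpoint expects
        image: fs.createReadStream(req.files.image.path)
    }
}, function(error, response, body) {
    if (error) return callback(error, null);
    if (response.statusCode === 200) return callback(null, body);
    callback(body, response);
});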
