NodeJS Request Upload Image

Trying to upload an image using Node.js, Express & the request module, but I keep getting a 415 (Unsupported Media Type) from Amazon S3.
fs.createReadStream(req.files.image.path).pipe(request.post(defaults.url, {
  form: {
    param_1: '',
    param_2: ''
  }
}, function (error, response, body) {
  if (error) {
    callback(error, null);
  } else {
    if (response.statusCode === 200) {
      callback({}, body);
    } else {
      callback(body, response);
    }
  }
}));
I think the image is not getting attached to the request, but I'm not 100% sure. Any advice?

pipe expects a Writable stream as its parameter, so you can use Express's res object directly as the pipe destination. But if you want to upload to S3, you can read the file that arrived on the request and use putObject to write it to S3:
var fs = require('fs')
fs.readFile(req.files.image.path, function (err, data) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  var bucket = '' // your bucket name
  s3.putObject({
    ACL: 'public-read', // objects are private by default
    Bucket: bucket,
    Key: file_name, // the object key to store the image under
    Body: data
  }, function (err, data) {
    if (err) {
      console.log(err)
      res.send(500, { msg: 'image upload failed', error: err })
    } else {
      console.log('S3 upload successful')
      res.send({})
    }
  });
});
If you want to download, you can pipe the object read from S3 back to the response:
app.get('/download/:file', function (req, res, next) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  s3.getObject({
    Bucket: '', // your bucket name
    Key: req.params.file
  }, function (err, data) {
    if (err) console.log(err)
    var fs = require('fs')
    var filePath = __dirname + "/downloads/" + req.params.file
    fs.writeFile(filePath, data.Body, function (err) {
      if (err) console.log(err)
      else {
        res.attachment(filePath)
        var filestream = fs.createReadStream(filePath)
        filestream.pipe(res)
        // TODO: delete file from server ?
      }
    })
  })
})
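Note that the snippet above stages the object on disk first (hence the TODO about cleanup). A minimal sketch of streaming the object straight from S3 to the response, assuming the same ./aws_config module and Express app, avoids the temporary file entirely:
var AWS = require('./aws_config') // assumed config module from the answer above
var s3 = new AWS.S3()

app.get('/download/:file', function (req, res, next) {
  res.attachment(req.params.file)
  s3.getObject({
    Bucket: '', // your bucket name
    Key: req.params.file
  }).createReadStream()
    .on('error', function (err) { next(err) }) // surface S3 errors (e.g. NoSuchKey)
    .pipe(res)
})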

Related

Trying to send readable stream (audio) to amazon s3 - runtime error

I am trying to write a function that takes the mp3 URL of the recording and uploads it to S3. However, I keep getting a runtime error and the callback is never reached. If I move the callback below s3.upload(...), then the statement "attempting to upload mp3" is never logged.
exports.handler = function(context, event, callback) {
  const twiml = new Twilio.twiml.VoiceResponse();
  var AWS = require('aws-sdk');
  // configure credentials before constructing the S3 client,
  // otherwise the client captures the unconfigured global config
  AWS.config.update({
    accessKeyId: "...",
    secretAccessKey: "..."
  });
  var s3 = new AWS.S3();
  var getUri = require('get-uri');
  var client = context.getTwilioClient();
  const recording_id = event.RecordingSid;
  const uri = event.RecordingUrl + ".mp3";
  getUri(uri, function (err, rs) {
    if (err) {
      console.log(err.message);
      throw err;
    }
    var params = {
      ACL: "public-read",
      Body: rs,
      Bucket: "...",
      Key: "audio.mp3",
      ContentType: 'audio/mp3'
    };
    s3.upload(params, function(err, data) {
      console.log("attempting to upload mp3");
      if (err) {
        console.log("there is an error");
        console.log(err.status);
        throw err.message;
      }
      else {
        console.log("Your upload has been successful.");
      }
      callback(null, twiml);
    });
  });
  console.log("at the end");
};
Is there any other way to access the recordings and put them in my public S3 bucket? Why does s3.upload(...) never execute?
Any insight into this is helpful! Thanks in advance!
app.get('/uploadsong', function (req, res) {
  console.log("Hi there")
  var URI = 'http://sensongsmp3download.info/Kaala%20(2018)%20-%20Sensongsmp3.info/Thanga%20Sela%20--%20Sensongsmp3.Info.mp3';
  var buffer = [];
  request
    .get(URI)
    .on('error', function (err) {
      console.log("error")
    }).on('data', function (data) {
      buffer.push(data);
    }).on('end', function () {
      var completeSong = Buffer.concat(buffer);
      var data = {
        Body: completeSong,
        Key: 'sample.mp3',
        ContentType: 'audio/mp3'
      }
      // s3Bucket is assumed to be an S3 client constructed with a default Bucket param
      s3Bucket.putObject(data, function (err, data) {
        if (err) {
          console.log('Error uploading data: ', err);
        } else {
          console.log('upload successful')
          res.send('done');
        }
      })
    })
})
Here are the modules I have used:
var request = require('request');
I contacted Twilio about this; they responded that Twilio Functions have a strict 5-second timeout, and the upload from the Twilio Function to the S3 bucket takes longer than 5 seconds. My workaround was sending a string containing all the mp3 URLs separated by a comma and a space. The Lambda function would then parse the string and store all the links in an array used for audio playback, as sketched below.
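A minimal sketch of that parsing step, assuming the joined string arrives in the Lambda event under a hypothetical recordingUrls field:
exports.handler = async (event) => {
  // event.recordingUrls is a hypothetical field, e.g.
  // "https://api.twilio.com/a.mp3, https://api.twilio.com/b.mp3"
  const urls = event.recordingUrls.split(', ');
  // urls is now an array of mp3 links usable for playback
  return { urls };
};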

How to retrieve image from S3 with NodeJS

Please let me know how to retrieve an image from S3 with Node.js. Honestly, I could upload an image to S3 with Node.js as follows, but the problem is how to retrieve it:
router.get('/image/:imageId', function (req, res, next) {
  // ????
});
var aws = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');

var s3 = new aws.S3({ accessKeyId: config.awsAccessId, secretAccessKey: config.awsAccessKey });
var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: config.bucket,
    key: function (req, file, cb) {
      cb(null, file.originalname);
    }
  })
});
router.post('/upload/:id', upload.array('file', 3), function (req, res, next) {
  res.send('Successfully uploaded ' + req.files.length + ' files!');
});
I've finally found it:
var params = { Bucket: config.bucket, Key: req.params.imageId };
s3.getObject(params, function (err, data) {
  if (err) return next(err); // data is undefined on error
  res.writeHead(200, { 'Content-Type': 'image/jpeg' });
  res.write(data.Body, 'binary');
  res.end(null, 'binary');
});
If you use Lambda with API Gateway to retrieve images, there is no need to embed access keys, as long as the Lambda execution role has the appropriate permissions.
Read an image from the bucket and send it as base64, so it can be used directly as the src of an image tag in HTML.
const AWS = require('aws-sdk');
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = (event, context, callback) => {
  var params = {
    "Bucket": "bucket-name",
    "Key": "object-name"
  };
  s3.getObject(params, function (err, data) {
    if (err) {
      callback(err, null);
    } else {
      let image = Buffer.from(data.Body).toString('base64'); // new Buffer() is deprecated
      image = "data:" + data.ContentType + ";base64," + image;
      let response = {
        "statusCode": 200,
        "headers": {
          "Access-Control-Allow-Origin": "*",
          'Content-Type': data.ContentType
        },
        "body": image,
        "isBase64Encoded": true
      };
      callback(null, response);
    }
  });
};
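A hedged client-side sketch of consuming that Lambda through API Gateway; the endpoint URL and the preview element id are assumptions:
// Hypothetical usage: the response body is already a data: URL,
// so it can be assigned straight to an <img> element's src.
fetch('https://example.execute-api.us-east-1.amazonaws.com/prod/image') // assumed endpoint
  .then(function (response) { return response.text(); })
  .then(function (dataUrl) {
    document.getElementById('preview').src = dataUrl; // assumed <img id="preview">
  });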
You're looking for the getObject() method.
Assuming you are using aws-sdk, you can use the getObject method. Here is sample code:
exports.getObjects = function (req, res) {
  var item = req.body;
  var params = { Bucket: req.params.bucketName, Key: 'keyname' }; // keyname can be a filename
  s3.getObject(params, function (err, data) {
    if (err) {
      return res.send({ "error": err });
    }
    res.send({ data });
  });
}
A better and faster approach is piping the stream to the response. This works with the Minio S3 client, and I believe it also works with the AWS JavaScript client.
const Minio = require('minio');
const s3Client = new Minio.Client({
  endPoint: 'ep',
  accessKey: 'ak',
  secretKey: 'sk'
});
router.get('/image/:imageId', (req, res) => {
  const { imageId } = req.params;
  s3Client.getObject('bucket', imageId, (err, stream) => {
    if (err) return res.status(500).send(err);
    const contentType = stream.headers['content-type'];
    contentType && res.setHeader('Content-Type', contentType);
    stream.pipe(res);
  });
});
This is what I use with aws-sdk:
const params = { Bucket: "YOUR_BUCKET_NAME", Key: "YOUR_FILENAME"};
s3.getObject(params).createReadStream().pipe(res);
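One caveat with that one-liner: if the key is missing, the error is emitted on the stream rather than thrown. A minimal sketch with an error handler, under the same assumed bucket and key names:
const params = { Bucket: "YOUR_BUCKET_NAME", Key: "YOUR_FILENAME" };
s3.getObject(params)
  .createReadStream()
  .on('error', (err) => {
    // e.g. NoSuchKey; respond before any body bytes have been piped
    res.status(404).end();
  })
  .pipe(res);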

How to Upload CSV file on S3 Bucket using NodeJS?

I am creating a CSV file on the fly from JSON content and uploading the generated CSV file to an S3 bucket rather than first saving the file locally.
Below is my code snippet. The CSV file is uploaded to the S3 bucket, but it does not seem to be in correct CSV format.
var uploadCSVFileOnS3Bucket = function (next, csvFileContent, results) {
  console.log("uploadCSVFileOnS3Bucket function started");
  var bufferObject = Buffer.from(JSON.stringify(csvFileContent));
  var filePath = configurationHolder.config.s3UploadFilePath;
  var s3 = new AWS.S3();
  var params = {
    Bucket: 'bucket_name',
    Key: filePath,
    Body: bufferObject,
    CacheControl: 'public, max-age=86400'
  }
  s3.upload(params, function (err, data) {
    if (err) {
      console.log("Error at uploadCSVFileOnS3Bucket function", err);
      next(err);
    } else {
      console.log("File uploaded Successfully");
      next(null, filePath);
    }
  });
};
Also, I am using the "json2csv" npm module to generate the CSV content from JSON.
Below is the code:
var generateCSVFile = function (next, callback, csvFileContent) {
  console.log("generateCSVFile function started", csvFileContent);
  if (csvFileContent && csvFileContent.length > 0) {
    var fields = ['field1', 'field2', 'field3', /* ... */];
    var csv = json2csv({ data: csvFileContent, fields: fields });
    console.log('created', csv);
    next(null, csv);
  }
  else {
    next(null, []);
  }
}
Please let us know where the above code is going wrong.
I tried again with the header values below and it worked for me. (The underlying problem was passing the CSV string through JSON.stringify, which wraps it in quotes and escapes the newlines; pass the CSV string as the body directly.) Below is the code:
var contentDisposition = require('content-disposition'); // needed for the ContentDisposition header below
var s3 = new AWS.S3();
var params = {
  Bucket: bucketName,
  Key: filePath,
  Body: csvFileContent, // the raw CSV string, not JSON.stringify(...)
  ContentType: 'application/octet-stream',
  ContentDisposition: contentDisposition(filePath, {
    type: 'inline'
  }),
  CacheControl: 'public, max-age=86400'
}
s3.putObject(params, function (err, data) {
  if (err) {
    console.log("Error at uploadCSVFileOnS3Bucket function", err);
    next(err);
  } else {
    console.log("File uploaded Successfully");
    next(null, filePath);
  }
});
Add ContentDisposition: 'attachment' in your params as well.
Otherwise, you can also read the file from disk and upload it to S3:
fs.readFile(FILEPATH, function (err, file_buffer) {
  var params = {
    Bucket: bucketName, // your bucket name
    Key: key,
    ContentDisposition: 'attachment',
    Body: file_buffer
  };
  s3.upload(params, function (err, data) {
    if (err) {
      console.log("Error in upload");
      callback(err, null)
    }
    if (data) {
      console.log("Upload Success", data);
      callback(null, data)
    }
  });
});
Using async/await:
import { parse } from "json2csv";

const saveCsv = async () => {
  const payload = [{ a: 1, b: 2 }];
  const csvPayload = parse(payload, { header: true, defaultValue: "-----" });
  const s3Key = 'filename.csv';
  const bucketName = 'bucket-name';
  // putObject(...).promise() is the awaitable aws-sdk v2 call
  await s3.putObject({ Bucket: bucketName, Key: s3Key, Body: csvPayload }).promise();
}
Just like that, without creating a buffer or using JSON.stringify().
Try this, it worked for me:
var fs = require('fs') // the built-in fs module is all that's needed here
var AWS = require('aws-sdk')
AWS.config.update({
  region: '', // use appropriate region
  accessKeyId: '', // use your access key
  secretAccessKey: '' // use your secret key
})
var s3 = new AWS.S3()
fs.readFile('contacts.csv', 'utf-8', (err, data) => {
  if (err) throw err;
  const params = {
    Bucket: 'testBucket', // pass your bucket name
    Key: 'contacts.csv', // file will be saved as testBucket/contacts.csv
    Body: data
  };
  s3.upload(params, (s3Err, data) => {
    if (s3Err) throw s3Err
    console.log(`File uploaded successfully at ${data.Location}`)
  });
});

Uploading Image from AWS Lambda to S3 via API Gateway in Binary format

My Lambda receives the binary data of an image from my user in the request body (event.body).
I upload it to S3 with no error, but when I download it, the image is corrupted / can't be opened.
I also need to return the URL of the uploaded image to the user.
Please Help!
module.exports.uploadImage = (event, context, callback) => {
  var buf = new Buffer(new Buffer(event.body).toString('base64').replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: Date.now() + "",
    Body: buf,
    ContentEncoding: 'base64',
    ContentType: 'image/png',
    ACL: 'public-read'
  };
  s3Bucket.putObject(data, function (err, data) {
    if (err) {
      console.log(err);
      console.log('Error uploading data: ', data);
    } else {
      console.log('succesfully uploaded the image!');
    }
    callback(null, data);
  });
};
You can upload the image to S3 as a Node Buffer; the SDK does the conversion for you.
const AWS = require("aws-sdk");
var s3 = new AWS.S3();
module.exports.handler = (event, context, callback) => {
  var buf = Buffer.from(event.body.replace(/^data:image\/\w+;base64,/, ""), "base64");
  var data = {
    Bucket: "sample-bucket",
    Key: Date.now() + "",
    Body: buf,
    ContentType: 'image/png',
    ACL: 'public-read'
  };
  s3.putObject(data, function (err, data) {
    if (err) {
      console.log('Error uploading data: ', err);
    } else {
      console.log('successfully uploaded the image!');
    }
    callback(null, data);
  });
};
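The question also asks for the URL of the uploaded image, which the snippet above doesn't return. A minimal sketch, assuming the same sample-bucket, a public-read object, and buf built as above; the region in the URL is an assumption:
var key = Date.now() + "";
s3.putObject({
  Bucket: "sample-bucket",
  Key: key,
  Body: buf,
  ContentType: 'image/png',
  ACL: 'public-read'
}, function (err) {
  if (err) return callback(err);
  // public-read objects are reachable at a predictable URL;
  // the region here is an assumption -- substitute your bucket's region
  var url = "https://sample-bucket.s3.us-east-1.amazonaws.com/" + key;
  callback(null, { statusCode: 200, body: JSON.stringify({ url: url }) });
});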

Image in FormData to Node Server then to S3

I have an Angular 2 application that sends an image wrapped inside a FormData object to my Node server.
I can receive the image on the server, write it to a file, and then re-download it:
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
  if (mimetype == "image/png") {
    var saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename) + ".png");
  } else if (mimetype == "image/jpeg") {
    var saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename) + ".jpg");
  } else {
    var saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename));
  }
  file.pipe(fs.createWriteStream(saveTo));
});
busboy.on('finish', function () {
  console.log("url retrieved");
  res.writeHead(200, { 'Connection': 'close' });
  res.end(savedUrl);
});
return req.pipe(busboy);
What I am trying to do is take the image inside the FormData and upload it to my S3 bucket. I am able to upload data myself, but I can't seem to upload the image that I saved from the POST request:
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws_config.json');
var s3Bucket = new AWS.S3({ params: { Bucket: 'xxxx' } });
params = { Key: filename, Body: file };
s3Bucket.putObject(params, function (err, data) {
  if (err) {
    console.log(err)
  } else {
    var urlParams = { Bucket: 'xxxx', Key: filename };
    s3Bucket.getSignedUrl('getObject', urlParams, function (err, url) {
      if (err) {
        console.log(err);
        res.status(err.status || 500);
        res.send(err); // `error` was undefined here; send the actual err
      } else {
        res.setHeader('Content-Type', 'application/json');
        res.send({ "url": url });
      }
    })
  }
});
How can I take the file that I have just uploaded to my server and stream it to S3?
Do I even need to save the file to disk prior to sending it to S3?
Thanks
Found a solution to this problem without having to save the file.
Rather than using putObject, use upload: upload accepts a readable stream as the Body and handles the transfer (including multipart chunking) without needing the content length up front, so the busboy file stream can be passed straight through.
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws_config.json');
var s3Bucket = new AWS.S3({ params: { Bucket: 'xxxx' } });
params = { Key: filename, Body: file };
s3Bucket.upload(params, function (err, data) {
  if (err) {
    console.log(err)
  } else {
    var urlParams = { Bucket: 'xxxx', Key: filename };
    s3Bucket.getSignedUrl('getObject', urlParams, function (err, url) {
      if (err) {
        console.log(err);
        res.status(err.status || 500);
        res.send(err);
      } else {
        res.setHeader('Content-Type', 'application/json');
        res.send({ "url": url });
      }
    })
  }
});
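For completeness, a sketch of the wiring under the busboy handler from the question; the file stream from busboy goes straight to upload, so nothing is written to disk (file and filename come from busboy's 'file' event):
busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
  s3Bucket.upload({ Key: filename, Body: file }, function (err, data) {
    if (err) return console.log(err);
    console.log('Uploaded to', data.Location); // data.Location is the object URL
  });
});
req.pipe(busboy);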