Combine the Jimp module with Amazon AWS S3 in Node.js

Here is what I want to achieve:
Upload a picture to my back office.
Resize this picture to reduce its file size.
Upload this picture to Amazon AWS S3.
I couldn't figure out how to store the picture directly to Amazon AWS S3, so I upload it to my back office first.
My code:
router.post('/fileupload', function(req, res) {
    // Prepare data
    var file = req.files.upfile;
    var uploadpath = 'profilepicture/' + req.session.user + '.jpg';
    var AWS = require('aws-sdk');
    var fs = require('fs');
    var Jimp = require('jimp');
    AWS.config.update({
        accessKeyId: **,
        secretAccessKey: **
    });
    // Upload file
    file.mv(uploadpath, function(err) {
        if (err) throw err;
        // Read in the file, resize it, store to S3
        Jimp.read(uploadpath, function (err, data) {
            if (err) throw err;
            // Reduce size
            data.resize(400, Jimp.AUTO).quality(100).write(uploadpath);
            var s3 = new AWS.S3();
            var stream = fs.createReadStream(uploadpath);
            s3.putObject({
                Bucket: bucketAmazon,
                Key: req.session.user + '.jpg',
                ContentType: 'image/jpg',
                Body: stream,
                ContentEncoding: 'base64',
                ACL: 'public-read',
                Metadata: {
                    'Content-Type': 'image/jpeg'
                }
            }, function (resp) {
                console.log(arguments);
                console.log('Successfully uploaded package.');
                return res.render('settings', {
                    user: req.session.user,
                    logged: true,
                    wrongPicture: false
                });
            });
        });
    });
});
However, when I run this code, the file is uploaded to my back office and resized correctly, but in Amazon AWS S3 the object shows a size of '0 B'.
If I remove the line data.resize(400, Jimp.AUTO).quality(100).write(uploadpath), the file is uploaded to Amazon AWS S3 correctly, but of course the picture is not reduced.

You can use the write callback to guarantee that the file is written before the upload starts.
...
data.resize(400, Jimp.AUTO).quality(100).write(uploadpath, () => {
    // upload to s3 code here
});
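For completeness, here is a minimal sketch of the fixed flow, reusing the bucketAmazon variable, session, and paths from the question (an illustration under those assumptions, not the only way to do it):
// Resize first, then start the upload from the write callback so the
// file on disk is complete before S3 reads it.
Jimp.read(uploadpath, function (err, data) {
    if (err) throw err;
    data.resize(400, Jimp.AUTO).quality(100).write(uploadpath, function (err) {
        if (err) throw err;
        var s3 = new AWS.S3();
        s3.putObject({
            Bucket: bucketAmazon,
            Key: req.session.user + '.jpg',
            ContentType: 'image/jpeg',
            Body: fs.createReadStream(uploadpath),
            ACL: 'public-read'
        }, function (err, resp) {
            if (err) throw err;
            console.log('Successfully uploaded package.');
            res.render('settings', { user: req.session.user, logged: true, wrongPicture: false });
        });
    });
});
As a variation, data.getBuffer(Jimp.MIME_JPEG, callback) hands you the resized image as a Buffer you can pass straight to putObject as Body, skipping the second write to disk entirely.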

Related

Is it possible to upload a file to S3 without reading or duplicating it?

I'm trying to upload a file to S3, but the file is large and we need to upload very frequently, so I was looking for an option to upload a file to S3 using Node.js without reading the whole content of the file.
The code below works fine, but it reads the entire file each time I upload.
const fs = require("fs");
const aws = require("aws-sdk");
aws.config.update({
    secretAccessKey: process.env.ACCESS_SECRET,
    accessKeyId: process.env.ACCESS_KEY,
    region: process.env.REGION,
});
const BUCKET = process.env.BUCKET;
const s3 = new aws.S3();
const fileName = "logs.txt";
const uploadFile = () => {
    fs.readFile(fileName, (err, data) => {
        if (err) throw err;
        const params = {
            Bucket: BUCKET, // pass your bucket name
            Key: fileName, // file will be saved as testBucket/contacts.csv
            Body: JSON.stringify(data, null, 2),
        };
        s3.upload(params, function (s3Err, data) {
            if (s3Err) throw s3Err;
            console.log(`File uploaded successfully at ${data.Location}`);
        });
    });
};
uploadFile();
You can make use of streams.
First create a read stream of the file you want to upload; you can then pipe it to S3 by passing it as the Body.
import { createReadStream } from 'fs';

const inputStream = createReadStream('sample.txt');
s3
    .upload({ Key: fileName, Body: inputStream, Bucket: BUCKET })
    .promise()
    .then(console.log, console.error);
You can use a multipart upload.
AWS article:
https://aws.amazon.com/blogs/aws/amazon-s3-multipart-upload/
SO question about the same for Python: Can I stream a file upload to S3 without a content-length header?
JS API reference manual: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html
The basic example is:
var upload = new AWS.S3.ManagedUpload({
    params: {Bucket: 'bucket', Key: 'key', Body: stream}
});
So you have to provide a stream as input:
const readableStream = fs.createReadStream(filePath);
The fs API is documented here: https://nodejs.org/api/fs.html#fscreatereadstreampath-options
Of course, you can process the data while reading it and then pass it to the S3 API; you just have to implement the Stream API (see the sketch below).
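For example, a hedged sketch (bucket name, file name, and the transformation are placeholder assumptions) of processing data on the fly with a Transform stream before handing it to ManagedUpload:
const fs = require('fs');
const { Transform } = require('stream');
const AWS = require('aws-sdk');

// Hypothetical per-chunk processing: upper-case text as it streams through.
const upperCase = new Transform({
    transform(chunk, encoding, callback) {
        callback(null, chunk.toString().toUpperCase());
    }
});

const body = fs.createReadStream('sample.txt').pipe(upperCase);

// ManagedUpload chunks the stream into parts, so the whole file is never
// held in memory and no Content-Length is needed up front.
new AWS.S3.ManagedUpload({
    params: { Bucket: 'bucket', Key: 'sample-upper.txt', Body: body }
}).promise()
    .then(console.log, console.error);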

How do I set my images uploaded to S3 with nodejs script to display instead of download?

I have a Node.js script that uploads files to AWS S3 through the command line. The problem I'm having is that when I try to view the file in the browser, it automatically downloads.
I have done some research, and most other posts point to the headers, but I have verified the headers are correct (image/png).
Additionally, when I upload the same file through the AWS console (logged into AWS), I am able to view the file within the browser.
var AWS = require('aws-sdk');
var fs = require('fs');
var path = require('path');
AWS.config.update({region: myRegion});
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
var uploadParams = {
    Bucket: process.argv[2],
    Key: '', // Key set below
    Body: '', // Body set below after createReadStream
    ContentType: 'image/jpeg',
    ACL: 'public-read',
    ContentDisposition: 'inline'
};
var file = process.argv[3];
var fileStream = fs.createReadStream(file);
fileStream.on('error', function(err) {
    console.log('File Error', err);
});
uploadParams.Body = fileStream;
uploadParams.Key = path.basename(file);
s3.putObject(uploadParams, function(errBucket, dataBucket) {
    if (errBucket) {
        console.log("Error uploading data: ", errBucket);
    } else {
        console.log(dataBucket);
    }
});
The upload succeeds, but I am unable to view the file in the browser because it downloads automatically.
You have to specify the Content-Disposition as part of the request headers; you cannot specify it as part of the request parameters. Set it explicitly in the headers as below.
var params = {Bucket: "bucketname", Key: "keyName", Body: "actualData"};
s3.putObject(params).
    on('build', function(req) {
        req.httpRequest.headers['Content-Type'] = 'application/pdf'; // Whatever you want
        req.httpRequest.headers['Content-Disposition'] = 'inline';
    }).
    send(function(err, data) {
        if (err) {
            console.log(err);
            return res.status(400).json({success: false});
        } else {
            console.log(data);
            return res.status(200).json({success: true});
        }
    });
Code to upload objects/images to S3:
module.exports = function(app, models) {
    var fs = require('fs');
    var AWS = require('aws-sdk');
    var accessKeyId = "ACCESS KEY HERE";
    var secretAccessKey = "SECRET KEY HERE";
    AWS.config.update({
        accessKeyId: accessKeyId,
        secretAccessKey: secretAccessKey
    });
    var s3 = new AWS.S3();
    app.post('/upload', function(req, res) {
        var params = {
            Bucket: 'bucketname',
            Key: 'keyname.png',
            Body: "GiveSomeRandomWordOraProperBodyIfYouHave"
        };
        s3.putObject(params, function (perr, pres) {
            if (perr) {
                console.log("Error uploading data: ", perr);
            } else {
                console.log("Successfully uploaded data to myBucket/myKey");
            }
        });
    });
};
The above code makes sure the object has been uploaded to S3. You can see it listed in the S3 bucket in the browser, but you can't view its contents from there.
You cannot view items within S3 itself; S3 is a storage box, so you can only download and upload elements in it. If you need to view the contents, you have to download the file and view it in the browser or any viewer of your choice. If you simply need to list the objects in S3, use the code below.
Code to list objects of S3:
var AWS = require('aws-sdk');
AWS.config.update({accessKeyId: 'mykey', secretAccessKey: 'mysecret', region: 'myregion'});
var s3 = new AWS.S3();
var params = {
    Bucket: 'bucketName',
    Delimiter: '/',
    Prefix: 's/prefix/objectPath/'
};
s3.listObjects(params, function (err, data) {
    if (err) throw err;
    console.log(data);
});
Use the S3 list to enumerate the elements of S3; this way you can view them. Create a hyperlink for each listed item and make it point to the S3 download URL, so you can view each object in the browser and also download it if you need to (a sketch follows below).
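For instance, a hedged sketch (the bucket name and the virtual-hosted URL host are assumptions; adjust the host to your bucket's region):
// Build browser-viewable links from the keys returned by listObjects.
s3.listObjects({Bucket: 'bucketName'}, function (err, data) {
    if (err) throw err;
    var links = data.Contents.map(function (obj) {
        return 'https://bucketName.s3.amazonaws.com/' + encodeURIComponent(obj.Key);
    });
    console.log(links); // render each as an <a href="..."> in your page
});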
If you need to view the contents via Node.js, use the code below to load the image as if you were loading it from a remote URL.
Code to download contents:
var fs = require('fs'),
    request = require('request');
var download = function(uri, filename, callback) {
    request.head(uri, function(err, res, body) {
        console.log('content-type:', res.headers['content-type']);
        console.log('content-length:', res.headers['content-length']);
        request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
    });
};
download('https://s3/URL', 'name.png', function() {
    console.log('done');
});
Code to load an image into a buffer:
const request = require('request');
let url = 'http://s3url/image.png';
request({ url, encoding: null }, (err, resp, buffer) => {
    // typeof buffer === 'object'
    // Use the buffer
    // This buffer now contains the image data
});
Use the above to load the image into a buffer. Once it's in a buffer, you can manipulate it the way you need. The above code doesn't download the image to disk, but it helps you manipulate the image in S3 using a buffer (see the sketch below).
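As an example, a hedged sketch (bucket and key names are placeholders) that ties back to the main question: fetch an image from S3 into a buffer, resize it with Jimp, and write the result back.
var Jimp = require('jimp');
s3.getObject({Bucket: 'bucketName', Key: 'image.png'}, function (err, data) {
    if (err) throw err;
    // data.Body is a Buffer; Jimp can read it directly.
    Jimp.read(data.Body, function (err, image) {
        if (err) throw err;
        image.resize(400, Jimp.AUTO).getBuffer(Jimp.MIME_PNG, function (err, buffer) {
            if (err) throw err;
            s3.putObject({
                Bucket: 'bucketName',
                Key: 'image-small.png',
                Body: buffer,
                ContentType: 'image/png'
            }, function (err) {
                if (err) throw err;
                console.log('Resized copy uploaded.');
            });
        });
    });
});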

nodeJS AWS S3 upload data wrong encoding

I have a little app that uploads a file to AWS S3. It uploads OK, but when I download the file from the S3 bucket, it is encoded and shows type:buffer etc.
If I upload the same file from the console, it displays fine.
Here is the code to upload:
const fs = require('fs');
const AWS = require('aws-sdk');
const s3 = new AWS.S3({
    accessKeyId: process.env.AWS_ACCESS_KEY,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});
const fileName = 'su.csv';
const uploadFile = () => {
    fs.readFile(fileName, (err, data) => {
        if (err) throw err;
        const params = {
            Bucket: 'mybukk22-test', // pass your bucket name
            Key: 'su.csv', // file will be saved as testBucket/contacts.csv
            Body: JSON.stringify(data, null, 2)
        };
        s3.upload(params, function (s3Err, data) {
            if (s3Err) throw s3Err;
            console.log(`File uploaded successfully at ${data.Location}`);
        });
    });
};
uploadFile();
Is the problem in the Body? How do I save the file the same way as on the client?
Thanks.
The issue is that you are doing a JSON.stringify on a Buffer, because fs.readFile returns a Buffer. To make it work, change your params to the following:
const params = {
    Bucket: 'mybukk22-test', // pass your bucket name
    Key: 'su.csv', // file will be saved as testBucket/contacts.csv
    Body: data
};
(Just pass data as-is as the Body of your upload operation.)
Otherwise, if you'd like to stick with your solution, cast data to a string like this: JSON.stringify(data.toString(), null, 2).

Upload Image directly to bucket on s3 amazon issue

I am a newbie to Amazon S3 and am facing an issue uploading an image directly from the frontend to an Amazon S3 bucket.
I have viewed many tutorials, but none properly explain how to upload an image directly to S3; they only teach how to upload an image from the local computer to S3.
I implemented the code below but have not been able to identify how to upload an image without saving it to my local machine first.
Here is the code I implemented, but with no luck so far:
s3ImageUpload: (req, res) => {
    var fs = require('fs');
    var multer = require('multer');
    var AWS = require('aws-sdk');
    var path = require('path');
    var awsCredFile = path.join(__dirname, '.', 'key.json');
    console.log('awsCredFile is');
    console.log(awsCredFile);
    AWS.config.loadFromPath(awsCredFile);
    var s3 = new AWS.S3();
    var photoBucket = new AWS.S3({params: {Bucket: 'testbucket'}});
    var sampleFile = {
        //"_id" : 345345,
        //"fieldname" : "uploads[]",
        //"originalname" : "female.JPG",
        //"encoding" : "base64",
        //"mimetype" : "image/jpeg",
        "destination" : "./public/images",
        "filename" : "female.jpeg",
        "path" : "/images/female.jpeg",
        //"size" : 251556,
        //"user" : "579fbe61adac4a8a73b6f508"
    };
    var filePathToSend = path.join(__dirname, '../public', sampleFile.path);
    function uploadToS3(filepath, destFileName, callback) {
        photoBucket
            .upload({
                ACL: 'public-read',
                Body: fs.createReadStream(filepath),
                Key: destFileName.toString(),
                ContentType: 'image/png'
            })
            // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
            .on('httpUploadProgress', function(evt) { console.log(evt); })
            // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
            .send(callback);
    }
    multer({limits: {fileSize: 10*1024*1024}});
    console.log('filePathToSend is ');
    console.log(filePathToSend);
    uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
        if (err) {
            console.error(err);
            return res.status(500).send('failed to upload to s3').end();
        }
        res.status(200)
            .send('File uploaded to S3: '
                + data.Location.replace(/</g, '&lt;')
                + '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
            .end();
    });
    console.log('uploading now...');
}
Pre-signed URLs could be a good solution for you.
On your backend, you can sign a URL using the AWS SDK and send this URL back to the client. The client can then upload the file directly using that URL.
https://docs.aws.amazon.com/AmazonS3/latest/dev/PresignedUrlUploadObject.html
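For reference, a minimal sketch (bucket name, key, content type, and expiry are placeholder assumptions):
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

// The backend signs a PUT URL; no file data passes through the server.
const url = s3.getSignedUrl('putObject', {
    Bucket: 'my-bucket',        // placeholder
    Key: 'uploads/avatar.jpg',  // placeholder
    ContentType: 'image/jpeg',
    Expires: 300                // URL valid for 5 minutes
});

// Send url to the client; the browser then uploads directly, e.g.:
// fetch(url, { method: 'PUT', headers: {'Content-Type': 'image/jpeg'}, body: file })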

Can't upload images in nodejs using aws-sdk

I've tried using aws-sdk and knox, and I get status code 301 trying to upload images: 'The bucket you are attempting to access must be addressed using the specified endpoint. Please send all future requests to this endpoint.' This works in PHP.
AWS.config.loadFromPath(__dirname + '/config/config.json');
fs.readFile(source, function (err, data) {
    var s3 = new AWS.S3();
    s3.client.createBucket({Bucket: 'mystuff'}, function() {
        var d = {
            Bucket: 'mystuff',
            Key: 'img/test.jpg',
            Body: data,
            ACL: 'public-read'
        };
        s3.client.putObject(d, function(err, res) {
            if (err) {
                console.log("Error uploading data: ", err);
                callback(err);
            } else {
                console.log("Successfully uploaded data to myBucket/myKey");
                callback(res);
            }
        });
    });
});
I actually solved this problem: in your config you have to have a region. Since my bucket was "US Standard", I left my region blank and it worked.
config.json:
{ "accessKeyId": "secretKey", "secretAccessKey": "secretAccessKey", "region": ""}
Go to the S3 management console, select one of your files, and click on Properties -> look at the file link.
US Standard:
https://s3.amazonaws.com/yourbucket/
Host in your console window:
yourbucket.s3.amazonaws.com/
us-west-1:
https://s3-us-west-1.amazonaws.com/yourbucket/
Host in your console window:
yourbucket.s3-us-west-1.amazonaws.com/
Did you try .send()?
I can upload to S3 with the code below.
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/AWSRequest.html
var s3object = {Bucket: 'mystuff', Key: name, Body: data['data']};
s3.client.putObject(s3object).done(function(resp) {
    console.log("Successfully uploaded data");
}).fail(function(resp) {
    console.log(resp);
}).send();
I had the same problem with the new SDK and solved it by setting the endpoint option explicitly.
Reference: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#constructor_details
Snippet:
var AWS = require('aws-sdk');
var s3 = new AWS.S3({ endpoint: 'https://s3-your-region-varies.amazonaws.com' }),
    myBucket = 'your-bucket-name';
var params = {Bucket: myBucket, Key: 'myUpload', Body: "Test"};
s3.putObject(params, function(err, data) {
    if (err) {
        console.log(err);
    } else {
        console.log("Successfully uploaded data to " + myBucket + "/myUpload");
    }
});
Alternatively, you can solve this by setting the region in your config; you just have to be precise about your region name, as in the sketch below.
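For example, a minimal sketch (the region value is an assumption; use your bucket's actual region):
var AWS = require('aws-sdk');
// Setting the region lets the SDK resolve the correct regional endpoint itself.
AWS.config.update({ region: 'us-west-1' });
var s3 = new AWS.S3();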
