How to retrieve an image from S3 with Node.js

How can I retrieve an image from S3 with Node.js? I can already upload an image to S3 (code below), but I can't figure out how to fill in the retrieval route:
router.get('/image/:imageId', function (req, res, next) {
  // ????
});

var s3 = new aws.S3({ accessKeyId: config.awsAccessId, secretAccessKey: config.awsAccessKey });
var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: config.bucket,
    key: function (req, file, cb) {
      cb(null, file.originalname);
    }
  })
});

router.post('/upload/:id', upload.array('file', 3), function (req, res, next) {
  res.send('Successfully uploaded ' + req.files.length + ' files!');
});

I've finally found a working approach:
var params = { Bucket: config.bucket, Key: req.params.imageId };
s3.getObject(params, function (err, data) {
  res.writeHead(200, { 'Content-Type': 'image/jpeg' });
  res.write(data.Body, 'binary');
  res.end(null, 'binary');
});
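Wired into the route from the question, a minimal sketch (assuming the same config and s3 objects as in the upload code) could look like this:

// Hypothetical completion of the retrieval route, assuming the aws-sdk v2
// client and the config object from the upload snippet above.
router.get('/image/:imageId', function (req, res, next) {
  var params = { Bucket: config.bucket, Key: req.params.imageId };
  s3.getObject(params, function (err, data) {
    if (err) {
      return next(err); // e.g. the key does not exist
    }
    // Use the content type stored with the object instead of hard-coding image/jpeg
    res.set('Content-Type', data.ContentType);
    res.send(data.Body);
  });
});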

If you use Lambda with API Gateway to retrieve images, there is no need to handle access keys yourself; the Lambda's execution role just needs the appropriate S3 permissions.
Read the image from the bucket and send it back as base64, so it can be used directly as the source of an image tag in HTML.
const AWS = require('aws-sdk');
// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
  var params = {
    "Bucket": "bucket-name",
    "Key": "object-name"
  };
  s3.getObject(params, function (err, data) {
    if (err) {
      callback(err, null);
    } else {
      let image = Buffer.from(data.Body).toString('base64');
      image = "data:" + data.ContentType + ";base64," + image;
      let response = {
        "statusCode": 200,
        "headers": {
          "Access-Control-Allow-Origin": "*",
          "Content-Type": data.ContentType
        },
        "body": image,
        "isBase64Encoded": true
      };
      callback(null, response);
    }
  });
};
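On the client side, the idea is that the returned data URI can be assigned straight to an image's src attribute. A minimal sketch, assuming the API Gateway endpoint delivers the data-URI string produced above as the response body (the endpoint URL and element id are placeholders):

// Hypothetical client-side usage of the Lambda response above.
fetch('https://your-api-id.execute-api.us-east-1.amazonaws.com/prod/image')
  .then(function (res) { return res.text(); })
  .then(function (dataUri) {
    // dataUri looks like "data:image/jpeg;base64,...."
    document.getElementById('preview').src = dataUri;
  });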

You're looking for the getObject() method.

Assuming you are using aws-sdk, you can use the getObject method. Here is some sample code:
exports.getObjects = function (req, res) {
  var item = req.body;
  var params = { Bucket: req.params.bucketName, Key: 'keyname' }; // keyname can be a filename
  s3.getObject(params, function (err, data) {
    if (err) {
      return res.send({ "error": err });
    }
    res.send({ data });
  });
}

A better and faster approach is to pipe the stream straight to the response. This works with the MinIO S3 client, and I believe it also works with the AWS JavaScript client.
const Minio = require('minio');

const s3Client = new Minio.Client({
  endPoint: 'ep',
  accessKey: 'ak',
  secretKey: 'sk'
});

router.get('/image/:imageId', (req, res) => {
  const { imageId } = req.params;
  s3Client.getObject('bucket', imageId, (err, stream) => {
    if (err) return res.status(500).send(err);
    const contentType = stream.headers['content-type'];
    contentType && res.setHeader('Content-Type', contentType);
    stream.pipe(res);
  });
});

This is what I use with aws-sdk
const params = { Bucket: "YOUR_BUCKET_NAME", Key: "YOUR_FILENAME"};
s3.getObject(params).createReadStream().pipe(res);
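One caveat with this shortcut: if the key doesn't exist, the error is emitted on the stream rather than passed to a callback, so it's worth attaching an error handler. A sketch, assuming the same params object and an Express res as above:

// Sketch: same one-liner as above, but with stream error handling so a
// missing key doesn't crash the process with an unhandled 'error' event.
const params = { Bucket: "YOUR_BUCKET_NAME", Key: "YOUR_FILENAME" };
s3.getObject(params)
  .createReadStream()
  .on('error', function (err) {
    // e.g. NoSuchKey — end the response with an error status
    res.status(404).end();
  })
  .pipe(res);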

Related

S3 callbacks get ignored

I'm trying to upload a base64 encoded image to S3 through this route, but the callbacks get completely ignored and the code jumps straight to res.json("SUCCESS");
Here is the route:
AWS.config.update({
  accessKeyId: "xxxxxxxxxxxxxx",
  secetAccessKey: "xxxxxxxxxxxxxxxxxxxxxx",
  region: "us-east-1"
});

const s3 = new AWS.S3();
....
router.post("/imageupload", async (req, res) => {
  const base64 = req.body.base64;
  try {
    const params = {
      Bucket: process.env.bucketName,
      Key: "images/newImage",
      Body: base64
    };
    await s3.putObject(params, function(err, data) {
      if (err) res.json(err);
      else res.json(data);
    });
    res.json("SUCCESS");
  } catch (e) {
    console.log(e.message);
    res.status(500).json(e.message);
  }
});
Any help is much appreciated thanks!
EDIT FIXED:
I figured out what the problem was:
I had recently reformatted my computer, which meant I had to reinstall the AWS CLI and reconfigure my AWS credentials.
That was it.
The AWS documentation covers using promises with the SDK:
var s3 = new AWS.S3({ apiVersion: '2006-03-01', region: 'us-west-2' });

var params = {
  Bucket: 'bucket',
  Key: 'example2.txt',
  Body: 'Uploaded text using the promise-based method!'
};

var putObjectPromise = s3.putObject(params).promise();
putObjectPromise.then(function(data) {
  console.log('Success');
}).catch(function(err) {
  console.log(err);
});
You can also promisify all functions by using a library such as bluebird
AWS.config.setPromisesDependency(require('bluebird'));
Here's an example using your code
router.post("/imageupload", async (req, res) => {
const base64 = req.body.base64;
try {
const params = {
Bucket: process.env.bucketName,
Key: "images/newImage",
Body: base64
};
const data = await s3.putObject(params).promise()
res.json(data);
} catch (e) {
console.log(e.message);
res.status(500).json(e.message);
}
});
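One more thing worth noting about this route: the Body above is still the raw base64 string, so the stored object will contain base64 text rather than actual image bytes. A hedged sketch of decoding it first, assuming req.body.base64 is a plain base64 payload without a "data:" prefix:

// Sketch: decode the base64 payload into a Buffer before uploading, so the
// object stored in S3 holds the real image bytes.
const buffer = Buffer.from(req.body.base64, "base64");
const data = await s3
  .putObject({
    Bucket: process.env.bucketName,
    Key: "images/newImage",
    Body: buffer,
    ContentType: "image/png", // assumption: adjust to the real image type
  })
  .promise();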

Uploading PDF Content Into An S3 Bucket

I'm trying to download PDF content from a remote location and upload that content to S3 as a .pdf file. I'm using Node.js inside an AWS Lambda. The s3.putObject call completes successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
const request = require('request')
const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
  url: viewUrl,
  headers: {
    'Content-Type': 'application/pdf'
  }
};
request(options, function(err, res, body){
  if(err){ return console.log(err) }
  const base64data = new Buffer(body, 'binary');
  const params = {
    Bucket: "myS3bucket",
    Key: "my-pdf.pdf",
    ContentType: "application/pdf",
    Body: base64data,
    ACL: 'public-read'
  };
  s3.putObject(params, function(err, data) {
    if (err) {
      console.log(err);
    } else {
      callback(null, JSON.stringify(data))
    }
  })
})
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?
Can you try this code? :)
const AWS = require('aws-sdk')
const request = require('request')

const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
  // encoding: null makes request return the body as a Buffer,
  // so the binary PDF data is not corrupted by string decoding
  return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
    function(err, res, body) {
      if (err)
        return reject({ status: 500, error: err })
      return resolve({ status: 200, body: body })
    })
})

promise.then((pdf) => {
  if (pdf.status == 200) {
    console.log('uploading file..')
    s3.putObject({
      Bucket: process.env.bucket,
      Body: pdf.body,
      Key: 'my-pdf.pdf',
      ACL: 'public-read'
    }, (err, data) => {
      if (err)
        console.log(err)
      else
        console.log('uploaded')
    })
  }
})
Let me know if anything comes up. Hope this helps!

How to Upload CSV file on S3 Bucket using NodeJS?

I am creating a CSV file on the fly from JSON content and uploading the generated CSV to an S3 bucket rather than saving the file locally first.
Below is my code snippet. With this code the CSV file is uploaded to the S3 bucket, but it does not seem to be in the correct CSV format.
var uploadCSVFileOnS3Bucket = function(next, csvFileContent, results) {
  console.log("uploadCSVFileOnS3Bucket function started");
  var bufferObject = new Buffer.from(JSON.stringify(csvFileContent));
  var filePath = configurationHolder.config.s3UploadFilePath;
  var s3 = new AWS.S3();
  var params = {
    Bucket: 'bucket_name',
    Key: 's3UploadFilePath',
    Body: bufferObject,
    CacheControl: 'public, max-age=86400'
  }
  s3.upload(params, function(err, data) {
    if (err) {
      console.log("Error at uploadCSVFileOnS3Bucket function", err);
      next(err);
    } else {
      console.log("File uploaded Successfully");
      next(null, filePath);
    }
  });
};
Also, I am using "json2csv" npm module for generating csv file content from JSON.
Below is the code:
var generateCSVFile = function(next, callback, csvFileContent) {
  console.log("generateCSVFile function started", csvFileContent);
  if (csvFileContent && csvFileContent.length > 0) {
    var fields = ['field1', 'field2', 'field3', ........];
    var csv = json2csv({ data: csvFileContent, fields: fields });
    console.log('created', csv);
    next(null, csv);
  } else {
    next(null, []);
  }
}
Please let us know where the above code is going wrong.
Hi, I tried again with the header values below and it worked for me. Here is the code:
// contentDisposition is assumed to come from the "content-disposition" npm module
var contentDisposition = require('content-disposition');

var s3 = new AWS.S3();
var params = {
  Bucket: bucketName,
  Key: filePath,
  Body: csvFileContent,
  ContentType: 'application/octet-stream',
  ContentDisposition: contentDisposition(filePath, {
    type: 'inline'
  }),
  CacheControl: 'public, max-age=86400'
}
s3.putObject(params, function(err, data) {
  if (err) {
    console.log("Error at uploadCSVFileOnS3Bucket function", err);
    next(err);
  } else {
    console.log("File uploaded Successfully");
    next(null, filePath);
  }
});
Add ContentDisposition: 'attachment' to your params as well.
Otherwise, you can also read the file and upload it to S3:
fs.readFile(FILEPATH, function(err, file_buffer) {
  var params = {
    Bucket: 'your-bucket-name',
    Key: key,
    ContentDisposition: 'attachment',
    Body: file_buffer
  };
  s3.upload(params, function(err, data) {
    if (err) {
      console.log("Error in upload");
      callback(err, null)
    }
    if (data) {
      console.log("Upload Success", data);
      callback(null, data)
    }
  });
});
Using async/await:
import { parse } from "json2csv";

const saveCsv = async () => {
  const payload = [{ a: 1, b: 2 }];
  const csvPayload = parse(payload, { header: true, defaultValue: "-----" });
  const s3Key = 'filename.csv';
  const bucketName = 'bucket-name';
  // s3.put here appears to be a project-specific helper, not the stock aws-sdk API
  await s3.put(bucketName, s3Key, csvPayload);
}
Just like that, without creating a buffer or using JSON.stringify().
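Since s3.put in the snippet above looks like a custom wrapper, here is a rough equivalent using the plain aws-sdk v2 client (putObject is the standard call; the bucket and key names are placeholders):

// Sketch of the same upload with the stock aws-sdk v2 client.
const AWS = require("aws-sdk");
const { parse } = require("json2csv");

const s3 = new AWS.S3();

const saveCsv = async () => {
  const payload = [{ a: 1, b: 2 }];
  const csvPayload = parse(payload, { header: true, defaultValue: "-----" });
  await s3
    .putObject({
      Bucket: "bucket-name",   // placeholder
      Key: "filename.csv",     // placeholder
      Body: csvPayload,        // the CSV string can be passed directly
      ContentType: "text/csv",
    })
    .promise();
};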
Try this, it worked for me:
var fs = require('fs')
var AWS = require('aws-sdk')

AWS.config.update({
  region: '', // use appropriate region
  accessKeyId: '', // use your access key
  secretAccessKey: '' // use your secret key
})

var s3 = new AWS.S3()

fs.readFile('contacts.csv', 'utf-8', (err, data) => {
  if (err) throw err;
  const params = {
    Bucket: 'testBucket', // pass your bucket name
    Key: 'contacts.csv', // file will be saved as testBucket/contacts.csv
    Body: data
  };
  s3.upload(params, (s3Err, data) => {
    if (s3Err) throw s3Err
    console.log(`File uploaded successfully at ${data.Location}`)
  });
});

NodeJS Request Upload Image

I'm trying to upload an image using Node.js, Express and the request module, but I keep getting a 415 back from the Amazon S3 instance.
fs.createReadStream(req.files.image.path).pipe(request.post(defaults.url, {
  form: {
    param_1: '',
    param_2: ''
  }
}, function(error, response, body) {
  if (error) {
    callback(error, null);
  } else {
    if (response.statusCode === 200) {
      callback({}, body);
    } else {
      callback(body, response);
    }
  }
}));
I think the image is not getting attached to the request, but I'm not 100% sure. Any advice?
pipe expects a Writable stream as its parameter. You can use the Express res object directly as the pipe destination. But if you would like to upload to S3, you can read the uploaded file and use putObject to write it to S3:
var fs = require('fs')

fs.readFile(req.files.image.path, function (err, data) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  var bucket = ''
  s3.putObject({
    ACL: 'public-read', // by default private access
    Bucket: bucket,
    Key: file_name,
    Body: data
  }, function (err, data) {
    if (err) {
      console.log(err)
      res.send(500, { msg: 'image upload failed', error: err })
    } else {
      console.log('S3 upload Successful')
      res.send({})
    }
  });
});
If you would like to download, you can read the object from S3 and pipe it back to the response:
app.get('/download/:file', function(req, res, next) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  s3.getObject({
    Bucket: '',
    Key: req.params.file
  }, function (err, data) {
    if (err) console.log(err)
    var fs = require('fs')
    var filePath = __dirname + "/downloads/" + req.params.file
    fs.writeFile(filePath, data.Body, function (err) {
      if (err) console.log(err)
      else {
        res.attachment(filePath)
        var filestream = fs.createReadStream(filePath);
        filestream.pipe(res);
        // TODO: delete file from server ?
      }
    });
  })
})
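A note on the download route above: writing the object to a local file first isn't required. Assuming the same aws-sdk v2 client and Express app, you can stream the object straight to the response instead:

// Sketch: stream the S3 object directly to the HTTP response,
// avoiding the temporary file on disk.
app.get('/download/:file', function (req, res, next) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  res.attachment(req.params.file)
  s3.getObject({ Bucket: 'your-bucket-name', Key: req.params.file })
    .createReadStream()
    .on('error', function (err) { next(err) })
    .pipe(res)
})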

Is there a way to upload to S3 from a url using node.js?

I found this question, but it doesn't seem to answer my question as I think it's still talking about local files.
I want to take, say, an imgur.com link and upload it to S3 using Node. Is knox capable of this, or do I need to use something else?
Not sure where to get started.
I'm not using knox but the official AWS SDK for JavaScript in Node.js. I issue a request to the URL with {encoding: null} in order to retrieve the data as a buffer, which can be passed directly to the Body parameter of s3.putObject(). Below is an example of putting a remote image into a bucket with aws-sdk and request.
var AWS = require('aws-sdk');
var request = require('request');

AWS.config.loadFromPath('./config.json');
var s3 = new AWS.S3();

function put_from_url(url, bucket, key, callback) {
  request({
    url: url,
    encoding: null
  }, function(err, res, body) {
    if (err)
      return callback(err, res);
    s3.putObject({
      Bucket: bucket,
      Key: key,
      ContentType: res.headers['content-type'],
      ContentLength: res.headers['content-length'],
      Body: body // buffer
    }, callback);
  })
}

put_from_url('http://a0.awsstatic.com/main/images/logos/aws_logo.png', 'your_bucket', 'media/aws_logo.png', function(err, res) {
  if (err)
    throw err;
  console.log('Uploaded data successfully!');
});
For those who are looking for a solution that doesn't involve callbacks and prefer promises, here is an alternative based on #micmia's code:
var AWS = require('aws-sdk'),
  request = require('request');

const bucketName = 'yourBucketName';
const bucketOptions = {...Your options};
var s3 = new AWS.S3(bucketOptions);

function UploadFromUrlToS3(url, destPath) {
  return new Promise((resolve, reject) => {
    request({
      url: url,
      encoding: null
    }, function(err, res, body) {
      if (err) {
        return reject(err);
      }
      var objectParams = {
        ContentType: res.headers['content-type'],
        ContentLength: res.headers['content-length'],
        Key: destPath,
        Body: body
      };
      resolve(s3.putObject(objectParams).promise());
    });
  });
}

UploadFromUrlToS3(
  'http://a0.awsstatic.com/main/images/logos/aws_logo.png',
  'your/s3/path/aws_logo.png')
  .then(function() {
    console.log('image was saved...');
  }).catch(function(err) {
    console.log('image was not saved!', err);
  });
UploadFromUrlToS3(
'http://a0.awsstatic.com/main/images/logos/aws_logo.png',
'your/s3/path/aws_logo.png' )
.then(function() {
console.log('image was saved...');
}).catch(function(err) {
console.log('image was not saved!',err);
});
Building on #Yuri's post, for those who would like to use axios instead of request, with ES6 syntax for a more modern approach. This also adds the required Bucket property to the params (and it downloads any file, not only images):
const uploadFileToS3 = (url, bucket, key) => {
  return axios.get(url, { responseType: "arraybuffer", responseEncoding: "binary" }).then((response) => {
    const params = {
      ContentType: response.headers["content-type"],
      ContentLength: response.data.length.toString(), // or response.headers["content-length"] if available for the type of file downloaded
      Bucket: bucket,
      Body: response.data,
      Key: key,
    };
    return s3.putObject(params).promise();
  });
};

uploadFileToS3(<your_file_url>, <your_s3_bucket>, <your_s3_path>)
  .then(() => console.log("File saved!"))
  .catch((error) => console.log(error));
Same thing as the above answer but with fetch:
async function upload(url: string, key: string, bucket: string) {
  const response = await fetch(url);
  const contentType = response.headers.get("content-type") ?? undefined;
  const contentLength =
    response.headers.get("content-length") != null
      ? Number(response.headers.get("content-length"))
      : undefined;
  // fetch's response.body is a web ReadableStream, so convert it to a Buffer
  // before handing it to the aws-sdk v2 client
  const body = Buffer.from(await response.arrayBuffer());
  return s3
    .putObject({
      Bucket: bucket,
      Key: key,
      ContentType: contentType,
      ContentLength: contentLength,
      Body: body,
    })
    .promise();
}
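Example usage (the URL, key, and bucket below are placeholders):

// Hypothetical call; substitute a real URL, object key, and bucket name.
upload("https://example.com/logo.png", "media/logo.png", "your-bucket-name")
  .then(() => console.log("File saved!"))
  .catch((err) => console.log(err));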
Yes. There's an example of doing this in the knox README
http.get('http://google.com/doodle.png', function(res) {
  var headers = {
    'Content-Length': res.headers['content-length'],
    'Content-Type': res.headers['content-type']
  };
  client.putStream(res, '/doodle.png', headers, function(err, res) {
    // Logic
  });
});
