Uploading PDF Content Into An S3 Bucket - node.js

I'm trying to download PDF content from a remote location and upload it into S3 as a .pdf file. I'm using Node.js, in the context of an AWS Lambda. The s3.putObject callback resolves successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
const request = require('request')

const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
    url: viewUrl,
    headers: {
        'Content-Type': 'application/pdf'
    }
};

request(options, function(err, res, body){
    if(err){return console.log(err)}
    const base64data = new Buffer(body, 'binary');
    const params = {
        Bucket: "myS3bucket",
        Key: "my-pdf.pdf",
        ContentType: "application/pdf",
        Body: base64data,
        ACL: 'public-read'
    };
    s3.putObject(params, function(err, data) {
        if (err) {
            console.log(err);
        } else {
            callback(null, JSON.stringify(data))
        }
    })
})
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?

Can you try this code? :)
const AWS = require('aws-sdk')
const request = require('request')

const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
    return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
        function(err, res, body){
            if(err)
                return reject({ status: 500, error: err })
            return resolve({ status: 200, body: body })
        })
})

promise.then((pdf) => {
    if(pdf.status == 200)
    {
        console.log('uploading file..')
        s3.putObject({
            Bucket: process.env.bucket,
            Body: pdf.body,
            Key: 'my-pdf.pdf',
            ACL: 'public-read'
        }, (err, data) => {
            if(err)
                console.log(err)
            else
                console.log('uploaded')
        })
    }
})
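The important difference from the code in the question is encoding: null, which makes request hand the body back as a raw Buffer instead of decoding it into a string (that decoding is what blanks out the PDF). As a minimal sketch, the same fix applied to the original options object would look like:

const options = {
    url: viewUrl,
    encoding: null, // keep the response as a raw Buffer; without this the binary data gets mangled
    headers: {
        'Content-Type': 'application/pdf'
    }
};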
Let me know if anything is unclear. Hope this helps!

Related

nodejs Lambda with S3 upload via API Gateway

I've been trying to get a simple serverless API Gateway -> NodeJS Lambda -> S3 setup working, but it appears that the Lambda just uploads corrupt files.
This code should download the file from a URL and then upload it straight to S3.
I've tried both putObject and upload (with different params) with no success. Comparing file sizes, the original I download is 24 KB and the (corrupt) image downloaded from S3 is 44 KB.
I simply test the application by doing a POST to the API Gateway URL.
Any ideas?
var url =
    "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1d/AmazonWebservices_Logo.svg/500px-AmazonWebservices_Logo.svg.png"

module.exports.upload = function(event, context, callback) {
    https.get(url, function(res) {
        var body = ""
        res.on("data", function(chunk) {
            // Aggregates chunks
            body += chunk
        })
        res.on("end", function() {
            console.log(body)
            // Once you received all chunks, send to S3 - putObject only
            var params = {
                Bucket: S3_BUCKET_NAME,
                Key: "aws-logo.png",
                Body: body
            }
            var s3Params = {
                Bucket: S3_BUCKET_NAME,
                Key: "aws-logo-upload.png",
                Body: body,
                ContentType: "image/png"
            }
            s3.upload(s3Params, function(err, data) {
            // s3.putObject(params, function(err, data) {
                if (err) {
                    console.log("error")
                    console.error(err, err.stack)
                    callback(null, { statusCode: 404, error: err })
                } else {
                    console.log("ok")
                    console.log(data)
                    let response = {
                        statusCode: 200
                    }
                    callback(null, response)
                }
            })
        })
    })
}
The following code works for me outside of API Gateway/Lambda. It yields a PNG in S3 that's downloadable as a valid 23.7 KB image. I'd expect the equivalent to work in Lambda.
const AWS = require('aws-sdk');
const https = require('https');

const s3 = new AWS.S3();
const S3_BUCKET_NAME = 'your-bucket-name'; // placeholder: set to your bucket

const logourl =
    'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1d/AmazonWebservices_Logo.svg/500px-AmazonWebservices_Logo.svg.png';

const getThenUpload = (url, callback) => {
    https.get(url, (res) => {
        const data = [];
        res.on('data', (chunk) => {
            data.push(chunk);
        });
        res.on('end', () => {
            const params = {
                Bucket: S3_BUCKET_NAME,
                Key: 'aws-logo-upload.png',
                Body: Buffer.concat(data),
                ContentType: 'image/png',
            };
            s3.upload(params, (err, rsp) => {
                if (err) {
                    console.error(err, err.stack);
                    callback(err, { statusCode: 404, err });
                } else {
                    console.log(rsp);
                    callback(null, { statusCode: 200 });
                }
            });
        });
    });
};

getThenUpload(logourl, (err, data) => {
    if (err) {
        console.error(`Error: ${err}`);
    } else {
        console.log(`Data: ${JSON.stringify(data)}`);
    }
});
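The key difference from the question's code is that the chunks are collected as Buffers and joined with Buffer.concat, so the binary data is never coerced into a UTF-8 string. Inside Lambda, a minimal sketch of the wiring would just delegate to getThenUpload (the handler name and export style here are assumptions; adjust them to your serverless config):

module.exports.upload = (event, context, callback) => {
    // Delegate to getThenUpload; it invokes the Lambda callback when the S3 upload finishes.
    getThenUpload(logourl, callback);
};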

How to Upload CSV file on S3 Bucket using NodeJS?

I am creating a CSV file on the fly from JSON content and uploading the generated CSV file to an S3 bucket rather than first saving the file locally.
Below is my code snippet. Using it, the CSV file is uploaded to the S3 bucket, but it does not seem to be in the correct CSV format.
var uploadCSVFileOnS3Bucket = function(next, csvFileContent, results) {
    console.log("uploadCSVFileOnS3Bucket function started");
    var bufferObject = new Buffer.from(JSON.stringify(csvFileContent));
    var filePath = configurationHolder.config.s3UploadFilePath;
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'bucket_name',
        Key: 's3UploadFilePath',
        Body: bufferObject,
        CacheControl: 'public, max-age=86400'
    }
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error at uploadCSVFileOnS3Bucket function", err);
            next(err);
        } else {
            console.log("File uploaded Successfully");
            next(null, filePath);
        }
    });
};
Also, I am using the "json2csv" npm module for generating the CSV content from JSON.
Below is the code:
var generateCSVFile = function(next, callback, csvFileContent) {
    console.log("generateCSVFile function started", csvFileContent);
    if (csvFileContent && csvFileContent.length > 0) {
        var fields = ['field1', 'field2', 'field3', ........];
        var csv = json2csv({ data: csvFileContent, fields: fields });
        console.log('created', csv);
        next(null, csv);
    }
    else {
        next(null, []);
    }
}
Please let us know where the above code is going wrong.
Hi, I tried again with the header values below and it worked for me. Here is the code:
var contentDisposition = require('content-disposition'); // from the "content-disposition" npm package

var s3 = new AWS.S3();
var params = {
    Bucket: bucketName,
    Key: filePath,
    Body: csvFileContent,
    ContentType: 'application/octet-stream',
    ContentDisposition: contentDisposition(filePath, {
        type: 'inline'
    }),
    CacheControl: 'public, max-age=86400'
}

s3.putObject(params, function(err, data) {
    if (err) {
        console.log("Error at uploadCSVFileOnS3Bucket function", err);
        next(err);
    } else {
        console.log("File uploaded Successfully");
        next(null, filePath);
    }
});
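The substantive change from the question's code is that Body is the raw CSV string produced by json2csv, not a Buffer of JSON.stringify output. A minimal sketch of that wiring (the text/csv ContentType is an assumption; keep the octet-stream/ContentDisposition setup above if you want the download behaviour):

var csv = json2csv({ data: csvFileContent, fields: fields });
var params = {
    Bucket: bucketName,
    Key: filePath,
    Body: csv, // plain CSV text straight from json2csv
    ContentType: 'text/csv' // assumption: adjust if you need the inline/attachment behaviour above
};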
Add ContentDisposition: 'attachment' to your params as well.
Otherwise, you can also read the file and upload it to S3:
fs.readFile(FILEPATH, function(err, file_buffer) {
    var params = {
        Bucket: bucketName, // your bucket name
        Key: key,
        ContentDisposition: 'attachment',
        Body: file_buffer
    };
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error in upload");
            callback(err, null)
        }
        if (data) {
            console.log("Upload Success", data);
            callback(null, data)
        }
    });
});
Using async/await:
import { parse } from "json2csv";

const saveCsv = async () => {
    const payload = [{ a: 1, b: 2 }]
    const csvPayload = parse(payload, { header: true, defaultValue: "-----" });

    const s3Key = 'filename.csv';
    const bucketName = 'bucket-name';

    // putObject accepts the CSV string directly as Body
    await s3.putObject({ Bucket: bucketName, Key: s3Key, Body: csvPayload }).promise();
}
Just like that, without creating a Buffer or using JSON.stringify().
Try this, it worked for me:
var fs = require('fs')
var AWS = require('aws-sdk')

AWS.config.update({
    region: '', // use appropriate region
    accessKeyId: '', // use your access key
    secretAccessKey: '' // use your secret key
})

var s3 = new AWS.S3()

fs.readFile('contacts.csv', 'utf-8', (err, data) => {
    if (err) throw err;
    const params = {
        Bucket: 'testBucket', // pass your bucket name
        Key: 'contacts.csv', // file will be saved as testBucket/contacts.csv
        Body: data
    };
    s3.upload(params, (s3Err, data) => {
        if (s3Err) throw s3Err
        console.log(`File uploaded successfully at ${data.Location}`)
    });
});

How can I return data from a callback function in Node.js

I am trying to create a function that returns either the error data from AWS or the { ETag: '"74..."' } data response from the callback. This code currently writes my buffer to the S3 bucket, but I want to return the ETag or the error data from the function, and I keep getting undefined. Any help would be appreciated.
function aws(file, name) {
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'Bucket-Name',
        ACL: 'public-read',
        Key: name,
        Body: file
    }, function(err, data) {
        if (err) {
            console.log('Something went wrong')
            return err;
        } else {
            console.log('Successfully uploaded image');
            console.log(data);
            return data;
        }
    });
}

var response = aws(buffer, 'file.png');
Solved my problem with a Promise. Hope this helps someone else someday :)
const aws = function (file, name) {
    return new Promise((resolve, reject) => {
        let s3 = new AWS.S3();
        s3.putObject({
            Bucket: 'Bucket-Name',
            ACL: 'public-read',
            Key: name,
            Body: file
        }, function (err, data) {
            if (err) {
                console.log('Something went wrong')
                reject(err);
            } else {
                console.log('Successfully uploaded image');
                resolve(data);
            }
        });
    });
}

aws(buffer, 'file.png')
    .then(response => {
        res.set({ 'Content-Type': 'application/json' });
        res.status(200);
        res.send(response);
    })
    .catch(console.error);
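As a side note, the v2 SDK can also hand back a promise directly via .promise(), which avoids wrapping the callback yourself. A minimal sketch of the same function written that way:

// Sketch: same upload using the SDK's built-in .promise() helper (aws-sdk v2).
const awsUpload = (file, name) => {
    const s3 = new AWS.S3();
    return s3.putObject({
        Bucket: 'Bucket-Name',
        ACL: 'public-read',
        Key: name,
        Body: file
    }).promise(); // resolves with the { ETag: ... } response, rejects with the error
};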

Uploading Image from AWS Lambda to S3 via API Gateway in Binary format

My Lambda receives the binary data of an image from the user in the request body (event.body).
I upload it to S3 with no error, but when I download it, the image is corrupted and can't be opened.
I also need to return the URL of the uploaded image to the user.
Please Help!
module.exports.uploadImage = (event, context, callback) => {
    var buf = new Buffer(new Buffer(event.body).toString('base64').replace(/^data:image\/\w+;base64,/, ""), 'base64');
    var data = {
        Key: Date.now() + "",
        Body: buf,
        ContentEncoding: 'base64',
        ContentType: 'image/png',
        ACL: 'public-read'
    };
    s3Bucket.putObject(data, function(err, data){
        if (err) {
            console.log(err);
            console.log('Error uploading data: ', data);
        } else {
            console.log('Successfully uploaded the image!');
        }
        callback(null, data);
    });
};
You can upload the image to S3 as a Node Buffer. The SDK does the conversion for you.
const AWS = require("aws-sdk");
var s3 = new AWS.S3();

module.exports.handler = (event, context, callback) => {
    var buf = Buffer.from(event.body.replace(/^data:image\/\w+;base64,/, ""), "base64");
    var data = {
        Bucket: "sample-bucket",
        Key: Date.now() + "",
        Body: buf,
        ContentType: 'image/png',
        ACL: 'public-read'
    };
    s3.putObject(data, function(err, data){
        if (err) {
            console.log(err);
            console.log('Error uploading data: ', data);
        } else {
            console.log('Successfully uploaded the image!');
        }
        callback(null, data);
    });
};
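The question also asks for the URL of the uploaded image. One way to return it, sketched here under the assumption that the object is public-read and served from the default S3 endpoint (adjust the host for your region, or use s3.getSignedUrl for private buckets), is to build it from the bucket and key:

var params = {
    Bucket: "sample-bucket",
    Key: Date.now() + "",
    Body: buf,
    ContentType: 'image/png',
    ACL: 'public-read'
};
s3.putObject(params, function(err) {
    if (err) return callback(err);
    // Assumes the bucket allows public reads on the object uploaded above.
    var url = "https://" + params.Bucket + ".s3.amazonaws.com/" + params.Key;
    callback(null, { statusCode: 200, body: JSON.stringify({ url: url }) });
});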

Is there a way to upload to S3 from a url using node.js?

I found this question, but it doesn't seem to answer my question as I think it's still talking about local files.
I want to take, say, an imgur.com link and upload it to S3 using Node. Is knox capable of this, or do I need to use something else?
Not sure where to get started.
I'm not using knox but the official AWS SDK for JavaScript in Node.js. I issue a request to a URL with { encoding: null } in order to retrieve the data as a Buffer, which can be passed directly to the Body parameter of s3.putObject(). Below is an example of putting a remote image into a bucket with aws-sdk and request.
var AWS = require('aws-sdk');
var request = require('request');

AWS.config.loadFromPath('./config.json');
var s3 = new AWS.S3();

function put_from_url(url, bucket, key, callback) {
    request({
        url: url,
        encoding: null
    }, function(err, res, body) {
        if (err)
            return callback(err, res);
        s3.putObject({
            Bucket: bucket,
            Key: key,
            ContentType: res.headers['content-type'],
            ContentLength: res.headers['content-length'],
            Body: body // buffer
        }, callback);
    })
}

put_from_url('http://a0.awsstatic.com/main/images/logos/aws_logo.png', 'your_bucket', 'media/aws_logo.png', function(err, res) {
    if (err)
        throw err;
    console.log('Uploaded data successfully!');
});
For those who are looking for a solution that doesn't involve callbacks and prefer promises, here is an alternative based on @micmia's code:
var AWS = require('aws-sdk'),
    request = require('request');

const bucketName = 'yourBucketName';
const bucketOptions = { /* your options */ };
var s3 = new AWS.S3(bucketOptions);

function UploadFromUrlToS3(url, destPath) {
    return new Promise((resolve, reject) => {
        request({
            url: url,
            encoding: null
        }, function(err, res, body) {
            if (err) {
                return reject(err);
            }
            var objectParams = {
                ContentType: res.headers['content-type'],
                ContentLength: res.headers['content-length'],
                Key: destPath,
                Body: body
            };
            resolve(s3.putObject(objectParams).promise());
        });
    });
}

UploadFromUrlToS3(
    'http://a0.awsstatic.com/main/images/logos/aws_logo.png',
    'your/s3/path/aws_logo.png')
    .then(function() {
        console.log('image was saved...');
    }).catch(function(err) {
        console.log('image was not saved!', err);
    });
Building on @Yuri's post, for those who would like to use axios instead of request, plus ES6 syntax for a more modern approach. I also added the required Bucket property to the params (and it downloads any file, not only images):
const uploadFileToS3 = (url, bucket, key) => {
    return axios.get(url, { responseType: "arraybuffer", responseEncoding: "binary" }).then((response) => {
        const params = {
            ContentType: response.headers["content-type"],
            ContentLength: response.data.length.toString(), // or response.headers["content-length"] if available for the type of file downloaded
            Bucket: bucket,
            Body: response.data,
            Key: key,
        };
        return s3.putObject(params).promise();
    });
}

uploadFileToS3(<your_file_url>, <your_s3_bucket>, <your_s3_path>)
    .then(() => console.log("File saved!"))
    .catch((error) => console.log(error));
Same thing as the above answer but with fetch:
async function upload(url: string, key: string, bucket: string) {
    const response = await fetch(url)
    const contentType = response.headers.get("content-type") ?? undefined;
    const contentLength =
        response.headers.get("content-length") != null
            ? Number(response.headers.get("content-length"))
            : undefined;
    return s3
        .putObject({
            Bucket: bucket,
            Key: key,
            ContentType: contentType,
            ContentLength: contentLength,
            // Buffer the whole body; fetch's response.body is a web stream, not something putObject accepts directly
            Body: Buffer.from(await response.arrayBuffer()),
        })
        .promise();
}
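A usage sketch for the helper above, reusing the AWS logo URL from the earlier answers (the bucket name is a placeholder):

upload('http://a0.awsstatic.com/main/images/logos/aws_logo.png', 'media/aws_logo.png', 'your_bucket')
    .then(() => console.log('Uploaded'))
    .catch((err) => console.error(err));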
Yes. There's an example of doing this in the knox README:
http.get('http://google.com/doodle.png', function(res){
    var headers = {
        'Content-Length': res.headers['content-length'],
        'Content-Type': res.headers['content-type']
    };
    client.putStream(res, '/doodle.png', headers, function(err, res){
        // Logic
    });
});
