Node.js Lambda with S3 upload via API Gateway

I've been trying to get a simple serverless API Gateway -> Node.js Lambda -> S3 pipeline working, but the Lambda appears to upload corrupt files.
The code below downloads a file from a URL and then uploads it straight to S3.
I've tried both putObject and upload (with the appropriate params for each) with no success. Comparing file sizes, the original download is 24 KB while the (corrupt) image downloaded from S3 is 44 KB.
I test the application simply by doing a POST to the API Gateway URL.
Any ideas?
const AWS = require("aws-sdk")
const https = require("https")
const s3 = new AWS.S3()

var url =
  "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1d/AmazonWebservices_Logo.svg/500px-AmazonWebservices_Logo.svg.png"

module.exports.upload = function(event, context, callback) {
  https.get(url, function(res) {
    var body = ""
    res.on("data", function(chunk) {
      // Aggregates chunks
      body += chunk
    })
    res.on("end", function() {
      console.log(body)
      // Once all chunks are received, send to S3 - putObject only
      var params = {
        Bucket: S3_BUCKET_NAME,
        Key: "aws-logo.png",
        Body: body
      }
      var s3Params = {
        Bucket: S3_BUCKET_NAME,
        Key: "aws-logo-upload.png",
        Body: body,
        ContentType: "image/png"
      }
      s3.upload(s3Params, function(err, data) {
        // s3.putObject(params, function(err, data) {
        if (err) {
          console.log("error")
          console.error(err, err.stack)
          callback(null, { statusCode: 404, error: err })
        } else {
          console.log("ok")
          console.log(data)
          let response = {
            statusCode: 200
          }
          callback(null, response)
        }
      })
    })
  })
}

The following code works for me outside of API Gateway/Lambda. It yields a PNG in S3 that's downloadable as a valid 23.7 KB image, and I'd expect the equivalent to work in Lambda. The key difference is that it accumulates the response chunks as Buffers and joins them with Buffer.concat, instead of appending chunks to a string; string concatenation applies a character decoding to binary data and corrupts it, which is why the upload grows from 24 KB to 44 KB.
const AWS = require('aws-sdk');
const https = require('https');

const s3 = new AWS.S3();
const logourl =
  'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1d/AmazonWebservices_Logo.svg/500px-AmazonWebservices_Logo.svg.png';

const getThenUpload = (url, callback) => {
  https.get(url, (res) => {
    const data = [];
    res.on('data', (chunk) => {
      data.push(chunk);
    });
    res.on('end', () => {
      const params = {
        Bucket: S3_BUCKET_NAME,
        Key: 'aws-logo-upload.png',
        Body: Buffer.concat(data),
        ContentType: 'image/png',
      };
      s3.upload(params, (err, rsp) => {
        if (err) {
          console.error(err, err.stack);
          callback(err, { statusCode: 404, err });
        } else {
          console.log(rsp);
          callback(null, { statusCode: 200 });
        }
      });
    });
  });
};

getThenUpload(logourl, (err, data) => {
  if (err) {
    console.error(`Error: ${err}`);
  } else {
    console.log(`Data: ${JSON.stringify(data)}`);
  }
});
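
Since the question expects the same to work behind API Gateway, here is a minimal sketch of how getThenUpload could be wired into a Lambda handler. The handler name and response shape are assumptions, not taken from the original code:

// Hypothetical Lambda wrapper reusing getThenUpload from above.
module.exports.upload = (event, context, callback) => {
  getThenUpload(logourl, (err, rsp) => {
    if (err) {
      callback(null, { statusCode: 500, body: JSON.stringify({ error: err.message }) });
    } else {
      callback(null, { statusCode: 200, body: JSON.stringify(rsp) });
    }
  });
};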


Upload a stream to S3

I read Pipe a stream to s3.upload(), but I'm having difficulty with it; I'm not sure it actually solves my case, though I have tried.
What I'm doing is a GET call to www.example.com. This returns a stream, and I want to upload that stream to S3.
Here's my attempt:
fetch('https://www.example.com/' + fileName, {
  method: 'GET',
  headers: {
    'Authorization': "Bearer " + myAccessToken,
  },
})
  .then(function (response) {
    return response.text();
  })
  .then(function (data) {
    uploadToS3(data)
  });

const uploadToS3 = (data) => {
  // Setting up S3 upload parameters
  const params = {
    Bucket: myBucket,
    Key: "fileName",
    Body: data
  };
  // Uploading files to the bucket
  s3.upload(params, function (err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};
The output is: File uploaded successfully. https://exampleBucket.s3.amazonaws.com/fileName.pdf
However, the uploaded file is blank.
I figured it out, though I did not keep using fetch. Instead I actually download the file to disk, upload it to S3, and then delete the local file.
const axios = require('axios')
const fs = require('fs')
const AWS = require('aws-sdk')
const s3 = new AWS.S3()

function getNewFilesFromExampleDotCom(myAccessToken, fileName, fileKey) {
  let url2 = 'https://example.com' + fileKey;
  axios
    .get(url2, {
      headers: { 'Authorization': "Bearer " + myAccessToken },
      responseType: 'stream',
    })
    .then(response => {
      let file = fileName;
      response.data.pipe(fs.createWriteStream(file))
      let myFileInfo = [];
      if (myFileInfo.length > 0) {
        myFileInfo.splice(0, myFileInfo.length)
      }
      myFileInfo.push(file)
      processArray(myFileInfo)
      console.log(file + " saved")
    })
    .catch(error => console.log(error));
}

async function processArray(array) {
  for (const item of array) {
    await delayedLog(item);
  }
  console.log('Downloaded!');
  console.log('Uploading to s3!');
}

function delay() {
  return new Promise(resolve => setTimeout(resolve, 300));
}

async function delayedLog(item) {
  await delay();
  uploadFiles(item)
}

async function uploadFiles(file) {
  uploadToS3List(file)
  await new Promise((resolve, reject) => setTimeout(resolve, 1000));
  deleteMyFiles(file)
}

const uploadToS3List = (fileName) => {
  // Read content from the file
  const fileContent = fs.readFileSync(fileName);
  // Setting up S3 upload parameters
  const params = {
    Bucket: "myBucketName",
    Key: fileName,
    Body: fileContent
  };
  // Uploading files to the bucket
  s3.upload(params, function (err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};

function deleteMyFiles(path) {
  fs.unlink(path, (err) => {
    if (err) {
      console.error(err)
      return
    }
    console.log(path + " has been deleted")
  })
}
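
For what it's worth, the detour through the local filesystem can be avoided by piping the axios response stream into s3.upload via a PassThrough stream, which is what the "Pipe a stream to s3.upload()" approach boils down to. A minimal sketch under the same assumptions as above; streamToS3 is a hypothetical helper, and the bucket name and access token are placeholders:

const stream = require('stream')

function streamToS3(myAccessToken, fileKey) {
  // PassThrough acts as the writable end for axios and the readable Body for S3.
  const pass = new stream.PassThrough()
  const uploadPromise = s3.upload({
    Bucket: "myBucketName", // placeholder
    Key: fileKey,
    Body: pass
  }).promise()

  axios.get('https://example.com' + fileKey, {
    headers: { 'Authorization': "Bearer " + myAccessToken },
    responseType: 'stream',
  }).then(response => response.data.pipe(pass))

  return uploadPromise // resolves with the S3 response (including Location)
}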

Direct Image to S3 download function - access to data.key

I've been trying to figure this out for two days now.
I have the function below working; it basically takes a URL and uploads the file to S3. The problem is that I'm trying to return the file's location on S3 (data.key) out of the main function, but I'm getting either undefined or Promise { <pending> }.
var express = require("express"),
axios = require('axios').default,
stream = require("stream"),
aws = require('aws-sdk')
async function downloadImage(url) {
let contentType = 'application/octet-stream'
const s3 = new aws.S3();
// This returns undefined
var imageUrl
// This returns Promise { <pending>}
// var imageUrl = await uploadS3()
var imageRequest = axios({
method: 'get',
url: url,
responseType: 'stream'
}).then(function (response) {
if (response.status === 200) {
contentType = response.headers['content-type'];
response.data.pipe(uploadS3());
}
}).catch(function (error) {
console.log(error.message);
})
// This is where I can't get return
return imageUrl
function uploadS3() {
var pass = new stream.PassThrough();
var params = {
Bucket: "test_bucket",
Key: "test/" + Date.now().toString(),
Body: pass,
ContentType: contentType,
}
s3.upload(params, function (err, data) {
if (err) {
console.log(err)
} else {
// I have access to data.key only here
imageUrl = data.key
console.log(imageUrl)
}
})
return pass
}
}
I was trying something like the following. It gets the value as far as the then of the first function, but I still can't get it out of the main function.
async function uploadImage(url) {
  let contentType = 'application/octet-stream'
  const s3 = new aws.S3();
  // var imageUrl = await uploadStream()
  // var imageUrl = await Promise.all
  var imageUrl
  var imageRequest = axios({
    method: 'get',
    url: url,
    responseType: 'stream'
  }).then(function (response) {
    if (response.status === 200) {
      const { writeStream, promise } = uploadStream();
      contentType = response.headers['content-type'];
      const upload = response.data.pipe(writeStream);
      promise.then((data) => {
        // I have access to it here, but not outside
        console.log(data.key)
        imageUrl = data.key
      }).catch((err) => {
        console.log('upload failed.', err.message);
      });
    }
  }).catch(function (error) {
    console.log(error.message);
  })
  return imageUrl

  function uploadStream() {
    var pass = new stream.PassThrough();
    var params = {
      Bucket: "test_bucket",
      Key: "test/" + Date.now().toString(),
      Body: pass,
      ContentType: contentType,
    }
    return {
      writeStream: pass,
      promise: s3.upload(params).promise(),
    };
  }
}
I was able to get it to work the following way, if anybody is interested. I create a Promise that resolves with the result (the file's location on S3) once the upload completes.
I then use this inside my main function to do the download and return the location.
var express = require("express"),
axios = require('axios').default,
stream = require("stream"),
aws = require('aws-sdk')
// This is now can be run from any other function while getting access to file location (result)
uploadImageAsync("imageURLGoesHere").then(function (result) {
// result contains the location on S3 data.key (this can be changed in the code to return anything else from data)
console.log(result)
}, function (err) {
console.log(err);
})
async function uploadImageAsync(url) {
let contentType = 'application/octet-stream'
const s3 = new aws.S3();
return new Promise(function (resolve, reject) {
var imageRequest = axios({
method: 'get',
url: url,
responseType: 'stream'
}).then(function (response) {
if (response.status === 200) {
const { writeStream, promise } = uploadStream();
contentType = response.headers['content-type'];
response.data.pipe(writeStream);
return new Promise(function (resolve, reject) {
promise.then((data) => {
resolve(data.key);
}).catch((err) => {
console.log('upload failed.', err.message);
});
});
}
}).catch(function (error) {
console.log(error.message);
})
imageRequest.then(function (result) {
resolve(result);
})
function uploadStream() {
var pass = new stream.PassThrough();
var params = {
Bucket: "test_bucket",
Key: "test/" + Date.now().toString(),
Body: pass,
ContentType: contentType,
}
return {
writeStream: pass,
promise: s3.upload(params).promise(),
};
}
})
}
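
For comparison, the nested Promise wrapper above can be flattened considerably with async/await and the same PassThrough technique. A sketch of equivalent logic, using the same placeholder bucket and key; uploadImageAsyncFlat is a hypothetical name:

async function uploadImageAsyncFlat(url) {
  const s3 = new aws.S3()
  const response = await axios({ method: 'get', url: url, responseType: 'stream' })

  const pass = new stream.PassThrough()
  const uploadPromise = s3.upload({
    Bucket: "test_bucket",
    Key: "test/" + Date.now().toString(),
    Body: pass,
    ContentType: response.headers['content-type'],
  }).promise()

  response.data.pipe(pass)
  const data = await uploadPromise
  return data.key // the uploaded object's key
}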

Uploading PDF Content Into An S3 Bucket

I'm trying to download PDF content from a remote location and upload that content into S3 as a .pdf file. I'm using Node.js in the context of an AWS Lambda. The s3.putObject callback fires successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
const request = require('request')

const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
  url: viewUrl,
  headers: {
    'Content-Type': 'application/pdf'
  }
};

request(options, function (err, res, body) {
  if (err) { return console.log(err) }
  const base64data = new Buffer(body, 'binary');
  const params = {
    Bucket: "myS3bucket",
    Key: "my-pdf.pdf",
    ContentType: "application/pdf",
    Body: base64data,
    ACL: 'public-read'
  };
  s3.putObject(params, function (err, data) {
    if (err) {
      console.log(err);
    } else {
      callback(null, JSON.stringify(data))
    }
  })
})
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?
Can you try this code? :) The key change is encoding: null in the request options, which makes request return the body as a raw Buffer instead of decoding it as a string.
const AWS = require('aws-sdk')
const request = require('request')
const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
  return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
    function (err, res, body) {
      if (err)
        return reject({ status: 500, error: err })
      return resolve({ status: 200, body: body })
    })
})

promise.then((pdf) => {
  if (pdf.status == 200) {
    console.log('uploading file..')
    s3.putObject({
      Bucket: process.env.bucket,
      Body: pdf.body,
      Key: 'my-pdf.pdf',
      ACL: 'public-read'
    }, (err, data) => {
      if (err)
        console.log(err)
      else
        console.log('uploaded')
    })
  }
})
I'll be attentive to any follow-up. Hope this helps!

Uploading Image from AWS Lambda to S3 via API Gateway in Binary format

My Lambda receives binary data of an image from my user in the request body (event.body).
I upload it to S3 with no error, but when I download it, the image is corrupted and can't be opened.
I also need to return the URL of the uploaded image to the user.
Please help!
module.exports.uploadImage = (event, context, callback) => {
  var buf = new Buffer(new Buffer(event.body).toString('base64').replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: Date.now() + "",
    Body: buf,
    ContentEncoding: 'base64',
    ContentType: 'image/png',
    ACL: 'public-read'
  };
  s3Bucket.putObject(data, function (err, data) {
    if (err) {
      console.log(err);
      console.log('Error uploading data: ', data);
    } else {
      console.log('successfully uploaded the image!');
    }
    callback(null, data);
  });
};
You can upload the image to S3 as a Node Buffer; the SDK does the conversion for you.
const AWS = require("aws-sdk");
var s3 = new AWS.S3();
module.exports.handler = (event, context, callback) => {
var buf = Buffer.from(event.body.replace(/^data:image\/\w+;base64,/, ""),"base64");
var data = {
Bucket: "sample-bucket",
Key: Date.now()+"",
Body: buf,
ContentType: 'image/png',
ACL: 'public-read'
};
s3.putObject(data, function(err, data){
if (err) {
console.log(err);
console.log('Error uploading data: ', data);
} else {
console.log('succesfully uploaded the image!');
}
callback(null,data);
});
};
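
One caveat worth checking when the image arrives through API Gateway: with a Lambda proxy integration and binary media types configured, the body is delivered base64-encoded and the event carries an isBase64Encoded flag, so a defensive handler might decode conditionally. A sketch, assuming the standard proxy event format:

// Decode the body only when API Gateway flagged it as base64.
var buf = event.isBase64Encoded
  ? Buffer.from(event.body, 'base64')
  : Buffer.from(event.body)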

NodeJS Request Upload Image

Trying to upload an image using Node.js, Express & the request module, but I keep getting a 415 (Unsupported Media Type) back from the Amazon S3 instance.
fs.createReadStream(req.files.image.path).pipe(request.post(defaults.url, {
  form: {
    param_1: '',
    param_2: ''
  }
}, function (error, response, body) {
  if (error) {
    callback(error, null);
  } else {
    if (response.statusCode === 200) {
      callback({}, body);
    } else {
      callback(body, response);
    }
  }
}));
I think the image is not getting attached to the request, but I'm not 100% sure. Any advice?
pipe expects a Writable stream as its parameter. You can use the Express res object directly as the pipe destination, but if you'd like to upload to S3, you can read the file from the req stream and use putObject to write it to S3:
var fs = require('fs')

fs.readFile(req.files.image.path, function (err, data) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  var bucket = ''
  s3.putObject({
    ACL: 'public-read', // private access by default
    Bucket: bucket,
    Key: file_name,
    Body: data
  }, function (err, data) {
    if (err) {
      console.log(err)
      res.send(500, { msg: 'image upload failed', error: err })
    } else {
      console.log('S3 upload successful')
      res.send({})
    }
  });
})
If you'd like to download, you can retrieve the object and pipe it back to the response:
app.get('/download/:file', function (req, res, next) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  s3.getObject({
    Bucket: '',
    Key: req.params.file
  }, function (err, data) {
    if (err) console.log(err)
    var fs = require('fs')
    var filePath = __dirname + "/downloads/" + req.params.file
    fs.writeFile(filePath, data.Body, function (err) {
      if (err) console.log(err)
      else {
        res.attachment(filePath)
        var filestream = fs.createReadStream(filePath);
        filestream.pipe(res);
        // TODO: delete file from server?
      }
    });
  })
})
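
Alternatively, the object can be streamed to the client without touching the disk at all, since the SDK's request object exposes createReadStream(). A sketch, keeping the same empty-string bucket placeholder as above:

app.get('/download/:file', function (req, res) {
  var AWS = require('./aws_config')
  var s3 = new AWS.S3()
  res.attachment(req.params.file)
  // Stream the S3 object directly into the HTTP response.
  s3.getObject({ Bucket: '', Key: req.params.file })
    .createReadStream()
    .on('error', function (err) {
      console.log(err)
      res.status(500).end()
    })
    .pipe(res)
})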
