Issues with uploading to S3 from node stream using request - node.js

Using Node.js, I am trying to upload a large file (700 MB ~ 1 GB), which I get as the response to a POST request (made with request), to my S3 bucket.
Using the aws-sdk for JavaScript, I've tried two approaches, but each had a different issue:
Approach 1 - Trying to invoke the s3.upload() function on the response event:
const sendRequest = (data) => {
  try {
    return new Promise((resolve, reject) => {
      AWS.config.loadFromPath('./config/awsConfig.json');
      let s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'path/to/file.csv' } });
      request({
        method: 'POST',
        uri: 'https://www.example.com/apiEndpoint',
        headers: {
          host: 'example.com',
          'content-type': 'application/json'
        },
        body: JSON.stringify(data)
      }).on('response', (response) => { // 1st approach
        if (200 == response.statusCode) {
          s3.upload({
            Body: response,
            ACL: 'public-read',
            CacheControl: "5184000" // 2 months
          }, (err, data) => {
            console.log(err, data);
          });
        }
      }).on('error', (error) => {
        reject();
      }).on('end', () => {
        resolve();
      });
    });
  } catch (error) {
    throw new Error('Unable to get and upload file');
  }
}
Result: s3.upload() is called once. The file is created in the bucket but has no data in it (zero bytes).
Approach 2 - Trying to invoke the s3.upload() function on the data event:
const sendRequest = (data) => {
  try {
    return new Promise((resolve, reject) => {
      AWS.config.loadFromPath('./config/awsConfig.json');
      let s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'path/to/file.csv' } });
      request({
        method: 'POST',
        uri: 'https://www.example.com/apiEndpoint',
        headers: {
          host: 'example.com',
          'content-type': 'application/json'
        },
        body: JSON.stringify(data)
      }).on('data', (data) => { // 2nd approach
        s3.upload({
          Body: data,
          ACL: 'public-read',
          CacheControl: "5184000" // 2 months
        }, (err, data) => {
          console.log(err, data);
        });
      }).on('error', (error) => {
        reject();
      }).on('end', () => {
        resolve();
      });
    });
  } catch (error) {
    throw new Error('Unable to get and upload file');
  }
}
Result: s3.upload() is called every time a data event fires. The file is created in the bucket, but each time the event is emitted, the new data overwrites the old. In the end only the last chunk that was emitted remains (7 KB ~ 10 KB).
Also, after resolve() is called, s3.upload() is still being called multiple times.
Notes:
1) The function returns a Promise so the rest of my process, which must run sequentially, can await it.
2) Both approaches are taken from the answers to "Stream response from nodejs request to s3" and "Piping from request.js to s3.upload results in a zero byte file".
3) A third approach is to stream to a local file on my server and only then upload to S3. I would very much like to avoid that.
Any ideas on how to get it to work?
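For reference, here is a minimal sketch of the pattern the linked answers describe (untested against this exact API): hand s3.upload() a PassThrough stream as Body up front, pipe the response into it, and resolve from the upload callback rather than from the request events. s3.upload() accepts a readable stream and performs a managed multipart upload, so it only completes once the whole response has streamed through.
const AWS = require('aws-sdk');
const request = require('request');
const { PassThrough } = require('stream');

const sendRequest = (data) => {
  return new Promise((resolve, reject) => {
    AWS.config.loadFromPath('./config/awsConfig.json');
    const s3 = new AWS.S3();
    const pass = new PassThrough();
    // Start the upload immediately; it reads from the stream as data arrives.
    s3.upload({
      Bucket: 'myBucket',
      Key: 'path/to/file.csv',
      Body: pass,
      ACL: 'public-read',
      CacheControl: '5184000' // 2 months
    }, (err, uploadData) => (err ? reject(err) : resolve(uploadData)));
    request({
      method: 'POST',
      uri: 'https://www.example.com/apiEndpoint',
      headers: { host: 'example.com', 'content-type': 'application/json' },
      body: JSON.stringify(data)
    }).on('error', reject)
      .pipe(pass); // stream the response body into the upload
  });
};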

Related

Axios posts the first part of the file and hangs

I'm trying to send a file from my NodeJS Lambda function to Dailymotion, and I'm using the following code:
// Download the original file to the `tmp` folder of Lambda.
await axios.get('https://cdn.mysite.com/source.mp4', {
  responseType: 'stream'
})
.then( response => {
  response.data.pipe(fs.createWriteStream('/tmp/video.mp4'));
})
.catch(error => {
  console.log(error);
});

const form = new FormData();
form.append('file', fs.createReadStream('/tmp/video.mp4'));

// Post the file to Dailymotion API.
axios.post('https://upload-xx.xxx.dailymotion.com/upload?uuid=xxxxx&seal=xxxxx&extra=xxxxx', form, {
  headers: {
    ...form.getHeaders,
    'Content-Type': 'multipart/formdata',
    'Content-Length': fs.statSync('/tmp/video.mp4').size
  },
})
.then(response => {
  console.log(response);
})
.catch(error => {
  console.log(error);
});
I can use the following URL to check the upload progress of the file: https://upload-xx.xxx.dailymotion.com/progress?uuid=xxxxx, but it seems that it uploads only the first chunk of the file and then stops, and I'm not getting any error or response.
Did I miss anything here?
When you use await, what you get back is not a promise but the resolved value, which here is the response whose data is a stream. So there is no sense in chaining old-style .then and .catch onto something that is not a promise.
Try the following.
try {
  const response = await axios.get('https://cdn.mysite.com/source.mp4', {
    responseType: 'stream'
  });
  const writer = fs.createWriteStream('/tmp/video.mp4');
  response.data.pipe(writer);
  writer.on('error', (err) => console.log({ err }));
  // Wait until the file has been fully flushed to disk before uploading.
  writer.on('finish', async () => {
    try {
      const form = new FormData();
      form.append('file', fs.createReadStream('/tmp/video.mp4'));
      // Post the file to the Dailymotion API. form.getHeaders() supplies
      // the correct multipart Content-Type including the boundary.
      const postThis = await axios.post('https://upload-xx.xxx.dailymotion.com/upload?uuid=xxxxx&seal=xxxxx&extra=xxxxx', form, {
        headers: form.getHeaders()
      });
      console.log({ postThis })
    } catch (err) { console.log({ err }) }
  })
} catch (err) { console.log({ err }) }
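A possible alternative, as a sketch only (untested, and assuming Dailymotion's endpoint behaves like a standard multipart upload): form-data can take a readable stream directly, which avoids staging the file in /tmp. The knownLength option is needed so the total multipart Content-Length can be computed up front.
const axios = require('axios');
const FormData = require('form-data');

const relayVideo = async () => {
  const response = await axios.get('https://cdn.mysite.com/source.mp4', {
    responseType: 'stream'
  });
  const form = new FormData();
  // Append the download stream directly; knownLength lets form-data
  // compute the multipart body length without buffering the file.
  form.append('file', response.data, {
    filename: 'video.mp4',
    knownLength: Number(response.headers['content-length'])
  });
  const upload = await axios.post(
    'https://upload-xx.xxx.dailymotion.com/upload?uuid=xxxxx&seal=xxxxx&extra=xxxxx',
    form,
    {
      headers: { ...form.getHeaders(), 'Content-Length': form.getLengthSync() },
      maxBodyLength: Infinity // allow large request bodies through axios
    }
  );
  console.log(upload.status);
};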

nodejs Lambda with S3 upload via API Gateway

I've been trying to get a simple serverless API Gateway -> NodeJS Lambda -> S3 flow working, but it appears that the Lambda just uploads corrupt files.
This code downloads a file from a URL and then uploads it straight to S3.
I've tried both putObject & upload (with the different params) with no success. Comparing file sizes: the original I download is 24 KB, while the (corrupt) image downloaded from S3 is 44 KB.
I simply test the application by doing a POST to the API Gateway URL.
Any ideas?
var url = "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1d/AmazonWebservices_Logo.svg/500px-AmazonWebservices_Logo.svg.png"

module.exports.upload = function(event, context, callback) {
  https.get(url, function(res) {
    var body = ""
    res.on("data", function(chunk) {
      // Aggregates chunks
      body += chunk
    })
    res.on("end", function() {
      console.log(body)
      // Once you received all chunks, send to S3 - putObject only
      var params = {
        Bucket: S3_BUCKET_NAME,
        Key: "aws-logo.png",
        Body: body
      }
      var s3Params = {
        Bucket: S3_BUCKET_NAME,
        Key: "aws-logo-upload.png",
        Body: body,
        ContentType: "image/png"
      }
      s3.upload(s3Params, function(err, data) {
        // s3.putObject(params, function(err, data) {
        if (err) {
          console.log("error")
          console.error(err, err.stack)
          callback(null, { statusCode: 404, err })
        } else {
          console.log("ok")
          console.log(data)
          let response = {
            statusCode: 200
          }
          callback(null, response)
        }
      })
    })
  })
}
The following code works for me outside of API Gateway/Lambda. It yields a PNG in S3 that's downloadable as a valid 23.7 KB image, and I'd expect the equivalent to work in Lambda. The key fix is collecting the response chunks in an array and joining them with Buffer.concat(); appending chunks to a string (body += chunk) decodes the binary data as UTF-8 and corrupts it, which is why the uploaded file grows from 24 KB to 44 KB.
const AWS = require('aws-sdk');
const https = require('https');

const s3 = new AWS.S3();
const logourl =
  'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1d/AmazonWebservices_Logo.svg/500px-AmazonWebservices_Logo.svg.png';

const getThenUpload = (url, callback) => {
  https.get(url, (res) => {
    const data = [];
    res.on('data', (chunk) => {
      data.push(chunk);
    });
    res.on('end', () => {
      const params = {
        Bucket: S3_BUCKET_NAME,
        Key: 'aws-logo-upload.png',
        Body: Buffer.concat(data),
        ContentType: 'image/png',
      };
      s3.upload(params, (err, rsp) => {
        if (err) {
          console.error(err, err.stack);
          callback(err, { statusCode: 404, err });
        } else {
          console.log(rsp);
          callback(null, { statusCode: 200 });
        }
      });
    });
  });
};

getThenUpload(logourl, (err, data) => {
  if (err) {
    console.error(`Error: ${err}`);
  } else {
    console.log(`Data: ${JSON.stringify(data)}`);
  }
});
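Wiring that into the Lambda handler would then be a thin wrapper; a hypothetical sketch (assuming getThenUpload, logourl, and S3_BUCKET_NAME from above are in scope):
module.exports.upload = (event, context, callback) => {
  // getThenUpload already produces (err, response) in the shape the
  // Lambda callback expects, so it can be passed through directly.
  getThenUpload(logourl, callback);
};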

Extract body from Lambda callback in node js

I'm trying to extract the body from the callback returned from a Lambda function. I am calling the function using the code below:
const pdfBuffer = await new Promise((resolve, reject) => {
  lambda.invoke(params, function(err, data) {
    if (err) {
      console.log('STACK ERROR IS' + err, err.stack)
      reject(err);
    } else {
      console.log('Returned DATA is: ' + JSON.stringify(data));
      resolve(data);
    }
  });
});

Prescription.deletePrescription(programId);

const returnedPDF = JSON.parse(pdfBuffer.Payload)

response.status(200);
response.set({
  'Content-Type': 'application/pdf',
  'Content-Disposition': `attachment; filename="${filename}.pdf"`,
  'X-Publish-Filename': `"${filename}.pdf"`,
});
response.end(new Buffer(returnedPDF.body, 'binary'));
And the Lambda callback function looks like this:
return callback(null, {
  statusCode: 200,
  body: data,
  isBase64Encoded: true,
  headers: {
    'Content-Type': 'application/pdf',
  },
})
The data object I am getting back looks like this:
{"StatusCode":200,"ExecutedVersion":"$LATEST","Payload":"{\"statusCode\":200,\"body\":\"JVBERi0xLjQKJdPr6eEKMSAwIG9iago8PC9DcmVhdG9yIChDa=\",\"headers\":{\"Content-Type\":\"application/pdf\"}}"}
I've tried a number of things to get the body out of this nested object, including JSON.parse() (as I thought it was stringified), but I'm not having any luck.
Many thanks in advance.
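For what it's worth, a minimal sketch of unwrapping that payload, going by the shape shown above: Payload is itself a JSON string, and since the Lambda sets isBase64Encoded: true, the nested body field is base64, so it should be decoded with 'base64' rather than 'binary'.
// Payload is a JSON string, so parse it first.
const returnedPDF = JSON.parse(pdfBuffer.Payload);
// The Lambda base64-encoded the body (isBase64Encoded: true),
// so decode it as base64, not 'binary'.
const pdfBytes = Buffer.from(returnedPDF.body, 'base64');
response.end(pdfBytes);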

Uploading PDF Content Into An S3 Bucket

I'm trying to download PDF content with data from a remote location and upload the content into S3 as a PDF file. I'm using NodeJS, in the context of an AWS Lambda. The s3.putObject callback fires successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
const request = require('request')

const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
  url: viewUrl,
  headers: {
    'Content-Type': 'application/pdf'
  }
};

request(options, function(err, res, body){
  if(err){ return console.log(err) }
  const base64data = new Buffer(body, 'binary');
  const params = {
    Bucket: "myS3bucket",
    Key: "my-pdf.pdf",
    ContentType: "application/pdf",
    Body: base64data,
    ACL: 'public-read'
  };
  s3.putObject(params, function(err, data) {
    if (err) {
      console.log(err);
    } else {
      callback(null, JSON.stringify(data))
    }
  })
})
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?
Can you try this code? :)
import AWS from 'aws-sdk'
const request = require('request')

const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
  return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
    function(err, res, body){
      if(err)
        return reject({ status: 500, error: err })
      return resolve({ status: 200, body: body })
    })
})

promise.then((pdf) => {
  if(pdf.status == 200)
  {
    console.log('uploading file..')
    s3.putObject({
      Bucket: process.env.bucket,
      Body: pdf.body,
      Key: 'my-pdf.pdf',
      ACL: 'public-read'
    }, (err, data) => {
      if(err)
        console.log(err)
      else
        console.log('uploaded')
    })
  }
})
The key change is encoding: null, which tells request to hand the body back as a raw Buffer instead of decoding it as a UTF-8 string, so the binary PDF data survives intact. Hope this helps; let me know how it goes.

AWS Node Lambda to send back binary image in response

I am using an AWS Node Lambda to resize an image and send that image back in binary format, and I am not sure if I am handling this right or not. Basically, I want to send the binary data back in my response in a way that it can just be loaded without any front end changes. I have a perfectly good version working that returns base64 data, but how would I modify this to return binary instead?
Here is my resizing function:
function imageSizer(url, args) {
  return new Promise((resolve, reject) => {
    bufferRequest.get(url, args, function (err, res, body) {
      if (err) {
        console.log(err);
        reject(err);
      }
      const originalFormat = url.includes('png') ? 'png' : 'jpeg';
      let newSize = (args.exact === true)
        ? sharp(body).resize(args.width, args.height)
        : sharp(body).resize(args.width, args.height).max();
      newSize.toFormat(originalFormat)
        .toBuffer()
        .then((outputBuffer) => {
          const newImage = "data:" + res.headers["content-type"] + ";base64," + new Buffer(outputBuffer).toString('base64');
          resolve(newImage);
        })
        .catch((error) => {
          console.log(error);
          reject(error);
        })
    });
  })
}
Here is my handler:
function handler(event, context) {
  imageSizer(event.url, event.queryStringParameters)
    .then((result) => {
      context.succeed({
        statusCode: 200,
        headers: { 'Content-Type': 'image/jpeg' },
        body: result
      });
    })
    .catch((error) => {
      context.succeed({
        statusCode: 502,
        headers: { 'Content-Type': 'application/json' },
        body: `Error sizing image ${error}`
      });
    })
}
As mentioned, this works fine for base64, but I am unsure what changes to make to the const newImage = line to send back binary data the browser can use to load the image.
I went with this solution:
Change:
.then((outputBuffer) => {
  const newImage = "data:" + res.headers["content-type"] + ";base64," + new Buffer(outputBuffer).toString('base64');
  resolve(newImage);
})
To:
.then((outputBuffer) => {
  resolve(outputBuffer);
})
This returns the buffer in binary format, without having to do anything ourselves.
