AWS Node Lambda to send back binary image in response - node.js

I am using an AWS Node Lambda to resize an image and send that image back in binary format, and I am not sure if I am handling this right or not. Basically, I want to send the binary data back in my response in a way that it can just be loaded without any front end changes. I have a perfectly good version working that returns base64 data, but how would I modify this to return binary instead?
Here is my resizing function:
function imageSizer(url, args) {
  return new Promise((resolve, reject) => {
    bufferRequest.get(url, args, function (err, res, body) {
      if (err) {
        console.log(err);
        return reject(err);
      }
      const originalFormat = url.includes('png') ? 'png' : 'jpeg';
      let newSize = (args.exact === true)
        ? sharp(body).resize(args.width, args.height)
        : sharp(body).resize(args.width, args.height).max();
      newSize.toFormat(originalFormat)
        .toBuffer()
        .then((outputBuffer) => {
          const newImage = "data:" + res.headers["content-type"] + ";base64," + Buffer.from(outputBuffer).toString('base64');
          resolve(newImage);
        })
        .catch((error) => {
          console.log(error);
          reject(error);
        });
    });
  });
}
Here is my handler:
function handler(event, context) {
  imageSizer(event.url, event.queryStringParameters)
    .then((result) => {
      context.succeed({
        statusCode: 200,
        headers: { 'Content-Type': 'image/jpeg' },
        body: result
      });
    })
    .catch((error) => {
      context.succeed({
        statusCode: 502,
        headers: { 'Content-Type': 'application/json' },
        body: `Error sizing image ${error}`
      });
    });
}
As mentioned, this works fine for base64, but I am unsure what to change in the const newImage = line so that the response carries binary data the browser can use to load the image.

I went with this solution:
Change:
.then((outputBuffer) => {
  const newImage = "data:" + res.headers["content-type"] + ";base64," + Buffer.from(outputBuffer).toString('base64');
  resolve(newImage);
})
To:
.then((outputBuffer) => {
  resolve(outputBuffer);
})
This returns the buffer in binary format, without having to do anything ourselves.
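One caveat, assuming API Gateway with Lambda proxy integration sits in front of this Lambda (the question doesn't say): the proxy response body must be a string, so a raw buffer still has to travel as base64 with isBase64Encoded set, and API Gateway converts it back to binary when the API's binary media types include the image type. A minimal sketch of the handler under that assumption:

function handler(event, context) {
  imageSizer(event.url, event.queryStringParameters)
    .then((result) => {
      context.succeed({
        statusCode: 200,
        headers: { 'Content-Type': 'image/jpeg' },
        // API Gateway decodes this back to raw bytes when binary
        // media types (e.g. image/jpeg or */*) are configured.
        body: result.toString('base64'),
        isBase64Encoded: true
      });
    })
    .catch((error) => {
      context.succeed({
        statusCode: 502,
        headers: { 'Content-Type': 'application/json' },
        body: `Error sizing image ${error}`
      });
    });
}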

Related

Fetch multiple files and write to AWS S3 with nodejs Lambda function

I have an array of image urls that I get from an SQS message. I need to download the images and store them in an S3 bucket. If downloading or storing an image fails, I need to catch the error, so I can push the image to another SQS queue for retrying later.
What I have so far does download and store the images, but I don't know how to access the results of the fetch and putObject calls. Also, I'm not sure whether I'm going about this the right way or whether there's a more efficient/better/elegant way to do it.
This is what I have now:
const AWS = require("aws-sdk");
const fetch = require("node-fetch")
const s3 = new AWS.S3();
exports.handler = function(event, context) {
// SQS may invoke with multiple messages
for (const message of event.Records) {
const bodyData = JSON.parse(message.body);
const bucket = 'my_images_bucket';
const images = bodyData.images;
let urls = [];
for (const image of images) {
urls.push(image);
}
let promises = urls.map(image => {
fetch(image)
.then((response) => {
if (!response.ok) {
throw new Error('An error occurred while fetching ' + image + ': ' + response.statusText);
}
return response;
})
.then(async res => {
try {
const buffer = await res.buffer();
console.log(image);
// store
return s3.putObject(
{
Bucket: bucket,
Key: image,
Body: buffer,
ContentType: "image/jpeg"
}
).promise();
} catch (e) {
console.log('An error occurred while storing image ' + image + ': ' + e);
}
})
.catch((error) => {
console.error(error);
});
});
Promise.all(promises)
.then(d => {
console.log('All images downloaded.');
console.log('PromiseAll result: ' + d);
}).catch(e => {
console.log('Whoops something went wrong!', e);
});
}
}
The output I get from this:
INFO All images downloaded.
INFO PromiseAll result: ,,,,
INFO https://myserver/10658272812/image14.jpg
INFO https://myserver/10658272810/image12.jpg
INFO https://myserver/10658272804/image6.jpg
INFO https://myserver/10658272813/image15.jpg
INFO https://myserver/10658272816/image18.jpg
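For what it's worth, the empty PromiseAll result points at the map callback: it never returns the fetch chain, so promises is an array of undefined and Promise.all resolves immediately, before any upload finishes. A minimal sketch of the fix, reusing the names from the code above:

let promises = urls.map(image =>
  // Returning the chain means Promise.all sees real promises,
  // and d becomes the array of putObject results.
  fetch(image)
    .then((response) => {
      if (!response.ok) {
        throw new Error('An error occurred while fetching ' + image + ': ' + response.statusText);
      }
      return response.buffer();
    })
    .then((buffer) => s3.putObject({
      Bucket: bucket,
      Key: image,
      Body: buffer,
      ContentType: 'image/jpeg'
    }).promise())
);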
Here is code I wrote for a similar problem:
const s3Put = (filename, data, mime, s3Params = {}) => {
  return new Promise((resolve, reject) => {
    s3.putObject({
      Bucket: bucket,
      Key: filename,
      Body: data,
      ContentType: mime,
      ...s3Params
    }, (err, data) => {
      if (err) {
        return reject(err);
      }
      return resolve(data);
    });
  });
};
let filePromise = s3Put(to, content, fileMime, s3Params)
  .then(() => console.log("MyCode"))
  .catch(err => {
    const error = {
      message: `S3: ${(err.pri && err.pri.message) || (err.internal && err.internal.message)}`,
      to
    };
    errors.push(error);
    return onError(error);
  });
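As an aside, the AWS SDK for JavaScript (v2) already exposes promises on its request objects, so a manual wrapper like this can be collapsed; a sketch with the same parameters:

const s3Put = (filename, data, mime, s3Params = {}) =>
  s3.putObject({
    Bucket: bucket,
    Key: filename,
    Body: data,
    ContentType: mime,
    ...s3Params
  }).promise();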

Upload a stream to s3

I read Pipe a stream to s3.upload(), but I'm not sure it actually solves my problem, and what I've tried hasn't worked.
What I am doing is a GET call to www.example.com. This returns a stream, and I want to upload that stream to S3.
Here's my try:
fetch('https://www.example.com', {
  method: 'GET',
  headers: {
    'Authorization': "Bearer " + myAccessToken,
  },
})
  .then(function(response) {
    return response.text();
  })
  .then(function(data) {
    uploadToS3(data);
  });
const uploadToS3 = (data) => {
  // Setting up S3 upload parameters
  const params = {
    Bucket: myBucket,
    Key: "fileName",
    Body: data
  };
  // Uploading files to the bucket
  s3.upload(params, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};
Output: File uploaded successfully. https://exampleBucket.s3.amazonaws.com/fileName.pdf
However, the uploaded file is blank.
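A plausible cause of the blank file, before the self-answer below: response.text() decodes the body as UTF-8, which corrupts binary formats such as PDF. If this is node-fetch, the raw bytes are available instead via response.buffer(); a sketch:

fetch('https://www.example.com', {
  method: 'GET',
  headers: {
    'Authorization': "Bearer " + myAccessToken,
  },
})
  .then(function(response) {
    return response.buffer(); // raw bytes, not a UTF-8 string
  })
  .then(function(data) {
    uploadToS3(data);
  });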
I figured it out, but I did not keep using fetch. Instead I actually download the file, then upload it, then delete the file.
function getNewFilesFromExampleDotCom(myAccessToken, fileName, fileKey) {
  let url2 = 'https://example.com' + fileKey;
  axios
    .get(url2, {
      headers: { 'Authorization': "Bearer " + myAccessToken },
      responseType: 'stream',
    })
    .then(response => {
      let file = fileName;
      response.data.pipe(fs.createWriteStream(file));
      let myFileInfo = [];
      if (myFileInfo.length > 0) {
        myFileInfo.splice(0, myFileInfo.length);
      }
      myFileInfo.push(file);
      processArray(myFileInfo);
      console.log(file + " saved");
    })
    .catch(error => console.log(error));
}

async function processArray(array) {
  for (const item of array) {
    await delayedLog(item);
  }
  console.log('Downloaded!');
  console.log('Uploading to s3!');
}

function delay() {
  return new Promise(resolve => setTimeout(resolve, 300));
}

async function delayedLog(item) {
  await delay();
  uploadFiles(item);
}

async function uploadFiles(file) {
  uploadToS3List(file);
  await new Promise((resolve, reject) => setTimeout(resolve, 1000));
  deleteMyFiles(file);
}

const uploadToS3List = (fileName) => {
  // Read content from the file
  const fileContent = fs.readFileSync(fileName);
  // Setting up S3 upload parameters
  const params = {
    Bucket: "myBucketName",
    Key: fileName,
    Body: fileContent
  };
  // Uploading files to the bucket
  s3.upload(params, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};

function deleteMyFiles(path) {
  fs.unlink(path, (err) => {
    console.log(path + " has been deleted");
    if (err) {
      console.error(err);
      return;
    }
  });
}
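For comparison, the temp file can be avoided entirely: aws-sdk's s3.upload() (unlike putObject) accepts a readable stream of unknown length as Body, so the axios response stream can be handed to it directly. A minimal sketch under that assumption (bucket and key names are placeholders):

const streamToS3 = async (url, myAccessToken, key) => {
  const response = await axios.get(url, {
    headers: { 'Authorization': "Bearer " + myAccessToken },
    responseType: 'stream',
  });
  // s3.upload consumes the stream as it arrives; no local file needed.
  return s3.upload({
    Bucket: "myBucketName",
    Key: key,
    Body: response.data
  }).promise();
};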

Axios posts the first part of the file and hangs

I'm trying to send a file from my NodeJS Lambda function to Dailymotion, and I'm using the following code:
// Download the original file to the `tmp` folder of Lambda.
await axios.get('https://cdn.mysite.com/source.mp4', {
  responseType: 'stream'
})
  .then(response => {
    response.data.pipe(fs.createWriteStream('/tmp/video.mp4'));
  })
  .catch(error => {
    console.log(error);
  });

const form = new FormData();
form.append('file', fs.createReadStream('/tmp/video.mp4'));

// Post the file to Dailymotion API.
axios.post('https://upload-xx.xxx.dailymotion.com/upload?uuid=xxxxx&seal=xxxxx&extra=xxxxx', form, {
  headers: {
    ...form.getHeaders,
    'Content-Type': 'multipart/formdata',
    'Content-Length': fs.statSync('/tmp/video.mp4').size
  },
})
  .then(response => {
    console.log(response);
  })
  .catch(error => {
    console.log(error);
  });
I can use the following URL to check the upload progress of the file: https://upload-xx.xxx.dailymotion.com/progress?uuid=xxxxx, but it seems that it uploads only the first chunk of the file and then stops, and I'm not getting any error or response.
Did I miss anything here?
When you use await, the result in your case is not a promise but a stream, so there is no sense in chaining old-style .then and .catch onto something that is not a promise.
Try the following.
try {
  const response = await axios.get('https://cdn.mysite.com/source.mp4', {
    responseType: 'stream'
  });
  // With axios, the stream lives on response.data, not on the response itself.
  const stream = response.data;
  const writeStream = fs.createWriteStream('/tmp/video.mp4');
  stream.pipe(writeStream);
  stream.on('error', (err) => console.log({ err }));
  // Wait for the write side to finish before reading the file back.
  writeStream.on('finish', async () => {
    try {
      const form = new FormData();
      form.append('file', fs.createReadStream('/tmp/video.mp4'));
      // Post the file to the Dailymotion API.
      const postThis = await axios.post('https://upload-xx.xxx.dailymotion.com/upload?uuid=xxxxx&seal=xxxxx&extra=xxxxx', form, {
        // getHeaders() is a method; it supplies the correct
        // multipart/form-data Content-Type with the boundary.
        headers: form.getHeaders(),
      });
      console.log({ postThis });
    } catch (err) { console.log({ err }); }
  });
} catch (err) { console.log({ err }); }
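Two details in this area are easy to get wrong: form.getHeaders is a method and must be called, and the raw file size is not a valid Content-Length for a multipart body (the boundary and part headers add bytes), which is a plausible cause of an upload that stalls after the first chunk. If the endpoint insists on a length header, form-data can compute the real one; a sketch, assuming the same /tmp/video.mp4 file:

const form = new FormData();
form.append('file', fs.createReadStream('/tmp/video.mp4'), {
  knownLength: fs.statSync('/tmp/video.mp4').size // lets form-data total up streams
});
const headers = form.getHeaders(); // multipart/form-data; boundary=...
headers['Content-Length'] = form.getLengthSync(); // boundary + part headers + file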

How to store any type of file in pouch db using ionic 4?

This is how I try it:
await this.base64.encodeFile(uri).then((base64File: string) => {
  console.log(base64File);
  base64_data = base64File;
  alert(JSON.stringify(base64_data));
  console.log(base64_data);
}, (err) => {
  console.log(err);
  alert(JSON.stringify(err));
});

this.db.putAttachment(att_id, att_name, base64_data, att_file_type).then(function (result) {
  console.log(result);
  alert(JSON.stringify(result));
}).catch(function (err) {
  // ouch, an error
  console.log(err);
  alert(JSON.stringify(err));
});
It works for image files, but it returns "" for PDF files.
You can convert the file data to binary data or base64; this example converts to binary data.
First download the file with an XHR request (jQuery ajax, axios, etc.) with responseType: 'arraybuffer' set.
Create the document in PouchDB before or after the download.
axios.get(url, {
  progress: false,
  responseType: 'arraybuffer',
  onDownloadProgress: (progressEvent) => {
    const percent = (100 * progressEvent.loaded / progressEvent.total);
    console.log(percent);
  }
})
  .then(resp => {
    // get db ($db and doc come from the surrounding app code)
    let db = $db.dbModel;
    // set attachment
    db.get(doc._id).then((doc) => {
      db.putAttachment(doc._id, 'index.mp4', doc._rev, new Blob([new Uint8Array(resp.data)], { type: 'video/mp4' }), 'video/mp4')
        .then(res => {
          // console.log('success store file')
        });
    });
  });
https://github.com/mohammadnazari110/pwa_offline_video_download
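For completeness, a minimal sketch of reading such an attachment back out of PouchDB (names taken from the example above; the object URL part is browser-only):

db.getAttachment(doc._id, 'index.mp4').then((blob) => {
  // blob is a Blob in the browser (a Buffer in Node)
  const src = URL.createObjectURL(blob);
  // e.g. assign src to a <video> element
});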

Issues with uploading to S3 from node stream using request

Using Node.js, I am trying to upload a large file (700MB ~ 1GB), which I get as the response to a POST request (using request), to my S3 bucket.
Using the aws-sdk for JavaScript I've tried two approaches, but each had a different issue:
Approach 1 - Trying to invoke the s3.upload() function on the response event:
const sendRequest = (data) => {
  try {
    return new Promise((resolve, reject) => {
      AWS.config.loadFromPath('./config/awsConfig.json');
      let s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'path/to/file.csv' } });

      request({
        method: 'POST',
        uri: 'https://www.example.com/apiEndpoint',
        headers: {
          host: 'example.com',
          'content-type': 'application/json'
        },
        body: JSON.stringify(data)
      }).on('response', (response) => { // 1st approach
        if (200 == response.statusCode) {
          s3.upload({
            Body: response,
            ACL: 'public-read',
            CacheControl: "5184000" // 2 months
          }, (err, data) => {
            console.log(err, data);
          });
        }
      }).on('error', (error) => {
        reject();
      }).on('end', () => {
        resolve();
      });
    });
  } catch (error) {
    throw new Error('Unable to get and upload file');
  }
}
Result: s3.upload() is called once. The file is created in the bucket but has no data in it (zero bytes).
Approach 2 - Trying to invoke the s3.upload() function on the data event:
const sendRequest = (data) => {
  try {
    return new Promise((resolve, reject) => {
      AWS.config.loadFromPath('./config/awsConfig.json');
      let s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'path/to/file.csv' } });

      request({
        method: 'POST',
        uri: 'https://www.example.com/apiEndpoint',
        headers: {
          host: 'example.com',
          'content-type': 'application/json'
        },
        body: JSON.stringify(data)
      }).on('data', (data) => { // 2nd approach
        s3.upload({
          Body: data,
          ACL: 'public-read',
          CacheControl: "5184000" // 2 months
        }, (err, data) => {
          console.log(err, data);
        });
      }).on('error', (error) => {
        reject();
      }).on('end', () => {
        resolve();
      });
    });
  } catch (error) {
    throw new Error('Unable to get and upload file');
  }
}
Result: s3.upload() is called on every data event. The file is created in the bucket, but each call overwrites the previous one, so in the end only the last emitted chunk (7kb ~ 10kb) remains.
Also, after resolve() is called, s3.upload() is still being called multiple times.
Notes:
1) The function returns a Promise because the rest of my process runs sequentially and awaits this step.
2) Both approaches are taken from the answers to Stream response from nodejs request to s3 and from Piping from request.js to s3.upload results in a zero byte file
3) A 3rd approach is to stream to a local file on my server and only then upload to s3. I would very much like to avoid that.
Any ideas on how to get it to work?
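The thread leaves the question open, but a commonly suggested pattern for this situation (a sketch, not from this thread) is to start a single s3.upload() with a PassThrough stream as Body and pipe the response into it, so one upload consumes the data as it arrives:

const { PassThrough } = require('stream');

const sendRequest = (data) => {
  return new Promise((resolve, reject) => {
    AWS.config.loadFromPath('./config/awsConfig.json');
    const s3 = new AWS.S3();
    const pass = new PassThrough();

    // One upload call; s3.upload reads from the stream until it ends.
    s3.upload({
      Bucket: 'myBucket',
      Key: 'path/to/file.csv',
      Body: pass,
      ACL: 'public-read',
      CacheControl: "5184000" // 2 months
    }, (err, uploadResult) => (err ? reject(err) : resolve(uploadResult)));

    request({
      method: 'POST',
      uri: 'https://www.example.com/apiEndpoint',
      headers: {
        host: 'example.com',
        'content-type': 'application/json'
      },
      body: JSON.stringify(data)
    }).on('error', reject)
      .pipe(pass);
  });
};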
