In Node.js, how to download files from S3

In ExpressJS, I would like to download files previously uploaded to an Amazon S3 bucket.
Here is my current route:
const express = require('express');
const AWS = require('aws-sdk');
const mammoth = require('mammoth');
const fs = require('fs').promises
const path = require('path')
const router = express.Router();

router.put('/:id/download', async (req, res, next) => {
  console.log('hitting download route')
  var id = req.params.id;
  let upload = await Upload.query().findById( id ).eager('user');
  console.log("file to download is: ", upload.name)

  AWS.config.update({
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  });
  const s3 = new AWS.S3();

  // var fileStream = fs.createWriteStream('/tmp/file.docx');
  // var s3Stream = s3.getObject(params).createReadStream();

  const downloadFromS3 = async () => {
    const params = {
      Bucket: process.env.AWS_BUCKET,
      Key: upload.file_url.split("com/").reverse()[0]
    };
    const { Body } = await s3.getObject(params).promise()
    await fs.writeFile(`${ __dirname }/download.docx`, Body)
    return Body
  }

  // mammoth.convertToHtml({ path: '/Users/dariusgoore/Downloads/1585930968750.docx' })
  //   .then(async function(result) {
  //     await Upload.query().findById( id )
  //       .patch({
  //         html: result.value,
  //         conversion_messages: result.messages
  //       })
  //     res.json(result);
  //   })
  //   .done();

  res.send(downloadFromS3)
});
I get no errors, but the file is not created, or if I manually create the file, it remains empty.

If I've understood you correctly, the issue is that you're not waiting for the file to be written to the local file system; you're returning it in the response via Express.
Give this code a go.
const express = require('express')
const AWS = require('aws-sdk')
const mammoth = require('mammoth')
const fs = require('fs').promises
const path = require('path')
const router = express.Router()

// Configure credentials before constructing the S3 client
AWS.config.update({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
})
const s3 = new AWS.S3()

const downloadFromS3 = async (key, location) => {
  const params = {
    Bucket: process.env.AWS_BUCKET,
    Key: key,
  }
  const { Body } = await s3.getObject(params).promise()
  await fs.writeFile(location, Body)
  return true
}

router.put('/:id/download', async (req, res, next) => {
  console.log('hitting download route')
  const upload = await Upload.query()
    .findById(req.params.id)
    .eager('user')
  console.log('file to download is: ', upload.name)
  const key = upload.file_url.split('com/').reverse()[0]
  const location = `${__dirname}/${key}.docx`
  await downloadFromS3(key, location)
  res.send({ key, location })
})
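If the goal is to send the file back to the caller rather than just report where it was saved, one possible variation (a sketch reusing the same key and location, and assuming upload.name is a reasonable filename to offer the client) is Express's res.download:

router.put('/:id/download', async (req, res, next) => {
  // ...same lookup and key/location computation as above...
  await downloadFromS3(key, location)
  // res.download streams the saved file back and sets the Content-Disposition header
  res.download(location, `${upload.name}.docx`)
})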

import { S3 } from 'aws-sdk';
import fs from 'fs';

export default class S3Service {
  s3: S3;

  constructor() {
    this.s3 = new S3({
      apiVersion: *****,
      region: ********
    });
  }

  // Download a file from S3 to the local file system
  async download(bucketName: string, keyName: string, localDest?: string): Promise<any> {
    if (typeof localDest == 'undefined') {
      localDest = keyName;
    }
    const params = {
      Bucket: bucketName,
      Key: keyName
    };
    console.log("params: ", params);
    let writeStream = fs.createWriteStream(localDest);
    return new Promise<any>((resolve, reject) => {
      const readStream = this.s3.getObject(params).createReadStream();
      // Error handling in read stream
      readStream.on("error", (e) => {
        console.error(e);
        reject(e);
      });
      // Resolve only if we are done writing
      writeStream.once('finish', () => {
        resolve(keyName);
      });
      // pipe will automatically finish the write stream once done
      readStream.pipe(writeStream);
    });
  }
}
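For completeness, a hypothetical usage of the class above (the bucket name, key, and local path are placeholders, not values from the original post):

const s3Service = new S3Service();

async function fetchReport() {
  // download() resolves with the key once the local write stream has finished
  await s3Service.download('my-bucket', 'reports/report.docx', '/tmp/report.docx');
}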

Related

Save html file from S3 and set it as content in puppeteer

I have a Lambda function in AWS that fetches an HTML file from S3 after a PUT event in the bucket. The function saves it in /tmp/tml and then loads it again to set it as content in Puppeteer and produce a PDF. My code is the following:
const chromium = require("@sparticuz/chrome-aws-lambda");
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require('path');

const IAM_USER_KEY = "asdfadsfasd";
const IAM_USER_SECRET = "asdfdsafasdfds";

const s3bucket = new AWS.S3({
  accessKeyId: IAM_USER_KEY,
  secretAccessKey: IAM_USER_SECRET
});

const copyRecursiveSync = function (src, dest) {
  const exists = fs.existsSync(src);
  const stats = exists && fs.statSync(src);
  const isDirectory = exists && stats.isDirectory();
  if (isDirectory) {
    if (!fs.existsSync(dest)) {
      fs.mkdirSync(dest);
    }
    fs.readdirSync(src).forEach(function (childItemName) {
      copyRecursiveSync(path.join(src, childItemName), path.join(dest, childItemName));
    });
  } else {
    fs.copyFileSync(src, dest);
  }
};

function uploadObjectToS3Bucket(objectName, objectData) {
  const params = {
    Bucket: 'asdfasdfsadf',
    Key: objectName,
    Body: objectData,
    ContentType: 'application/pdf'
  };
  s3bucket.upload(params, function(err, data) {
    if (err) throw err;
    console.log('File uploaded successfully');
  });
}

function downloadFromS3(bucket, key, location) {
  const params = {
    Bucket: bucket,
    Key: key,
  };
  const rs = s3bucket.getObject(params).createReadStream();
  const ws = fs.createWriteStream(location);
  rs.pipe(ws);
  return true;
}

exports.handler = async (event, context, callback) => {
  try {
    copyRecursiveSync('mylayerfiles/tml/', '/tmp/tml/');
    console.log('Assets copied to /tmp/tml \n');
    const bucket = event.Records[0].s3.bucket.name;
    const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const html_file_name = key.split('/').pop();
    console.log('Launching browser');
    const browser = await chromium.puppeteer.launch({
      headless: true,
      executablePath: await chromium.executablePath,
      args: ['--no-sandbox', '--disable-setuid-sandbox', '--disable-web-security',
        '--disable-dev-shm-usage', '--single-process']
    });
    console.log("Browser launched");
    const page = await browser.newPage();
    console.log(`Saving in /tmp/tml/${html_file_name}`);
    downloadFromS3(bucket, key, `/tmp/tml/${html_file_name}`);
    const bufferFile = async (relPath) => {
      const data = fs.readFileSync(relPath, { encoding: 'utf8' }, function (err) {
        if (err) {
          console.log("readfile failed: " + err);
          return 400;
        } else {
          console.log("readfile succeeded");
        }
      });
      return data;
    };
    const BUFFER = await bufferFile(`/tmp/tml/${html_file_name}`);
    console.log('html file read from /tmp');
    await page.setContent(content);
    console.log('html set as content');
    const pdfConfig = {
      printBackground: true,
      landscape: false,
      width: "338.63mm",
      height: "190.5mm"
    };
    await page.emulateMediaType('screen');
    const pdf = await page.pdf(pdfConfig);
    console.log('Uploading to S3 bucket');
    const key_to_save = key.replace(/\.[^/.]+$/, ".pdf");
    console.log(key_to_save);
    uploadObjectToS3Bucket(key_to_save, pdf);
    console.log('Uploaded to S3 bucket');
    await browser.close();
    console.log('Browser closed');
    return 200;
  } catch (err) {
    console.log(err);
    return 500;
  }
};
However, I'm facing two problems:
The file sometimes is not written to /tmp/tml for some reason(!)
If it is written, it is not read correctly, and subsequently setContent() does not produce the proper PDF.
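A likely cause of the first problem is that downloadFromS3 starts the piping but returns immediately, so the handler reads the file (and the invocation can end) before the write stream has finished. A hedged sketch of an awaitable version, reusing the s3bucket client and fs from the snippet above:

function downloadFromS3(bucket, key, location) {
  return new Promise((resolve, reject) => {
    const rs = s3bucket.getObject({ Bucket: bucket, Key: key }).createReadStream();
    const ws = fs.createWriteStream(location);
    rs.on('error', reject);
    ws.on('error', reject);
    // 'finish' fires only after all data has been flushed to /tmp
    ws.on('finish', () => resolve(location));
    rs.pipe(ws);
  });
}

// ...and in the handler:
// await downloadFromS3(bucket, key, `/tmp/tml/${html_file_name}`);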

How to perform an HTTP post request using express on Cloud Functions for Firebase using busboy

Hi, I am trying to insert data into the database using a POST request, but the data is not being inserted.
On further investigation I found that busboy needs to be used to handle form-data (image uploads) in Firebase Functions, but I am not able to find a solution for using busboy with the POST method.
Could someone please help me resolve this issue?
Below is the code for reference.
app.js
const express = require('express')
//const router = true;
const router = new express.Router()
const userInfo = require('../models/userProfile')
const multer = require('multer');
var fs = require('fs');
var path = require('path');
var JSONStream = require('JSONStream');
const planCheck = require('../models/planDetails');
const login = require('../models/login_creditionals');
const {Storage} = require('@google-cloud/storage');
const {format} = require('util');
const busboy = require('busboy');

const storage = new Storage({
  projectId: "biz-1",
  keyFilename: "/Users/akm/pixNodes/functions/pixbiz-65a402.json"
});
const bucket = storage.bucket("gs://biz-1");

const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: 10 * 1024 * 1024
  }
})

const uploadImageToStorage = (file) => {
  return new Promise((resolve, reject) => {
    if (!file) {
      reject('No image file');
    }
    let newFileName = `${Date.now() + path.extname(file.originalname)}`;
    let fileUpload = bucket.file(newFileName);
    const blobStream = fileUpload.createWriteStream({
      metadata: {
        contentType: file.mimetype
      }
    });
    blobStream.on('error', (error) => {
      reject('Something is wrong! Unable to upload at the moment.');
    });
    blobStream.on('finish', () => {
      // The public URL can be used to directly access the file via HTTP.
      const url = format(`https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`);
      resolve(url);
    });
    blobStream.end(file.buffer);
  });
}

router.post('/userprofile/check', upload.single('profile_pic'), async (req, res) => {
  var reqFiles;
  var reqFilesUrl;
  reqFiles = req.file;
  if (reqFiles) {
    // const imagerUrl = await uploadImageToStorage(reqFiles)
    reqFilesUrl = imagerUrl;
    console.log(reqFilesUrl);
    const notify = new userInfo({
      userId: req.body.userId,
      mobile_number: req.body.mobileNumber,
      profile_pic: reqFilesUrl
    })
    try {
      console.log('success insert data');
      await notify.save((err, post) => {
        if (err) {
          console.log(err);
        }
        //console.log('data saved', post);
        res.status(201).send(post);
      });
      // });
      // res.status(201).send();
      console.log('201');
    } catch (e) {
      //res.status(401);
      return res.send(e);
    }
  }
})
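For reference, a rough sketch of the usual pattern for parsing multipart form-data with busboy inside a Cloud Functions for Firebase HTTPS handler. This assumes the pre-1.0 Busboy constructor API and Firebase's req.rawBody, and it reuses the uploadImageToStorage helper from above; everything else (field names, status codes) is a placeholder, not code from the original post:

const Busboy = require('busboy');

router.post('/userprofile/check', (req, res) => {
  const busboyParser = new Busboy({ headers: req.headers });
  const fields = {};
  const files = [];

  // Plain form fields (userId, mobileNumber, ...)
  busboyParser.on('field', (fieldname, value) => {
    fields[fieldname] = value;
  });

  // File parts: buffer the bytes so they can be handed to uploadImageToStorage
  busboyParser.on('file', (fieldname, file, filename, encoding, mimetype) => {
    const chunks = [];
    file.on('data', (chunk) => chunks.push(chunk));
    file.on('end', () => {
      files.push({ originalname: filename, mimetype, buffer: Buffer.concat(chunks) });
    });
  });

  busboyParser.on('finish', async () => {
    try {
      const url = files.length ? await uploadImageToStorage(files[0]) : null;
      res.status(201).send({ ...fields, profile_pic: url });
    } catch (e) {
      res.status(500).send(e);
    }
  });

  // Cloud Functions has already consumed the request stream, so feed busboy the raw body
  busboyParser.end(req.rawBody);
});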

Nodejs S3 - Delete Multiple Images

My code responds as successful, but the images are still in my bucket.
I'm trying to delete images from my S3 bucket (upload works perfectly), but the image delete process isn't working properly.
Here's my code:
myRouter
router.delete("/:id", auth, async (req, res) => {
  const productSelected = await Product.findById(req.params.id);
  const result = await drop(productSelected);
  console.log('>>>>>>> ', result);
  res.send(result);
});
myS3Class
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const AWS_CONSTANTS = require('../constants');
const config = require('config');

aws.config.update({
  secretAccessKey: config.get('AWS_SECRET_ACCESS_KEY'),
  accessKeyId: config.get('AWS_ACCESS_KEY_ID'),
  region: AWS_CONSTANTS.region
})
const s3 = new aws.S3();

const drop = async (data) => {
  let obj = [];
  let result = "";
  data.images.map((image) => {
    obj.push({ Key: `${AWS_CONSTANTS.bucket}/${image}` }); // This is the full path
  });
  const options = {
    Bucket: AWS_CONSTANTS.bucket,
    Delete: {
      Objects: obj,
      Quiet: false,
    },
  }
  try {
    await s3.deleteObjects(
      options,
      function (err, data) {
        if (err) console.log('err ==>', err);
        console.log('delete successfully', data);
        result = data;
      }
    ).promise();
  } catch (error) {
    return { success: false, data: null }
  }
  return result;
}

module.exports.drop = drop;
This is the response from my code:
>>>>>>> delete successfully {
Deleted: [ { Key: 'my-bucketfolder/1655743085375Python.png' } ],
Errors: []
}
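Note that deleteObjects reports a key as Deleted even when no such object exists, and the Key values are relative to the Bucket, so prefixing them with the bucket name usually points at objects that aren't there. A hedged rewrite of drop that also avoids mixing a callback with .promise() (assuming data.images holds the keys exactly as they appear in the bucket):

const drop = async (data) => {
  const params = {
    Bucket: AWS_CONSTANTS.bucket,
    Delete: {
      // Keys are relative to the bucket, so no bucket prefix here
      Objects: data.images.map((image) => ({ Key: image })),
      Quiet: false,
    },
  };
  try {
    return await s3.deleteObjects(params).promise();
  } catch (error) {
    console.log('err ==>', error);
    return { success: false, data: null };
  }
};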

Can't upload files from Lambda to S3

I tested on my localhost, then checked on S3 and saw that a new file was created.
But when testing on Lambda, although there is no error, there is no file on S3. The log from s3.upload(params).promise() is also not displayed.
var fs = require('fs');
var AWS = require('aws-sdk');

exports.handler = async (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false
  try {
    AWS.config.update({
      accessKeyId: accessKeyId,
      secretAccessKey: secretAccessKey
    });
    var s3 = new AWS.S3();
    var path = 'myfile.txt';
    var file_buffer = fs.readFileSync(path);
    console.log(file_buffer);
    var params = {
      Bucket: 'bucket-dev',
      Key: '2222.txt',
      Body: file_buffer
    };
    console.log("1111");
    s3.upload(params).promise()
      .then(function(data) {
        console.log("Successfully uploaded to");
        callback(null, "All Good");
      })
      .catch(function(err) {
        console.error(err, err.stack);
        callback(err);
      });
    console.log("2222");
    return context.logStreamName
  } catch (err) {
    console.log(err);
    callback(err);
  }
}
Thanks
Try not to mix and match async and callbacks. In an async handler, returning before the upload promise settles means Lambda can freeze the execution environment, so the .then()/callback may never run and nothing reaches S3. Something like this might be closer to what you want...
var fs = require("fs");
var AWS = require("aws-sdk");

exports.handler = async (event, context) => {
  AWS.config.update({
    accessKeyId,
    secretAccessKey,
  });
  const s3 = new AWS.S3();
  const path = "myfile.txt";
  const file_buffer = fs.readFileSync(path);
  const params = {
    Bucket: "bucket-dev",
    Key: "2222.txt",
    Body: file_buffer,
  };
  console.log("1111");
  const res = await s3.upload(params).promise();
  console.log("Successfully uploaded", res);
  return "All good";
};

Is there any way to upload fluent-ffmpeg converted videos directly to S3 without storing them locally?

Is it possible to store ffmpeg output directly in S3 without saving it locally or to any other storage?
Below is my code, which converts the format of a video. I have done the conversion part, but I need to store its output directly in an S3 bucket. Does anyone have an idea how to approach this?
const AWS = require('aws-sdk');
const fs = require('fs');
const ffmpeg = require('fluent-ffmpeg');
const axios = require('axios');

const s3 = new AWS.S3({
  endpoint: 's3-ap-south-1.amazonaws.com', // Put your region's endpoint
  accessKeyId: S3_ACCESS_KEY_ID, // Put your accessKeyId
  secretAccessKey: S3_ACCESS_SECRET_KEY, // Put your secretAccessKey
  Bucket: S3_BUCKET_NAME, // Put your bucket name
  signatureVersion: 'v4',
  region: 'ap-south-1' // Put your region
});

var params = {
  Bucket: S3_BUCKET_NAME,
  Delimiter: '',
  Prefix: S3_STORE_PATH
};

s3.listObjects(params, function (err, data) {
  if (err) throw err;
  console.log(data);
  data.Contents.forEach(function (obj, index) {
    const file_name = obj.Key;
    const type = "mp4";
    console.log(obj.Key)
    const url = s3.getSignedUrl('getObject', {
      Bucket: S3_BUCKET_NAME,
      Key: obj.Key,
      Expires: signedUrlExpireSeconds
    });
    console.log("SIGNED URL= ", url);
    const filename = file_name.split('.').slice(0, -1).join('.');
    const localFileOutput = `${filename}.${type}`;
    // const localFileOutput = `${bucket_url}${filename}.${type}`;
    console.log(localFileOutput);
    const key = `${filename}.${type}`;

    const convert_video = async (req, res) => {
      await new Promise((resolve, reject) => {
        ffmpeg().input(url)
          .toFormat('mp4')
          .output(localFileOutput)
          .on('end', async () => {
            const params = {
              Bucket: S3_BUCKET_NAME,
              Key: key,
              Body: localFileOutput
            }
            // const fileContent = await fs.readFileSync(localFileOutput);
            await s3.putObject(params).promise();
            resolve();
          }).run();
      });
      // res.send("success")
    }
    convert_video();
  });
});
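One hedged approach: s3.upload accepts a readable stream as Body, so the fluent-ffmpeg output can be piped into a PassThrough stream and uploaded directly, with no local file. This is only a sketch (mp4 written to a non-seekable stream needs fragmented-mp4 flags; url, key, s3 and S3_BUCKET_NAME are reused from the code above):

const { PassThrough } = require('stream');

const convertAndUpload = (url, key) => {
  const pass = new PassThrough();

  // Start the upload immediately; it consumes the stream as ffmpeg produces data
  const uploadPromise = s3.upload({
    Bucket: S3_BUCKET_NAME,
    Key: key,
    Body: pass,
    ContentType: 'video/mp4'
  }).promise();

  ffmpeg(url)
    .format('mp4')
    // Plain mp4 needs a seekable output; these flags produce a streamable fragmented mp4
    .outputOptions('-movflags frag_keyframe+empty_moov')
    .on('error', (err) => pass.destroy(err))
    .pipe(pass, { end: true });

  return uploadPromise;
};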
