I want to download some image files from an S3 bucket to my local system using Promises in Node.js.
var params = {
Bucket: 'bucket_name',
Key: 'key'
};
var fileStream = fs.createWriteStream('path/to/file.jpg');
I tried this, which works:
s3.getObject(params).createReadStream().pipe(fileStream);
But I want my code to look like this:
return s3.getObject(params).promise()
.then(function(data) {
//console.log(data.Body);
// No idea about this section
})
.catch(function(err) {
throw err;
});
I have to use Promises to ensure that all the images are downloaded.
One possible solution is to use bluebird and create a function that returns a promise which settles when the stream ends:
const B = require('bluebird');
function downloadFromS3(params, dest) {
    // defer() is legacy bluebird API, but it keeps the example short
    var deferred = B.defer();
    var stream = s3.getObject(params).createReadStream();
    stream.pipe(fs.createWriteStream(dest));
    stream.on('error', (e) => deferred.reject(e));
    stream.on('end', () => deferred.resolve());
    return deferred.promise;
}
downloadFromS3(params, 'path/to/file.jpg')
    .then(() => console.log('finished'))
    .catch(() => console.log('failed'))
Not sure if this code specifically would work, but it may give you a direction to look into.
The snippet below worked for me:
async function getObjectStreamSync(params, dest) {
    return new Promise((resolve, reject) => {
        // create a read stream for the object and pipe it into the file
        let stream = s3.getObject(params).createReadStream();
        var fileStream = fs.createWriteStream(dest);
        stream.pipe(fileStream);
        // on error, reject the Promise
        stream.on('error', (err) => reject(err));
        fileStream.on('error', (err) => reject(err));
        // resolve once the data has been fully written to disk
        fileStream.on('finish', () => resolve());
    });
}
await getObjectStreamSync(params, "path/to/file/file.ext"); // inside an async function
Here the stream is wrapped in a Promise, which is resolved or rejected by listening to the emitted stream events.
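Since the original goal was to make sure all images get downloaded, a function like this combines naturally with Promise.all. A sketch (the bucket name and key list here are placeholders, not from the question):
const keys = ['img1.jpg', 'img2.jpg', 'img3.jpg']; // hypothetical keys
Promise.all(keys.map((key) =>
    getObjectStreamSync({ Bucket: 'my-bucket', Key: key }, `downloads/${key}`)
))
    .then(() => console.log('all images downloaded'))
    .catch((err) => console.error('at least one download failed', err));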
const streamToPromise = require('stream-to-promise');
var fileStream = fs.createWriteStream('path/to/file.jpg');
streamToPromise(fileStream).then(function () {
console.log('Image saved to file.');
});
s3.getObject(params).createReadStream().pipe(fileStream);
Here's a native promise solution with error detection on the read stream and on the write stream.
function streamPromise(stream) {
return new Promise((resolve, reject) => {
stream.on('end', () => {
resolve('end');
});
stream.on('finish', () => {
resolve('finish');
});
stream.on('error', (error) => {
reject(error);
});
});
}
async function s3Download(srcBucket, srcKey, outputPath) {
var objReq = s3.getObject({
Bucket: srcBucket,
Key: srcKey
});
let outStream = fs.createWriteStream(outputPath);
let readStream = objReq.createReadStream();
readStream.on('error', (err) => {
console.warn('s3download error', err);
outStream.emit("error", err);
});
readStream.pipe(outStream);
return streamPromise(outStream);
}
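For example (bucket, key, and output path are placeholders):
s3Download('my-bucket', 'photos/cat.jpg', '/tmp/cat.jpg')
    .then(() => console.log('download complete'))
    .catch((err) => console.error('download failed', err));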
Here is a snippet using async/await with Node.js 8:
const AWS = require('aws-sdk');
const fs = require('fs-extra');
const decompress = require('decompress');
const s3 = new AWS.S3();
const s3Params = {
Bucket: s3Location.bucketName,
Key: s3Location.objectKey,
};
const s3Object = await s3.getObject(s3Params).promise();
await fs.writeFile('myfile.zip', s3Object.Body);
await decompress('myfile.zip', 'myFileDir');
/* The compressed file is retrieved as "myfile.zip".
Content will be extracted in myFileDir directory */
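One caveat: Node.js 8 has no top-level await, so in practice the awaits above would run inside an async function, e.g. an async IIFE:
(async () => {
    const s3Object = await s3.getObject(s3Params).promise();
    await fs.writeFile('myfile.zip', s3Object.Body);
    await decompress('myfile.zip', 'myFileDir');
})();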
I have a Node.js AWS Lambda function, and I am trying to read records from a CSV file in S3 and print its contents.
Below is my code to achieve this; however, I am getting null as output.
Code:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const csv = require('csv-parser');
const bucket = 'awslambdabuckets';
const objectkey = 'record.csv';
const params = { Bucket: bucket, Key: objectkey };
const results = [];
exports.handler = async function (event, ctx, callback) {
try {
const file = s3.getObject(params).createReadStream();
file
.pipe(csv())
.on('data', function (data) {
results.push;
})
.on('end', () => {
console.log(results);
callback(null, results);
});
} catch (err) {
console.log(err);
callback(Error(err));
}
};
Output: null
Can someone help me point out what the problem is and how to fix it?
You are not pushing the data to the results array; see and make the changes below:
exports.handler = async function (event, ctx, callback) {
try {
const file = s3.getObject(params).createReadStream();
file
.pipe(csv())
.on('data', function (data) {
results.push(data);
})
.on('end', () => {
console.log(results);
callback(null, results);
});
} catch (err) {
console.log(err);
callback(Error(err));
}
};
You are not pushing data to the array:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const csv = require('csv-parser');
const bucket = 'awslambdabuckets';
const objectkey = 'record.csv';
const params = { Bucket: bucket, Key: objectkey };
const results = [];
exports.handler = function (event, ctx, callback) {
try {
const file = s3.getObject(params).createReadStream();
file
.pipe(csv())
.on('data', function (data) {
results.push(data); // --> here
})
.on('end', () => {
console.log(results);
callback(null, results);
});
} catch (err) {
console.log(err);
callback(Error(err));
}
};
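One additional caveat (an observation, not part of either answer): results is declared at module scope, so on a warm Lambda container it persists between invocations and rows accumulate across calls. Declaring it inside the handler avoids that:
exports.handler = function (event, ctx, callback) {
    const results = []; // fresh array per invocation, not shared across warm starts
    // ... same stream/pipe/csv logic as above ...
};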
As the title says, how do you make a direct download link for a file from MongoDB (GridFSBucket) using Express?
The file should be downloadable from memory, as I don't want to save it temporarily on the server.
I have this method:
async function downloadFileFromDB(fileId) {
var gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
chunkSizeBytes: 1024,
bucketName: 'filesBucket'
});
try {
const stream = gridfsbucket.openDownloadStream(fileId)
const fileBuffer = Buffer.from(stream)
return fileBuffer
} catch (err) {
stream.on('error', () => {
console.log("Some error occurred in download:" + error);
})
console.log(err);
}
}
And this route:
router.get('/download-file', async (req,res) => {
const fileId = req.query.fileId
const ObjectFileId = new ObjectId(fileId)
const fileBuffer = await fileFacade.downloadFileFromDB(ObjectFileId)
res.download(fileBuffer)
})
But res.download wants a path, not a buffer. Also, I'm not sure I can make a buffer directly from the openDownloadStream method.
Can anyone help?
I believe you need to write the data to your res object. I accomplished this like so:
const readStream = gridfs.openDownloadStreamByName(filename);
readStream.on("data", (chunk) => {
res.write(chunk);
});
readStream.on("end", () => {
res.status(200).end();
mongoClient.close();
});
readStream.on("error", (err) => {
console.log(err);
res.status(500).send(err);
});
So, you may just have to do:
res.write(fileBuffer);
res.end();
//// Instead of doing:
// res.download(fileBuffer);
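Alternatively, since openDownloadStream already returns a readable stream, it can be piped straight to the response, which avoids buffering the whole file in memory (in line with the goal of not saving it on the server). A sketch, assuming the gridfsbucket instance from the question is in scope, with a placeholder filename:
router.get('/download-file', (req, res) => {
    const fileId = new ObjectId(req.query.fileId);
    const stream = gridfsbucket.openDownloadStream(fileId);
    res.set('Content-Disposition', 'attachment; filename="file.bin"'); // placeholder name
    stream.on('error', (err) => res.status(500).send(err.message));
    stream.pipe(res);
});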
Using the code below, I'm trying to download a file from one S3 bucket and upload it to another S3 bucket programmatically. The code executes without any issues/exceptions, but the file is not getting processed.
const AWS = AWSXRay.captureAWS(require('aws-sdk'))
const S3 = new AWS.S3()
const fs = require('fs')
exports.index = async (event, context) => {
var getParams = {
Bucket: 'my-s3-test-bucket1',
Key: 'SampleVideo.mp4'
}
const inputFilename = '/tmp/SampleVideo.mp4';
const writeStream = fs.createWriteStream(inputFilename)
new Promise((resolve, reject) => {
    S3.getObject(getParams).createReadStream()
        .pipe(writeStream)
        .on('end', () => { console.log('end'); return resolve(); })
        .on('error', (error) => { console.log('error'); return reject(error); });
});
writeStream.on('finish', function () {
var putParams = {
Body: fs.createReadStream(inputFilename),
Bucket: 'my-s3-test-bucket2',
Key: 'transfer-' + 'OutputVideo.mp4',
}
S3.upload(putParams, function (err, data) {
if (err) console.log(err, err.stack)
else console.log('logging data' + data)
})
})
}
It seems that because you are using an async handler, your function completes prematurely, before its body has a chance to fully execute.
You can wrap your code in a promise, as shown in the AWS docs, to actually tell your function to wait for its entire body to execute:
const AWS = AWSXRay.captureAWS(require('aws-sdk'))
const S3 = new AWS.S3()
const fs = require('fs')
exports.index = async (event, context) => {
const promise = new Promise(function(resolve, reject) {
var getParams = {
Bucket: 'my-s3-test-bucket1',
Key: 'SampleVideo.mp4'
}
const inputFilename = '/tmp/SampleVideo.mp4';
const writeStream = fs.createWriteStream(inputFilename)
new Promise((resolve, reject) => {
    S3.getObject(getParams).createReadStream()
        .pipe(writeStream)
        .on('end', () => { console.log('end'); return resolve(); })
        .on('error', (error) => { console.log('error'); return reject(error); });
});
writeStream.on('finish', function () {
var putParams = {
Body: fs.createReadStream(inputFilename),
Bucket: 'my-s3-test-bucket2',
Key: 'transfer-' + 'OutputVideo.mp4',
}
S3.upload(putParams, function (err, data) {
    // settle the outer promise so the handler actually finishes
    if (err) { console.log(err, err.stack); reject(err); }
    else { console.log('logging data ' + data); resolve(data); }
})
})
});
return promise
}
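As an aside (not part of the question, which asks for download-then-upload): if the goal is just to move an object between buckets, S3 can copy it server-side with copyObject, avoiding the round trip through /tmp entirely:
exports.index = async (event, context) => {
    await S3.copyObject({
        Bucket: 'my-s3-test-bucket2',
        CopySource: '/my-s3-test-bucket1/SampleVideo.mp4',
        Key: 'transfer-OutputVideo.mp4'
    }).promise();
};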
I am using the code below to read a JSON file in Google Firebase Storage and then return the content of the file. The code works, but all I am getting on the client side is null:
exports.updateProductCatalogue = functions.https.onCall(async (data, context) => {
const filepath = data.filepath
const bucketname = data.bucket
const remoteFile = bucket.file("storeid.json");
let buffer = '';
remoteFile.createReadStream()
.on('error', function(err) {console.log(err)})
.on('data', function(response) {
buffer += response
console.log(buffer)
})
.on('end', function() {
//console.log(buffer);
console.log("FINISHED!!")
})
return buffer
})
This is my client-side JS call:
function getUpdatedCatalogue(){
var getStorageData = firebase.functions().httpsCallable('updateProductCatalogue');
var callData = {
"bucket":"test"
}
getStorageData(callData).then(function(result){
console.log(result)
}).catch(function(error){
console.log(error)
})
}
The Cloud Functions log shows that the content is read and logged, but the client-side console.log returns null.
Why am I not getting the file content returned and displayed on the client side? How can I fix this?
The problem is that you're returning the buffer before the stream finishes reading the file.
Try this (not tested):
exports.updateProductCatalogue = functions.https.onCall(async (data, context) => {
const filepath = data.filepath;
const bucketname = data.bucket;
const remoteFile = bucket.file("storeid.json");
return new Promise((resolve, reject) => {
let buffer = '';
remoteFile.createReadStream()
.on('error', function(err) {
console.log(err);
reject(err);
})
.on('data', function(response) {
buffer += response;
console.log(buffer);
})
.on('end', function() {
console.log("FINISHED!!")
resolve(buffer);
});
});
});
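As a side note, if buffering the whole file is acceptable, the Cloud Storage client also has a promise-based File#download method that avoids the manual stream handling altogether (a sketch, assuming the same bucket object):
exports.updateProductCatalogue = functions.https.onCall(async (data, context) => {
    const remoteFile = bucket.file("storeid.json");
    const [contents] = await remoteFile.download(); // resolves with a [Buffer]
    return contents.toString('utf8');
});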
I have a JSON file uploaded to S3, and I wrote the following code to query this file:
const aws = require('aws-sdk');
const s3 = new aws.S3();
const bucket = 'hotels.mserver.online';
const objectKey = 'hotelsrates.json';
exports.handler = (event,context,callback) => {
// TODO implement
const response = getS3Objects(bucket,objectKey); //s3.listObjectsV2({}).promise();
console.log(response);
};
function getS3Objects(bucket,key) {
return s3.getObject({ Bucket:bucket, Key:key, ResponseContentType:'application/json '})
.promise().then(file => { return file })
.catch(error => { return error });
}
But the result is null.
I understand what you are trying to accomplish here but that is not the right way to do it.
function getS3Objects(bucket,key){
return s3.getObject({Bucket:bucket,Key:key,ResponseContentType:'application/json'})
.promise().then(file=>{return file})
.catch(error =>{return error});
}
The part above will still return a promise object, which means that you need to handle it accordingly. Instead of const response = getS3Objects(bucket,objectKey); you want to do:
getS3Objects(bucket,objectKey).then(response => console.log(response));
inside your handler function.
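Or, since the handler can be declared async, the same thing with await:
exports.handler = async (event, context) => {
    const response = await getS3Objects(bucket, objectKey);
    console.log(response);
    return response;
};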
Furthermore, your usage of the s3.getObject function is incorrect: the first argument is a parameters object, and the second argument is a callback function:
s3.getObject(params, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data);
});
Therefore in your case, you want to modify your getS3Objects function a bit. If you want to use promises, then you can do it like this.
function getS3Objects(bucket, key) {
return new Promise((resolve, reject) => {
s3.getObject(
{
Bucket: bucket,
Key: key,
ResponseContentType: 'application/json'
},
(err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
}
);
});
}
Another way that you can do this is as follows:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
async function readFile(Bucket, Key) {
const params = {
Bucket,
Key,
ResponseContentType: 'application/json',
};
const f = await s3.getObject(params).promise();
return f.Body.toString('utf-8');
}
readFile('mybucket', 'xyz.json').then(console.log);
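Since the object is JSON, the returned string can be parsed directly:
readFile('mybucket', 'xyz.json')
    .then((body) => JSON.parse(body))
    .then((rates) => console.log(rates));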