Download AWS S3 file directly to a directory - node.js

I want to automatically save the file I'm downloading from AWS S3 into my application folder. Right now the file can only be saved manually. Code below:
router.get("/download", (req, res) => {
  // S3 file key
  var fileKey = "key";
  AWS.config.update({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: BUCKET_NAME
  });
  var s3 = new AWS.S3();
  var file = fs.createWriteStream("test.csv");
  var options = {
    Bucket: "name",
    Key: fileKey
  };
  res.attachment(fileKey);
  var fileStream = s3
    .getObject(options)
    .createReadStream()
    .on("error", error => {
      console.log(error);
      res.json({ error: "An error has occurred, check console." });
    })
    .on("httpData", function(data) {
      file.write(data);
    })
    .on("httpDone", function() {
      file.end();
    });
  fileStream.pipe(res);
  // fse.writeFileSync("text.csv");
});
As mentioned before, the file can be downloaded and saved manually. But how can I write the file and save it automatically to a specific folder?
Thank you

Here's an example of downloading an S3 object to a specific local file:
const AWS = require('aws-sdk');
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
var params = {Bucket: 'mybucket', Key: 'test.csv'};
var file = require('fs').createWriteStream('/tmp/test.csv');
s3.getObject(params).createReadStream().pipe(file);

The code for jarmod's answer, promisified:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({apiVersion: '2006-03-01'});
const params = {Bucket: 'mybucket', Key: 'test.csv'};
const file = require('fs').createWriteStream('/tmp/test.csv');
new Promise((resolve, reject) => {
  const pipe = s3.getObject(params).createReadStream().pipe(file);
  pipe.on('error', reject);
  pipe.on('close', resolve);
});

Related

Is there any way to upload fluent-ffmpeg converted videos directly to s3 without storing them on local?

Is it possible to store ffmpeg output directly to S3, without downloading it locally or to any other storage?
Below is my code, which uses ffmpeg to convert the format of a video. The conversion part is done, but I need to store its output directly to an S3 bucket. Does anyone have an idea how to solve this problem?
const AWS = require('aws-sdk');
const fs = require('fs');
const ffmpeg = require('fluent-ffmpeg');
const axios = require('axios');

const s3 = new AWS.S3({
  endpoint: 's3-ap-south-1.amazonaws.com', // Put your region's endpoint
  accessKeyId: S3_ACCESS_KEY_ID, // Put your accessKeyId
  secretAccessKey: S3_ACCESS_SECRET_KEY, // Put your secretAccessKey
  Bucket: S3_BUCKET_NAME, // Put your bucket name
  signatureVersion: 'v4',
  region: 'ap-south-1' // Put your region
});

var params = {
  Bucket: S3_BUCKET_NAME,
  Delimiter: '',
  Prefix: S3_STORE_PATH
};

s3.listObjects(params, function (err, data) {
  if (err) throw err;
  console.log(data);
  data.Contents.forEach(function (obj, index) {
    const file_name = obj.Key;
    const type = "mp4";
    console.log(obj.Key);
    const url = s3.getSignedUrl('getObject', {
      Bucket: S3_BUCKET_NAME,
      Key: obj.Key,
      Expires: signedUrlExpireSeconds
    });
    console.log("SIGNED URL= ", url);
    const filename = file_name.split('.').slice(0, -1).join('.');
    const localFileOutput = `${filename}.${type}`;
    // const localFileOutput = `${bucket_url}${filename}.${type}`;
    console.log(localFileOutput);
    const key = `${filename}.${type}`;
    const convert_video = async (req, res) => {
      await new Promise((resolve, reject) => {
        ffmpeg().input(url)
          .toFormat('mp4')
          .output(localFileOutput)
          .on('end', async () => {
            const params = {
              Bucket: S3_BUCKET_NAME,
              Key: key,
              Body: localFileOutput
            };
            // const fileContent = await fs.readFileSync(localFileOutput);
            await s3.putObject(params).promise();
            resolve();
          }).run();
      });
      // res.send("success")
    };
    convert_video();
  });
});
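No answer is recorded here, but the general technique is the same streaming approach used in the .wav answer further down: pipe the ffmpeg output into s3.upload() instead of writing a local file. A minimal sketch under stated assumptions (fluent-ffmpeg piping into a PassThrough stream, and fragmented MP4 output, since the MP4 muxer normally needs a seekable output):

const { PassThrough } = require('stream');

function convertAndUpload(url, key) {
  const pass = new PassThrough();
  ffmpeg(url)
    .toFormat('mp4')
    // MP4 normally requires seekable output; fragmenting lets it be piped
    .outputOptions('-movflags', 'frag_keyframe+empty_moov')
    .on('error', err => pass.destroy(err))
    .pipe(pass); // ffmpeg writes the converted bytes into the stream
  // s3.upload() accepts a readable stream as Body, so nothing touches local disk
  return s3.upload({ Bucket: S3_BUCKET_NAME, Key: key, Body: pass }).promise();
}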

CSV uploaded to S3 bucket is not in the correct format and does not display the data correctly

const fs = require('fs');
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
  accessKeyId: 'key',
  secretAccessKey: 'accesskey'
});

const fileName = 'atest.csv';

const uploadFile = () => {
  fs.readFile(fileName, (err, data) => {
    if (err) throw err;
    const params = {
      Bucket: 'mybucket1s-resized', // pass your bucket name
      Key: 'FlightReporttest.csv', // file will be saved as testBucket/contacts.csv
      ContentDisposition: 'attachment',
      Body: JSON.stringify(data, null, 2)
    };
    s3.upload(params, function(s3Err, data) {
      if (s3Err) throw s3Err;
      console.log(`File uploaded successfully at ${data.Location}`);
    });
  });
};

uploadFile();
It uploads to the bucket fine, but when I download that CSV it is in a different format (screenshot: https://i.stack.imgur.com/DF46l.png).
Any suggestions? If I change the Body to anything else it throws an error:

Body: stream
      ^
ReferenceError: stream is not defined
    at fs.readFile (C:\Users\cloudone\Desktop\Work\Projects info\Pageman_Flight_Reports\flightreportupload\index.js:17:16)
    at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:511:3)
You wouldn't want to JSON.stringify() a CSV file on upload. If you remove that from your code and just pass data as the Body, it will upload correctly.
I've also tidied up the code a little below:
const AWS = require('aws-sdk')
const fs = require('fs')

const s3 = new AWS.S3({
  accessKeyId: 'key',
  secretAccessKey: 'accesskey',
})

const uploadFile = fileName => {
  const params = {
    Bucket: 'mybucket1s-resized',
    Key: 'FlightReporttest.csv',
    ContentDisposition: 'attachment',
    Body: fs.readFileSync(fileName),
  }
  return s3.upload(params).promise()
}

uploadFile('atest.csv')
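A small follow-up of my own, not from the original answer: since uploadFile() now returns a promise, the caller can handle success and failure explicitly, and adding ContentType: 'text/csv' to the params is a common refinement so S3 serves the file with the right MIME type:

uploadFile('atest.csv')
  .then(data => console.log(`File uploaded successfully at ${data.Location}`))
  .catch(err => console.error('Upload failed', err))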

How to save .wav to s3 bucket from URL in lambda function

I have a url for a .wav file. I'd like to save it to an S3 bucket from a Lambda function.
Do I have to download it first?
What's the best way to do this?
exports.handler = async (event) => {
  // imports
  const fs = require('fs');
  const AWS = require('aws-sdk');
  AWS.config.update({ region: process.env.REGION || 'us-east-1' });
  const s3 = new AWS.S3();

  // get URL
  const body = parseBody(event['body']);
  const url = body.url;

  // download file?
  // HOW TO DO THIS using async?
  const file_name = magic_save(url);

  // upload to S3
  var bucketName = `some_bucket`;
  var keyName = 'audio.wav';
  const fileContent = fs.readFileSync(file_name);
  var params = { 'Bucket': bucketName, 'Key': keyName, 'Body': fileContent };
  try {
    console.log('saving...');
    const data = await s3.putObject(params).promise();
    console.log("Successfully saved object to " + bucketName + "/" + keyName);
  } catch (err) {
    console.log('err');
    console.log(err);
  }
};
The best approach is to stream the file directly to S3, like this:
const got = require("got");
const aws = require("aws-sdk");

const s3Client = new aws.S3();
const Bucket = 'somebucket';
const Key = "some/audio.wav";

exports.handler = async (event) => {
  // get URL
  const body = parseBody(event['body']);
  const url = body.url;
  const stream = got.stream(url);
  const response = await s3Client.upload({ Bucket, Key, Body: stream }).promise();
  console.log(response);
};
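A caveat worth adding (my note, not part of the original answer): if the URL is unreachable, the got stream emits an error that s3Client.upload() surfaces as a rejected promise, so a try/catch keeps the Lambda from failing with an unhandled rejection. parseBody is the question's own placeholder; JSON.parse stands in for it here as an assumption:

exports.handler = async (event) => {
  // Hypothetical stand-in for the question's parseBody helper
  const { url } = JSON.parse(event.body);
  try {
    const stream = got.stream(url);
    const response = await s3Client.upload({ Bucket, Key, Body: stream }).promise();
    console.log(response);
  } catch (err) {
    // A failed download (or upload) lands here instead of crashing the handler
    console.log('Upload failed', err);
  }
};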

Having trouble uploading image to AWS S3 bucket with Node.js

I'm trying to upload an image to S3 using Node.js and the AWS SDK. It keeps returning a weird error: "Inaccessible host: `images.dynamodb.us-east-1.amazonaws.com'. This service may not be available in the 'us-east-1' region"
This is my lambda code:
exports.handler = function(event, context, callback) {
  var s3 = new AWS.S3();
  const image = event.body.imageBinary;
  var buf = new Buffer.from(image.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  const type = image.split(';')[0].split('/')[1];
  var params = {
    Bucket: process.env.BUCKET,
    Key: `${AccountId}.${type}`,
    Body: buf,
    ContentEncoding: 'base64',
    ContentType: `image/${type}`
  };
  s3.upload(params, function(err, resp) {
    if (err) {
      console.log(err);
    } else {
      console.log('successfully uploaded the image!: ' + JSON.stringify(resp));
    }
  });
}
I even tried setting the AWS object configuration (with key, secret key and region) but got the same response.
My aws-sdk version: "aws-sdk": "^2.610.0"
Any help would be appreciated.
Thanks!!!
Lambda supports Node.js v12, which allows you to write async/await code:
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
  region: 'us-east-1',
  apiVersion: '2006-03-01',
});

exports.handler = async (event, context) => {
  const image = event.body.imageBinary;
  const buf = Buffer.from(image.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  const type = image.split(';')[0].split('/')[1];
  // ACL, CacheControl, ContentType and ContentEncoding are S3 request
  // parameters, so they go in params; upload()'s optional second argument
  // only takes ManagedUpload options such as partSize and queueSize
  const params = {
    Bucket: process.env.BUCKET,
    Key: `${AccountId}.${type}`,
    Body: buf,
    ACL: 'private',
    CacheControl: 'max-age=86400',
    ContentType: `image/${type}`,
    ContentEncoding: 'base64',
  };
  await s3.upload(params).promise();
};
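If this handler sits behind an API Gateway proxy integration, it is generally expected to return a response object as well; a sketch of that shape (the status codes and body are my assumptions, not from the answer):

exports.handler = async (event) => {
  try {
    // ... build buf, type and params as above ...
    const result = await s3.upload(params).promise();
    return { statusCode: 200, body: JSON.stringify({ location: result.Location }) };
  } catch (err) {
    console.log(err);
    return { statusCode: 500, body: JSON.stringify({ error: 'Upload failed' }) };
  }
};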

File Upload to S3 - 0 bytes

I'm using the text-to-mp3 module to create a text-to-speech MP3 file. Right now, the file is being saved to my file system successfully. I'm trying to upload it to S3 as well. It shows up in S3, but the file is empty (0 bytes).
txtomp3.getMp3(myverse, function(err, binaryStream) {
  if (err) {
    console.log(err);
    return;
  }
  var file = fs.createWriteStream("FileName.mp3"); // write it down to a file
  file.write(binaryStream);
  file.end();
  var myfile = fs.createReadStream("FileName.mp3");
  AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });
  var s3 = new AWS.S3();
  s3.putObject({
    Bucket: 'myverses',
    Key: 'del2.mp3',
    Body: myfile,
    ACL: 'public-read'
  }, function (resp) {
    console.log(arguments);
    console.log('Successfully uploaded package.');
  });
});
Uploading a File to an Amazon S3 Bucket in the AWS documentation has a clear example of how to upload a file:
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'REGION'});
// Create S3 service object
s3 = new AWS.S3({apiVersion: '2006-03-01'});

// Prepare the upload parameters
var uploadParams = {Bucket: process.argv[2], Key: '', Body: ''};
var file = process.argv[3];

var fs = require('fs');
var fileStream = fs.createReadStream(file);
fileStream.on('error', function(err) {
  console.log('File Error', err);
});
uploadParams.Body = fileStream;
var path = require('path');
uploadParams.Key = path.basename(file);

// call S3 to upload the file to the specified bucket
s3.upload(uploadParams, function (err, data) {
  if (err) {
    console.log("Error", err);
  }
  if (data) {
    console.log("Upload Success", data.Location);
  }
});
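For the 0-byte symptom in the question specifically, a likely cause (my reading, not stated in either answer) is that the read stream is opened before the write stream has finished flushing to disk. Since binaryStream is already in memory, here is a sketch that passes it straight to S3 and skips the temporary file entirely, assuming binaryStream is a Buffer (putObject accepts a Buffer as Body):

txtomp3.getMp3(myverse, function(err, binaryStream) {
  if (err) {
    console.log(err);
    return;
  }
  var s3 = new AWS.S3();
  s3.putObject({
    Bucket: 'myverses',
    Key: 'del2.mp3',
    Body: binaryStream, // upload the in-memory data directly
    ACL: 'public-read'
  }, function (err, data) {
    if (err) {
      console.log(err);
      return;
    }
    console.log('Successfully uploaded package.');
  });
});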
Here is a solution in Laravel, using the upload method, for the case where the object is stored in the bucket as 0 bytes.
aws.php:

return [
    'credentials' => [
        'key' => 'XXXXX',
        'secret' => 'XXXXXXXXX',
    ],
    'region' => 'us-east-2',
    'version' => 'latest',
];
$s3 = \App::make('aws')->createClient('s3');
$adapter = new AwsS3Adapter($s3, 'bucket_name');
$s4 = $adapter->getClient()->upload(
    'bucket_name',
    $fileName,
    fopen($imageFile->getPathName(), 'rb'),
    'authenticated-read',
    ['params' =>
        [
            'ContentType' => $imageFile->getClientMimeType()
        ]
    ]
);
Can anyone help me do the same using putObject?

$s3 = $s3->putObject(array(
    'Bucket' => 'bucket-name',
    'Key' => $fileName,
    'sourceFile' => fopen($imageFile, 'rb'),
    'contentLength' => $imageFile->getClientSize(),
    'ACL' => 'authenticated-read'
));
