I have a file at the path ./files/myfile.zip that I would like to upload to S3.
I have a function:
const writeS3Async = (bucketName, fileName, strInput) => {
  const params = {
    Bucket: bucketName,
    Key: fileName,
    Body: strInput,
  };
  return s3.upload(params).promise();
};
The writeS3Async function is used for uploading strings to an S3 object, not files. Interestingly, I could not find a decent piece of code for a direct file upload.
You need to read the file first. Something like this:
var AWS = require('aws-sdk'),
    fs = require('fs');

fs.readFile('./files/myfile.zip', function (err, data) {
  if (err) { throw err; }
  var s3 = new AWS.S3();
  const params = {
    Bucket: bucketName, // your bucket name
    Key: fileName,      // the object key to write
    Body: data
  };
  // Call putObject directly on the S3 instance;
  // s3.client.putObject is not part of the aws-sdk v2 API.
  return s3.putObject(params).promise();
});
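If you would rather not buffer the whole file in memory, s3.upload also accepts a read stream as Body. A minimal sketch, assuming aws-sdk v2 and placeholder bucket/key names:

const AWS = require('aws-sdk');
const fs = require('fs');

const s3 = new AWS.S3();

// Stream the file straight to S3; upload() accepts a readable stream
// and handles multipart uploads for large files.
const uploadFileAsync = (bucketName, key, filePath) =>
  s3.upload({
    Bucket: bucketName,
    Key: key,
    Body: fs.createReadStream(filePath),
  }).promise();

uploadFileAsync('my-bucket', 'myfile.zip', './files/myfile.zip')
  .then((data) => console.log(`Uploaded to ${data.Location}`))
  .catch(console.error);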
I am completely new to Node.js. I am trying to code the following steps:
Download a file from an AWS S3 folder.
Then upload it to another AWS S3 folder.
I searched online and put together similar code in Node.js, shown below. What I see is that the downloadFile and uploadFile functions run in parallel, and uploadFile seems to run first. How do I run them in sequence?
const aws = require('aws-sdk');
var s3 = new aws.S3();
var fs = require('fs');

// TODO implement
var params = { Bucket: "buckets3", Key: "input_pdf_img/Gas_bill_sample.pdf" };
const filename = 'Gas_bill_sample.pdf';
const bucketName = "translation-bucket-qa-v1";
const key = "input_pdf_img/Gas_bill_sample.pdf";
const key2 = "output_pdf2docx_img/" + filename;
//console.log(filename);
const tmp_filename = "/tmp/Gas_bill_sample.pdf";
console.log(filename);

const downloadFile = (tmp_filename, bucketName, key) => {
  const params2 = {
    Bucket: bucketName,
    Key: key
  };
  s3.getObject(params, (err, data) => {
    if (err) console.error(err);
    fs.writeFileSync(tmp_filename, data.Body.toString());
    //console.log(`${filePath} has been created!`);
  });
};

//downloadFile(tmp_filename, bucketName, key);
//console.log('download done');
//await sleep(1000);

//upload
const uploadFile = (tmp_filename) => {
  // Read content from the file
  const fileContent = fs.readFileSync(tmp_filename);
  // Setting up S3 upload parameters
  const params2 = {
    Bucket: bucketName,
    Key: key2, // File name you want to save as in S3
    Body: fileContent
  };
  // Uploading files to the bucket
  s3.upload(params2, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};

downloadFile(tmp_filename, bucketName, key);
console.log('download done');
//setTimeout(() => {console.log("Let the download finish")}, 6000);
uploadFile(tmp_filename);
//setTimeout(() => {console.log("Let the download finish")}, 6000);
I tried timeouts and other workarounds, but nothing helped. Because the calls run in parallel, uploadFile runs before the download has finished, so it fails with "No such file or directory".
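One way to make them run in sequence is to use the SDK's .promise() API with async/await, so the upload cannot start until the download has finished writing the file. A minimal sketch, reusing the bucket and key names from the question (note it writes data.Body directly: toString() would corrupt a binary PDF):

const aws = require('aws-sdk');
const fs = require('fs');
const s3 = new aws.S3();

const bucketName = "translation-bucket-qa-v1";
const key = "input_pdf_img/Gas_bill_sample.pdf";
const key2 = "output_pdf2docx_img/Gas_bill_sample.pdf";
const tmp_filename = "/tmp/Gas_bill_sample.pdf";

const downloadFile = async (tmpFile, bucket, objectKey) => {
  const data = await s3.getObject({ Bucket: bucket, Key: objectKey }).promise();
  fs.writeFileSync(tmpFile, data.Body); // raw bytes, not toString()
};

const uploadFile = (tmpFile, bucket, objectKey) =>
  s3.upload({ Bucket: bucket, Key: objectKey, Body: fs.createReadStream(tmpFile) }).promise();

(async () => {
  await downloadFile(tmp_filename, bucketName, key); // completes before the next line runs
  console.log('download done');
  const data = await uploadFile(tmp_filename, bucketName, key2);
  console.log(`upload done: ${data.Location}`);
})().catch(console.error);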
I need to save a file to an S3-compatible DigitalOcean Space by URL. It can't be downloaded and then saved to S3, because we use firebase-functions, where file-system use is restricted. Is there a way to save it directly from the URL, or some other way? For example, by stream?
const https = require('https');
const fileName = 'image.jpg';
const url = 'imageUrl';

// my attempt
const res = await https.get(url, (stream) => stream.pipe(res));

await s3
  .upload({
    Bucket: 'name',
    Key: `content/${fileName}`,
    Body: res,
    ACL: 'public-read'
  })
  .promise();
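For reference, s3.upload does accept a readable stream as Body, so the HTTP response can be piped straight into the Space without touching the file system. A minimal sketch of that idea, reusing the s3 client above (uploadFromUrl is a hypothetical helper and the URL is a placeholder):

const https = require('https');

const uploadFromUrl = (url, bucket, key) =>
  new Promise((resolve, reject) => {
    https.get(url, (res) => {
      if (res.statusCode !== 200) {
        return reject(new Error(`HTTP ${res.statusCode}`));
      }
      // The response is a readable stream and can be used as the upload Body.
      s3.upload({
        Bucket: bucket,
        Key: key,
        Body: res,
        ACL: 'public-read',
      }).promise().then(resolve, reject);
    }).on('error', reject);
  });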
The whole problem was a wrong path to the /tmp directory (see the docs). Final code for the Firebase function:
const fs = require('fs');
const s3 = require('../../services/storage');
const download = require('download');

const saveMediaItemToStorage = async (sourceId, item) => {
  // * creating file name
  const fileName = `${item.id}.${item.extension}`;
  // * saving file to /tmp folder
  await download(item.originalMediaUrl, '/tmp', { filename: fileName });
  const blob = fs.readFileSync(`/tmp/${fileName}`);
  // * saving file to s3
  await s3
    .upload({
      Bucket: 'name',
      Key: `content/${sourceId}/${fileName}`,
      Body: blob,
      ACL: 'public-read'
    })
    .promise();
  // * remove file from temp folder
  fs.unlink(`/tmp/${fileName}`, function (err) {
    if (err) return console.log(err);
    console.log('file deleted successfully');
  });
};

module.exports = saveMediaItemToStorage;
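For context, a hypothetical call would pass a media item shaped like the fields the function reads (id, extension, originalMediaUrl; the module path is assumed):

const saveMediaItemToStorage = require('./saveMediaItemToStorage');

saveMediaItemToStorage('source-1', {
  id: 'abc123',
  extension: 'jpg',
  originalMediaUrl: 'https://example.com/image.jpg',
}).catch(console.error);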
I am struggling with unzipping contents in AWS S3. S3 does not provide the functionality to unzip a zip archive in the bucket directly. I am facing one error when uploading; my upload code is below.
"Error: Invalid CEN header (bad signature)"
Any advice or guidance would be greatly appreciated.
My Node.js code to upload the zip file:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({ signatureVersion: 'v4' });

exports.handler = async (event, context) => {
  const bucket = 'bucket-name';
  console.log(event);
  const body = event.body;
  const key = JSON.parse(body).key;
  console.log(key);
  const params = {
    Bucket: bucket,
    Key: key,
    ContentType: 'application/zip',
    Expires: 60
  };
  try {
    const signedURL = await s3.getSignedUrl('putObject', params);
    const response = {
      err: {},
      body: "url send",
      url: signedURL
    };
    return response;
  } catch (e) {
    const response = {
      err: e.message,
      body: "error occured"
    };
    return response;
  }
};
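As an aside, "Invalid CEN header (bad signature)" usually means the object stored in S3 is not a valid zip archive, which most often happens when the client PUTs the file to the presigned URL with the wrong body encoding. A minimal sketch of a client-side PUT of the raw bytes (putZip is a hypothetical helper, not part of the code above):

const https = require('https');
const fs = require('fs');

// PUT the raw zip bytes to the presigned URL returned by the handler above.
// The Content-Type must match the 'application/zip' used when signing, and
// the body must be the raw file bytes (no base64 or multipart wrapping),
// or S3 stores a corrupted archive and the unzip step fails.
const putZip = (signedUrl, filePath) =>
  new Promise((resolve, reject) => {
    const body = fs.readFileSync(filePath);
    const req = https.request(signedUrl, {
      method: 'PUT',
      headers: {
        'Content-Type': 'application/zip',
        'Content-Length': body.length,
      },
    }, (res) => (res.statusCode === 200 ? resolve() : reject(new Error(`HTTP ${res.statusCode}`))));
    req.on('error', reject);
    req.end(body);
  });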
My Node.js code to extract the zip file:
const S3Unzip = require('s3-unzip');

exports.s3_unzip = function(event, context, callback) {
  const filename = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
  const bucketname = event.Records[0].s3.bucket.name;
  console.log(event.Records[0].s3.object.key);
  new S3Unzip({
    bucket: bucketname,
    file: filename,
    deleteOnSuccess: true,
    verbose: true,
  }, function(err, success) {
    if (err) {
      callback(err);
    } else {
      callback(null);
    }
  });
}
I am using an AWS Lambda function with Node.js. I am trying to insert multiple images into an S3 bucket; see below for reference.
var AWS = require('aws-sdk');
var fileType = require('file-type');
var bucket = 'testing';
var s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
  var Userid = event['userid'];
  var media = event['media'];
  media.forEach(function(eachrecord) {
    var fileBuffer = Buffer.from(eachrecord, 'base64');
    var fileTypeInfo = fileType(fileBuffer);
    var randomstring = require("randomstring");
    var fileName = Userid + '/' + 'media/' + `${randomstring.generate()}.${fileTypeInfo.ext}`;
    var ext = fileTypeInfo.ext;
    var params = {
      Body: fileBuffer,
      Key: fileName,
      Bucket: bucket,
      ContentEncoding: 'base64',
      ContentType: fileTypeInfo.mime
    };
    s3.upload(params, function(err, data) {
      if (err) {
        callback(err, null);
      } else {
        let response = { body: JSON.stringify(data.Location) };
        let mediaurl = response.body;
        console.log(mediaurl);
      }
    });
  });
  context.succeed('done');
};
In media I send an array of base64-encoded values in the event, but how do I implement the loop so that each value in the media array gets inserted into the S3 bucket?
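One way to handle the array is to map each record to an upload promise and wait for all of them before finishing; otherwise context.succeed fires while the uploads are still in flight. A minimal sketch, assuming the same file-type and randomstring versions used in the question:

const AWS = require('aws-sdk');
const fileType = require('file-type');
const randomstring = require('randomstring');

const s3 = new AWS.S3();
const bucket = 'testing';

exports.handler = async (event) => {
  const userid = event['userid'];
  const media = event['media'];

  // One upload promise per base64 entry; Promise.all resolves when every upload has finished.
  const locations = await Promise.all(media.map((record) => {
    const fileBuffer = Buffer.from(record, 'base64');
    const info = fileType(fileBuffer);
    const fileName = `${userid}/media/${randomstring.generate()}.${info.ext}`;
    return s3.upload({
      Bucket: bucket,
      Key: fileName,
      Body: fileBuffer,
      ContentType: info.mime,
    }).promise().then((data) => data.Location);
  }));

  console.log(locations);
  return locations; // the handler only returns after all uploads complete
};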
I want to upload a file in LZMA format (a Unity asset bundle) to an Amazon server using the PUT method, but I always get an error saying:
"errorMessage": "Expected params.Body to be a string, Buffer, Stream, Blob, or typed array object".
I tried to buffer the file, but it is not working, so what is the best way to upload this type of file?
var aws = require('aws-sdk');
var s3 = new aws.S3();
var mime = require('mime-types');
//.....
var mimetype = mime.lookup(f.name);
if (mimetype == false) {
  mimetype = 'application/octet-stream';
}
//is not working
// var buffer = Buffer.from(new Uint8Array(f));
// fs.readFile(f, function (err, data)
s3.putObject({
  Bucket: "my backet",
  Key: f.name,
  Body: f,
  ContentType: mimetype,
  CacheControl: 'no-cache',
  Expires: 0
}, function(err, data) {
  if (err) {
    context.fail(err, "error");
  }
  console.log("success:" + f.name);
});
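The error message lists the accepted Body types, and f here is apparently none of them (it looks like a file metadata object rather than the file contents). One approach that satisfies the check is to pass a read stream for the file on disk; a minimal sketch, where f.path is an assumed property holding the local file path, and s3.upload is used because it accepts streams:

var fs = require('fs');

// A fs.ReadStream is an accepted Body type; the LZMA bytes are sent as-is.
s3.upload({
  Bucket: "my backet",
  Key: f.name,
  Body: fs.createReadStream(f.path), // f.path: assumed local path to the bundle
  ContentType: mimetype,
  CacheControl: 'no-cache'
}, function(err, data) {
  if (err) {
    return context.fail(err, "error");
  }
  console.log("success: " + f.name);
});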