Having trouble uploading image to AWS S3 bucket with Node.js

I'm trying to upload an image to S3 using Node.js and the AWS SDK. It keeps returning a strange error:
Inaccessible host: `images.dynamodb.us-east-1.amazonaws.com'. This service may not be available in the 'us-east-1' region.
This is my Lambda code:
exports.handler = function(event, context, callback) {
    var s3 = new AWS.S3();
    const image = event.body.imageBinary;
    var buf = new Buffer.from(image.replace(/^data:image\/\w+;base64,/, ""), 'base64');
    const type = image.split(';')[0].split('/')[1];
    var params = {
        Bucket: process.env.BUCKET,
        Key: `${AccountId}.${type}`,
        Body: buf,
        ContentEncoding: 'base64',
        ContentType: `image/${type}`
    };
    s3.upload(params, function(err, resp) {
        if (err) {
            console.log(err);
        } else {
            console.log('succesfully uploaded the image!: ' + JSON.stringify(resp));
        }
    });
}
I even tried setting the AWS object configuration (with key, secret key, and region), but I got the same response.
My aws-sdk version: "aws-sdk": "^2.610.0"
Any help would be appreciated.
Thanks!

Lambda supports Node.js 12, which allows you to write async/await code:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({
    region: 'us-east-1',
    apiVersion: '2006-03-01',
});

exports.handler = async (event, context) => {
    const image = event.body.imageBinary;
    // Strip the data-URL prefix, then decode the base64 payload
    const buf = Buffer.from(image.replace(/^data:image\/\w+;base64,/, ""), 'base64');
    const type = image.split(';')[0].split('/')[1];
    const params = {
        Bucket: process.env.BUCKET,
        Key: `${AccountId}.${type}`, // AccountId must be defined elsewhere in your code
        Body: buf,
        // Object metadata goes in the request params; the second argument to
        // upload() is only for managed-upload options such as partSize/queueSize.
        ACL: 'private',
        CacheControl: 'max-age=86400',
        ContentType: `image/${type}`,
        ContentEncoding: 'base64',
    };
    await s3.upload(params).promise();
};
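One more thing worth checking, assuming the function is invoked through an API Gateway proxy integration (the question doesn't say): in that case event.body arrives as a JSON string, so event.body.imageBinary would be undefined until the body is parsed. A minimal sketch:

const body = typeof event.body === 'string' ? JSON.parse(event.body) : event.body;
const image = body.imageBinary;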

Related

Upgrade aws-sdk to version 3 - streaming S3 upload

I am trying to upgrade a program to aws-sdk version 3, but I am getting the error:
NotImplemented: A header you provided implies functionality that is not implemented
The function get_s3_stream needs to return a stream to the main program.
With SDK version 2, this works:
var AWS = require("aws-sdk");
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
const stream = require('stream');

function get_s3_stream() {
    var pass = new stream.PassThrough();
    var params = {Bucket: "bucketname", Key: "testfile1.txt", Body: pass};
    s3.upload(params, function(err, data) {
        console.log(err, data);
    });
    return pass;
}

const inputStream = stream.Readable.from(["input string1"]);
const outStream = get_s3_stream();
inputStream.pipe(outStream);
With SDK version 3, this doesn't work:
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
const s3Client = new S3Client({ region: "us-east-1" });
const stream = require('stream');

function get_s3_stream() {
    const pass = new stream.PassThrough();
    var params = {Bucket: "bucketname", Key: "testfile2.txt", Body: pass};
    s3Client.send(new PutObjectCommand(params, function(err, data) {
        console.log(err, data);
    }));
    return pass;
};

const inputStream = stream.Readable.from(["input string2"]);
const outStream = get_s3_stream();
inputStream.pipe(outStream);
How can a stream be sent to S3 with the new version?
Apparently this is a known issue: https://github.com/aws/aws-sdk-js-v3/issues/1920.
A workaround that lets you keep using passthrough streams is to use Upload from @aws-sdk/lib-storage:
const { S3Client } = require("@aws-sdk/client-s3");
const { Upload } = require('@aws-sdk/lib-storage');
const stream = require('stream');

const s3Client = new S3Client({ region: "us-east-1" });

function get_s3_stream() {
    const pass = new stream.PassThrough();
    const upload = new Upload({
        client: s3Client,
        params: {
            Bucket: 'bucketname',
            Key: 'testfile2.txt',
            Body: pass,
            ContentType: 'text/plain',
        },
    });
    // done() resolves once the upload has completed
    upload.done()
        .then((res) => {
            console.log(res);
        })
        .catch((err) => {
            console.error(err);
        });
    return pass;
}
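Usage stays the same as in the v2 version: pipe a readable stream into the returned PassThrough.

const inputStream = stream.Readable.from(["input string2"]);
const outStream = get_s3_stream();
inputStream.pipe(outStream);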

Is there any way to upload fluent-ffmpeg converted videos directly to S3 without storing them locally?

Is it possible to store ffmpeg output directly to S3 without first writing it to local (or any other) storage?
Below is my current code, which uses ffmpeg to convert the video format. The conversion part works, but I need to store its output directly in an S3 bucket. Does anyone have an idea how to approach this?
const AWS = require('aws-sdk');
const fs = require('fs');
const ffmpeg = require('fluent-ffmpeg');
const axios = require('axios');

const s3 = new AWS.S3({
    endpoint: 's3-ap-south-1.amazonaws.com', // put your region's endpoint
    accessKeyId: S3_ACCESS_KEY_ID,           // put your access key ID
    secretAccessKey: S3_ACCESS_SECRET_KEY,   // put your secret access key
    Bucket: S3_BUCKET_NAME,                  // put your bucket name
    signatureVersion: 'v4',
    region: 'ap-south-1'                     // put your region
});

var params = {
    Bucket: S3_BUCKET_NAME,
    Delimiter: '',
    Prefix: S3_STORE_PATH
};

s3.listObjects(params, function (err, data) {
    if (err) throw err;
    console.log(data);
    data.Contents.forEach(function (obj, index) {
        const file_name = obj.Key;
        const type = "mp4";
        console.log(obj.Key);
        const url = s3.getSignedUrl('getObject', {
            Bucket: S3_BUCKET_NAME,
            Key: obj.Key,
            Expires: signedUrlExpireSeconds
        });
        console.log("SIGNED URL= ", url);
        const filename = file_name.split('.').slice(0, -1).join('.');
        const localFileOutput = `${filename}.${type}`;
        // const localFileOutput = `${bucket_url}${filename}.${type}`;
        console.log(localFileOutput);
        const key = `${filename}.${type}`;
        const convert_video = async (req, res) => {
            await new Promise((resolve, reject) => {
                ffmpeg().input(url)
                    .toFormat('mp4')
                    .output(localFileOutput)
                    .on('end', async () => {
                        const params = {
                            Bucket: S3_BUCKET_NAME,
                            Key: key,
                            Body: localFileOutput
                        };
                        // const fileContent = await fs.readFileSync(localFileOutput);
                        await s3.putObject(params).promise();
                        resolve();
                    }).run();
            });
            // res.send("success")
        };
        convert_video();
    });
});
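One possible approach (a sketch, not a verified solution): fluent-ffmpeg can pipe its output into a writable stream, and s3.upload() accepts a stream as Body, so a PassThrough stream can connect the two with no local file in between. Writing MP4 to a non-seekable stream generally requires fragmented-MP4 output flags. The names below (s3, S3_BUCKET_NAME, the signed url, key) reuse the question's variables.

const stream = require('stream');

function uploadConvertedVideo(inputUrl, key) {
    const pass = new stream.PassThrough();

    // s3.upload() streams the PassThrough contents to S3 as they arrive
    const uploadPromise = s3.upload({
        Bucket: S3_BUCKET_NAME,
        Key: key,
        Body: pass,
        ContentType: 'video/mp4'
    }).promise();

    // Pipe ffmpeg output straight into the PassThrough instead of a local file.
    // Fragmented-MP4 flags are needed because the destination is not seekable.
    ffmpeg()
        .input(inputUrl)
        .toFormat('mp4')
        .outputOptions(['-movflags frag_keyframe+empty_moov'])
        .on('error', (err) => pass.destroy(err))
        .pipe(pass, { end: true });

    return uploadPromise;
}

Inside the forEach loop this would take the place of convert_video, e.g. await uploadConvertedVideo(url, key).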

Extract zip file in S3 bucket using AWS Lambda functions in Node.js: "Error: Invalid CEN header (bad signature)"

I am struggling with unzipping contents in AWS S3. S3 does not provide the functionality to unzip a zip archive in the bucket directly, and I am facing the following error (my upload and extraction code is below):
"Error: Invalid CEN header (bad signature)"
Any advice or guidance would be greatly appreciated.
My Node.js code to upload the zip file:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({signatureVersion: 'v4'});

exports.handler = async (event, context) => {
    const bucket = 'bucket-name';
    console.log(event);
    const body = event.body;
    const key = JSON.parse(body).key;
    console.log(key);
    const params = {
        Bucket: bucket,
        Key: key,
        ContentType: 'application/zip',
        Expires: 60
    };
    try {
        const signedURL = await s3.getSignedUrl('putObject', params);
        const response = {
            err: {},
            body: "url send",
            url: signedURL
        };
        return response;
    } catch (e) {
        const response = {
            err: e.message,
            body: "error occured"
        };
        return response;
    }
};
My Node.js code to extract the zip file:
const S3Unzip = require('s3-unzip');

exports.s3_unzip = function(event, context, callback) {
    const filename = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const bucketname = event.Records[0].s3.bucket.name;
    console.log(event.Records[0].s3.object.key);
    new S3Unzip({
        bucket: bucketname,
        file: filename,
        deleteOnSuccess: true,
        verbose: true,
    }, function(err, success) {
        if (err) {
            callback(err);
        } else {
            callback(null);
        }
    });
}
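"Invalid CEN header (bad signature)" generally means the extractor could not find a valid zip central-directory signature, which usually points to the object not being a well-formed zip (for example, the body was uploaded as text or base64 rather than raw binary), so it is worth downloading the uploaded object and checking that it opens locally. As an alternative extraction approach, here is a hedged sketch using the unzipper package to stream entries out of the S3 object and write them back to the bucket; the "unzipped/" prefix is just an illustrative choice.

const AWS = require('aws-sdk');
const unzipper = require('unzipper');

const s3 = new AWS.S3();

async function extractZip(bucket, key) {
    // Stream the zip from S3 and iterate over its entries without buffering the whole archive
    const zipStream = s3.getObject({ Bucket: bucket, Key: key }).createReadStream();
    const entries = zipStream.pipe(unzipper.Parse({ forceStream: true }));

    for await (const entry of entries) {
        if (entry.type === 'File') {
            // Upload each file entry back to the bucket under an "unzipped/" prefix
            await s3.upload({
                Bucket: bucket,
                Key: `unzipped/${entry.path}`,
                Body: entry
            }).promise();
        } else {
            entry.autodrain(); // skip directory entries
        }
    }
}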

CSV uploaded to S3 bucket is not in the correct format / data not displayed correctly

const fs = require('fs');
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
    accessKeyId: 'key',
    secretAccessKey: 'accesskey'
});

const fileName = 'atest.csv';

const uploadFile = () => {
    fs.readFile(fileName, (err, data) => {
        if (err) throw err;
        const params = {
            Bucket: 'mybucket1s-resized', // pass your bucket name
            Key: 'FlightReporttest.csv',  // file will be saved as testBucket/contacts.csv
            ContentDisposition: 'attachment',
            Body: JSON.stringify(data, null, 2)
        };
        s3.upload(params, function(s3Err, data) {
            if (s3Err) throw s3Err;
            console.log(`File uploaded successfully at ${data.Location}`);
        });
    });
};

uploadFile();
It uploads to the bucket fine, but when I download that CSV it is in a different format (screenshot: https://i.stack.imgur.com/DF46l.png).
Any suggestions? If I change the Body to anything else it throws an error:

Body: stream
      ^
ReferenceError: stream is not defined
    at fs.readFile (C:\Users\cloudone\Desktop\Work\Projects info\Pageman_Flight_Reports\flightreportupload\index.js:17:16)
    at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:511:3)
You wouldn't want to JSON.stringify() a CSV file on upload. If you remove that from your code it will upload correctly; just pass data as the Body.
I've also tidied up the code a little below:
const AWS = require('aws-sdk');
const fs = require('fs');

const s3 = new AWS.S3({
    accessKeyId: 'key',
    secretAccessKey: 'accesskey',
});

const uploadFile = fileName => {
    const params = {
        Bucket: 'mybucket1s-resized',
        Key: 'FlightReporttest.csv',
        ContentDisposition: 'attachment',
        Body: fs.readFileSync(fileName),
    };
    return s3.upload(params).promise();
};

uploadFile('atest.csv');
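If the object should also be served with the right MIME type when downloaded or previewed, an explicit ContentType can be added to the params; this is an optional addition, not part of the original answer.

const params = {
    Bucket: 'mybucket1s-resized',
    Key: 'FlightReporttest.csv',
    ContentDisposition: 'attachment',
    ContentType: 'text/csv', // optional: explicit MIME type for the CSV
    Body: fs.readFileSync(fileName),
};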

Download AWS S3 file directly to a directory

I want to automatically save the file I'm downloading from AWS S3 to my application folder. Right now this can only be done manually. Code below:
router.get("/download", (req, res) => {
    // File S3 URL
    var fileKey = "key";
    AWS.config.update({
        accessKeyId: IAM_USER_KEY,
        secretAccessKey: IAM_USER_SECRET,
        Bucket: BUCKET_NAME
    });
    var s3 = new AWS.S3();
    var file = fs.createWriteStream("test.csv");
    var options = {
        Bucket: "name",
        Key: fileKey
    };
    res.attachment(fileKey);
    var fileStream = s3
        .getObject(options)
        .createReadStream()
        .on("error", error => {
            console.log(error);
            res.json({ error: "An error has occured, check console." });
        })
        .on("httpData", function(data) {
            file.write(data);
        })
        .on("httpDone", function() {
            file.end();
        });
    fileStream.pipe(res);
    // fse.writeFileSync("text.csv");
});
As mentioned before, the file can be downloaded and saved manually. But how can I write the file and save it automatically to a specific folder?
Thank you
Here's an example of downloading an S3 object to a specific local file:
const AWS = require('aws-sdk');
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
var params = {Bucket: 'mybucket', Key: 'test.csv'};
var file = require('fs').createWriteStream('/tmp/test.csv');
s3.getObject(params).createReadStream().pipe(file);
The code for jarmod's answer, promisified:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({apiVersion: '2006-03-01'});
const params = {Bucket: 'mybucket', Key: 'test.csv'};
const file = require('fs').createWriteStream('/tmp/test.csv');
new Promise((resolve, reject) => {
    const pipe = s3.getObject(params).createReadStream().pipe(file);
    pipe.on('error', reject);
    pipe.on('close', resolve);
});
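To actually wait for the download to finish, the same promise can be wrapped in a helper and awaited; a small sketch (the function name is just illustrative):

const downloadToFile = (params, localPath) =>
    new Promise((resolve, reject) => {
        const file = require('fs').createWriteStream(localPath);
        const pipe = s3.getObject(params).createReadStream().pipe(file);
        pipe.on('error', reject);
        pipe.on('close', resolve);
    });

(async () => {
    await downloadToFile({ Bucket: 'mybucket', Key: 'test.csv' }, '/tmp/test.csv');
    console.log('download complete');
})();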
