Upload a file from S3 to YouTube using Node.js

I am uploading a file from S3 to YouTube using this code:
s3.setBucket('MyBucket');
s3.get('MyBucket/5/' + filename, null, 'stream', function(err, response) {
  googleapis.discover('youtube', 'v3').execute(function(err, client) {
    var metadata = {
      snippet: { title: title, description: description },
      status: { privacyStatus: 'public' }
    };
    client
      .youtube.videos.insert({ part: 'snippet,status' }, metadata)
      .withMedia('video/mp4', response)
      .withAuthClient(auth)
      .execute(function(err, result) {
        if (err) console.log(err);
        else console.log(JSON.stringify(result, null, ' '));
        response.redirect('/share?set=yt&id=' + result.id);
      });
  });
});
This does not work because of the line:
.withMedia('video/mp4', response)
It replaces the original one, which works:
fs.readFileSync('/temp/myfile.png')
In other words: if I upload a local file from my laptop, it works, because I'm passing what the filesystem call returns.

In case anyone is looking for the answer, here it is:
var googleapis = require('googleapis'),
    OAuth2 = googleapis.auth.OAuth2,
    ytdapi = googleapis.youtube('v3'),
    AWS = require('aws-sdk'),
    s3 = new AWS.S3();

var s3data = {
  Bucket: 'BUCKET_NAME',
  Key: 'VIDEO_NAME'
};

s3.getObject(s3data, function (err, data) {
  var params = {
    auth: oauth2Client,
    part: 'snippet,status',
    resource: {
      snippet: {
        title: 'Title',
        description: 'Description'
      },
      status: {
        privacyStatus: 'public'
      }
    },
    media: {
      mimeType: 'video/mp4',
      body: data.Body
    }
  };
  ytdapi.videos.insert(params, function (err, result) {
    if (err) console.log(err);
    else console.log(JSON.stringify(result, null, ' '));
  });
});

This question is a bit old, but here is how I uploaded a video stored in S3 to YouTube using Node.js v6.10.3.
const AWS = require('aws-sdk'); // v1.6.0
const s3 = new AWS.S3({ region: 'eu-west-1', apiVersion: '2006-03-01' });
const { google } = require('googleapis'); // v26.0.1
const YoutubeApi = google.youtube('v3');

const s3data = {
  Bucket: bucketName,
  Key: fileName
};
let fileStream = s3.getObject(s3data).createReadStream();

const params = {
  auth: oauth2Client, // create this using 'google-auth-library'
  part: 'snippet,status',
  resource: {
    snippet: {
      title: 'Title',
      description: 'description'
    },
    status: {
      privacyStatus: 'private'
    }
  },
  media: {
    mimeType: 'video/mp4',
    body: fileStream // stream-to-stream copy
  }
};

YoutubeApi.videos.insert(params, function (err, result) {
  if (err) {
    console.error('error');
    console.error(err);
  } else {
    console.log('success');
  }
});
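The oauth2Client referenced above is assumed to exist already; here is a minimal sketch of constructing it with the OAuth2 client bundled with googleapis, assuming you already have OAuth credentials and a refresh token (the placeholder strings are hypothetical):
const { google } = require('googleapis');

const oauth2Client = new google.auth.OAuth2(
  'YOUR_CLIENT_ID',
  'YOUR_CLIENT_SECRET',
  'YOUR_REDIRECT_URL'
);
// Attach a previously obtained refresh token so the client can mint access tokens.
oauth2Client.setCredentials({ refresh_token: 'YOUR_REFRESH_TOKEN' });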

Related

How to delete a file image on amazon s3 when

I have a Program model, and the program has an image attribute which I upload with multer-s3 when creating the Program.
The challenge I am facing now is that when I delete the program, everything gets deleted on my local machine, but the file (image) still exists in my AWS S3 console. How do I get the file deleted both in my database and on Amazon S3?
Here are my Program routes.
This is how I delete my Program:
router.delete("/:id/delete", function (req, res) {
  const ObjectId = mongoose.Types.ObjectId;
  let query = { _id: new ObjectId(req.params.id) };
  Program.deleteOne(query, function (err) {
    if (err) {
      console.log(err);
    }
    res.send("Success");
  });
});
And this is how I create my program:
router.post("/create", upload.single("cover"), async (req, res, next) => {
  const fileName = req.file != null ? req.file.filename : null;
  const program = new Program({
    programtype: req.body.programtype,
    title: req.body.title,
    description: req.body.description,
    programImage: req.file.location,
  });
  try {
    console.log(program);
    const programs = await program.save();
    res.redirect("/programs");
  } catch {
    if (program.programImage != null) {
      removeprogramImage(program.programImage);
    }
    res.render("programs/new");
  }
});
Looking through the multer-s3 repo, I can't find anything that mentions deleting from S3. There is this function in the source code, but I can't figure out how to use it.
You could try using the AWS SDK directly via deleteObject:
const s3 = new aws.S3({
  accessKeyId: 'access-key-id',
  secretAccessKey: 'access-key',
  Bucket: 'bucket-name',
});

s3.deleteObject({ Bucket: 'bucket-name', Key: 'image.jpg' }, (err, data) => {
  console.error(err);
  console.log(data);
});
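To tie this into the delete route from the question, here is a rough sketch; it assumes you stored the S3 object key on the Program document (the question actually stores req.file.location, a full URL, so you would first need to extract the key from it, as the answers below cover):
router.delete("/:id/delete", async function (req, res) {
  const program = await Program.findById(req.params.id);
  // Assumes program.programImage holds the S3 object key (e.g. req.file.key from multer-s3).
  await s3.deleteObject({ Bucket: "bucket-name", Key: program.programImage }).promise();
  await Program.deleteOne({ _id: program._id });
  res.send("Success");
});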
I had exactly the same problem ("the file (image) still exists in my AWS S3 console"), and it turned out to be caused by passing the image location instead of the image key.
When uploading the image to AWS, here is the response:
{
  fieldname: 'name',
  originalname: 'apple.png',
  encoding: '7bit',
  mimetype: 'image/png',
  size: 59654,
  bucket: 'my-bucket-name',
  key: 'apple-1426277135446.png', // => what I needed to pass as (Key)
  acl: 'public-read',
  contentType: 'application/octet-stream',
  contentDisposition: null,
  storageClass: 'STANDARD',
  serverSideEncryption: null,
  metadata: null,
  location: 'https://my-bucket-name.Xx.xu-eXst-3.amazonaws.com/apple-1426277135446.png', // => this is what I was passing to deleteObject as "Key"
  etag: '"CXXFE*#&SHFLSKKSXX"',
  versionId: undefined
}
My problem was that I was passing the image location instead of the image name (key) to the deleteObject function:
s3.deleteObject({ Bucket: 'bucket-name', Key: 'image.jpg' }, (err, data) => {
  // The Key argument has to be the filename with extension, without the
  // URL prefix like https://my-bucket-name.s3.ff-North-1.amazonaws.com/
  console.error(err);
  console.log(data);
});
So eventually I extracted the name of the file (image) with its extension and passed that to the function above.
Here is what I used, taken from this answer:
function parseUrlFilename(url, defaultFilename = null) {
  // e.g. 'https://my-bucket-name.Xx.xu-eXst-3.amazonaws.com/apple-1426277135446.png'
  // No need to change "https://example.com"; it's only present to allow for processing relative URLs.
  let filename = new URL(url, "https://example.com").href.split("#").shift().split("?").shift().split("/").pop();
  if (!filename) {
    if (defaultFilename) {
      filename = defaultFilename;
    } else {
      // No default filename provided; use a pseudorandom string.
      filename = Math.random().toString(36).substr(2, 10);
    }
  }
  // Result: 'apple-1426277135446.png'
  return filename;
}
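A short usage sketch, assuming programImage holds the full S3 URL and s3 is an AWS.S3 instance (the bucket name is a placeholder):
const key = parseUrlFilename(program.programImage);
s3.deleteObject({ Bucket: 'my-bucket-name', Key: key }, (err, data) => {
  if (err) console.error(err);
  else console.log('deleted', key);
});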
I had exactly the same problem and fixed it with the following code:
s3.deleteObjects(
  {
    Bucket: 'uploads-images',
    Delete: {
      Objects: [{ Key: 'product-images/slider-image.jpg' }],
      Quiet: false,
    },
  },
  function (err, data) {
    if (err) console.log('err ==>', err);
    console.log('delete successfully', data);
    return res.status(200).json(data);
  }
);
This works exactly for me.
Example of deleting a file by its URL (file location) on the Amazon server.
This code lets you derive the fileKey from the URL.
First you need to install urldecode:
npm i urldecode
public async deleteFile(location: string) {
  let fileKey = decoder(location);
  const datas = fileKey.split('amazonaws.com/');
  fileKey = datas.pop();
  const params = {
    Bucket: 'Your Bucket',
    Key: fileKey,
  };
  await this.AWS_S3.deleteObject(params).promise();
}
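As an alternative that avoids the urldecode dependency, Node's built-in URL class can derive the key, assuming the usual virtual-hosted-style https://bucket.s3.region.amazonaws.com/key URLs (deleteFileByUrl is just a hypothetical name):
public async deleteFileByUrl(location: string) {
  // pathname is "/key"; strip the leading slash and decode any percent-escapes.
  const fileKey = decodeURIComponent(new URL(location).pathname.slice(1));
  await this.AWS_S3.deleteObject({ Bucket: 'Your Bucket', Key: fileKey }).promise();
}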

Missing signature, uploader from createPresignedPost from aws-sdk-js

I get a missing-signature error from the uploader when using createPresignedPost from aws-sdk with the code below. I am not getting the result needed to upload the file to S3 per react-dropzone-uploader, and the upload fails with a 403 error from AWS S3.
const params = {
  Bucket: myBucket,
  Key: myKey,
  Fields: {
    Key: myKey,
    AWSAccessKeyId: process.env.AWS_ACCESS_KEY_ID,
    acl: "public-read",
  },
  Expires: signedUrlExpireSeconds,
  Conditions: [
    ['content-length-range', 0, 100000000], // 100 MB
    { 'acl': 'public-read' }
  ]
};

s3.createPresignedPost(params, function(err, data) {
  if (err) {
    console.error('Presigning post data encountered an error', err);
  } else {
    // console.log('The post data is', data);
    data.fileUrl = data.url + "/" + data.fields.key;
    data.uploadUrl = data.url + "/";
    res.status(200).send(
      new apiResponse.responseObject(200, data, null).getResObject()
    );
  }
});
https://react-dropzone-uploader.js.org/docs/s3#s3-using-put-instead-of-post
You don't need to pass AWSAccessKeyId inside the params when generating the presigned URL. Also the 'acl' should be outside the Fields property.
return new Promise((resolve, reject) => {
  const params = {
    Bucket: myBucket,
    Fields: {
      key: myKey
    },
    Expires: signedUrlExpireSeconds,
    Conditions: [
      ['content-length-range', 0, 10000000], // 10 MB
      { 'acl': 'public-read' }
    ]
  };
  s3.createPresignedPost(params, (err, data) => {
    if (err) reject(err);
    else resolve(data);
  });
});
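On the client side, the presigned POST only succeeds if every field returned by createPresignedPost is sent along with the upload, and the file must be the last form field. A rough sketch using the browser FormData/fetch APIs, where data is the object resolved above and file is the File selected by the user:
const formData = new FormData();
// Copy every policy field returned by createPresignedPost into the form.
Object.entries(data.fields).forEach(([name, value]) => formData.append(name, value));
formData.append('file', file); // the file has to come after the policy fields
await fetch(data.url, { method: 'POST', body: formData });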

AWS service's textract method "getDocumentTextDetection" is only returning "JobStatus" and nothing else in response

I am trying to extract text from a PDF file with the aws-sdk Textract service's Textract.getDocumentTextDetection method, passing the JobId returned by startDocumentTextDetection.
First I upload the image/PDF using the S3 service's upload method, which runs successfully and returns a response like this:
{
  ETag: '"9d022c9c496f2663d2fgv45d181e475"',
  Location: 'https://bucket-name.s3.amazonaws.com/beacon-logo.png',
  key: 'beacon-logo.png',
  Key: 'beacon-logo.png',
  Bucket: 'bucket-name'
}
After this I call Textract's startDocumentTextDetection method to get the JobId, which returns:
{ JobId: '11df404ce0fa7c958ba23579890a52388132a01d326802a1eggh76915c55qw1e' }
Then I call getDocumentTextDetection to extract the words from the uploaded file, but it only returns this object:
{ JobStatus: 'IN_PROGRESS' }
which is not what I expect.
getDocumentTextDetection should return something like this:
https://docs.aws.amazon.com/textract/latest/dg/API_GetDocumentTextDetection.html
Below is all the code written so far:
const textract = new AWS.Textract({
  apiVersion: "2018-06-27",
  accessKeyId: "my-access-key-id",
  secretAccessKey: "my-secret-access-key",
  region: "region",
  ACL: "public-read"
});
const s3 = new AWS.S3();

router.post("/", function (req, res, next) {
  const paramsS3Upload = {
    Bucket: "bucket-name",
    Key: req.files.document.name,
    ACL: "public-read",
    ContentType: req.files.document.mimetype,
    Body: req.files.document.data
  };
  var s3options = {
    partSize: 10 * 1024 * 1024,
    queueSize: 1
  };
  s3.upload(paramsS3Upload, s3options, function (s3Err, s3data) {
    if (s3Err) throw s3Err;
    var params = {
      DocumentLocation: {
        S3Object: {
          Bucket: s3data.Bucket,
          Name: s3data.Key,
          Version: s3data.VersionId
        }
      },
      NotificationChannel: {
        RoleArn: "arn:aws:iam::id:role/role-name",
        SNSTopicArn: "arn:aws:sns:region:id:topic-name"
      }
    };
    textract.startDocumentTextDetection(params, function (err, data) {
      if (err) console.log(err, err.stack);
      else {
        var textDetectionParams = {
          JobId: data.JobId
        };
        textract.getDocumentTextDetection(textDetectionParams, function (
          err,
          textDetectData
        ) {
          if (err) console.log(err, err.stack);
          else {
            console.log(textDetectData); // Not getting proper response here!!
            res.send({
              data: textDetectData
            });
          }
        });
      }
    });
  });
});
The expected output should look like the example here:
https://docs.aws.amazon.com/textract/latest/dg/API_GetDocumentTextDetection.html. I am not able to figure out what's going wrong.
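For what it's worth, Textract text detection is asynchronous, so getDocumentTextDetection keeps returning { JobStatus: 'IN_PROGRESS' } until the job finishes; calling it immediately after startDocumentTextDetection is expected to behave this way. Below is a rough polling sketch (the 5-second interval is arbitrary), as an alternative to waiting for the SNS notification configured in NotificationChannel:
function waitForTextDetection(jobId, callback) {
  textract.getDocumentTextDetection({ JobId: jobId }, function (err, data) {
    if (err) return callback(err);
    if (data.JobStatus === 'IN_PROGRESS') {
      // Job still running; check again in a few seconds.
      return setTimeout(function () { waitForTextDetection(jobId, callback); }, 5000);
    }
    callback(null, data); // JobStatus is now SUCCEEDED or FAILED; Blocks contain the detected text.
  });
}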

s3PutObject has stopped working all of a sudden from Lambda

I have this code in my Lambda function
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
  const bucketName = "dhaval-upload";
  let data = {
    firstname: event.firstname,
    lastname: event.lastname,
    email: event.email,
    userAgent: event.userBrowser,
    userIP: event.userIP
  };
  let params = {
    Body: JSON.stringify(data),
    Bucket: bucketName,
    Key: event.email
  };
  s3.putObject(params);
  callback(null, { message: event.email });
};
This returns the success callback but does not write the object to S3.
This was working before, but now for some reason it does not.
Please note: I checked the CloudWatch logs and could not find the reason for the issue there.
Maybe you can try it like this; you should use callbacks / async in your code:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
  const bucketName = "dhaval-upload";
  let data = {
    firstname: event.firstname,
    lastname: event.lastname,
    email: event.email,
    userAgent: event.userBrowser,
    userIP: event.userIP
  };
  let params = {
    Body: JSON.stringify(data),
    Bucket: bucketName,
    Key: event.email
  };
  // s3.putObject(params);
  // callback(null, { message: event.email });
  // === it works like this ===
  s3.putObject(params, (err, data) => {
    if (err) callback(null, err);
    else callback(null, { message: event.email });
  });
};
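Alternatively, since every SDK call exposes a .promise() method, an async handler (available on the Node.js 8.10+ Lambda runtimes) reads a bit cleaner; a minimal sketch under the same assumptions:
exports.handler = async (event) => {
  const params = {
    Body: JSON.stringify({ firstname: event.firstname, lastname: event.lastname, email: event.email }),
    Bucket: "dhaval-upload",
    Key: event.email
  };
  await s3.putObject(params).promise(); // the handler returns only after the write completes
  return { message: event.email };
};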

How to Upload CSV file on S3 Bucket using NodeJS?

I am creating a CSV file on the fly from JSON content and uploading the generated CSV file to an S3 bucket, rather than saving the file locally first.
Below is my code snippet; using it, the CSV file gets uploaded to the S3 bucket, but it does not seem to be in the correct CSV format.
var uploadCSVFileOnS3Bucket = function(next, csvFileContent, results) {
  console.log("uploadCSVFileOnS3Bucket function started");
  var bufferObject = Buffer.from(JSON.stringify(csvFileContent));
  var filePath = configurationHolder.config.s3UploadFilePath;
  var s3 = new AWS.S3();
  var params = {
    Bucket: 'bucket_name',
    Key: filePath,
    Body: bufferObject,
    CacheControl: 'public, max-age=86400'
  };
  s3.upload(params, function(err, data) {
    if (err) {
      console.log("Error at uploadCSVFileOnS3Bucket function", err);
      next(err);
    } else {
      console.log("File uploaded Successfully");
      next(null, filePath);
    }
  });
};
Also, I am using "json2csv" npm module for generating csv file content from JSON.
Below is the code:
var generateCSVFile = function(next, callback, csvFileContent) {
  console.log("generateCSVFile function started", csvFileContent);
  if (csvFileContent && csvFileContent.length > 0) {
    var fields = ['field1', 'field2', 'field3', ........];
    var csv = json2csv({ data: csvFileContent, fields: fields });
    console.log('created', csv);
    next(null, csv);
  } else {
    next(null, []);
  }
}
Please let us know where the above code is going wrong.
Hi, I tried again with the header values below and it worked for me. Here is the code:
var s3 = new AWS.S3();
var params = {
  Bucket: bucketName,
  Key: filePath,
  Body: csvFileContent,
  ContentType: 'application/octet-stream',
  ContentDisposition: contentDisposition(filePath, {
    type: 'inline'
  }),
  CacheControl: 'public, max-age=86400'
};
s3.putObject(params, function(err, data) {
  if (err) {
    console.log("Error at uploadCSVFileOnS3Bucket function", err);
    next(err);
  } else {
    console.log("File uploaded Successfully");
    next(null, filePath);
  }
});
Add ContentDisposition: 'attachment' in your params as well.
Otherwise, you can also read the file and upload it to S3:
fs.readFile(FILEPATH, function(err, file_buffer) {
  var params = {
    Bucket: bucketname, // pass your bucket name
    Key: key,
    ContentDisposition: 'attachment',
    Body: file_buffer
  };
  s3.upload(params, function(err, data) {
    if (err) {
      console.log("Error in upload");
      callback(err, null);
    }
    if (data) {
      console.log("Upload Success", data);
      callback(null, data);
    }
  });
});
Using async/await:
import { parse } from "json2csv";

const saveCsv = async () => {
  const payload = [{ a: 1, b: 2 }];
  const csvPayload = parse(payload, { header: true, defaultValue: "-----" });
  const s3Key = 'filename.csv';
  const bucketName = 'bucket-name';
  await s3.put(bucketName, s3Key, csvPayload);
};
Just like that, without creating a Buffer or using JSON.stringify().
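Note that s3.put above looks like a project-specific wrapper rather than a stock aws-sdk method; with the plain SDK the equivalent would be roughly:
await s3.putObject({
  Bucket: bucketName,
  Key: s3Key,
  Body: csvPayload, // the CSV string goes in as-is; no Buffer or JSON.stringify needed
  ContentType: 'text/csv'
}).promise();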
Try this, it worked for me:
var fs = require('fs');
var AWS = require('aws-sdk');

AWS.config.update({
  region: '', // use appropriate region
  accessKeyId: '', // use your access key
  secretAccessKey: '' // use your secret key
});
var s3 = new AWS.S3();

fs.readFile('contacts.csv', 'utf-8', (err, data) => {
  if (err) throw err;
  const params = {
    Bucket: 'testBucket', // pass your bucket name
    Key: 'contacts.csv', // file will be saved as testBucket/contacts.csv
    Body: data
  };
  s3.upload(params, (s3Err, data) => {
    if (s3Err) throw s3Err;
    console.log(`File uploaded successfully at ${data.Location}`);
  });
});
