Cannot get data.Location while uploading to AWS s3 bucket - node.js

I successfully upload my files to the AWS S3 bucket, but I cannot get their location back so I can store it in my DB.
Here is my function:
const uploadFile = (filename, key) => {
  return new Promise((resolve, reject) => {
    fs.readFile(filename, (err, data) => {
      if (err) {
        reject(err);
      }
      const params = {
        Bucket: "BUCKET_NAME",
        Key: `student_${key}`, // File name you want to save as in S3
        Body: data,
        ACL: 'public-read'
      };
      s3.upload(params, function(err, data) {
        if (err) {
          throw err;
        }
        resolve(data.Location);
      });
    });
  });
};
My router:
uploadFile.uploadFile(request.file.path, request.file.originalname).then((addr) => {
  student_photo = addr;
})
Eventually I get an empty string (when I console.log it).

The solution I found was to wrap uploadFile in a Promise, which in turn makes it "thenable". So in the .then() part I make the query to store the info in my SQL database.
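For illustration, a minimal sketch of that pattern; the db.query helper and the students table are hypothetical placeholders, not part of the original code:

uploadFile.uploadFile(request.file.path, request.file.originalname)
  .then((addr) => {
    // the S3 location only exists here, after the upload has finished
    student_photo = addr;
    // hypothetical query helper; run the SQL inside .then()
    return db.query('UPDATE students SET photo = ? WHERE id = ?', [student_photo, studentId]);
  })
  .catch((err) => console.error(err));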

Related

Node: Wait for the Python script to write the file, then upload it to S3

I have written the following code, where I create a file with a Python script, upload it to S3, and then give the user the ability to download it.
exports.createFeature = async (req, res, next) => {
  let retourUrl = await uploadFile(req.body)
  res.status(201).json(retourUrl)
};

function uploadFile(feature) {
  return new Promise(async (resolve, reject) => {
    let options = {
      scriptPath: 'pathToDcript',
      args: [arg1, arg2, arg3]
    };
    PythonShell.run('script.py', options, function (err) {
      if (err) throw err;
      console.log('file has been created !');
      // read the file
      let contents = fs.readFileSync('pathToFile', {encoding: 'utf8', flag: 'r'});
      // convert it to buffer
      const fileContent = Buffer.from(contents, "utf-8");
      // Setting up S3 upload parameters
      let key = keyUserData + feature.userId + '/fileName'
      const params = {
        Bucket: bucket,
        Key: key, // File name you want to save as in S3
        Body: fileContent
      };
      // Uploading files to the bucket
      s3.upload(params, function(err, data) {
        if (err) {
          throw err;
        }
        //console.log(`File uploaded successfully. ${data.Location}`);
      });
      // delete the file
      fs.unlinkSync('pathToFile');
      // get url for download
      const presignedURL = s3.getSignedUrl('getObject', {
        Bucket: bucket,
        Key: key,
        Expires: 60 * 5
      })
      resolve(presignedURL)
    })
  });
}
But I get the download URL before the file has been uploaded to S3. Any idea how I can make it wait until everything finishes?
If you want to use s3.upload with a callback, you need to change your code as shown below.
exports.createFeature = async (req, res, next) => {
  let retourUrl = await uploadFile(req.body)
  res.status(201).json(retourUrl)
};

function uploadFile(feature) {
  return new Promise((resolve, reject) => {
    let options = {
      scriptPath: 'pathToDcript',
      args: [arg1, arg2, arg3]
    };
    PythonShell.run('script.py', options, function (err) {
      // reject instead of throw: an exception thrown in this callback
      // would not reject the surrounding Promise
      if (err) return reject(err);
      console.log('file has been created !');
      // read the file
      let contents = fs.readFileSync('pathToFile', { encoding: 'utf8', flag: 'r' });
      // convert it to buffer
      const fileContent = Buffer.from(contents, "utf-8");
      // Setting up S3 upload parameters
      let key = keyUserData + feature.userId + '/fileName'
      const params = {
        Bucket: bucket,
        Key: key, // File name you want to save as in S3
        Body: fileContent
      };
      // Uploading files to the bucket
      s3.upload(params, function (err, data) {
        if (err) return reject(err);
        // delete the local file only after the upload has finished
        fs.unlinkSync('pathToFile');
        // get url for download
        const presignedURL = s3.getSignedUrl('getObject', {
          Bucket: bucket,
          Key: key,
          Expires: 60 * 5
        })
        //console.log(`File uploaded successfully. ${data.Location}`);
        resolve(presignedURL)
      });
    })
  });
}
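One caveat: a plain throw inside these callbacks would not reject the surrounding Promise, because the callbacks run after the Promise executor has already returned; calling reject(err), as above, is what propagates the error to the awaiting caller.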
The S3 upload method of the AWS SDK returns a managed upload object whose .promise() method gives a Promise that can be awaited.
For example:
await s3.upload(...).promise()
Note that in this case the callback function to the Python script should be changed to an async function, in order to allow the await syntax. For example:
PythonShell.run('script.py', options, async function (err)
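Put together, a sketch combining both suggestions, reusing the question's options, bucket, and key variables (all assumed to be defined as in the original code):

function uploadFile(feature) {
  return new Promise((resolve, reject) => {
    // async callback so the S3 upload can be awaited inside it
    PythonShell.run('script.py', options, async function (err) {
      if (err) return reject(err);
      try {
        const fileContent = fs.readFileSync('pathToFile');
        // wait for the upload to finish before deleting the file
        await s3.upload({ Bucket: bucket, Key: key, Body: fileContent }).promise();
        fs.unlinkSync('pathToFile');
        resolve(s3.getSignedUrl('getObject', { Bucket: bucket, Key: key, Expires: 60 * 5 }));
      } catch (e) {
        reject(e);
      }
    });
  });
}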

Query S3 json file in AWS

I have a JSON file uploaded to S3.
Then I wrote the following code to query this file:
const aws = require('aws-sdk');
const s3 = new aws.S3();
const bucket = 'hotels.mserver.online';
const objectKey = 'hotelsrates.json';

exports.handler = (event, context, callback) => {
  // TODO implement
  const response = getS3Objects(bucket, objectKey); // s3.listObjectsV2({}).promise();
  console.log(response);
};

function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise().then(file => { return file })
    .catch(error => { return error });
}
But the result I get is null.
I understand what you are trying to accomplish here but that is not the right way to do it.
function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise().then(file => { return file })
    .catch(error => { return error });
}
The part above still returns a promise object, which means you need to handle it accordingly. Instead of const response = getS3Objects(bucket, objectKey); you want to do
getS3Objects(bucket, objectKey).then(response => console.log(response));
Inside of your handler function.
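For example, a sketch of the adjusted handler (same bucket and objectKey as above):

exports.handler = (event, context, callback) => {
  getS3Objects(bucket, objectKey)
    .then(response => {
      console.log(response);
      callback(null, response);
    })
    .catch(callback);
};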
Furthermore, your usage of the s3.getObject function is incorrect: the first argument is a parameters object, and the second argument is a callback function.
s3.getObject(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data); // successful response
});
Therefore, in your case, you want to modify your getS3Objects function a bit. If you want to use promises, you can do it like this:
function getS3Objects(bucket, key) {
  return new Promise((resolve, reject) => {
    s3.getObject(
      {
        Bucket: bucket,
        Key: key,
        ResponseContentType: 'application/json'
      },
      (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      }
    );
  });
}
Another way that you can do this is as follows:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function readFile(Bucket, Key) {
  const params = {
    Bucket,
    Key,
    ResponseContentType: 'application/json',
  };
  const f = await s3.getObject(params).promise();
  return f.Body.toString('utf-8');
}

readFile('mybucket', 'xyz.json').then(console.log);
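Since the object is JSON, you will usually want to parse the returned string before querying it; f.Body is a Buffer, which readFile converts to a string. For example:

readFile('mybucket', 'xyz.json')
  .then(text => {
    const rates = JSON.parse(text); // turn the JSON text into an object
    console.log(rates);
  });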

AWS S3 putObject callback not firing

I have a lambda function trying to place an mp3 file into an S3 bucket; however, I am not seeing my file uploaded and, more strangely, do not see any logging/response from the callback.
My lambda/s3 bucket are all on the same AWS account and the bucket name is definitely correct.
Is there something I'm missing here? Or any explanation why my callback is not being fired?
exports.handler = async (event, context, callback) => {
  // prior setup
  console.log('about to putObject on s3');
  const s3BucketData = {
    Bucket: 'media-files',
    Key: fileName,
    Body: fileDataBuffer,
    ContentType: 'audio/mp3'
  };
  await s3.putObject(s3BucketData, (err, data) => {
    console.log('putObject callback executing');
    if (err) {
      console.log('err occurred storing to s3: ', err)
    } else {
      console.log(`${fileName} successfully uploaded`);
    }
    context.done();
  });
};
First of all, it's bad practice to stick your methods inside the handler function. Second, you have an issue with your runtime: you chose Node 8.10 with async/await support, but you are still trying to use callbacks.
I have some comments for you that I hope will help.
1) You can simply do this:
export async function handler(event)
{
  // body of your function
};
2) AWS SDK service methods can be promisified. You have to rewrite your s3 call. Take a look at the following snippet. And I've got a question: are you sure that you need the putObject method instead of upload?
try
{
  let s3 = new AWS.S3({ region: process.env.AWS_REGION, apiVersion: '2006-03-01' });
  let params =
  {
    Bucket: 'media-files',  // aws s3 bucket location (a full path)
    Key: fileName,          // file name/key
    Body: fileDataBuffer,   // entity to upload
    ACL: 'public-read',     // access policy
    ContentType: 'audio/mp3'
  };
  let s3Response = await s3.upload(params).promise();
  // request succeeded
  console.log(`File uploaded to S3 at ${s3Response.Bucket} bucket. File location: ${s3Response.Location}`);
  return s3Response.Location;
}
// request failed
catch (ex)
{
  console.error(ex);
}
If you want to stick with callbacks, then:
s3.upload(params, (err, data) =>
{
  console.log('upload callback executing');
  if (err)
  {
    console.error('err occurred storing to s3: ', err);
    return;
  }
  console.log(`${fileName} successfully uploaded`);
  return data; // note: this return value is discarded; wrap the call in a Promise to use it
});
I hope you'll find this helpful. Cheers!
The example below shows how to convert putObject to a promise:
exports.handler = (event, context, callback) => {
  console.log('about to putObject on s3');
  const s3BucketData = {
    Bucket: 'media-files',
    Key: fileName,
    Body: fileDataBuffer,
    ContentType: 'audio/mp3'
  };
  S3.putObject(s3BucketData).promise()
    .then(data => {
      console.log('complete:PUT Object', data);
      callback(null, data);
    })
    .catch(err => {
      console.log('failure:PUT Object', err);
      callback(err);
    });
};
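The same conversion also works with async/await; a sketch, reusing the question's fileName and fileDataBuffer and returning the result instead of using the callback:

exports.handler = async (event) => {
  const s3BucketData = {
    Bucket: 'media-files',
    Key: fileName,
    Body: fileDataBuffer,
    ContentType: 'audio/mp3'
  };
  try {
    // resolves once the object has been written to the bucket
    const data = await S3.putObject(s3BucketData).promise();
    console.log('complete:PUT Object', data);
    return data;
  } catch (err) {
    console.log('failure:PUT Object', err);
    throw err;
  }
};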

Uploading PDF Content Into An S3 Bucket

I'm trying to download PDF content with data from a remote location and upload the content into S3 as a PDF file. I'm using Node.js, in the context of an AWS Lambda. The s3.putObject call resolves successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data was passed to s3.putObject.
Here is my code.
const request = require('request')

const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
  url: viewUrl,
  headers: {
    'Content-Type': 'application/pdf'
  }
};

request(options, function(err, res, body) {
  if (err) { return console.log(err) }
  const base64data = new Buffer(body, 'binary');
  const params = {
    Bucket: "myS3bucket",
    Key: "my-pdf.pdf",
    ContentType: "application/pdf",
    Body: base64data,
    ACL: 'public-read'
  };
  s3.putObject(params, function(err, data) {
    if (err) {
      console.log(err);
    } else {
      callback(null, JSON.stringify(data))
    }
  })
});
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?
Can you try this code? :)
const AWS = require('aws-sdk')
const request = require('request')
const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
  // encoding: null makes request return the body as a binary Buffer
  return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
    function(err, res, body) {
      if (err)
        return reject({ status: 500, error: err })
      return resolve({ status: 200, body: body })
    })
})

promise.then((pdf) => {
  if (pdf.status == 200) {
    console.log('uploading file..')
    s3.putObject({
      Bucket: process.env.bucket,
      Body: pdf.body,
      Key: 'my-pdf.pdf',
      ACL: 'public-read'
    }, (err, data) => {
      if (err)
        console.log(err)
      else
        console.log('uploaded')
    })
  }
})
The key difference is encoding: null in the request options: without it, request decodes the binary PDF body as a UTF-8 string, corrupting the data before it ever reaches S3. I'll be attentive to anything; hope this helps.

How can I return data from a callback function in Node.js

I am trying to create a function that returns either the error data from AWS or the { ETag: '"74..."' } data response from the callback. This code currently writes my buffer file to the S3 bucket, but I want to return the ETag number or the error data from the function, and I keep getting undefined. Any help would be appreciated.
function aws(file, name) {
  var s3 = new AWS.S3();
  s3.putObject({
    Bucket: 'Bucket-Name',
    ACL: 'public-read',
    Key: name,
    Body: file
  }, function(err, data) {
    if (err) {
      console.log('Something went wrong')
      return err;
    } else {
      console.log('Successfully uploaded image');
      console.log(data);
      return data;
    }
  });
}

var response = aws(buffer, 'file.png');
Solved my problem with a Promise. Hope this helps someone else someday :)
const aws = function (file, name) {
  return new Promise((resolve, reject) => {
    let s3 = new AWS.S3();
    s3.putObject({
      Bucket: 'Bucket-Name',
      ACL: 'public-read',
      Key: name,
      Body: file
    }, function (err, data) {
      if (err) {
        console.log('Something went wrong')
        reject(err);
      } else {
        console.log('Successfully uploaded image');
        resolve(data);
      }
    });
  });
}

aws(buffer, 'file.png')
  .then(response => {
    res.set({ 'Content-Type': 'application/json' });
    res.status(200);
    res.send(response);
  })
  .catch(console.error);
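As a shorter alternative, the SDK's built-in .promise() avoids wrapping the callback manually; a sketch, using the same buffer and Express-style res as above:

async function awsUpload(file, name) {
  const s3 = new AWS.S3();
  // putObject(params).promise() resolves with the data response, e.g. { ETag: ... }
  return s3.putObject({
    Bucket: 'Bucket-Name',
    ACL: 'public-read',
    Key: name,
    Body: file
  }).promise();
}

awsUpload(buffer, 'file.png')
  .then(data => res.status(200).json(data))
  .catch(console.error);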
