I am creating a CSV file on the fly from JSON content and uploading the generated CSV file to an S3 bucket, rather than first saving the file locally.
Below is my code snippet. Using this code, the CSV file is uploaded to the S3 bucket, but it does not seem to be in the correct CSV format.
var uploadCSVFileOnS3Bucket = function(next, csvFileContent, results) {
    console.log("uploadCSVFileOnS3Bucket function started");
    var bufferObject = new Buffer.from(JSON.stringify(csvFileContent));
    var filePath = configurationHolder.config.s3UploadFilePath;
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'bucket_name',
        Key: 's3UploadFilePath',
        Body: bufferObject,
        CacheControl: 'public, max-age=86400'
    }
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error at uploadCSVFileOnS3Bucket function", err);
            next(err);
        } else {
            console.log("File uploaded Successfully");
            next(null, filePath);
        }
    });
};
Also, I am using the "json2csv" npm module to generate the CSV file content from JSON.
Below is the code:
var generateCSVFile = function(next, callback, csvFileContent) {
    console.log("generateCSVFile function started", csvFileContent);
    if (csvFileContent && csvFileContent.length > 0) {
        var fields = ['field1', 'field2', 'field3', ........];
        var csv = json2csv({ data: csvFileContent, fields: fields });
        console.log('created', csv);
        next(null, csv);
    } else {
        next(null, []);
    }
}
Please let us know where the above code is going wrong.
Hi, I tried again with the header values below and it worked for me. Here is the code:
var s3 = new AWS.S3();
var params = {
    Bucket: bucketName,
    Key: filePath,
    Body: csvFileContent,
    ContentType: 'application/octet-stream',
    ContentDisposition: contentDisposition(filePath, {
        type: 'inline'
    }),
    CacheControl: 'public, max-age=86400'
}
s3.putObject(params, function(err, data) {
    if (err) {
        console.log("Error at uploadCSVFileOnS3Bucket function", err);
        next(err);
    } else {
        console.log("File uploaded Successfully");
        next(null, filePath);
    }
});
Add ContentDisposition: 'attachment' to your params as well.
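For instance, applied to the params from the original question, it might look like this (a sketch; the bucket name and key are placeholders, and ContentType is an added assumption rather than part of the original answer):

var params = {
    Bucket: 'bucket_name',              // placeholder bucket name
    Key: 's3UploadFilePath',            // placeholder key
    Body: csvFileContent,               // the CSV text itself
    ContentType: 'text/csv',            // assumed MIME type for a CSV file
    ContentDisposition: 'attachment',   // forces download instead of inline display
    CacheControl: 'public, max-age=86400'
};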
Otherwise, you can also read the file from disk and upload it to S3:
fs.readFile(FILEPATH, function(err, file_buffer) {
    var params = {
        Bucket: //bucketname,
        Key: key,
        ContentDisposition: 'attachment',
        Body: file_buffer
    };
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error in upload");
            callback(err, null)
        }
        if (data) {
            console.log("Upload Success", data);
            callback(null, data)
        }
    });
});
Using async/await:
import { parse } from "json2csv";

const saveCsv = async () => {
    const payload = [{ a: 1, b: 2 }];
    const csvPayload = parse(payload, { header: true, defaultValue: "-----" });
    const s3Key = 'filename.csv';
    const bucketName = 'bucket-name';
    await s3.put(bucketName, s3Key, csvPayload);
}
Just like that, without creating a Buffer or using JSON.stringify().
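Note that s3.put(bucketName, s3Key, csvPayload) is not a standard AWS SDK S3 method, so it is presumably a small custom wrapper (or another library). A minimal sketch of such a helper around the SDK's putObject might look like this (the wrapper shape and content type are assumptions):

const AWS = require("aws-sdk");
const s3Client = new AWS.S3();

// Hypothetical helper assumed by the snippet above: a promise-based put of a string body.
const s3 = {
    put: (bucket, key, body) =>
        s3Client.putObject({
            Bucket: bucket,
            Key: key,
            Body: body,              // the CSV string can be passed directly
            ContentType: "text/csv"  // assumed content type
        }).promise()
};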
Try this, it worked for me:
var fs = require('file-system');   // the built-in 'fs' module also provides readFile
var AWS = require('aws-sdk');

AWS.config.update({
    region: '',          // use the appropriate region
    accessKeyId: '',     // use your access key
    secretAccessKey: ''  // use your secret key
});

var s3 = new AWS.S3();

fs.readFile('contacts.csv', 'utf-8', (err, data) => {
    if (err) throw err;
    const params = {
        Bucket: 'testBucket',  // pass your bucket name
        Key: 'contacts.csv',   // file will be saved as testBucket/contacts.csv
        Body: data
    };
    s3.upload(params, (s3Err, data) => {
        if (s3Err) throw s3Err;
        console.log(`File uploaded successfully at ${data.Location}`);
    });
});
I have a Program model, and the Program has an image attribute which I upload with multer-s3 when creating the Program.
The challenge I am facing now is that when I delete a Program, everything gets deleted on my local machine, but I realized that the file (image) still exists on my AWS S3 console. How do I get the file deleted both from my database and from Amazon S3?
Here are my Program routes.
This is how I delete my Program:
router.delete("/:id/delete", function (req, res) {
    const ObjectId = mongoose.Types.ObjectId;
    let query = { _id: new ObjectId(req.params.id) };

    Program.deleteOne(query, function (err) {
        if (err) {
            console.log(err);
        }
        res.send("Success");
    });
});
and this is how I create my Program:
router.post("/create", upload.single("cover"), async (req, res, next) => {
    const fileName = req.file != null ? req.file.filename : null;
    const program = new Program({
        programtype: req.body.programtype,
        title: req.body.title,
        description: req.body.description,
        programImage: req.file.location,
    });
    try {
        console.log(program);
        const programs = await program.save();
        res.redirect("/programs");
    } catch {
        if (program.programImage != null) {
            removeprogramImage(program.programImage);
        }
        res.render("programs/new");
    }
});
Looking through the multer-s3 repo, I can't find anything that mentions deleting from S3. There is this function in the source code, but I can't figure out how to use it.
You could try using the AWS SDK directly via deleteObject:
const s3 = new aws.S3({
    accessKeyId: 'access-key-id',
    secretAccessKey: 'access-key',
    Bucket: 'bucket-name',
});

s3.deleteObject({ Bucket: 'bucket-name', Key: 'image.jpg' }, (err, data) => {
    console.error(err);
    console.log(data);
});
I had exactly the same problem, namely that "the file (image) still exists on my AWS S3 console". It can happen because of passing the image location instead of the image name.
When uploading the image to AWS, here is the response:
{
    fieldname: 'name',
    originalname: 'apple.png',
    encoding: '7bit',
    mimetype: 'image/png',
    size: 59654,
    bucket: 'my-bucket-name',
    key: 'apple-1426277135446.png', // => what I needed to pass as the Key
    acl: 'public-read',
    contentType: 'application/octet-stream',
    contentDisposition: null,
    storageClass: 'STANDARD',
    serverSideEncryption: null,
    metadata: null,
    location: 'https://my-bucket-name.Xx.xu-eXst-3.amazonaws.com/apple-1426277135446.png', // => this is what I was passing to deleteObject as the "key"
    etag: '"CXXFE*#&SHFLSKKSXX"',
    versionId: undefined
}
My problem was that I was passing the image location instead of the image name to the deleteObject function:

s3.deleteObject({ Bucket: 'bucket-name', Key: 'image.jpg' }, (err, data) => {
    // Key has to be the filename with its extension, without the URL prefix
    // (i.e. not https://my-bucket-name.s3.ff-North-1.amazonaws.com/)
    console.error(err);
    console.log(data);
});
So eventually I had to extract the name of the file (image) with its extension and pass it to the function above. Here is the function I used, taken from this answer:
function parseUrlFilename(url, defaultFilename = null) {
    // e.g. 'https://my-bucket-name.Xx.xu-eXst-3.amazonaws.com/apple-1426277135446.png'
    // No need to change "https://example.com"; it's only present to allow for processing relative URLs.
    let filename = new URL(url, "https://example.com").href
        .split("#").shift()
        .split("?").shift()
        .split("/").pop();
    if (!filename) {
        if (defaultFilename) {
            filename = defaultFilename;
        } else {
            // No default filename provided; use a pseudorandom string.
            filename = Math.random().toString(36).substr(2, 10);
        }
    }
    // resulting in e.g. 'apple-1426277135446.png'
    return filename;
}
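The extracted filename can then be passed as the Key; for example (a sketch: the bucket name is a placeholder, and programImage is the stored file location from the question above):

const key = parseUrlFilename(program.programImage); // e.g. 'apple-1426277135446.png'

s3.deleteObject({ Bucket: 'my-bucket-name', Key: key }, (err, data) => {
    if (err) console.error(err);
    else console.log('deleted', data);
});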
I had exactly the same problem and fixed it with the following code:
s3.deleteObjects(
    {
        Bucket: 'uploads-images',
        Delete: {
            Objects: [{ Key: 'product-images/slider-image.jpg' }],
            Quiet: false,
        },
    },
    function (err, data) {
        if (err) console.log('err ==>', err);
        console.log('delete successfully', data);
        return res.status(200).json(data);
    }
);
This works exactly for me.
Example of deleting a file from its URL (file location) on the Amazon server.
This code lets you derive the fileKey from the URL.
First you need to install urldecode:
npm i urldecode
public async deleteFile(location: string) {
    let fileKey = decoder(location)   // decoder comes from the urldecode package
    const datas = fileKey.split('amazonaws.com/')
    fileKey = datas.pop();
    const params = {
        Bucket: 'Your Bucket',
        Key: fileKey,
    };
    await this.AWS_S3.deleteObject(params).promise();
}
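Usage might look like this (a sketch; the instance name and URL are hypothetical, but the URL must contain 'amazonaws.com/' so the key can be split out):

await uploadService.deleteFile('https://your-bucket.s3.amazonaws.com/uploads/photo.png');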
const fs = require('fs');
const pdf = require('html-pdf');
const AWS = require("aws-sdk");
// Read HTML file
function readHTMLfile(path, callback) {
    fs.readFile(path, { encoding: 'utf-8' }, function(err, html) {
        if (err) {
            callback(err);
        } else {
            callback(null, html);
        }
    });
}
// Importing HTML file
readHTMLfile(__dirname + '/Nda.html', function(err, html) {
    if (err) {
        console.log('Error: ', err);
    } else {
        var handlebars = require("handlebars");
        var template = handlebars.compile(html);
        var replacables = {
            url: 'http://politicalmemorabilia.com/wp-content/uploads/2014/10/2000px-Aaron_Burr_signature.png'
        };
        var HtmlToSend = template(replacables);

        //pdf.create(HtmlToSend).toStream(function(err, stream){
        pdf.create(HtmlToSend).toBuffer(function(err, buf) {
            if (err) {
                return console.log(err);
            } else {
                console.log('This is a buffer:', buf);
                //stream.pipe(fs.createWriteStream('./Nda2.pdf'));
                //var readableStream = fs.readFileSync('./Nda2.pdf', 'base64')
                //console.log('Readable Stream: ', readableStream)
                //var buf = new Buffer('readableStream', 'base64')
                //console.log('Buffer String ', buf.toString())
                //console.log('Readable Stream ', readableStream)
                //console.log('Pdf Content', ./Nda2.pdf);

                AWS.config.update({
                    accessKeyId: "AKIAIRJAKT756L72NPBA",
                    secretAccessKey: "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"
                });
                var s3Bucket = new AWS.S3({ params: { Bucket: "vizitor-profilepicture" } });
                var options = {
                    Key: `nda/Nda`,
                    Body: 'buf',
                    ContentEncoding: "buffer",
                    //ContentDisposition: "inline",
                    ContentType: "application/pdf"
                };
                s3Bucket.upload(options, function(err, data) {
                    if (err) {
                        console.log(err);
                        console.log("Error uploading data: ", data);
                    } else {
                        console.log('Data: ', data);
                        console.log("data: ", data.Location);
                        console.log("succesfully uploaded pdf!");
                    }
                });
            }
        });
    }
});
I am importing an HTML file and converting it into a PDF using the "html-pdf" module, and I use Handlebars to embed a dynamic URL in the HTML file. I then convert the HTML to a buffer and want to upload it to Amazon S3 as a PDF, so I pass the buffer in the Body option. I am getting the correct buffer, but when I upload it to Amazon S3 as a PDF file, a link is generated; on opening the link in a browser I get the error "Failed to load PDF".
I have made the S3 bucket public, so that's not the issue here.
I'm trying to download PDF content with data from a remote location and upload the content into S3 as a PDF file. I'm using Node.js in the context of an AWS Lambda. The s3.putObject call resolves successfully, and a PDF file is saved into the S3 bucket as intended, but the document is blank when viewed, suggesting that not all of the data may have been passed to s3.putObject.
Here is my code.
const request = require('request')
const viewUrl = "https://link_to_downloadable_pdf/"
const options = {
    url: viewUrl,
    headers: {
        'Content-Type': 'application/pdf'
    }
};
request(options, function(err, res, body) {
    if (err) { return console.log(err) }
    const base64data = new Buffer(body, 'binary');
    const params = {
        Bucket: "myS3bucket",
        Key: "my-pdf.pdf",
        ContentType: "application/pdf",
        Body: base64data,
        ACL: 'public-read'
    };
    s3.putObject(params, function(err, data) {
        if (err) {
            console.log(err);
        } else {
            callback(null, JSON.stringify(data))
        }
    })
});
When I test the URL in Postman, it returns the PDF with data included. Any idea why the NodeJS code may not be doing the same thing?
Can you try this code? :)
import AWS from 'aws-sdk'
const request = require('request')

const s3 = new AWS.S3()

var promise = new Promise((resolve, reject) => {
    return request({ url: 'https://link_to_downloadable_pdf/', encoding: null },
        function(err, res, body) {
            if (err)
                return reject({ status: 500, error: err })
            return resolve({ status: 200, body: body })
        })
})

promise.then((pdf) => {
    if (pdf.status == 200) {
        console.log('uploading file..')
        s3.putObject({
            Bucket: process.env.bucket,
            Body: pdf.body,
            Key: 'my-pdf.pdf',
            ACL: 'public-read'
        }, (err, data) => {
            if (err)
                console.log(err)
            else
                console.log('uploaded')
        })
    }
})
The key difference is encoding: null in the request options: with it, request returns the body as a raw Buffer instead of a decoded string, so the binary PDF data reaches S3 intact. I'll be attentive to anything; hope this helps you.
My Lambda receives binary data of an image from my user in the request body (event.body).
I try uploading it to S3 with no error, but when I download it, the image is corrupted / can't be opened.
I also need to return the URL of the uploaded image to the user.
Please help!
module.exports.uploadImage = (event, context, callback) => {
    var buf = new Buffer(new Buffer(event.body).toString('base64').replace(/^data:image\/\w+;base64,/, ""), 'base64');
    var data = {
        Key: Date.now() + "",
        Body: buf,
        ContentEncoding: 'base64',
        ContentType: 'image/png',
        ACL: 'public-read'
    };
    s3Bucket.putObject(data, function(err, data) {
        if (err) {
            console.log(err);
            console.log('Error uploading data: ', data);
        } else {
            console.log('succesfully uploaded the image!');
        }
        callback(null, data);
    });
};
You can upload the image to S3 as a Node Buffer. The SDK does the conversion for you.
const AWS = require("aws-sdk");
var s3 = new AWS.S3();

module.exports.handler = (event, context, callback) => {
    var buf = Buffer.from(event.body.replace(/^data:image\/\w+;base64,/, ""), "base64");
    var data = {
        Bucket: "sample-bucket",
        Key: Date.now() + "",
        Body: buf,
        ContentType: 'image/png',
        ACL: 'public-read'
    };
    s3.putObject(data, function(err, data) {
        if (err) {
            console.log(err);
            console.log('Error uploading data: ', data);
        } else {
            console.log('succesfully uploaded the image!');
        }
        callback(null, data);
    });
};
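The question also asks for the URL of the uploaded image. One way to get it (a sketch, not part of the original answer, reusing the assumed bucket above) is to use s3.upload, whose callback data includes the object's Location:

var params = {
    Bucket: "sample-bucket",   // assumed bucket name, as above
    Key: Date.now() + "",
    Body: buf,
    ContentType: 'image/png',
    ACL: 'public-read'
};

s3.upload(params, function(err, data) {
    if (err) return callback(err);
    // data.Location is the URL of the uploaded object
    callback(null, { statusCode: 200, body: JSON.stringify({ url: data.Location }) });
});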
I have an Angular 2 application that is sending an image wrapped inside a FormData object to my Node server.
I can receive the image on the server and write it to a file, then re-download it:
var busboy = new Busboy({ headers: req.headers });

busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
    if (mimetype == "image/png") {
        var saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename) + ".png");
    } else if (mimetype == "image/jpeg") {
        var saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename) + ".jpg");
    } else {
        var saveTo = path.join('/home/ec2-user/bonappetite/tmp', path.basename(filename));
    }
    file.pipe(fs.createWriteStream(saveTo));
});

busboy.on('finish', function() {
    console.log("url retrieved");
    res.writeHead(200, { 'Connection': 'close' });
    res.end(savedUrl);
});

return req.pipe(busboy);
What I am trying to do is take the image inside the FormData and upload it to my S3 bucket. I am able to upload data myself, but I can't seem to upload the image that I have saved from the POST request:
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws_config.json');
var s3Bucket = new AWS.S3({ params: { Bucket: 'xxxx' } });

params = { Key: filename, Body: file };

s3Bucket.putObject(params, function(err, data) {
    if (err) {
        console.log(err)
    } else {
        var urlParams = { Bucket: 'xxxx', Key: filename };
        s3Bucket.getSignedUrl('getObject', urlParams, function(err, url) {
            if (err) {
                console.log(err);
                res.status(err.status || 500);
                res.send(error);
            } else {
                res.setHeader('Content-Type', 'application/json');
                url = url;
                res.send({ "url": url });
            }
        })
    }
});
How can I take the file that I have just uploaded to my server and stream it to S3?
Do I even need to save the file to disk prior to sending it to S3?
Thanks
Found a solution to this problem without having to save the file.
Rather than using putObject, use upload: upload accepts a stream as the Body and handles multipart uploads, whereas putObject needs to know the full content length up front.
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws_config.json');
var s3Bucket = new AWS.S3({ params: { Bucket: 'xxxx' } });

params = { Key: filename, Body: file };

s3Bucket.upload(params, function(err, data) {
    if (err) {
        console.log(err)
    } else {
        var urlParams = { Bucket: 'xxxx', Key: filename };
        s3Bucket.getSignedUrl('getObject', urlParams, function(err, url) {
            if (err) {
                console.log(err);
                res.status(err.status || 500);
                res.send(error);
            } else {
                res.setHeader('Content-Type', 'application/json');
                url = url;
                res.send({ "url": url });
            }
        })
    }
});