I am currently using aws-sdk to upload PDF files to an S3 bucket, like this:
function uploadFile (filePath, remoteFilename, cb) {
  var fileBuffer = fs.createReadStream(filePath); // ex.: 'temp/longFileName.pdf'
  fileBuffer.on('error', function (err) {
    logger.warn('Failed reading local pdf file');
    cb(err);
  });
  s3.upload({
    Bucket: 'someBucketName',
    Key: remoteFilename,
    Body: fileBuffer
  }, function (error, response) {
    cb(error, { response, remoteFilename });
  });
}
The problem is that sometimes the file gets uploaded with a size of 0 B; sometimes it is uploaded with the correct size, but when I download it, it is corrupt; and sometimes it is uploaded correctly and opens properly.
When I open the PDF file locally from the file system, it is correct.
Could somebody help me fix this issue?
Update
I am creating the PDF using pdfkit:
function createPdf (data, cb) {
  var fs = require('fs');
  var PDFDocument = require('pdfkit');
  var filePath = 'temp/longFileName.pdf';
  var pdf = new PDFDocument({
    size: 'LEGAL',
    info: {
      Title: 'Title of File Here',
      Author: 'Some Author',
    }
  });

  // Write stuff into PDF
  pdf.text('Hello World');

  // Stream contents to a file
  pdf.pipe(
    fs.createWriteStream(filePath)
  )
  .on('finish', function () {
    console.log('PDF closed');
  });

  // Close PDF and write file.
  pdf.end();
  cb(null, { filePath });
}
Once the callback in this function is called, I call the uploadFile function:
function doAll (someData, cb) {
  createPdf(someData, function (err, data) {
    if (err) console.log(err);
    uploadFile(data.filePath, function (err, data) {
      if (err) console.log(err);
      console.log('finished');
      cb(null, 'done');
      return;
    });
  });
}
The problem is that you're calling the callback immediately instead of waiting for the file to be fully written. Your callback should be invoked inside .on('finish'):
pdf.pipe(
  fs.createWriteStream('./path/to/file.pdf')
)
.on('finish', function () {
  console.log('PDF closed');
  cb(null, 'finished'); // The callback should be in here
});

// Close PDF and write file.
pdf.end();
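For completeness, here is a sketch of the whole createPdf function with the callback moved into the 'finish' handler, plus an 'error' handler added for safety (same temp/longFileName.pdf path and pdfkit setup as in the question):

function createPdf (data, cb) {
  var fs = require('fs');
  var PDFDocument = require('pdfkit');
  var filePath = 'temp/longFileName.pdf';
  var pdf = new PDFDocument({ size: 'LEGAL' });

  pdf.text('Hello World');

  // Only report success once the write stream has flushed everything to disk.
  pdf.pipe(fs.createWriteStream(filePath))
    .on('finish', function () {
      cb(null, { filePath });
    })
    .on('error', function (err) {
      cb(err);
    });

  // Close the PDF; this triggers the final writes and, eventually, 'finish'.
  pdf.end();
}

With this in place, doAll will only start uploadFile after the file on disk is complete, which removes the 0 B and corrupt-upload symptoms caused by reading a half-written file.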
With the help of busboy, I am attempting to save a FileStream into a Firebase Storage bucket.
Code:
const admin = require('firebase-admin');
const userFilesBucket = admin.storage().bucket(USER_FILES_BUCKET_NAME);

function handlePost(req, res) {
  const busboy = new Busboy({ headers: req.headers });

  busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
    file.on('data', function(data) {
    });

    file.on('end', function() {
      uploadFile({ filename: filename, file: file, mimetype: mimetype })
        .catch(err => {
          console.log("attemptFileUpload | err:", err);
          reject(err);
        });
    });
  });
}
function uploadFile(fileContainer) {
  const filePath = fileContainer.filename;
  const file = userFilesBucket.file(filePath);

  file.save(fileContainer.file, function(err) {
    if (!err) console.log('Success | uploaded a blob or file!');
  });
}
This succeeds and the file is saved to the bucket, but at the same time the Promise above catches an exception:
The "chunk" argument must be one of type string or Buffer. Received type object
and the resulting files are corrupt.
Does this error mean I should convert the FileStream to a Buffer?
I should also note that fileContainer.file is of type FileStream.
Thanks.
The solution was very simple: a misread of the busboy docs on my part.
I needed to use busboy's file.on('data') listener to access the file data as a Buffer, not the original incoming file as a FileStream.
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
  file.on('data', data => {
    uploadFile({ filename: filename, file: data, mimetype: mimetype })
      .catch(err => {
        console.log("attemptFileUpload | err:", err);
      });
  });

  file.on('end', function() {
    // moved from here, up to file.on('data')
  });
});
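One caveat: 'data' can fire more than once for larger files, so uploading inside each 'data' event would write only a single chunk. A more defensive sketch (using the same uploadFile helper from above) collects all chunks into one Buffer and uploads it on 'end':

busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
  const chunks = [];

  // Accumulate every chunk of the incoming stream.
  file.on('data', chunk => chunks.push(chunk));

  // Upload the complete file once the stream has ended.
  file.on('end', () => {
    uploadFile({ filename: filename, file: Buffer.concat(chunks), mimetype: mimetype })
      .catch(err => console.log("attemptFileUpload | err:", err));
  });
});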
I need to upload a PDF file from the UI (written in JavaScript) to Amazon S3, but when I upload the file to S3 I get some Unicode-looking text, and when I copy that text into Notepad, or any other text editor, I can see the human-readable text.
I am using pdfmake to get the content of the file and upload it using the getBuffer method.
var content = generatePDF(base64Img);
pdfMake.createPdf(content).getBuffer(function (data) { /* code */ });
The code that I used to upload the file to S3:
var params = {
  Bucket: bucketName,
  Key: file_name,
  Body: data.toString(),
  ContentType: 'application/pdf'
}

s3.upload(params, function (err, data) {
  if (err) {
    // code
  } else {
    // code
  }
});
The file gets uploaded successfully, but when I open it I see text like
!
" #$%&!' ()*')+,
!
!
!
!
But when I paste it into another text editor, I get
Date: 04/20/19
I solved the above problem by wrapping the data from getBuffer in a Buffer before passing it to S3, like this:
var data = new Buffer(event.data, 'binary');
and then uploaded that data to S3:
var params = {
  Bucket: bucketName,
  Key: file_name,
  Body: data,
  ContentType: 'application/pdf'
}

s3.upload(params, function (err, data) {
  if (err) {
    // code
  } else {
    // code
  }
});
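As a side note, new Buffer() is deprecated in current Node.js versions. A sketch of the same fix using Buffer.from, assuming (as in the answer above) that getBuffer hands back binary data, would be:

pdfMake.createPdf(content).getBuffer(function (data) {
  // Buffer.from is the non-deprecated replacement for new Buffer().
  // If data is already a Buffer/Uint8Array it can be passed to S3 directly.
  var body = Buffer.from(data, 'binary');

  s3.upload({
    Bucket: bucketName,
    Key: file_name,
    Body: body,
    ContentType: 'application/pdf'
  }, function (err, uploadData) {
    if (err) {
      // handle the error
    } else {
      // handle success
    }
  });
});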
To upload a file from the client directly to an S3 bucket, you can use multer-s3.
FROM CLIENT END:
axios.post(url, data, {
  onUploadProgress: ProgressEvent => {
    this.setState({
      loaded: (ProgressEvent.loaded / ProgressEvent.total * 100),
    })
  },
})
  .then(res => { // then print response status
    toast.success('Upload Success!')
  })
  .catch(err => { // then print response status
    toast.error('Upload Failed!')
  })
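For context, the `data` passed to axios.post here is assumed to be a FormData object containing the file picked in the UI; a minimal, hypothetical sketch of building it might look like this (the 'file' field name must match the server's .array('file') below):

// Hypothetical file-input handler; event.target.files comes from an <input type="file">.
const data = new FormData();
for (const f of event.target.files) {
  data.append('file', f); // field name must match upload.array('file') on the server
}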
SERVER SIDE:
const upload = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: BUCKET_NAME,
    key: function (req, file, cb) {
      UPLOADED_FILE_NAME = Date.now() + '-' + file.originalname;
      cb(null, UPLOADED_FILE_NAME);
    }
  })
}).array('file');

app.post('/upload', function (req, res) {
  upload(req, res, function (err) {
    if (err instanceof multer.MulterError) {
      // A Multer error occurred when uploading.
      return res.status(500).json(err)
    } else if (err) {
      // An unknown error occurred when uploading.
      return res.status(500).json(err)
    }
    // Everything went fine.
    console.log('REQUEST FILE IS', UPLOADED_FILE_NAME)
    return res.status(200).send(UPLOADED_FILE_NAME)
  })
});
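The snippet assumes that s3, multerS3, BUCKET_NAME, and UPLOADED_FILE_NAME are already set up. A minimal sketch of that setup, assuming the aws-sdk v2 / multer-s3 v2 combination implied by the code above (names and region are illustrative), might be:

const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');

const BUCKET_NAME = 'my-bucket';                 // illustrative bucket name
const s3 = new aws.S3({ region: 'us-east-1' });  // credentials come from env vars or IAM as usual

let UPLOADED_FILE_NAME; // module-level variable written by the key() callback above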
How do I specify the MIME type for the file that I am uploading? I am following this Node.js example: https://cloud.google.com/storage/docs/object-basics#storage-upload-object-nodejs
function uploadFile (bucketName, fileName, callback) {
  // Instantiates a client
  const storageClient = Storage();

  // References an existing bucket, e.g. "my-bucket"
  const bucket = storageClient.bucket(bucketName);

  // Uploads a local file to the bucket, e.g. "./local/path/to/file.txt"
  bucket.upload(fileName, (err, file) => {
    if (err) {
      callback(err);
      return;
    }
    console.log(`File ${file.name} uploaded.`);
    callback();
  });
}
I always get the default
application/octet-stream
Found the answer myself. You need to put this kind of metadata into the options. I couldn't find it in any documentation.
function uploadFile (bucketName, fileName, callback) {
  // Instantiates a client
  const storageClient = Storage();

  // References an existing bucket, e.g. "my-bucket"
  const bucket = storageClient.bucket(bucketName);

  // STARTING FROM HERE
  const options = {
    metadata: {
      contentType: 'image/jpeg',
    },
  };
  // TO HERE

  // Uploads a local file to the bucket, e.g. "./local/path/to/file.txt"
  bucket.upload(fileName, options, (err, file) => {
    if (err) {
      callback(err);
      return;
    }
    console.log(`File ${file.name} uploaded.`);
    callback();
  });
}
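If the MIME type is not known ahead of time, one option (an assumption, not part of the answer above) is to derive it from the file extension, for example with the mime-types package:

const mime = require('mime-types'); // npm install mime-types

const options = {
  metadata: {
    // mime.lookup returns e.g. 'image/jpeg' for '.jpg', or false if unknown
    contentType: mime.lookup(fileName) || 'application/octet-stream',
  },
};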
I am working on an API to store a file uploaded by the user.
function uploadPhoto(req, res) {
  var imagedata = new Buffer('');

  req.body.on('data', function (chunk) {
    imagedata = Buffer.concat([imagedata, chunk]);
  });

  req.body.on('end', function (chunk) {
    fs.writeFile('success.jpeg', imagedata, 'binary', function (err) {
      if (err) throw err
      console.log('File saved.')
    })
  });
}
I get an error when I use req.body.on('data').
The data was sent from Postman.
When I print the value of req.body with console.log("message: " + req.body), it is a string.
I tried writing it to a file using a Buffer like this:
var writeFile = function (type, cb) {
  var data = new Buffer(req.body, type);
  fs.writeFile(type + '.jpeg', data, type, function (err) {
    cb(null, data.length);
  });
}

async.parallel([
  writeFile.bind(null, 'binary'),
  writeFile.bind(null, 'utf8'),
  writeFile.bind(null, 'ascii'),
  writeFile.bind(null, 'ucs2'),
  writeFile.bind(null, 'base64')
], function (err, results) {
  response.status(200).send({});
})
This creates several jpeg files with different sizes, but none of them can be opened as an image.
How can I store this image from the user?
Thank you very much.
This looks like a good case for streams.
function uploadPhoto(req, res) {
  var file = fs.createWriteStream(__dirname + '/success.jpeg')
  req.pipe(file).on('error', function(err) { console.log(err) })
}
Headers could also help determine what file type and character encoding it has.
var file = fs.createWriteStream(__dirname + '/success.jpeg', {defaultEncoding: req.headers.encoding || 'utf8'})
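As a usage sketch (not part of the answer above, route name is illustrative), the handler could be wired into an Express route and respond once the write stream finishes:

// Hypothetical route wiring for the streaming handler above.
app.post('/photos', function (req, res) {
  var file = fs.createWriteStream(__dirname + '/success.jpeg');

  req.pipe(file)
    .on('error', function (err) {
      res.status(500).send(err.message);
    })
    .on('finish', function () {
      // 'finish' fires once the whole request body has been flushed to disk.
      res.status(200).send('File saved.');
    });
});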
I'm trying to save a Buffer (of a file uploaded from a form) to Google Cloud Storage, but it seems like the Google Node SDK only allows files with a given path to be uploaded (read/write streams).
This is what I have used for AWS (S3). Is there anything similar in the Google Node SDK?
var fileContents = new Buffer('buffer');

var params = {
  Bucket: bucketName,    // bucket name
  Key: fileName,         // file name
  ContentType: mimetype, // set mimetype
  Body: fileContents
};

s3.putObject(params, function(err, data) {
  // Do something
});
The only way I have found to do it so far is to write the buffer to disk, upload the file using the SDK (specifying the path to the new file), and then delete the file once it has uploaded successfully. The downside is that the whole process is significantly slower, to the point where using Google Storage seems unfeasible. Is there any workaround / way to upload a buffer?
.save to save the day! Below is some code where I save the "pdf" that I created.
https://googleapis.dev/nodejs/storage/latest/File.html#save
const { Storage } = require("@google-cloud/storage");

const gc = new Storage({
  keyFilename: path.join(__dirname, "./path to your service account .json"),
  projectId: "your project id",
});
const file = gc.bucket(bucketName).file("tester.pdf");

file.save(pdf, (err) => {
  if (!err) {
    console.log("cool");
  } else {
    console.log("error " + err);
  }
});
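file.save also returns a promise when no callback is passed, so an async/await version of the same call (a sketch, assuming the same gc client and pdf buffer as above) could be:

async function savePdf(pdf) {
  const file = gc.bucket(bucketName).file("tester.pdf");

  // With no callback, save() returns a promise that rejects on failure.
  await file.save(pdf, { contentType: "application/pdf" });
  console.log("cool");
}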
This is actually easy:
const stream = require('stream');

let remotePath = 'some/key/to/store.json';
let localReadStream = new stream.PassThrough();
localReadStream.end(JSON.stringify(someObject, null, ' '));

let remoteWriteStream = bucket.file(remotePath).createWriteStream({
  metadata : {
    contentType : 'application/json'
  }
});

localReadStream.pipe(remoteWriteStream)
  .on('error', err => {
    return callback(err);
  })
  .on('finish', () => {
    return callback();
  });
We have an issue about supporting this more easily: https://github.com/GoogleCloudPlatform/gcloud-node/issues/1179
But for now, you can try:
file.createWriteStream()
  .on('error', function(err) {})
  .on('finish', function() {})
  .end(fileContents);
The following snippet is from a Google example. The example assumes you have used multer, or something similar, and can access the file at req.file. You can stream the file to Cloud Storage using middleware that resembles the following:
function sendUploadToGCS (req, res, next) {
  if (!req.file) {
    return next();
  }

  const gcsname = Date.now() + req.file.originalname;
  const file = bucket.file(gcsname);

  const stream = file.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });

  stream.on('error', (err) => {
    req.file.cloudStorageError = err;
    next(err);
  });

  stream.on('finish', () => {
    req.file.cloudStorageObject = gcsname;
    file.makePublic().then(() => {
      req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
      next();
    });
  });

  stream.end(req.file.buffer);
}
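For context, the Google example this comes from uses multer with in-memory storage, which is what populates req.file.buffer. A rough sketch of wiring it up (route name and size limit are illustrative) might look like:

const Multer = require('multer');
const multer = Multer({
  storage: Multer.memoryStorage(),
  limits: { fileSize: 5 * 1024 * 1024 } // e.g. a 5 MB cap; adjust as needed
});

app.post('/upload', multer.single('file'), sendUploadToGCS, (req, res) => {
  // sendUploadToGCS sets cloudStoragePublicUrl when the upload succeeds.
  res.status(200).send(req.file.cloudStoragePublicUrl);
});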
I have this approach working for me:
const destFileName = `someFolder/${file.name}`;
const fileCloud = this.storage.bucket(bucketName).file(destFileName);

fileCloud.save(file.buffer, {
  contentType: file.mimetype
}, (err) => {
  if (err) {
    console.log("error");
  }
});