Trouble saving FileStream to bucket using Firebase Admin SDK and busboy - node.js

With the help of busboy I am attempting to save a FileStream into a Firebase bucket.
Code:
const admin = require('firebase-admin');
const userFilesBucket = admin.storage().bucket(USER_FILES_BUCKET_NAME);

function handlePost(req, res){
  const busboy = new Busboy({ headers: req.headers })
  busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
    file.on('data', function(data) {
    });
    file.on('end', function() {
      uploadFile({filename: filename, file: file, mimetype: mimetype})
        .catch(err => {
          console.log("attemptFileUpload | err:", err)
          reject(err)
        });
    });
  });
}
function uploadFile(fileContainer){
  const filePath = fileContainer.filename
  const file = userFilesBucket.file(filePath);
  file.save(fileContainer.file, function(err) {
    if (!err) console.log('Success | uploaded a blob or file!');
  });
}
This succeeds and the file is saved to the bucket, but at the same time the promise above catches an exception:
The "chunk" argument must be one of type string or Buffer. Received type object
The saved files are also corrupt.
Does this error mean I should convert the FileStream to a Buffer?
I should also note that fileContainer.file is of type FileStream.
Thanks.

The solution was very simple: a misread of the busboy docs on my part.
I needed to use busboy's file.on('data') listener to access the file data as a Buffer, rather than passing the original incoming file along as a FileStream.
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
  file.on('data', data => {
    uploadFile({filename: filename, file: data, mimetype: mimetype})
      .catch(err => {
        console.log("attemptFileUpload | err:", err)
      });
  });
  file.on('end', function() {
    // move from here, up to file.on(data)
  });
});
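Note that busboy can emit 'data' more than once for larger files, so uploading inside that handler may save only a single chunk. A minimal sketch (not part of the original answer, and assuming uploadFile returns a promise as in the snippets above) that concatenates all chunks into one Buffer before saving:
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
  const chunks = [];
  // Collect every chunk emitted by busboy for this file.
  file.on('data', data => chunks.push(data));
  file.on('end', () => {
    // Concatenate the chunks into a single Buffer and upload once.
    uploadFile({filename: filename, file: Buffer.concat(chunks), mimetype: mimetype})
      .catch(err => console.log("attemptFileUpload | err:", err));
  });
});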

Related

Node.js: uploading PDF to S3 bucket results in corrupt file

I am currently using aws-sdk to upload PDF files to an S3 bucket, like this:
function uploadFile (filePath, remoteFilename, cb) {
  var fileBuffer = fs.createReadStream(filePath); // ex.: 'temp/longFileName.pdf'
  fileBuffer.on('error', function(err) {
    logger.warn('Failed reading local pdf file');
    cb(err);
  });
  s3.upload({
    Bucket: 'someBucketName',
    Key: remoteFilename,
    Body: fileBuffer
  }, function (error, response) {
    cb(error, { response, remoteFilename });
  });
}
The problem is that sometimes the file gets uploaded with 0 B size, sometimes it uploads with the correct size but is corrupt when downloaded, and sometimes it uploads correctly and opens properly.
I read the PDF file locally from the file system, and that file is correct.
Could somebody help me fix this issue?
Update:
I am creating the PDF using pdfkit:
function createPdf (data, cb) {
  var fs = require('fs');
  var PDFDocument = require('pdfkit');
  var filePath = 'temp/longFileName.pdf';
  var pdf = new PDFDocument({
    size: 'LEGAL',
    info: {
      Title: 'Title of File Here',
      Author: 'Some Author',
    }
  });

  // Write stuff into PDF
  pdf.text('Hello World');

  // Stream contents to a file
  pdf.pipe(
    fs.createWriteStream(filePath)
  )
  .on('finish', function () {
    console.log('PDF closed');
  });

  // Close PDF and write file.
  pdf.end();

  cb(null, {filePath})
}
Once the callback in this function is called, I call the uploadFile function:
function doAll (someData, cb) {
  createPdf(someData, function(err, data) {
    if (err) console.log(err)
    uploadFile(data.filePath, function(err, data) {
      if (err) console.log(err)
      console.log('finished')
      cb(null, 'done');
      return;
    })
  })
}
The problem is that you're calling the callback immediately instead of waiting for the file to be fully written. Your callback should be invoked inside the .on('finish') handler:
pdf.pipe(
  fs.createWriteStream('./path/to/file.pdf')
)
.on('finish', function () {
  console.log('PDF closed');
  cb(null, 'finished'); // The callback should be in here
});

// Close PDF and write file.
pdf.end();
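For reference, one way the full createPdf from the question might look after moving the callback (a sketch, keeping the same pdfkit usage):
function createPdf (data, cb) {
  var fs = require('fs');
  var PDFDocument = require('pdfkit');
  var filePath = 'temp/longFileName.pdf';
  var pdf = new PDFDocument({ size: 'LEGAL' });

  // Write stuff into PDF
  pdf.text('Hello World');

  // Stream contents to a file and only call back once it is fully written.
  pdf.pipe(fs.createWriteStream(filePath))
    .on('finish', function () {
      cb(null, { filePath });
    })
    .on('error', cb);

  // Close PDF and write file.
  pdf.end();
}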

Send a received file through a service without saving it, and pipe it with Express.js and busboy

I have a service that receives a POST request with a file and JSON data. I also use the body-parser package in app.js. I want to send the file to a "filer" service and process the answer from it, but I don't want to pipe the whole request, because I need to process the JSON content too and take some actions after the filer answers.
const Busboy = require('busboy');
const request = require('request');

const sendFile = (req, file, callback) => {
  return request.post({
    uri: 'http://localhost:5000/stream',
    headers: req.headers,
    formData: { value: file }
  }, (err, resp, body) => {
    if (err) return callback(err);
    return callback();
  });
};

app.post('/route', (req, res, next) => {
  const busboy = new Busboy({ headers: req.headers });
  busboy.on('file', (fieldName, file) => {
    file.on('error', err => reject(err));
    return sendFile(req, file, (err, link) => {
      file.resume();
      if (err) return reject(err);
    });
  });
  busboy.on('field', (fieldName, val) => {
    // process the json here...
  });
  busboy.on('finish', () => {
    console.log('busboy.on(finish)');
    return next();
  });
  req.pipe(busboy);
});
The filer service is the following:
app.post('/stream', (req, res, next) => {
  const busboy = new Busboy({ headers: req.headers });
  // here we are ok
  busboy.on('file', function (fieldName, file, name) {
    // but this part never runs
    res.send(200, { fileId: fileDoc._id });
  });
  return req.pipe(busboy);
});
Unfortunately the filer service never answers, and I don't know which part is missing. I tried putting file.resume() in a few places inside busboy.on('file'), but it didn't help.
It's probably because the file stream from busboy is never processed properly by request's formData.
Another way is:
1. Temporarily write the stream to a local file in the main service (using fs.createWriteStream).
2. Create a stream from that file and pipe it to the filer service (using fs.createReadStream).
3. Do whatever processing you need to do in the main service.
4. Wait for the response from the filer service and call next().
This way you can even use the file in the main service if you need to, or send it to another service (see the sketch below).
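A rough sketch of that approach, assuming the same /route endpoint and filer URL as in the question (field names are illustrative):
const fs = require('fs');
const os = require('os');
const path = require('path');
const Busboy = require('busboy');
const request = require('request');

app.post('/route', (req, res, next) => {
  const busboy = new Busboy({ headers: req.headers });
  const fields = {};

  busboy.on('file', (fieldName, file, filename) => {
    // 1. Temporarily write the incoming stream to a local file.
    const tmpPath = path.join(os.tmpdir(), filename);
    const tmpFile = fs.createWriteStream(tmpPath);
    file.pipe(tmpFile);

    tmpFile.on('finish', () => {
      // 2. Create a fresh read stream from the temp file and pipe it to the filer.
      request.post({
        uri: 'http://localhost:5000/stream',
        formData: { value: fs.createReadStream(tmpPath) }
      }, (err, resp, body) => {
        if (err) return next(err);
        // 4. The filer has answered; continue the request chain.
        next();
      });
    });
  });

  busboy.on('field', (fieldName, val) => {
    // 3. Process the JSON fields in the main service.
    fields[fieldName] = val;
  });

  req.pipe(busboy);
});
Note that the original req.headers are intentionally not forwarded here, so that request can set its own multipart content-type and boundary.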

Writing a file from a POST request in Express.js

I'm working with Express to create a file transfer tool, and I've almost got everything completed. I just need to figure out how to get the data from the request written to a file.
My issue appears to stem from not knowing where the file contents are placed in the request object.
My code for sending the request:
let file = watcher.getOneFile(config.thisLocation);
console.dir(file);
let contents = fs.readFileSync(file.fullPath, 'utf-8');
console.log(contents);

let form = {
  attachments: [
    contents
  ]
}

rq.post({
  url: `http://${homeAddress}:${port}/files/?loc=${config.thisLocation}&file=${file.fileName}`,
  headers: {'content-type': 'application/x-www-form-urlencoded'},
  formData: form
}, (err, res, body) => {
  // body = JSON.parse(body);
  console.log(body);
});
When I get the request on the server, I'm not sure where the file contents actually are.
Code for handling the request:
app.post('/files', (req, res) => {
  console.log(req.query.loc);
  // console.dir(req);
  let incoming = watcher.getOutputPath(req.query.loc, config.locations);
  console.log(incoming);
  console.dir(req.body);
  // console.log(req.body);
  // let body = JSON.parse(req.body);
  console.log(req.query);
  let filename = path.join(incoming, req.query.file);
  console.log(filename);
  fs.writeFile(filename, req.body, (err) => {
    if(err){
      console.error(err);
    }
    console.log(`Successfully wrote file: ${path.join(incoming, req.query.file)}`);
  });
  res.sendStatus(200);
});
Where on the request object are the file contents?
Unfortunately you can't access the file content in any straightforward way. I recommend using busboy or a similar package to parse multipart form-data requests.
Here is how you can read the file content using busboy and write it to the file system:
const Busboy = require('busboy');

app.post('/files', (req, res) => {
  const busboy = new Busboy({ headers: req.headers });
  busboy.on('file', (fieldname, file, filename, encoding, mime) => {
    const newFilename = `${Date.now()}_${filename}`,
      newFile = fs.createWriteStream(newFilename);
    file.pipe(newFile);
    file.on('end', () => {
      console.log(`Finished reading ${filename}`);
    });
  });
  busboy.on('finish', () => {
    console.log('Finished parsing form');
    res.sendStatus(200);
  });
  req.pipe(busboy);
});
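On the sending side (from the question's first snippet), the file would also need to go out as multipart/form-data rather than a urlencoded body; a hypothetical sketch using the same request client (rq) and variables from the question:
const fs = require('fs');

rq.post({
  url: `http://${homeAddress}:${port}/files/?loc=${config.thisLocation}&file=${file.fileName}`,
  // Let request build the multipart/form-data content-type and boundary itself.
  formData: {
    // Streaming the file avoids reading it into memory as a string.
    attachments: fs.createReadStream(file.fullPath)
  }
}, (err, res, body) => {
  if (err) return console.error(err);
  console.log(body);
});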

Store file in Firebase Storage using Cloud Functions REST API request

How do I store a file sent through a POST (form-data) HTTP REST request in Firebase Cloud Functions, so I can handle it in the cloud after the REST request and then store it in a bucket linked to my project?
The form data, for example, is very easy to access, but storage is not. I've tried the multiparty and busboy Node.js libraries, but the problem remains that I'm not able to save the file to Firebase Storage.
var busboy = new Busboy({
  headers: req.headers
});

busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
  file.on('data', function(data) {
    // const object = data;
    // const fileBucket = object.bucket;
    // const bucket = gcs.bucket(filename);
    // const tempFilePath = path.join(os.tmpdir(), filename);
    console.log(data);
  });
  file.on('end', function() {
    console.log('File [' + fieldname + '] Finished');
  });
});

busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) {
  console.log('Field [' + fieldname + ']: value: ' + inspect(val));
});

busboy.on('finish', function() {
  console.log('Done parsing form!');
  res.writeHead(200, {
    Connection: 'close'
  });
  res.end();
});

req.pipe(busboy);
In the end I went with the following solution, and it works fine:
router.post('/upload', function (req, res) {
  var busboy = new Busboy({headers: req.headers});
  var files = 0, finished = false;

  busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
    console.log('File [' + fieldname + ']: filename: ' + filename);
    console.log("Uploading: " + filename);
    ++files;
    var path = temp.writeFileSync(file);
    var fstream = fs.createWriteStream(path);
    fstream.on('finish', function () {
      if (--files === 0) {
        var bucket = firebase.bucket();
        // Upload a local file to a new file to be created in your bucket.
        bucket.upload(path + "", function (err, file) {
          if (!err) {
            console.log("Uploaded: " + path);
            res.json({
              uploaded: true,
              created_at: new Date().getTime(),
              filename: filename,
              path: path,
              mimeType: mimetype
            });
          } else {
            console.error("err: " + err);
            var error = new ErrorResponse(400);
            error.errors += err;
            res.json(error);
          }
        });
      }
    });
    file.pipe(fstream);
  });

  req.pipe(busboy);
});
The easiest way to access Firebase/Google Cloud Storage from a Firebase function is to use the Node.js Google Cloud package for the service, in this case @google-cloud/storage.
Once it's part of your functions' dependencies, you simply require it within your function's JS file:
const gcs = require('@google-cloud/storage')();
Because the function runs in the Google Cloud environment, there is no need to provide authentication info (unless Storage and Functions aren't part of the same Google Cloud/Firebase project).
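As a rough sketch (BUCKET_NAME is hypothetical, and the busboy handler mirrors the snippets above), the package can then upload a temp file written from the incoming stream:
const gcs = require('@google-cloud/storage')();
const fs = require('fs');
const os = require('os');
const path = require('path');

const bucket = gcs.bucket(BUCKET_NAME); // hypothetical bucket name

busboy.on('file', (fieldname, file, filename) => {
  const tempPath = path.join(os.tmpdir(), filename);
  const tempFile = fs.createWriteStream(tempPath);
  file.pipe(tempFile);

  tempFile.on('finish', () => {
    // Upload the temp file into the bucket once it has been fully written.
    bucket.upload(tempPath, { destination: filename })
      .then(() => console.log('Uploaded ' + filename))
      .catch(err => console.error(err));
  });
});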

Wait for callback in createWriteStreamToBlockBlob

I'm trying to pipe a stream to Azure's createWriteStreamToBlockBlob, but I can't get it to wait until the callback returns before piping to the next part. I need the callback to return before the stream gets to firstLine. Both the blobStream and firstLine streams end before the callback fires.
var blobStream = blobService.createWriteStreamToBlockBlob(config.STORAGE_CONTAINER, _blobFilePath,
  function(err, result){
    console.log(result);
  })
  .on("end", function() {
    console.log("finished");
  })

busboy
  .on('file', function(fieldname, file, filename, encoding, mimetype) {
    file
      .pipe(blobStream)
      .pipe(firstLine)
  })
The callback function will only be triggered after the writeStream's "finish" event is emitted. Although we can pipe a readStream into multiple writeStreams, we can't split a pipe into sequential steps.
If you have to handle the result of the Azure call before piping into the firstLine stream, you can store the file in a temp file, then pipe it into firstLine inside the callback closure.
var blobStream = blobsrv.createWriteStreamToBlockBlob(container, filename,
  function (err, result, res) {
    if (err) {
      console.error(err);
    } else {
      console.log(result);
      console.log(res);
      fs.createReadStream('tmpfile.txt').pipe(firstLine);
    }
  })

busboy
  .on('file', function (fieldname, file, filename, encoding, mimetype) {
    var r = file
      .pipe(blobStream)
      .pipe(fs.createWriteStream('tmpfile.txt'));
  })
If you just need to handle the first line of the content, you can handle it in the "data" event of the stream returned by createWriteStreamToBlockBlob().
var blobStream = blobsrv.createWriteStreamToBlockBlob(container, filename,
  function (err, result, res) {
    if (err) {
      console.error(err);
    } else {
      console.log(result);
      console.log(res);
    }
  })
  .on("data", function (chunk){
    console.log("get data : " + chunk);
  })
