Wait for callback in createWriteStreamToBlockBlob - node.js

I'm trying to pipe a stream to Azure's createWriteStreamToBlockBlob but I can't get it to wait until the callback returns before piping to the next part. I need the callback to return before the stream gets to firstLine. Both the blobStream stream and firstLine stream end before the callback.
// Question code: writable stream returned by Azure's createWriteStreamToBlockBlob.
// The completion callback fires only when the whole upload has finished, so it
// cannot be used to gate the pipe into firstLine below.
var blobStream = blobService.createWriteStreamToBlockBlob(config.STORAGE_CONTAINER, _blobFilePath,
function(err, result){
console.log(result);
})
.on("end", function() {
console.log("finished");
})
// NOTE(review): .pipe() does not wait for the blob callback; chaining a second
// .pipe() off a write stream only works if that stream is also readable — this
// is exactly the behavior the question is about.
busboy
.on('file', function(fieldname, file, filename, encoding, mimetype) {
file
.pipe(blobStream)
.pipe(firstLine)
})

The callback function will only be triggered after the writeStream's “finish” event has been emitted. Although we can pipe a readStream into multiple writeStreams, we can’t split a pipe into sequential steps.
If you have to handle the result of the Azure function before piping into the firstLine stream, you can store the file in a temp file, then pipe it into firstLine inside the callback closure.
// Buffer the upload through a temp file; read the first line only after the
// Azure callback confirms the blob upload completed.
var blobStream = blobsrv.createWriteStreamToBlockBlob(container, filename,
function (err, result, res) {
if (err) {
console.error(err);
} else {
console.log(result);
console.log(res);
// The blob is fully written now, so the temp copy is safe to consume.
fs.createReadStream('tmpfile.txt').pipe(firstLine);
}
})
busboy
.on('file', function (fieldname, file, filename, encoding, mimetype) {
var r = file
.pipe(blobStream)
// BUG FIX: fs.createWriteStream is a factory function, not a constructor;
// it must not be invoked with `new`.
.pipe(fs.createWriteStream('tmpfile.txt'));
})
If you just need to handle the first line of the content, you can handle it in the “data” event of createWriteStreamToBlockBlob().
// Listen on the stream's "data" event to inspect chunks as they pass through,
// while the completion callback reports the final upload result.
const blobStream = blobsrv.createWriteStreamToBlockBlob(container, filename, (err, result, res) => {
  if (err) {
    console.error(err);
  } else {
    console.log(result);
    console.log(res);
  }
}).on("data", (chunk) => {
  console.log(`get data : ${chunk}`);
});

Related

cloud function reads file but it's not returning the content on the client side

I am using the code below to read a json file in google firebase storage and then return the content of the file. The code works but all I am getting on the client side is null
// Question code: callable Cloud Function that streams storeid.json from a bucket.
exports.updateProductCatalogue = functions.https.onCall(async (data, context) => {
const filepath = data.filepath
const bucketname = data.bucket
const remoteFile = bucket.file("storeid.json");
let buffer = '';
remoteFile.createReadStream()
.on('error', function(err) {console.log(err)})
.on('data', function(response) {
buffer += response
console.log(buffer)
})
.on('end', function() {
//console.log(buffer);
console.log("FINISHED!!")
})
// BUG (the subject of this question): `buffer` is returned immediately,
// before the asynchronous stream has emitted any 'data'/'end' events,
// so the client always receives the initial empty value.
return buffer
})
this is my client side js call
// Client-side call into the updateProductCatalogue callable function.
function getUpdatedCatalogue(){
  const getStorageData = firebase.functions().httpsCallable('updateProductCatalogue');
  const callData = { "bucket": "test" };
  getStorageData(callData)
    .then((result) => {
      console.log(result);
    })
    .catch((error) => {
      console.log(error);
    });
}
The cloud console.log shows that the content is read and appears in the log, but the client-side console.log returns null. Here is the file I am reading.
Why am I not getting the file content returned and displayed on client side? how can I fix this?
The problem is that you're returning the buffer before the stream finishes reading the file.
Try this (not tested),
exports.updateProductCatalogue = functions.https.onCall(async (data, context) => {
const filepath = data.filepath;
const bucketname = data.bucket;
const remoteFile = bucket.file("storeid.json");
return new Promise(resolve, reject) => {
let buffer = '';
remoteFile.createReadStream()
.on('error', function(err) {
console.log(err);
reject(err);
})
.on('data', function(response) {
buffer += response;
console.log(buffer);
})
.on('end', function() {
console.log("FINISHED!!")
resolve(buffer);
});
});
});

Trouble saving FileStream to bucket using Firebase admin SDK and busboy

Using the help of busboy I am attempting to save FileStream into a Firebase bucket.
code:
const admin = require('firebase-admin');
const userFilesBucket = admin.storage().bucket(USER_FILES_BUCKET_NAME);
// Question code: receives a multipart upload via busboy and tries to save it.
function handlePost(req, res){
const busboy = new Busboy({ headers: req.headers })
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
file.on('data', function(data) {
// The Buffer chunks arrive here but are discarded — the root cause of the
// question: the stream `file`, not the data, is passed to uploadFile below.
});
file.on('end', function() {
uploadFile({filename: filename, file:file, mimetype:mimetype})
.catch(err => {
console.log("attemptFileUpload | err:", err)
// BUG: `reject` is not defined in this scope; this line would throw a
// ReferenceError if the upload failed.
reject(err)
});
});
});
}
// Saves the given container's payload into the Firebase storage bucket.
// NOTE(review): fileContainer.file is a FileStream here, but File#save expects
// string/Buffer data — hence the "chunk must be string or Buffer" error and the
// corrupt files described below.
function uploadFile(fileContainer){
const filePath = fileContainer.filename
const file = userFilesBucket.file(filePath);
file.save(fileContainer.file, function(err) {
if (!err) console.log('Sucess | uploaded a blob or file!');
});
}
This will succeed and the file is saved to bucket but at the same time the above Promise catches exception:
The "chunk" argument must be one of type string or Buffer. Received type object
as well as the files are corrupt.
This error tells me I should convert the FileStream to Buffer?
I should also note that fileContainer.file is of type FileStream.
Thanks.
The solution was very simple — a misread of the busboy docs on my part.
Needed to use busboy's file.on(data) listener to access the file data as Buffer, not the original incoming file as FileStream.
// Correct busboy usage: the 'data' event delivers Buffer chunks, which is what
// the storage API expects; 'end' merely signals that the stream is exhausted.
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
  file.on('data', (data) => {
    uploadFile({ filename: filename, file: data, mimetype: mimetype }).catch((err) => {
      console.log("attemptFileUpload | err:", err);
    });
  });
  file.on('end', () => {
    // Nothing left to do: each chunk was already handled in 'data' above.
  });
});

nodejs uploading pdf to S3 bucket corrupt file

I am currently using aws-sdk to upload pdf files to bucket S3, like this:
/**
 * Streams a local PDF file up to S3.
 * @param {string} filePath - local path, e.g. 'temp/longFileName.pdf'
 * @param {string} remoteFilename - S3 object key
 * @param {Function} cb - node-style callback (err, { response, remoteFilename })
 */
function uploadFile (filePath, remoteFilename, cb) {
  const readStream = fs.createReadStream(filePath);
  readStream.on('error', (err) => {
    logger.warn('Failed reading local pdf file');
    cb(err);
  });
  const params = {
    Bucket: 'someBucketName',
    Key: remoteFilename,
    Body: readStream
  };
  s3.upload(params, (error, response) => {
    cb(error, { response, remoteFilename });
  });
}
the problem is that sometimes the file gets uploaded with 0B size, sometimes it gets uploaded with the correct size, but when I download it, it is corrupt, and of course sometimes it is correctly uploaded and opens properly.
I read the pdf file locally from system file, and that pdf file is correct.
could somebody help me to fix this issue?
update
I am creating a pdf using pdfkit:
// Question code: builds a PDF with pdfkit and writes it to a temp file.
function createPdf (data, cb) {
var fs = require('fs');
var PDFDocument = require('pdfkit');
var filePath = 'temp/longFileName.pdf';
var pdf = new PDFDocument({
size: 'LEGAL',
info: {
Title: 'Tile of File Here',
Author: 'Some Author',
}
});
// Write stuff into PDF
pdf.text('Hello World');
// Stream contents to a file
pdf.pipe(
fs.createWriteStream(filePath)
)
.on('finish', function () {
console.log('PDF closed');
});
// Close PDF and write file.
pdf.end();
// BUG (cause of the 0-byte/corrupt uploads): the callback fires here,
// synchronously, before the write stream's 'finish' event — so the caller
// uploads a file that is not fully written yet.
cb(null, {filePath})
}
once the callback in this function is called, i call the uploadFile function:
// Question code: create the PDF, then upload it once createPdf calls back.
function doAll (someData, cb) {
createPdf(someData, function(err, data) {
if (err) console.log(err)
// NOTE(review): uploadFile is declared as (filePath, remoteFilename, cb)
// but only two arguments are passed here, so the callback lands in the
// remoteFilename slot — verify the intended signature.
uploadFile(data.filePath, function(err,data) {
if (err) console.log(err)
console.log('finished')
cb(null, 'done');
return;
})
})
}
The problem is that you're calling the callback immediately instead of waiting for the file to be fully written. Your callback function should be inside .on('finish')
// Invoke the caller's callback only once the write stream has flushed
// everything to disk ('finish'), never before.
const out = fs.createWriteStream('./path/to/file.pdf');
pdf.pipe(out).on('finish', () => {
  console.log('PDF closed');
  cb(null, 'finished'); // the callback belongs here, after the flush
});
// Close the PDF document so the stream can finish.
pdf.end();

NodeJS - reading Image Binary File

I am working on the API for store a file uploaded by the user.
// Question code: tries to collect the raw image bytes from the request body.
function uploadPhoto(req, res) {
var imagedata = new Buffer('');
// BUG: req.body is the already-parsed body (the question shows it is a
// string), not a stream — it has no .on('data'); the raw stream is `req`.
req.body.on('data', function (chunk) {
imagedata = Buffer.concat([imagedata, chunk]);
});
req.body.on('end', function (chunk) {
fs.writeFile('success.jpeg', imagedata, 'binary', function (err) {
if (err) throw err
console.log('File saved.')
})
});
}
There is an error when I used req.body.on('data').
The data from postman
When I print out the value of req.body with console.log("message: "+req.body), it was string and has value:
I tried to write to file by using Buffer like this
// Question code: writes req.body out once per encoding to see which yields a
// valid JPEG. None will — the body was already decoded to a string, which is
// lossy for binary data.
var writeFile = function (type, cb) {
// NOTE(review): new Buffer(string, encoding) is deprecated; Buffer.from is
// the modern equivalent.
var data = new Buffer(req.body, type);
fs.writeFile(type + '.jpeg', data, type, function (err) {
cb(null, data.length);
});
}
async.parallel([
writeFile.bind(null, 'binary'),
writeFile.bind(null, 'utf8'),
writeFile.bind(null, 'ascii'),
writeFile.bind(null, 'ucs2'),
writeFile.bind(null, 'base64')
], function (err, results) {
// NOTE(review): `response` is used here but the handler's parameter is
// `res` — likely a ReferenceError; confirm against the enclosing scope.
response.status(200).send({});
})
This will create some jpeg files with different size but can't read them as an image.
How can I store this image from the user?
Thank you very much.
This looks like a good case for streams.
// Stream the raw request body straight to disk — no manual buffering needed.
function uploadPhoto(req, res) {
  const destination = fs.createWriteStream(__dirname + '/success.jpeg');
  req.pipe(destination).on('error', (err) => {
    console.log(err);
  });
}
Headers could also help determine what file type and character encoding it has.
// Variant: honor an encoding supplied via request headers, defaulting to utf8.
const file = fs.createWriteStream(__dirname + '/success.jpeg', { defaultEncoding: req.headers.encoding || 'utf8' });

Count bytes from Busboy stream

I'm getting the file stream from Busboy and then i need to count the bytes, grab the first line and then send it to azure storage. It works for files up to about 25MB but after that all of the bytes aren't counted. I'm not sure how to make it wait. I'm using a passthrough stream just to hold the original data while I get the first line.
// Question code: counts bytes and parks the data in a PassThrough, then
// uploads with createBlockBlobFromStream once es.wait has seen the whole body.
busboy
.on('file', function(fieldname, file, filename, encoding, mimetype) {
file
.on('data', function(data) {
// Running byte count; per the question, for files over ~25MB this count
// is not complete when the upload below starts.
bytes = bytes + data.length;
})
.on('end', function() {
console.log("end");
})
.pipe(passthrough)
.pipe(firstLine)
.pipe(es.wait(function (err, body) {
// NOTE(review): the PassThrough holds the entire file in memory, and
// `bytes` may not be final here for large uploads — confirm ordering.
blobService.createBlockBlobFromStream(containter, name, passthrough, bytes, function (error, result) {
if(error) {
return sendResponse(error.toString(), res, null);
}
sendResponse(null, res, "done");
});
}));
})
.on('finish', function() {
console.log("busboy done");
});
If you want to pipe data to the blob, createWriteStreamToBlockBlob is the API you might need.
// Pipe the upload straight into Azure; createWriteStreamToBlockBlob accepts
// piped input and invokes its callback once the blob upload completes.
busboy
  .on('file', (fieldname, file, filename, encoding, mimetype) => {
    file
      .on('data', (data) => {
        bytes += data.length;
      })
      .on('end', () => {
        console.log("end");
      })
      .pipe(passthrough)
      .pipe(firstLine)
      .pipe(blobService.createWriteStreamToBlockBlob(containter, name, (error, result) => {
        if (error) {
          return sendResponse(error.toString(), res, null);
        }
        sendResponse(null, res, "done");
      }));
  })
  .on('finish', () => {
    console.log("busboy done");
  });

Resources