I'm getting the file stream from Busboy; I then need to count the bytes, grab the first line, and send the data to Azure Storage. This works for files up to about 25MB, but beyond that not all of the bytes are counted. I'm not sure how to make the upload wait. I'm using a PassThrough stream just to hold the original data while I extract the first line.
// Busboy multipart handler: counts the file's bytes, tees the stream through a
// PassThrough (kept as a replayable copy) and a first-line extractor, then
// uploads the PassThrough to Azure Blob Storage once es.wait has drained it.
busboy
.on('file', function(fieldname, file, filename, encoding, mimetype) {
file
// Track total size; createBlockBlobFromStream requires an exact byte count.
.on('data', function(data) {
bytes = bytes + data.length;
})
.on('end', function() {
console.log("end");
})
.pipe(passthrough)
.pipe(firstLine)
// es.wait buffers everything and fires its callback only after 'end'.
// NOTE(review): with large uploads (~25MB+) the PassThrough's internal
// buffer can fill and apply backpressure, pausing the source before all
// bytes are counted — presumably the cause of the undercount. Confirm by
// checking highWaterMark / whether passthrough is being drained.
.pipe(es.wait(function (err, body) {
// `bytes` is read here, after the wait completes; if the source stalled,
// this total is short of the real file size.
blobService.createBlockBlobFromStream(containter, name, passthrough, bytes, function (error, result) {
if(error) {
return sendResponse(error.toString(), res, null);
}
sendResponse(null, res, "done");
});
}));
})
.on('finish', function() {
console.log("busboy done");
});
If you want to pipe data directly to the blob, createWriteStreamToBlockBlob is the API you need.
// Same handler, but piping straight into Azure's writable blob stream via
// createWriteStreamToBlockBlob — no byte count needed, so no need to buffer
// the whole upload first.
busboy
.on('file', function(fieldname, file, filename, encoding, mimetype) {
file
// Byte counting kept for logging/diagnostics only; the write stream does
// not require a length up front.
.on('data', function(data) {
bytes = bytes + data.length;
})
.on('end', function() {
console.log("end");
})
.pipe(passthrough)
.pipe(firstLine)
// NOTE(review): this assumes `firstLine` is a Transform that passes the
// full payload through unchanged; if it truncates after the first line,
// the blob will be incomplete — verify against its implementation.
.pipe(blobService.createWriteStreamToBlockBlob(containter, name, function (error, result) {
if(error) {
return sendResponse(error.toString(), res, null);
}
sendResponse(null, res, "done");
}))
})
.on('finish', function() {
console.log("busboy done");
});
Related
I am trying to upload files through a lambda function and request/response is working fine. Problem is each uploaded file is missing some data so the uploaded file is corrupted. e.g. if I try to upload a 5 Kb file, only 4.5 Kb is getting uploaded. This is confirmed with size variable from logs.
/**
 * Parses a multipart/form-data Lambda event into an object mapping
 * field names to values, and file field names to
 * { data: Buffer, filename, encoding, mimetype }.
 *
 * @param {object} event - API Gateway proxy event (headers, body, isBase64Encoded).
 * @returns {Promise<object>} resolves with the parsed form when busboy finishes.
 */
parseMultipart = async (event) => {
return new Promise((resolve, reject) => {
const parsedForm = {};
const bb = new busboy({
headers: {
// API Gateway may deliver the header in either casing.
'content-type': event.headers['Content-Type'] || event.headers['content-type']
}
});
bb.on('file', function (fieldname, file, filename, encoding, mimetype) {
const bufs = [];
let size = 0;
file
// Keep this handler synchronous. The original used an async callback
// with `await bufs.push(data)`: push is synchronous, and the await
// deferred the size update to a microtask, so the logged totals
// lagged behind the actual bytes received.
.on('data', (data) => {
bufs.push(data);
size += data.length;
console.log('size:' + size);
})
.on('end', () => {
console.log('size in end:' + size);
parsedForm[fieldname] = {
data: Buffer.concat(bufs),
filename: filename,
encoding: encoding,
mimetype: mimetype
};
});
})
.on('field', (fieldname, val) => {
parsedForm[fieldname] = val
})
.on('finish', () => {
console.log("in finish:");
resolve(parsedForm);
})
// busboy v1 emits 'close' instead of 'finish'; handle both.
// A second resolve() is a no-op, so this is safe.
.on('close', () => {
console.log("in close");
resolve(parsedForm);
})
.on('error', error => reject(error))
// Decode the body into a Buffer explicitly before handing it to busboy.
// NOTE(review): if the event body is not base64-encoded, API Gateway has
// already decoded it as UTF-8 text, and re-encoding with 'binary'
// (latin1) loses multibyte bytes — the likely cause of the ~10% size
// loss. Configure binary media types so isBase64Encoded is true.
bb.write(Buffer.from(event.body, event.isBase64Encoded ? 'base64' : 'binary'));
bb.end();
})
}
What is it that I am missing or doing differently? I have already checked relevant questions on SO for busboy.
Using the help of busboy I am attempting to save FileStream into a Firebase bucket.
code:
const admin = require('firebase-admin');
const userFilesBucket = admin.storage().bucket(USER_FILES_BUCKET_NAME);
/**
 * Handles a multipart POST: buffers each uploaded file's chunks and hands the
 * assembled Buffer to uploadFile.
 *
 * The original passed the already-consumed FileStream object to uploadFile
 * after 'end', which is why file.save() reported "chunk must be one of type
 * string or Buffer" and wrote corrupt files; it also called an undefined
 * `reject`.
 */
function handlePost(req, res){
const busboy = new Busboy({ headers: req.headers })
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
const chunks = [];
file.on('data', function(data) {
// Collect Buffer chunks as they stream in.
chunks.push(data);
});
file.on('end', function() {
// Upload the complete payload as a single Buffer.
uploadFile({filename: filename, file: Buffer.concat(chunks), mimetype: mimetype})
.catch(err => {
console.log("attemptFileUpload | err:", err)
});
});
});
// Feed the request body into busboy; without this the 'file' event never
// fires (the original never wired the request to the parser).
req.pipe(busboy)
}
/**
 * Saves file data to the user-files bucket.
 *
 * @param {{filename: string, file: (Buffer|string), mimetype: string}} fileContainer
 * @returns {Promise<void>} resolves when the save completes, rejects on error.
 *   The original returned undefined while callers chained .catch() on the
 *   result, and it silently ignored `err` in the save callback.
 */
function uploadFile(fileContainer){
const filePath = fileContainer.filename
const file = userFilesBucket.file(filePath);
return new Promise((resolve, reject) => {
// Pass the mimetype through so the stored object gets a content type.
file.save(fileContainer.file, { contentType: fileContainer.mimetype }, function(err) {
if (err) return reject(err);
console.log('Sucess | uploaded a blob or file!');
resolve();
});
});
}
This will succeed and the file is saved to bucket but at the same time the above Promise catches exception:
The "chunk" argument must be one of type string or Buffer. Received type object
as well as the files are corrupt.
This error tells me I should convert the FileStream to Buffer?
I should also note, that the fileContainer.file is of type FileSream.
Thanks.
The solution was very simple — a misreading of the busboy docs on my part.
Needed to use busboy's file.on(data) listener to access the file data as Buffer, not the original incoming file as FileStream.
// Accumulate the file's Buffer chunks and upload once the stream ends.
// Uploading inside the 'data' handler (as originally written) starts one
// upload per chunk, so any file larger than a single stream chunk is saved
// incompletely or overwritten mid-transfer.
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
const chunks = [];
file.on('data', data => {
chunks.push(data);
});
file.on('end', function() {
uploadFile({filename: filename, file: Buffer.concat(chunks), mimetype: mimetype})
.catch(err => {
console.log("attemptFileUpload | err:", err)
});
});
});
I am working on the API for store a file uploaded by the user.
/**
 * Stores an image posted as a raw binary request body to success.jpeg.
 *
 * The request object itself is the readable stream; `req.body` is a parsed
 * value (or undefined) with no .on(), which caused the original error.
 * Chunks are collected in an array and concatenated once at the end —
 * the original re-ran Buffer.concat per chunk (O(n²)) and used the
 * deprecated `new Buffer('')` constructor.
 */
function uploadPhoto(req, res) {
var chunks = [];
req.on('data', function (chunk) {
chunks.push(chunk);
});
req.on('end', function () {
// Data is already binary Buffers; no encoding argument is needed.
fs.writeFile('success.jpeg', Buffer.concat(chunks), function (err) {
if (err) throw err
console.log('File saved.')
})
});
}
There is an error when I used req.body.on('data').
The data from postman
When I print out the value of req.body with console.log("message: " + req.body), it is a string with this value:
I tried to write to file by using Buffer like this
// Diagnostic attempt: re-encode the already-stringified body under several
// encodings and write each to disk to see which (if any) yields a valid JPEG.
// NOTE(review): once binary data has been coerced to a JS string upstream,
// bytes outside the decoded charset are already lost, so none of these
// re-encodings can recover the original image — the fix belongs at the point
// where the body is read, not here.
var writeFile = function (type, cb) {
var data = new Buffer(req.body, type);
fs.writeFile(type + '.jpeg', data, type, function (err) {
// Write errors are ignored here; only the byte length is reported.
cb(null, data.length);
});
}
// Try all candidate encodings concurrently, then respond.
async.parallel([
writeFile.bind(null, 'binary'),
writeFile.bind(null, 'utf8'),
writeFile.bind(null, 'ascii'),
writeFile.bind(null, 'ucs2'),
writeFile.bind(null, 'base64')
], function (err, results) {
response.status(200).send({});
})
This will create some jpeg files with different size but can't read them as an image.
How can I store this image from the user?
Thank you very much.
This looks like a good case for streams.
/**
 * Streams the raw request body straight to disk as success.jpeg.
 *
 * Errors must be handled on BOTH streams: .pipe() returns the destination,
 * so the original `req.pipe(file).on('error', ...)` only caught write-stream
 * errors and left request-stream errors unhandled (an unhandled 'error'
 * event crashes the Node process).
 */
function uploadPhoto(req, res) {
var file = fs.createWriteStream(__dirname + '/success.jpeg')
req.on('error', function (err) { console.log(err) })
file.on('error', function (err) { console.log(err) })
req.pipe(file)
}
Headers could also help determine what file type and character encoding it has.
var file = fs.createWriteStream(__dirname + '/success.jpeg', {defaultEncoding: req.headers.encoding || 'utf8'})
I am uploading an image from nodeJs.
Control is not getting into req.on('end'). Nothing is getting printed inside req.on('end'). I could not identify where is the problem. I am making an API and calling from js to upload an image in the server at a specific location.
// Upload endpoint: buffers the raw request body, empties the widget's upload
// directory, then writes the new file under uploads/<widgetId>/<filename>.
app.post('/tde/api/photo/:widgetId/:choosenFileName',function(req,res){
console.log("In file Upload..");
console.log(req.params.widgetId);
console.log(req.params.choosenFileName);
// NOTE(review): headers are written before the body is consumed, and
// res.end() below runs before any of the async fs work completes — the
// client gets "File is uploaded" regardless of whether the write succeeds.
res.writeHead(200, { 'Content-Type': 'application/binary' });
var filedata = '';
var chunks = [];
//req.setEncoding('binary');
// Collect raw Buffer chunks; string accumulation (commented out) would
// corrupt binary data.
req.on('data', function(chunk){
//filedata+= chunk;
chunks.push(chunk);
})
req.on('end', function (chunk) {
var dir = 'uploads/'+req.params.widgetId
if (!fs.existsSync(dir)){
fs.mkdirSync(dir);
console.log("directory created..");
}
// Clear out any previous uploads for this widget.
// NOTE(review): the unlinks are asynchronous and unordered relative to the
// writeFile below — if the new filename matches an existing one, the fresh
// write can be deleted by a late unlink. Confirm whether names can collide.
fs.readdir(dir, function(err, filenames) {
if (err) {
onError(err);
return;
}
filenames.forEach(function(filename) {
console.log(filename);
fs.unlink(dir+'/'+filename, function(err) {
if (err) {
return console.error(err);
}
console.log("File deleted successfully!");
});
});
//fs.writeFile('uploads/'+req.params.widgetId+'/sanmoy.jpg', chunk, function(err) {
var fileName = req.params.choosenFileName;
var widgetId = req.params.widgetId;
//fs.writeFile('uploads/'+widgetId+'/'+fileName, filedata, 'binary', function(err) {
// Assemble the body into one Buffer and persist it.
var buffer = Buffer.concat(chunks)
fs.writeFile('uploads/'+widgetId+'/'+fileName, buffer, function(err) {
if (err) {
return console.error(err);
}
console.log("writing file success!");
})
});
});
// Response ends immediately; see note above about the async work still pending.
res.end("File is uploaded");
});
I'm trying to pipe a stream to Azure's createWriteStreamToBlockBlob but I can't get it to wait until the callback returns before piping to the next part. I need the callback to return before the stream gets to firstLine. Both the blobStream stream and firstLine stream end before the callback.
// Writable stream targeting an Azure block blob; the callback fires only after
// the blob write has fully finished.
var blobStream = blobService.createWriteStreamToBlockBlob(config.STORAGE_CONTAINER, _blobFilePath,
function(err, result){
console.log(result);
})
.on("end", function() {
console.log("finished");
})
busboy
.on('file', function(fieldname, file, filename, encoding, mimetype) {
file
.pipe(blobStream)
// NOTE(review): .pipe() here is chained off blobStream, a writable — this
// only works if the SDK returns a duplex/transform stream that re-emits
// the data; otherwise firstLine receives nothing. This ordering also
// cannot make firstLine wait for the upload callback.
.pipe(firstLine)
})
The callback function is only triggered after the writeStream emits its "finish" event. Although we can pipe a readStream into multiple writeStreams, we can't split a pipe into sequential steps.
If you have to handle with the result of Azure function before pipe into firstLine stream, you can store the file in a temp file, then pipe into firstLine in callback closure.
// Upload to Azure and simultaneously spool the file to a temp copy on disk;
// once the blob write completes, re-read the temp copy and pipe it into the
// first-line parser. This guarantees firstLine runs only after the upload
// callback has fired.
var blobStream = blobsrv.createWriteStreamToBlockBlob(container, filename,
function (err, result, res) {
if (err) {
console.error(err);
} else {
console.log(result);
console.log(res);
// Upload finished — now it is safe to process the first line.
fs.createReadStream('tmpfile.txt').pipe(firstLine);
}
})
busboy
.on('file', function (fieldname, file, filename, encoding, mimetype) {
var r = file
.pipe(blobStream)
// createWriteStream is a factory function, not a constructor — the
// original's `new fs.createWriteStream(...)` only worked by accident.
.pipe(fs.createWriteStream('tmpfile.txt'));
})
If you just need to handle the first line of the content, you can do so in the "data" event of the stream returned by createWriteStreamToBlockBlob().
// Inspect chunks as they pass through the Azure blob write stream.
var blobStream = blobsrv.createWriteStreamToBlockBlob(container, filename,
function (err, result, res) {
if (err) {
console.error(err);
} else {
console.log(result);
console.log(res);
}
})
// NOTE(review): plain writable streams do not emit 'data'; this handler only
// fires if the SDK's stream is a duplex/transform that re-emits chunks —
// verify against the azure-storage stream implementation before relying on it.
.on("data", function (chunk){
console.log("get data : "+chunk);
})