How to get binary data from ng-file-upload file object? - base64

I'm attempting to use the ng-file-upload directive to provide file upload functionality in my angular app.
I've got it working for the most part - I can select multiple files and loop through to grab the file name and file types. I just can't seem to figure out where the actual binary data of each file is stored in the file object.
I tried using the approach outlined in this post - AngularJS Upload a file and send it to a DB, but that results in an error that "$q is not defined".
function create_blob(file) {
    var deferred = $q.defer();
    var reader = new FileReader();
    reader.onload = function () {
        deferred.resolve(reader.result);
    };
    reader.readAsDataURL(file);
    return deferred.promise;
}
So then I tried the approach outlined in this post - Send an uploaded image to the server and save it in the server, but again I'm running into an error reading "dataURI.split is not a function".
function dataURItoBlob(dataURI) {
    var binary = atob(dataURI.split(',')[1]);
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    return new Blob([new Uint8Array(array)], {
        type: mimeString
    });
}
The code I'm using is as follows:
function create_blob(file) {
    var deferred = $q.defer();
    var reader = new FileReader();
    reader.onload = function () {
        deferred.resolve(reader.result);
    };
    reader.readAsDataURL(file);
    return deferred.promise;
}

function dataURItoBlob(dataURI) {
    var binary = atob(dataURI.split(',')[1]);
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    return new Blob([new Uint8Array(array)], {
        type: mimeString
    });
}

$scope.uploadFiles = function (files) {
    $scope.files = files;
    angular.forEach(files, function (file) {
        if (file && !file.$error) {
            //var reader = new FileReader();
            //console.log(reader.readAsDataURL(file));
            //var binary = create_blob(file);
            var fileBinary = dataURItoBlob(file);
            $http({
                url: root + '/DesktopModules/ServiceProxy/API/NetSuite/InsertCaseFile',
                method: "POST",
                //headers: { 'caseId': id, 'fileName': file.name, fileContent: $.base64.encode(file) }
                headers: { 'caseId': id, 'fileName': file.name, fileContent: fileBinary }
            }).
            success(function (data, status, headers, config) {
                //if (data == true) {
                //    getCase();
                //    $scope.newMessage = "";
                //    //toaster.pop('success', "", "Message succesfully submitted.",0);
                //}
            }).
            error(function (data, status, headers, config) {
            });
            file.upload.progress(function (evt) {
                file.progress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total));
            });
        }
    });
}
What am I overlooking?

It depends on what format your DB accepts for file uploads. If it supports multipart form data, then you can just use
Upload.upload({file: file, url: my/db/url}).then(...);
if it accepts POST requests with the file's binary data as the content of the request (like CouchDB, imgur, ...) then you can do
Upload.http({data: file, url: my/db/url, headers: {'Content-Type': file.type}})...;
if your DB just accepts JSON objects and you want to store the file as a base64 data URL in the database, as in this question, then you can do
Upload.dataUrl(file, true).then(function(dataUrl) {
    $http.post(url, {
        fileBase64DataUrl: dataUrl,
        fileName: file.name,
        id: uniqueId
    });
})
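For what it's worth, the two errors in the question have straightforward causes: "$q is not defined" usually means $q was never injected into the controller, and "dataURI.split is not a function" happens because a File object is being passed to dataURItoBlob, which expects a data-URL string. The following is only a sketch of how the pieces could fit together; the controller wiring, uploadUrl, and id are stand-ins, not code from the question:

// Sketch only: inject $q and Upload so they're defined, and convert the
// File to a data URL *before* handing it to dataURItoBlob.
app.controller('UploadCtrl', function ($scope, $http, $q, Upload) {
    $scope.uploadFiles = function (files) {
        angular.forEach(files, function (file) {
            if (file && !file.$error) {
                Upload.dataUrl(file, true).then(function (dataUrl) {
                    var blob = dataURItoBlob(dataUrl); // dataUrl is a string, so split() works
                    // send the binary as the request body, not as a header value
                    $http.post(uploadUrl, blob, {
                        headers: { 'Content-Type': file.type, 'caseId': id, 'fileName': file.name }
                    });
                });
            }
        });
    };
});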

Related

Azure Functions - NodeJS - Response Body as a Stream

I'd like to return a file from Blob Storage when you hit a given Azure Function end-point. This file is binary data.
Per the Azure Storage Blob docs, the most relevant call appears to be the following, since it's the only one that doesn't require writing the blob to an interim file first:
getBlobToStream
However this call gets the Blob and writes it to a stream.
Is there a way with Azure Functions to use a Stream as the value of res.body so that I can get the Blob Contents from storage and immediately write it to the response?
To add some code, trying to get something like this to work:
'use strict';
const azure = require('azure-storage'),
      stream = require('stream');
const BLOB_CONTAINER = 'DeContainer';

module.exports = function(context) {
    var file = context.bindingData.file;
    var blobService = azure.createBlobService();
    var outputStream = new stream.Writable();

    blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
        if (error) {
            FileNotFound(context);
        } else {
            context.res = {
                status: 200,
                headers: {},
                isRaw: true,
                body: outputStream
            };
            context.done();
        }
    });
}

function FileNotFound(context) {
    context.res = {
        status: 404,
        headers: {
            "Content-Type": "application/json"
        },
        body: { "Message": "No esta aqui!." }
    };
    context.done();
}
Unfortunately we don't have streaming support implemented in NodeJS just yet - it's on the backlog: https://github.com/Azure/azure-webjobs-sdk-script/issues/1361
If you're not tied to NodeJS and are open to using a C# function instead, you can use the storage SDK object directly in your input bindings and stream the request output, instead of using the intermediate object approach.
While @Matt Manson's answer is definitely correct based on the way I asked my question, the following code snippet might be more useful for someone who stumbles across this question.
While I can't send the Stream to the response body directly, I can use a custom stream which captures the data into a Uint8Array, and then sends that to the response body.
NOTE: If the file is REALLY big, this will use a lot of memory.
'use strict';
const azure = require('azure-storage'),
      stream = require('stream');
const BLOB_CONTAINER = 'deContainer';

module.exports = function(context) {
    var file = context.bindingData.file;
    var blobService = azure.createBlobService();
    var outputStream = new stream.Writable();
    outputStream.contents = new Uint8Array(0); // Initialize contents.

    // Override the write to store the value to our "contents"
    outputStream._write = function (chunk, encoding, done) {
        var curChunk = new Uint8Array(chunk);
        var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
        tmp.set(this.contents, 0);
        tmp.set(curChunk, this.contents.byteLength);
        this.contents = tmp;
        done();
    };

    blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
        if (error) {
            FileNotFound(context);
        } else {
            context.res = {
                status: 200,
                headers: {},
                isRaw: true,
                body: outputStream.contents
            };
            context.done();
        }
    });
}

function FileNotFound(context) {
    context.res = {
        status: 404,
        headers: {
            "Content-Type": "application/json"
        },
        body: { "Message": "No esta aqui!" }
    };
    context.done();
}
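As an aside (not part of the original answer), the same capture trick can be written with Node Buffers, which avoids reallocating a Uint8Array on every chunk; a sketch under the same memory caveat:

// Sketch: collect chunks with Buffer.concat instead of growing a
// Uint8Array on each write. Same idea, fewer copies.
const stream = require('stream');

function makeCaptureStream() {
    const chunks = [];
    const writable = new stream.Writable({
        write(chunk, encoding, done) {
            chunks.push(chunk); // each chunk is already a Buffer
            done();
        }
    });
    writable.getContents = () => Buffer.concat(chunks);
    return writable;
}

The capture stream would then be passed to getBlobToStream exactly as above, with getContents() assigned to context.res.body.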
I tried @Doug's solution from the last comment above, with a few minor mods in my Azure function, and so far, after trying 20 different ideas, this is the only one that actually delivered the file to the browser! Thank you, @Doug...
const fs = require("fs");
const stream = require("stream");
...
const AzureBlob = require('#[my_private_artifact]/azure-blob-storage');
const azureStorage = new AzureBlob(params.connectionString);
//Override the write to store the value to our "contents" <-- Doug's solution
var outputStream = new stream.Writable();
outputStream.contents = new Uint8Array(0);//Initialize contents.
outputStream._write = function (chunk, encoding, done) {
var curChunk = new Uint8Array(chunk);
var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
tmp.set(this.contents, 0);
tmp.set(curChunk, this.contents.byteLength);
this.contents = tmp;
done();
};
let azureSpeedResult = await azureStorage.downloadBlobToStream(params.containerName, params.objectId, outputStream);
let headers = {
"Content-Length": azureSpeedResult.size,
"Content-Type": mimeType
};
if (params.action == "download") {
headers["Content-Disposition"] = "attachment; filename=" + params.fileName;
}
context.res = {
status: 200,
headers: headers,
isRaw: true,
body: outputStream.contents
};
context.done();
...

How to serve a PDF file stored using CollectionFS in Meteor?

I am working on a Meteor application.
Currently, I have a few PDFs on my server. To serve these already existing PDFs directly to the client, I do it this way and it works very well:
Router.route("/file/:fileName", function() {
var fileName = this.params.fileName;
// console.log(process.env.PWD);
var filePath = process.env.PWD + '/' + fileName;
var fs = Meteor.npmRequire('fs');
var data = fs.readFileSync(filePath);
this.response.writeHead(200, {
"Content-Type": "application/pdf",
"Content-Length": data.length
});
this.response.write(data);
this.response.end();
}, {
where: "server"
});
I save these PDFs to Mongo using CollectionFS. (Later, I shall generate PDFs and save them; for now, I am just saving these already existing PDFs directly to Mongo, as I first want to get the Mongo part working.)
testCollection = new FS.Collection("testCollection", {
    stores: [new FS.Store.GridFS("testCollection")]
});

testCollection.allow({
    'insert': function () {
        return true;
    }
});

var file = new FS.File(process.env.PWD + '/PDFKitExampleServerSide.pdf');
file.encoding = 'binary';
file.name('myPDF.pdf');
var document = testCollection.insert(file);
console.log(document._id);
My question is, after I save these PDFs to Mongo using CollectionFS (like I do above), how do I retrieve and serve these PDFs?
Router.route("/database/:pdfId", function() {
//need help here
}, { where: "server"});
After a lot of searching and trying, I've finally gotten it to work.
Router.route("/database/:pdfId", function() {
var pdfId = this.params.pdfId;
var file = testCollection.findOne({_id: pdfId});
var readable = file.createReadStream("tmp");
var buffer = new Buffer(0);
readable.on("data", function(b) {
buffer = Buffer.concat([buffer, b]);
});
var response = this.response;
readable.on("end", function() {
response.writeHead(200, {
"Content-Type": "application/pdf",
"Content-Length": buffer.length
});
response.write(buffer);
response.end();
});
}, {
where: "server"
});
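As a side note (not part of the original answer), if CollectionFS's read stream behaves like a standard Node readable, which it should, the buffering step can be skipped entirely by piping straight into the response; the trade-off is that no Content-Length header is sent:

// Sketch: stream the PDF straight to the response instead of buffering it.
Router.route("/database/:pdfId", function() {
    var file = testCollection.findOne({_id: this.params.pdfId});
    this.response.writeHead(200, { "Content-Type": "application/pdf" });
    file.createReadStream("tmp").pipe(this.response);
}, {
    where: "server"
});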
I know that this question is old, but I found an easier way to store and retrieve PDFs. Apparently, if you store your PDFs in the database and they are smaller than 16MB (which is likely for this type of file), performance is much slower than if you store the files in your server's file system.
For doing that, you can use FS.Store.FileSystem instead of FS.Store.GridFS. The following code works for me:
// Client
var pdfStore = new FS.Store.FileSystem("uploadedFiles");
UploadedFiles = new FS.Collection("uploadedFiles", {
    stores: [pdfStore],
    filter: {
        allow: {
            extensions: ['pdf','doc','docx','xls','xlsx','ppt','pptx','jpg','png','jpeg']
        }
    }
});

// Server
var pdfStore = new FS.Store.FileSystem("uploadedFiles", {path: uploadFilesPath});
UploadedFiles = new FS.Collection("uploadedFiles", {
    stores: [pdfStore],
    filter: {
        maxSize: 5242880, // 5MB in bytes
        allow: {
            extensions: ['pdf','doc','docx','xls','xlsx','ppt','pptx','jpg','png','jpeg']
        },
        deny: {
            extensions: ['exe']
        },
        onInvalid: function (message) {
            if (Meteor.isClient) {
                alert(message);
            } else {
                console.log(message);
            }
        }
    }
});
And then just use this little helper to retrieve the url to the file:
get_uploaded_link: function(id) {
    console.log(id);
    var file = UploadedFiles.findOne({_id: id});
    return file.url();
}
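For context, a sketch of how that helper might be registered as a global Blaze helper (assuming UploadedFiles is visible on the client; the guard handles a file that is still uploading):

// Sketch: global helper registration; returns nothing until the file exists.
Template.registerHelper('get_uploaded_link', function (id) {
    var file = UploadedFiles.findOne({_id: id});
    return file && file.url();
});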

XLSX File corrupted when sent from Node.js via Restify to Client

I am working on a project where I am creating an Excel file using the XLSX Node.js library and sending it to a client via Restify, where I then use the FileSaver.js library to save it on the local computer. When I write the xlsx workbook to a file on the backend, it opens fine; however, when I open it on the client, it is corrupted. I get the error: "Excel cannot open this file. The file format or file extension is not valid. Verify that the file has not been corrupted and that the file extension matches the format of the file".
Here is my code for writing and sending the file on the backend:
var wopts = { bookType:'xlsx', bookSST:false, type:'binary' };
var workbook = xlsx.write(wb, wopts);
res.send(200, workbook);
On the front end, I am using code from the XLSX documentation:
function s2ab(s) {
    var buf = new ArrayBuffer(s.length);
    var view = new Uint8Array(buf);
    for (var i = 0; i != s.length; ++i)
        view[i] = s.charCodeAt(i) & 0xFF;
    return buf;
}

saveAs(new Blob([s2ab(response.data)], {type: ""}), "test.xlsx");
Any thoughts on why this would not work? Any help would be much appreciated. Thanks.
As Luke mentioned in the comments, you have to do a base64 encoding before sending the buffer. Here's a snippet that uses the npm module node-xlsx.
var xlsx = require('node-xlsx');

router.get('/history', function (req, res) {
    var user = new User();
    user.getHistory(req.user.userId, req.query.offset, req.query.limit)
        .then(function (history) {
            if (req.headers.contenttype && req.headers.contenttype.indexOf('excel') > -1) {
                var data = [['Data', 'amount'], ['19/12/2016', '10']];
                var xlsxBuffer = xlsx.build([{ name: 'History', data: data }]);
                res.end(xlsxBuffer.toString('base64'));
            } else {
                res.send(history);
            }
        })
        .catch(function (err) {
            res.status(500).send(err);
        });
});
And this is the frontend code using Angular:
$scope.getXlsFile = function() {
    var config = {
        params: {
            offset: $scope.offset,
            limit: $scope.limit
        },
        headers: {
            'contentType': 'application/vnd.ms-excel',
            'responseType': 'arraybuffer'
        }
    };
    $http.get('/api/history', config)
        .then(function(res) {
            var blob = new Blob([convert.base64ToArrayBuffer(res.data)]);
            FileSaver.saveAs(blob, 'historial.xlsx');
        });
}
where convert is the following factory:
.factory('convert', function () {
    return {
        base64ToArrayBuffer: function (base64) {
            var binary_string = window.atob(base64);
            var len = binary_string.length;
            var bytes = new Uint8Array(len);
            for (var i = 0; i < len; i++) {
                bytes[i] = binary_string.charCodeAt(i);
            }
            return bytes.buffer;
        }
    };
})
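One caveat on the frontend snippet above (an observation, not part of the original answer): in Angular's $http, responseType is a config option rather than a header, so the base64 round-trip can be skipped entirely. A sketch, assuming the server is changed to res.end(xlsxBuffer) without the toString('base64'):

// Sketch: request raw binary; responseType goes on the config object.
$http.get('/api/history', {
    params: { offset: $scope.offset, limit: $scope.limit },
    responseType: 'arraybuffer',
    headers: { 'Accept': 'application/vnd.ms-excel' }
}).then(function (res) {
    // res.data is an ArrayBuffer, so no base64 decoding is needed
    FileSaver.saveAs(new Blob([res.data]), 'historial.xlsx');
});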

Read/write binary data to MongoDB in Node.js

I've been able to successfully write binary data (an image) to MongoDB in Node.js. However I can't find clear documentation on how to read it back.
Here's how I'm writing the image to MongoDB:
var imageFile = req.files.myFile;
var imageData = fs.readFileSync(imageFile.path);
var imageBson = {};
imageBson.image = new db.bson_serializer.Binary(imageData);
imageBson.imageType = imageFile.type;
db.collection('images').insert(imageBson, {safe: true}, function(err, data) {
    // ...
});
I'd appreciate any pointers on reading the image from Mongo using Node. I'm assuming there's a function like "db.bson_deserializer...". Thanks!
Found the answer:
var imageFile = req.files.myFile;
fs.exists(imageFile.path, function(exists) {
    if (exists) {
        console.log("File uploaded: " + util.inspect(imageFile));
        fs.readFile(imageFile.path, function(err, imageData) {
            if (err) {
                res.end("Error reading your file on the server!");
            } else {
                //when saving an object with an image's byte array
                var imageBson = {};
                //var imageData = fs.readFileSync(imageFile.path);
                imageBson.image = new req.mongo.Binary(imageData);
                imageBson.imageType = imageFile.mimetype;
                console.log("imageBson: " + util.inspect(imageBson));
                req.imagesCollection.insert(imageBson, {safe: true}, function(err, bsonData) {
                    if (err) {
                        res.end({ msg: 'Error saving your file to the database!' });
                    } else {
                        fs.unlink(imageFile.path); // Deletes the file from the local disk
                        var imageBson = bsonData[0];
                        var imageId = imageBson._id;
                        res.redirect('images/' + imageId);
                    }
                });
            }
        });
    } else {
        res.end("Oddly your file was uploaded but doesn't seem to exist!\n" + util.inspect(imageFile));
    }
});
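For the read-back half of the question, the stored Binary comes back as a field on the fetched document; below is a minimal serving-route sketch (the route itself and the req.mongo.ObjectID wrapper are assumptions extrapolated from the code above):

// Sketch: fetch the image document and serve its bytes back.
app.get('/images/:id', function (req, res) {
    req.imagesCollection.findOne({ _id: new req.mongo.ObjectID(req.params.id) },
        function (err, imageBson) {
            if (err || !imageBson) {
                return res.end("Image not found!");
            }
            res.writeHead(200, { "Content-Type": imageBson.imageType });
            res.end(imageBson.image.buffer); // Binary exposes its raw bytes via .buffer
        });
});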
The MongoDB part isn't complicated. Once a Buffer is in the model, just let the db save it. MongoDB converts that into BinData. 80% of this code is just getting an image into and out of a PNG file.
People say don't store images in MongoDB, but icons/thumbs are tiny. Having said that, it might be a good idea to have an icons collection and only store them once using a hash of the image data as the _id.
model class example
class MyModel {
    _icon: Buffer

    get icon(): Buffer {
        return this._icon
    }
    set icon(value: Buffer) {
        this._icon = value
    }
}
image helper
static async loadImage(url: string) {
    var files = require('../lib/files')
    var buffer = await files.urlContents(url, true)
    return buffer
}

static async saveImage(image: Buffer, path: string) {
    var files = require('../lib/files')
    files.write(path, image.buffer)
    return path
}
files helper
function urlResponse(url, binary) {
    var request = require("request")
    if (binary)
        request = request.defaults({ encoding: null })
    return new Promise(function (resolve, reject) {
        request(url, function (error, res, body) {
            if (error || res.statusCode !== 200 || body.includes('Incomplete response received from application'))
                resolve({ statusCode: res?.statusCode !== 200 ? (res?.statusCode || 500) : 500 });
            else
                resolve(res);
        });
    });
}

async function urlContents(url, binary) {
    var res = await urlResponse(url, binary)
    if (binary)
        return Buffer.from(res.body)
    else
        return res.body
}

function write(fileName, contents) {
    fs.writeFileSync(fileName, contents)
}
mongodb helper
// ...saving
myModel.icon = await loadImage('http://some.site.com/image.png') // loadImage is async, so await it
collection.insertOne(myModel)

// ...getting
myModel = collection.findOne(query) // now myModel contains icon
saveImage(myModel.icon, '/home/toddmo/pictures/wow.png')
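The icons-collection idea mentioned above (store each icon once, keyed by a hash of its bytes) can be sketched in a few lines; the collection name and hash choice are assumptions:

// Sketch: dedupe icons by using a hash of the image bytes as the _id.
const crypto = require('crypto');

async function saveIcon(iconsCollection, icon /* Buffer */) {
    const _id = crypto.createHash('sha256').update(icon).digest('hex');
    // $setOnInsert + upsert: inserts once, no-ops for duplicate icons
    await iconsCollection.updateOne({ _id: _id }, { $setOnInsert: { icon: icon } }, { upsert: true });
    return _id;
}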

Can't upload file from Angularjs to Expressjs

I'm trying to upload an image from an AngularJS interface to a Node.js server (Express.js). (I'm using mean.io.)
Every time I upload something, req.body logs "{}" and req.files logs "undefined".
I'm using the angular-file-upload directive in AngularJS.
Client-side code:
$scope.onFileSelect = function($files) {
    console.log($files);
    for (var i = 0; i < $files.length; i++) {
        var file = $files[i];
        $scope.upload = $upload.upload({
            url: 'map/set',
            method: 'POST',
            headers: {'enctype': 'multipart/form-data'},
            data: {myObj: $scope.myModelObj},
            file: file,
        }).progress(function(evt) {
            console.log('percent: ' + parseInt(100.0 * evt.loaded / evt.total));
        }).success(function(data, status, headers, config) {
            // file is uploaded successfully
            console.log(data);
        });
    }
};
Server-side code
var app = express();
require(appPath + '/server/config/express')(app, passport, db);
app.use(bodyParser({uploadDir: './uploads'}));

app.post('/map/set', function(req, res) {
    console.log(req.body);
    console.log(req.files);
    res.end('Success');
});
Edit:
HTML Code
<div class="row">
<input id="file" type="file" ng-file-select="onFileSelect()" >
</div>
Hand built request
$scope.onFileSelect = function() {
    //$files: an array of files selected, each file has name, size, and type.
    //console.log($files);
    var xhr = new XMLHttpRequest();

    // not yet supported in most browsers, some examples use
    // this but it's not safe.
    // var fd = document.getElementById('upload').getFormData();
    var fd = new FormData();
    var files = document.getElementById('myfileinput').files;
    console.log(files);
    for (var i = 0; i < files.length; i++) {
        fd.append("file", files[i]);
    }

    /* event listeners */
    xhr.upload.addEventListener("progress", uploadProgress, false);
    xhr.addEventListener("error", uploadFailed, false);
    xhr.addEventListener("load", uploadComplete, false);
    xhr.addEventListener("abort", uploadCanceled, false);

    function uploadComplete() {
        console.log("complete");
    }
    function uploadProgress() {
        console.log("progress");
    }
    function uploadFailed() {
        console.log("failed");
    }
    function uploadCanceled() {
        console.log("canceled");
    }

    xhr.open("POST", "map/set");
    xhr.send(fd);
};
The latest version of mean.io includes Express 4.x as a dependency. The Express 3-to-4 migration documentation notes that Express no longer bundles the Connect middlewares. Read more about it here: https://github.com/visionmedia/express/wiki/Migrating-from-3.x-to-4.x
The new body-parser module only handles urlencoded and JSON bodies. That means for multipart bodies (file uploads) you need an additional module like busboy or formidable.
Here is an example of how I use angular-file-upload with busboy:
The AngularJS Stuff:
$upload.upload({
    url: '/api/office/imageUpload',
    data: {},
    file: $scope.$files
}) …
I wrote a little helper module to handle uploads with busboy more easily. It's not very cleanly coded, but it does the work:
var env = process.env.NODE_ENV || 'development';
var Busboy = require('busboy'),
    os = require('os'),
    path = require('path'),
    config = require('../config/config')[env],
    fs = require('fs');

// TODO: implement file size limit
exports.processFileUpload = function(req, allowedExtensions, callback) {
    var busboy = new Busboy({ headers: req.headers });
    var tempFile = '';
    var fileExtenstion = '';
    var formPayload = {};

    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
        fileExtenstion = path.extname(filename).toLowerCase();
        tempFile = path.join(os.tmpDir(), path.basename(fieldname) + fileExtenstion);
        file.pipe(fs.createWriteStream(tempFile));
    });

    busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
        var jsonValue = '';
        try {
            jsonValue = JSON.parse(val);
        } catch (e) {
            jsonValue = val;
        }
        formPayload[fieldname] = jsonValue;
    });

    busboy.on('finish', function() {
        if (allowedExtensions.length > 0) {
            if (allowedExtensions.indexOf(fileExtenstion) == -1) {
                callback({message: 'extension_not_allowed'}, tempFile, formPayload);
            } else {
                callback(null, tempFile, formPayload);
            }
        } else {
            callback(null, tempFile, formPayload);
        }
    });

    return req.pipe(busboy);
}
In my controller I can use the module this way:
var uploader = require('../helper/uploader'),
    path = require('path'),
    fs = require('fs');

exports.uploadEmployeeImage = function(req, res) {
    uploader.processFileUpload(req, ['.jpg', '.jpeg', '.png'], function(uploadError, tempPath, formPayload) {
        var fileExtenstion = path.extname(tempPath).toLowerCase();
        var targetPath = "/exampleUploadDir/testFile" + fileExtenstion;
        // Move the temp file to its final location, then answer the request
        fs.rename(tempPath, targetPath, function(error) {
            if (error) {
                return res.status(500).send("cant upload employee image");
            }
            res.send(targetPath);
        });
    });
}
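Wiring that controller into an Express route could then look like this sketch (the route path and file layout are assumptions):

// Sketch: expose the busboy-backed upload handler on a route.
var officeController = require('./controllers/office');
app.post('/api/office/imageUpload', officeController.uploadEmployeeImage);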
I'm going to take a guess here that the header settings are incorrect.
headers: {'enctype': 'multipart/form-data'},
Should be changed to:
headers: {'Content-Type': 'multipart/form-data'},
Ensure you have an 'id' AND 'name' attribute on the file input - not having an id attribute can cause problems on some browsers. Also, try building the request like this:
var xhr = new XMLHttpRequest();
// not yet supported in most browsers, some examples use
// this but it's not safe.
// var fd = document.getElementById('upload').getFormData();
var fd = new FormData();
var files = document.getElementById('myfileinput').files;
for (var i = 0; i < files.length; i++) {
    fd.append("file", files[i]);
}

/* event listeners */
xhr.upload.addEventListener("progress", uploadProgress, false);
xhr.addEventListener("error", uploadFailed, false);
xhr.addEventListener("load", uploadComplete, false);
xhr.addEventListener("abort", uploadCanceled, false);

xhr.open("POST", "your/url");
xhr.send(fd);
Angular isn't great with file uploads, so doing it by hand might help.
