I'm trying to upload an image from an AngularJS interface to a Node.js server (Express).
(I'm using mean.io)
Every time I upload something, req.body logs "{}" and req.files logs "undefined".
I'm using the angular-file-upload directive in AngularJS.
Client-side code:
$scope.onFileSelect = function() {
console.log($files);
for (var i = 0; i < $files.length; i++) {
var file = $files[i];
$scope.upload = $upload.upload({
url: 'map/set',
method: 'POST',
headers: {'enctype': 'multipart/form-data'},
data: {myObj: $scope.myModelObj},
file: file,
}).progress(function(evt) {
console.log('percent: ' + parseInt(100.0 * evt.loaded / evt.total));
}).success(function(data, status, headers, config) {
// file is uploaded successfully
console.log(data);
});
}
};
Server-side code:
var app = express();
require(appPath + '/server/config/express')(app, passport, db);
app.use(bodyParser({uploadDir:'./uploads'}));
app.post('/map/set', function(req, res) {
console.log(req.body);
console.log(req.files);
res.end('Success');
});
Edit:
HTML code:
<div class="row">
<input id="file" type="file" ng-file-select="onFileSelect()" >
</div>
Hand-built request:
$scope.onFileSelect = function() {
//$files: an array of files selected, each file has name, size, and type.
//console.log($files);
var xhr = new XMLHttpRequest();
// not yet supported in most browsers, some examples use
// this but it's not safe.
// var fd = document.getElementById('upload').getFormData();
var fd = new FormData();
var files = document.getElementById('myfileinput').files;
console.log(files);
for(var i = 0;i<files.length; i++) {
fd.append("file", files[i]);
}
/* event listeners */
xhr.upload.addEventListener("progress", uploadProgress, false);
xhr.addEventListener("error", uploadFailed, false);
xhr.addEventListener("load", uploadComplete, false);
xhr.addEventListener("abort", uploadCanceled, false);
function uploadComplete(){
console.log("complete");
}
function uploadProgress(){
console.log("progress");
}
function uploadFailed(){
console.log("failed");
}
function uploadCanceled(){
console.log("canceled");
}
xhr.open("POST", "map/set");
xhr.send(fd);
};
The latest version of mean.io includes Express 4.x as a dependency. As the Express 3-to-4 migration guide explains, Express no longer bundles the Connect middleware. Read more here: https://github.com/visionmedia/express/wiki/Migrating-from-3.x-to-4.x
The new body-parser module only handles urlencoded and JSON bodies. That means for multipart bodies (file uploads) you need an additional module such as busboy or formidable.
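For reference, this is roughly how the remaining body parsers are mounted in Express 4 (a minimal sketch; the option values are illustrative only):

var express = require('express');
var bodyParser = require('body-parser');

var app = express();
// body-parser only covers JSON and urlencoded bodies
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
// multipart/form-data (file uploads) is NOT handled here;
// a module such as busboy, multer, or formidable is needed for that.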
Here is an example of how I use angular-file-upload with busboy:
The AngularJS Stuff:
$upload.upload({
url: '/api/office/imageUpload',
data: {},
file: $scope.$files
}) …
I wrote a little helper module to make handling uploads with busboy easier. It's not very cleanly coded, but it does the job:
var env = process.env.NODE_ENV || 'development';
var Busboy = require('busboy'),
os = require('os'),
path = require('path'),
config = require('../config/config')[env],
fs = require('fs');
// TODO: implement file size limit
exports.processFileUpload = function(req, allowedExtensions, callback){
var busboy = new Busboy({ headers: req.headers });
var tempFile = '';
var fileExtension = '';
var formPayload = {};
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
fileExtension = path.extname(filename).toLowerCase();
tempFile = path.join(os.tmpdir(), path.basename(fieldname) + fileExtension);
file.pipe(fs.createWriteStream(tempFile));
});
busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
var jsonValue = '';
try {
jsonValue = JSON.parse(val);
} catch (e) {
jsonValue = val;
}
formPayload[fieldname] = jsonValue;
});
busboy.on('finish', function() {
if(allowedExtensions.length > 0){
if(allowedExtensions.indexOf(fileExtension) == -1) {
callback({message: 'extension_not_allowed'}, tempFile, formPayload);
} else {
callback(null, tempFile, formPayload)
}
} else {
callback(null, tempFile, formPayload)
}
});
return req.pipe(busboy);
}
In my controller I can use the module this way:
var uploader = require('../helper/uploader'),
path = require('path');
exports.uploadEmployeeImage = function(req,res){
uploader.processFileUpload(req, ['.jpg', '.jpeg', '.png'], function(uploadError, tempPath, formPayload){
var fileExtenstion = path.extname(tempPath).toLowerCase();
var targetPath = "/exampleUploadDir/testFile" + fileExtenstion;
fs.rename(tempPath, targetPath, function(error) {
if(error){
return callback("cant upload employee image");
}
callback(null, newFileName);
});
});
}
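For completeness, the controller then has to be mounted on the route the Angular code posts to. A minimal sketch (the require path and the office module name are assumptions about your project layout, not part of the original code):

var office = require('../controllers/office'); // path/name assumed for illustration
app.post('/api/office/imageUpload', office.uploadEmployeeImage);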
I'm going to take a guess here that the header settings are incorrect.
headers: {'enctype': 'multipart/form-data'},
Should be changed to:
headers: {'Content-Type': 'multipart/form-data'},
Ensure you have an 'id' AND 'name' attribute on the file input - not having an id attribute can cause problems on some browsers. Also, try building the request like this:
var xhr = new XMLHttpRequest();
// not yet supported in most browsers, some examples use
// this but it's not safe.
// var fd = document.getElementById('upload').getFormData();
var fd = new FormData();
var files = document.getElementById('myfileinput').files;
for(var i = 0;i<files.length; i++) {
fd.append("file", files[i]);
}
/* event listeners */
xhr.upload.addEventListener("progress", uploadProgress, false);
xhr.addEventListener("error", uploadFailed, false);
xhr.addEventListener("load", uploadComplete, false);
xhr.addEventListener("abort", uploadCanceled, false);
xhr.open("POST", "your/url");
xhr.send(fd);
Angular isn't great with file uploads, so doing it by hand might help.
Related
I am new to Node.js and need your help. From the Node.js terminal, I want to download an Excel file and convert it to CSV (say, mocha online.js). Note: I don't want to do this via a browser.
Below is a script I am working on to download the file and convert it to CSV. There is no error, but it doesn't produce the expected result either:
online.js
if (typeof require !== 'undefined') XLSX = require('xlsx');
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
/* set up XMLHttpRequest */
var url = "http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx";
var xhr = new XMLHttpRequest();
xhr.open("GET", url, true);
xhr.responseType = "arraybuffer";
describe('suite', function () {
it('case', function () {
var arraybuffer = xhr.response;
/* convert data to binary string */
var data = new Uint8Array(arraybuffer);
var arr = new Array();
for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
var bstr = arr.join("");
/* Call XLSX */
var sheetName = 'Database';
var workbook = XLSX.read(bstr, { type: "binary" });
var worksheet = workbook.Sheets[sheetName];
var csv = XLSX.utils.sheet_to_csv(worksheet);
console.log(csv);
xhr.send();
//.... perform validations here using the csv data
});
});
I tried this myself and it seems to work. The only thing is that I spent 15 minutes trying to understand why my OpenOffice would not open the file, until I realized the server was sending a zip file... Here is the full code; the documentation for the http.get function is in the Node docs.
You could also use the request module; it isn't built into Node, but it is easier to work with.
Enjoy!
const url = 'http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx'
const http = require('http')
const fs = require('fs')
http.get(url, (res) => {
const {
statusCode
} = res;
const contentType = res.headers['content-type'];
console.log(`The type of the file is : ${contentType}`)
let error;
if (statusCode !== 200) {
error = new Error(`Request Failed.\n` +
`Status Code: ${statusCode}`);
}
if (error) {
console.error(error.message);
// consume response data to free up memory
res.resume();
return;
}
res.setEncoding('binary');
let rawData = '';
res.on('data', (chunk) => {
rawData += chunk;
});
res.on('end', () => {
try {
const parsedData = xlsxToCSVFunction(rawData);
// And / Or just put it in a file
fs.writeFileSync('fileName.zip', rawData, 'binary')
// console.log(parsedData);
} catch (e) {
console.error(e.message);
}
});
}).on('error', (e) => {
console.error(`Got error: ${e.message}`);
});
function xlsxToCSVFunction(rawData) {
return rawData //you should return the csv file here whatever your tools are
}
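If you want the conversion to happen in the same script, one possible implementation of xlsxToCSVFunction using the xlsx package (a sketch only; the sheet name 'Database' is taken from the question and may need adjusting):

const XLSX = require('xlsx');

function xlsxToCSVFunction(rawData) {
  // rawData is a binary string because res.setEncoding('binary') was used above
  const workbook = XLSX.read(rawData, { type: 'binary' });
  const worksheet = workbook.Sheets['Database']; // sheet name taken from the question
  return XLSX.utils.sheet_to_csv(worksheet);
}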
I actually encountered the same problem 3 months ago; here is what I did.
I did not find any Node.js module that did exactly what I wanted, so I used in2csv (a Python command-line program) to transform the data; the -t option tells it to use tabs as the delimiter.
Step 1: transforming the xlsx files into csv using in2csv
This code takes all the xlsx files in the current directory, transforms them into csv files and puts them in another directory:
var shelljs = require('shelljs/global')
var dir = pwd().stdout.split('/')
dir = dir[dir.length - 1].replace(/\s/g, '\\ ')
mkdir('../'+ dir + 'CSV')
ls('*.xlsx').forEach(function(file) {
// below are the two lines you need
let string = 'in2csv -t ' + file.replace(/\s/g, '\\ ') + ' > ../'+ dir + 'CSV/' + file.replace('xlsx','csv').replace(/\s/g, '\\ ')
exec(string, {silent:true}, function(code, stdout, stderr){
console.log('new file : ' + file.replace('xlsx','csv'))
if(stderr){
console.log(string)
console.log('Program stderr:', stderr)
}
})
});
Step 2: loading the data in a Node.js program
My script is very long, but the two main lines are:
const args = fileContent.split('\n')[0].split(',')
const content = fileContent.split('\n').slice(1).map(e => e.split(','))
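For context, a minimal sketch of where fileContent could come from (the file path is an assumption; any CSV produced in step 1 would do):

const fs = require('fs')
// read one of the CSV files produced by in2csv in step 1
const fileContent = fs.readFileSync('../myDirCSV/example.csv', 'utf8')
const args = fileContent.split('\n')[0].split(',')
const content = fileContent.split('\n').slice(1).map(e => e.split(','))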
And for the benefit of seekers like me... here is a solution using mocha, request and xlsx:
var request = require('request');
var XLSX = require('xlsx');
describe('suite', function () {
it('case', function (done) {
var url = "http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx";
var options = {
url: url,
headers: {
'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
},
encoding: null
};
request.get(options, function (err, res, body){
var arraybuffer = body;
/* convert data to binary string */
var data = arraybuffer;
//var data = new Uint8Array(arraybuffer);
var arr = new Array();
for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
var bstr = arr.join("");
/* Call XLSX */
var sheetName = 'Database';
var workbook = XLSX.read(bstr, { type: "binary" });
var worksheet = workbook.Sheets[sheetName];
var csv = XLSX.utils.sheet_to_csv(worksheet);
console.log(csv);
done();
});
});
});
I am working on a project where I am creating an Excel file using the XLSX Node.js library, sending it to a client via Restify, and then using the FileSaver.js library to save it on the local computer. When I write the xlsx workbook to a file on the backend, it opens fine; however, when I open it on the client, it is corrupted. I get the error: "Excel cannot open this file. The file format or file extension is not valid. Verify that the file has not been corrupted and that the file extension matches the format of the file".
Here is my code for writing and sending the file on the backend:
var wopts = { bookType:'xlsx', bookSST:false, type:'binary' };
var workbook = xlsx.write(wb, wopts);
res.send(200, workbook);
On the front end, I am using code from the XLSX documentation:
function s2ab(s) {
var buf = new ArrayBuffer(s.length);
var view = new Uint8Array(buf);
for (var i=0; i!=s.length; ++i)
view[i] = s.charCodeAt(i) & 0xFF;
return buf;
}
saveAs(new Blob([s2ab(response.data)],{type:""}), "test.xlsx");
Any thoughts on why this would not work? Any help would be much appreciated. Thanks.
As Luke mentioned in the comments, you have to do a base64 encoding before sending the buffer. Here's a snippet that uses the npm module node-xlsx.
var xlsx = require('node-xlsx');
router.get('/history', function (req, res) {
var user = new User();
user.getHistory(req.user.userId, req.query.offset, req.query.limit)
.then(function (history) {
if (req.headers.contenttype && req.headers.contenttype.indexOf('excel') > -1) {
var data = [['Data', 'amount'], ['19/12/2016', '10']];
var xlsxBuffer = xlsx.build([{ name: 'History', data: data }]);
res.end(xlsxBuffer.toString('base64'));
} else {
res.send(history);
}
})
.catch(function (err) {
res.status(500).send(err);
});
});
And this is the frontend code using Angular:
$scope.getXlsFile = function() {
var config = {
params: {
offset: $scope.offset,
limit: $scope.limit
},
headers: {
'contentType': 'application/vnd.ms-excel',
'responseType': 'arraybuffer'
}
};
$http.get('/api/history', config)
.then(function(res) {
var blob = new Blob([convert.base64ToArrayBuffer(res.data)]);
FileSaver.saveAs(blob, 'historial.xlsx');
})
}
where convert is the following factory:
.factory('convert', function () {
return {
base64ToArrayBuffer: function (base64) {
var binary_string = window.atob(base64);
var len = binary_string.length;
var bytes = new Uint8Array(len);
for (var i = 0; i < len; i++) {
bytes[i] = binary_string.charCodeAt(i);
}
return bytes.buffer;
}
}
})
Hi everyone,
I'm working on a project using Node.js, and I would like to know in which format my client must send the file to the server (base64 or something else?).
My client is:
//client.js
$('#file').on('change', function(e){
encode64(this);
});
function encode64(input) {
if (input.files){
chap.emit('test', { "test" : input.files[0] });
var FR= new FileReader();
FR.readAsDataURL(input.files[0]);
FR.onload = function(e) {
chap.emit('test', { "test" : e.target.result } );
}
}
}
My server side is:
socket.on('test', function(e){
var gs = new gridStore(db, e.test,"w");
gs.writeFile(new Buffer(e.test,"base64"), function(err,calb){
if (!err)
console.log('bien passe');
else
console.log('erreur');
});
});
But this doesn't work; I get this error:
TypeError: Bad argument
at Object.fs.fstat (fs.js:667:11)
Could anyone help me?
Normally this is how you store files into GridFS. I have used it to store files; hope it works.
var fs = require('fs');
var multiparty = require('multiparty');
var Grid = require('gridfs-stream');
var gfs; // gridfs-stream instance, created once the MongoDB connection is open (see the sketch below)

var form = new multiparty.Form();
form.parse(req, function (err, fields, files) {
    var file = files.file[0];
    var filename = file.originalFilename; // original file name
    var contentType = file.headers['content-type'];
    console.log(files);
    var tmpPath = file.path; // temporary path written by multiparty
    var writestream = gfs.createWriteStream({ filename: filename });
    // open a stream to the temporary file created by multiparty...
    fs.createReadStream(tmpPath)
        // and pipe it to GridFS
        .pipe(writestream);
    writestream.on('close', function (file) {
        // `file` is the resulting GridFS file document
        res.send(file);
    });
});
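The snippet above assumes gfs has already been initialized from a MongoDB connection. A sketch of that wiring, continuing from the requires above (it uses mongoose, which is an assumption about your setup, not something from the original snippet):

var mongoose = require('mongoose'); // assumption: you connect through mongoose

Grid.mongo = mongoose.mongo; // tell gridfs-stream which mongo driver to use
mongoose.connection.once('open', function () {
    // the native db handle is only available once the connection is open
    gfs = Grid(mongoose.connection.db);
});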
I'm attempting to use the ng-file-upload directive to provide file upload functionality in my angular app.
I've got it working for the most part - I can select multiple files and loop through to grab the file name and file types. I just can't seem to figure out where the actual binary data of each file is stored in the file object.
I tried using the approach outlined in this post - AngularJS Upload a file and send it to a DB, but that results in an error that "$q is not defined".
function create_blob(file) {
var deferred = $q.defer();
var reader = new FileReader();
reader.onload = function () {
deferred.resolve(reader.result);
};
reader.readAsDataURL(file);
return deferred.promise;
}
So then I tried the approach outlined in this post - Send an uploaded image to the server and save it in the server, but again I'm running into an error reading "dataURI.split is not a function".
function dataURItoBlob(dataURI) {
var binary = atob(dataURI.split(',')[1]);
var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
var array = [];
for (var i = 0; i < binary.length; i++) {
array.push(binary.charCodeAt(i));
}
return new Blob([new Uint8Array(array)], {
type: mimeString
});
}
The code I'm using is as follows:
function create_blob(file) {
var deferred = $q.defer();
var reader = new FileReader();
reader.onload = function () {
deferred.resolve(reader.result);
};
reader.readAsDataURL(file);
return deferred.promise;
}
function dataURItoBlob(dataURI) {
var binary = atob(dataURI.split(',')[1]);
var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
var array = [];
for (var i = 0; i < binary.length; i++) {
array.push(binary.charCodeAt(i));
}
return new Blob([new Uint8Array(array)], {
type: mimeString
});
}
$scope.uploadFiles = function (files) {
$scope.files = files;
angular.forEach(files, function (file) {
if (file && !file.$error) {
//var reader = new FileReader();
//console.log(reader.readAsDataURL(file));
//var binary = create_blob(file);
var fileBinary = dataURItoBlob(file);
$http({
url: root + '/DesktopModules/ServiceProxy/API/NetSuite/InsertCaseFile',
method: "POST",
//headers: { 'caseId': id, 'fileName': file.name, fileContent: $.base64.encode(file) }
headers: { 'caseId': id, 'fileName': file.name, fileContent: fileBinary }
}).
success(function (data, status, headers, config) {
//if (data == true) {
// getCase();
// $scope.newMessage = "";
// //toaster.pop('success', "", "Message succesfully submitted.",0);
//}
}).
error(function (data, status, headers, config) {
});
file.upload.progress(function (evt) {
file.progress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total));
});
}
});
}
What am I overlooking?
It depends on what format your DB accepts for file uploads. If it supports multipart form data, then you can just use:
Upload.upload({file: file, url: my/db/url}).then(...);
If it accepts POST requests with the file's binary data as the request content (like CouchDB, imgur, ...), then you can do:
Upload.http({data: file, url: my/db/url, headers: {'Content-Type': file.type}})...;
If your DB just accepts JSON objects and you want to store the file as a base64 data URL in the database, as in this question, then you can do:
Upload.dataUrl(file, true).then(function(dataUrl) {
$http.post(url, {
fileBase64DataUrl: dataUrl,
fileName: file.name,
id: uniqueId
});
})
I need to send a PDF file from an AngularJS client to a Node.js service.
I wrote the AngularJS service, and when I receive the file it's a string like this:
%PDF-1.3
3 0 obj
<</Type /Page
/Parent 1 0 R
/Reso
How can I convert this string back to a PDF in Node.js?
This is the client code:
var sendByEmail = function () {
$scope.generatingPdf = true;
$('#budget').show();
var pdf = new JsPDF('p', 'pt', 'letter');
var source = $('#budget')[0];
pdf.addHTML(source, 0, 0, function () {
var resultPdf = pdf.output();
BillService.sendByEmail("rbrlnx#gmail.com", resultPdf).then(function () {
});
$('#budget').hide();
});
};
var sendByEmail = function (email, file) {
var deferred = $q.defer();
var data = {
email: email,
file: file
};
BillService.sendByEmail(data, function (result) {
deferred.resolve(result);
}, function () {
deferred.reject();
});
return deferred.promise;
};
The server-side controller is empty:
var sendByEmail = function (req, res, next) {
var file = req.body.file;
};
I experimented with this a while ago, and I came up with the following. It's not production ready by a long shot, but maybe you'll find it useful. It's free of front-end libraries (except Angular, of course), but assumes you're using Express 4.x and body-parser.
The result:
In the browser:
On the server:
What you're seeing:
A tiny Node server serving the static index.html and Angular files, and a POST route that receives a PDF in base64, as delivered by the HTML5 FileReader API, and saves it to disk.
Instead of saving to disk, you can send it as an email attachment (see the nodemailer sketch after the server code below).
The example below assumes a user uploading a PDF through a file input, but the idea is the same for any other way of sending a document to your back-end system. The most important thing is to send the PDF data as base64, because that is the format most file writers and email packages expect (as opposed to straight-up binary, for instance). This also goes for images, documents, etc.
How did I do that:
In your HTML:
<div pdfs>Your browser doesn't support File API.</div>
A directive called pdfs:
myApp.directive('pdfs', ['upload', function(upload) {
return {
replace: true,
scope: true, // give the directive its own child scope
template: '<input id="files" type="file">',
link: function(scope,element) {
element.bind('change', function(evt) {
scope.$apply(function() {
scope.files = evt.target.files;
});
});
},
controller: function($scope, $attrs) {
$scope.$watch('files', function(files) {
//upload.put(files)
if(typeof files !== 'undefined' && files.length > 0) {
for(var i = 0; i<files.length;i++) {
readFile(files[i])
}
}
}, true);
function readFile(file) {
var reader = new FileReader();
reader.addEventListener("loadend", function(evt) {
upload.post({name: file.name, data: reader.result})
})
if (file.type === 'application/pdf') {
reader.readAsDataURL(file);
}
}
}
}
}]);
A tiny service:
myApp.service('upload', function($http) {
this.post = function(file) {
$http.post('/pdf', file);
}
});
And a node server:
var express = require('express');
var bodyParser = require('body-parser')
var fs = require("fs");
var app = express();
app.use(express.static('.'));
app.use( bodyParser.json({limit: '1mb'}) );
app.post('/pdf', function(req, res){
var name = req.body.name;
var pdf = req.body.data.replace('data:application/pdf;base64,', '');
res.send('received');
fs.writeFile(name, pdf, 'base64', function(err) {
console.log(err);
});
});
var server = app.listen(3000, function() {
console.log('Listening on port %d', server.address().port);
});
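Following up on the email-attachment option mentioned earlier, here is a hedged sketch using nodemailer instead of fs.writeFile (the transport settings and addresses are placeholders, not part of the original example):

var nodemailer = require('nodemailer');

// placeholder SMTP settings; replace with your own
var transporter = nodemailer.createTransport({
  host: 'smtp.example.com',
  port: 587,
  auth: { user: 'user@example.com', pass: 'secret' }
});

function sendPdfByMail(name, base64Pdf, callback) {
  transporter.sendMail({
    from: 'sender@example.com',
    to: 'recipient@example.com',
    subject: 'Uploaded PDF',
    attachments: [{
      filename: name,       // file name received from the client
      content: base64Pdf,   // base64 string with the data URL prefix stripped
      encoding: 'base64'
    }]
  }, callback);
}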