Upload file to node server with angular-file-upload - node.js

Apologies for my English. I'm using the MEAN stack to write my app. I looked at a few modules for uploading images and chose angular-file-upload. When I upload an image, the console shows the progress reaching 100% and the file appears in the upload directory, but it cannot be opened in an image viewer.
Here is my Angular code:
$scope.onFileSelect = function($files) {
    for (var i = 0; i < $files.length; i++) {
        var file = $files[i];
        $scope.upload = $upload.upload({
            url: '/posts/upload/',
            method: 'POST',
            file: file,
        }).progress(function(evt) {
            console.log('percent: ' + parseInt(100.0 * evt.loaded / evt.total));
        }).success(function(data, status, headers, config) {
            // file is uploaded successfully
            console.log(data);
        });
    }
};
Here is my Node.js code:
exports.upload = function(req, res) {
    var data = new Buffer('');
    req.on('data', function(chunk) {
        data = Buffer.concat([data, chunk]);
    });
    req.on('end', function() {
        req.rawBody = data;
        fs.writeFile(config.root + path.sep + 'public/upload' + path.sep + uuid.v1(), data, function(err) {
            if (err) throw err;
            console.log('ok saved');
        });
        res.send('ok');
    });
};
I guess I'm doing something wrong on the Node side, but I can't find it. Please tell me where my mistake is.

You need to send the preview URL for the uploaded image back to Angular.
On the server side:
req.on('end', function() {
    req.rawBody = data;
    var imgUrl = '/upload' + path.sep + uuid.v1();
    fs.writeFile(config.root + path.sep + 'public' + imgUrl, data, function(err) {
        if (err) throw err;
        // send back the preview url
        res.jsonp({
            imgUrl: imgUrl
        });
    });
});
On the client side:
$scope.onFileSelect = function($files) {
    for (var i = 0; i < $files.length; i++) {
        var file = $files[i];
        $scope.upload = $upload.upload({
            url: '/posts/upload/',
            method: 'POST',
            file: file,
        }).progress(function(evt) {
            console.log('percent: ' + parseInt(100.0 * evt.loaded / evt.total));
        }).success(function(data, status, headers, config) {
            // file is uploaded successfully
            console.log(data);
            $scope.imgurl = data.imgUrl;
        });
    }
};
You can then display the image like this:
<img ng-src="{{imgurl}}" ng-show="imgurl" alt="preview for uploaded image" >
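For completeness, a minimal sketch of how the upload handler above might be mounted in the Express app (the route path comes from the Angular code; the controller path is an assumption):

// Hypothetical wiring; adjust the require path to wherever exports.upload lives.
var posts = require('./controllers/posts');

app.post('/posts/upload/', posts.upload);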

Related

How do I access files from FormData of an AJAX Post

I currently have the following AJAX POST request sending the FormData() of the loaded file.
It takes however many files were uploaded, places them into a new FormData object, and then POSTs that form as data to the /api/file URL:
$('#file').on('change', function(){
    var files = $(this).get(0).files;
    $('#file').ready(function(){
        if (files.length > 0){
            var formData = new FormData();
            for (var i = 0; i < files.length; i++) {
                var file = files[i];
                console.log(file);
                var tmppath = URL.createObjectURL(event.target.files[0]);
                formData.append('userFile', tmppath);
            }
            $.ajax({
                url: '/api/file',
                type: 'POST',
                data: formData,
                processData: false,
                contentType: false,
                success: function(data){
                    console.log('upload successful!\n' + data);
                    document.getElementById('ipfs').href = 'https://ipfs.infura.io/ipfs/' + data;
                    document.getElementById('ipfs').innerHTML = data;
                },
                xhr: function() {
                    var xhr = new XMLHttpRequest();
                    xhr.upload.addEventListener('progress', function(evt) {
                        if (evt.lengthComputable) {
                            var percentComplete = evt.loaded / evt.total;
                            percentComplete = parseInt(percentComplete * 100);
                            $('.progress-bar').text(percentComplete + '%');
                            $('.progress-bar').width(percentComplete + '%');
                            if (percentComplete === 100) {
                                $('.progress-bar').html('Done');
                            }
                        }
                    }, false);
                    return xhr;
                }
            });
        }
    });
});
and the following Express Node app:
app.post('/api/file', async function(req, res){
    console.log(await req.files);
});
All it prints out is null.
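For reference, req.files is only populated once some multipart middleware has parsed the body; below is a minimal server-side sketch using multer (the field name matches the userFile key appended above; everything else is an assumption, not the original poster's setup):

// Hypothetical server sketch using multer, not the code from the question.
const express = require('express');
const multer = require('multer');

const app = express();
const upload = multer({ dest: 'uploads/' }); // store incoming files on disk

app.post('/api/file', upload.array('userFile'), function (req, res) {
    // req.files is an array of file descriptors once multer has parsed the form
    console.log(req.files);
    res.send('ok');
});

Note that the client would also need to append the File objects themselves (formData.append('userFile', file)) rather than the object URL string for any file parts to reach the server.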

Heroku Node app crashes when uploading files to S3 (message: 'The header content contains invalid characters')

I have an Express app (deployed on Heroku) that allows users to upload images, which are stored on Amazon S3. When I upload images from my local environment (Windows, with all the environment variables from Heroku), the uploads succeed.
On Heroku the app crashes with the following error:
NetworkingError: The header content contains invalid characters
Below is my code:
function createID() {
    let possible = 'abcdefghijklmnopqrstuvwyz', name = '';
    for (var i = 0; i < 8; i++) {
        name += possible.charAt(Math.floor(Math.random() * possible.length));
    }
    return name;
}

exports.upload = function(req, res) {
    const s3 = new aws.S3();
    let image = new Images();
    let busboy = new Busboy({ headers: req.headers });
    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
        let ext = path.extname(filename).toLowerCase();
        let imgID = 'uploads/content/user_' + req.user._id + '/' + createID() + ext;
        let options = {partSize: 10 * 1024 * 1024, queueSize: 1};
        const s3Params = {
            'Bucket': config.S3_BUCKET,
            'Key': imgID,
            'ContentType': mimetype,
            'Body': file
        }
        s3.upload(s3Params, options, (err, data) => {
            if (err) {
                console.log(err);
                return res.send({message: 'Oops! Something went wrong, Try Again.'});
            }
            const returnData = {
                signedRequest: data,
                url: `https://${config.S3_BUCKET}.s3.amazonaws.com/${imgID}`
            };
        });
        // OMITTED CODE NOT RELEVANT TO QUESTION
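As an aside, a commonly reported cause of this exact error on Heroku is a stray space or newline in a config var (such as an AWS key) ending up in a request header; here is a hedged sketch of defensively trimming the credentials before creating the S3 client (the variable names are assumptions):

// Hypothetical defensive trimming; a stray newline in a header value is rejected
// by Node's HTTP client with "The header content contains invalid characters".
const aws = require('aws-sdk');

aws.config.update({
    accessKeyId: (process.env.AWS_ACCESS_KEY_ID || '').trim(),
    secretAccessKey: (process.env.AWS_SECRET_ACCESS_KEY || '').trim(),
    region: (process.env.AWS_REGION || 'us-east-1').trim()
});

const s3 = new aws.S3();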

Video Upload using formidable in Nodejs, Error found: post 404 / 502

I'm uploading a video file from local to the server, and then I'll upload it to a CDN.
The issue I'm facing is that my code runs well locally but does not work when I push it to the server.
Here is my code:
commonJs
$("#uploadVideo").click(function (e) {
var reader = new FileReader();
var fileInput = document.getElementById('Videofile');
var previewUrl = window.URL.createObjectURL(fileInput.files[0]);
$(".video").attr("src", previewUrl);
var videotype = "video/mp4";
var file_data = $("#Videofile").prop("files")[0];
if (!file_data.type.match(videotype)) {
return "alert('Please upload mp4 files')"
} else {
var metadata = {
'content-type': 'video/mp4',
'size': file_data.size,
'uploaded': new Date(),
}
reader.onload = function (e) {
$("file_data").text("File Content: " + reader.result); // Show the file content
}
reader.readAsBinaryString(file_data);
file_data.onloadedmetadata = function () {
alert("Meta data for audio loaded");
};
};
var form_data = new FormData();
form_data.append("file", file_data)
form_data.append("metdata", metadata)
for (var key of form_data.entries()) {
console.log(key[0] + ', ' + key[1]);
}
if (form_data != undefined) {
$.ajax({
type: "post",
contentType: false,
processData: false,
url: "/api/recordvideo",
data: form_data,
dataType: 'json',
success: function (result) {
if (result) {
$(".video").attr("src", result.videolink);
alert("Successfully Uploaded Video");
console.log("Successfully Uploaded Video");
} else {
alert("Error on Uploading Video");
console.log("Error on Uploading Video");
}
},
error: function (err) {
console.log("error");
}
});
}
e.preventDefault();
e.stopPropagation();
});
ServerSide
app.post('/api/recordvideo', Api.recordvideo);

var Upload = require('gcs-resumable-upload');

ApiService.recordvideo = function (req, res) {
    var db = req.db;
    console.log("came in cloudupload");
    var form = new formidable.IncomingForm();
    var filesdata;
    form.keepExtensions = true;
    form.multiples = false;
    form.on('fileBegin', function (name, file) {
        file.path = 'public/demo/' + file.name;
        console.log("fileBegin: " + JSON.stringify(file));
    });
    form.on('file', function (name, file) {
        console.log('Uploaded ' + JSON.stringify(file));
        var path = file.path;
        console.log("came in cloud3 :" + JSON.stringify(path));
    });
    form.parse(req, function (err, fields, files) {
        console.log("came in cloud0" + JSON.stringify(files));
        filesdata = files;
    });
    console.log("came in cloud2");
    form.on('end', function (fields, files) {
        var userid = appconfig.ObjectID(appconfig.decrypt(req.signedCookies['gid']));
        var path = this.openedFiles[0].path;
        console.log("came in cloud3 :" + JSON.stringify(path));
        fs.createReadStream(path)
            .pipe(Upload.upload({ bucket: '******', file: path, metadata: { contentType: this.openedFiles[0].type } }))
            .on('finish', function (response) {
                console.log("Successfully Uploaded Video :" + JSON.stringify(response));
                res.send({ "status": false, "videolink": "https://****/****/" + filesdata.file.name });
            });
    });
    //res.send({ "status": false, "err": null });
}
At first it was at least uploading to the server folder, and in Chrome developer tools it used to give the response {readystate: 4, ...}.
Now, after I made some changes, it doesn't even hit my API; after a few seconds Chrome developer tools shows a 404 or 502 error.
Well, I got the solution. Previously I was using the gcs-resumable-upload module to upload, but now I tried the '@google-cloud/storage' module, with which I was able to upload up to 9 MB.
const Storage = require('@google-cloud/storage');

var db = req.db;
console.log("came in cloudupload");
var form = new formidable.IncomingForm();
var filesdata;
form.keepExtensions = true;
form.multiples = false;
form.parse(req, function (err, fields, files) {
    filesdata = files;
});
form.on('end', function (fields, files) {
    var userid = appconfig.ObjectID(appconfig.decrypt(req.signedCookies['gid']));
    var path = this.openedFiles[0].path;
    const storage = new Storage({
        keyFilename: 'gcloudcred.json'
    });
    const myBucket = storage.bucket('onfvideo');
    myBucket.upload(path).then((resp) => {
        console.log('uploaded to' + resp);
        res.send({ "status": true, "err": null });
    }).catch(err => {
        console.error('ERROR:', err);
        res.send({ "status": false, "err": null });
    });
});
};
The 9 MB limit I was hitting was due to the .NET Framework data-transfer limit, which I was able to resolve with:
<system.web>
    <customErrors mode="Off"/>
    <httpRuntime targetFramework="4.5" maxRequestLength="7483648" />
</system.web>
Method 2: Using XHR to call the REST API
1. Generate an access token using the google-auto-auth module (a sketch of this step follows the XHR code below).
2. Upload with XMLHttpRequest:
var fileInput = $("#Videofile").prop("files")[0];
var url = "https://www.googleapis.com/upload/storage/v1/b/bucketname/o?uploadType=media&name=" + fileInput.name;
var http = new XMLHttpRequest();
http.open('POST', url, true);
http.setRequestHeader('Content-type', 'video/mp4');
http.setRequestHeader("Authorization", "Bearer " + token);
http.send(fileInput);
http.onprogress = function (ev) {
    if (ev.lengthComputable) {
        var percentage = Math.round((ev.loaded / ev.total) * 100);
        console.log("percent " + percentage + '%');
    } else {
        console.log("Unable to compute progress information since the total size is unknown");
    }
}
http.onloadstart = function (ev) { console.log("start") }
http.onloadend = function (ev) { }
http.onreadystatechange = function () {
    if (http.readyState == 4 && http.status == 200) {
        var response = JSON.parse(http.responseText);
        alert("Successfully Uploaded Video");
    }
}
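For step 1, a minimal sketch of obtaining the bearer token used in the Authorization header above with google-auto-auth (the key file name and scope are assumptions):

// Hypothetical token generation with google-auto-auth; adjust keyFilename/scopes.
var googleAuth = require('google-auto-auth')({
    keyFilename: 'gcloudcred.json',
    scopes: ['https://www.googleapis.com/auth/devstorage.read_write']
});

googleAuth.getToken(function (err, token) {
    if (err) throw err;
    // hand this token to the client so it can set the Authorization header
    console.log('Bearer ' + token);
});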

Read files from folders inside a folder

So basically I have a folder which holds other folders, and each folder has its own set of images that I would like to display in a 'ul'. The problem is that, since I'm using readdir, which is async, how can I write the response without getting a "Write after .end()" error? Here is what my code looks like.
var fs = require('fs'),
    url = require('url');

module.exports = function(req, res) {
    req.pathName = req.pathName || url.parse(req.url).pathname;
    if (req.pathName === '/gallery') {
        fs.readdir('./content/images/public', function(err, filenames) {
            if (err) {
                console.log(err);
                return;
            }
            if (filenames.length) {
                var list_content = '';
                for (var i = 0; i < filenames.length; i++) {
                    fs.readdir('./content/images/public/' + filenames[i], function(err, images) {
                        if (err) {
                            console.log(err);
                            return;
                        }
                        for (var i = 0; i < images.length; i++) {
                            list_content += '<li>' + images[i] + '</li>';
                        }
                        var list = '<ul>' + list_content + '</ul>Go back to homepage';
                        res.writeHead(200, {
                            'Content-Type': 'text/html'
                        });
                        res.send(list);
                    });
                }
            } else {
                res.writeHead(200, {
                    'Content-Type': 'text/html'
                });
                res.write('<p>There are no images in the gallery</p>Go back to homepage');
                res.end();
            }
        });
    } else {
        return true;
    }
}
res.end();
}
});
Instead of using res.write() and res.end(), try using res.send(), which does both for you. UPDATE: Also, make sure this code is within a request callback where the req and res objects are parsed correctly.
for (var i = 0; i < images.length; i++) {
    list_content += '<li>' + images[i] + '</li>';
}
var list = '<ul>' + list_content + '</ul>Go back to homepage';
res.send(list);
Then on the front end, receive it in the AJAX response callback, like:
$.post('xxxx', function(response){
    alert(response); //list
});
UPDATE: After seeing your updated code,
module.exports = function(req, res) {
doesn't make sense; it needs to be something like
exports.functionName = function(req, res) {
Read this:
https://www.sitepoint.com/understanding-module-exports-exports-node-js/
module.exports is an object of functions, not a function. It looks like this:
module.exports = {
    funcExample: function() {
        return 2;
    },
    funcOtherExample: function() {
        return 1;
    }
};
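A short usage sketch of an exports object like this (the file name is an assumption):

// Hypothetical usage, assuming the object above is exported from example.js
var example = require('./example');

console.log(example.funcExample());      // 2
console.log(example.funcOtherExample()); // 1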

Video from Mongo Grid fs is not playing on Safari browser (also on Cordova app)

I am using MongoDB to store video files in GridFS. It surprised me today to find that the video does not play in the Safari browser, although video read from GridFS plays fine in Chrome and Firefox. Following are the two approaches I use to read video files back from GridFS; both have the same problem. I verified that the correct MIME type is being set.
Approach 1:
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
    //Read metadata details from fs.files
    var query = {_id: contentId};
    documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
        if (!Utilities.isEmptyList(files)) {
            var fileObj = files[0];
            var gridStore = DBModule.db.gridStore(contentId, 'r');
            gridStore.open(function (err, gridStore) {
                var stream = gridStore.stream(true);
                if (!Utilities.isEmptyObject(fileObj.metadata)) {
                    res.setHeader('Content-Type', fileObj.metadata.contentType);
                }
                stream.on("data", function (chunk) {
                    log.debug("Chunk of file data");
                    res.write(chunk);
                });
                stream.on("end", function () {
                    log.debug("EOF of file");
                    res.end();
                });
                stream.on("close", function () {
                    log.debug("Finished reading the file");
                });
            });
        } else {
            log.error({err: err}, 'Failed to read the content for id ' + contentId);
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            res.json({error: contentId + " not found"});
        }
    });
};
Approach 2:
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
    //Read metadata details from fs.files
    var query = {_id: contentId};
    documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
        if (!Utilities.isEmptyList(files)) {
            var fileObj = files[0];
            var gridStore = DBModule.db.gridStore(contentId, 'r');
            gridStore.read(function (err, data) {
                if (!err) {
                    if (!Utilities.isEmptyObject(fileObj.metadata)) {
                        res.setHeader('Content-Type', fileObj.metadata.contentType);
                    }
                    res.end(data);
                } else {
                    log.error({err: err}, 'Failed to read the content for id ' + contentId);
                    res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
                    res.json({error: err});
                }
            });
        } else {
            log.error({err: err}, 'Failed to read the content for id ' + contentId);
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            res.json({error: contentId + " not found"});
        }
    });
};
Below is a screenshot from Safari for reference.
Please help
Try this GIST (by https://gist.github.com/psi-4ward)
It makes use of the byte range header
https://gist.github.com/psi-4ward/7099001
Although it does not work for me with Safari, it makes sure that the correct headers are set and the correct content is delivered. It could help narrow down your problem.
EDIT
I've updated the gist. It now works fine with Safari for me:
https://gist.github.com/derMani/218bd18cc926d85a57a1
This should solve your problem
function StreamGridFile(req, res, GridFile) {
    if (req.headers['range']) {
        // Range request, partially stream the file
        console.log('Range Request');
        var parts = req.headers['range'].replace(/bytes=/, "").split("-");
        var partialstart = parts[0];
        var partialend = parts[1];
        var start = parseInt(partialstart, 10);
        var end = partialend ? parseInt(partialend, 10) : GridFile.length - 1;
        var chunksize = (end - start) + 1;
        res.writeHead(206, {
            'Content-disposition': 'filename=xyz',
            'Accept-Ranges': 'bytes',
            'Content-Type': GridFile.contentType,
            'Content-Range': 'bytes ' + start + '-' + end + '/' + GridFile.length,
            'Content-Length': chunksize
        });
        // Set filepointer
        GridFile.seek(start, function() {
            // get GridFile stream
            var stream = GridFile.stream(true);
            // write to response
            stream.on('data', function(buff) {
                // count data to abort streaming if range-end is reached
                // perhaps there's a better way?
                if (start >= end) {
                    // enough data sent, abort
                    GridFile.close();
                    res.end();
                } else {
                    res.write(buff);
                }
            });
        });
    } else {
        // stream back whole file
        console.log('No Range Request');
        res.header('Content-Type', GridFile.contentType);
        res.header('Content-Length', GridFile.length);
        var stream = GridFile.stream(true);
        stream.pipe(res);
    }
}
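As a hedged sketch of how StreamGridFile might be wired into the previewFile route from the question, reusing its DBModule/gridStore helpers (the error handling is illustrative):

// Hypothetical wiring of StreamGridFile into the question's preview route.
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    var gridStore = DBModule.db.gridStore(contentId, 'r');
    gridStore.open(function (err, GridFile) {
        if (err) {
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            return res.json({error: err});
        }
        // Serves a 206 partial response when Safari sends a Range header
        StreamGridFile(req, res, GridFile);
    });
};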
Regards
Rolf
