Valums file-uploader on nodejs - multiple file upload - node.js

I'm using valums ajax file-uploader
My nodejs server side looks like this:
// Stream the raw request body (the uploaded file bytes) straight to disk.
// NOTE(review): rndname and the surrounding handler are defined elsewhere;
// this assumes the uploader sends the file as the bare request body (XHR
// upload), not as multipart/form-data — TODO confirm.
fileStream = fs.createWriteStream(__dirname+'/../public/images/houses/'+rndname);
req.pipe(fileStream);
req.on('end', function() {
// Reply with a JSON payload. Content-Length uses the string length, which
// is only correct because the payload here is pure ASCII.
body = '{"success":"true", "name": "'+rndname+'"}';
res.writeHead(200,
{ 'Content-Type':'text/plain'
, 'Content-Length':body.length
});
res.end(body);
});
client side:
// Attach a qq.FileUploader to the #homepic container once the page loads.
// Every completed upload appends the stored image to the same container.
function createUploader(){
  var settings = {
    element: document.getElementById('homepic'),
    action: '/server/upload',
    allowedExtensions: ['jpg', 'png', 'gif'],
    multiple: true,
    // The server answers with {"success":"true","name":"<file>"}; use the
    // returned name to display the freshly uploaded picture.
    onComplete: function(id, fileName, responseJSON){
      $("#homepic").append("<img src='/images/houses/"+responseJSON.name+"' class='mediumpic' /> ");
    }
  };
  new qq.FileUploader(settings);
}
window.onload = createUploader;
This all works for single file upload just great!
So imagine - I press the upload button, choose a pic, it uploads really fast, and shows up on screen.
Now i want to upload another one. I choose pic, it uploads on server fast (i see it on server), i get back the response of new filename and success, i put the picture on screen with my append. But the picture does not show up. I try open in new tab just the pic and still nothing even though i see it on the server standing in the right dir. After like 3-5 min of waiting it just shows up, without even page refresh needed. Whats causing this behavior? Is it the piping and i need to call Mario to fix it or something else? :)

Changed my req.pipe to req.on('data') and it started to work. Somehow it seems like my req.pipe didn't close the connection and was "expecting" more data to come even though the whole file had been uploaded. That is why I could not GET the file and it stayed in "pending" status.
This fixed my problems:
// Manually forward each incoming chunk to the write stream (ws) instead of
// req.pipe(ws) — the asker found pipe left the connection hanging.
req.on('data', function(data) {
ws.write(data);
});
// NOTE(review): 'end' listeners receive no argument, so `data` is always
// undefined here. ws is also never closed; ws.end() should be called before
// replying — TODO confirm against the full handler.
req.on('end', function(data) {
var body = '{"success":"true", "name": "'+rndname+'"}';
res.writeHead(200,
{ 'Content-Type':'text/plain'
, 'Content-Length':body.length
});
res.end(body);
});
Even though I found a solution to my problem, if anyone knows why req.pipe didn't close the connection and was stuck hanging for 2-3 minutes until the pic appeared, let me know.

I've created an Express 3.x middleware component that allows you to access valums/fineuploader uploaded files through the req.files collection.
All you need to do is add the middleware during your Express configuration, like so:
// Register the fine-uploader middleware after the standard cookie/body
// parsers so uploaded files appear on req.files.
var fineuploaderExpressMiddleware = require('fineuploader-express-middleware');
app.use(express.cookieParser());
app.use(express.bodyParser());
// Fix: the upload directory had a trailing space ('/tmp ') which would
// create and use a literal "tmp " directory on most filesystems.
app.use(fineuploaderExpressMiddleware({ uploadDir: '/tmp' }));
The component is available here: https://github.com/suprememoocow/fineuploader-express-middleware

Related

Node spawn process

Unable to find out the issue in following script, what i want to achieve with the script is to have a node log server that would listen to post requests with log title and log details as query parameters, write to a file and then throw back as json on get request.
Problem:
It constantly shows loader sometime and gives the required log sometime.
Note:
The process spawning is done to update the browser during the logging; if someone has a better solution, please suggest it.
Post Call:
http://127.0.0.1:8081/log?title="test"&detail="test detail"
Code:
var express = require("express");
var spawn = require('child_process').spawn;
var fs = require("fs");
var srv = express();
var outputFilename = '/tmp/my.json';
// Flatten a request-like context into a single plain object by merging, in
// order, its route params, body fields and query-string fields. Later
// sources overwrite earlier ones when keys collide.
function getParamsObject(context) {
  var merged = {};
  var sources = [context.params, context.body, context.query];
  for (var i = 0; i < sources.length; i++) {
    // for...in matches the original behavior: every enumerable property
    // of each source is copied (and a missing source is silently skipped).
    for (var key in sources[i]) {
      merged[key] = sources[i][key];
    }
  }
  return merged;
}
// Simple liveness/demo routes.
srv.get("/", function(req, res) {
res.send("Hello World From Index\n");
});
srv.get("/Main", function(req, res) {
res.send("Hello World From Main\n");
});
// Return the raw contents of example_one.txt as plain text.
srv.get("/ReadFile", function(req, res) {
fs.readFile("example_one.txt", function(err, data) {
// NOTE(review): throwing inside an async callback cannot be caught by
// Express and will crash the process; prefer reporting the error.
if(err) throw err;
res.send(data.toString());
});
});
// Same file, but parsed and served as JSON.
srv.get("/ReadFileJSON", function(req, res) {
fs.readFile("example_one.txt", function(err, data) {
if(err) throw err;
res.setHeader("content-type", "application/json");
// NOTE(review): Parser is never required/defined in this snippet, so this
// route throws a ReferenceError as written — TODO confirm which parser
// module was intended.
res.send(new Parser().parse(data.toString()));
});
});
// POST /log — persist a {Date, Title, Detail} log entry to outputFilename.
// Fix: the original replied {"message":"Saved"} unconditionally, before the
// write finished and even when it failed or when no detail was supplied.
// The response is now sent from the writeFile callback.
srv.post("/log", function(req, res) {
var input = getParamsObject(req);
res.setHeader("content-type", "application/json");
if (!input.detail) {
// Nothing to persist — say so instead of claiming success.
res.send({message: "No detail supplied, nothing saved"});
return;
}
var myData = {
Date: (new Date()).toString(),
Title: input.title,
Detail: input.detail
};
fs.writeFile(outputFilename, JSON.stringify(myData, null, 4), function(err) {
if (err) {
console.log(err);
res.statusCode = 500;
res.send({message: "Failed to save log entry"});
return;
}
res.send({message: "Saved"});
});
});
// GET /log — stream the log file to the client, tail -f style.
// Fix: http.ServerResponse does not emit an 'end' event, so the original
// listener never fired and every request leaked a tail child process.
// Listen for 'close' (connection terminated) on the request instead.
srv.get("/log", function(req, res) {
var child = spawn('tail', ['-f', outputFilename]);
child.stdout.pipe(res);
req.on('close', function() {
child.kill();
});
});
srv.listen(8081);
console.log('Server running on port 8081.');
To clarify the question...
You want some requests to write to a log file.
You want to effectively do a log tail over HTTP, and are currently doing that by spawning tail in a child process.
This isn't all that effective.
Problem: It constantly shows loader sometime and gives the required log sometime.
Web browsers buffer data. You're sending the data, sure, but the browser isn't going to display it until a minimum buffer size is reached. And then, there are rules for what will display when all the markup (or just text in this case) hasn't loaded yet. Basically, you can't stream a response to the client and reliably expect the client to do anything with it until it is done streaming. Since you're tailing a log, that puts you in a bad predicament.
What you must do is find a different way to send that data to the client. This is a good candidate for web sockets. You can create a persistent connection between the client and the server and then handle the data immediately rather than worrying about a client buffer. Since you are using Node.js already, I suggest looking into Socket.IO as it provides a quick way to get up and running with web sockets, and long-polling JSON (among others) as a fallback in case web sockets aren't available on the current browser.
Next, there is no need to spawn another process to read a file in the same way tail does. As Trott has pointed out, there is an NPM package for doing exactly what you need: https://github.com/lucagrulla/node-tail Just set up an event handler for the line event, and then fire a line event on the web socket so that your JavaScript client receives it and displays it to the user immediately.
There are a couple of things that seem to stand out as unnecessary complications that may be the source of your problem.
First, the spawn seems unnecessary. It appears you want to open a file for reading and get updated any time something gets added to the file. You can do this in Node with fs.watch(), fs.watchFile(), or the node-tail module. This may be more robust than using spawn() to create a child process.
Second (and less likely to be the source of the problem, I think), you seem to be using query string parameters on a POST request. While not invalid, this is unusual. Usually, if you are using the POST method, you send the data via post, as part of the body of the request. If using the GET method, data is sent as a query string. If you are not using the body to send data, switch to GET.

CasperJS and downloading a file via iFrame and JavaScript

I have a script to test that - on click - generates an iFrame which downloads a file. How can I intercept the response with CasperJS?
I already tried the sequence:
// Click the element that spawns the download iframe, then try to inspect it.
casper.click('element');
casper.withFrame('frame', function(){
console.log(this.getCurrentUrl()); // only return about:blank, but should have URL
console.log("content: " + this.getHTML()); // Yep, empty HMTL
// NOTE(review): the handler is attached after the iframe's resources were
// requested, which is presumably why it never fires — bind it before the
// click instead. TODO confirm.
this.on('resource.received', function(resource){
console.log(resource.url); // never executed
});
});
I need the content of the file but can not really produce the URL without clicking the element or changing the script I'm testing.
Ideas?
I tried other events, but none got fired when downloading via the iframe. I found another solution that works - but if you have something better, I'd like to try it.
Here it comes:
// Check downloaded File
.then(function(){
// Fetch URL via internals
var url = this.evaluate(function(){
return $('__saveReportFrame').src; // JavaScript function in the page
});
var file = fs.absolute('plaintext.txt');
this.download(url, file);
var fileString = fs.read(file);
// Whatever test you want to make
test.assert(fileString.match(/STRING/g) !== null, 'Downloaded File is good');
})

Sending additional data with programatically created Dropzone using the sending event

I have the following (simplified for example) angular directive which creates a dropzone
// Angular directive (class-restricted) that turns its element into a
// Dropzone and attaches extra form data to every upload.
directives.directive('dropzone', ['dropZoneFactory', function(dropZoneFactory){
'use strict';
return {
restrict: 'C',
link : function(scope, element, attrs){
new Dropzone('#'+attrs.id, {url: attrs.url});
var myDropZone = Dropzone.forElement('#'+attrs.id);
myDropZone.on('sending', function(file, xhr, formData){
//this gets triggered
console.log('sending');
// Fix: formData is a FormData instance — assigning a plain property
// (formData.userName = 'bob') is never serialized into the request.
// Fields must be added with FormData.append().
formData.append('userName', 'bob');
});
}
}
}]);
As you can see, in the sending event handler I'm trying to send the username ("bob") along with the uploaded file. However, I can't seem to retrieve it in my route middleware, as req.params comes back as an empty array (I've also tried req.body).
My node route
// Route definition consumed elsewhere; bodyParser populates req.files for
// multipart uploads.
{
path: '/uploads',
httpMethod: 'POST',
middleware: [express.bodyParser({ keepExtensions: true, uploadDir: 'uploads'}),function(request,response){
// comes back as []
// NOTE(review): req.params only holds route placeholders; multipart text
// fields arrive on req.body once they are appended to the FormData.
console.log(request.params);
//this sees the files fine
console.log(request.files);
response.end("upload complete");
}]
}
Here is what the docs say on the sending event
Called just before each file is sent. Gets the xhr object and the formData objects as second and third parameters, so you can modify them (for example to add a CSRF token) or add additional data.
EDIT
I dropped the programmatic approach for now. I have two forms submitting to the same endpoint, a regular one with just post and a dropzone one. Both work, so I don't think it's an issue with the endpoint rather with how I handle the 'sending' event.
//Receives the POST var just fine
form(action="http://127.0.0.1:3000/uploads", method="post", id="mydz")
input(type="hidden", name="additionaldata", value="1")
input(type="submit")
//With this one I can get the POST var
form(action="http://127.0.0.1:3000/uploads", method="post", id="mydz2", class="dropzone")
input(type="hidden", name="additionaldata", value="1")
OK, I've actually figured it out, thanks to Using Dropzone.js to upload after new user creation, send headers
The sending event:
// Append extra fields through the FormData API so they are serialized into
// the multipart request body (plain property assignment is ignored).
myDropZone.on('sending', function(file, xhr, formData){
formData.append('userName', 'bob');
});
As opposed to formData.userName = 'bob' which doesn't work for some reason.
I would like to add to NicolasMoise's answer.
As a beginner in webdev I got stuck on how to obtain an instance of Dropzone. I wanted to retrieve an instance of Dropzone that had been generated by the autodiscovery feature. But it turns out that the easiest way to do this is to manually add a Dropzone instance after first telling Dropzone not to auto-discover.
<input id="pathInput"/>
<!-- Fix: <div> is not a void element; a self-closing <div/> is invalid HTML
     and browsers treat it as an unclosed open tag. -->
<div id="uploadForm" class="dropzone"></div>
<script>
$(document).ready(function(){
// Disable auto-discovery so we control instantiation and keep a reference
// for attaching the 'sending' handler.
Dropzone.autoDiscover = false;
var dZone = new Dropzone("div#uploadForm", {url: "/api/uploads"});
dZone.on("sending", function(file, xhr, data){
data.append("uploadFolder", $("#pathInput")[0].value);
});
});
</script>
Serverside the data will be in request.body.uploadFolder
Nicolas answer is one possible solution to the problem. It is especially useful if you need to alter the file object prior to sending.
An alternative is to use the params option:
// Static extra POST fields can also be supplied up front via the `params`
// option instead of hooking the 'sending' event.
var myDropzone = new Dropzone("div#myId",
{ url: "/file/post", params: { 'param_1': 1 }});
cf. the documention
For those that are using thatisuday/ng-dropzone the callback methods are done as such:
<ng-dropzone class="dropzone" options="dzOptions" callbacks="dzCallbacks" methods="dzMethods"></ng-dropzone>
In a controller:
// ng-dropzone exposes Dropzone events through a callbacks object; `form`
// here is the FormData instance for the outgoing request.
$scope.dzCallbacks = {
sending: function(file, xhr, form) {
console.log('custom sending', arguments);
form.append('a', 'b');
}
};

Strange stream behaviour node.js/knox

I have been working on the answer from this question located here: How to make a socket a stream? — to connect an https response to S3 after imagemagick. As per loganfsmyth's recommendation I commented out the req.end(image) line; however, when I attempt to upload a file the server simply times out. I experience similar behaviour when I uncomment the req.end(image) line, with the exception that the image then successfully uploads to S3. Can someone clarify which way is correct? Also, if it is right to uncomment the req.end(image) line, what is the best way to send a response to the browser to prevent it from timing out?
// Fetch the source image, resize it to 50x50 with GraphicsMagick, buffer the
// resized bytes, then PUT them to S3 via knox.
// Fixes: removed the stray `ws. = fs.createWriteStream(output)` line (a
// syntax error, and the stream was never used), and restored req.end(image)
// — with put() (unlike putStream) the body must be written explicitly, or
// only headers are sent and the request hangs.
https.get(JSON.parse(queryResponse).data.url, function(res){
graphicsmagick(res)
.resize('50','50')
.stream(function (err, stdout, stderr) {
var chunks = [];
stdout.on('data', function(data){
chunks.push(data);
});
stdout.on('close', function(){
var image = Buffer.concat(chunks);
var req = S3Client.put("new-file-name", {
'Content-Length' : image.length
,'Content-Type' : res.headers['content-type']
});
req.on('response', function(res){ //prepare 'response' callback from S3
if (200 == res.statusCode)
console.log('it worked');
});
// Send the buffered image body and finish the S3 request.
req.end(image);
});
});
});
Basically the page was being requested twice which caused the image to be overwritten because of the favicon. node.js page refresh calling resources twice?
In the question you link to, the user was using putStream, so calling req.end() is incorrect, however in your case you are using put directly, so you need to call req.end(). Otherwise with it commented out, you never actually use the image value, except for the length, so you never send the image data.
It is hard to tell without seeing the server handler that actually runs this code, but you need to (optionally) return some response, and then .end() the actual connection to the browser too, or it will set there waiting.
So if you have something like this
// Sketch of the enclosing handler: once S3 confirms the upload, finish the
// browser's response or the client will sit waiting until it times out.
http.createServer(function(req, browserResponse){
// Other code.
req.on('response',function(s3res){ //prepare 'response' callback from S3
if (200 == s3res.statusCode) console.log('it worked')
// Close the response. You also pass it data to send to the browser.
browserResponse.end();
})
// Other code.
});

Sending text to the browser

I have managed to get file uploading work in Node.js with Express, and in the code i'm checking whether it's an image or not that the user is trying to upload.
If the file was successfully uploaded I want to show a message to the user, directly to the HTML page with the uploading form. The same should be if the file the user tried to upload wasn't an image, or something else happened during the upload.
The code below works (res.send...) but it opens up a new page containing only the message.
My question is: How can I change my code so that the message is sent directly to the HTML page instead? If it could be of any use, i'm using Jade.
Thanks in advance!
// POST /file-upload — accept an image upload, move it from its temp path
// into ./public/images, and report the outcome to the client.
// Fixes: (1) the fs.unlink of tmp_path was dropped — rename already moved
// the file, so the unlink always failed; its callback also omitted the err
// parameter and re-checked the outer (always-null) rename err. (2) the
// original `throw err` made the error res.send unreachable and throwing in
// an async callback would crash the server; errors are now reported.
app.post('/file-upload', function(req, res, next) {
var fileType = req.files.thumbnail.type;
var divided = fileType.split("/");
var theType = divided[0];
if (theType === "image"){
var tmp_path = req.files.thumbnail.path;
var target_path = './public/images/' + req.files.thumbnail.name;
fs.rename(tmp_path, target_path, function(err) {
if (err) {
console.log(err);
res.send('Something happened while trying to upload, try again!');
return;
}
res.send('File uploaded to: ' + target_path + ' - ' + req.files.thumbnail.size + ' bytes');
});
}
else {
res.send('No image!');
}
});
from what I understand you are trying to send a message to an already open browser window?
a few things you can do,
Ajax it, send the post, and process the return info.
Submit it as you are doing now, but set a flash message (look at http://github.com/visionmedia/express-messages) and either res.render the form page, or res.redirect to the form function
now.js or a similar solution. This would let clientside use serverside functions and serverside code to run clientside functions. So what you would do would be on submit, pass the post values to a serverside function, which will process it and trigger a clientside function (display a message)
For my money option #2 is probably the safest bet, as clients without javascript enabled will be able to use it. As for usability #1 or #3 would give a more streamlined appearance to the end user.
You can use WebSockets. I recommend using Socket.IO, it's very easy to work with. On the client-side you would have an event-handler which would use JavaScript to append the new information to that page.
You could then have the server for example say:
socket.emit('error', "Something happened while trying to upload, try again!");
and the client would use:
// Client side: react to the server's emit. NOTE(review): 'error' is a
// reserved event name in Socket.IO — prefer a custom name such as
// 'upload_error' to avoid clashing with transport-level errors.
socket.on('error', function(data){
//alert?
alert(data);
});
http://socket.io/#how-to-use

Resources