I'm writing an app in Express.js / Node.js.
I have a form with an input of type "file" for uploading images.
I want my images uploaded directly, without user confirmation, then cropped, stored in a directory, and displayed on the user's screen.
To do that, I use the socketio-file-upload library.
It works great on a computer. I can upload my images and display them.
The problem is with the iPad (I haven't tried the iPhone yet).
In the web app, when I tap the "browse" button, I can choose an existing picture or take a picture. I tried to take a picture and upload it, but nothing happens...
Here is the server code for the upload:
io.of('/register').on('connection', function (socket) {
    /*************** FILE UPLOAD ***********************/
    // Make an instance of socketioFileUploadServer and listen on this socket:
    var uploader = new socketioFileUploadServer();
    uploader.dir = __dirname + '/client/tmp/markers';
    uploader.listen(socket);

    // Do something when a file is saved:
    uploader.on('saved', function (event) {
        console.log('Original saved');
        // resize and rename image with a unique id
        var newName = Math.random().toString(36).substr(2, 9);
        // marker 32x32
        easyimg.rescrop({
            src: path.resolve(__dirname, 'client/tmp/markers/' + event.file.name),
            dst: path.resolve(__dirname, 'client/tmp/markers/' + newName + '_marker'),
            width: 32,
            height: 32
        }, function (err, image) {
            if (err) throw err;
            console.log('Resized and cropped: ' + image.width + ' x ' + image.height);
            // marker is uploaded - resized - cropped, now display it
            socket.emit('displayMarker', {markerPath: '/tmp/markers/' + newName + '_marker', markerName: newName});
            // remove the original from the file system
            fs.unlink(path.resolve(__dirname, 'client/tmp/markers/' + event.file.name), function (err) {
                if (err) throw err;
                console.log('Original removed');
            });
        });
    });

    uploader.on('start', function (event) {
        console.log('Client start upload');
        socket.emit('displayOverlay');
    });

    // Error handler:
    uploader.on('error', function (event) {
        console.log("Error from uploader", event);
    });
});
And the client side:
document.addEventListener("DOMContentLoaded", function () {
    // REGISTRATION MARKER
    var socketMarker = io.connect('/register');
    var siofuMarker = new SocketIOFileUpload(socketMarker);
    siofuMarker.listenOnInput(document.getElementById("uploadMarkerFromExploratorInput"));

    // Do something when a file is uploaded:
    siofuMarker.addEventListener("complete", function (event) {
        $('.overlay').hide();
    });

    // display loader window
    socketMarker.on('displayOverlay', displayOverlay);

    // the server says we can display the marker in the register step1 view
    socketMarker.on('displayMarker', function (data) {
        $('#markerImage').html('<img src="' + data.markerPath + '" />');
        $('#markerImageName').val(data.markerName);
    });
});
With a computer, the console displays:
Client start upload
Original saved
Resized and cropped: 32 x 32
Original removed
And with the iPad:
Client start upload
Does someone have an idea, or another way to achieve the same result?
Appreciate your help. C.
As I suspected, you need to add the proper attributes to the HTML markup for the File Input element:
<input type="file" accept="image/*" capture="camera">
That should do the trick. The most important part to understand is the "accept" attribute: accept="image/*" tells the browser to offer only images (from the photo library or the camera), while capture="camera" hints that the camera should be used.
I have an Electron app which uploads a dropped file to a predefined server with node-ftp. The upload works like a charm, but despite reading a couple of suggestions I cannot figure out how to get information on the actual progress for a progress-bar.
My upload-code so far:
// node-ftp exposes its client as the 'ftp' package
var Client = require('ftp');
var fs = require('fs');

var ftp = new Client();
let uploadfile = fs.createReadStream(f.path);
let newname = uuid(); // some function I use for renaming

ftp.on('ready', function () {
    ftp.put(uploadfile, newname, function (err) {
        if (err) throw err;
        ftp.end();
    });
});

ftp.connect({user: 'test', password: 'test'});
I keep stumbling across suggestions to monitor the 'data' event, but could not figure out how or where to access it (as you can see, I'm quite new to JavaScript).
Got it. I found the answer in streams with percentage complete
With my code changed to
var ftp = new Client();
let uploadfile = fs.createReadStream(f.path);
let newname = uuid(); // some function I use for renaming
let uploadedSize = 0; // bytes sent so far

ftp.on('ready', function () {
    uploadfile.on('data', function (buffer) {
        var segmentLength = buffer.length;
        uploadedSize += segmentLength;
        console.log("Progress:\t" + ((uploadedSize / f.size * 100).toFixed(2) + "%"));
    });

    ftp.put(uploadfile, newname, function (err) {
        if (err) throw err;
        ftp.end();
    });
});

ftp.connect({user: 'test', password: 'test'});
I get the uploaded percentage in the console. From here it's only a small step to a graphical output.
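For example, the same byte count can drive an HTML progress element instead of the console (a sketch only, assuming the upload code runs in the renderer process and the page has a hypothetical <progress id="uploadProgress"> element, which is not part of the original code):

// Hypothetical markup in the window: <progress id="uploadProgress"></progress>
var progressBar = document.getElementById('uploadProgress');
progressBar.max = f.size;   // upper limit = file size in bytes
progressBar.value = 0;

uploadfile.on('data', function (buffer) {
    uploadedSize += buffer.length;
    progressBar.value = uploadedSize;   // replaces the console.log above
});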
On the client side you can create a byte count for your upload stream (http://www.experts.exchange.com/questions/24041115/upload-file-on-ftp-with-progressbar-and-time-left.html):
set the lower limit of the progress bar to 0,
set the upper limit to the length of the upload file,
feed the progress bar with the byte count
(http://www.stackoverflow.com/questions/24608048/how-do-i-count-bytecount-in-read-method-of-inputstream).
Maybe you can use an npm package like stream-meter (https://www.npmjs.com/package/stream-meter) or progress-stream (https://www.npmjs.com/package/progress-stream) and pipe your file stream through it to feed the progress bar; see the sketch below. I am not sure about that because I do not know the internals of those packages, but progress-stream has a transferred() function that would fit exactly.
A very accurate way is to have code on the server that gives feedback to the browser (http://www.stackoverflow.com/questions/8480240/progress-bar-for-iframe-uploads).
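For illustration, a rough sketch with progress-stream (assuming the same f, newname and ftp objects as above; not tested against the asker's setup):

var progress = require('progress-stream');
var fs = require('fs');

var meter = progress({ length: f.size, time: 100 /* emit every 100 ms */ });
meter.on('progress', function (p) {
    console.log('Progress: ' + p.percentage.toFixed(2) + '%'); // feed the progress bar here
});

ftp.on('ready', function () {
    // pipe the read stream through the meter before handing it to ftp.put()
    ftp.put(fs.createReadStream(f.path).pipe(meter), newname, function (err) {
        if (err) throw err;
        ftp.end();
    });
});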
I'm trying to set up communication between a Node.js server and a Red Pitaya card.
The client connects to the web server and uses a form to choose some settings; they are sent to the Red Pitaya, which sends back a big string of numbers.
Then the web server writes them to a downloadable file.
It only works intermittently.
Here is the server:
var settings = require('./settings_model');
var fs = require('fs');
var net = require('net');

var msg = [];

module.exports = function (app) {
    net.createServer(function (socket) {
        socket.on('data', function (data) {
            /* test console */
            var msg = JSON.stringify(data);
            console.log("data received :" + msg);

            tab = new Array();
            for (i = 0; i < nbPix; i++) {
                tab[i] = new Array();
                fs.appendFile('./public/img.txt', '\n');
                for (var j = 0; j < len; j++) {
                    tab[i][j] = data[i + (j * nbPix)];
                    if (!isNaN(tab[i][j])) {
                        fs.appendFile('./public/img.txt', tab[i][j] + " ");
                    }
                }
            }
        });

        app.post('/api/sendSettings', function (req, res) {
            // creation of the file
            fs.writeFile("./public/img.txt", "", function (err) {
                if (err) { console.log(err); }
                console.log("the file was saved");
            });
            // here we send the settings to the red pitaya
            socket.write(input);
            res.end();
        });
    }).listen(9000);
};
For tiny values it sometimes works. I can see the log:
data received :{"type":"Buffer","data":[1,1,....2]}
But for the same values, the data can arrive split into several chunks, which breaks my 2d array:
data received :{"type":"Buffer","data":[1,1,....1]}
data received :{"type":"Buffer","data":[2,2,....2]}
And if the data received is too big, the system crashes with:
Error : EMFILE: too many open files, open './public/img.txt'
Is there a way to set the size of the received stream? I think I need all the data before I can build my 2d array.
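What I imagine is something like this (just a sketch of the idea, assuming one byte per value so the full message is nbPix * len bytes):

var chunks = [];
var received = 0;
var expected = nbPix * len;   // total number of bytes the Red Pitaya should send

socket.on('data', function (data) {
    chunks.push(data);
    received += data.length;
    if (received >= expected) {
        var full = Buffer.concat(chunks);   // the complete message
        chunks = [];
        received = 0;
        // build the 2d array from 'full' and write img.txt once here
    }
});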
For the EMFILE error, I've tried changing settings like the "open files" limit, but it still doesn't work. It shouldn't be trying to open more than one file anyway, should it?
I'm new to this kind of project, so any hints or links to docs would be gladly accepted!
After running the following code, the file handle is still open on Windows. How do I close it?
var Name = "1.mp4";
var inp = fs.createReadStream("Temp/" + Name);
var out = fs.createWriteStream("Video/" + Name);
inp.pipe(out);

inp.on("end", function () {
    fs.unlink("Temp/" + Name, function () {
        out.close();
        console.log("unlink this file:", Name);
    });
});
Screenshot: file handle not closed on Windows
You need to close inp too, not just out. You can do this by calling inp.close(); at the same point where you already call out.close();.
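For example (a sketch of the snippet above with both streams closed before the unlink):

var inp = fs.createReadStream("Temp/" + Name);
var out = fs.createWriteStream("Video/" + Name);
inp.pipe(out);

inp.on("end", function () {
    inp.close();   // release the read handle
    out.close();   // release the write handle
    fs.unlink("Temp/" + Name, function (err) {
        if (err) throw err;
        console.log("unlink this file:", Name);
    });
});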
Alternatively, you could simply move the file with
fs.rename("Temp/" + Name, "Video/" + Name, function (err) {
    if (err) throw err;
    console.log("Renamed:", Name);
});
rather than rewriting the file.
I am building a time-lapse camera web application using a Raspberry Pi and the Raspberry Pi Camera Module. So far I have built a web application (using Node.js, Express, AngularJS, and Bootstrap 3) that can interact with the camera module through an open source Node.js module (https://www.npmjs.org/package/raspicam).
I have a global variable called "setting" that changes whenever the user changes the camera settings:
var setting = {
    mode: "timelapse",
    output: "public/images/image%d.jpg", // image1, image2, image3, etc...
    encoding: "jpg",
    timelapse: 3000, // take a picture every 3 seconds
    timeout: 12000 // take a total of 4 pictures over 12 seconds
}
I have three functions in Express that can:
set Camera settings
exports.setCamera = function (req, res) {
    setting = {
        mode: req.body.mode,
        output: req.body.output,
        encoding: req.body.encoding,
        timelapse: req.body.timelapse,
        timeout: req.body.timeout
    }
    res.json(setting, 200);
    console.log('SET CAMERA - ' + JSON.stringify(setting));
}
start the Camera
exports.startCamera = function (req, res) {
    camera = new RaspiCam(setting);

    camera.on("start", function (err, timestamp) {
        console.log("timelapse started at " + timestamp);
    });

    camera.on("read", function (err, timestamp, filename) {
        console.log("timelapse image captured with filename: " + filename);
    });

    camera.on("exit", function (timestamp) {
        console.log("timelapse child process has exited");
        res.json(setting, 200);
    });

    camera.on("stop", function (err, timestamp) {
        console.log("timelapse child process has been stopped at " + timestamp);
    });

    camera.start();

    setTimeout(function () {
        camera.stop();
    }, setting.timeout + 1000);

    console.log('START CAMERA - ' + JSON.stringify(setting));
}
stop the Camera
exports.stopCamera = function (req, res) {
    camera.stop();
    res.json(setting, 200);
    console.log('STOP CAMERA - ' + JSON.stringify(setting));
}
As you can see in the "startCamera" function, I am creating a new RaspiCam object called "camera" that takes the global variable "setting" (which can always change). When the camera object is created, I also attach "start", "read", "exit", and "stop" handlers to it. The problem is that since I am not making the camera object a global variable, when the user clicks Stop halfway through the session, the "stopCamera" function gets called but it does not know what camera.stop() is and says it is undefined. Is there a way I can let the "stopCamera" function know about the camera object (which was created in the "startCamera" function)?
Sorry if this is confusing, I don't know how else to describe my problem.. :(
I think you have a problem with how this is architected, but the simple solution to your question is to check whether the camera object has been initialized.
exports.stopCamera = function (req, res) {
    if (camera && typeof(camera.stop) == "function") {
        camera.stop();
        console.log('STOP CAMERA - ' + JSON.stringify(setting));
    }
    res.json(setting, 200);
}
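For that check to be meaningful, the camera variable needs to live outside startCamera, for example at module scope (a sketch, not the asker's exact file):

var RaspiCam = require('raspicam');

var camera = null;   // shared by startCamera and stopCamera

exports.startCamera = function (req, res) {
    camera = new RaspiCam(setting);   // assign to the shared variable, no 'var'
    // ... attach the "start", "read", "exit" and "stop" handlers as above ...
    camera.start();
};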
Is there a way to capture the desktop with Node.js, not just a browser tab?
I have searched a lot but didn't find anything.
What I want is to use Node.js to build a desktop application.
You can use
http://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback
and
https://en.wikipedia.org/wiki/Scrot
to take a screenshot of the screen of the current user running the Node.js application.
Something like this (a complete Express example):
var express = require('express'),
    childProcess = require('child_process'),
    app = express();

app.get('/screenshot.png', function (request, response) {
    childProcess.exec('scrot screenshot.png', function (err) {
        if (err) {
            response.send(503, 'Error creating image!');
        } else {
            response.sendfile('screenshot.png');
        }
    });
});

app.listen(3000);
But this is quite a slow approach.
Why don't you just call an external program?
For example, you could call import:
$ import -window root screenshot.png
The code:
var exec = require('child_process').exec;
exec('import -window root screenshot.png', function (error, stdout, stderr){
// now you have the screenshot
});
Full Resolution
var screenshot = require('desktop-screenshot');

screenshot("screenshot.png", function (error, complete) {
    if (error)
        console.log("Screenshot failed", error);
    else
        console.log("Screenshot succeeded");
});
Resize to 400px wide, maintain aspect ratio
var screenshot = require('desktop-screenshot');

screenshot("screenshot.png", {width: 400}, function (error, complete) {
    if (error)
        console.log("Screenshot failed", error);
    else
        console.log("Screenshot succeeded");
});
Resize to 400x300, set JPG quality to 60%
var screenshot = require('desktop-screenshot');

screenshot("screenshot.jpg", {width: 400, height: 300, quality: 60}, function (error, complete) {
    if (error)
        console.log("Screenshot failed", error);
    else
        console.log("Screenshot succeeded");
});
Perhaps you should rephrase your question as follows:
How to capture the desktop screen of the host computer on an HTTP client's request, and send back the image as the response.
If that's what you actually mean, you can write your own add-on module using a native compiler. For reference, see this post.
For the newcomers: there is an npm package called desktop-screenshot for exactly this purpose.
You can also find an addon that takes a screenshot, saves it to any user-defined path, and lets the user specify a time interval between captures.