I'm trying to upload a file using Node.js, socket.io and delivery.js. The file upload itself works fine, but I need to send some parameters along with the file, and I don't see how to do that. Here's my code so far.
Client.html
var socket = io.connect();
var delivery = new Delivery(socket);

function sendFile(){
  delivery.on('delivery.connect', function(delivery){
    var file = document.getElementById("file").files[0];
    var extraParams = {foo: 'bar'};
    delivery.send(file, extraParams); // trying to send the params with the file
    return false;
  });
  delivery.on('send.success', function(fileUID){
    alert("file was successfully sent.");
  });
}
and my server.js
io.sockets.on('connection', function(socket){
  var delivery = dl.listen(socket);
  delivery.on('receive.success', function(file){
  // delivery.on('receive.success', function(file, params){ <-- tried this also
    var params = file.params;
    console.log(params);
    fs.writeFile(file.name, file.buffer, function(err){
      if(err){
        console.log('File could not be saved.');
      }else{
        console.log('File saved.');
      }
    });
  });
});
But my console.log(params) prints undefined. I would be really grateful if anyone could point me in the right direction.
Update: Solved the issue by cloning delivery.js from GitHub; the package on npm seems to be outdated.
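For anyone hitting the same problem, here is a minimal sketch of the working flow with the up-to-date GitHub version, where the extra parameters arrive on the received file object as file.params:

// client: pass the params object as the second argument
delivery.send(file, {foo: 'bar'});

// server: with the GitHub version of delivery.js, the params arrive on the file object
delivery.on('receive.success', function(file){
  console.log(file.params); // { foo: 'bar' }, matching the client call above
  fs.writeFile(file.name, file.buffer, function(err){
    if (err) { console.log('File could not be saved.'); }
  });
});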
I am new to Ionic. I am trying to upload a file to a server, and basically what I need is to attach a JSON object to be sent with FileTransfer.upload and be able to recover this object on the server side, using an Express middleware, from the request:
req.value = ${value_sent_by_ionic_client_upload};
I am currently setting the object as a params entry, and I can see the object in the FileUploadOptions, but the object is not accessible on the server side as a value of the request.
Current client side:
var options = new FileUploadOptions();
options.filename = fileURL.substr(fileURL.lastIndexOf('/') + 1);

var params = {};
params.user = StorageService.getUser();
options.params = params;

var ft = new FileTransfer();
ft.upload(fileURL,
          encodeURI("http://192.168.192.62:3000/api/meals/picture"),
          pictureUploaded,
          uploadError,
          options);
On the server side, in the Express middleware:
var user = req.user;
but user is undefined on the server side.
How do I pass the user using Cordova's FileTransfer.upload so that it is available from a req.user call?
This is a bit late, but maybe it can help someone looking for upload code.
By the way, I don't think Cloudinary provides any option to send users.
Here you're using an upload made with jQuery while you're using AngularJS for Ionic, which is not really optimal. I might suggest code like this:
$scope.uploadimage = function(){
  var options = new FileUploadOptions();
  options.fileKey = "image";

  $cordovaFileTransfer.upload('Link-to-your-server', $scope.yourpicture, options).then(function(result){
    console.log("File upload complete");
    console.log(result);
    $scope.uploadResults = "Upload completed successfully";
  }, function(err){
    console.log("File upload error");
    console.log(err);
    $scope.uploadResults = "Upload failed";
  }, function(progress){
    // constant progress updates
    console.log(progress);
  });
};
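As for the original question about req.user: FileTransfer sends the upload as multipart/form-data, and everything in options.params arrives as plain text form fields rather than on the request object itself. A minimal sketch of the server side, assuming an Express app with the multer middleware and assuming the client sends the user as a JSON string (e.g. params.user = JSON.stringify(StorageService.getUser())); the route and field names are taken from the question:

var express = require('express');
var multer = require('multer');

var app = express();
var upload = multer({ dest: 'uploads/' });

// 'image' must match options.fileKey on the client
app.post('/api/meals/picture', upload.single('image'), function(req, res){
  // params from FileUploadOptions arrive as text fields on req.body
  req.user = JSON.parse(req.body.user); // assumes the client JSON-stringified the user
  console.log(req.user);
  res.sendStatus(200);
});

app.listen(3000);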
I'm trying to POST a raw body with restify. I have the receive side correct: when using Postman I can send a raw zip file, and the file is correctly created on the server's file system. However, I'm struggling to write my test in Mocha. Here is the code I have; any help would be greatly appreciated.
I've tried this approach.
const should = require('should');
const restify = require('restify');
const fs = require('fs');

const port = 8080;
const url = 'http://localhost:' + port;
const client = restify.createJsonClient({
  url: url,
  version: '~1.0'
});

const testPath = 'test/assets/test.zip';
fs.existsSync(testPath).should.equal(true);
const readStream = fs.createReadStream(testPath);

client.post('/v1/deploy', readStream, function(err, req, res, data){
  if (err) {
    throw new Error(err);
  }
  should(res).not.null();
  should(res.statusCode).not.null();
  should(res.statusCode).not.undefined();
  res.statusCode.should.equal(200);
  should(data).not.null();
  should(data.endpoint).not.undefined();
  data.endpoint.should.equal('http://endpointyouhit:8080');
  done();
});
Yet the file size on the file system is always 0. I'm not using my readStream correctly, but I'm not sure how to correct it. Any help would be greatly appreciated.
Note that I want to stream the file, not load it into memory on transmit and receive, since the file can potentially be too large for an in-memory operation.
One thing is that you would need to specify a content type of multipart/form-data. However, it looks like restify doesn't support that content type, so you're probably out of luck using the restify client to post a file.
To answer my own question: it doesn't appear to be possible to do this with the restify client. I also tried the request module, which claims to have this capability; however, when using their streaming examples, I always ended up with a file size of 0 on the server. Below is a functional Mocha integration test.
const http = require('http'); // needed for http.request below

const testPath = 'test/assets/test.zip';
fs.existsSync(testPath).should.equal(true);
const readStream = fs.createReadStream(testPath);

var options = {
  host: 'localhost',
  port: port,
  path: '/v1/deploy/testvalue',
  method: 'PUT'
};

var req = http.request(options, function (res) {
  // this feels a bit backwards, but these are evaluated AFTER the read stream has closed
  var buffer = '';
  // pipe the body into a buffer
  res.on('data', function (data) {
    buffer += data;
  });
  res.on('end', function () {
    should(res).not.null();
    should(res.statusCode).not.null();
    should(res.statusCode).not.undefined();
    res.statusCode.should.equal(200);
    const json = JSON.parse(buffer);
    should(json).not.null();
    should(json.endpoint).not.undefined();
    json.endpoint.should.equal('http://endpointyouhit:8080');
    done();
  });
});

req.on('error', function (err) {
  if (err) {
    throw new Error(err);
  }
});

// pipe the read stream into the request
readStream.pipe(req);

// close the request on the close of the read stream
readStream.on('close', function () {
  req.end();
  console.log('I finished.');
});

// note that if we end up with larger files, we may want to support continue, much as S3 does
// https://nodejs.org/api/http.html#http_event_continue
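For reference, the 100-continue handshake mentioned in that last comment could look roughly like this on the client; a minimal sketch using Node's built-in http module, reusing the options and readStream setup from the test above:

// ask the server for permission before streaming the large body
options.headers = { 'Expect': '100-continue' };
var bigReq = http.request(options);
bigReq.on('continue', function () {
  // the server replied 100 Continue; now it is safe to stream the file
  fs.createReadStream(testPath).pipe(bigReq);
});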
My client sends an image file to the server. It works 5 times and then suddenly stops. I am pretty new to using streams and pipe, so I am not sure what I am doing wrong.
Server code
var http = require('http');
var fs = require('fs');
var i = 0; // counter used to name the saved files

http.createServer(function(req, res){
  console.log("File received");
  // open a writable stream to the output file
  var name = "./test" + i + ".jpg";
  var writeStream = fs.createWriteStream(name);
  // pipe the POST data to the file
  req.pipe(writeStream);
  req.on('end', function(){
    console.log("File saved");
    i++;
  });
  // in case any errors occur
  writeStream.on('error', function(err){
    console.log(err);
  });
}).listen(3000);
Client code
var request = require('request');
var fs = require('fs');

setInterval(function () {
  var readStream = fs.createReadStream('./test.jpg');
  readStream.on('open', function () {
    // pipe the read stream into the POST request to the server
    readStream.pipe(request.post('http://192.168.1.100:3000/test'));
    console.log("Send file to server");
  });
}, 1000);
This behaves like a resource-exhaustion issue. Not sure which calls throw errors and which just return: does the server accept the connection on the 6th call? Does the write stream open? Does the pipe open?
Try ending the connection and closing the pipe after the image is saved. Maybe close the write stream too; I don't remember whether Node garbage-collects file descriptors.
I had to do the following on the server side to make this work:
res.statusCode = 200;
res.end();
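In the context of the server code above, that means answering each request so the client releases its socket; a minimal sketch of the adjusted 'end' handler (the five-uploads limit matches the default size of Node's HTTP agent socket pool, which fills up when responses never complete):

req.on('end', function () {
  console.log("File saved");
  i++;
  // finish the response so the client can reuse or close the socket
  res.statusCode = 200;
  res.end();
});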
I'm using Node.js, Express and PostgreSQL as the backend.
This is the approach I used to build a REST API:
exports.schema = function (inputs, res) {
  var query = knex('schema')
    .orderBy('sch_title', 'asc')
    .select();

  query.exec(function (err, schemas) {
    if (err) {
      var response = {
        message: 'Something went wrong when trying to fetch schemas',
        thrownErr: err
      };
      console.error(response);
      res.send(500, response);
      return; // without this, the checks below still run after an error
    }
    if (schemas.length === 0) {
      var message = 'No schemas were found';
      console.error(message);
      res.send(400, message);
      return;
    }
    res.send(200, schemas);
  });
};
It works, but after a while Postgres logs an error and it no longer works:
sorry, too many clients already
Do I need to close each request somehow? I could not find anything about this in the Express docs. What could be wrong?
This error only occurs on the production server, not on the development machine.
Update
The app only breaks in one 'module'. The rest of the app works fine. So it's only some queries that give the error.
Just keep one connection open for your whole app. The docs show an example of how to do this.
This code goes in your app.js...
var Knex = require('knex');
Knex.knex = Knex.initialize({
  client: 'pg',
  connection: {
    // your connection config
  }
});
And when you want to query in your controllers/middlewares...
var knex = require('knex').knex;

exports.schema = function (req, res) {
  var query = knex('schema')
    .orderBy('sch_title', 'asc')
    .select();
  // more code...
};
If you place Knex.initialize inside an app.use or app.VERB, it gets called repeatedly for each request, so you end up connecting to PG multiple times.
For most cases, you don't need to do an open+query+close for every HTTP request.
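If connections still pile up, the pool can also be capped explicitly. A sketch, assuming a knex version whose config accepts pool settings (check your version's docs for the exact option names):

var Knex = require('knex');
Knex.knex = Knex.initialize({
  client: 'pg',
  connection: {
    // your connection config
  },
  // keep this well below PostgreSQL's max_connections (default 100)
  pool: { min: 2, max: 10 }
});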
I'm trying out Node.js and socket.io. I want to use them to replace a ping function I have for getting updates from my server. Here is an example of what I'm doing:
var app = require('http').createServer(),
    io = require('socket.io').listen(app),
    cp = require('child_process');

app.listen(8080);

// check a global value for all the connected users from the PHP command line
var t = setInterval(function(){
  cp.exec('/usr/bin/php /Users/crear/Projects/MandaFree/symfony api:getRemainingMessages',
    function(err, stdout){
      if (err) {
        io.sockets.emit('error', 'An error occurred while running the child process.');
      } else {
        io.sockets.emit('change', stdout);
      }
      console.log('Remaining messages: ' + stdout);
    });
}, 3000);

var remaining = io.of('/getRemainingMessages')
  .on('connection', function(socket){
    socket.on('disconnect', function(){});
  });
The issue here is that when I call io.sockets.emit(), the debug console tells me it is doing something, but it looks like it is not reaching the clients, because they do nothing.
I used to have one interval for every connected client, and when I used socket.emit() it did work, but that is not the optimal solution.
UPDATE:
Here is my client-side code.
var remaining = io.connect('http://127.0.0.1:8080/getRemainingMessages');

remaining.on('change', function(data){
  console.log('Remaining messages: ' + data);
  $('#count').html(data);
});

remaining.on('error', function(error){
  console.log(error);
});
I had a very similar issue a couple of days back, and it looks like socket.io had some changes in the API. I have never worked with Symfony, but I am hoping the issues are the same.
I have a working demo of socket.io sending and receiving a message, uploaded to https://github.com/parj/node-websocket-demo as a reference.
Essentially two changes:
On the server side, change socket.on to socket.sockets.on:
var socket = io.listen(server);
socket.sockets.on('connection', function(client)
On the client side, the URL and port are not required, as they are autodetected:
var socket = io.connect();
This has been tested using Express 2.5.2 and Socket.io 0.8.7.
I have amalgamated your server code with mine. Would you be able to try this on the server, together with my client JavaScript and client HTML, just to see if it works?
var socket = io.listen(server);

socket.sockets.on('connection', function(client){
  var connected = true;

  client.on('message', function(m){
    sys.log('Message received: ' + m);
  });

  client.on('disconnect', function(){
    connected = false;
  });

  // setInterval starts the timer itself; its return value is a timer ID,
  // not a function, so it must not be invoked
  var t = setInterval(function(){
    if (!connected) {
      return;
    }
    cp.exec('/usr/bin/php /Users/crear/Projects/MandaFree/symfony api:getRemainingMessages',
      function(err, stdout){
        if (err) {
          client.send('error : An error occurred while running the child process.');
        } else {
          client.send('change : ' + stdout);
        }
        console.log('Remaining messages: ' + stdout);
      });
  }, 3000);
});
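For completeness, messages sent with client.send() arrive on the client's generic 'message' event rather than a named event; a minimal sketch of a matching client, assuming socket.io 0.8.x as tested above:

var socket = io.connect();

socket.on('connect', function(){
  socket.send('hello from the client');
});

socket.on('message', function(m){
  // messages arrive as 'change : <count>' or 'error : <description>'
  console.log('Message received: ' + m);
});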