This is my server:
[...]
io.sockets.on('connection', function (socket) {
    // Watch the JSON file for changes.
    // BUG FIX: fs.watch's callback receives (eventType, filename) — the old
    // (curr, prev) signature belongs to fs.watchFile, so the params were misleading.
    fs.watch('../json/notificheJSON.json', function (eventType, filename) {
        // Read with an explicit encoding so `data` is a string, not a Buffer.
        fs.readFile('../json/notificheJSON.json', 'utf8', function (err, data) {
            if (err) throw err;
            var json;
            try {
                // fs.watch can fire while the file is still being written,
                // leaving partial/invalid JSON on disk — skip such reads
                // instead of crashing the server with an uncaught exception.
                json = JSON.parse(data);
            } catch (parseErr) {
                return;
            }
            // volatile: drop the message if this client cannot receive it right now.
            socket.volatile.emit('notification', json);
        });
    });
});
[...]
This is my client:
[...]
// creating a new websocket
var socket = io.connect('http://localhost:8080');
// on every message recived we print the new datas inside the #container div
socket.on('notification', function (n) {
// convert the json string into a valid javascript object
//var _data = JSON.parse(data);
for(var i = 0; i < n.notifiche.length; ++i) {
if(n.notifiche[i].id_ricevente == <?php echo $id_utente; ?>){
var numero_not = n.notifiche[i].numero_notifiche ;
}
}
$('#notify, #la_mobile').html(numero_not);
});
[...]
Actually, I run my server using the command line:
nodejs server.js
I would like to run my server on remote hosting. How can I do it?
In some form or fashion you need something that will execute Node and your application. If you are using a remote host, how that works depends on the host — check their documentation to see what they say about running Node applications.
There are a couple of options for executing Node. You can do it using npm scripts. Another option is using an init script, but since you are on a remote host I doubt that is the route you want to take unless you are on a VPS.
Related
I'm new to node.js, I was creating a script to verify Serial and Product with the Gumroad Api package.
From a script I load two variables from an html file and send them to the node server
The node receives them and they are displayed, but I cannot get them to be read by the gumroad package function.
This is the script in send variables page ( and works )
// Payload sent to the verification endpoint: the licence key and the
// Gumroad product id.
var licenseData = {
    serial: 'xxxxxx-xxxxxx-xxxxxx-xxxxxx',
    product: 'product_id'
};
$.post('http://localhost:3000/node_server_test/', licenseData);
The Node.js server retrieves the variables correctly.
I report only the part of the code concerned.
// Verify a Gumroad licence for the (product, serial) pair posted by the page.
app.post('/node_server_test', (req, res) => {
    // Field names must match the client payload: { serial, product }.
    var a = req.body.product; // product id / permalink
    var b = req.body.serial;  // licence key
    console.log(a);
    console.log(b);
    // NOTE(review): if a and b log correctly here but verification still
    // fails, check for stray whitespace or type mismatches in the posted
    // values versus the hard-coded literals that did work.
    gumroad.verifyLicense(a, b)
        .then(function (license) {
            console.log('Licence Valid');
            // BUG FIX: always answer the HTTP request — previously no
            // response was sent, so the client's $.post hung until timeout.
            res.json({ valid: true });
        }, function () {
            console.log('Invalid Licence');
            res.json({ valid: false });
        });
});
In this mode the data is read and reported correctly in the console, but it is not accepted by the gumroad.verifyLicense(a,b) function: the serial/product pair is always reported as invalid.
If instead I remove the comments from the variable declarations for the function
var a = "product_id";
var b = "xxxxxx-xxxxx-xxxxx-xxxxxx"
the function returns the correct result.
Is it possible to do this? Where am I going wrong?
Thanks
I have a problem with Node.js and the rethinkdb module.
I'm currently developing program for my thesis
this pieces code of db.js:
var r = require("rethinkdb");
var host = "localhost";
var db = "example";
var port = 28015;

// Thin wrapper around the rethinkdb driver used by server.js.
class dbs{
    // Open a driver connection and hand it to callback(err, connection).
    connectToDb(callback){
        r.connect({
            host:host,
            port:port,
            db:db
        },function(err,connection){
            return callback(err,connection);
        });
    }
    // Stream every change on `tableName` to callback(null, row);
    // errors are reported as callback(true, err) (existing convention).
    streamAllData(tableName,callback){
        this.connectToDb(function(err,conn){
            // BUG FIX: a failed connection previously fell through and
            // crashed on `run(undefined, ...)`.
            if(err){
                return callback(true,err);
            }
            r.table(tableName).changes().run(conn,function(err,cursor){
                if(err){
                    return callback(true,err);
                }
                // BUG FIX: cursor.next() delivers only the FIRST change and
                // then stops — each() keeps invoking the callback for every
                // subsequent change, which is what "streamAllData" promises.
                cursor.each(function(err,row){
                    if(err){
                        return callback(true,err);
                    }
                    callback(null,row);
                });
            });
        });
    }
}
and this pieces of code from server.js
var dbrs = require("./db");
var rdb = new dbrs();
// Attach socket.io to the HTTPS server; `https`, `options`, `app` and
// `port` are defined elsewhere in this file.
var io = require("socket.io").listen(https.createServer(options,app).listen(port));
io.on("connection",function(socket){
// NOTE(review): every connecting client opens its own changefeed inside
// streamAllData, and nothing is closed on disconnect — with several open
// tabs/clients the same change is emitted once per connection, which
// presumably explains the repeated identical messages. Confirm against
// db.js and consider sharing one feed or cleaning up on 'disconnect'.
rdb.streamAllData("userlocation",function(err,data){
socket.emit("broadcast:userlocation",data);
});
});
The result is that the same data is always sent 7 times, even though the mobile phone sends coordinates to the server cleanly at the configured interval.
This unwanted repetition crashes my browser when I try to draw the driver location on the map.
that is a screenshot from chrome console
Your method name streamAllData does not match your usage of cursor.next, which only fetches a single result. Perhaps you meant to use cursor.each instead?
See https://www.rethinkdb.com/api/javascript/next/
I have an Electron app which uploads a dropped file to a predefined server with node-ftp. The upload works like a charm, but despite reading a couple of suggestions I cannot figure out how to get information on the actual progress for a progress-bar.
My upload-code so far:
var ftp = new Client();
// Stream the dropped file from disk rather than buffering it in memory.
let uploadfile = fs.createReadStream(f.path);
let newname = uuid(); //some function I use for renaming
ftp.on('ready', function () {
    // Connection established: start the upload, then close the control channel.
    ftp.put(uploadfile, newname, function (err) {
        if (err) throw err;
        ftp.end();
    });
});
// BUG FIX: the client object is `ftp`, not `c` — `c.connect(...)` threw a
// ReferenceError and the 'ready' event never fired.
ftp.connect({user: 'test', password: 'test'});
I always stumble across monitoring the 'data' event, but could not find out how or where to access it (as you can see I'm quite new to JavaScript).
Got it. I found the answer in streams with percentage complete
With my code changed to
var ftp = new Client();
let uploadfile = fs.createReadStream(f.path);
let newname = uuid(); //some function I use for renaming
// BUG FIX: uploadedSize was never initialised, so `undefined + n` made the
// running total (and every printed percentage) NaN.
let uploadedSize = 0;
ftp.on('ready', function() {
    // Count bytes as they leave the read stream; f.size is the total file size.
    uploadfile.on('data', function(buffer) {
        uploadedSize += buffer.length;
        console.log("Progress:\t" + ((uploadedSize/f.size*100).toFixed(2) + "%"));
    });
    ftp.put(uploadfile, newname, function(err) {
        if (err) throw err;
        ftp.end();
    });
});
// BUG FIX: the client variable is `ftp`, not `c`.
ftp.connect({user: 'test', password: 'test'});
I get the percentage uploaded in console. From here it's only a small step to a graphical output.
on client side you can create a byte count for your upload stream (http://www.experts.exchange.com/questions/24041115/upload-file-on-ftp-with-progressbar-and-time-left.html)
set lower limit of the progressbar to 0
set upper limit to file length of upload file
feed the progress bar with the byte count
(http://www.stackoverflow.com/questions/24608048/how-do-i-count-bytecount-in-read-method-of-inputstream)
maybe you can use npm like stream-meter (https://www.npmjs.com/package/stream-meter) or progress-stream (https://www.npmjs.com/package/progress-stream) and pipe your file stream through to feed the progressbar. i am not sure about that because i do not know the internals of the npms. in progress-stream is a function transferred() that would fit exactly
a very accurate way is to have code on the server that gives feedback to the browser (http://www.stackoverflow.com/questions/8480240/progress-bar-for-iframe-uploads)
I am wondering if it makes sense to use Node's Buffer for incoming client data to a server. My server and clients are TCP based and I am using <EOF> to determine the end of a message. The message is always stringified JSON.
eg: {"name":"Bob"}<EOF>
In case the entire message does not come through, should I be using Node Buffer to build up the data, or a regular string?
If it is a buffer, I don't think I understand how to correctly build one up. This is my Client constructor that is created each time a socket connects to the Node server.
constructor(socket){
var self = this;
// properties
this.socket = socket;
this.buffer = Buffer.alloc(1024);
this.dataEnd = '<EOF>';
// append <EOF> to every msg
this.socket.write = function(msg){
msg += "<EOF>";
return Socket.prototype.write.call(this, msg);
};
// build up buffer
this.socket.on('data', function(data){
var buffLen = self.buffer.length;
var dataBuffer = Buffer.from(data);
if(buffLen + dataBuffer.length < 1024){
if(data.indexOf(self.dataEnd) === -1){
self.buffer.concat(dataBuffer);
}
}
});
return this;
}
I'm trying to make a communication between nodejs server and a red pitaya card.
The client connect to the web server, use a form to choose some settings , they are send to a red pitaya who send back a big string of numbers.
Then, the web server write them in a downloadable file.
It works only intermittently.
here is the server :
var settings = require('./settings_model');
var fs = require('fs');
var net = require('net');
var msg = [];
module.exports = function(app) {
net.createServer(function (socket) {
socket.on('data', function (data) {
/* test console */
var msg = JSON.stringify(data);
console.log("data received :" + msg);
tab = new Array();
for(i = 0; i < nbPix; i++){
tab[i] = new Array();
fs.appendFile('./public/img.txt', '\n');
for(var j=0 ; j < len; j++){
tab[i][j]= data[i+(j*nbPix)];
if(!isNaN(tab[i][j])){
fs.appendFile('./public/img.txt', tab[i][j]+ " ");
};
};
};
});
app.post('/api/sendSettings', function(req, res) {
// creation of the file
fs.writeFile("./public/img.txt", "", function(err){
if(err){console.log(err)};
console.log("the file was saved");
});
// here we send settings to the red pitaya
socket.write(input);
res.end();
});
}).listen(9000);
};
For tiny values, sometimes it works. I can see the log :
data received :{"type":"Buffer","data":[1,1,....2]}
But, for the same values, it can be divided and bug my 2d array
data receive :{"type":"Buffer","data":[1,1,....1]}
data receive :{"type":"Buffer","data":[2,2,....2]}
And if the data received is too big, system crash with an :
Error : EMFILE: too many open files, open './public/img.txt'
Is there a way to set the size of the received stream? I think I need all the data at once to build my 2-D array.
For the EMFILE error, I've tried changing settings such as the "open files" limit, but it is still not working. It should not be trying to open more than one file anyway, should it?
I'm new to this kind of project, so if you can give me hints or links to documentation I will gladly accept them!