I am new to Node, so please bear with me. I'm trying to write a function that checks that there is a live connection to the web server before redirecting the page.
This works for the first six or seven clicks, then the page stops redirecting; it just sits there. Then, a few minutes later, an alert shows.
What is going on?!
var http = require("http");
var url = 'http://example.com/';

mainMenu.click(function () {
    var menulink = $(this).attr('rel');
    var menuvar = http.get(url, function () {
        window.location = menulink;
    }).on('error', function () {
        alert('Cannot Connect to Server');
    });
});
I suspect you have a problem with non-flowing streams.
Since you never consume the data of your responses, the connections never close. The HTTP Agent limits the number of concurrent connections and refuses to open any new ones after a while.
You should try to manually switch the response to flowing mode:
mainMenu.click(function () {
    var menulink = $(this).attr('rel');
    var menuvar = http.get(url, function (res) {
        // FORCE FLOWING MODE
        res.resume();
        window.location = menulink;
    }).on('error', function () {
        alert('Cannot Connect to Server');
    });
});
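If draining the response isn't enough, a further option (an untested sketch, not from the original answer) is to bypass the shared agent so each probe gets its own socket that closes when the request completes:

var http = require("http");

mainMenu.click(function () {
    var menulink = $(this).attr('rel');
    // agent: false opts out of the pooled Agent, so no connection
    // limit applies and the socket closes once the body is drained
    http.get({ host: 'example.com', path: '/', agent: false }, function (res) {
        res.resume(); // still consume the body
        window.location = menulink;
    }).on('error', function () {
        alert('Cannot Connect to Server');
    });
});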
Related
I'm piping an HTTPS request to a file. It works for 99.9% of calls, but occasionally (maybe when the server or network is unavailable) it hangs indefinitely...
This obviously causes my application to stop working and requires a manual restart...
I have other HTTPS connections that used to hang occasionally but now always complete, using the following error-handling code on the request object, as suggested in the Node documentation:
request.on('socket', function(socket) {
    socket.setTimeout(10000);
    socket.on('timeout', function() { request.abort(); });
});

request.on('error', function(e) {
    // Handle the error...
    console.error("FAILED!");
});
... but it seems that timeouts on the request are ignored when the response is piped to a file stream. Maybe I should handle a timeout error on the file stream object instead, but the documentation is not clear on whether there is any event to wait for other than 'finish'...
Here is the sample code, I hope someone can help me:
var https = require('https'),
    fs = require('fs');

var opts = {
    host: 'www.google.com',
    path: '/',
    method: 'GET',
    port: 443
};

var file = fs.createWriteStream('test.html');

var request = https.request(opts, function(response) {
    response.pipe(file);
    file.on('finish', function() {
        file.close(function() {
            console.log("OK!");
        });
    });
});

request.on('socket', function(socket) {
    socket.setTimeout(10000);
    socket.on('timeout', function() { request.abort(); });
});

request.on('error', function(e) {
    console.error("FAILED!");
});

request.end();
If you want to reproduce the hang, change the host and path to point at a huge file and disconnect the network cable during the transfer. It should time out after 10 seconds, but it doesn't...
I set up a demo Node.js HTTP server that sends a very slow response, and a client similar to your sample code.
When I start the client and then stop the server while it is sending the response, I also don't get a timeout event on the socket, but I do get an end event on the response within the client:
var request = https.request(opts, function(response) {
    response.pipe(file);
    file.on('finish', function() {
        file.close(function() {
            console.log("OK!");
        });
    });
    response.on('end', function() {
        // this is printed when I stop the server
        console.log("response ended");
    });
});
Maybe you could listen to that event?
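If you handle the hang that way, you may also want to remove the partial download so a retry starts clean. A minimal sketch (the fs.unlink cleanup is my addition, not part of the original code):

request.on('error', function(e) {
    console.error("FAILED!");
    // assumption: delete the partial file, ignoring errors
    // in case it was never created
    fs.unlink('test.html', function(unlinkErr) {});
});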
I'm beginning work on my first Node.js application and running into memory-leak issues that I cannot pin down. I want the app to act as a service that continually runs and polls an endpoint, so I think I may be missing something here. The idea is to have a Node application continuously make HTTP requests to an Arduino board I have connected to my network with a web server on it. The board responds to the requests with some JSON or XML representing the state of its attached sensors. The Node app then logs and emits sensor changes that will eventually be consumed by another Electron project.
The Node app is currently broken into a couple of modules:
proxy: makes the HTTP calls to the different endpoints on the Arduino web server:
var http = require('http'),
    KeepAliveAgent = require('keep-alive-agent');

var controllerRequestOpts = {};

function send(path, cb) {
    var response = '';
    //build the request
    var request = controllerRequestOpts;
    request.path = path;
    //make the call to the controller
    http.get(request, function(res) {
        res.on('data', function(chunk) {
            response += chunk;
        });
        res.on('end', function() {
            cb(null, response);
        });
    })
    .on('error', function(e) {
        cb(e, null);
    });
}

module.exports = function(controllerOptions) {
    controllerOptions.port = controllerOptions.port || 2222;
    controllerRequestOpts = controllerOptions;
    controllerRequestOpts.agent = new KeepAliveAgent();
    return {
        //JSON
        queryJson: function(cb) {
            send('/json', cb);
        },
        //XML
        queryXml: function(cb) {
            send('/xml', cb);
        }
        //Additional endpoints
    };
};
runner: loops forever with the interval provided, making the proxy calls to the Arduino:
var proxy = require('proxy');
var Watcher = require('./watcher');

var timer;
var toUpdate;

function initProxy(controllerParams) {
    proxy = proxy(controllerParams);
    Watcher = new Watcher();
}

function startListening(startOptions) {
    var query;
    //determine the query and callback functions based off configuration
    query = startOptions.queryType === 'json'
        ? proxy.queryJson
        : proxy.queryXml;
    toUpdate = startOptions.queryType === 'json'
        ? Watcher.updateLatestJson
        : Watcher.updateLatestXml;
    //Start running and making requests every 15 seconds
    timer = setInterval(function() {
        query(handleResponse);
    }, startOptions.queryInterval);
}

function handleResponse(err, resp) {
    if (err) {
        console.log('ERROR: ' + err);
    }
    else {
        toUpdate.call(Watcher, resp);
    }
}

function stopListening() {
    clearInterval(timer);
    process.exit();
}

var runner = {
    connect: function(controllerParams) {
        initProxy(controllerParams);
    },
    start: function(startOptions) {
        startListening(startOptions);
        return Watcher;
    },
    stop: function() {
        stopListening();
    }
};

module.exports = runner;
I also have a "Watcher" module, which is just a constructor function that emits the changes back to the calling app. The calling app looks like this:
var runner = require('./index');

var controllerSettings = {
    hostname: '192.168.1.150',
    port: 2222
};

var startOptions = {
    queryType: 'json',
    queryInterval: 15000
};

runner.connect(controllerSettings);
var watcher = runner.start(startOptions);

watcher.on('P1Changed', printParams);
Everything is working as expected, but as the app runs, the memory usage of the Node process constantly increases. I'm wondering whether I'm using the http module incorrectly, or whether the runner shouldn't be using setInterval. Is there a standard way to run this kind of app as a 'service' rather than as a 'server'?
Continuously sending multiple HTTP requests will cause Node to create huge TLSWrap objects that the GC will not be able to clear for several minutes.
If you wish to send data to the same host(s) over and over again, you should open a TCP connection (stream) rather than use HTTP requests, which carry a huge overhead.
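A minimal sketch of that approach with Node's net module (the line-based query protocol is an assumption for illustration, not part of the original answer; the host and port are taken from the question):

var net = require('net');

// one long-lived TCP connection instead of a new HTTP request per poll
var client = net.connect({ host: '192.168.1.150', port: 2222 }, function() {
    client.write('GET /json\n'); // hypothetical line-based query
});

client.on('data', function(chunk) {
    console.log('sensor state:', chunk.toString());
});

client.on('error', function(err) {
    console.error('connection error:', err.message);
});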
I have a web app built on Express. The Node.js backend uses a Java server to perform some heavy operations. The dialogue between Express and the Java server is done using socket.io: the Node.js server is the client and uses socket.io-client to send queries to the Java server, which is based on netty-socketio.
Here is what I am doing in my Node.js app:
var io = require('socket.io-client');
var socket = io.connect('http://localhost:8080');

socket.on('connect', function () {
    console.log('0 Connected!');
    socket.emit('myEvent', ['0', 'here is the query'], function (data) {
        console.log('\tSending query ... waiting for ACK0');
        console.log(data);
    });
    socket.on('serverResponse', function (data) {
        console.log('\tserverResponse event triggered, data:');
        console.log(data);
    });
});
When I run this script outside my web app, everything works like a charm, but when I call this code from Express, my client fails to connect (I never reach the '0 Connected!' line). There are no error messages.
The weird part is that if I first run my web app, send a query, and then start my Java server, the client connects to the Java server and everything works (for that query only). Any clues on how to fix this?
EDIT 1
Here is a schema of what I am trying to achieve:
client javascript backend java server
via browser <---> node/Express/socketio-client <---> netty-socketio
#client's machine | #my server | #my server (the same)
| |
myDNS:80 localhost:8080
Some more details on the Java server; here is the skeleton:
public class App {
    public static void main(String[] args) throws InterruptedException, UnsupportedEncodingException {
        Configuration config = new Configuration();
        config.setHostname("localhost");
        config.setPort(8080);
        final SocketIOServer server = new SocketIOServer(config);
        server.addEventListener("myEvent", String[].class, new DataListener<String[]>() {
            @Override
            public void onData(final SocketIOClient client, String[] data, final AckRequest ackRequest) {
                //Id of the client
                String id = data[0];
                //Acknowledge the request:
                ackRequest.sendAckData("ACK_" + id);
                //doing some calculations ...
                // ... ... ...
                // ... ... ...
                client.sendEvent("serverResponse", new VoidAckCallback() {
                    @Override
                    protected void onSuccess() {}
                }, "I am the answer from the server");
            }
        });
        server.start();
        System.out.println("[JAVA SERVER INFO] Java server started.");
        Thread.sleep(60000*3);//Integer.MAX_VALUE);
        server.stop();
        System.out.println("[JAVA SERVER INFO] Java server stopped.");
    }
}
My web app's Node.js backend and my Java server run on the same machine, and the communication over socket.io goes through localhost:8080. Once again, the weird thing is that the client script works when used outside the Express framework, which makes me think it might be a compatibility problem between socket.io-client and Express.
EDIT 2
I modified my socket.io-client code to see with more details what is happening, I added:
socket.on('connect_error', function(err) {
    console.log(err);
});

socket.on('connect_timeout', function() {
    console.log("connect_timeout");
});

socket.on('reconnect_attempt', function() {
    console.log("reconnect_attempt");
});
When I run the client with the Java server switched off, I get a 'connect_error' event. When the Java server is on, I get no message at all. The connection seems to be neither failing nor succeeding; nothing happens... Any idea how to debug this further?
EDIT 3
Here is the code I am using to handle a request from the browser:
index.js:
var express = require('express');
var router = express.Router();
var controller = require('../controllers/myController.js');

/* GET home page. */
router.get('/', function(req, res, next) {
    res.render('index', { title: 'Express' });
});

module.exports = router;

router.post('/api/getProcessedData', function(req, res, next) {
    var text = req.body.text;
    controller.get_processed_data(text, res);
});
myController.js:
var socket = require('socket.io-client')('http://localhost:8080');

module.exports.get_processed_data = function(text, res) {
    var timestamp = new Date().getTime();
    console.log('starting client');
    socket.on('connect', function () {
        console.log("client connected.");
        socket.emit('myEvent', [timestamp, text], function (data) {
            console.log('\tSending query ... waiting for ACK');
            console.log(data);
        });
        socket.on('serverResponse', function (data) {
            console.log('\tserverResponse event triggered, data:');
            res.send(data);
        });
    });
    socket.on('connect_error', function(err) {
        console.log(err);
    });
    socket.on('connect_timeout', function() {
        console.log("connect_timeout");
    });
    socket.on('reconnect_attempt', function() {
        console.log("reconnect_attempt");
    });
    socket.on('reconnecting', function() {
        console.log("reconnecting");
    });
};
The structure of your controller is a bit messed up. Here are some things that are wrong:
You connect to the Java server when the module is loaded, but you don't assign a connect event handler until the route gets hit. This means you will normally miss the connect event except when the server isn't yet running. So, this entirely explains what you observe. If the java server is already up when you start your Express server, you miss the connect event so you never execute any of the logic in your get_processed_data() function.
You install a new connect handler every time the route is hit which means you will get multiple event handlers assigned, though because of the first issue, none of them will likely get hit.
If you want the socket.io connection to be continually connected, this would be one way to rewrite the controller:
var socket = require('socket.io-client')('http://localhost:8080');

socket.on('connect', function () {
    console.log("client connected.");
});

socket.on('connect_error', function(err) {
    console.log(err);
});

socket.on('connect_timeout', function() {
    console.log("connect_timeout");
});

socket.on('reconnect_attempt', function() {
    console.log("reconnect_attempt");
});

socket.on('reconnecting', function() {
    console.log("reconnecting");
});
var transactionCntr = 0;

module.exports.get_processed_data = function(text, res) {
    var timestamp = new Date().getTime();
    var transactionId = transactionCntr++;
    console.log('sending data to client');

    function onResponse(data) {
        // for concurrency reasons, make sure this is the right
        // response. The server must return the same
        // transactionId that it was sent
        if (data.transactionId === transactionId) {
            console.log('\tserverResponse event triggered, data:');
            res.send(data);
            socket.off('serverResponse', onResponse);
        }
    }

    socket.on('serverResponse', onResponse);

    // send data and transactionId
    socket.emit('myEvent', [timestamp, text, transactionId], function (data) {
        console.log('\tSending query ... waiting for ACK');
        console.log(data);
    });
};
Your current structure also has no way to determine which response goes with which request, so it can run into concurrency issues. It would be simpler to just use a separate HTTP request each time, because then each response would be uniquely paired with the request that produced it.
With your socket.io connection, you could use some sort of ID in your request/response so you can tell which response belongs to which request. I've shown how that would work in the express server. From your Java server, you would have to echo the transactionId back in the response to the Express server so it can track which response goes with which request.
As your code was, if multiple requests for the '/api/getProcessedData' route are in play at the same time, the responses from the different requests could easily get mixed up. This is an architectural problem of the way you're doing things.
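A related pattern, not from the original answer, is to open a short-lived socket.io connection per route hit, so responses cannot get crossed; the trade-off is connection overhead on every request. A rough sketch (the forceNew option and the error handling are my assumptions):

var ioClient = require('socket.io-client');

module.exports.get_processed_data = function(text, res) {
    // fresh connection per request; forceNew avoids reusing a cached socket
    var socket = ioClient('http://localhost:8080', { forceNew: true });
    socket.once('connect', function () {
        socket.emit('myEvent', [new Date().getTime(), text], function () {});
        socket.once('serverResponse', function (data) {
            res.send(data);
            socket.disconnect();
        });
    });
    socket.once('connect_error', function (err) {
        res.status(502).send('backend unavailable');
        socket.disconnect();
    });
};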
I'm no Java expert, but it looks to me like this line:
Thread.sleep(60000*3);
will sleep your thread for 180,000 milliseconds (3 minutes) and then right after that your code calls server.stop(). So, your Java server shuts itself down after 3 minutes.
Thus, you can only connect to your Java server within the first 3 minutes after you start it.
The logical question here is why are you stopping your server at all?
I'm trying to find a way to do what is shown in the diagram below with Node.js. Is it even possible?
Basically, I need one HTTP request to wait for a signal from another HTTP request in order to finish.
According to this, you can provide a callback to an HTTP request, which you could use to finish the request to your server.
My two cents: try the following code. Although it's not properly structured, it should do the trick. You can call /first in different tabs multiple times, then refresh /second to see them respond one by one.
var http = require('http'),
    Promise = require('promise');

var queue = [];

function handleRequest(request, response) {
    switch (request.url) {
        case "/first":
            new Promise(function (resolve) {
                var guid = 'some-guid';
                // enqueue in redis or whatever you like and save the guid to match later.
                queue.push({
                    data: guid,
                    resolve: resolve
                });
            }).then(function () {
                response.end('1st finished');
            });
            break;
        case "/second":
            // queue consumer calls this (probably with the guid parameter)
            queue.length && queue.some(function (item) {
                if (item.data === 'some-guid') {
                    item.resolve();
                }
            });
            response.end();
            break;
        default:
            response.end('call /first, then /second');
            break;
    }
}

var server = http.createServer(handleRequest);

server.listen(8080, function () {
    console.log("Server listening on: http://localhost:%s", 8080);
});
When I open two browser windows in incognito mode (different sessions), I get a stream for one, which works fine: new tweets come in as expected from the Twitter streaming API. The problem is that when the other window loads the page, the first incognito window gets disconnected.
Here is the code I'm using. I'm not sure whether Twitter might not allow simultaneous streams or whether it's something in my logic.
var mod = module.exports = {}
    , twitter = require('twitter')
    , c = console;

var io
    , sock
    , intv = {};

function onNewsInit(data) {
    setTimeout(function() {
        stream(streamQuery);
    }, 1000 * 5);
}

function stream(query) {
    c.log('stream query', query);
    // twit is the configured twitter client (its setup is not shown in this snippet)
    twit.stream('statuses/filter', { track: query }, function(stream) {
        stream.on('data', function (data) {
            c.log(data);
            var item = getItem(query, data);
            c.log('stream item', item);
            if (item) {
                sock.emit('twitter:item', { item: item });
            }
        });
        stream.on('end', function (response) {
            // Handle a disconnection
            c.log('stream end');
        });
        stream.on('destroy', function (response) {
            // Handle a 'silent' disconnection from Twitter, no end/error event fired
            c.log('stream destroy');
        });
        // Destroy the stream when the client ends or disconnects
        sock.on('news:end', stream.destroy);
        sock.on('disconnect', stream.destroy);
    });
}

mod.register = function register(soc, sio) {
    c.log('news register');
    io = sio;
    sock = soc;
    sock.on('news:init', onNewsInit);
    sock.on('news:end', onNewsEnd);
};
app.js:
io.sockets.on('connection', function(sock) {
    news.register(sock, io);
});
Is there any reason one client would be able to disconnect the other?
It turns out the Twitter streaming API only allows one simultaneous connection per account.
As described here:
https://dev.twitter.com/docs/streaming-apis/streams/public
Connections

Each account may create only one standing connection to the public endpoints, and connecting to a public stream more than once with the same account credentials will cause the oldest connection to be disconnected.

Clients which make excessive connection attempts (both successful and unsuccessful) run the risk of having their IP automatically banned.