Sending contents of var within http.request area through socket - node.js

I have a program written with node that connects to Flash. In Flash, when a key is pressed, the keypress is sent through a socket to node, which then echoes the keypress back to Flash with some extra text. The Flash part works fine. The node portion basically works, except for one part where I connect to a web page. What I want to do is get the contents of a web page and echo that back to Flash through a socket. Here's my code:
var net = require('net');
var http = require('http'),
    sys = require('sys'),
    fs = require('fs');

var options = {
  host: 'theurl.com',
  port: 80,
  path: '/',
  method: 'POST'
};

var msg;
var mySocket;

var server = net.createServer(function(socket) {
  mySocket = socket;
  mySocket.on("connect", onConnect);
  mySocket.on("data", onData);
});
function onConnect() {
  console.log("Connected to Flash");
}

function onData(d) {
  if (d == "exit\0") {
    console.log("exit");
    mySocket.end();
    server.close();
  } else if (d == "32\0") {
    var req = http.request(options, function(res) {
      res.setEncoding('utf8');
      res.on('data', function (chunk) {
        msg = chunk;
        console.log("in function: msg = " + msg);
        mySocket.write(msg, 'utf8');
      });
    });
    console.log("right after res: msg = " + msg);
    mySocket.write("right after res: " + msg, 'utf8');
    // write data to request body
    req.write('data\n');
    req.write('data\n');
    req.end();
    mySocket.write("32 part: " + d, 'utf8');
  } else {
    console.log("From Flash = " + d);
    mySocket.write(d, 'utf8');
  }
}

server.listen(8080, "192.168.0.1");
When I press the spacebar (keycode 32), the data seems to be written but doesn't show up on the Flash side until I press 2 keys. Note that the part outside of the http.request callback DOES show up on the Flash side correctly. So the mySocket.write within the res.on('data') handler doesn't show up until 2 keypresses, but the mySocket.write that says "right after res" shows up just like I want it to. So I figure that I need one of these things to happen:
1) The chunk returned inside the http.request callback to be visible outside of that callback
2) The mySocket.write within the http.request callback to work the same way as the mySocket.write outside of it
3) Store the chunk on the hard drive, then reread the stored contents later and send that
Those are the options I've thought of. I think #3 makes no sense, since I seem to have the var in the program but just can't access it where I want to. I'm not sure why the socket doesn't write the same way inside that callback as it does outside of it, but if there were a way to do #1, that seems to me to make the most sense. I am open to suggestions though. Thanks for your time.
Darryl
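A minimal sketch of what option #1 usually looks like in practice (the fetchPage helper is hypothetical; the options are the ones from the question): the variable is only populated once the response arrives, so accumulate the chunks inside the callback and hand the full body to whatever needs it from the 'end' handler, rather than reading the variable outside.

var http = require('http');

// Hypothetical options; substitute the real host/path.
var options = { host: 'theurl.com', port: 80, path: '/', method: 'POST' };

function fetchPage(done) {
  var req = http.request(options, function(res) {
    var body = '';
    res.setEncoding('utf8');
    res.on('data', function(chunk) {
      body += chunk;   // 'data' may fire many times per response
    });
    res.on('end', function() {
      done(body);      // the full page is only available here
    });
  });
  req.write('data\n');
  req.end();
}

// Inside onData, for the "32\0" case, something like:
// fetchPage(function(page) { mySocket.write(page, 'utf8'); });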

Related

how to use nodejs async module?

This is a different question and I am unable to get a solution for it, so please do not mark it as a duplicate.
I cannot access the variable op outside the function. Should I be using an async module for Node.js?
I have two console.logs, but only the one inside the function works.
I have tried answers from other questions, but it is still not working.
var http = require('http');
console.log("hi");

var options = {
  host: 'api.usergrid.com',
  path: '/siddharth1/sandbox/restaurants'
};

var op = []; //declaring outside function

var req = http.get(options, function(res) {
  // Buffer the body entirely for processing as a whole.
  var bodyChunks = [];
  res.on('data', function(chunk) {
    // You can process streamed parts here...
    bodyChunks.push(chunk);
  }).on('end', function() {
    var body = Buffer.concat(bodyChunks);
    // ...and/or process the entire body here.
    var body2 = JSON.parse(body);
    op = body2.entities.map(function(item) {
      return item.name;
    });
    console.log(op); // only this works
  });
});

req.on('error', function(e) {
  console.log('ERROR: ' + e.message);
});

console.log("outside function " + op); //this doesnt work
console.log('Server listening on port 80');
Node.js instantiates the variable op as an empty array:
var op = []; //declaring outside function
It then calls the .get() function of the http module, and passes it options and a callback function.
var req = http.get(options, function(res) {
...
});
The code inside the callback function is not executed until the HTTP response comes back from api.usergrid.com.
Node then continues, and executes the remainder of your code:
console.log("outside function " + op); //this doesnt work
The above line is executed and indeed, op is an empty array, exactly as you defined it - nothing has modified op yet.
The process then idles, waiting for the response to arrive.
Some time later the response arrives. The callback function you registered gets called, the code inside that function executes, and only at that point is op populated.
If I were you, I would look into some fundamental tutorials on Node.js, specifically looking into its non-blocking model. Good luck.
Note: Ryan Dahl's original node.js presentation is a rather long video and a little old, but perfectly explains the way Node.js works, and I highly recommend you give it a watch.
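If you need op elsewhere, the usual pattern is to wrap the request in a function that takes a callback and do all further work from there. A minimal sketch along those lines (the getNames helper is hypothetical; the endpoint is the one from the question):

var http = require('http');

function getNames(cb) {
  var options = {
    host: 'api.usergrid.com',
    path: '/siddharth1/sandbox/restaurants'
  };
  http.get(options, function(res) {
    var bodyChunks = [];
    res.on('data', function(chunk) {
      bodyChunks.push(chunk);
    }).on('end', function() {
      var body = JSON.parse(Buffer.concat(bodyChunks).toString());
      cb(null, body.entities.map(function(item) { return item.name; }));
    });
  }).on('error', function(e) {
    cb(e);
  });
}

// Anything that needs op goes in (or is called from) the callback:
getNames(function(err, op) {
  if (err) { return console.log('ERROR: ' + err.message); }
  console.log("outside function " + op); // now op is populated
});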

what should I use instead of readableStream.push('')

I am trying to implement the _read function of a readable stream. A problem arises when _read is called and there is no data yet: the documentation says that I can push('') until more data comes, and that I should only signal the end (push(null)) when the stream will never have more data.
https://nodejs.org/api/stream.html#stream_readable_read_size_1
But it also says that if I need to do that, then something is terribly wrong with my design.
https://nodejs.org/api/stream.html#stream_stream_push
But I can't find an alternative to that.
code:
var http = require('http');
var https = require('https');
var Readable = require('stream').Readable;
var router = require('express').Router();

var buffer = [];

router.post('/', function(clientRequest, clientResponse) {
  var delayedMSStream = new Readable;

  delayedMSStream._read = function() {
    var a = buffer.shift();
    if (typeof a === 'undefined') {
      this.push('');
      return true;
    } else {
      this.push(a);
      if (a === null) {
        return false;
      }
      return true;
    }
  };

  // I need to get a url from example.com
  https.request({ hostname: 'example.com' }, function(exampleResponse) {
    var data = '';
    exampleResponse.on('data', function(chunk) { data += chunk; });
    exampleResponse.on('end', function() {
      var MSRequestOptions = { hostname: data, method: 'POST' };
      var MSRequest = https.request(MSRequestOptions, function(MSResponse) {
        MSResponse.on('end', function() {
          console.log("MSResponse.on(end)"); //>>>
        }); //end MSResponse.on(end)
      }); //end MSRequest
      delayedMSStream.pipe(MSRequest);
    });
  }).end(); // without end() the request is never actually sent

  clientRequest.on('data', function(chunk) {
    buffer.push(chunk);
  });

  clientRequest.on('end', function() { //when done streaming audio
    buffer.push(null);
  });
}); //end router.post('/')
explanation:
The client sends a POST request streaming audio to my server; my server requests a URL from example.com; when example.com responds with the URL, my server streams the audio to it.
What's a smarter way to do this?
So if I understand the code correctly, you:
receive a request,
make your own request to a remote endpoint and fetch a URL,
make a new request to that URL and pipe that to the original response.
There are ways to do this other than yours, and even your way would look cleaner to me if you just improved the naming a bit. Also, splitting the huge handler into a few functions with smaller responsibility scopes might help.
I would make the endpoint this way:
let http = require('http');
let https = require('https');
let Readable = require('stream').Readable;
let router = require('express').Router();
let buffer = [];
/**
* Gets some data from a remote host. Calls back when done.
* We cannot pipe this directly into your stream chain as we need the complete data to get the end result.
*/
function getHostname(cb) {
https.request({
hostname: 'example.com'
}, function(response) {
let data = '';
response.on('error', err => cb(err)); // shortened for brewity
response.on('data', function(chunk) {
data = data + chunk;
});
response.on('end', function() {
// we're done here.
cb(null, data.toString());
});
});
}
router.post('/', function(request, response) {
// first let's get that url.
getHostname(function(err, hostname) {
if (err) { return response.status(500).end(); }
// now make that other request which we can stream.
https.request({
hostname: hostname,
method: 'POST'
}, function(dataStream) {
dataStream.pipe(response);
});
});
});
Now, as said in the comments, with streams2 you don't have to manage your streams by hand. With Node versions before 0.10 you had to listen to 'data', 'end', etc. events and manage the flow yourself; with newer versions it's handled for you. Furthermore, you don't even need that here: streams are smart enough to handle backpressure on their own.
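As an aside, if you do want a drop-in replacement for the manual buffer array plus custom _read, Node's built-in PassThrough stream buffers whatever is written into it until a consumer is attached, with no _read to implement at all. A minimal, self-contained sketch (the chunks and the delay are hypothetical stand-ins for the audio and the URL lookup):

let stream = require('stream');

// PassThrough queues written chunks internally until something reads them.
let delayed = new stream.PassThrough();
delayed.write('audio chunk 1\n');
delayed.write('audio chunk 2\n');
delayed.end();

// Simulate the URL arriving later; the buffered chunks replay in order.
setTimeout(function() {
  delayed.pipe(process.stdout);
}, 1000);

In the route above, that would mean piping clientRequest into the PassThrough as soon as the request arrives, and piping the PassThrough into the remote request once the hostname is known.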

nodejs/express - stream stdout instantly to the client

I spawned the following child: var spw = spawn('ping', ['-n','10', '127.0.0.1']) and I would like to receive the ping results on the client side (browser) one by one, not as a whole.
So far I tried this:
app.get('/path', function(req, res) {
  ...
  spw.stdout.on('data', function (data) {
    var str = data.toString();
    res.write(str + "\n");
  });
  ...
});
and that:
...
spw.stdout.pipe(res);
...
In both cases the browser waits for all 10 pings to complete, and then prints the result as a whole. I would like to have them one by one; how can I accomplish that?
(Client is just making a call to .../path and console.logs the result)
EDIT: Although I do believe that websockets are necessary to implement this, I just want to know whether there are any other ways. I saw several confusing SO answers and blog posts (in this post, at step one, the OP streams the logs to the browser) which didn't help, so I decided to go for a bounty to get some attention.
Here's a complete example using SSE (Server sent events). This works in Firefox and probably Chrome too:
var cp = require("child_process"),
    express = require("express"),
    app = express();

app.configure(function(){
  app.use(express.static(__dirname));
});

app.get('/msg', function(req, res){
  res.writeHead(200, { "Content-Type": "text/event-stream",
                       "Cache-control": "no-cache" });

  var spw = cp.spawn('ping', ['-c', '100', '127.0.0.1']),
      str = "";

  spw.stdout.on('data', function (data) {
    str += data.toString();
    // just so we can see the server is doing something
    console.log("data");

    // Flush out line by line.
    var lines = str.split("\n");
    for (var i in lines) {
      if (i == lines.length - 1) {
        str = lines[i];
      } else {
        // Note: The double-newline is *required*
        res.write('data: ' + lines[i] + "\n\n");
      }
    }
  });

  spw.on('close', function (code) {
    res.end(str);
  });

  spw.stderr.on('data', function (data) {
    res.end('stderr: ' + data);
  });
});

app.listen(4000);
And the client HTML:
<!DOCTYPE html>
<html>
  <body>
    <ul id="eventlist"> </ul>
    <script>
      var eventList = document.getElementById("eventlist");
      var evtSource = new EventSource("http://localhost:4000/msg");
      var newElement = document.createElement("li");
      newElement.innerHTML = "Messages:";
      eventList.appendChild(newElement);

      evtSource.onmessage = function(e) {
        console.log("received event");
        console.log(e);
        var newElement = document.createElement("li");
        newElement.innerHTML = "message: " + e.data;
        eventList.appendChild(newElement);
      };

      evtSource.onerror = function(e) {
        console.log("EventSource failed.");
      };

      console.log(evtSource);
    </script>
  </body>
</html>
Run node index.js and point your browser at http://localhost:4000/client.html.
Note that I had to use the "-c" option rather than "-n" since I'm running OS X.
If you are using Google Chrome, changing the content-type to "text/event-stream" does what you're looking for.
res.writeHead(200, { "Content-Type": "text/event-stream" });
See my gist for complete example: https://gist.github.com/sfarthin/9139500
This cannot be achieved with the standard HTTP request/response cycle. Basically what you are trying to do is make a "push" or "realtime" server. This can only be achieved with xhr-polling or websockets.
Code Example 1:
app.get('/path', function(req, res) {
  ...
  spw.stdout.on('data', function (data) {
    var str = data.toString();
    res.write(str + "\n");
  });
  ...
});
This code never sends an end signal and therefore will never finish the response. If you were to add a call to res.end() within that event handler, you would only get the first ping, which is the expected behavior: you are ending the response stream after the first chunk of data from stdout.
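If the goal is simply a response that streams and still terminates, the missing piece in that snippet is ending the response when the child exits, not inside the 'data' handler. A minimal sketch (route and port hypothetical; whether the browser renders the chunks incrementally is the separate, client-side issue discussed above):

var spawn = require('child_process').spawn;
var express = require('express');
var app = express();

app.get('/path', function(req, res) {
  var spw = spawn('ping', ['-c', '10', '127.0.0.1']); // use '-n' on Windows
  spw.stdout.on('data', function(data) {
    res.write(data.toString()); // each chunk is written as it arrives
  });
  spw.on('close', function() {
    res.end(); // end the response only once the child has exited
  });
});

app.listen(4000);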
Code Sample 2:
spw.stdout.pipe(res);
Here stdout is flushing the packets to the browser, but the browser may not render the data chunks until enough of them (or all of them) have been received, which is why it waits ten seconds and then renders stdout in its entirety. The major benefit of this method is not buffering the response in memory before sending it, keeping your memory footprint lightweight.
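One way to confirm that the server really is streaming in both cases is to take the browser out of the loop and read the response with a bare HTTP client, which consumes chunks as they arrive. A minimal sketch, assuming the streaming handler above is mounted at /path on port 4000 (both hypothetical here):

// Prints each chunk with a timestamp as it leaves the server,
// demonstrating that the delay observed is browser-side buffering.
var http = require('http');

http.get({ host: 'localhost', port: 4000, path: '/path' }, function(res) {
  res.setEncoding('utf8');
  res.on('data', function(chunk) {
    console.log('chunk at ' + new Date().toISOString() + ': ' + chunk.trim());
  });
  res.on('end', function() {
    console.log('response ended');
  });
});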

Can't get one node.js server to talk to another

I have two node servers, one on port 5000 (call it "Face") and another on port 5001 (call it "Hands")
Both are started via a foreman procfile at the same time. Ports are fixed and the url I'm targeting works in the browser.
When the Hands starts up, it needs to talk to the Face (Facepalm?) and register itself. However, the below code doesn't seem to be working. (This is CoffeeScript-generated JS.)
Register gets called during server initialization, after the HTTP server has been started. In case it was a timing issue, I kick off the register function with a setTimeout() of 2 seconds. I know the page it's hitting (/home/register) is available and working.
Right now I can see it get to the "Posting to" console log line. On the Face I have put a console.log in the register code and it's never logging anything - meaning I don't think it's actually getting hit. (It DOES log if hit from the browser.) And nothing errors out - it just calls the request and then wanders off to get a sandwich.
Both servers are "roll your own" - not using any frameworks. Let me know if you see a weird typo or need more info. Thanks!
register = function() {
  var _this = this;
  console.log('Registering with Face Server');
  return post_face('/home/register', GLOBAL.data, function(rs) {
    console.log(rs);
    if (rs.registered) {
      GLOBAL.data.registered = true;
      return console.log("Registered with face at " + GLOBAL.config.face_host + ":" + GLOBAL.config.face_port);
    } else {
      throw "ERROR: Could not register with face server! " + GLOBAL.config.face_host + ":" + GLOBAL.config.face_port;
      return false;
    }
  });
};

post_face = function(path, data, cb) {
  var post_data, post_options, post_req;
  post_data = querystring.stringify({
    'registration': data
  });
  post_options = {
    host: GLOBAL.config.face_host,
    port: GLOBAL.config.face_port,
    path: path,
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      'Content-Length': post_data.length
    }
  };
  console.log("Posting to " + post_options.host + ":" + post_options.port);
  post_req = http.request(post_options, function(res) {
    var response,
        _this = this;
    console.log(res);
    response = "";
    res.setEncoding('utf8');
    res.on('data', function(chunk) {
      return response += chunk;
    });
    return res.on('end', function() {
      return cb.call(_this, response);
    });
  });
  return true;
};
Thanks to Bill above, the answer was that I wasn't actually posting the data and ending the request - a bad copy/paste/edit from some samples I was referring to. Here are the last two lines of code I should have had:
post_req.write(post_data);
post_req.end();
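For context, those two lines belong at the end of post_face, right after the http.request call: until end() is called, the request is never dispatched, which is exactly the silent "wanders off" behavior described above. A sketch of what the tail of that function ends up looking like (an 'error' handler is worth adding too, or a connection failure will also die silently):

post_req = http.request(post_options, function(res) {
  // ... response handling as above ...
});
post_req.on('error', function(err) {
  console.log("Face request failed: " + err.message);
});
post_req.write(post_data); // send the form body
post_req.end();            // actually dispatch the request
return true;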

Node.js http request memory and cpu hog

I have a node.js script that continuously requests a page, sort of like a cron job.
However, after a few minutes Node starts to use a lot of CPU (up to 70%) and memory (up to 200mb).
What is wrong with my script?
function cron(path)
{
  var http = require('http');
  var site = http.createClient(443, 'www.website.com', true);
  var request = site.request('GET', path, {'host': 'www.website.com'});
  request.end();
  request.on('response', function (response) {
    setTimeout(function(){cron(path)},15000);
  });
}
cron('/path/to/page');
request.on('response', function (response) {
  setTimeout(function(){cron(path)},15000);
});
For every 'response' event you create a new cron job. Log your responses: if you're getting more than one per request, you're creating exponentially more cron jobs.
You're creating a function() {} with a reference to path, so the entire closure scope is kept alive. You can free memory by adding this:
var site = null;
var request = null;
You're calling require('http') inside a function rather than outside in module scope. You only need to get http once, so place it at the top of your file in module scope.
var http = require('http');
var site = http.createClient(443, 'www.website.com', true);

function cron(path)
{
  var request = site.request('GET', path, {'host': 'www.website.com'});
  request.end();
  var once = true;
  request.on('response', doIt);
  function doIt(response) {
    if (once) {         // guard so multiple events schedule only one new job
      once = false;
      doIt = function() {};
      setTimeout(function(){ cron(path); }, 15000);
    }
  }
  request = null;       // drop the reference so the closure can be collected
}
cron('/path/to/page');
In addition to the tips from @Raynos, here's another. I find that recursive calls like this in long-running processes make me a bit nervous, so I'd err on the side of using setInterval instead. I'd maybe split the cron and the HTTP behaviour apart in case you want to try to re-use that logic, although that'll depend on your context:
e.g. in node 0.4.7:
var https = require('https');

function poll(path)
{
  https.get({
    host: 'www.website.com',
    port: 443,
    path: path
  }, function(res) {
    console.log("Got response: " + res.statusCode);
  }).on('error', function(e) {
    console.log("Got error: " + e.message);
  });
}

function cron(path)
{
  return setInterval(function(){
    poll(path);
  }, 15000);
}

var intervalId = cron('/path/to/page'); // keep in case you need to use clearInterval
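One usage note on the poll sketch: the response body is never read. On newer Node versions (with streams2 and later), an unconsumed body stays buffered and can keep the socket tied up; calling res.resume() discards it. A minimal variation (same hypothetical host and path):

var https = require('https');

function poll(path) {
  https.get({ host: 'www.website.com', port: 443, path: path }, function(res) {
    console.log("Got response: " + res.statusCode);
    res.resume(); // drain the body so the socket and buffers are freed
  }).on('error', function(e) {
    console.log("Got error: " + e.message);
  });
}

var intervalId = setInterval(function(){ poll('/path/to/page'); }, 15000);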
