http.ClientRequest.abort() ends program - node.js

I'm having some issues using Node.js as an HTTP client against an existing long-polling server. I'm requiring the 'http' and 'events' modules.
I've created a wrapper object that contains the logic for handling the http.ClientRequest. Here's a simplified version of the code. It works exactly as expected: when I call EndMe it aborts the request as anticipated.
var http = require('http');
var events = require('events');

function lpTest(urlHost, urlPath){
    this.options = {
        host: urlHost,
        port: 80,
        path: urlPath,
        method: 'GET'
    };
    this.req = {};
    events.EventEmitter.call(this);
}

lpTest.super_ = events.EventEmitter;
lpTest.prototype = Object.create(events.EventEmitter.prototype, {
    constructor: {
        value: lpTest,
        enumerable: false
    }
});

lpTest.prototype.getData = function getData(){
    var self = this;
    this.req = http.request(this.options, function(res){
        var httpData = "";
        res.on('data', function(chunk){
            httpData += chunk;
        });
        res.on('end', function(){
            self.emit('res_complete', httpData);
        });
    });
    this.req.end();
};

lpTest.prototype.EndMe = function EndMe(){
    this.req.abort();
};

module.exports = lpTest;
Now I want to create a bunch of these objects and use them to long-poll a bunch of URLs. So I create an object to contain them all, generate each object individually, initiate it, then store it in the containing object. This works a treat: all of the stored long-polling objects fire events and return the data as expected.
var lpObject = require('./lpTest.js');

var objWatchers = {};

function DoSomething(hostURL, hostPath){
    var tempLP = new lpObject(hostURL, hostPath);
    tempLP.on('res_complete', function(httpData){
        console.log(httpData);
        this.getData();
    });
    objWatchers[hostURL + hostPath] = tempLP;
}

DoSomething('firsturl.com', 'firstpath');
DoSomething('secondurl.com', 'secondpath');

objWatchers['firsturl.com' + 'firstpath'].getData();
objWatchers['secondurl.com' + 'secondpath'].getData();
Now here's where it fails... I want to be able to stop a long-polling object while leaving the rest going. So naturally I try adding:
objWatchers['firsturl.com' + 'firstpath'].EndMe();
But this causes the entire node execution to cease and return me to the command line. All of the remaining long-polling objects, which are happily doing what they're supposed to do, suddenly stop.
Any ideas?

Could it have something to do with the fact that you are only calling getData() when the data is being returned?
Fixed code:
function DoSomething(hostURL, hostPath){
    var tempLP = new lpObject(hostURL, hostPath);
    tempLP.on('res_complete', function(httpData){
        console.log(httpData);
    });
    tempLP.getData();
    objWatchers[hostURL + hostPath] = tempLP;
}

I have seemingly solved this, although I'm not entirely happy with how it works:
var timeout = setTimeout(function(){
    objWatchers['firsturl.com' + 'firstpath'].EndMe();
}, 100);
By calling the closing function on the object after a delay I seem to be able to preserve the program execution. Not exactly ideal, but I'll take it! If anyone can offer a better method please feel free to let me know :)
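For what it's worth, here is a minimal sketch of an alternative to the arbitrary 100 ms delay, assuming the trouble is the abort firing before the request is fully underway: defer the abort with setImmediate and guard against EndMe being called before getData() has created a real request. This is a sketch, not a confirmed fix:
lpTest.prototype.EndMe = function EndMe(){
    var self = this;
    // let the current I/O cycle finish before tearing the request down
    setImmediate(function(){
        // only abort if getData() has replaced the placeholder req object
        if (self.req && typeof self.req.abort === 'function') {
            self.req.abort();
        }
    });
};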

Related

Subclassing, extending or wrapping Node.js 'request' module to enhance response

I'm trying to extend request in order to hijack and enhance its response and other 'body' params. In the end, I want to add some convenience methods for my API:
var myRequest = require('./myRequest');

myRequest.get(function(err, hijackedResponse, rows) {
    console.log(hijackedResponse.metadata);
    console.log(rows);
    console.log(rows.first);
});
According to the Node docs on inherits, I thought I could make it work (and using the EventEmitter example in the docs works OK). I tried getting it to work using @Trott's suggestion but realized that for my use case it's probably not going to work:
// myRequest.js
var inherits = require('util').inherits;
var Request = require("request").Request;

function MyRequest(options) {
    Request.call(this, options);
}

inherits(MyRequest, Request);

MyRequest.prototype.pet = function() {
    console.log('purr');
};

module.exports = MyRequest;
I've been toying with extend as well, hoping that I could find a way to intercept request's onRequestResponse prototype method, but I'm drawing blanks:
var extend = require('extend'),
    request = require("request");

function myResponse() {}

extend(myResponse, request);
// maybe some magic happens here?

module.exports = myResponse;
Ended up with:
var extend = require('extend'),
    Ok = require('objectkit').Ok;

function MyResponse(response) {
    var rows = Ok(response.body).getIfExists('rows');

    extend(response, {
        metadata: extend({}, response.body),
        rows: rows
    });

    response.first = (function() {
        return rows[0];
    })();

    response.last = (function() {
        return rows[rows.length - 1] || rows[0];
    })();

    delete response.metadata.rows;

    return response;
}

module.exports = MyResponse;
Keep in mind in this example, I cheated and wrote it all inside the .get() method. In my final wrapper module, I'm actually taking method as a parameter.
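As a rough, hypothetical sketch of what taking the method as a parameter could look like (the wrapper shape and file path here are assumptions, not the final module; MyResponse is the response-enhancing function from the previous snippet):
var request = require('request');
var MyResponse = require('./myResponse'); // hypothetical path to the snippet above

function wrap(method) {
    return function (url, callback) {
        request[method](url, function (error, response, body) {
            if (error) { return callback(error); }
            var hijacked = MyResponse(response);
            callback(null, hijacked, hijacked.rows);
        });
    };
}

module.exports = {
    get: wrap('get'),
    post: wrap('post')
};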
UPDATED to answer the edited question:
Here's a rough template for the contents of your myResponse.js. It only implements get(). But as a bare bones, this-is-how-this-sort-of-thing-can-be-done demo, I hope it gets you going.
var request = require('request');

var myRequest = {};

myRequest.get = function (callback) {
    // hardcoding url for demo purposes only
    // could easily get it as a function argument, config option, whatever...
    request.get('http://www.google.com/', function (error, response, body) {
        var rows = [];
        // only checking error here but you might want to check the response code as well
        if (!error) {
            // mess with response here to add metadata. For example...
            response.metadata = 'I am awesome';
            // convert body to rows however you process that. I'm just hardcoding.
            // maybe you'll use JSON.parse() or something.
            rows = ['a', 'b', 'c'];
            // You can add properties to the array if you want.
            rows.first = 'I am first! a a a a';
        }
        // now fire the callback that the user sent you...
        callback(error, response, rows);
    });
};

module.exports = myRequest;
ORIGINAL answer:
Looking at the source code for the Request constructor, it requires an options object that in turn requires a uri property.
So you need to specify such an object as the second parameter in your call():
Request.call(this, {uri: 'http://localhost/'});
You likely don't want to hard code uri like that inside the constructor. You probably want the code to look something more like this:
function MyRequest(options) {
    Request.call(this, options);
}

...

var myRequest = new MyRequest({uri: 'http://localhost/'});
For your code to work, you will also need to move util.inherits() above the declaration of MyRequest.prototype.pet(). It appears that util.inherits() clobbers any existing prototype methods of the first argument.
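Putting those pieces together, a minimal sketch of myRequest.js following this answer might look like the following (the uri value is only a placeholder):
// myRequest.js
var inherits = require('util').inherits;
var Request = require('request').Request;

function MyRequest(options) {
    Request.call(this, options);
}

// call inherits() first, so it does not clobber the prototype method below
inherits(MyRequest, Request);

MyRequest.prototype.pet = function() {
    console.log('purr');
};

module.exports = MyRequest;

// usage: pass an options object with a uri property, as the constructor requires
// var myRequest = new MyRequest({uri: 'http://localhost/'});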

Passing a return from one function to another function that already has set parameters?

Edit: I know JS is asynchronous, and I have looked over the How to Return thread. The issue I'm having is that, going from "foo" examples to something specific, I'm not quite sure how to restructure this.
Also here is some context: https://github.com/sharkwheels/beanballs/blob/master/bean-to-osc-two.js
I have a question about returns in node. It might be a dumb question, but here goes. I have a function that connects to a socket, and gets OSC messages from processing:
var sock = dgram.createSocket("udp4", function(msg, rinfo) {
    try {
        // get at all that info being sent out from Processing.
        //console.log(osc.fromBuffer(msg));
        var getMsg = osc.fromBuffer(msg);
        var isMsg = getMsg.args[0].value;
        var isName = getMsg.args[1].value;
        var isAdd = getMsg.address;
        var isType = getMsg.oscType;

        // make an array out of it
        var isAll = [];
        isAll.push(isName);
        isAll.push(isMsg);
        isAll.push(isAdd);
        isAll.push(isType);

        // return the array
        console.log(isAll);
        return isAll;
    } catch (error) {
        console.log(error);
    }
});
Below I have the start of another function, to write some of that array to a BLE device. It needs name and characteristics from a different function. How do I get the below function to use isAll AND two existing parameters?
var writeToChars = function (name, characteristics) { // this is passing values from the BLE setup function
    // i need to get isAll to here.
    // eventually this will write some values from isAll into a scratch bank.
};
Thanks.
An async call in this case can be written something like this. State can be maintained in closure variables if required. In this particular case you can do without any state (isAll) as well.
var isAll;
var soc = dgram.createSocket('udp4', oncreatesocket);

function oncreatesocket(msg, rinfo) {
    isAll = parseMessage(msg);
    writeData(isAll);
}

function parseMessage(msg) {
    ...
    // code to parse msg and return isAll
}

function writeData() {}
If writeData is a small enough function, it can live inside oncreatesocket without impacting the readability of the code.
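For example, a hedged sketch of wiring this together so writeData receives both the parsed array and the two pre-existing parameters (osc-min is assumed as the OSC parser, and bleName/bleCharacteristics are placeholders standing in for values produced by the BLE setup code):
var dgram = require('dgram');
var osc = require('osc-min'); // assumed OSC parser, matching osc.fromBuffer() in the question

function parseMessage(msg) {
    var parsed = osc.fromBuffer(msg);
    // [isName, isMsg, isAdd, isType], matching the question's parsing code
    return [parsed.args[1].value, parsed.args[0].value, parsed.address, parsed.oscType];
}

// capture the two existing parameters in a closure, then accept isAll later
function makeWriteData(name, characteristics) {
    return function writeData(isAll) {
        console.log('writing', isAll[1], 'for', name, 'using', characteristics.length, 'characteristics');
    };
}

var bleName = 'Bean';        // placeholder: really comes from the BLE setup function
var bleCharacteristics = []; // placeholder: really comes from the BLE setup function
var writeData = makeWriteData(bleName, bleCharacteristics);

var sock = dgram.createSocket('udp4', function (msg, rinfo) {
    writeData(parseMessage(msg));
});
sock.bind(12000); // port is illustrative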
Alright. So I figured out what to do, at least in this scenario. I'm sure there is a better way to do this, but for now, this works.
I'm mapping an existing global array of peripherals into the write function, while passing the OSC message to it as a parameter. This solved my issue of "how do I get two pieces of information to the same place". It figures out which peripheral is which and writes a different value to each scratch bank of each peripheral accordingly. Leaving here for future reference.
var writeToBean = function(passThrough){
    console.log("in Write to bean: ", passThrough);
    _.map(beanArray, function(n){
        if(n.advertisement.localName === passThrough.name){
            //var name = n.advertisement.localName;
            n.discoverSomeServicesAndCharacteristics(['a495ff20c5b14b44b5121370f02d74de'], [scratchThr], function(error, services, characteristics){
                var service = services[0];
                var characteristic = characteristics[0];
                var toSend = passThrough.msg;
                console.log("service", service);
                console.log("characteristic", characteristic);
                if (toSend != null) {
                    characteristic.write(new Buffer([toSend]), false, function(error) {
                        if (error) { console.log(error); }
                        console.log("wrote " + toSend + " to scratch bank 3");
                    });
                }
                // not sure how to make the program resume, it stops here. No error, just stops processing.
            });
        }
    });
};

How to manipulate the result set of more than one function using node js

I have two Node.js files, restservice.js and mysql.js.
In mysql.js I have two functions, elementlevelpricing and pricingdetail.
In restservice.js I have an API which has this code:
var workload = req.body;

var workloadinfo = {
    workloadId: workload.workloadId,
    ownerId: workload.ownerId,
    uniqueName: workload.uniqueName,
    name: workload.name
};

if(workload.elements && workload.elements.length > 0)
{
    var elementlevelpricingSummary = {};
    var elementArray = [];
    var elementinfo = {};
    var metadataModified = {};
    var pricingDetail = {};

    async.forEachSeries(workload.elements, createResponse, function (err) {
        res.send(workloadinfo);
    });

    function createResponse(elements, callback) {
        var resourceIdentifierArray = [];
        elementinfo = elements;
        resourceIdentifierArray.push(elements.uri);
        var resourceIdentifiers = resourceIdentifierArray.join(',');

        // Get element level pricing summary
        mysql.elementlevelpricing(resourceIdentifiers, function(result){
            // do some stuff here
            return callback();
        });
    }
}
I need to call the function pricingdetail in mysql.js and append the result to the global variable workloadinfo (which should already hold the result set of elementlevelpricing; that's what is sent within forEachSeries). Can anyone suggest the proper way to accomplish this?
Use asynchronous functions. The whole point of Node.js is to avoid blocking. Blocking in Node.js is worse than blocking in threaded environments, because there aren't any other threads (though there may be other clustered processes). You're blocking the only event loop available. That means that your whole server has to wait, doing absolutely no work until your I/O is done.
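A minimal sketch of that idea applied to the code above, assuming mysql.pricingdetail follows the same (args, callback) convention as mysql.elementlevelpricing:
function createResponse(elements, callback) {
    var resourceIdentifiers = [elements.uri].join(',');

    mysql.elementlevelpricing(resourceIdentifiers, function (pricingResult) {
        // keep the element-level result on the shared response object
        workloadinfo.elementlevelpricing = pricingResult;

        // chain the second asynchronous call and append its result as well
        mysql.pricingdetail(resourceIdentifiers, function (detailResult) {
            workloadinfo.pricingdetail = detailResult;
            return callback(); // only now let forEachSeries move on to the next element
        });
    });
}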

Bytes sent/received for Node.js HTTP request

Once an HTTP request has been served, I would like to log the number of bytes sent/received.
A simple source for this data is req.connection.bytesRead/.bytesWritten. However, this is problematic for HTTP 1.1 keep-alive connections, as the same socket can be used for multiple requests. I need to log per-request, not per-connection.
The solution must lie on the HTTP side of things, but I see no methods documented for getting the data I need.
What is the proper way to calculate bytes read/written for HTTP requests served by Node.js's http.Server?
Unfortunately, I never found a proper way to do this. I've resorted to some fairly terrible duck punching, but it works for my particular use case. In case anyone else stumbles along with this problem, you can start with this and refine from there.
Module #1: "Extra Events"
All this module does is make the response object emit a finishBeforeSocketDestroy event. Since I needed this event in a few places in my application, I effectively made a separate module just for this duck punch. app.use() it before Module #2.
module.exports = function (req, res, next) {
    var end = res.end;
    res.end = function () {
        res.end = end;
        res.emit('finishBeforeSocketDestroy');
        res.end.apply(this, arguments);
    };
    next();
};
Module #2: "Stats"
This module creates a req.stats object, containing all sorts of useful goodies for tracking bandwidth usage while the connection is in use and after it is finished.
var pollTime = 1000;

module.exports = function (req, res, next) {
    var pollInterval;

    function pollStats () {
        if (typeof req.stats._lastMeasuredTime === 'object') {
            var secondsSinceLastMeasurement = ((new Date() - req.stats._lastMeasuredTime) / 1000);
            req.stats.averageRate = {
                read: (req.socket.bytesRead - req.stats.bytesRead) / secondsSinceLastMeasurement,
                write: (req.socket.bytesWritten - req.stats.bytesWritten) / secondsSinceLastMeasurement
            };
        }
        req.stats._lastMeasuredTime = new Date();
        req.stats.bytesRead = req.socket.bytesRead;
        req.stats.bytesWritten = req.socket.bytesWritten;
    }

    req.stats = {
        startTime: new Date(),
        endTime: null,
        averageRate: {read: null, write: null},
        bytesRead: req.socket.bytesRead,
        bytesWritten: req.socket.bytesWritten,
        _lastMeasuredTime: new Date()
    };

    pollInterval = setInterval(pollStats, pollTime);

    res.on('finishBeforeSocketDestroy', function () {
        clearInterval(pollInterval);
        pollStats();
        req.stats.endTime = new Date();
    });

    next();
};
Like I said... messy. I'm only posting it because duck punching may be your only option. Also beware that the socket may get re-used for multiple HTTP requests, which could cause you to double-count some bytes if you're not careful.
Just store the traffic value after each response and calculate the difference in the 'finish' or 'end' handler:
// server.onRequest:
...
req._prevBytesWritten = 0;
// response.onFinish/onEnd:
...
responseLen = req.socket.bytesWritten - req._prevBytesWritten;
req._prevBytesWritten = req.socket.bytesWritten;
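A hedged expansion of that fragment into a runnable sketch; keeping the previous counter on the socket (rather than the request) is a choice made here so it survives keep-alive reuse, and 'finish' fires once the response has been handed off to the OS:
var http = require('http');

http.createServer(function (req, res) {
    var socket = req.socket;
    if (socket._prevBytesWritten === undefined) {
        socket._prevBytesWritten = 0; // first request on this connection
    }

    res.on('finish', function () {
        var responseLen = socket.bytesWritten - socket._prevBytesWritten;
        socket._prevBytesWritten = socket.bytesWritten;
        console.log('wrote roughly %d bytes for %s', responseLen, req.url);
    });

    res.end('hello');
}).listen(8080);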

Node.js sending an object with function definitions to worker thread

So I am working on a project in Node.js and I want to open up some extra threads to handle the processing load more efficiently. But I am using classes with function definitions in them, and when I try to send those objects to the worker thread, the functions defined on the object disappear and I am only left with the other fields. Is there a way to send the worker an object and preserve the functions so they can be called within the worker?
var cluster = require('cluster');

if(cluster.isMaster){
    Monster = function(species){
        this.attack = function(){
            console.log('CHOMP');
        };
        this.name = species;
    };

    var vamp = new Monster('vampire'),
        worker = cluster.fork();

    worker.send({'monster' : vamp});
}
else{
    process.on('message', function(msg) {
        console.log(msg.monster); //this logs "{ name: 'vampire' }"
        msg.monster.attack(); //TypeError: Object #<Object> has no method 'attack'
    });
}
No, there is no way to pass functions between threads. You can only pass plain JS objects (data only) and handle them with functions defined in the current thread (for example, create a new object from the received data).
Charlie, I realize you asked this question a year ago, but I wanted to do something very similar and came across your question, which you haven't marked an answer to yet. I thought I would take a "stab" at it and show you what I have done with your code. This different way of organizing code is, for me, a very acceptable workaround in my Node.js work. I am pretty sure this gives you a way to accomplish what you want, even though you can't do it in the manner you wanted.
Declare your "class" outside the cluster code, like this:
var cluster = require('cluster');

var Monster = function(species){
    this.attack = function(){
        console.log('CHOMP!');
    };

    this.die = function() {
        console.log("Oh, what did I eat? I don't feel so good....\r\n");
        process.exit(0);
    };

    this.scare = function() {
        console.log("BOO! I am a " + this.name + "!");
    };

    this.name = species;
};

if(cluster.isMaster){
    worker = cluster.fork();
    worker.send({'species' : 'Vampire'});
}
else{
    process.on('message', function(msg) {
        if(typeof msg.species !== "undefined") {
            myMonster = new Monster(msg.species);
            myMonster.scare();
            myMonster.attack();
            myMonster.die();
        }
    });
}
Give that a whirl and see if this is an answer you can accept!
Ok, stumbled upon this answer, and I found it strange that no one brought this up, but it might be a more modern feature than the question:
eval
let str = "() => { console.log('test') }"
let func = eval(str)
func()
I think it's obvious what's going on here: you can evaluate any string as JavaScript, and you can send strings to workers, so you can build an object with functions:
let obj = { a: "() => { ... }" }
and send the object over. (JSON.stringify(obj) first; on the receiving side you will have to parse the object first, and then eval all the substrings separately.)
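A minimal sketch of that approach with cluster (property names here are illustrative; each function travels as a string and is rebuilt with eval() on the worker side):
var cluster = require('cluster');

if (cluster.isMaster) {
    var worker = cluster.fork();
    worker.send({
        name: 'vampire',
        attack: "() => { console.log('CHOMP'); }" // function as a string
    });
} else {
    process.on('message', function (msg) {
        var attack = eval(msg.attack); // turn the string back into a function
        console.log(msg.name);         // "vampire"
        attack();                      // "CHOMP"
    });
}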
