I have a server:
var connect = require('connect');
var serveStatic = require('serve-static');

var HTMLServer = function(path){
    this.path = path;
    this.server = connect().use(serveStatic(this.path));

    this.startServer = function(callback){
        this.server = this.server.listen(8080, callback);
    };

    this.stopServer = function(callback){
        this.server.close(callback);
    };
};
And I use it as follows:
var thisServer = new HTMLServer(__dirname);

thisServer.startServer(function(){
    console.log('Server running on 8080...');

    setTimeout(function(){
        thisServer.stopServer(function(){
            console.log('Server closed');
        });
    }, 3000);
});
As expected, the server starts and stops after 3000 milliseconds.
But if I make a request to the server within those 3000 milliseconds, stopServer is still called, yet the server does not close.
I'm sure the line this.server.close(callback); is executed, but it doesn't close the server as I expect.
How can I fix that?
Is a request to the server changing the server instance in a way that needs a special handling?
Later edit:
I would like to add a clarification now that I've left the code running. It seems the server does get closed, just not instantly: it closes after an amount of time I don't understand, no longer than 5 minutes.
So the close operation seems to be delayed. Can I make it instant somehow?
While #jfriend00 was correct that Node.js keeps running until all existing sockets are finished, the process.exit solution was a bit too radical for my use case and I needed a cleaner way to close the server gracefully.
Looking into getConnections only added more confusion, since it didn't behave as expected (for example, it reported 2 connections even when I hadn't made any request).
I also looked into server.listening, but it returned false even while the server still accepted requests, perhaps from a client that had connected before the server was closed.
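For reference, these are the standard net.Server APIs I was inspecting; a minimal sketch against a generic server instance, not the fix:

// Sketch only: built-in net.Server inspection
server.getConnections(function(err, count) {
    console.log('open connections:', count); // keep-alive sockets from earlier requests are counted here
});
console.log('listening:', server.listening);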
Anyway, the solution for me was the http-shutdown lib, which essentially adds the following .shutdown method to your server object.
function addShutdown(server) {
    var connections = {};
    var isShuttingDown = false;
    var connectionCounter = 0;

    function destroy(socket, force) {
        if (force || (socket._isIdle && isShuttingDown)) {
            socket.destroy();
            delete connections[socket._connectionId];
        }
    }

    function onConnection(socket) {
        var id = connectionCounter++;
        socket._isIdle = true;
        socket._connectionId = id;
        connections[id] = socket;
        socket.on('close', function() {
            delete connections[id];
        });
    }

    server.on('request', function(req, res) {
        req.socket._isIdle = false;
        res.on('finish', function() {
            req.socket._isIdle = true;
            destroy(req.socket);
        });
    });

    server.on('connection', onConnection);
    server.on('secureConnection', onConnection);

    function shutdown(force, cb) {
        isShuttingDown = true;
        server.close(function(err) {
            if (cb) {
                process.nextTick(function() { cb(err); });
            }
        });
        Object.keys(connections).forEach(function(key) {
            destroy(connections[key], force);
        });
    }

    server.shutdown = function(cb) {
        shutdown(false, cb);
    };

    server.forceShutdown = function(cb) {
        shutdown(true, cb);
    };

    return server;
}
With this function, I can update my server as follows, and now stopServer works as expected:
var HTMLServer = function(path){
    this.path = path;
    this.server = connect().use(serveStatic(this.path));

    this.startServer = function(callback){
        this.server = addShutdown(this.server.listen(8080, callback));
    };

    this.stopServer = function(callback){
        console.log("I was called");
        this.server.shutdown(callback);
    };
};
I am attempting to hook up a small game with WebSockets. I am using socket.io, socket.io-p2p, and socket.io-p2p-server. I want users to be automatically paired up against any connected player who doesn't have a partner. I want users to only be connected in pairs.
So far, simply following the docs, I can only get clients to connect using just socket.io. When I attempt to use socket.io-p2p and socket.io-p2p-server, I can sometimes get users to connect, and other times I get error messages on the screen like
"Missing error handler on socket.
TypeError: Cannot read property 'emit' of undefined"
Someone opened an issue for this problem on the repo and never got a response:
https://github.com/tomcartwrightuk/socket.io-p2p-server/issues/5
I don't know if socket.io-p2p-server is broken or if I am just missing something. Furthermore, socket.io-p2p-server has not been touched much since March.
So my main questions are:
Is socket.io-p2p-server still alive?
Is there a better implementation I can use for these abstractions?
Would writing my own logic instead of using socket.io-p2p-server be worth it?
Client-side code:
import P2P from 'socket.io-p2p'
import io from 'socket.io-client'

const socket = io()
const p2pSocket = new P2P(socket, null, function () {
    console.log("my id is: " + p2pSocket.peerId)
})

p2pSocket.on('peer-msg', function (data) {
    console.log(data)
})
Server-side code:
var http = require('http')
var httpServer = http.createServer(requestHandler)
var fs = require('fs')

var io = require('socket.io')(httpServer)
var p2pServerModule = require('socket.io-p2p-server')
var p2p = p2pServerModule.Server
var connectedUsers = p2pServerModule.clients
io.use(p2p)

httpServer.listen(8000, 'localhost');

function serveUpFile(req, res, path) {
    fs.readFile(path.toString(), function (err, data) {
        if (err) {
            res.writeHead(500);
            return res.end('Error loading index.html');
        }
        res.writeHead(200)
        res.end(data)
    })
}

function requestHandler (req, res) {
    if (req.url === '/static/bundle.js') {
        serveUpFile(req, res, './static/bundle.js')
    } else {
        serveUpFile(req, res, './index.html')
    }
}

io.on('connection', function (client) {
    console.log('client connected to the server')

    client.on('peer-msg', function (data) {
        console.log('Message from peer %s', data)
    })

    client.on('disconnect', function () {
        console.log('client disconnected from the server')
    })
})
socket.io-p2p-server will not work with socket.io 1.x as expected.
Change socket.io-p2p-server/index.js:
This part:
if (typeof room === 'object') {
    var connectedClients = socket.adapter.rooms[room.name]
} else {
    var connectedClients = clients
}
To this:
if (typeof room === 'object' && ('name' in room) && (room.name in socket.adapter.rooms)) {
    var connectedClients = {}
    for (var id in socket.adapter.rooms[room.name].sockets) {
        connectedClients[id] = clients[id];
    }
} else {
    var connectedClients = clients;
}
This solution works for me.
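If you decide to skip socket.io-p2p-server and write your own pairing logic (the question's third point), the server side can stay quite small. A minimal sketch with plain socket.io; the room naming scheme and event names below are my own assumptions, not from the original code:

var waitingClient = null;

io.on('connection', function (client) {
    if (waitingClient) {
        // Pair the newcomer with whoever is waiting.
        var room = 'pair-' + waitingClient.id + '-' + client.id;
        waitingClient.join(room);
        client.join(room);
        io.to(room).emit('paired', { room: room });
        waitingClient = null;
    } else {
        // Nobody to pair with yet; hold this client until the next one arrives.
        waitingClient = client;
    }

    client.on('disconnect', function () {
        if (waitingClient === client) {
            waitingClient = null;
        }
    });
});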
I'm using sockjs with standard configuration.
var ws = sockjs.createServer();

ws.on('connection', function(conn) {
    conn.on('data', function(message) {
        wsParser.parse(conn, message);
    });
    conn.on('close', function() {
    });
});

var server = http.createServer(app);
ws.installHandlers(server, {prefix: '/ws'});
server.listen(config.server.port, config.server.host);
The wsParser.parse function works like this:
function(conn, message) {
    (...)
    switch(message.action) {
        case "titleAutocomplete":
            titleAutocomplete(conn, message.data);
            break;
        (...) // a lot more of these
    }
}
Each method called in the switch sends a message back to the client.
var titleAutocomplete = function(conn, data) {
    redis.hgetall("titles:" + data.query, function(err, titles){
        if(err) ERR(err);

        if(titles) {
            var response = JSON.stringify({"action": "titleAutocomplete", "data": {"titles": titles}});
            conn.write(response);
        }
    })
};
Now my problem is that I'd like to write tests for my code (better late than never, I guess) and I have no idea how to do it. I started writing normal HTTP tests with mocha + supertest, but I just don't know how to handle the websockets.
I'd like to have only one websocket connection to reuse through all the tests; I'm binding the websocket connection to the user session after the first message, and I want to test that persistence as well.
How do I make use of the ws client's onmessage event and utilize it in my tests? How can the tests tell received messages apart and know which one they are supposed to wait for?
A colleague at work asked if it really needed to be a client connection, or whether it would be possible to just mock it up. That turned out to be the way to go, so I wrote a little helper, wsMock.js:
var wsParser = require("../wsParser.js");
exports.createConnectionMock = function(id) {
return {
id: id,
cb: null,
write: function(message) {
this.cb(message);
},
send: function(action, data, cb) {
this.cb = cb;
var obj = {
action: action,
data: data
}
var message = JSON.stringify(obj);
wsParser.parse(this, message);
},
sendRaw: function(message, cb) {
this.cb = cb;
wsParser.parse(this, message);
}
}
}
Now in my mocha tests I just do:
var wsMock = require("./wsMock.js");
ws = wsMock.createConnectionMock("12345-67890-abcde-fghi-jklmn-opqrs-tuvwxyz");
(...)
describe('Websocket server', function () {
it('should set sessionId variable after handshake', function (done) {
ws.send('handshake', {token: data.token}, function (res) {
var msg = JSON.parse(res);
msg.action.should.equal('handshake');
msg.data.should.be.empty;
ws.should.have.property('sessionId');
ws.should.not.have.property('session');
done();
})
})
it('should not return error when making request after handshake', function (done) {
ws.send('titleAutocomplete', {query: "ter"}, function (res) {
var msg = JSON.parse(res);
msg.action.should.equal('titleAutocomplete');
msg.data.should.be.an.Object;
ws.should.have.property('session');
done();
})
})
})
It works like a charm and persists connection state and variables between requests.
Pretty simple Node.js question. I want to extend the stream object to re-chunk the data that is coming in from a remote connection. I'm doing multiple telnets and sending commands to other servers, and they send back responses. It looks something like this:
> Hello, this is a command
This is the response to the command.
Sometimes it pauses here (which triggers the 'data' event prematurely).
But the message isn't over until you see the semicolon
;
What I'd like to do, instead of triggering the 'data' event at the pause, is wait for the ; and trigger a custom 'message' event.
I've read and reread this question, but I don't quite get it yet (partially because it's about a writable stream, and partially because I don't yet grok CoffeeScript).
EDIT: I guess I'm asking two things here:
How do I extend/inherit the stream object that net.createConnection uses?
Can I just extend the prototype.write to do a 'split' and re-'emit' each part?
Here's a snip of what I'm doing so far, but the chunking should be part of the stream, not part of the 'data' listener:
var net = require('net');

var nodes = [
    //list of ip addresses
];

function connectToServer(ip) {
    var conn = net.createConnection(3083, ip);

    conn.on('connect', function() {
        conn.write("login command;");
    });

    conn.on('data', function(data) {
        var read = data.toString();
        var message_list = read.split(/^;/m);
        message_list.forEach(function(message) {
            console.log("Autonomous message from " + ip + ':' + message);
            //I need to extend the stream object to emit these instead of handling it here
            //Also, sometimes the data chunking breaks the messages in two,
            //but it should really wait for a line beginning with a ; before it emits.
        });
    });

    conn.on('end', function() {
        console.log("Lost connection to " + ip + "!!");
    });

    conn.on('error', function(err) {
        console.log("Connection error: " + err + " for ip " + ip);
    });
}

nodes.forEach(function(node) {
    connectToServer(node);
});
If I was using a raw stream, I guess it would be something like this (based on code I found elsewhere)?
var messageChunk = function () {
    this.readable = true;
    this.writable = true;
};

require("util").inherits(messageChunk, require("stream"));

messageChunk.prototype._transform = function (data) {
    var regex = /^;/m;
    var cold_storage = '';

    if (regex.test(data)) {
        var message_list = data.split(/^;/m);
        message_list.forEach(function (message) {
            this.emit("data", message);
        });
    }
    else {
        //somehow store the data until data with a /^;/ comes in.
    }
}

messageChunk.prototype.write = function () {
    this._transform.apply(this, arguments);
};
But I'm not using a raw stream; I'm using the stream object that net.createConnection returns.
Don't call the _transform, _read, _write, or _flush functions you implement directly; those are for the internals of Node to use.
Emit a custom event when you see the character ";" in your stream:
var msg = "";
conn.on("data",function(data) {
var chunk = data.toString();
msg += chunk;
if(chunk.search(";") != -1) {
conn.emit("customEvent",msg);
msg = "";
}
});
conn.on("customEvent",function(msg) {
//do something with your message
});
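If you do want the re-chunking to live inside a stream rather than in the 'data' listener (as the question originally asked), the supported way is to subclass Transform and pipe the connection through it, letting Node call _transform for you. A minimal sketch; the class name, delimiter handling, and object-mode choice are my own assumptions:

var stream = require('stream');
var util = require('util');

function MessageSplitter() {
    stream.Transform.call(this, { readableObjectMode: true });
    this._pending = '';
}
util.inherits(MessageSplitter, stream.Transform);

// Node calls _transform for each chunk piped in; we never call it ourselves.
MessageSplitter.prototype._transform = function (chunk, encoding, done) {
    this._pending += chunk.toString();
    var parts = this._pending.split(/^;/m);
    this._pending = parts.pop();    // keep the unterminated tail for the next chunk
    parts.forEach(function (message) {
        this.push(message);         // each complete message becomes one readable chunk
    }, this);
    done();
};

// Usage sketch: pipe the telnet connection through the splitter.
var splitter = new MessageSplitter();
conn.pipe(splitter).on('data', function (message) {
    console.log('message from server:', message);
});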
I wrote a pretty simple function that downloads an image from a given URL, resizes it, and uploads it to S3 (using 'gm' and 'knox'). I have no idea if I'm reading the stream into a buffer correctly (everything is working, but is it the correct way?).
Also, I want to understand something about the event loop: how do I know that one invocation of the function won't leak anything or change the 'buf' variable of another already-running invocation? (Or is this scenario impossible because the callbacks are anonymous functions?)
var http = require('http');
var https = require('https');
var s3 = require('./s3');
var gm = require('gm');

module.exports.processImageUrl = function(imageUrl, filename, callback) {
    var client = http;
    if (imageUrl.substr(0, 5) == 'https') { client = https; }

    client.get(imageUrl, function(res) {
        if (res.statusCode != 200) {
            return callback(new Error('HTTP Response code ' + res.statusCode));
        }

        gm(res)
            .geometry(1024, 768, '>')
            .stream('jpg', function(err, stdout, stderr) {
                if (!err) {
                    var buf = new Buffer(0);

                    stdout.on('data', function(d) {
                        buf = Buffer.concat([buf, d]);
                    });

                    stdout.on('end', function() {
                        var headers = {
                            'Content-Length': buf.length,
                            'Content-Type': 'Image/jpeg',
                            'x-amz-acl': 'public-read'
                        };

                        s3.putBuffer(buf, '/img/d/' + filename + '.jpg', headers, function(err, res) {
                            if (err) {
                                return callback(err);
                            } else {
                                return callback(null, res.client._httpMessage.url);
                            }
                        });
                    });
                } else {
                    callback(err);
                }
            });
    }).on('error', function(err) {
        callback(err);
    });
};
Overall I don't see anything that would break in your code.
Two suggestions:
The way you are combining Buffer objects is suboptimal because it has to copy all the pre-existing data on every 'data' event. It would be better to put the chunks in an array and concat them all at the end.
var bufs = [];
stdout.on('data', function(d){ bufs.push(d); });
stdout.on('end', function(){
    var buf = Buffer.concat(bufs);
})
For performance, I would look into whether the S3 library you are using supports streams. Ideally you wouldn't need to create one large buffer at all, and would instead just pass the stdout stream directly to the S3 library.
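For illustration, here is a streaming upload with a modern client that accepts bodies of unknown length. This swaps knox for @aws-sdk/lib-storage; the bucket name and key are placeholders:

const { S3Client } = require('@aws-sdk/client-s3');
const { Upload } = require('@aws-sdk/lib-storage');

async function uploadStream(stdout, key) {
    // Multipart upload: no Content-Length needed up front.
    const upload = new Upload({
        client: new S3Client({}),
        params: {
            Bucket: 'my-bucket',      // placeholder
            Key: key,
            Body: stdout,             // e.g. the gm output stream
            ContentType: 'image/jpeg',
            ACL: 'public-read',
        },
    });
    return upload.done();
}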
As for the second part of your question, that isn't possible. When a function is called, it is allocated its own private context, and everything defined inside of that will only be accessible from other items defined inside that function.
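A tiny illustration of that point: each call gets its own private buf, so concurrent invocations cannot overwrite each other's variable (names here are made up for the example):

function makeCollector(label) {
    var buf = [];                     // private to this invocation
    return function (chunk) {
        buf.push(chunk);
        console.log(label, buf.length);
    };
}

var first = makeCollector('first');
var second = makeCollector('second');
first('a');   // first 1
first('b');   // first 2
second('c');  // second 1 -- the other invocation's buf is untouched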
Update
Dumping the file to the filesystem would probably mean less memory usage per request, but file IO can be pretty slow so it might not be worth it. I'd say that you shouldn't optimize too much until you can profile and stress-test this function. If the garbage collector is doing its job you may be overoptimizing.
With all that said, there are better ways anyway, so don't use files. Since all you want is the length, you can calculate that without needing to append all of the buffers together, so then you don't need to allocate a new Buffer at all.
var pause_stream = require('pause-stream');

// Your other code.
var bufs = [];
stdout.on('data', function(d){ bufs.push(d); });
stdout.on('end', function(){
    var contentLength = bufs.reduce(function(sum, buf){
        return sum + buf.length;
    }, 0);

    // Create a stream that will emit your chunks when resumed.
    var stream = pause_stream();
    stream.pause();
    while (bufs.length) stream.write(bufs.shift());
    stream.end();

    var headers = {
        'Content-Length': contentLength,
        // ...
    };

    s3.putStream(stream, ....);
});
JavaScript snippet:
function stream2buffer(stream) {
    return new Promise((resolve, reject) => {
        const _buf = [];

        stream.on("data", (chunk) => _buf.push(chunk));
        stream.on("end", () => resolve(Buffer.concat(_buf)));
        stream.on("error", (err) => reject(err));
    });
}
TypeScript snippet:
import { Stream } from "stream";

async function stream2buffer(stream: Stream): Promise<Buffer> {
    return new Promise<Buffer>((resolve, reject) => {
        const _buf = Array<any>();

        stream.on("data", chunk => _buf.push(chunk));
        stream.on("end", () => resolve(Buffer.concat(_buf)));
        stream.on("error", err => reject(`error converting stream - ${err}`));
    });
}
You can easily do this using node-fetch if you are pulling from http(s) URIs.
From the readme:
fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
    .then(res => res.buffer())
    .then(buffer => console.log(buffer))
Note: this solely answers "How to read a stream into a buffer?" and ignores the context of the original question.
ES2018 Answer
Since Node 11.14.0, readable streams support async iterators.
const buffers = [];
// node.js readable streams implement the async iterator protocol
for await (const data of readableStream) {
    buffers.push(data);
}
const finalBuffer = Buffer.concat(buffers);
Bonus: In the future, this could get better with the stage 2 Array.fromAsync proposal.
// 🛑 DOES NOT WORK (yet!)
const finalBuffer = Buffer.concat(await Array.fromAsync(readableStream));
You can convert your readable stream to a buffer and integrate it into your code in an asynchronous way like this:
async function streamToBuffer(stream) {
    return new Promise((resolve, reject) => {
        const data = [];

        stream.on('data', (chunk) => {
            data.push(chunk);
        });

        stream.on('end', () => {
            resolve(Buffer.concat(data));
        });

        stream.on('error', (err) => {
            reject(err);
        });
    });
}
The usage would be as simple as:
// usage
const myStream // your stream
const buffer = await streamToBuffer(myStream) // this is a buffer
I suggest loganfsmyth's method, using an array to hold the data.
var bufs = [];
stdout.on('data', function(d){ bufs.push(d); });
stdout.on('end', function(){
    var buf = Buffer.concat(bufs);
});
In my current working example, I am working with GridFS and npm's Jimp.
var bucket = new GridFSBucket(getDBReference(), { bucketName: 'images' });
var dwnldStream = bucket.openDownloadStream(info[0]._id); // original size
var data = [];

dwnldStream.on('data', function(chunk) {
    data.push(chunk);
});

dwnldStream.on('end', function() {
    var buff = Buffer.concat(data);
    console.log("buffer: ", buff);
    jimp.read(buff)
        .then(image => {
            console.log("read the image!");
            IMAGE_SIZES.forEach((size) => {
                resize(image, size);
            });
        });
});
I did some other research with a string method, but that did not work, perhaps because I was reading from an image file; the array method did work.
const DISCLAIMER = "DONT DO THIS";
var data = "";
stdout.on('data', function(d){
bufs+=d;
});
stdout.on('end', function(){
var buf = Buffer.from(bufs);
//// do work with the buffer here
});
When I did the string method, I got this error from npm's Jimp:
buffer: <Buffer 00 00 00 00 00>
{ Error: Could not find MIME for Buffer <null>
Basically, I think the type coercion from binary to string didn't work so well.
I suggest having an array of buffers and concatenating into the resulting buffer only once, at the end. It's easy to do manually, or one could use node-buffers.
I just want to post my solution. The previous answers were pretty helpful for my research. I use length-stream to get the size of the stream, but the problem here is that the callback fires near the end of the stream, so I also use stream-cache to cache the stream and pipe it into the res object once I know the content length. In case of an error, the stream's error handler just passes it to the callback.
var StreamCache = require('stream-cache');
var lengthStream = require('length-stream');

var _streamFile = function(res, stream, cb) {
    var cache = new StreamCache();

    var lstream = lengthStream(function(length) {
        res.header("Content-Length", length);
        cache.pipe(res);
    });

    stream.on('error', function(err) {
        return cb(err);
    });

    stream.on('end', function() {
        return cb(null, true);
    });

    return stream.pipe(lstream).pipe(cache);
}
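Usage would then be along these lines; the Express route and the file source here are only an illustration:

var fs = require('fs');

app.get('/download', function(req, res) {
    var fileStream = fs.createReadStream('./some-file.bin'); // illustrative source
    _streamFile(res, fileStream, function(err) {
        if (err) console.error('stream failed:', err);
    });
});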
In TS, [].push(bufferPart) doesn't type-check, so:
getBufferFromStream(stream: Part | null): Promise<Buffer> {
    if (!stream) {
        throw 'FILE_STREAM_EMPTY';
    }
    return new Promise(
        (r, j) => {
            let buffer = Buffer.from([]);
            stream.on('data', buf => {
                buffer = Buffer.concat([buffer, buf]);
            });
            stream.on('end', () => r(buffer));
            stream.on('error', j);
        }
    );
}
You can do this by:
async function toBuffer(stream: ReadableStream<Uint8Array>) {
    const list = []
    const reader = stream.getReader()
    while (true) {
        const { value, done } = await reader.read()
        if (value)
            list.push(value)
        if (done)
            break
    }
    return Buffer.concat(list)
}
or using the buffer consumer from node:stream/consumers:
const { buffer } = require('node:stream/consumers')

const buf = await buffer(stream)
You can check the "content-length" header at res.headers. It will give you the length of the content you will receive (how many bytes of data it will send)