Basically, I would like to alter the HTTP response before sending it to the client, using transform streams, but my code below throws an error: [Error: write after end].
The documentation at http://nodejs.org/api/stream.html#stream_writable_end_chunk_encoding_callback says:
Calling write() after calling end() will raise an error.
How can I prevent write() from being called after end() in this case?
var request = require('request');
var Transform = require('stream').Transform;
var http = require('http');

var parser = new Transform();
parser._transform = function(data, encoding, done) {
  console.log(data);
  this.push(data);
  done();
};

parser.on('error', function(error) {
  console.log(error);
});

http.createServer(function (req, resp) {
  var dest = 'http://stackoverflow.com/';
  var x = request({ url: dest, encoding: null });
  x.pipe(parser).pipe(resp);
}).listen(8000);
A stream is supposed to be used only once, but you're using the same transform stream for each incoming request. On the first request it will work, but when x closes, so will parser: that's why on the second client request you'll see the write after end error.
To fix this, just create a new transform stream on each use:
function createParser() {
  var parser = new Transform();
  parser._transform = function(data, encoding, done) {
    console.log(data);
    this.push(data);
    done();
  };
  return parser;
}

http.createServer(function (req, resp) {
  var dest = 'http://stackoverflow.com/';
  var x = request({ url: dest, encoding: null });
  x.pipe(createParser()).pipe(resp);
}).listen(8000);
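As a side note, on Node 4 or later (an assumption about your environment) the transform function can also be passed straight to the Transform constructor instead of assigning _transform after the fact. A sketch of the same factory in that style:

function createParser() {
  // "transform" option: simplified stream construction (Node >= 4)
  return new Transform({
    transform: function(data, encoding, done) {
      console.log(data);
      this.push(data);
      done();
    }
  });
}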
I have socket.io sending a basic object from server to client. This bit works fine.
Now I want to send a stream from server to client, using event-stream (specifically the results of a blockchain query). I am getting unexpected results in the browser console.
var io = require('socket.io')(server);
var dsteem = require('dsteem');
var es = require('event-stream');
var util = require('util');
var client = new dsteem.Client('https://api.steemit.com');
var stream = client.blockchain.getBlockStream();
/* This sends results to stdout, fine:
io.on('connection', function(socket){
  stream.pipe(es.map(function(block, callback) {
    callback(null, util.inspect(block) + '\n');
  })).pipe(process.stdout);

  // And this sends a simple object to the client
  socket.emit('blockchainOps', { "Foo!": "Doo!" });
});
*/
// Putting both together sends strange connection data to client
io.on('connection', function(socket){
  socket.emit('blockchainOps', function() {
    stream.pipe(es.map(function(block, callback) {
      callback(null, util.inspect(block) + '\n');
    }));
  });
});
What I get in the client console appears to be some kind of TCP socket function,
ƒ (){if(!n){n=!0;var r=a(arguments);u("sending ack %j",r),e.packet({type:i.ACK,id:t,data:r})}}
Can anyone help me understand what's going on and what I'm doing wrong?
== EDIT UPDATE ==
As suggested in comments, I've tried socket.io-stream to augment event-stream.
var es = require('event-stream');
var util = require('util');
var ss = require('socket.io-stream');
var stream = ss.createStream();

io.on('connection', function(socket){
  ss(socket).emit('blockchainOps', stream, function(){
    client.blockchain.getBlockStream()
      .pipe(es.map(function(block, callback) {
        callback(null, util.inspect(block) + '\n');
      }))
      .pipe(process.stdout);
  }());
});
This time I get a socket object returned in the browser console, which does not seem to be the stream data I was hoping for.
If anyone is looking for a working socket.io stream example:
// server side
const { pipeline } = require('stream')
const server = require('http').Server().listen(8080)
const io = require('socket.io')(server)
const ss = require('socket.io-stream')

io.on('connection', (socket) => ss(socket).on('stream', (stream) => {
  pipeline(stream, process.stdout, (err) => err && console.log(err))
}));
// client side
const { pipeline } = require('stream')
const client = require('socket.io-client')
const ss = require('socket.io-stream')
const socket = client.connect('http://localhost:8080')

socket.on('connect', () => {
  const stream = ss.createStream()
  ss(socket).emit('stream', stream)
  pipeline(process.stdin, stream, (err) => err && console.log(err))
});
You're using socket.emit wrong: you're passing the ACK callback to the client instead of your stream. Have a look at the socket.emit signature: socket.emit(eventName[, ...args][, ack]).
You probably want something like
socket.emit('blockchainOps', client.blockchain.getBlockStream());
However, I don't think plain socket.io supports passing a Stream like that. To pipe a stream down to the client you could use socket.io-stream. It would look like this:
var ss = require('socket.io-stream');
var stream = ss.createStream();
ss(socket).emit('blockchainOps', stream);
client.blockchain.getBlockStream().pipe(stream);
EDIT:
On the client, you should be able to read your stream like this:
<script src="socket.io/socket.io.js"></script>
<script src="socket.io-stream.js"></script>
...
ss(socket).on('blockchainOps', function(stream) {
  var binaryString = "";
  stream.on('data', function(data) {
    for (var i = 0; i < data.length; i++) {
      binaryString += String.fromCharCode(data[i]);
    }
  });
  stream.on('end', function(data) {
    console.log(binaryString);
    binaryString = "";
  });
});
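As a design note, the byte-by-byte String.fromCharCode loop above works for ASCII but will mangle multi-byte UTF-8 characters. A variant that decodes each chunk with the browser's TextDecoder might look like this (a sketch, assuming the chunks arrive as Uint8Array and the payload is UTF-8 text):

ss(socket).on('blockchainOps', function(stream) {
  var decoder = new TextDecoder(); // browser API, UTF-8 by default
  var text = "";
  stream.on('data', function(data) {
    // stream: true lets characters span chunk boundaries safely
    text += decoder.decode(data, { stream: true });
  });
  stream.on('end', function() {
    console.log(text);
    text = "";
  });
});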
I am using express to create a webservice that will read string data from a stream and respond to the HTTP POST request with that value. Here is the code for the S3Store.js file that defines the readFileFromS3() function:
S3Store.js
S3Store.prototype.readFileFromS3 = function(filename, callback) {
  var readConfig = {
    'Bucket': 'shubham-test',
    'Key': filename
  };
  var readStream = this.s3.getObject(readConfig).createReadStream();
  var allData = '';
  readStream.on('data', function(data) {
    //data = Buffer.concat([allData, data]);
    data = allData + data;
    console.log("data: " + data);
  });
  readStream.on('error', function(err) {
    callback(err, null);
  });
};
Now, if I call this method from a terminal like this:
s3Instance.readFileFromS3('123.json', function(err, data) {
  console.log(data);
});
I see the appropriate string for data logged to the console. However, when I call the same method from inside one of the routes in express for HTTP POST requests, the service responds with data set to an empty string. Code for the POST request:
router.post('/resolve', function(req, res) {
  var commandJson = req.body;
  var appId = commandJson['appId'];
  var command = commandJson['text'];
  if (appId == undefined || command == undefined) {
    res.status(400).send("Malformed Request: appId: " + appId + ", command: " + command);
  }
  s3Store.readFileFromS3('123.json', function(err, data) {
    res.send(data);
  });
});
Why does it return an empty string when readFileFromS3() is called from the HTTP POST route, but not when I run the same method directly from the node console?
You're logging the data but you're not passing anything to the completion callback (see below for some more explanation):
S3Store.prototype.readFileFromS3 = function(filename, callback) {
  var readConfig = {
    'Bucket': 'shubham-test',
    'Key': filename
  };
  var readStream = this.s3.getObject(readConfig).createReadStream();
  var allData = [];

  // Keep collecting data.
  readStream.on('data', function(data) {
    allData.push(data);
  });

  // Done reading, concatenate and pass to completion callback.
  readStream.on('end', function() {
    callback(null, Buffer.concat(allData));
  });

  // Handle any stream errors.
  readStream.on('error', function(err) {
    callback(err, null);
  });
};
I took the liberty of rewriting the data collection to use Buffers instead of strings, but this obviously isn't a requirement.
The callback argument is a completion function, meant to be called when either reading the S3 stream is done, or when it has thrown an error. The error handling was already in place, but not the part where you would call back when all the data from the stream was read, which is why I added the end handler.
At that point, the readStream is exhausted (everything from it has been read into allData), and you call the completion callback with the collected data as the second argument.
The common idiom throughout Node is that completion callbacks take (at least) two arguments: the first is either an error, or null when there aren't errors, and the second is the data you want to pass back to the caller (in your case, the anonymous function in your route handler that calls res.send()).
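Following that idiom, the route handler should also check the error argument before sending a response. A minimal sketch of the call site, assuming the rewritten Buffer-based version above:

s3Store.readFileFromS3('123.json', function(err, data) {
  if (err) {
    // Surface the failure instead of silently sending nothing
    return res.status(500).send(err.message);
  }
  res.send(data.toString()); // data is a Buffer in the rewritten version
});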
I'm currently working on a small API with nodejs and restify that requires a file upload, done by receiving the file as a binary stream.
What I don't know how to do is test it with mocha, so I've been searching and found Unit test file upload with mocha on Stack Overflow. It's a fine start, but it won't work here because it sends a multipart form, and what I require the client to send to the API is the file as a raw stream.
Here's my controller:
var fs = require('fs');
var restify = require('restify');
// Video is assumed to be a Mongoose model required elsewhere in this module

exports.uploadVideo = function(req, res, next) {
  var video = "public/video/" + req.params.videoId + ".mp4",
      util = require('util'),
      exec = require('child_process').exec;

  var newFile = fs.createWriteStream("./uploads/" + video);
  req.pipe(newFile);

  req.on('end', function () {
    var cmd = 'qtfaststart ' + './uploads/' + video;
    var qtfaststart = exec(cmd, function(error, stdout, stderr){
      if (error === "atom not found, is this a valid MOV/MP4 file?\n" || error !== null) {
        return next(new restify.ConflictError("Error: " + stdout));
      } else {
        fs.chmodSync('./uploads/' + video, '644');
        Video.findOne({ _id: req.params.videoId }, function(err, video) {
          if (err) return next(new restify.ConflictError(err));
          if (!video) {
            newVideo = new Video({
              _id: req.params.videoId,
              file: video
            });
            newVideo.save();
          } else {
            video.file = video;
            video.increment();
            video.save();
          }
        });
      }
    });
  });

  req.on('error', function(err){
    return next(new restify.NetworkConnectTimeoutError(err));
  });
};
So given this controller which receives a stream (binary file), and puts the stream together on the backend, how would I test this controller with mocha?
You could just use http for that:
it('should be possible to upload a file', function(done) {
  var http = require('http');
  var options = require('url').parse(YOUR_URL);
  options.method = 'POST';

  var req = http.request(options, function(response) {
    // TODO: check for errors, correct response, etc...
    done(...);
  });

  require('fs').createReadStream(YOUR_TEST_FILE).pipe(req);
});
You want to use the request module from within mocha. It supports multi-part forms.
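Since the API expects a raw stream rather than a multipart form, the test can also pipe the file straight into a POST made with request. A sketch under that assumption (YOUR_URL and YOUR_TEST_FILE are placeholders, as above):

var request = require('request');
var fs = require('fs');

it('should upload a file as a raw stream', function(done) {
  // Piping into request.post streams the file as the raw request body
  fs.createReadStream(YOUR_TEST_FILE)
    .pipe(request.post(YOUR_URL, function(err, response, body) {
      if (err) return done(err);
      // TODO: assert on response.statusCode / body
      done();
    }));
});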
I have an http server with a handleRequest callback that runs another script in vm.runInNewContext for each request. The script that runs inside vm.runInNewContext makes some asynchronous http post requests and writes the server response only after getting the responses from the posts.
As a result, the handleRequest callback returns before the server response is written.
Is this safe, or is there a way to avoid this situation?
Here is some code:
var http = require('http');
var vm = require('vm');

var server = http.createServer(handleRequest);
server.listen(8080);

// Function declaration so it is hoisted and defined when createServer runs
function handleRequest(request, response) {
  // get request data...
  var context = {
    ServerRequest: request,
    ServerResponse: response
  };
  var stringScript = // a string with the script that posts data
  var script = vm.createScript(stringScript);
  script.runInNewContext({ context: context });
}
The script string does this:
var request = require('request');
var zlib = require('zlib');

var options = {....};
var req = request.get(options);

req.on('response', function (res) {
  var chunks = [];
  res.on('data', function(chunk) {
    chunks.push(chunk);
  });
  res.on('end', function() {
    var buffer = Buffer.concat(chunks);
    var encoding = res.headers['content-encoding'];
    if (encoding == 'gzip') {
      zlib.gunzip(buffer, function(err, decoded) {
        // set response headers and write the response
        context.ServerResponse.end(decoded.toString());
      });
    } else if (encoding == 'deflate') {
      zlib.inflate(buffer, function(err, decoded) {
        // set response headers and write the response
        context.ServerResponse.end(decoded.toString());
      });
    } else {
      // set response headers and write the response
      context.ServerResponse.end(buffer.toString());
    }
  });
});
Simple solution: Return a promise (e.g. use the Q-library) from the VM-script.
script.runInNewContext will return whatever you return from the VM-script. That way you have a "callback" for when the VM code finishes.
// Script for VM
// I simplified it. Just resolve or reject the promise whenever you are done with your work.
'use strict';

var defer = q.defer();

doABarrelRoll(function() {
  defer.resolve('RESULT');
});

defer.promise; // This line will return the promise.
When returning a value from a VM script, you do not need any return statement. Just make the value you want the last expression in the script and let the magic happen.
// Script for current context
'use strict';

var http = require('http');
var vm = require('vm');

var server = http.createServer(handleRequest);
server.listen(8080);

// Function declaration so it is hoisted and defined when createServer runs
function handleRequest(request, response) {
  // get request data...
  var context = {
    ServerRequest: request,
    ServerResponse: response
  };
  var stringScript = // a string with the script that posts data
  var script = vm.createScript(stringScript);
  var prom = script.runInNewContext({
    context: context,
    q: require('q'),
  });
  prom.done(function ($result) {
    console.log('VM finished with result: ' + $result);
  });
}
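If you'd rather not depend on Q, the same pattern should also work with a native Promise on Node versions that ship one, since a fresh VM context gets its own standard built-ins. A sketch of the VM script under that assumption (doABarrelRoll is still the placeholder async call from above):

// Script for VM, using a native Promise instead of Q
'use strict';

new Promise(function (resolve) {
  doABarrelRoll(function () {
    resolve('RESULT');
  });
}); // last expression: runInNewContext hands this promise back to the caller

The caller would then use prom.then(...) instead of Q's prom.done(...).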
I've been getting along well with node.js until this RxJS implementation.
Here is my trial code, based on studying Reactive-Extensions/rxjs-node:
https://github.com/Reactive-Extensions/rxjs-node
rx_http.js
(an RxJS wrapper around node's http module)
var Rx = require("./rx.min");
var http = require("http");

for (var k in http)
{
  exports[k] = http[k];
}

exports.createServer = function ()
{
  var subject = new Rx.AsyncSubject();
  var observable = subject.asObservable();
  observable.server = http.createServer(function (request, response)
  {
    subject.onNext({ request: request, response: response });
    subject.onCompleted();
  });
  return observable;
};
server.js
var http = require('./rx_http');

// rxServer
var serverObservable = http.createServer();
var port = 3000;
serverObservable.server.listen(port);
console.log("Server listening on port: " + port);

// HTTP request event loop function
serverObservable.subscribe(function (data)
{
  var req = data.request;
  console.log(req.headers);

  var res = data.response;
  res.writeHead(200, { 'Content-Type': "text/html" });
  res.end("hello world");
  console.log("res content out");
});

// exception
process.on('uncaughtException', function (err)
{
  console.log(['Caught exception:', err.message].join(" "));
});
The code ends up sending 'hello world' to the browser once, and then the Rx server stops reacting to further access (browser reload etc.).
I'm still on my way to learning RxJS, and there is little documentation to be found on the web.
Tell me what's wrong with the code, and if you know better implementations, please share.
Thank you.
Use Rx.Subject instead of Rx.AsyncSubject in rx_http.js.
AsyncSubject caches the last value passed to onNext() and propagates it to all observers only once the sequence has completed.
exports.createServer = function ()
{
  var subject = new Rx.Subject();
  var observable = subject.asObservable();
  observable.server = http.createServer(function (request, response)
  {
    subject.onNext({ request: request, response: response });
  });
  return observable;
};
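To see the difference concretely, here is a standalone sketch (assuming the rx package, i.e. RxJS 4.x from Reactive-Extensions):

var Rx = require('rx');

// AsyncSubject: observers only ever receive the LAST value, and only after onCompleted()
var asyncSubject = new Rx.AsyncSubject();
asyncSubject.onNext(1);
asyncSubject.onNext(2);
asyncSubject.onCompleted();
asyncSubject.subscribe(function (x) { console.log('AsyncSubject:', x); }); // logs 2, once

// Subject: observers receive every value pushed while they are subscribed
var subject = new Rx.Subject();
subject.subscribe(function (x) { console.log('Subject:', x); });
subject.onNext(1); // logs 1
subject.onNext(2); // logs 2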
Calling onCompleted() on the subject when the first request arrives ends the observable sequence. Could you please remove that line and try again?
I hope it helps.
Ahmet Ali Akkas