Nodejs streaming video files to response blocks after number of clients - node.js

I'm experimenting with ways to stream a video file to a browser client with Node.js. I have working code that does the job, and everything works great:
var express = require('express');
var app = express();
var path = require('path');
var fs = require('fs');
app.get('/',function(req, res){
res.sendFile(__dirname+'/index.html');
});
app.get('/clips/:name',function(req,res){
var file = path.resolve(__dirname,req.params.name);
fs.stat(file, function(err, stats) {
if (err) {
if (err.code === 'ENOENT') {
// 404 Error if file not found
return res.sendStatus(404);
}
res.end(err);
}
var range = req.headers.range;
if (!range) {
// 416 Wrong range
return res.sendStatus(416);
}
var positions = range.replace(/bytes=/, "").split("-");
var start = parseInt(positions[0], 10);
var total = stats.size;
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
var stream = fs.createReadStream(file, { start: start, end: end })
.on("open", function() {
stream.pipe(res);
}).on("error", function(err) {
res.end(err);
});
});
});
app.listen(80);
The only problem with this code is that when multiple client browser tabs are connected to the server, it blocks the next requests until the stream from a previous request has ended.
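One way to narrow this down (a hedged diagnostic sketch rather than a fix): request the same clip twice, concurrently, from a small Node script instead of from browser tabs. Browsers cap the number of concurrent connections per host, which can make the server look like it is serializing requests when it isn't. The clip name sample.mp4 is a placeholder.
// Hedged diagnostic: fetch the same clip twice, concurrently, without a browser.
// If both 206 responses finish at roughly the same time, the server is not
// serializing requests and the blocking is more likely the browser's
// per-host connection limit.
var http = require('http');

function probe(label) {
    http.get({
        host: 'localhost',
        port: 80,
        path: '/clips/sample.mp4',          // placeholder clip name
        headers: { Range: 'bytes=0-1023' }  // ask for the first 1 KiB only
    }, function (res) {
        console.log(label, 'status:', res.statusCode);
        res.resume(); // drain the body so the socket is released
        res.on('end', function () {
            console.log(label, 'finished at', new Date().toISOString());
        });
    }).on('error', function (err) {
        console.log(label, 'error:', err.message);
    });
}

probe('request A');
probe('request B'); // fired immediately, before A has finished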

Related

Chunk/Stream API data using Node.js

We have a requirement to write a Node application that reads image URLs from a database (approximately a million or more), uses the image-size npm package to retrieve image metadata such as height and width, and exposes an API that lists the results.
I am able to console.log the data, but when I convert it to an API I need to chunk the data so it starts appearing in the browser. I'm unable to do that and need help. Here is my code:
var express = require('express');
var url = require('url');
var http = require('http');
var sizeOf = require('image-size');
const sql = require('mssql');
var app = express();
var port = process.env.PORT || 3000;
const hostname = 'localhost';
var config1 = {
user: '*********',
password: '*********',
server: '*********',
database: '*******',
port: 1433,
debug: true,
options: {
encrypt: false // Use this if you're on Windows Azure
}
};
app.get('/', function(req, res){
//res.writeHead(200, { 'Content-Type': 'application/json' });
var finalResult = [];
sql.close();
sql.connect(config1, function (err) {
if (err) console.log(err);
const request = new sql.Request()
var myQuery = `select imagename from media`;
request.stream = true;
request.query(myQuery);
request.on('row', row => {
//console.log('Image : ' + row.ImageUrl);
if (row.ImageUrl != ''){
if (row.ImageUrl.indexOf('http') < 0)
row.ImageUrl = "http:" + row.ImageUrl;
var options = url.parse(row.ImageUrl);
http.get(options, function (response) {
if (response.statusCode == 200)
{
var chunks = [];
response.on('data', function (chunk) {
chunks.push(chunk);
}).on('end', function() {
var buffer = Buffer.concat(chunks);
//console.log(options.href);
//console.log(sizeOf(buffer).height);
var result = {};
result.MaskUrl = row.MaskUrl;
result.ImageUrl = options.href;
result.Height = sizeOf(buffer).height;
result.Width = sizeOf(buffer).width;
result.statusCode = 200;
finalResult.push(result);
//console.log(result);
console.log(finalResult);
res.write(result, function(){
res.end();
});
});
}
else
{
var result = {};
result.MaskUrl = row.MaskUrl;
result.ImageUrl = options.href;
result.Height = 0;
result.Width = 0;
result.statusCode = response.statusCode;
finalResult.push(result);
console.log(result);
res.write(result, function(){
res.end();
});
}
});
}
})
request.on('error', err => {
console.log ('Error for ' + row.ImageUrl );
})
request.on('done', err => {
console.log('Last Time' + finalResult.length);
})
// request.query(myQuery,(err,result) =>{
// console.log(result);
// });
});
console.log('Last Time' + finalResult.length);
res.send(finalResult);
});
app.listen(port, hostname, function(){
console.log('ImageSize running on PORT: ' + port);
});
I tried res.write and res.end without any success.
The probable reason for your problem is that here:
res.write(result, function(){
res.end();
});
You end and close the response just after the first image is read.
I would rewrite the code a little and use a functional framework like scramjet to stream the data straight from the DB. As Nicholas pointed out, it's not super easy to run your code, so I'm writing blindly - but if you fix any of my obvious errors this should just work:
First:
npm install scramjet JSONStream node-fetch
Next, try this code:
var express = require('express');
var sizeOf = require('image-size');
const sql = require('mssql');
var app = express();
var port = process.env.PORT || 3000;
const hostname = 'localhost';
const {DataStream} = require('scramjet');
const fetch = require('node-fetch');
const JSONStream = require('JSONStream');
var config1 = {
user: '*********',
password: '*********',
server: '*********',
database: '*******',
port: 1433,
debug: true,
options: {
encrypt: false // Use this if you're on Windows Azure
}
};
app.get('/', function(req, res, next){
// you should consider not doing these two lines on each request,
// but I don't want to mess you code...
sql.close();
sql.connect(config1, function (err) {
if (err) next(err);
res.writeHead(200, { 'Content-Type': 'application/json' });
const request = new sql.Request();
var myQuery = `select imagename from media`;
request.stream = true;
request.query(myQuery);
const stream = new DataStream();
request.on('row', row => stream.write(row));
stream.filter(
row => row.ImageUrl !== ''
)
.map(
async row => {
if (row.ImageUrl.indexOf('http') !== 0) // url must start with http.
row.ImageUrl = "http:" + row.ImageUrl;
const response = await fetch(row.ImageUrl);
let size = {width:0, height:0};
if (response.status === 200) {
const buffer = await response.buffer();
size = sizeOf(buffer);
}
return {
MaskUrl: row.MaskUrl,
ImageUrl: row.ImageUrl,
Height: size.height,
Width: size.width,
statusCode: response.status
};
}
)
.pipe(
JSONStream.stringify()
).pipe(
res
);
request.on('error', () => {
res.writeHead(500, { 'Content-Type': 'application/json' });
stream.end("{error:true}");
});
request.on('done', () => stream.end());
});
});
app.listen(port, hostname, function(){
console.log('ImageSize running on PORT: ' + port);
});
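If you would rather not add scramjet, here is a minimal, hedged sketch that keeps the structure of the original handler: it streams one JSON array by hand, writes each result as its image is measured, and ends the response only after the query's done event has fired and every outstanding image fetch has finished. It reuses config1, sql, sizeOf, http and url from the question, keeps the question's row.ImageUrl / row.MaskUrl properties, and the /minimal route path is only a placeholder.
app.get('/minimal', function (req, res) {
    sql.connect(config1, function (err) {
        if (err) { return res.status(500).json({ error: 'db connect failed' }); }
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.write('['); // open the JSON array by hand

        var first = true, pending = 0, queryDone = false;
        function push(result) {
            res.write((first ? '' : ',') + JSON.stringify(result));
            first = false;
        }
        function maybeEnd() {
            // close the array exactly once: after the query AND all image fetches finish
            if (queryDone && pending === 0) { res.end(']'); }
        }

        var request = new sql.Request();
        request.stream = true;
        request.query('select imagename from media');

        request.on('row', function (row) {
            if (!row.ImageUrl) { return; }
            if (row.ImageUrl.indexOf('http') !== 0) { row.ImageUrl = 'http:' + row.ImageUrl; }
            pending++;
            http.get(url.parse(row.ImageUrl), function (response) {
                var chunks = [];
                response.on('data', function (c) { chunks.push(c); });
                response.on('end', function () {
                    var ok = response.statusCode === 200;
                    var size = ok ? sizeOf(Buffer.concat(chunks)) : { width: 0, height: 0 };
                    push({
                        MaskUrl: row.MaskUrl,
                        ImageUrl: row.ImageUrl,
                        Height: size.height,
                        Width: size.width,
                        statusCode: response.statusCode
                    });
                    pending--; maybeEnd();
                });
            }).on('error', function () { pending--; maybeEnd(); });
        });

        request.on('done', function () { queryDone = true; maybeEnd(); });
        request.on('error', function () { queryDone = true; maybeEnd(); });
    });
});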

Videojs unable to seek flv beyond the buffer line from a node http server

I want to make a player that supports FLV, so I have to stream video files through a Node HTTP server. For MP4 it works perfectly. But, I don't know why, for FLV the browser will not send another request when I click beyond the buffer line.
The server is based on "Streaming a video file to an html5 video player with Node.js so that the video controls continue to work?".
You can try it by starting the server and opening http://localhost:8080/?path=E:/1.flv
"use strict";
var fs = require("fs");
var http = require("http");
var url = require("url");
var path = require("path");
var MIMETypes = new Map();
MIMETypes.set(".flv", "video/x-flv").set(".mp4", "video/mp4").set(".avi", "video/x-msvideo");
exports.server = http.createServer(function (req, res) {
var query = url.parse(req.url, true).query;
if (!query.path) {
res.end("Empty query");
}
else if (query.path && !query.getStream) {
// return a page.
res.end(`
<head>
<link href="http://vjs.zencdn.net/5.9.2/video-js.css" rel="stylesheet">
</head>
<body>
<video id="my-video" class="video-js" controls preload="auto" width="640" height="264" data-setup="{}">
<source src="http://localhost:8080/?getStream=true&path=${query.path}" type='${MIMETypes.get(path.extname(query.path))}'>
</video>
<script src="http://vjs.zencdn.net/5.9.2/video.js"></script>
</body>`);
}
else {
var mimetype = MIMETypes.get(path.extname(query.path));
fs.stat(query.path, function (err, stats) {
if (err) {
console.log("fail to read file stat", err);
res.end(JSON.stringify(query));
}
else {
var range = req.headers.range;
if (!range) {
res.writeHead(206, {
"Accept-Ranges": "bytes",
"Content-Length": stats.size,
"Content-Type": mimetype
});
var stream = fs.createReadStream(query.path);
stream.pipe(res);
}
else {
var positions = range.replace(/bytes=/, "").split("-");
var start = parseInt(positions[0], 10);
var total = stats.size;
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": mimetype
});
var stream_1 = fs.createReadStream(query.path, { start: start, end: end })
.on("open", function () {
stream_1.pipe(res);
}).on("error", function (err) {
res.end(err);
});
}
}
});
}
});
exports.server.listen(8080, "localhost");

Need to send response after an action has completed

I am trying to make a web crawler that crawls IMDB and lists the movie name and rating. This is my index.js file.
Suppose I am crawling 10 movies. I am saving the crawled results in a separate file, say message.txt. Now I want to send this message.txt file as the response to any request. But whenever I make a request, it initially sends an empty file to my browser. I then noticed that it takes some time before the crawled results are saved in the message.txt file. I think this is because all actions in Node.js are asynchronous. So is there a way to send the message.txt file only after crawling is complete?
var express = require('express');
var app = express();
var cheerio = require('cheerio');
var request = require('request');
var fs = require('fs');
app.listen(8080);
console.log('Running');
app.get('/', function(req, res) {
console.log('Recieved the get Request');
var i = 1;
var count = 0;
while (count < 10) {
var url = 'http://www.imdb.com/title/tt' + i + '/';
console.log(url);
count = count + 1;
i = i + 1;
request(url, function(error, response, html) {
if (!error) {
var $ = cheerio.load(html);
var title, ratings, released;
var json = {
title: '',
ratings: '',
released: ''
};
$('.title_wrapper').filter(function() {
var data = $(this);
json.title = data.children().first().text().trim();
json.released = data.children().last().children().last().text().trim();
});
$('.ratingValue').filter(function() {
var data = $(this);
json.ratings = parseFloat(data.text().trim());
});
console.log(json);
fs.appendFile('message.txt', JSON.stringify(json, null, 4) + '\n', function(err) {});
};
});
};
res.sendFile(__dirname + '/index.js');
});
You can use the async package (npm install async, then var async = require('async')), which is great for controlling flow. Something like:
console.log('Recieved the get Request');
var i = 1;
var count = 0;
while (count < 10) {
var url = 'http://www.imdb.com/title/tt' + i + '/';
console.log(url);
count = count + 1;
i = i + 1;
async.waterfall([
    function sendRequest (callback) {
        // the HTTP request has to happen inside this waterfall step
        request(url, function(error, response, html) {
            if (error) { return callback(error); }
            var $ = cheerio.load(html);
            var json = {
                title: '',
                ratings: '',
                released: ''
            };
            $('.title_wrapper').filter(function() {
                var data = $(this);
                json.title = data.children().first().text().trim();
                json.released = data.children().last().children().last().text().trim();
            });
            $('.ratingValue').filter(function() {
                var data = $(this);
                json.ratings = parseFloat(data.text().trim());
            });
            callback(null, JSON.stringify(json, null, 4) + '\n');
        });
    },
    function appendFile (json, callback) {
        fs.appendFile('message.txt', json, function(err) {
            if (err) { return callback(err); }
            callback();
        });
    }
], function(err) {
    res.sendFile(__dirname + '/index.js');
});
fs.appendFile('message.txt', JSON.stringify(json, null, 4) + '\n', function(err) {
//This part is executed after the process has been completed
});
You have to use the callback there, as that part will only be called once your operation has completed.
We are relying on the callback feature here; there isn't any concrete value passed back except err in our case, but we don't need anything else anyway.
Please try it.
fs.appendFile() is asynchronous, so the stuff you append to the file won't be there right away when the function returns. So if you want to send that file to the user, you'll need to do it inside the callback you supply to fs.appendFile().
app.get('/', function(req, res) {
...
fs.appendFile(
'message.txt',
JSON.stringify(json, null, 4) + '\n',
function(err) {
if (err) {
// Log the error and send a message to the user here
return;
}
res.sendFile(__dirname + '/index.js')
}
);
};
});
};
});
You may be tempted to use fs.appendFileSync() instead. That would be fine for a command line tool, but since this is a web server, do not do that. It will lock up the thread while the I/O happens.
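For completeness, a hedged sketch of the same idea without the async package: wrap each of the ten crawls in a Promise, wait for all of them with Promise.all, write message.txt once, and only then send that file (which is what the question wants to return). It reuses the express, cheerio, request and fs modules from the question's code.
app.get('/', function (req, res) {
    var jobs = [];
    for (var i = 1; i <= 10; i++) {
        jobs.push(new Promise(function (resolve) {
            var url = 'http://www.imdb.com/title/tt' + i + '/';
            request(url, function (error, response, html) {
                if (error) { return resolve(null); } // skip pages that fail to load
                var $ = cheerio.load(html);
                var json = { title: '', ratings: '', released: '' };
                $('.title_wrapper').filter(function () {
                    var data = $(this);
                    json.title = data.children().first().text().trim();
                    json.released = data.children().last().children().last().text().trim();
                });
                $('.ratingValue').filter(function () {
                    json.ratings = parseFloat($(this).text().trim());
                });
                resolve(json);
            });
        }));
    }
    Promise.all(jobs).then(function (results) {
        var text = results.filter(Boolean)
            .map(function (j) { return JSON.stringify(j, null, 4); })
            .join('\n') + '\n';
        fs.writeFile('message.txt', text, function (err) {
            if (err) { return res.sendStatus(500); }
            res.sendFile(__dirname + '/message.txt'); // sent only after crawling completes
        });
    });
});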

Video from Mongo Grid fs is not playing on Safari browser (also on Cordova app)

I am using MongoDB to store video files in GridFS. It surprised me today when I came to know that the video is not playing in the Safari browser. However, the same video read from GridFS plays fine in Chrome and Firefox. Following are two approaches to reading the video files back from GridFS. Both approaches have the same problem. I have checked that the correct MIME type is getting set.
Approach 1:
exports.previewFile = function (req, res) {
var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
//Read metadata details from fs.files
var query = {_id: contentId};
documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
if (!Utilities.isEmptyList(files)) {
var fileObj = files[0];
var gridStore = DBModule.db.gridStore(contentId, 'r');
gridStore.open(function (err, gridStore) {
var stream = gridStore.stream(true);
if (!Utilities.isEmptyObject(fileObj.metadata)) {
res.setHeader('Content-Type', fileObj.metadata.contentType);
}
stream.on("data", function (chunk) {
log.debug("Chunk of file data");
res.write(chunk);
});
stream.on("end", function () {
log.debug("EOF of file");
res.end();
});
stream.on("close", function () {
log.debug("Finished reading the file");
});
});
} else {
log.error({err: err}, 'Failed to read the content for id ' + contentId);
res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
res.json({error: contentId + " not found"});
}
});
};
Approach 2:
exports.previewFile = function (req, res) {
var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
//Read metadata details from fs.files
var query = {_id: contentId};
documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
if (!Utilities.isEmptyList(files)) {
var fileObj = files[0];
var gridStore = DBModule.db.gridStore(contentId, 'r');
gridStore.read(function (err, data) {
if (!err) {
if (!Utilities.isEmptyObject(fileObj.metadata)) {
res.setHeader('Content-Type', fileObj.metadata.contentType);
}
res.end(data);
} else {
log.error({err: err}, 'Failed to read the content for id ' + contentId);
res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
res.json({error: err});
}
});
} else {
log.error({err: err}, 'Failed to read the content for id ' + contentId);
res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
res.json({error: contentId + " not found"});
}
});
};
Following is a screenshot from Safari for reference.
Please help.
Try this GIST (by https://gist.github.com/psi-4ward). It makes use of the byte range header:
https://gist.github.com/psi-4ward/7099001
Although it does not work for me with Safari, it makes sure that the correct headers are set and the correct content is delivered. It could narrow down your problem.
EDIT
I've updated the GIST. It now works fine with Safari for me.
https://gist.github.com/derMani/218bd18cc926d85a57a1
This should solve your problem
function StreamGridFile(req, res, GridFile) {
if(req.headers['range']) {
// Range request, partially stream the file
console.log('Range Request');
var parts = req.headers['range'].replace(/bytes=/, "").split("-");
var partialstart = parts[0];
var partialend = parts[1];
var start = parseInt(partialstart, 10);
var end = partialend ? parseInt(partialend, 10) : GridFile.length -1;
var chunksize = (end-start)+1;
res.writeHead(206, {
'Content-disposition': 'filename=xyz',
'Accept-Ranges': 'bytes',
'Content-Type': GridFile.contentType,
'Content-Range': 'bytes ' + start + '-' + end + '/' + GridFile.length,
'Content-Length': chunksize
});
// Set filepointer
GridFile.seek(start, function() {
// get GridFile stream
var stream = GridFile.stream(true);
// write to response
stream.on('data', function(buff) {
// count data to abort streaming if range-end is reached
// perhaps there's a better way?
start += buff.length;
if(start >= end) {
// enough data sent, abort
GridFile.close();
res.end();
} else {
res.write(buff);
}
});
});
} else {
// stream back whole file
console.log('No Range Request');
res.header('Content-Type', GridFile.contentType);
res.header('Content-Length', GridFile.length);
var stream = GridFile.stream(true);
stream.pipe(res);
}
}
Regards
Rolf
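For context, a hedged sketch of how StreamGridFile could be wired into the previewFile route from the question, assuming the opened grid store exposes length, contentType, seek and stream the way the gist expects (DBModule, documentOperationModule, Utilities and constants are the question's own objects):
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    var query = {_id: contentId};
    documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
        if (Utilities.isEmptyList(files)) {
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            return res.json({error: contentId + ' not found'});
        }
        var gridStore = DBModule.db.gridStore(contentId, 'r');
        gridStore.open(function (err, gridFile) {
            if (err) {
                res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
                return res.json({error: err});
            }
            // StreamGridFile answers both Range and non-Range requests itself
            StreamGridFile(req, res, gridFile);
        });
    });
};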

using WiFly with ws websockets

I have been trying to figure out a way to connect a WiFly on an Arduino to send some accelerometer data to my node.js server. Currently the way that I have it worked out is by having three servers:
1. Http server >> This is for client purposes.
2. Net server >> This is basically for TCP requests; this is how my server receives the information from the WiFly.
3. WS websockets >> This takes the data from the Net server and streams it to the client side.
Here is the code:
var http = require('http');
var fs = require('fs');
var path = require('path');
var url = require('url');
var net = require('net');
var sensorData;
var message = {
"data": ''
}
var newValue,
oldValue,
diff;
//Settings
var HTTP_PORT = 9000;
var NET_PORT = 9001;
var WS_PORT = 9002;
//Server
var mimeTypes = {
"html": "text/html",
"jpeg": "image/jpeg",
"jpg": "image/jpeg",
"png": "image/png",
"js": "text/javascript",
"css": "text/css"
};
http.createServer(function (req, res) {
var fileToLoad;
if (req.url == '/') {
fileToLoad = 'index.html';
} else {
fileToLoad = url.parse(req.url).pathname.substr(1);
}
console.log('[HTTP] :: Loading :: ' + 'frontend/' + fileToLoad);
var fileBytes;
var httpStatusCode = 200;
fs.exists('frontend/' + fileToLoad, function (doesItExist) {
if (!doesItExist) {
console.log('[HTTP] :: Error loading :: ' + 'frontend/' + fileToLoad);
httpStatusCode = 404;
}
var fileBytes = fs.readFileSync('frontend/' + fileToLoad);
var mimeType = mimeTypes[path.extname(fileToLoad).split('.')[1]];
res.writeHead(httpStatusCode, {
'Content-type': mimeType
});
res.end(fileBytes);
});
// console.log("[INIT] Server running on HTTP Port");
}).listen(HTTP_PORT);
proxy.on("close", function(){
console.log("Connection has closed");
});
proxy.on("end", function(){
console.log("Connection has ended");
});
var socket;
var clients = [];
var socketObject;
var server = net.createServer(function (socket) {
socketObject = socket;
socket.name = socket.remoteAddress + ":" + socket.remotePort;
clients.push(socket);
console.log(socket);
socket.write("HTTP/1.1 101", function () {
console.log('[CONN] New connection: ' + socket.name + ', total clients: ' + clients.length);
});
socket.setEncoding('utf8');
socket.on('error', function (data) {
console.log(data);
});
socket.on('end', function () {
console.log('[END] Disconnection: ' + socket.name + ', total clients: ' + clients.length);
});
socket.on('data', function (data) {
console.log('[RECV from ' + socket.remoteAddress + "] " + data);
oldValue = newValue;
newValue = data;
diff = Math.abs(newValue) - Math.abs(oldValue);
console.log(Math.abs(newValue) + '-' + Math.abs(oldValue));
message.data = Math.abs(diff);
console.log('[SAVED] ' + message.data);
});
});
server.listen(NET_PORT, function () {
console.log("[INIT] Server running on NET server port", NET_PORT);
});
var WebSocketServer = require('ws').Server,
wss = new WebSocketServer({
port: WS_PORT
});
wss.on('connection', function (ws) {
// ws.send(JSON.stringify(message));
setInterval(function () {
updateXData(ws)
}, 500);
});
function updateXData(ws) {
var newMessage = {
"data": ""
}
newMessage.data = message.data
ws.send(JSON.stringify(newMessage));
}
So the question is: Is there a cleaner way to do this just by using ws to handle the data from the WiFly and then sending it to the client?
Thanks in advance!
Not sure whether this will suit you, and it might be new to you, but you could make use of MQTT. There are free brokers available which are very good, and it's relatively easy to set up and implement with an Arduino equipped with a WiFly Shield.
http://mqtt.org/
Hope this helps somewhat!
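If you do go the MQTT route, a minimal, hedged sketch of the Node side could look like the following: the WiFly publishes readings to a broker, and one Node process subscribes and fans them out to browser clients over ws. It uses the mqtt npm package; the broker URL and the sensors/accel topic are placeholders.
var mqtt = require('mqtt');
var WebSocket = require('ws');

var wss = new WebSocket.Server({ port: 9002 });          // same WS_PORT as above
var client = mqtt.connect('mqtt://test.mosquitto.org');  // placeholder public broker

client.on('connect', function () {
    client.subscribe('sensors/accel'); // topic the WiFly publishes to
});

client.on('message', function (topic, payload) {
    var message = JSON.stringify({ data: payload.toString() });
    // fan the reading out to every connected browser
    wss.clients.forEach(function (ws) {
        if (ws.readyState === WebSocket.OPEN) { ws.send(message); }
    });
});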
