OK, this is not due to a dumb mistake: I have forwarded the port and there is no firewall in between.
I am trying to make the "live streaming" section of jsmpeg (https://github.com/phoboslab/jsmpeg) work on the Internet. To get the webcam stream onto my webpage, I modified "stream-server.js":
if( process.argv.length < 3 ) {
  console.log(
    'Usage: \n' +
    'node stream-server.js <secret> [<stream-port> <websocket-port>]'
  );
  process.exit();
}

var STREAM_SECRET = process.argv[2],
    STREAM_PORT = process.argv[3] || 8082,
    WEBSOCKET_PORT = process.argv[4] || 8084,
    STREAM_MAGIC_BYTES = 'jsmp'; // Must be 4 bytes

var width = 320,
    height = 240;

// Websocket Server
var socketServer = new (require('ws').Server)({port: WEBSOCKET_PORT});
socketServer.on('connection', function(socket) {
  // Send magic bytes and video size to the newly connected socket
  // struct { char magic[4]; unsigned short width, height; }
  var streamHeader = new Buffer(8);
  streamHeader.write(STREAM_MAGIC_BYTES);
  streamHeader.writeUInt16BE(width, 4);
  streamHeader.writeUInt16BE(height, 6);
  socket.send(streamHeader, {binary: true});

  console.log( 'New WebSocket Connection ('+socketServer.clients.length+' total)' );

  socket.on('close', function(code, message) {
    console.log( 'Disconnected WebSocket ('+socketServer.clients.length+' total)' );
  });
});

socketServer.broadcast = function(data, opts) {
  for( var i in this.clients ) {
    if (this.clients[i].readyState == 1) {
      this.clients[i].send(data, opts);
    }
    else {
      console.log( 'Error: Client ('+i+') not connected.' );
    }
  }
};

// HTTP Server to accept incoming MPEG Stream
var streamServer = require('http').createServer( function(request, response) {
  var params = request.url.substr(1).split('/');
  if( params[0] == STREAM_SECRET ) {
    width = (params[1] || 320)|0;
    height = (params[2] || 240)|0;
    console.log(
      'Stream Connected: ' + request.socket.remoteAddress +
      ':' + request.socket.remotePort + ' size: ' + width + 'x' + height
    );
    request.on('data', function(data) {
      socketServer.broadcast(data, {binary: true});
    });
  }
  else {
    console.log(
      'Failed Stream Connection: ' + request.socket.remoteAddress +
      request.socket.remotePort + ' - wrong secret.'
    );
    response.end();
  }
}).listen(STREAM_PORT);

console.log('Listening for MPEG Stream on http://127.0.0.1:'+STREAM_PORT+'/<secret>/<width>/<height>');
console.log('Awaiting WebSocket connections on ws://127.0.0.1:'+WEBSOCKET_PORT+'/');

var servi = require('servi'), // include the servi library
    app = new servi(false);   // servi instance

// configure the server's behavior:
app.port(8080);               // port number to run the server on
app.serveFiles("public");     // serve all static HTML files from /public
app.start();
console.log("Listening for new clients on port 8080");
<!DOCTYPE html>
<html>
<head>
  <meta name="viewport" content="width=320, initial-scale=1"/>
  <title>jsmpeg streaming webcam</title>
  <style type="text/css">
    body {
      background: white;
      text-align: center;
      margin-top: 10%;
    }
    #videoCanvas {
      /* Always stretch the canvas to 640x480, regardless of its
         internal size. */
      width: 640px;
      height: 480px;
    }
  </style>
</head>
<body>
  <h1>The Canvas size specified</h1>
  <!-- The Canvas size specified here is the "initial" internal resolution. jsmpeg will
       change this internal resolution to whatever the source provides. The size the
       canvas is displayed on the website is dictated by the CSS style. -->
  <canvas id="videoCanvas" width="640" height="480">
    <p>
      Please use a browser that supports the Canvas Element, like
      Chrome, Firefox, Safari or Internet Explorer 10
    </p>
  </canvas>
  <script type="text/javascript" src="jsmpg.js"></script>
  <script type="text/javascript">
    // Show loading notice
    var canvas = document.getElementById('videoCanvas');
    var ctx = canvas.getContext('2d');
    ctx.fillStyle = '#444';
    ctx.fillText('Loading...', canvas.width/2-30, canvas.height/3);

    // Setup the WebSocket connection and start the player
    var client = new WebSocket( 'ws://192.168.1.15:8084/' );
    var player = new jsmpeg(client, {canvas: canvas});
  </script>
</body>
</html>
The "index.html" is included in a "public" folder.
Then I start ffmpeg with:
`ffmpeg -s 640x480 -f video4linux2 -i /dev/video0 -f mpeg1video -b:v 800k -r 30 http://192.168.1.15:8082/1693/640/480/`
When I open, on any station of the lan, url 192.168.1.x:8080 I get the webcam streaming on my page "jsmpeg streaming webcam"; but if use the public ip 41.142.x.y:8080 from outside the local network (using 3G) I get the page (with the h1 line "The Canvas size specified") but no video it just says in the frame "loading".
I do not understand why it works locally and not globally. Why the page is streaming on the internet but not the webcam?
Thanks for your help!
On the client side, you should change this:
var client = 'ws://' + document.location.hostname + ':8084/';
var player = new JSMpeg.Player(client, {canvas: canvas});
The way you were doing it, the static IP 192.168.1.15 was hard-coded; when the page reaches a client outside your LAN, it has no idea who that private address is. Using document.location.hostname makes the browser connect back to whatever host served the page.
I have not tested it, but I'm pretty sure the NAT will translate the IP addresses...
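If the page might ever be served over HTTPS, the scheme has to match as well. A small extension of the same idea (a sketch I have not tested):

var scheme = document.location.protocol === 'https:' ? 'wss://' : 'ws://';
var client = scheme + document.location.hostname + ':8084/';
var player = new JSMpeg.Player(client, {canvas: canvas});

Also make sure the WebSocket port (8084 here) is forwarded on your router in addition to the HTTP port, or the page will load but the stream will never connect.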
Please help me autoplay a live stream with sound; right now it only autoplays muted.
Please tell me where I am making a mistake.
Add the video library:
<script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script>
Add the video element to your page:
<video
  id="my-player"
  controls
  autoplay muted loop>
</video>
Script for the video player:
<script>
  const video = document.querySelector('#my-player');
  const src = 'https://stream.mux.com/yyFGolovbOWRmtsF4eG01SVFgl6VsyGMH4i7dxUVsjxo.m3u8';
  if (video.canPlayType('application/vnd.apple.mpegurl')) {
    // Some browsers (Safari and IE/Edge) support HLS natively
    video.src = src;
  } else if (Hls.isSupported()) {
    const hls = new Hls();
    hls.loadSource(src);
    hls.attachMedia(video);
  }
</script>
Try this: load the source after the media is attached to the video element.
<script>
  const video = document.querySelector('#my-player');
  const src = 'https://stream.mux.com/yyFGolovbOWRmtsF4eG01SVFgl6VsyGMH4i7dxUVsjxo.m3u8';
  if (video.canPlayType('application/vnd.apple.mpegurl')) {
    // Some browsers (Safari and IE/Edge) support HLS natively
    video.src = src;
  } else if (Hls.isSupported()) {
    const hls = new Hls();
    ////// hls.loadSource(src) // Remove this line
    hls.attachMedia(video);
    hls.on(Hls.Events.MEDIA_ATTACHED, function() {
      // Fired when the MediaSource has been successfully attached to the media element.
      // Now load the source m3u8.
      try {
        hls.loadSource(src);
      } catch (err) {
        console.log('Error Loading Source Media!!! ' + err);
      }
    });
  }
</script>
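Note that even with the source loading fixed, the "with voice" part is governed by browser autoplay policies: most browsers refuse unmuted autoplay without a user gesture, which is why the stream only autoplays muted. A common workaround (a sketch on my part, not part of the answer above, untested) is to keep the muted attribute so autoplay succeeds, then unmute on the first user interaction:

<script>
  // Unmute on the first click anywhere on the page; play() and muted
  // changes made inside a user gesture are exempt from autoplay restrictions.
  document.addEventListener('click', function unmuteOnce() {
    video.muted = false;
    video.play();
    document.removeEventListener('click', unmuteOnce);
  });
</script>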
I have a JavaScript file that takes an audio file, places it in a buffer and then allows the audio in the buffer to be processed (by changing the speed and loop duration).
I wish to record the resulting audio, including the manipulation, and place it in an <audio> element. I have tried to do this by integrating the code given in the first answer here:
Record sound of a webaudio api's audio context
with my JavaScript file. However, I get the following errors when I press the play button (which should also start the recording):
GET http://localhost:8000/[object%20AudioBufferSourceNode] [HTTP/1.0 404 File not found 1ms]
HTTP load failed with status 404. Load of media resource http://localhost:8000/[object%20AudioBufferSourceNode] failed. index.html
Uncaught (in promise) DOMException: The media resource indicated by the src attribute or assigned media provider object was not suitable.
I believe I am not hooking up the media recorder correctly. Can anybody assist?
Thank you,
My html is:
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
  <meta name="viewport" content="width=device-width">
  <title>decodeAudioData example</title>
  <link rel="stylesheet" href="">
  <!--[if lt IE 9]>
    <script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
  <![endif]-->
</head>
<body>
  <h1>decodeAudioData example</h1>
  <button class="play">Play</button>
  <button class="stop">Stop</button>

  <h2>Set playback rate</h2>
  <input class="playback-rate-control" type="range" min="0.25" max="3" step="0.05" value="1">
  <span class="playback-rate-value">1.0</span>

  <h2>Set loop start and loop end</h2>
  <input class="loopstart-control" type="range" min="0" max="20" step="1" value="0">
  <span class="loopstart-value">0</span>
  <input class="loopend-control" type="range" min="0" max="20" step="1" value="0">
  <span class="loopend-value">0</span>
  <br>
  <br>

  <!-- Audio recording destination element -->
  <audio id='recording' controls='true'></audio>
</body>
</html>
My JavaScript is:
let audioCtx;
let source;
let songLength;

const pre = document.querySelector('pre');
const myScript = document.querySelector('script');
const play = document.querySelector('.play');
const stop = document.querySelector('.stop');

const playbackControl = document.querySelector('.playback-rate-control');
const playbackValue = document.querySelector('.playback-rate-value');
playbackControl.setAttribute('disabled', 'disabled');

const loopstartControl = document.querySelector('.loopstart-control');
const loopstartValue = document.querySelector('.loopstart-value');
loopstartControl.setAttribute('disabled', 'disabled');

const loopendControl = document.querySelector('.loopend-control');
const loopendValue = document.querySelector('.loopend-value');
loopendControl.setAttribute('disabled', 'disabled');

// Recording variables
var recorder = false;
var recordingstream = false;

// Use XHR to load an audio track, and decodeAudioData to decode it and
// stick it in a buffer. Then we put the buffer into the source.
function getData() {
  /*if(window.webkitAudioContext) {
    audioCtx = new window.webkitAudioContext();
  } else {
    audioCtx = new window.AudioContext();
  }*/
  audioCtx = new window.AudioContext();
  source = audioCtx.createBufferSource();
  request = new XMLHttpRequest();
  request.open('GET', 'precky.mp3', true);
  request.responseType = 'arraybuffer';
  request.onload = function() {
    let audioData = request.response;
    audioCtx.decodeAudioData(audioData, function(buffer) {
      myBuffer = buffer;
      songLength = buffer.duration;
      source.buffer = myBuffer;
      source.playbackRate.value = playbackControl.value;
      source.connect(audioCtx.destination);
      source.loop = true;
      loopstartControl.setAttribute('max', Math.floor(songLength));
      loopendControl.setAttribute('max', Math.floor(songLength));
    },
    function(e) { console.log("Error with decoding audio data" + e.error); });
  }
  request.send();
}

function startrecording() {
  recordingstream = audioCtx.createMediaStreamDestination();
  recorder = new MediaRecorder(recordingstream.stream);
  recorder.start();
}

function glue() {
  let a = new Audio(source);
  let mediasource = audioCtx.createMediaElementSource(a);
  mediasource.connect(recordingstream); // connects also to MediaRecorder
  a.play();
}

function stoprecording() {
  recorder.addEventListener('dataavailable', function(e) {
    document.querySelector('#recording').src = URL.createObjectURL(e.data);
    recorder = false;
    recordingstream = false;
  });
  recorder.stop();
}

// Wire up buttons to stop and play audio, and the range slider controls
play.onclick = function() {
  getData();
  source.start(0);
  play.setAttribute('disabled', 'disabled');
  playbackControl.removeAttribute('disabled');
  loopstartControl.removeAttribute('disabled');
  loopendControl.removeAttribute('disabled');
  startrecording();
  glue(source);
}

stop.onclick = function() {
  source.stop(0);
  play.removeAttribute('disabled');
  playbackControl.setAttribute('disabled', 'disabled');
  loopstartControl.setAttribute('disabled', 'disabled');
  loopendControl.setAttribute('disabled', 'disabled');
  stoprecording();
}

playbackControl.oninput = function() {
  source.playbackRate.value = playbackControl.value;
  playbackValue.innerHTML = playbackControl.value;
}

loopstartControl.oninput = function() {
  source.loopStart = loopstartControl.value;
  loopstartValue.innerHTML = loopstartControl.value;
}

loopendControl.oninput = function() {
  source.loopEnd = loopendControl.value;
  loopendValue.innerHTML = loopendControl.value;
}
The original decodeAudioData example (without my attempt to record) comes from Mozilla docs here:
https://mdn.github.io/webaudio-examples/decode-audio-data/ by chrisdavidmills
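For reference, the 404 above comes from new Audio(source): an AudioBufferSourceNode coerced to a string becomes "[object AudioBufferSourceNode]", which the <audio> element then requests as a URL. A minimal sketch of one way the recorder could be hooked up instead (my assumption, untested, replacing startrecording() and glue()):

function startrecording() {
  recordingstream = audioCtx.createMediaStreamDestination();
  // Feed the buffer source into the recorder's destination as well as the
  // speakers; no intermediate Audio element is needed.
  source.connect(recordingstream);
  recorder = new MediaRecorder(recordingstream.stream);
  recorder.start();
}
// glue() can then be dropped entirely; source.connect(audioCtx.destination)
// in getData() already handles audible playback.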
I am using node-webkit v0.45, and need to open a new window (with a pre-determined HTML layout) with dynamic parameters.
The only documentation I can find suggests using the "loaded" event to emit data to the new window, and then handling the received data via a "data" handler in the new window's HTML, as below:
Main window:
var ngui = require('nw.gui');
var newWindow = ngui.Window.open('newwindow.html', {
  width: 1120,
  height: 360
});

newWindow.on('loaded', function() {
  var data = {msg: "test"};
  newWindow.emit("data", data);
});
New window:
var gui = require('nw.gui');
var win = gui.Window.get();
win.on("data", function(data) {
  console.log(data.msg);
});
However, it seems that after v0.13 this was deprecated and the Window class no longer inherits from EventEmitter. Are there any other options for opening windows with parameters?
*** Update ***
The following code will work when the new window is opened:
var param = 'param';
ngui.Window.open('newWindow.html', {
  width: 1120,
  height: 360,
}, function(newWindow) {
  newWindow.on('loaded', () => {
    newWindow.window.param = param;
  });
});
And in newWindow.html:
<script>
  function getParam() {
    if (!window.param) {
      setTimeout(function() {
        getParam();
      }, 50);
    } else {
      console.log("Successfully loaded param: '" + window.param + "'");
    }
  }
  getParam();
</script>
The Node context can be used to share data, as the Node context is shared across windows.
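For example (a sketch I have not tested; sharedParam is a hypothetical name), since every window sees the same Node global object:

// In the opening window: stash the value on the shared Node context.
global.sharedParam = 'param';
ngui.Window.open('newwindow.html', { width: 1120, height: 360 });

// In newwindow.html: read it back; no events or polling needed.
<script>
  console.log('Loaded param: ' + global.sharedParam);
</script>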
I am having trouble with Three.js loading a texture and applying it to a mesh cube.
Locally, I know about the usual issues, like having to run the HTML file on an Apache server with WAMP (localhost).
But when I use Node.js and socket.io, how do I fix it?
First, to load three.js I have to put the web address in the script src instead of a local "three.js":
http://threejs.org/build/three.min.js
That works, but what about the texture?
Neither a local nor an internet address for the texture works...
//var mapUrl = "mercury.jpg";
var mapUrl = "http://threejs.org/examples/textures/cube/skybox/px.jpg";
var map = THREE.ImageUtils.loadTexture(mapUrl); // not working with either
And if I put my code on a web server running Node.js, like Cloud9, how do I fix it? (Same problem as locally with Node.js.)
Thank you for your attention.
Here is my complete code.
Server
var http = require('http');
var fs = require('fs');

// Create the server
var app = http.createServer(function (req, res) {
  // Read our gl.html file
  fs.readFile('gl.html', 'utf-8', function(error, content) {
    res.writeHead(200, {'Content-Type' : 'text/html'});
    res.end(content);
  });
});

var io = require("socket.io");
io = io.listen(app);

io.sockets.on('connection', function (socket) {
  socket.on('send', function () {
    socket.broadcast.emit('rec');
  }); // send
}); // connection

app.listen(8080);
Client (gl.html)
<!DOCTYPE html>
<html>
<head>
  <title>Welcome to WebGL</title>
</head>
<body onLoad="onLoad();" style="">
  <h1>Webgl</h1>
  <div id="container" style="width:95%; height:80%; position:absolute;"></div>
  <script type="text/javascript" src="http://threejs.org/build/three.min.js"></script>
  <!--<script type="text/javascript" src="Three.js"></script> not working-->
  <script type="text/javascript" src="/socket.io/socket.io.js"></script>
  <script>
    var socket = io.connect();

    var renderer = null,
        scene = null,
        camera = null,
        cube = null,
        animating = false;

    function onLoad()
    {
      // Grab our container div
      var container = document.getElementById("container");

      // Create the Three.js renderer, add it to our div
      renderer = new THREE.WebGLRenderer( { antialias: true } );
      renderer.setSize(container.offsetWidth, container.offsetHeight);
      container.appendChild( renderer.domElement );

      // Create a new Three.js scene
      scene = new THREE.Scene();

      // Put in a camera
      camera = new THREE.PerspectiveCamera( 45, container.offsetWidth / container.offsetHeight, 1, 4000 );
      camera.position.set( 0, 0, 3 );

      // Create a directional light to show off the object
      var light = new THREE.DirectionalLight( 0xffffff, 1.5 );
      light.position.set(0, 0, 1);
      scene.add( light );

      // Create a shaded, texture-mapped cube and add it to the scene
      // First, create the texture map
      // var mapUrl = "mercury.jpg";
      var mapUrl = "http://threejs.org/examples/textures/cube/skybox/px.jpg";
      var map = THREE.ImageUtils.loadTexture(mapUrl); // not working with either!

      // Now, create a Phong material to show shading; pass in the map
      var material = new THREE.MeshPhongMaterial({ map: map });

      // Create the cube geometry
      var geometry = new THREE.CubeGeometry(1, 1, 1);

      // And put the geometry and material together into a mesh
      cube = new THREE.Mesh(geometry, material);

      // Turn it toward the scene, or we won't see the cube shape!
      // cube.rotation.x = Math.PI / 5;
      cube.rotation.x = 0.5;
      //cube.rotation.y = Math.PI / 5;

      // Add the cube to our scene
      scene.add( cube );

      // Add a mouse up handler to toggle the animation
      addMouseHandler();

      // Run our render loop
      run();
    }

    function run()
    {
      // Render the scene
      renderer.render( scene, camera );

      // Spin the cube for next frame
      if (animating)
      {
        cube.rotation.y -= 0.01;
        //cube.rotation.x += 0.05;
      }

      // Ask for another frame
      //requestAnimationFrame(run);
      setTimeout(run, 1000/60);
    }

    function addMouseHandler()
    {
      var dom = renderer.domElement;
      dom.addEventListener( 'mouseup', onMouseUp, false );
    }

    function onMouseUp(event)
    {
      event.preventDefault();
      animating = !animating;
      socket.emit('send');
    }

    socket.on('rec', function () {
      animating = !animating;
    });
  </script>
</body>
</html>
In fact, I just had to use Express, place all the files in the public folder, and name the client page index.html.
Now it works! How can I close my question?
Here is the simple code of the server:
var express = require('express');
var app = express();
app.use(express.static(__dirname + '/public'));
app.listen(8080);
You are running into trouble with CORS: WebGL textures must come from the same origin as the page, or the image server must send special CORS headers. This is easily fixable with a proxy. If you are already running a server, then it shouldn't be too hard to configure it to handle proxy requests.
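As a sketch of that idea (untested; the /texture.jpg route is my own naming, building on the Express server above): fetch the remote image server-side and re-serve it from your own origin, so the browser never makes a cross-origin request for the texture.

var express = require('express');
var http = require('http');
var app = express();

app.use(express.static(__dirname + '/public'));

// Same-origin proxy for the remote texture.
app.get('/texture.jpg', function (req, res) {
  http.get('http://threejs.org/examples/textures/cube/skybox/px.jpg', function (remote) {
    res.writeHead(remote.statusCode, { 'Content-Type': 'image/jpeg' });
    remote.pipe(res);
  });
});

app.listen(8080);

// Client side: THREE.ImageUtils.loadTexture('/texture.jpg');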
I'm trying to follow this tutorial on converting a d3.js SVG vis to a PNG server-side (using Node.js): http://eng.wealthfront.com/2011/12/converting-dynamic-svg-to-png-with.html
Link to full code: https://gist.github.com/1509145
However, I keep getting this error whenever I attempt to make a request to load my page:
/Users/me/Node/node_modules/jsdom/lib/jsdom.js:171
features = JSON.parse(JSON.stringify(window.document.implementation._fea
^
TypeError: Cannot read property 'implementation' of undefined
at exports.env.exports.jsdom.env.processHTML (/Users/dereklo/Node/node_modules/jsdom/lib/jsdom.js:171:59)
at Object.exports.env.exports.jsdom.env (/Users/dereklo/Node/node_modules/jsdom/lib/jsdom.js:262:5)
at Server.<anonymous> (/Users/dereklo/Node/Pie/pie_serv.js:26:9)
at Server.EventEmitter.emit (events.js:91:17)
at HTTPParser.parser.onIncoming (http.js:1785:12)
at HTTPParser.parserOnHeadersComplete [as onHeadersComplete] (http.js:111:23)
at Socket.socket.ondata (http.
Does anybody know why this might be? I've installed the jsdom module fine, so I don't really know what's causing these issues... Thanks in advance.
EDIT
This is the code I'm using to implement the node.js server. My latest issue is below this source...
var http = require('http'),
    url = require('url'),
    jsdom = require('jsdom'),
    child_proc = require('child_process'),
    w = 400,
    h = 400,
    __dirname = "Users/dereklo/Node/pie/",
    scripts = ["/Users/dereklo/Node/pie/d3.min.js",
               "/Users/dereklo/Node/pie/d3.layout.min.js",
               "/Users/dereklo/Node/pie/pie.js"],
    //scripts = ["./d3.v2.js",
    //           "./d3.layout.min.js",
    //           "./pie.js"]
    htmlStub = '<!DOCTYPE html><div id="pie" style="width:'+w+'px;height:'+h+'px;"></div>';
http.createServer(function (req, res) {
  res.writeHead(200, {'Content-Type': 'image/png'});

  var convert = child_proc.spawn("convert", ["svg:", "png:-"]),
      values = (url.parse(req.url, true).query['values'] || ".5,.5")
        .split(",")
        .map(function(v){ return parseFloat(v); });

  convert.stdout.on('data', function (data) {
    res.write(data);
  });

  convert.on('exit', function(code) {
    res.end();
  });

  jsdom.env({features:{QuerySelector:true}, html:htmlStub, scripts:scripts, done:function(errors, window) {
    var svgsrc = window.insertPie("#pie", w, h, values).innerHTML;
    console.log("svgsrc", svgsrc);
    // jsdom's domToHTML will lowercase element names
    svgsrc = svgsrc.replace(/radialgradient/g, 'radialGradient');
    convert.stdin.write(svgsrc);
    convert.stdin.end();
  }});
}).listen(8888, "127.0.0.1");

console.log('Pie SVG server running at http://127.0.0.1:8888/');
console.log('ex. http://127.0.0.1:8888/?values=.4,.3,.2,.1');
Latest Issue
events.js:66
throw arguments[1]; // Unhandled 'error' event
^
Error: This socket is closed.
at Socket._write (net.js:519:19)
at Socket.write (net.js:511:15)
at http.createServer.jsdom.env.done (/Users/dereklo/Node/Pie/pie_serv.js:38:19)
at exports.env.exports.jsdom.env.scriptComplete (/Users/dereklo/Node/node_modules/jsdom/lib/jsdom.js:199:39)
This may prove to be a useful answer to your question if you take out that "using node.js" stipulation. If it doesn't help you, maybe later visitors will find it interesting.
I've been working for some time to solve this same problem (server-side d3 rasterizing), and I've found PhantomJS to be the best solution.
server.js:
var page = require('webpage').create(),
    renderElement = require('./renderElement.js'),
    Routes = require('./Routes.js'),
    app = new Routes();

page.viewportSize = {width: 1000, height: 1000};
page.open('./d3shell.html');

app.post('/', function(req, res) {
  page.evaluate(new Function(req.post.d3));
  var pic = renderElement(page, '#Viewport');
  res.send(pic);
});

app.listen(8000);
console.log('Listening on port 8000.');
Routes.js: https://gist.github.com/3061477
renderElement.js: https://gist.github.com/3176500
d3shell.html should look something like:
<!DOCTYPE HTML>
<html>
<head>
  <title>Shell</title>
</head>
<body>
  <div id="Viewport" style="display: inline-block"></div>
  <script src="http://cdnjs.cloudflare.com/ajax/libs/d3/2.8.1/d3.v2.min.js" type="text/javascript"></script>
</body>
</html>
You can then start the server with phantomjs server.js and POST d3=[d3 code that renders to #Viewport], and the server will respond with a base64-encoded png.
(Requires PhantomJS 1.7 or higher.)
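For instance, a hypothetical Node client for the server above (untested; the circle-drawing snippet is only an illustration):

// POST d3 code that renders into #Viewport; the response body is the
// base64-encoded PNG produced by renderElement.
var http = require('http');
var d3code = "d3.select('#Viewport').append('svg')" +
             ".append('circle').attr('cx', 50).attr('cy', 50).attr('r', 40);";
var body = 'd3=' + encodeURIComponent(d3code);

var req = http.request({
  host: '127.0.0.1',
  port: 8000,
  method: 'POST',
  path: '/',
  headers: {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Content-Length': Buffer.byteLength(body)
  }
}, function (res) {
  var png64 = '';
  res.on('data', function (chunk) { png64 += chunk; });
  res.on('end', function () { console.log(png64); });
});

req.end(body);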