WebRTC to connect two browsers - p2p

I have been trying for a long time to connect two browsers. I only get my own voice and my own camera image. How do I get the image and sound from the other browser? The browsers are on the same network.
<body>
<video id="localVideo" autoplay="true" muted="true" width="400px" height="400px" ></video>
<video id="remoteVideo" autoplay="true" muted="true" width="400px" height="400px" ></video>
<div>
<button id="callButton" onclick="call();" >Call</button>
<button id="hangupButton" onclick="hangup();" disabled>Hang Up</button>
</div>
<script>
// Definitions
var localStream;
var callButton = document.getElementById("callButton");
var hangupButton = document.getElementById("hangupButton");
var localVideo = document.getElementById("localVideo");
var remoteVideo = document.getElementById("remoteVideo");
var audioVideoConstraints = {audio: true, video: true};
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.URL = window.URL || window.webkitURL;
window.RTCPeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCIceCandidate = window.RTCIceCandidate || window.mozRTCIceCandidate;
</script>
<script>
// Functions
function getUserMediaSuccessCallback(stream) {
window.stream = stream; // stream available to console
localStream = stream;
if (window.URL) {
localVideo.src = window.URL.createObjectURL(stream);
} else {
localVideo.src = stream;
}
callButton.disabled = false;
if (window.stream.getVideoTracks().length > 0) {
trace('Using video device: ' + window.stream.getVideoTracks()[0].label);
}
if (window.stream.getAudioTracks().length > 0) {
trace('Using audio device: ' + window.stream.getAudioTracks()[0].label);
}
}
function getUserMediaErrorCallback(error){
console.log("navigator.getUserMedia error: ", error);
alert("navigator.getUserMedia error: " + error);
}
function trace(text) {
console.log(text);
}
function call() {
hangupButton.disabled = false;
trace("Starting call");
var servers = null;
localPeerConnection = new RTCPeerConnection(servers);
trace("Created local peer connection object localPeerConnection");
localPeerConnection.onicecandidate = gotLocalIceCandidate;
remotePeerConnection = new RTCPeerConnection(servers);
trace("Created remote peer connection object remotePeerConnection");
remotePeerConnection.onicecandidate = gotRemoteIceCandidate;
remotePeerConnection.onaddstream = gotRemoteStream;
localPeerConnection.addStream(localStream);
trace("Added localStream to localPeerConnection");
localPeerConnection.createOffer(gotLocalDescription);
}
</script>
<script>
navigator.getUserMedia(audioVideoConstraints, getUserMediaSuccessCallback, getUserMediaErrorCallback);
</script>
</body>
Thank you very much, good work :)
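For what it's worth, the snippet stops at createOffer and never defines the callbacks it references (gotLocalDescription, gotLocalIceCandidate, gotRemoteIceCandidate, gotRemoteStream, hangup), so the remote video element is never given a stream. Below is a minimal sketch of those callbacks in the same legacy callback style, assuming both peer connections live in the same page as above; connecting two separate browsers additionally requires a signalling channel (for example a WebSocket) to carry the offer/answer SDP and ICE candidates between them.
function gotLocalDescription(description) {
    localPeerConnection.setLocalDescription(description);
    remotePeerConnection.setRemoteDescription(description);
    // The "remote" side answers, and both sides store the answer.
    remotePeerConnection.createAnswer(function(answer) {
        remotePeerConnection.setLocalDescription(answer);
        localPeerConnection.setRemoteDescription(answer);
    }, getUserMediaErrorCallback);
}
function gotLocalIceCandidate(event) {
    if (event.candidate) {
        remotePeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
    }
}
function gotRemoteIceCandidate(event) {
    if (event.candidate) {
        localPeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
    }
}
function gotRemoteStream(event) {
    // This is what feeds the second video element.
    remoteVideo.src = window.URL.createObjectURL(event.stream);
}
function hangup() {
    localPeerConnection.close();
    remotePeerConnection.close();
    localPeerConnection = null;
    remotePeerConnection = null;
    hangupButton.disabled = true;
    callButton.disabled = false;
}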

Related

Delete message in real time using Socket.io

I am trying to delete a message from a live chat using socket.io. I can send messages in real time and see any new message I add in another tab, but I also want to be able to remove a message. I am updating the status of the message in the database using the socket and that works fine, but I am not able to remove the message from the front end in real time. I have filtered out the removed message and set all the other messages in my chat state, which removes the message in the tab where I performed the action, but it does not update in real time in the other tab; I have to refresh for the message to disappear. Can someone please tell me how I could do this in real time?
Frontend Code
const ENDPOINT = "http://localhost:8000";
const socket = io.connect(ENDPOINT);
useEffect(()=>{
getDataFromSocket();
},[])
const deleteFnc = (v) => {
setDeleteMsg(!deleteMsg)
socket.emit("delete", {message_type:"1", message_model:{message:v?.text_model?.message, user_id:user,uniqueID:v.text_model.uniqueID, stream_key:props.data.streamkey, user_image:loginUserImage, user_name:loginUserName},url:loginUserName,app_type:"2",platform:'1'});
socket.on("deleted_message",(data)=>{
let him = chat.filter(function(obj){
return obj.text_model.uniqueID !== data.message_model.uniqueID
})
setChat(him);
})
}
const getDataFromSocket = () => {
socket.on("message",(data)=>{
if(data){
var newdata=data
newdata.text_model = newdata.message_model
delete newdata.message_model
setChat(prev=>[...prev,newdata])
}
})
}
return(
chat.map((v, index) => {
if (v.message_type == 1) {
return (
<div className={StreamPlayerCss.chat34} key={index}>
<div className={StreamPlayerCss.chat3467}>
{v?.text_model?.user_image ?
<img src={v?.text_model?.user_image} alt="img" onError={(e) => {e.target.onerror = null; e.target.src = "https://d25u15mvjkult8.cloudfront.net/Website/Assets/Images/users.png"}}></img> :
<img src="https://d25u15mvjkult8.cloudfront.net/Website/Assets/Images/users.png" alt="img"></img>
}
<h4>{v?.text_model?.user_name}</h4>
{v?.text_model?.del_status == 1 ?
<h6>Your Message Was Deleted</h6> :
<>
<h6>{v?.text_model?.message}</h6>
<button onClick={()=>deleteFnc(v)}>del</button>
</> }
</div>
</div>
)
}
return null; // messages of other types render nothing
})
)
Backend Code
const express=require('express');
const app=express();
const se=app.listen(8000,()=>{
console.log("working on 8000")
})
const dotenv = require('dotenv');
if (process.env.NODE_ENV == 'live') {
dotenv.config();
} else if(process.env.NODE_ENV == 'test'){
const testdotnv = require ('custom-env').env('test');
testdotnv.config();
}
const pool = require('./database');
var io =require('socket.io')(se)
// set up socket connection
io.sockets.on('connection', function (socket) {
var clientIp = socket.request.headers['x-forwarded-for'] || socket.request.connection.remoteAddress;
console.log("ip is ",clientIp);
socket.on('send', function (data) {
if(data.platform){
var platform = data.platform;
}else{
var platform = 0;
}
if(data.event == 0){
pool.query('INSERT INTO live_chat SET user_id = ? , name = ? , image = ? , stream_key = ? , message = ?,del_status=0 ,userId = ?,ip_address = ?,platform = ?' ,[data.user_id,data.username,data.image,data.skey,data.message,data.url,clientIp,platform]);
}
else if(data.event > 0) {
pool.query('INSERT INTO live_chat SET user_id = ? , name = ? , image = ? , stream_key = ? , message = ?,del_status=0 ,userId = ? ,event_id = ? ,cheered_user_id = ? ,cheered_user_name = ?,ip_address=?' ,[data.user_id,data.username,data.image,data.skey,data.message,data.url,data.event,data.cheered_id,data.cheered_name,clientIp]);
pool.query('INSERT INTO cheered_chat_user SET user_id = ? , event_id = ? ,cheered_user_id = ? ,cheered_user_name = ?',[data.user_id,data.event,data.cheered_id,data.cheered_name]);
}
io.sockets.emit('message', data);
io.sockets.emit('newMessageListener', data);
});
socket.on('send_message', function (data) {
socket.on('delete', function (data) {
pool.query("UPDATE `live_chat` SET del_status=1 WHERE user_id = ? and unique_id=?",[data.message_model.user_id, data.message_model.uniqueID])
io.sockets.emit("deleted_message",data);
})
});
});
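A likely cause, going by the code shown (this is an assumption): the "deleted_message" listener is only registered inside deleteFnc, so the other tab, which never clicks delete, never subscribes to it; and on the backend the 'delete' handler is nested inside the 'send_message' handler, so it is only attached after a 'send_message' event has arrived on that socket. A minimal sketch of registering both listeners once:
// Frontend (sketch): subscribe once, next to the "message" listener, so every tab receives the event.
const getDataFromSocket = () => {
    socket.on("message", (data) => {
        if (data) {
            const newdata = { ...data, text_model: data.message_model };
            delete newdata.message_model;
            setChat((prev) => [...prev, newdata]);
        }
    });
    socket.on("deleted_message", (data) => {
        // Use the functional form so the filter always sees the latest chat state.
        setChat((prev) => prev.filter((obj) => obj.text_model.uniqueID !== data.message_model.uniqueID));
    });
};
const deleteFnc = (v) => {
    // Keep the same socket.emit("delete", {...}) payload as in the original code;
    // just drop the socket.on("deleted_message", ...) registration from here.
};
// Backend (sketch): register 'delete' directly on the connection, not inside 'send_message'.
io.sockets.on('connection', function (socket) {
    socket.on('delete', function (data) {
        pool.query("UPDATE `live_chat` SET del_status=1 WHERE user_id = ? and unique_id=?", [data.message_model.user_id, data.message_model.uniqueID]);
        io.sockets.emit("deleted_message", data);
    });
});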

Is there a way to increase recording quality with the Web Audio API in Safari?

I'm using WebRTC along with WebAudioRecorder.js and the Web Audio API to record microphone input from the user for audio recognition with the audD API (similar to Shazam). This works fine in Chrome and Firefox, and the quality of the recording seems fairly solid. However, audD is not able to recognize the blob/file sent from my recording in Safari (11.1.2), because of what I'm guessing is low audio quality (the playback is almost inaudible). The only audio format that both Safari and audD are compatible with is MP3, so that's how I've been encoding the file.
Javascript:
// to be set to a WebAudioRecorder.js recorder instance
let recorder;
// to be set to the stream resulting from getUserMedia()
let gumStream;
function beginRecording() {
if (navigator.mediaDevices.getUserMedia) {
console.log('starting the recording');
navigator.mediaDevices.getUserMedia({ 'audio': true })
.then(function(stream) {
let AudioContext = window.AudioContext // Default
|| window.webkitAudioContext // Safari and old versions of Chrome
|| false;
if (AudioContext) {
let audioCtx = new AudioContext;
gumStream = stream;
let source = audioCtx.createMediaStreamSource(stream);
recorder = new WebAudioRecorder(source, {
workerDir: 'web-audio-recorder-js/lib/',
encoding: 'mp3'
});
} else {
alert('The Web Audio API is not supported.');
return; // avoid using an undefined recorder below
}
recorder.setOptions({
timeLimit: 120,
encodeAfterRecord: true,
ogg: {quality: 0.9},
mp3: {bitRate: 320},
});
recorder.startRecording();
recorder.onComplete = function(recorder, blob) {
createAudioPlayback(blob);
POSTreq(blob);
}
recorder.onError = function(recorder, err) {
console.error(err);
}
})
.catch(function(err) {
console.error(err);
})
}
}
function stopRecording() {
console.log('stopping the recording');
let recordingTime = recorder.recordingTime();
console.log(recordingTime);
let audioTrack = gumStream.getAudioTracks()[0];
console.log(audioTrack);
audioTrack.stop();
recorder.finishRecording();
$('#msg_box').text(`Recorded for ${Math.round(recordingTime)} seconds`);
console.log('recording stopped');
}
function createAudioPlayback(blobData) {
let url = URL.createObjectURL(blobData);
$('body').append(`<audio controls src="${url}"></audio>`);
}
function POSTreq (blobData) {
let xhr = new XMLHttpRequest();
let fd = new FormData();
fd.append('api_token', '');
fd.append('file', blobData);
fd.append('method', 'recognize');
fd.append('return_itunes_audios', true);
fd.append('itunes_country', 'us');
xhr.onreadystatechange = function() {
if (xhr.readyState === 4) {
parseRetrievedData(xhr.response);
}
}
xhr.open('POST', 'https://api.audd.io/');
xhr.responseType = 'json';
xhr.send(fd);
}
function parseRetrievedData(parseData) {
console.log('the data from the audD api is: ', parseData);
}
$(function() {
$('#start-button').click(function(e) {
beginRecording();
$('#stop-button').prop('hidden', false);
});
$('#stop-button').click(function(e) {
stopRecording();
});
});
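One Safari-specific detail worth checking (an assumption, since the symptom could have other causes): Safari can leave an AudioContext in the 'suspended' state when it is created after the asynchronous getUserMedia() resolves, i.e. outside the original click gesture, and its sample rate may differ from what the encoder expects; both can lead to very quiet or degraded recordings. A minimal sketch of resuming the context and logging the sample rate, to be placed inside the .then() callback above:
// Sketch, not a drop-in fix: resume the context and inspect its sample rate.
let audioCtx = new (window.AudioContext || window.webkitAudioContext)();
if (audioCtx.state === 'suspended' && audioCtx.resume) {
    // Resume explicitly, since the .then() callback may run outside the click gesture.
    audioCtx.resume();
}
// Compare this against the rate the MP3 encoder assumes; a mismatch is one
// possible cause of distorted or very quiet output in Safari.
console.log('AudioContext sample rate:', audioCtx.sampleRate);
let source = audioCtx.createMediaStreamSource(stream);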
HTML:
<div class="recorder_wrapper">
<div class="recorder">
<button id="start-button">Start</button>
<button id="stop-button">Stop</button>
<p id="msg_box"></p>
<section class="auth-links-region" role="region">
Signup
Login
</section>
<section class="authentication-region" role="region" hidden>
<p class="authentication-text"></p>
My Searches
Logout
</section>
</div>
</div>

How to record desktop screen video using recordRTC node module of selected screen area?

I am working on a desktop application using NW.js in which I need to record a selected area of the desktop screen. I am currently using the RecordRTC node module for recording video, but it gives me a full-screen recording. Please advise if there is any other package (or approach) to achieve this across platforms (Windows/Linux/macOS).
Please check this demo (named "Record Cropped Screen using RecordRTC"):
https://www.webrtc-experiment.com/RecordRTC/simple-demos/record-cropped-screen.html
Here is the complete HTML code (try it on any HTTPS page, on localhost, or in node-webkit):
<button id="btn-start-recording">Start Recording</button>
<button id="btn-stop-recording" style="display: none;">Stop Recording</button>
<br><hr>
<div id="edit-panel" style="border-bottom: 1px solid;">
<div>
<label for="x">X</label>
<input type="number" name="x" id="x" value="0" />
</div>
<div>
<label for="y">Y</label>
<input type="number" name="y" id="y" value="0" />
</div>
<div>
<label for="w">Width (-1 = Full size)</label>
<input type="number" name="w" id="w" value="-1" />
</div>
<div>
<label for="h">Height (-1 = Full size)</label>
<input type="number" name="h" id="h" value="-1" />
</div>
<button id="update" style="display: none;">Update X-Y Width-Height Coordinates</button>
<canvas></canvas>
</div>
<video id="mediaElement"></video>
<script src="https://cdn.webrtc-experiment.com/RecordRTC.js"></script>
<script src="https://cdn.WebRTC-Experiment.com/getScreenId.js"></script>
<script>
// this script tag is taken from: https://github.com/andersevenrud/webrtc-screenshare-crop-demo
var CROP_X = 10;
var CROP_Y = 20;
var CROP_W = 320; // default width
var CROP_H = 240; // default height
var VIDEO_WIDTH = 0;
var VIDEO_HEIGHT = 0;
var MAX_VIDEO_WIDTH = 1920;
var MAX_VIDEO_HEIGHT = 1080;
var _canvas;
var _context;
var htmlCanvasElement = document.querySelector('canvas');
// Form elements
document.getElementById("x").value = CROP_X;
document.getElementById("y").value = CROP_Y;
document.getElementById("w").value = CROP_W;
document.getElementById("h").value = CROP_H;
document.getElementById("update").onclick = function() {
var x = document.getElementById("x").value << 0;
var y = document.getElementById("y").value << 0;
var w = document.getElementById("w").value << 0;
var h = document.getElementById("h").value << 0;
if (x >= 0) {
CROP_X = x;
}
if (y >= 0) {
CROP_Y = y;
}
CROP_W = w || 0;
CROP_H = h || 0;
};
_context = htmlCanvasElement.getContext('2d');
/**
* Crops a video frame and shows it to the user
*/
function CropFrame(ev, stream, video, callback) {
callback = callback || function() {};
_canvas = htmlCanvasElement;
if (CROP_X < 0) {
CROP_X = 0;
}
if (CROP_Y < 0) {
CROP_Y = 0;
}
if (CROP_W <= 0) {
CROP_W = VIDEO_WIDTH;
}
if (CROP_H <= 0) {
CROP_H = VIDEO_HEIGHT;
}
if (CROP_W > MAX_VIDEO_WIDTH) {
CROP_W = MAX_VIDEO_WIDTH;
}
if (CROP_H > MAX_VIDEO_HEIGHT) {
CROP_H = MAX_VIDEO_HEIGHT;
}
_canvas.width = CROP_W;
_canvas.height = CROP_H;
_context.drawImage(video, CROP_X, CROP_Y, CROP_W, CROP_H, 0, 0, CROP_W, CROP_H);
// We need to scale down the image or else we get HTTP 414 Errors
// Also we scale down because of RTC message length restriction
var scanvas = document.createElement('canvas');
scanvas.width = _canvas.width;
scanvas.height = _canvas.height;
var wRatio = _canvas.width / 320;
var hRatio = _canvas.height / 240;
var maxRatio = Math.max(wRatio, hRatio);
if (maxRatio > 1) {
scanvas.width = _canvas.width / maxRatio;
scanvas.height = _canvas.height / maxRatio;
}
scanvas.getContext('2d').drawImage(_canvas, 0, 0, scanvas.width, scanvas.height);
callback(scanvas.toDataURL("image/jpeg"));
}
var recorder;
function captureScreen(cb) {
getScreenId(function(error, sourceId, screen_constraints) {
navigator.getUserMedia(screen_constraints, cb, function(error) {
console.error('getScreenId error', error);
alert('Failed to capture your screen. Please check Chrome console logs for further information.');
});
});
}
var mediaElement = document.querySelector('#mediaElement');
document.querySelector('#btn-start-recording').onclick = function() {
document.querySelector('#btn-start-recording').style.display = 'none';
navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
captureScreen(function(screen) {
var inited = false;
mediaElement.ontimeupdate = function(ev) {
if (!inited) {
VIDEO_WIDTH = mediaElement.offsetWidth;
VIDEO_HEIGHT = mediaElement.offsetHeight;
mediaElement.style.display = 'none';
document.querySelector('#edit-panel').style.display = 'block';
inited = true;
}
CropFrame(ev, screen, mediaElement);
};
mediaElement.src = URL.createObjectURL(screen);
mediaElement.play();
mediaElement.screen = screen;
addStreamStopListener(screen, function() {
document.querySelector('#btn-stop-recording').onclick();
});
// RecordRTC goes here
var captureStream = htmlCanvasElement.captureStream();
recorder = RecordRTC(captureStream, {
type: 'video'
});
recorder.startRecording();
document.querySelector('#btn-stop-recording').style.display = 'inline';
});
};
document.querySelector('#btn-stop-recording').onclick = function() {
document.querySelector('#btn-stop-recording').style.display = 'none';
recorder.stopRecording(function() {
var blob = recorder.getBlob();
document.querySelector('#edit-panel').style.display = 'none';
mediaElement.style.display = 'block';
mediaElement.src = URL.createObjectURL(blob);
mediaElement.play();
if (mediaElement.screen && mediaElement.screen.getVideoTracks) {
mediaElement.screen.stop();
mediaElement.screen = null;
}
document.querySelector('#btn-start-recording').style.display = 'inline';
});
};
function addStreamStopListener(stream, callback) {
var streamEndedEvent = 'ended';
if ('oninactive' in stream) {
streamEndedEvent = 'inactive';
}
stream.addEventListener(streamEndedEvent, function() {
callback();
callback = function() {};
}, false);
stream.getAudioTracks().forEach(function(track) {
track.addEventListener(streamEndedEvent, function() {
callback();
callback = function() {};
}, false);
});
stream.getVideoTracks().forEach(function(track) {
track.addEventListener(streamEndedEvent, function() {
callback();
callback = function() {};
}, false);
});
}
function querySelectorAll(selector) {
return Array.prototype.slice.call(document.querySelectorAll(selector));
}
querySelectorAll('input').forEach(function(input) {
input.onkeyup = input.oninput = function() {
if (!document.querySelector('#update').onclick) return;
document.querySelector('#update').onclick();
};
});
</script>
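If a newer, unprefixed API is an option in your NW.js/Chromium build (an assumption on my part; getScreenId.js is not needed for it), the same crop-via-canvas idea can be sketched with navigator.mediaDevices.getDisplayMedia():
// Sketch: capture the screen, keep drawing the selected region onto a canvas,
// and record the canvas stream with RecordRTC, as in the demo above.
async function recordCroppedArea(x, y, w, h) {
    var screenStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
    var video = document.createElement('video');
    video.muted = true;
    video.srcObject = screenStream;
    await video.play();
    var canvas = document.createElement('canvas');
    canvas.width = w;
    canvas.height = h;
    var ctx = canvas.getContext('2d');
    (function draw() {
        // Copy the selected region of the screen into the canvas on every frame.
        ctx.drawImage(video, x, y, w, h, 0, 0, w, h);
        requestAnimationFrame(draw);
    })();
    var recorder = RecordRTC(canvas.captureStream(30), { type: 'video' });
    recorder.startRecording();
    return recorder; // later: recorder.stopRecording(function() { var blob = recorder.getBlob(); ... });
}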

socket.io, webrtc, nodejs video chat app getting errors over https: ERR_SSL_PROTOCOL_ERROR, 404 (Not Found), and ERR_CONNECTION_TIMED_OUT

I have put together a video chat app using socket.io, WebRTC, and Node.js from an online tutorial on GitHub, but now I am getting errors when converting it to run over HTTPS:
Request URL:https://telemed.caduceususa.com/socket.io/?EIO=3&transport=polling&t=1479396416422-0
Request Method:GET
Status Code:404 Not Found
Remote Address:10.9.2.169:443
Other errors I have gotten in this process are as follows:
When I try to declare a different PORT I get ERR_SSL_PROTOCOL_ERROR.
When I try to declare port 80 or 8080 I get ERR_CONNECTION_TIMED_OUT.
Something is going wrong on this line inside socket.io.js:
xhr.send(this.data);
I am running a node.js server on Windows Server 2012 and I have set up IIS to serve up the server on PORT 80. I have created the subdomain https://telemed.caduceususa.com in DNS and have purchased a trusted SSL cert to run the site over HTTPS.
Here is the excerpt of code from the dev tools that contains the line causing the error, followed by the rest of my code:
/**
* Creates the XHR object and sends the request.
*
* @api private
*/
Request.prototype.create = function(){
var opts = { agent: this.agent, xdomain: this.xd, xscheme: this.xs, enablesXDR: this.enablesXDR };
// SSL options for Node.js client
opts.pfx = this.pfx;
opts.key = this.key;
opts.passphrase = this.passphrase;
opts.cert = this.cert;
opts.ca = this.ca;
opts.ciphers = this.ciphers;
opts.rejectUnauthorized = this.rejectUnauthorized;
var xhr = this.xhr = new XMLHttpRequest(opts);
var self = this;
try {
debug('xhr open %s: %s', this.method, this.uri);
xhr.open(this.method, this.uri, this.async);
if (this.supportsBinary) {
// This has to be done after open because Firefox is stupid
// https://stackoverflow.com/questions/13216903/get-binary-data-with-xmlhttprequest-in-a-firefox-extension
xhr.responseType = 'arraybuffer';
}
if ('POST' == this.method) {
try {
if (this.isBinary) {
xhr.setRequestHeader('Content-type', 'application/octet-stream');
} else {
xhr.setRequestHeader('Content-type', 'text/plain;charset=UTF-8');
}
} catch (e) {}
}
// ie6 check
if ('withCredentials' in xhr) {
xhr.withCredentials = true;
}
if (this.hasXDR()) {
xhr.onload = function(){
self.onLoad();
};
xhr.onerror = function(){
self.onError(xhr.responseText);
};
} else {
xhr.onreadystatechange = function(){
if (4 != xhr.readyState) return;
if (200 == xhr.status || 1223 == xhr.status) {
self.onLoad();
} else {
// make sure the `error` event handler that's user-set
// does not throw in the same tick and gets caught here
setTimeout(function(){
self.onError(xhr.status);
}, 0);
}
};
}
debug('xhr data %s', this.data);
xhr.send(this.data);
}
Here is the server.js file:
var fs = require('fs');
var hskey = fs.readFileSync('ssl/telemed_internal_server.key');
var hscert = fs.readFileSync('ssl/telemed_internal_cert.pem');
var ca = fs.readFileSync('ssl/telemed_internal_key.pem');
var credentials = {
ca: ca,
key: hskey,
cert: hscert
};
var static = require('node-static');
var https = require('https');
var util = require('util');
var file = new(static.Server)();
var app = https.createServer(credentials, function (req, res) {
file.serve(req, res);
}).listen(process.env.PORT || 80);
var io = require('socket.io').listen(app);
io.sockets.on('connection', function (socket){
// convenience function to log server messages on the client
function log(){
var array = [">>> Message from server: "];
for (var i = 0; i < arguments.length; i++) {
array.push(arguments[i]);
}
socket.emit('log', array);
}
// when receive sdp, broadcast sdp to other user
socket.on('sdp', function(data){
console.log('Received SDP from ' + socket.id);
socket.to(data.room).emit('sdp received', data.sdp);
});
// when receive ice candidate, broadcast sdp to other user
socket.on('ice candidate', function(data){
console.log('Received ICE candidate from ' + socket.id + ' ' + data.candidate);
socket.to(data.room).emit('ice candidate received', data.candidate);
});
socket.on('message', function (message) {
log('Got message:', message);
// for a real app, would be room only (not broadcast)
socket.broadcast.emit('message', message);
});
socket.on('create or join', function (room) {
// join room
var existingRoom = io.sockets.adapter.rooms[room];
var clients = [];
if(existingRoom){
clients = Object.keys(existingRoom);
}
if(clients.length == 0){
socket.join(room);
io.to(room).emit('empty', room);
}
else if(clients.length == 1){
socket.join(room);
socket.to(room).emit('joined', room, clients.length + 1);
}
// only allow 2 users max per room
else{
socket.emit('full', room);
}
});
socket.on('error', function(error){
console.error(error);
})
});
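One thing worth checking in this setup (an assumption, since the IIS bindings are not shown): the HTTPS server above falls back to port 80 when process.env.PORT is unset, while the page is requested at https://telemed.caduceususa.com on 443, and IIS is likely already bound to 80/443 on the same machine, which would fit the 404 on /socket.io/ and the protocol/timeout errors on other ports. A minimal sketch that binds the Node HTTPS server to its own port and points the client at it explicitly:
// Sketch (assumption: port 8443 is free and not claimed by IIS; adjust as needed):
var app = https.createServer(credentials, function (req, res) {
    file.serve(req, res);
}).listen(process.env.PORT || 8443);
var io = require('socket.io').listen(app);
// main.js would then point the signalling client at the same host and port:
// var serverIP = "https://telemed.caduceususa.com:8443/";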
Here is the main.js (config) file:
//my signalling server
var serverIP = "https://telemed.caduceususa.com/";
// RTCPeerConnection Options
var server = {
// Uses Google's STUN server
iceServers: [{
"url": "stun:stun4.l.google.com:19302"
},
{
url: 'turn:numb.viagenie.ca',
credential: 'muazkh',
username: 'webrtc@live.com'
}]
};
// various other development IPs
// var serverIP = "https://192.168.43.241:2013";
// var serverIP = "https://10.0.11.196:2013";
var localPeerConnection, signallingServer;
var btnSend = document.getElementById('btn-send');
var btnVideoStop = document.getElementById('btn-video-stop');
var btnVideoStart = document.getElementById('btn-video-start');
var btnVideoJoin = document.getElementById('btn-video-join');
var localVideo = document.getElementById('local-video');
var remoteVideo = document.getElementById('remote-video');
var inputRoomName = document.getElementById('room-name');
var localStream, localIsCaller;
btnVideoStop.onclick = function(e) {
e.preventDefault();
// stop video stream
if (localStream != null) {
localStream.stop();
}
// kill all connections
if (localPeerConnection != null) {
localPeerConnection.removeStream(localStream);
localPeerConnection.close();
signallingServer.close();
localVideo.src = "";
remoteVideo.src = "";
}
btnVideoStart.disabled = false;
btnVideoJoin.disabled = false;
btnVideoStop.disabled = true;
}
btnVideoStart.onclick = function(e) {
e.preventDefault();
// is starting the call
localIsCaller = true;
initConnection();
}
btnVideoJoin.onclick = function(e) {
e.preventDefault();
// just joining a call, not offering
localIsCaller = false;
initConnection();
}
function initConnection() {
var room = inputRoomName.value;
if (room == undefined || room.length <= 0) {
alert('Please enter room name');
return;
}
// start connection!
connect(room);
btnVideoStart.disabled = true;
btnVideoJoin.disabled = true;
btnVideoStop.disabled = false;
}
// WEBRTC STUFF STARTS HERE
// Set objects as most are currently prefixed
window.RTCPeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection ||
window.webkitRTCPeerConnection || window.msRTCPeerConnection;
window.RTCSessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription ||
window.webkitRTCSessionDescription || window.msRTCSessionDescription;
navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia ||
navigator.webkitGetUserMedia || navigator.msGetUserMedia;
window.SignallingServer = window.SignallingServer;
var sdpConstraints = {
optional: [],
mandatory: {
OfferToReceiveVideo: true,
}
}
function connect(room) {
// create peer connection
localPeerConnection = new RTCPeerConnection(server);
// create local data channel, send it to remote
navigator.getUserMedia({
video: true,
audio: true
}, function(stream) {
// get and save local stream
trace('Got stream, saving it now and starting RTC conn');
// must add before calling setRemoteDescription() because then
// it triggers 'addstream' event
localPeerConnection.addStream(stream);
localStream = stream;
// show local video
localVideo.src = window.URL.createObjectURL(stream);
// can start once have gotten local video
establishRTCConnection(room);
}, errorHandler)
}
function establishRTCConnection(room) {
// create signalling server
signallingServer = new SignallingServer(room, serverIP);
signallingServer.connect();
// a remote peer has joined room, initiate sdp exchange
signallingServer.onGuestJoined = function() {
trace('guest joined!')
// set local description and send to remote
localPeerConnection.createOffer(function(sessionDescription) {
trace('set local session desc with offer');
localPeerConnection.setLocalDescription(sessionDescription);
// send local sdp to remote
signallingServer.sendSDP(sessionDescription);
});
}
// got sdp from remote
signallingServer.onReceiveSdp = function(sdp) {
// get stream again
localPeerConnection.addStream(localStream);
trace(localStream)
// if local was the caller, set remote desc
if (localIsCaller) {
trace('is caller');
trace('set remote session desc with answer');
localPeerConnection.setRemoteDescription(new RTCSessionDescription(
sdp));
}
// if local is joining a call, set remote sdp and create answer
else {
trace('set remote session desc with offer');
localPeerConnection.setRemoteDescription(new RTCSessionDescription(
sdp), function() {
trace('make answer')
localPeerConnection.createAnswer(function(
sessionDescription) {
// set local description
trace('set local session desc with answer');
localPeerConnection.setLocalDescription(
sessionDescription);
// send local sdp to remote too
signallingServer.sendSDP(sessionDescription);
});
});
}
}
// when received ICE candidate
signallingServer.onReceiveICECandidate = function(candidate) {
trace('Set remote ice candidate');
localPeerConnection.addIceCandidate(new RTCIceCandidate(candidate));
}
// when room is full, alert user
signallingServer.onRoomFull = function(room) {
window.alert('Room "' + room +
'"" is full! Please join or create another room');
}
// get ice candidates and send them over
// wont get called unless SDP has been exchanged
localPeerConnection.onicecandidate = function(event) {
if (event.candidate) {
//!!! send ice candidate over via signalling channel
trace("Sending candidate");
signallingServer.sendICECandidate(event.candidate);
}
}
// when stream is added to connection, put it in video src
localPeerConnection.onaddstream = function(data) {
remoteVideo.src = window.URL.createObjectURL(data.stream);
}
}
function errorHandler(error) {
console.error('Something went wrong!');
console.error(error);
}
function trace(text) {
console.info(text);
}
Here is the signalling server:
function trace(text){
console.info(text);
}
// Connects to signalling server with given room and IP
// has methods to exchange SDP and ICE candidates
var SignallingServer = function(room, socketServer){
this.room = room;
this.socket = io.connect(socketServer);
this.socket.on('full', function (room){
trace('Room ' + room + ' is full');
this.onRoomFull(room);
}.bind(this));
this.socket.on('empty', function (room){
this.isInitiator = true;
trace('Room ' + room + ' is empty');
});
this.socket.on('join', function (room){
trace('Making request to join room ' + room);
});
this.socket.on('joined', function (room, numClients){
trace('New user has joined ' + room);
trace('Room has ' + numClients + ' clients');
//ask host to initiate sdp transfer
this.onGuestJoined();
}.bind(this));
this.socket.on('sdp received', function(sdp){
trace('Received SDP ');
trace(sdp);
this.onReceiveSdp(sdp);
}.bind(this));
this.socket.on('ice candidate received', function(candidate){
trace('Received ICE candidate ');
trace(candidate);
this.onReceiveICECandidate(candidate);
}.bind(this));
this.socket.on('log', function (array){
console.log.apply(console, array);
});
}
SignallingServer.prototype = {
connect: function(){
if (this.room !== '') {
trace('Joining room ' + this.room);
this.socket.emit('create or join', this.room);
}
},
close: function(){
trace('Disconnecting')
this.socket.disconnect();
},
sendSDP: function(sdp){
trace('sending sdp')
this.socket.emit('sdp', {
room: this.room,
sdp: sdp
});
},
sendICECandidate: function(candidate){
trace('sending ice candidate');
this.socket.emit('ice candidate', {
room: this.room,
candidate: candidate
});
},
onReceiveSdp: function(sdp){
trace('Placeholder function: Received SDP')
},
onGuestJoined: function(){
trace('Placeholder function: Guest joined room')
},
onReceiveICECandidate: function(candidate){
trace('Placeholder function: Received ICE candidate')
},
onRoomFull: function(room){
trace('Placeholder function: Room is full!');
}
}
window.SignallingServer = SignallingServer;
And finally, the HTML (can someone also explain what livereload.js is?):
<!doctype html>
<!--[if lt IE 7]>
<html class="no-js lt-ie9 lt-ie8 lt-ie7" lang="">
<![endif]-->
<!--[if IE 7]>
<html class="no-js lt-ie9 lt-ie8" lang="">
<![endif]-->
<!--[if IE 8]>
<html class="no-js lt-ie9" lang="">
<![endif]-->
<!--[if gt IE 8]>
<!-->
<html class="no-js" lang="">
<!--<![endif]-->
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title></title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="css/bootstrap.min.css">
<style>
body {
padding-top: 50px;
padding-bottom: 20px;
}
</style>
<link rel="stylesheet" href="css/bootstrap-theme.min.css">
<link rel="stylesheet" href="css/main.css">
<script src="js/vendor/modernizr-2.8.3-respond-1.4.2.min.js"></script>
</head>
<body>
<!--[if lt IE 8]>
<p class="browserupgrade">
You are using an <strong>outdated</strong>
browser. Please
upgrade your browser
to improve your experience.
</p>
<![endif]-->
<nav class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="#">WebRTC Video Chat</a>
</div>
<div id="navbar" class="navbar-collapse collapse">
<!-- chatroom name form -->
<form class="navbar-form navbar-right form-inline">
<div class="form-group">
<input class="form-control" type="text" id="room-name" placeholder="Room name"/>
</div>
<button class="btn btn-primary" id="btn-video-start">Start</button>
<button class="btn btn-default" id="btn-video-join">Join</button>
<button class="btn btn-default" disabled id="btn-video-stop">Stop</button>
</form>
</div>
<!--/.navbar-collapse --> </div>
</nav>
<div class="container main">
<div class="row videos">
<div class="remote-video">
<video width="280" height="250" autoplay id="remote-video"></video>
</div>
<div class="local-video">
<video width="280" height="250" autoplay id="local-video" muted></video>
</div>
</div>
</div>
</div>
<!-- /container -->
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.11.2/jquery.min.js"></script>
<script>window.jQuery || document.write('<script src="js/vendor/jquery-1.11.2.min.js"><\/script>')</script>
<script src="js/vendor/bootstrap.min.js"></script>
<script src="js/vendor/socket.io.js"></script>
<script src="js/main.js"></script>
<script src="js/signalling.js"></script>
<script src="//localhost:9010/livereload.js"></script>
</body>
</html>

Mod_pagespeed does not work for make_google_analytics_async

This is my code in the .htaccess file, and I am sure mod_pagespeed works:
<IfModule pagespeed_module>
ModPagespeed on
ModPagespeedEnableFilters make_google_analytics_async
.
.
.
</IfModule>
And in my HTML I added this JavaScript code (I got it here):
<html>
<head>
<script type='text/javascript'>
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
GLUE_SCRIPT
var ga = document.createElement('script');
ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' :
'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0];
s.parentNode.insertBefore(ga, s);
</script>
<script type="text/javascript">
try {
var pageTracker = _modpagespeed_getRewriteTracker("UA-63697801-1");
pageTracker._trackPageview();
} catch(err) {}
</script>
</head>
<body>
</body>
</html>
What am I doing wrong? This does not work, while the insert_ga filter does work; however, that one is not optimized.
You need to replace GLUE_SCRIPT with a function. From the docs:
where GLUE_SCRIPT is JavaScript that defines the _modpagespeed_getRewriteTracker function to return an object that maps all the methods of the synchronous API to the asynchronous API.
If you follow the example below the provided link, you can see a full implementation:
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
var _gaq = _gaq || [];
(function () {
function functionName(fn) {
var name = /\W*function\s+([\w\$]+)\(/.exec(fn);
if (!name)
return 'No name';
return name[1];
}
var nameSpace = '_gat';
var existingGat = window[nameSpace];
if (existingGat && typeof existingGat['_getTracker'] == 'function') {
return;
}
var gaqAccounts = [];
function setAccount(acct, prefix) {
if (gaqAccounts[prefix] != acct) {
gaqAccounts[prefix] = acct;
_gaq.push([prefix + '_setAccount', acct]);
}
}
window['_modpagespeed_getRewriteTracker'] = function (tracker_acct,
tracker_name) {
var prefix = tracker_name ? tracker_name + '.' : '';
function deferTrackerFunc(fn) {
return function () {
setAccount(tracker_acct, prefix);
var pushArgs = [fn];
[].push.apply(pushArgs, arguments);
_gaq.push(pushArgs);
};
}
var pageTrackerMethodNames = [
'_trackPageview',
'_trackEvent',
'_trackTrans',
'_addIgnoredOrganic',
'_addIgnoredRef',
'_addItem',
'_addOrganic',
'_addTrans',
'_clearIgnoredOrganic',
'_clearIgnoredRef',
'_clearOrganic',
'_clearXKey',
'_clearXValue',
'_cookiePathCopy',
'_deleteCustomVar',
'_link',
'_linkByPost',
'_sendXEvent',
'_setAllowAnchor',
'_setAllowHash',
'_setAllowLinker',
'_setAutoTrackOutbound',
'_setCampCIdKey',
'_setCampContentKey',
'_setCampIdKey',
'_setCampMediumKey',
'_setCampNOKey',
'_setCampNameKey',
'_setCampSourceKey',
'_setCampTermKey',
'_setCampaignCookieTimeout',
'_setCampaignTrack',
'_setClientInfo',
'_setCookiePath',
'_setCookiePersistence',
'_setCookieTimeout',
'_setCustomVar',
'_setDetectFlash',
'_setDetectTitle',
'_setDomainName',
'_setHrefExamineLimit',
'_setLocalGifPath',
'_setLocalRemoteServerMode',
'_setLocalServerMode',
'_setMaxCustomVariables',
'_setNamespace',
'_setReferrerOverride',
'_setRemoteServerMode',
'_setSampleRate',
'_setSessionCookieTimeout',
'_setSessionTimeout',
'_setTrackOutboundSubdomains',
'_setTrans',
'_setTransactionDelim',
'_setVar',
'_setVisitorCookieTimeout',
'_setXKey',
'_setXValue'
];
var pageTracker = {
_initData: function () {
},
};
for (var i = pageTrackerMethodNames.length; i--;) {
var n = pageTrackerMethodNames[i];
pageTracker[n] = deferTrackerFunc(prefix + n);
}
return pageTracker;
};
var ga = document.createElement('script');
ga.type = 'text/javascript';
ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' :
'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0];
s.parentNode.insertBefore(ga, s);
})();
try {
var pageTracker = _modpagespeed_getRewriteTracker("UA-xxxx-9");
pageTracker._trackPageview();
} catch (err) {
}
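For reference, here is what the glue script above does at runtime with the tracking ID from the question (a usage sketch, not part of the documented example): each call on the returned tracker object is queued on the asynchronous _gaq array instead of executing synchronously.
var pageTracker = _modpagespeed_getRewriteTracker('UA-63697801-1');
pageTracker._trackPageview();
// With no tracker name the prefix is '', so _gaq now contains:
//   ['_setAccount', 'UA-63697801-1'], ['_trackPageview']
// ga.js processes these entries asynchronously once it has loaded.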
